_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
aa4e8726033ce20e69a2e7a5e598e8bb25c4f68b93bd86dac2ecf9fea7c4fb52 | samrushing/irken-compiler | t_match_record.scm |
(include "lib/core.scm")
(define thing
{a=x b=2} -> x
{a=3 b=y} -> y
{a=m b=n} -> (+ m n)
)
;; =>
;; (define (thing r)
;; (match r.a r.b with
;; x 2 -> x
;; 3 y -> y
;; ))
(printn (thing {a=3 b=1}))
(printn (thing {a=3 b=2}))
(printn (thing {a=4 b=5}))
| null | https://raw.githubusercontent.com/samrushing/irken-compiler/690da48852d55497f873738df54f14e8e135d006/tests/t_match_record.scm | scheme | =>
(define (thing r)
(match r.a r.b with
x 2 -> x
3 y -> y
)) |
(include "lib/core.scm")
(define thing
{a=x b=2} -> x
{a=3 b=y} -> y
{a=m b=n} -> (+ m n)
)
(printn (thing {a=3 b=1}))
(printn (thing {a=3 b=2}))
(printn (thing {a=4 b=5}))
|
6d7b12d212268fc7d151b297e30bcf4a9b293145dbbf34eed0b0fcfe07d6847e | regex-generate/regenerate | regex.ml | type 'a cset = 'a list
type 'a t
= One
| Set of bool * 'a cset
| Seq of 'a t * 'a t
| Or of 'a t * 'a t
| And of 'a t * 'a t
| Not of 'a t
| Rep of int * int option * 'a t
(** Smart constructors *)
let epsilon = One
let void = Set (true, [])
let atom c = Set (true, [c])
let char c = atom c
let charset cs = Set (true, cs)
let complset cs = Set (false, cs)
let enumerate c1 c2 =
if c1 > c2 then None
else
let rec aux i m =
if i > m then []
else Char.chr i :: aux (i+1) m
in
Some (aux (Char.code c1) (Char.code c2))
let rec reduce init f = function
| [] -> init
| [x] -> x
| x :: l -> f x (reduce init f l)
let seq l = reduce One (fun x y -> Seq (x,y)) l
let alt x y = Or (x,y)
let inter x y = And (x,y)
let compl x = Not x
let rep i j x = Rep (i, j, x)
let star x = rep 0 None x
let plus x = rep 1 None x
let opt x = rep 0 (Some 1) x
(** QCheck utilities *)
let rec size = function
| One -> 1
| Set _ -> 1
| Rep (_,_,a)
| Not a -> size a + 1
| Or (a,b)
| And (a,b)
| Seq (a,b) -> size a + size b + 1
let prio = function
| And (_,_) -> 1
| Or (_,_) -> 2
| Seq (_,_) -> 3
| Not _ -> 4
| Rep (_,_,_) -> -1
| One
| Set _ -> 6
let rec pp ?(epsilon=true) ppalpha fmt x =
let f fmt y =
if prio y < prio x || prio y = -1
then Fmt.parens (pp ~epsilon ppalpha) fmt y
else pp ~epsilon ppalpha fmt y
in
match x with
| One -> Fmt.pf fmt (if epsilon then "ε" else "")
| Set (true,[x]) -> Fmt.pf fmt "%a" ppalpha x
| Set (b,l) -> Fmt.pf fmt "[%s%a]"
(if b then "" else "^") (Fmt.list ~sep:Fmt.nop ppalpha) l
| Seq (a,b) -> Fmt.pf fmt "%a%a" f a f b
| Or (a,b) -> Fmt.pf fmt "%a|%a" f a f b
| And (a,b) -> Fmt.pf fmt "%a&%a" f a f b
| Not a -> Fmt.pf fmt "~%a" f a
| Rep (0,None,a) -> Fmt.pf fmt "%a*" f a
| Rep (1,None,a) -> Fmt.pf fmt "%a+" f a
| Rep (i,None,a) -> Fmt.pf fmt "%a{%i,}" f a i
| Rep (i,Some j,a) when i = j -> Fmt.pf fmt "%a{%i}" f a i
| Rep (i,Some j,a) -> Fmt.pf fmt "%a{%i,%i}" f a i j
let gen ~compl:with_compl alphabet =
let open QCheck.Gen in
let opt a = frequency [ 1, pure None ; 1, map CCOpt.return a] in
let proba_compl = if with_compl then 3 else 0 in
let gatom = alphabet >|= atom in
let gset =
bool >>= fun b ->
map
(fun l -> Set (b, CCList.uniq ~eq:(=) l))
(list_size (1 -- 10) alphabet)
in
let gbase = frequency [
1 , pure void ;
1, pure epsilon ;
8, gatom ;
5, gset ;
] in
let rec gen nbRep n st =
if n <= 1 then gbase st else
frequency [
1, gbase ;
proba_compl, gcompl nbRep n ;
3, gbin nbRep n alt ;
2, gbin nbRep n inter ;
5, gbin nbRep n (fun x y -> Seq (x,y)) ;
nbRep * 2, grep nbRep n ;
] st
and grep nbRep n =
int_bound 3 >>= fun i ->
opt (int_range i 5) >>= fun j ->
gen (nbRep - 1) (n-1) >|= fun a ->
rep i j a
and gcompl nbRep n = gen nbRep (n-1) >|= compl
and gbin nbRep n f =
gen nbRep ((n-1)/2) >>= fun a ->
gen nbRep ((n-1)/2) >|= fun b ->
f a b
in
sized_size (int_range 2 20) (gen 2)
| null | https://raw.githubusercontent.com/regex-generate/regenerate/a616d6c2faf4a55f794ac9b6fbf03acca91fbeb9/lib/regex.ml | ocaml | * Smart constructors
* QCheck utilities | type 'a cset = 'a list
type 'a t
= One
| Set of bool * 'a cset
| Seq of 'a t * 'a t
| Or of 'a t * 'a t
| And of 'a t * 'a t
| Not of 'a t
| Rep of int * int option * 'a t
let epsilon = One
let void = Set (true, [])
let atom c = Set (true, [c])
let char c = atom c
let charset cs = Set (true, cs)
let complset cs = Set (false, cs)
let enumerate c1 c2 =
if c1 > c2 then None
else
let rec aux i m =
if i > m then []
else Char.chr i :: aux (i+1) m
in
Some (aux (Char.code c1) (Char.code c2))
let rec reduce init f = function
| [] -> init
| [x] -> x
| x :: l -> f x (reduce init f l)
let seq l = reduce One (fun x y -> Seq (x,y)) l
let alt x y = Or (x,y)
let inter x y = And (x,y)
let compl x = Not x
let rep i j x = Rep (i, j, x)
let star x = rep 0 None x
let plus x = rep 1 None x
let opt x = rep 0 (Some 1) x
let rec size = function
| One -> 1
| Set _ -> 1
| Rep (_,_,a)
| Not a -> size a + 1
| Or (a,b)
| And (a,b)
| Seq (a,b) -> size a + size b + 1
let prio = function
| And (_,_) -> 1
| Or (_,_) -> 2
| Seq (_,_) -> 3
| Not _ -> 4
| Rep (_,_,_) -> -1
| One
| Set _ -> 6
let rec pp ?(epsilon=true) ppalpha fmt x =
let f fmt y =
if prio y < prio x || prio y = -1
then Fmt.parens (pp ~epsilon ppalpha) fmt y
else pp ~epsilon ppalpha fmt y
in
match x with
| One -> Fmt.pf fmt (if epsilon then "ε" else "")
| Set (true,[x]) -> Fmt.pf fmt "%a" ppalpha x
| Set (b,l) -> Fmt.pf fmt "[%s%a]"
(if b then "" else "^") (Fmt.list ~sep:Fmt.nop ppalpha) l
| Seq (a,b) -> Fmt.pf fmt "%a%a" f a f b
| Or (a,b) -> Fmt.pf fmt "%a|%a" f a f b
| And (a,b) -> Fmt.pf fmt "%a&%a" f a f b
| Not a -> Fmt.pf fmt "~%a" f a
| Rep (0,None,a) -> Fmt.pf fmt "%a*" f a
| Rep (1,None,a) -> Fmt.pf fmt "%a+" f a
| Rep (i,None,a) -> Fmt.pf fmt "%a{%i,}" f a i
| Rep (i,Some j,a) when i = j -> Fmt.pf fmt "%a{%i}" f a i
| Rep (i,Some j,a) -> Fmt.pf fmt "%a{%i,%i}" f a i j
let gen ~compl:with_compl alphabet =
let open QCheck.Gen in
let opt a = frequency [ 1, pure None ; 1, map CCOpt.return a] in
let proba_compl = if with_compl then 3 else 0 in
let gatom = alphabet >|= atom in
let gset =
bool >>= fun b ->
map
(fun l -> Set (b, CCList.uniq ~eq:(=) l))
(list_size (1 -- 10) alphabet)
in
let gbase = frequency [
1 , pure void ;
1, pure epsilon ;
8, gatom ;
5, gset ;
] in
let rec gen nbRep n st =
if n <= 1 then gbase st else
frequency [
1, gbase ;
proba_compl, gcompl nbRep n ;
3, gbin nbRep n alt ;
2, gbin nbRep n inter ;
5, gbin nbRep n (fun x y -> Seq (x,y)) ;
nbRep * 2, grep nbRep n ;
] st
and grep nbRep n =
int_bound 3 >>= fun i ->
opt (int_range i 5) >>= fun j ->
gen (nbRep - 1) (n-1) >|= fun a ->
rep i j a
and gcompl nbRep n = gen nbRep (n-1) >|= compl
and gbin nbRep n f =
gen nbRep ((n-1)/2) >>= fun a ->
gen nbRep ((n-1)/2) >|= fun b ->
f a b
in
sized_size (int_range 2 20) (gen 2)
|
a50b3cbc2488dad7a2610aea24c5b3b2a32cabc7ea6cf21efec9c58cf88a6d6e | ghc/ghc | Env.hs | ( c ) The University of Glasgow 2002 - 2006
{-# LANGUAGE RankNTypes #-}
module GHC.Iface.Env (
newGlobalBinder, newInteractiveBinder,
externaliseName,
lookupIfaceTop,
lookupOrig, lookupNameCache, lookupOrigNameCache,
newIfaceName, newIfaceNames,
extendIfaceIdEnv, extendIfaceTyVarEnv,
tcIfaceLclId, tcIfaceTyVar, lookupIfaceVar,
lookupIfaceTyVar, extendIfaceEnvs,
setNameModule,
ifaceExportNames,
trace_if, trace_hi_diffs,
-- Name-cache stuff
allocateGlobalBinder,
) where
import GHC.Prelude
import GHC.Driver.Env
import GHC.Driver.Session
import GHC.Tc.Utils.Monad
import GHC.Core.Type
import GHC.Iface.Type
import GHC.Runtime.Context
import GHC.Unit.Module
import GHC.Unit.Module.ModIface
import GHC.Data.FastString
import GHC.Data.FastString.Env
import GHC.Types.Var
import GHC.Types.Name
import GHC.Types.Avail
import GHC.Types.Name.Cache
import GHC.Types.Unique.Supply
import GHC.Types.SrcLoc
import GHC.Utils.Outputable
import GHC.Utils.Error
import GHC.Utils.Logger
import Data.List ( partition )
import Control.Monad
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
Allocating new Names in the Name Cache
* *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
See Also : Note [ The Name Cache ] in GHC.Types . Name . Cache
*********************************************************
* *
Allocating new Names in the Name Cache
* *
*********************************************************
See Also: Note [The Name Cache] in GHC.Types.Name.Cache
-}
newGlobalBinder :: Module -> OccName -> SrcSpan -> TcRnIf a b Name
-- Used for source code and interface files, to make the
-- Name for a thing, given its Module and OccName
See Note [ The Name Cache ] in GHC.Types . Name . Cache
--
-- The cache may already have a binding for this thing,
-- because we may have seen an occurrence before, but now is the
-- moment when we know its Module and SrcLoc in their full glory
newGlobalBinder mod occ loc
= do { hsc_env <- getTopEnv
; name <- liftIO $ allocateGlobalBinder (hsc_NC hsc_env) mod occ loc
; traceIf (text "newGlobalBinder" <+>
(vcat [ ppr mod <+> ppr occ <+> ppr loc, ppr name]))
; return name }
newInteractiveBinder :: HscEnv -> OccName -> SrcSpan -> IO Name
Works in the IO monad , and gets the Module
-- from the interactive context
newInteractiveBinder hsc_env occ loc = do
let mod = icInteractiveModule (hsc_IC hsc_env)
allocateGlobalBinder (hsc_NC hsc_env) mod occ loc
allocateGlobalBinder
:: NameCache
-> Module -> OccName -> SrcSpan
-> IO Name
See Note [ The Name Cache ] in GHC.Types . Name . Cache
allocateGlobalBinder nc mod occ loc
= updateNameCache nc mod occ $ \cache0 -> do
case lookupOrigNameCache cache0 mod occ of
-- A hit in the cache! We are at the binding site of the name.
This is the moment when we know the SrcLoc
-- of the Name, so we set this field in the Name we return.
--
-- Then (bogus) multiple bindings of the same Name
get different SrcLocs can be reported as such .
--
-- Possible other reason: it might be in the cache because we
-- encountered an occurrence before the binding site for an
implicitly - imported Name . Perhaps the current SrcLoc is
-- better... but not really: it'll still just say 'imported'
--
-- IMPORTANT: Don't mess with wired-in names.
Their wired - in - ness is in their NameSort
-- and their Module is correct.
Just name | isWiredInName name
-> pure (cache0, name)
| otherwise
-> pure (new_cache, name')
where
uniq = nameUnique name
name' = mkExternalName uniq mod occ loc
name ' is like name , but with the right SrcSpan
new_cache = extendOrigNameCache cache0 mod occ name'
-- Miss in the cache!
-- Build a completely new Name, and put it in the cache
_ -> do
uniq <- takeUniqFromNameCache nc
let name = mkExternalName uniq mod occ loc
let new_cache = extendOrigNameCache cache0 mod occ name
pure (new_cache, name)
ifaceExportNames :: [IfaceExport] -> TcRnIf gbl lcl [AvailInfo]
ifaceExportNames exports = return exports
{-
************************************************************************
* *
Name cache access
* *
************************************************************************
-}
-- | Look up the 'Name' for a given 'Module' and 'OccName'.
Consider alternatively using ' lookupIfaceTop ' if you 're in the ' IfL ' monad
and ' Module ' is simply that of the ' ModIface ' you are typechecking .
lookupOrig :: Module -> OccName -> TcRnIf a b Name
lookupOrig mod occ = do
hsc_env <- getTopEnv
traceIf (text "lookup_orig" <+> ppr mod <+> ppr occ)
liftIO $ lookupNameCache (hsc_NC hsc_env) mod occ
lookupNameCache :: NameCache -> Module -> OccName -> IO Name
Lookup up the ( Module , OccName ) in the NameCache
-- If you find it, return it; if not, allocate a fresh original name and extend
the NameCache .
Reason : this may the first occurrence of ( say ) Foo.bar we have encountered .
If we need to explore its value we will load Foo.hi ; but meanwhile all we
-- need is a Name for it.
lookupNameCache nc mod occ = updateNameCache nc mod occ $ \cache0 ->
case lookupOrigNameCache cache0 mod occ of
Just name -> pure (cache0, name)
Nothing -> do
uniq <- takeUniqFromNameCache nc
let name = mkExternalName uniq mod occ noSrcSpan
let new_cache = extendOrigNameCache cache0 mod occ name
pure (new_cache, name)
externaliseName :: Module -> Name -> TcRnIf m n Name
-- Take an Internal Name and make it an External one,
-- with the same unique
externaliseName mod name
= do { let occ = nameOccName name
loc = nameSrcSpan name
uniq = nameUnique name
c.f . seq in newGlobalBinder
; hsc_env <- getTopEnv
; liftIO $ updateNameCache (hsc_NC hsc_env) mod occ $ \cache -> do
let name' = mkExternalName uniq mod occ loc
cache' = extendOrigNameCache cache mod occ name'
pure (cache', name') }
-- | Set the 'Module' of a 'Name'.
setNameModule :: Maybe Module -> Name -> TcRnIf m n Name
setNameModule Nothing n = return n
setNameModule (Just m) n =
newGlobalBinder m (nameOccName n) (nameSrcSpan n)
{-
************************************************************************
* *
Type variables and local Ids
* *
************************************************************************
-}
tcIfaceLclId :: FastString -> IfL Id
tcIfaceLclId occ
= do { lcl <- getLclEnv
; case (lookupFsEnv (if_id_env lcl) occ) of
Just ty_var -> return ty_var
Nothing -> failIfM (text "Iface id out of scope: " <+> ppr occ)
}
extendIfaceIdEnv :: [Id] -> IfL a -> IfL a
extendIfaceIdEnv ids
= updLclEnv $ \env ->
let { id_env' = extendFsEnvList (if_id_env env) pairs
; pairs = [(occNameFS (getOccName id), id) | id <- ids] }
in env { if_id_env = id_env' }
tcIfaceTyVar :: FastString -> IfL TyVar
tcIfaceTyVar occ
= do { lcl <- getLclEnv
; case (lookupFsEnv (if_tv_env lcl) occ) of
Just ty_var -> return ty_var
Nothing -> failIfM (text "Iface type variable out of scope: " <+> ppr occ)
}
lookupIfaceTyVar :: IfaceTvBndr -> IfL (Maybe TyVar)
lookupIfaceTyVar (occ, _)
= do { lcl <- getLclEnv
; return (lookupFsEnv (if_tv_env lcl) occ) }
lookupIfaceVar :: IfaceBndr -> IfL (Maybe TyCoVar)
lookupIfaceVar (IfaceIdBndr (_, occ, _))
= do { lcl <- getLclEnv
; return (lookupFsEnv (if_id_env lcl) occ) }
lookupIfaceVar (IfaceTvBndr (occ, _))
= do { lcl <- getLclEnv
; return (lookupFsEnv (if_tv_env lcl) occ) }
extendIfaceTyVarEnv :: [TyVar] -> IfL a -> IfL a
extendIfaceTyVarEnv tyvars
= updLclEnv $ \env ->
let { tv_env' = extendFsEnvList (if_tv_env env) pairs
; pairs = [(occNameFS (getOccName tv), tv) | tv <- tyvars] }
in env { if_tv_env = tv_env' }
extendIfaceEnvs :: [TyCoVar] -> IfL a -> IfL a
extendIfaceEnvs tcvs thing_inside
= extendIfaceTyVarEnv tvs $
extendIfaceIdEnv cvs $
thing_inside
where
(tvs, cvs) = partition isTyVar tcvs
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
Getting from RdrNames to Names
* *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
************************************************************************
* *
Getting from RdrNames to Names
* *
************************************************************************
-}
| Look up a top - level name from the current Iface module
lookupIfaceTop :: OccName -> IfL Name
lookupIfaceTop occ
= do { env <- getLclEnv; lookupOrig (if_mod env) occ }
newIfaceName :: OccName -> IfL Name
newIfaceName occ
= do { uniq <- newUnique
; return $! mkInternalName uniq occ noSrcSpan }
newIfaceNames :: [OccName] -> IfL [Name]
newIfaceNames occs
= do { uniqs <- getUniquesM
; return [ mkInternalName uniq occ noSrcSpan
| (occ,uniq) <- occs `zip` uniqs] }
trace_if :: Logger -> SDoc -> IO ()
# INLINE trace_if #
trace_if logger doc = when (logHasDumpFlag logger Opt_D_dump_if_trace) $ putMsg logger doc
trace_hi_diffs :: Logger -> SDoc -> IO ()
# INLINE trace_hi_diffs #
trace_hi_diffs logger doc = when (logHasDumpFlag logger Opt_D_dump_hi_diffs) $ putMsg logger doc
| null | https://raw.githubusercontent.com/ghc/ghc/3c0f0c6d99486502c72e6514a40e7264baaa6afc/compiler/GHC/Iface/Env.hs | haskell | # LANGUAGE RankNTypes #
Name-cache stuff
Used for source code and interface files, to make the
Name for a thing, given its Module and OccName
The cache may already have a binding for this thing,
because we may have seen an occurrence before, but now is the
moment when we know its Module and SrcLoc in their full glory
from the interactive context
A hit in the cache! We are at the binding site of the name.
of the Name, so we set this field in the Name we return.
Then (bogus) multiple bindings of the same Name
Possible other reason: it might be in the cache because we
encountered an occurrence before the binding site for an
better... but not really: it'll still just say 'imported'
IMPORTANT: Don't mess with wired-in names.
and their Module is correct.
Miss in the cache!
Build a completely new Name, and put it in the cache
************************************************************************
* *
Name cache access
* *
************************************************************************
| Look up the 'Name' for a given 'Module' and 'OccName'.
If you find it, return it; if not, allocate a fresh original name and extend
need is a Name for it.
Take an Internal Name and make it an External one,
with the same unique
| Set the 'Module' of a 'Name'.
************************************************************************
* *
Type variables and local Ids
* *
************************************************************************
| ( c ) The University of Glasgow 2002 - 2006
module GHC.Iface.Env (
newGlobalBinder, newInteractiveBinder,
externaliseName,
lookupIfaceTop,
lookupOrig, lookupNameCache, lookupOrigNameCache,
newIfaceName, newIfaceNames,
extendIfaceIdEnv, extendIfaceTyVarEnv,
tcIfaceLclId, tcIfaceTyVar, lookupIfaceVar,
lookupIfaceTyVar, extendIfaceEnvs,
setNameModule,
ifaceExportNames,
trace_if, trace_hi_diffs,
allocateGlobalBinder,
) where
import GHC.Prelude
import GHC.Driver.Env
import GHC.Driver.Session
import GHC.Tc.Utils.Monad
import GHC.Core.Type
import GHC.Iface.Type
import GHC.Runtime.Context
import GHC.Unit.Module
import GHC.Unit.Module.ModIface
import GHC.Data.FastString
import GHC.Data.FastString.Env
import GHC.Types.Var
import GHC.Types.Name
import GHC.Types.Avail
import GHC.Types.Name.Cache
import GHC.Types.Unique.Supply
import GHC.Types.SrcLoc
import GHC.Utils.Outputable
import GHC.Utils.Error
import GHC.Utils.Logger
import Data.List ( partition )
import Control.Monad
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
Allocating new Names in the Name Cache
* *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
See Also : Note [ The Name Cache ] in GHC.Types . Name . Cache
*********************************************************
* *
Allocating new Names in the Name Cache
* *
*********************************************************
See Also: Note [The Name Cache] in GHC.Types.Name.Cache
-}
newGlobalBinder :: Module -> OccName -> SrcSpan -> TcRnIf a b Name
See Note [ The Name Cache ] in GHC.Types . Name . Cache
newGlobalBinder mod occ loc
= do { hsc_env <- getTopEnv
; name <- liftIO $ allocateGlobalBinder (hsc_NC hsc_env) mod occ loc
; traceIf (text "newGlobalBinder" <+>
(vcat [ ppr mod <+> ppr occ <+> ppr loc, ppr name]))
; return name }
newInteractiveBinder :: HscEnv -> OccName -> SrcSpan -> IO Name
Works in the IO monad , and gets the Module
newInteractiveBinder hsc_env occ loc = do
let mod = icInteractiveModule (hsc_IC hsc_env)
allocateGlobalBinder (hsc_NC hsc_env) mod occ loc
allocateGlobalBinder
:: NameCache
-> Module -> OccName -> SrcSpan
-> IO Name
See Note [ The Name Cache ] in GHC.Types . Name . Cache
allocateGlobalBinder nc mod occ loc
= updateNameCache nc mod occ $ \cache0 -> do
case lookupOrigNameCache cache0 mod occ of
This is the moment when we know the SrcLoc
get different SrcLocs can be reported as such .
implicitly - imported Name . Perhaps the current SrcLoc is
Their wired - in - ness is in their NameSort
Just name | isWiredInName name
-> pure (cache0, name)
| otherwise
-> pure (new_cache, name')
where
uniq = nameUnique name
name' = mkExternalName uniq mod occ loc
name ' is like name , but with the right SrcSpan
new_cache = extendOrigNameCache cache0 mod occ name'
_ -> do
uniq <- takeUniqFromNameCache nc
let name = mkExternalName uniq mod occ loc
let new_cache = extendOrigNameCache cache0 mod occ name
pure (new_cache, name)
ifaceExportNames :: [IfaceExport] -> TcRnIf gbl lcl [AvailInfo]
ifaceExportNames exports = return exports
Consider alternatively using ' lookupIfaceTop ' if you 're in the ' IfL ' monad
and ' Module ' is simply that of the ' ModIface ' you are typechecking .
lookupOrig :: Module -> OccName -> TcRnIf a b Name
lookupOrig mod occ = do
hsc_env <- getTopEnv
traceIf (text "lookup_orig" <+> ppr mod <+> ppr occ)
liftIO $ lookupNameCache (hsc_NC hsc_env) mod occ
lookupNameCache :: NameCache -> Module -> OccName -> IO Name
Lookup up the ( Module , OccName ) in the NameCache
the NameCache .
Reason : this may the first occurrence of ( say ) Foo.bar we have encountered .
If we need to explore its value we will load Foo.hi ; but meanwhile all we
lookupNameCache nc mod occ = updateNameCache nc mod occ $ \cache0 ->
case lookupOrigNameCache cache0 mod occ of
Just name -> pure (cache0, name)
Nothing -> do
uniq <- takeUniqFromNameCache nc
let name = mkExternalName uniq mod occ noSrcSpan
let new_cache = extendOrigNameCache cache0 mod occ name
pure (new_cache, name)
externaliseName :: Module -> Name -> TcRnIf m n Name
externaliseName mod name
= do { let occ = nameOccName name
loc = nameSrcSpan name
uniq = nameUnique name
c.f . seq in newGlobalBinder
; hsc_env <- getTopEnv
; liftIO $ updateNameCache (hsc_NC hsc_env) mod occ $ \cache -> do
let name' = mkExternalName uniq mod occ loc
cache' = extendOrigNameCache cache mod occ name'
pure (cache', name') }
setNameModule :: Maybe Module -> Name -> TcRnIf m n Name
setNameModule Nothing n = return n
setNameModule (Just m) n =
newGlobalBinder m (nameOccName n) (nameSrcSpan n)
tcIfaceLclId :: FastString -> IfL Id
tcIfaceLclId occ
= do { lcl <- getLclEnv
; case (lookupFsEnv (if_id_env lcl) occ) of
Just ty_var -> return ty_var
Nothing -> failIfM (text "Iface id out of scope: " <+> ppr occ)
}
extendIfaceIdEnv :: [Id] -> IfL a -> IfL a
extendIfaceIdEnv ids
= updLclEnv $ \env ->
let { id_env' = extendFsEnvList (if_id_env env) pairs
; pairs = [(occNameFS (getOccName id), id) | id <- ids] }
in env { if_id_env = id_env' }
tcIfaceTyVar :: FastString -> IfL TyVar
tcIfaceTyVar occ
= do { lcl <- getLclEnv
; case (lookupFsEnv (if_tv_env lcl) occ) of
Just ty_var -> return ty_var
Nothing -> failIfM (text "Iface type variable out of scope: " <+> ppr occ)
}
lookupIfaceTyVar :: IfaceTvBndr -> IfL (Maybe TyVar)
lookupIfaceTyVar (occ, _)
= do { lcl <- getLclEnv
; return (lookupFsEnv (if_tv_env lcl) occ) }
lookupIfaceVar :: IfaceBndr -> IfL (Maybe TyCoVar)
lookupIfaceVar (IfaceIdBndr (_, occ, _))
= do { lcl <- getLclEnv
; return (lookupFsEnv (if_id_env lcl) occ) }
lookupIfaceVar (IfaceTvBndr (occ, _))
= do { lcl <- getLclEnv
; return (lookupFsEnv (if_tv_env lcl) occ) }
extendIfaceTyVarEnv :: [TyVar] -> IfL a -> IfL a
extendIfaceTyVarEnv tyvars
= updLclEnv $ \env ->
let { tv_env' = extendFsEnvList (if_tv_env env) pairs
; pairs = [(occNameFS (getOccName tv), tv) | tv <- tyvars] }
in env { if_tv_env = tv_env' }
extendIfaceEnvs :: [TyCoVar] -> IfL a -> IfL a
extendIfaceEnvs tcvs thing_inside
= extendIfaceTyVarEnv tvs $
extendIfaceIdEnv cvs $
thing_inside
where
(tvs, cvs) = partition isTyVar tcvs
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
Getting from RdrNames to Names
* *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
************************************************************************
* *
Getting from RdrNames to Names
* *
************************************************************************
-}
| Look up a top - level name from the current Iface module
lookupIfaceTop :: OccName -> IfL Name
lookupIfaceTop occ
= do { env <- getLclEnv; lookupOrig (if_mod env) occ }
newIfaceName :: OccName -> IfL Name
newIfaceName occ
= do { uniq <- newUnique
; return $! mkInternalName uniq occ noSrcSpan }
newIfaceNames :: [OccName] -> IfL [Name]
newIfaceNames occs
= do { uniqs <- getUniquesM
; return [ mkInternalName uniq occ noSrcSpan
| (occ,uniq) <- occs `zip` uniqs] }
trace_if :: Logger -> SDoc -> IO ()
# INLINE trace_if #
trace_if logger doc = when (logHasDumpFlag logger Opt_D_dump_if_trace) $ putMsg logger doc
trace_hi_diffs :: Logger -> SDoc -> IO ()
# INLINE trace_hi_diffs #
trace_hi_diffs logger doc = when (logHasDumpFlag logger Opt_D_dump_hi_diffs) $ putMsg logger doc
|
724dbb44fc51df5ba268e83710f6e6c73ffbc8d2290a041ede494bcba1e7db5a | adityavkk/N-Body-Simulations | Spec.hs | import Test.Hspec
main :: IO ()
main = hspec test
test = describe "testing" $
it "tests" $
1 `shouldBe` 1
| null | https://raw.githubusercontent.com/adityavkk/N-Body-Simulations/23e379e513b3254cd7144408fe132a5280ff9ce6/Barnes-Hut/test/Spec.hs | haskell | import Test.Hspec
main :: IO ()
main = hspec test
test = describe "testing" $
it "tests" $
1 `shouldBe` 1
| |
6d5737884ceadeec9c2245335677861b80e8d08b526267f429808be14206df36 | prg-titech/Kani-CUDA | while.rkt | #lang rosette
;; T x; => (define x (new-vec T))
- NOTE : T x = e ; を ( define x e ) と書いてはならない . vector が共有される可能性がある.
代わりに ( begin ( define x ( new - vec T ) ) ( vec - set ! x e ) ) と書く.
x = e ; = > ( vec - set ! x e )
;; threadIdx.x => (tid)
;; arr[ix]; => (array-ref arr ix)
;; arr[ix] = e => array-set!
;; if, while -> if-, while
;; syncthreads -> barrier
(provide
;; syntax
if- while : = :=
;; arithmetic/Boolean operators
/LS is for avoiding naming conflicts
+/LS eq?/LS !/LS &&/LS </LS >/LS quotient/LS
;; thread ID
ntid tid
;; vector/array
new-vec scalar->vec new-sh-array vec-set! array-ref! array-set! make-element element-content array array-contents
;; barrier
barrier
;; kernel invocation
invoke-kernel
;; real type
int
)
;; preambles for executing the program in pure racket
;; (define ! not)
;; (define (&& x y) (and x y))
;; (define-syntax (define-symbolic* stx)
( syntax - case stx ( )
;; [(_ var type) #'(define var 0)]))
;; number of threads and mask
;; before running the kernel, ntid need to be specified with a number:
;; (parameterize ([ntid ?]) (kernel))
(define ntid (make-parameter 16))
;; mask is only internally used
(define mask (make-parameter (make-vector (ntid) #t)))
; element of array
(struct element
([content #:mutable] ;int or boolean
[read #:mutable] ;int or boolean
[write #:mutable]) ;int or boolean
#:property prop:procedure
(lambda (elem) (element-content elem)))
; make new element
(define (make-element e)
(element e #f #f))
; array
; type check?
(struct array
([contents #:mutable])
; #:property prop:procedure
; (lambda (arr) (array-contents arr))
#:property prop:procedure
(lambda (arr idx)
(array-ref! (array-contents arr) idx)))
;; convert a scalar value to a vector value
(define (vecfy x)
;(printf "vecfy x = ~a\n" x)
(cond [(or (integer? x) (boolean? x)) (make-vector (ntid) x)]
[(vector? x) x]
[else (raise "vecfy: expected an integer/boolean or a vector")]))
;; map, zipWith
;; 'masked-value は mask されたスレッドが返す値を表し,map, zipWith は 'masked-value を無視する
(define (zipWith-vec f xs ys)
(for/vector ([x xs]
[y ys])
(if (or (eq? x 'masked-value) (eq? y 'masked-value)) 'masked-value
(f x y))))
(define (map-vec f xs)
(for/vector ([x xs])
(if (eq? x 'masked-value) 'masked-value
(f x))))
;; lifting an operator on scalar values to an operator on vector
(define (LSop op)
(lambda (x)
(let ([x (vecfy x)])
(map-vec op x))))
(define (LSop2 op)
(lambda (x y)
(let ([x (vecfy x)]
[y (vecfy y)])
(zipWith-vec op x y))))
;; lifting basic operators
(define +/LS (LSop2 +))
(define eq?/LS (LSop2 eq?))
(define !/LS (LSop !))
(define &&/LS (LSop2 &&))
(define >/LS (LSop2 >))
(define </LS (LSop2 <))
(define quotient/LS (LSop2 quotient))
;; thread id
(define (tid) (for/vector ([i (in-range (ntid))]) i))
;; make a symbolic vector with length ``n'' and type ``type''
(define (new-symbolic-vector n type)
(for/vector ([i (in-range n)])
(define-symbolic* x type)
x))
;; create a new vector value with type ``type''
(define (new-vec type)
(new-symbolic-vector (ntid) type))
create a scalar value from scalar ` ` s ''
(define (scalar->vec s)
(make-vector (ntid) s))
;; create a new array with length n and type ``type''
(define (new-sh-array n type)
(new-symbolic-vector n type))
;; denotation of the statement ``xs = vs''
;; assign each element of vs to xs, except masked values
(define (vec-set! xs vs)
(for ([i (in-range (ntid))]
[m (mask)]
[v (vecfy vs)])
(when m (vector-set! xs i v))))
TODO ; implement Read / Write set
;; denotation of an expression arr[ixs]
;; if a thread is masked, array-ref! returns the special symbol 'masked-value
(define (array-ref! arr ixs)
(for/vector ([tid (tid)]
[i (vecfy ixs)]
[m (mask)])
(if m (let* ([elem (vector-ref arr i)]
[cont (element-content elem)]
[read (element-read elem)]
[write (element-write elem)])
(if (or (eq? write tid) (eq? write #f))
(begin
(set! read tid)
cont)
(raise "array-ref!: expected an conflict")))
'masked-value)))
;; denotation of the statement arr[ixs] = vs
;; array-set! assigns vs to each elements of arr[ixs]
(define (array-set! arr ixs vs)
(for ([tid (tid)]
[m (mask)]
[i ixs]
[v (vecfy vs)])
( printf " m , i , v = ~a , ~a , ~a\n " m i v )
(when m
(let* ([elem (vector-ref arr i)]
[cont (element-content elem)]
[read (element-read elem)]
[write (element-write elem)])
(if (or (eq? write tid) (eq? write #f) (eq? read tid) (eq? read #f))
(begin
(set! read tid)
(set! write tid)
(vector-set! arr i (make-element v)))
(raise "array-set!: expected an conflict"))))))
;; denotation of if (b) {then-cl} {else-cl}
;; execute each clause with additional masks by b
;; then-cl, else-cl :: M -> ()
;; b :: () -> boolean
(define (if/LS b then-cl else-cl)
(let ([bval (b)])
(parameterize ([mask (&&/LS bval (mask))]) (then-cl))
(parameterize ([mask (&&/LS (!/LS bval) (mask))]) (else-cl))))
;; denotation of while (b) {body}
;; execute body with addtional mask by b until all threads are masked
;; b :: () -> boolean?
;; body :: M -> ()
(define (while/LS b body)
(let* ([bval (b)]
[m (&&/LS bval (mask))])
(when (for/or ([v m]) v) ;; check that whether there are any non-masked thread
(parameterize ([mask m])
(body)
(while/LS b body)))))
;; barrier divergence check
;; when the execution reach a barrier, we need to check that all
;; threads are participate in this barrier
(define (barrier-ok m)
(or (for/and ([x m]) x)
(for/and ([x m]) (! x))))
;; barrier
;; just do the barrier divergence check
(define (barrier)
(let ([m (mask)])
(assert (barrier-ok m))))
;; syntax
(define-syntax (if- stx)
(syntax-case stx ()
[(_ b then-cl else-cl)
#'(if/LS (lambda () b)
(lambda () then-cl)
(lambda () else-cl))]
[(_ b then-cl)
#'(if- b then-cl (void))]))
(define-syntax (while stx)
(syntax-case stx ()
[(_ b body ...)
#'(while/LS (lambda () b) (lambda () body ...))]))
(define-syntax (: stx)
(syntax-case stx ()
[(_ type x ...)
#'(begin (define x (new-vec type)) ...)]))
(define-syntax (:= stx)
(syntax-case stx ()
[(_ type x val)
#'(begin
(define x (new-vec type))
(vec-set! x val))]))
(define-syntax (= stx)
(syntax-case stx ()
[(_ var exp)
(identifier? #'var)
#'(vec-set! var exp)]
[(_ [arr idx] exp)
#'(array-set! (array-contents arr) idx exp)]))
(define int integer?)
(define (invoke-kernel ker n . arg)
(parameterize ([ntid n]
[mask (make-vector n #t)])
(apply ker arg)))
| null | https://raw.githubusercontent.com/prg-titech/Kani-CUDA/e97c4bede43a5fc4031a7d2cfc32d71b01ac26c4/Emulator/while.rkt | racket | T x; => (define x (new-vec T))
を ( define x e ) と書いてはならない . vector が共有される可能性がある.
= > ( vec - set ! x e )
threadIdx.x => (tid)
arr[ix]; => (array-ref arr ix)
arr[ix] = e => array-set!
if, while -> if-, while
syncthreads -> barrier
syntax
arithmetic/Boolean operators
thread ID
vector/array
barrier
kernel invocation
real type
preambles for executing the program in pure racket
(define ! not)
(define (&& x y) (and x y))
(define-syntax (define-symbolic* stx)
[(_ var type) #'(define var 0)]))
number of threads and mask
before running the kernel, ntid need to be specified with a number:
(parameterize ([ntid ?]) (kernel))
mask is only internally used
element of array
int or boolean
int or boolean
int or boolean
make new element
array
type check?
#:property prop:procedure
(lambda (arr) (array-contents arr))
convert a scalar value to a vector value
(printf "vecfy x = ~a\n" x)
map, zipWith
'masked-value は mask されたスレッドが返す値を表し,map, zipWith は 'masked-value を無視する
lifting an operator on scalar values to an operator on vector
lifting basic operators
thread id
make a symbolic vector with length ``n'' and type ``type''
create a new vector value with type ``type''
create a new array with length n and type ``type''
denotation of the statement ``xs = vs''
assign each element of vs to xs, except masked values
implement Read / Write set
denotation of an expression arr[ixs]
if a thread is masked, array-ref! returns the special symbol 'masked-value
denotation of the statement arr[ixs] = vs
array-set! assigns vs to each elements of arr[ixs]
denotation of if (b) {then-cl} {else-cl}
execute each clause with additional masks by b
then-cl, else-cl :: M -> ()
b :: () -> boolean
denotation of while (b) {body}
execute body with addtional mask by b until all threads are masked
b :: () -> boolean?
body :: M -> ()
check that whether there are any non-masked thread
barrier divergence check
when the execution reach a barrier, we need to check that all
threads are participate in this barrier
barrier
just do the barrier divergence check
syntax
| #lang rosette
代わりに ( begin ( define x ( new - vec T ) ) ( vec - set ! x e ) ) と書く.
(provide
if- while : = :=
/LS is for avoiding naming conflicts
+/LS eq?/LS !/LS &&/LS </LS >/LS quotient/LS
ntid tid
new-vec scalar->vec new-sh-array vec-set! array-ref! array-set! make-element element-content array array-contents
barrier
invoke-kernel
int
)
( syntax - case stx ( )
(define ntid (make-parameter 16))
(define mask (make-parameter (make-vector (ntid) #t)))
(struct element
#:property prop:procedure
(lambda (elem) (element-content elem)))
(define (make-element e)
(element e #f #f))
(struct array
([contents #:mutable])
#:property prop:procedure
(lambda (arr idx)
(array-ref! (array-contents arr) idx)))
(define (vecfy x)
(cond [(or (integer? x) (boolean? x)) (make-vector (ntid) x)]
[(vector? x) x]
[else (raise "vecfy: expected an integer/boolean or a vector")]))
(define (zipWith-vec f xs ys)
(for/vector ([x xs]
[y ys])
(if (or (eq? x 'masked-value) (eq? y 'masked-value)) 'masked-value
(f x y))))
(define (map-vec f xs)
(for/vector ([x xs])
(if (eq? x 'masked-value) 'masked-value
(f x))))
(define (LSop op)
(lambda (x)
(let ([x (vecfy x)])
(map-vec op x))))
(define (LSop2 op)
(lambda (x y)
(let ([x (vecfy x)]
[y (vecfy y)])
(zipWith-vec op x y))))
(define +/LS (LSop2 +))
(define eq?/LS (LSop2 eq?))
(define !/LS (LSop !))
(define &&/LS (LSop2 &&))
(define >/LS (LSop2 >))
(define </LS (LSop2 <))
(define quotient/LS (LSop2 quotient))
(define (tid) (for/vector ([i (in-range (ntid))]) i))
(define (new-symbolic-vector n type)
(for/vector ([i (in-range n)])
(define-symbolic* x type)
x))
(define (new-vec type)
(new-symbolic-vector (ntid) type))
create a scalar value from scalar ` ` s ''
(define (scalar->vec s)
(make-vector (ntid) s))
(define (new-sh-array n type)
(new-symbolic-vector n type))
(define (vec-set! xs vs)
(for ([i (in-range (ntid))]
[m (mask)]
[v (vecfy vs)])
(when m (vector-set! xs i v))))
(define (array-ref! arr ixs)
(for/vector ([tid (tid)]
[i (vecfy ixs)]
[m (mask)])
(if m (let* ([elem (vector-ref arr i)]
[cont (element-content elem)]
[read (element-read elem)]
[write (element-write elem)])
(if (or (eq? write tid) (eq? write #f))
(begin
(set! read tid)
cont)
(raise "array-ref!: expected an conflict")))
'masked-value)))
(define (array-set! arr ixs vs)
(for ([tid (tid)]
[m (mask)]
[i ixs]
[v (vecfy vs)])
( printf " m , i , v = ~a , ~a , ~a\n " m i v )
(when m
(let* ([elem (vector-ref arr i)]
[cont (element-content elem)]
[read (element-read elem)]
[write (element-write elem)])
(if (or (eq? write tid) (eq? write #f) (eq? read tid) (eq? read #f))
(begin
(set! read tid)
(set! write tid)
(vector-set! arr i (make-element v)))
(raise "array-set!: expected an conflict"))))))
(define (if/LS b then-cl else-cl)
(let ([bval (b)])
(parameterize ([mask (&&/LS bval (mask))]) (then-cl))
(parameterize ([mask (&&/LS (!/LS bval) (mask))]) (else-cl))))
(define (while/LS b body)
(let* ([bval (b)]
[m (&&/LS bval (mask))])
(parameterize ([mask m])
(body)
(while/LS b body)))))
(define (barrier-ok m)
(or (for/and ([x m]) x)
(for/and ([x m]) (! x))))
(define (barrier)
(let ([m (mask)])
(assert (barrier-ok m))))
(define-syntax (if- stx)
(syntax-case stx ()
[(_ b then-cl else-cl)
#'(if/LS (lambda () b)
(lambda () then-cl)
(lambda () else-cl))]
[(_ b then-cl)
#'(if- b then-cl (void))]))
(define-syntax (while stx)
(syntax-case stx ()
[(_ b body ...)
#'(while/LS (lambda () b) (lambda () body ...))]))
(define-syntax (: stx)
(syntax-case stx ()
[(_ type x ...)
#'(begin (define x (new-vec type)) ...)]))
(define-syntax (:= stx)
(syntax-case stx ()
[(_ type x val)
#'(begin
(define x (new-vec type))
(vec-set! x val))]))
(define-syntax (= stx)
(syntax-case stx ()
[(_ var exp)
(identifier? #'var)
#'(vec-set! var exp)]
[(_ [arr idx] exp)
#'(array-set! (array-contents arr) idx exp)]))
(define int integer?)
(define (invoke-kernel ker n . arg)
(parameterize ([ntid n]
[mask (make-vector n #t)])
(apply ker arg)))
|
c420f3ef16e1bcc861914f2473c9e45cfd06eaba735c00dbf0b6ef8fc52efbc4 | replikativ/datahike | db.cljc | (ns ^:no-doc datahike.db
(:require
[clojure.data :as data]
[clojure.walk :refer [postwalk]]
#?(:clj [clojure.pprint :as pp])
[datahike.config :as dc]
[datahike.constants :as c :refer [ue0 e0 tx0 utx0 emax txmax system-schema]]
[datahike.datom :as dd :refer [datom datom-tx datom-added]]
[datahike.db.interface :as dbi]
[datahike.db.search :as dbs]
[datahike.db.utils :as dbu]
[datahike.index :as di]
[datahike.schema :as ds]
[datahike.tools :as tools :refer [raise]]
[me.tonsky.persistent-sorted-set.arrays :as arrays]
[medley.core :as m]
[taoensso.timbre :refer [warn]])
#?(:cljs (:require-macros [datahike.db :refer [defrecord-updatable]]
[datahike.datom :refer [combine-cmp datom]]
[datahike.tools :refer [case-tree raise]]))
(:refer-clojure :exclude [seqable?])
#?(:clj (:import [clojure.lang AMapEntry ITransientCollection IEditableCollection IPersistentCollection Seqable
IHashEq Associative IKeywordLookup ILookup]
[datahike.datom Datom]
[java.io Writer]
[java.util Date])))
(declare equiv-db empty-db)
#?(:cljs (declare pr-db))
;; ----------------------------------------------------------------------------
;; macros and funcs to support writing defrecords and updating
( replacing ) builtins , i.e. , Object / hashCode , IHashEq hasheq , etc .
;; code taken from prismatic:
;;
;;
;; ----------------------------------------------------------------------------
#?(:cljs
(do
(def Exception js/Error)
(def IllegalArgumentException js/Error)
(def UnsupportedOperationException js/Error)))
;; ----------------------------------------------------------------------------
(defn #?@(:clj [^Boolean seqable?]
:cljs [^boolean seqable?])
[x]
(and (not (string? x))
#?(:cljs (or (cljs.core/seqable? x)
(arrays/array? x))
:clj (or (seq? x)
(instance? Seqable x)
(nil? x)
(instance? Iterable x)
(arrays/array? x)
(instance? java.util.Map x)))))
(defn- cljs-env?
"Take the &env from a macro, and tell whether we are expanding into cljs."
[env]
(boolean (:ns env)))
#?(:clj
(defmacro if-cljs
"Return then if we are generating cljs code and else for Clojure code.
"
[then else]
(if (cljs-env? &env) then else)))
#?(:clj
(defn- get-sig [method]
;; expects something like '(method-symbol [arg arg arg] ...)
;; if the thing matches, returns [fully-qualified-symbol arity], otherwise nil
(and (sequential? method)
(symbol? (first method))
(vector? (second method))
(let [sym (first method)
ns (or (some->> sym resolve meta :ns str) "clojure.core")]
[(symbol ns (name sym)) (-> method second count)]))))
#?(:clj
(defn- dedupe-interfaces [deftype-form]
;; get the interfaces list, remove any duplicates, similar to remove-nil-implements in potemkin
;; verified w/ deftype impl in compiler:
;; (deftype* tagname classname [fields] :implements [interfaces] :tag tagname methods*)
(let [[deftype* tagname classname fields implements interfaces & rest] deftype-form]
(when (or (not= deftype* 'deftype*) (not= implements :implements))
(throw (IllegalArgumentException. "deftype-form mismatch")))
(list* deftype* tagname classname fields implements (vec (distinct interfaces)) rest))))
#?(:clj
(defn- make-record-updatable-clj [name fields & impls]
(let [impl-map (->> impls (map (juxt get-sig identity)) (filter first) (into {}))
body (macroexpand-1 (list* 'defrecord name fields impls))]
(postwalk
(fn [form]
(if (and (sequential? form) (= 'deftype* (first form)))
(->> form
dedupe-interfaces
(remove (fn [method]
(when-let [impl (-> method get-sig impl-map)]
(not= method impl)))))
form))
body))))
#?(:clj
(defn- make-record-updatable-cljs [name fields & impls]
`(do
(defrecord ~name ~fields)
(extend-type ~name ~@impls))))
#?(:clj
(defmacro defrecord-updatable [name fields & impls]
`(if-cljs
~(apply make-record-updatable-cljs name fields impls)
~(apply make-record-updatable-clj name fields impls))))
TxReport
(defrecord TxReport [db-before db-after tx-data tempids tx-meta])
;; DB
(defn db-transient [db]
(-> db
(update :eavt di/-transient)
(update :aevt di/-transient)
(update :avet di/-transient)))
(defn db-persistent! [db]
(-> db
(update :eavt di/-persistent!)
(update :aevt di/-persistent!)
(update :avet di/-persistent!)))
(defrecord-updatable DB [schema eavt aevt avet temporal-eavt temporal-aevt temporal-avet max-eid max-tx op-count rschema hash config system-entities ident-ref-map ref-ident-map meta]
#?@(:cljs
[IHash (-hash [db] hash)
IEquiv (-equiv [db other] (equiv-db db other))
ISeqable (-seq [db] (di/-seq eavt))
IReversible (-rseq [db] (-rseq eavt))
ICounted (-count [db] (count eavt))
IEmptyableCollection (-empty [db] (empty-db (ds/get-user-schema db)))
IPrintWithWriter (-pr-writer [db w opts] (pr-db db w opts))
IEditableCollection (-as-transient [db] (db-transient db))
ITransientCollection (-conj! [db key] (throw (ex-info "datahike.DB/conj! is not supported" {})))
(-persistent! [db] (db-persistent! db))]
:clj
[Object (hashCode [db] hash)
clojure.lang.IHashEq (hasheq [db] hash)
Seqable (seq [db] (di/-seq eavt))
IPersistentCollection
(count [db] (di/-count eavt))
(equiv [db other] (equiv-db db other))
(empty [db] (empty-db (ds/get-user-schema db)))
IEditableCollection
(asTransient [db] (db-transient db))
ITransientCollection
(conj [db key] (throw (ex-info "datahike.DB/conj! is not supported" {})))
(persistent [db] (db-persistent! db))])
dbi/IDB
(-schema [db] schema)
(-rschema [db] rschema)
(-system-entities [db] system-entities)
(-attrs-by [db property] (rschema property))
(-temporal-index? [db] (dbi/-keep-history? db))
(-keep-history? [db] (:keep-history? config))
(-max-tx [db] max-tx)
(-max-eid [db] max-eid)
(-config [db] config)
(-ref-for [db a-ident]
(if (:attribute-refs? config)
(let [ref (get ident-ref-map a-ident)]
(when (nil? ref)
(warn (str "Attribute " a-ident " has not been found in database")))
ref)
a-ident))
(-ident-for [db a-ref]
(if (:attribute-refs? config)
(let [a-ident (get ref-ident-map a-ref)]
(when (nil? a-ident)
(warn (str "Attribute with reference number " a-ref " has not been found in database")))
a-ident)
a-ref))
dbi/ISearch
(-search [db pattern]
(dbs/search-current-indices db pattern))
dbi/IIndexAccess
(-datoms [db index-type cs]
(di/-slice (get db index-type)
(dbu/components->pattern db index-type cs e0 tx0)
(dbu/components->pattern db index-type cs emax txmax)
index-type))
(-seek-datoms [db index-type cs]
(di/-slice (get db index-type)
(dbu/components->pattern db index-type cs e0 tx0)
(datom emax nil nil txmax)
index-type))
(-rseek-datoms [db index-type cs]
(-> (di/-slice (get db index-type)
(dbu/components->pattern db index-type cs e0 tx0)
(datom emax nil nil txmax)
index-type)
vec
rseq))
(-index-range [db attr start end]
(when-not (dbu/indexing? db attr)
(raise "Attribute" attr "should be marked as :db/index true" {}))
(dbu/validate-attr attr (list '-index-range 'db attr start end) db)
(di/-slice avet
(dbu/resolve-datom db nil attr start nil e0 tx0)
(dbu/resolve-datom db nil attr end nil emax txmax)
:avet))
data/EqualityPartition
(equality-partition [x] :datahike/db)
data/Diff
(diff-similar [a b]
(let [datoms-a (di/-slice (:eavt a) (datom e0 nil nil tx0) (datom emax nil nil txmax) :eavt)
datoms-b (di/-slice (:eavt b) (datom e0 nil nil tx0) (datom emax nil nil txmax) :eavt)]
(dd/diff-sorted datoms-a datoms-b dd/cmp-datoms-eavt-quick))))
;; FilteredDB
(defrecord-updatable FilteredDB [unfiltered-db pred]
#?@(:cljs
[IEquiv (-equiv [db other] (equiv-db db other))
ISeqable (-seq [db] (dbi/-datoms db :eavt []))
ICounted (-count [db] (count (dbi/-datoms db :eavt [])))
IPrintWithWriter (-pr-writer [db w opts] (pr-db db w opts))
IEmptyableCollection (-empty [_] (throw (js/Error. "-empty is not supported on FilteredDB")))
IEmptyableCollection (-empty [_] (throw (js/Error. "-empty is not supported on FilteredDB")))
ILookup (-lookup ([_ _] (throw (js/Error. "-lookup is not supported on FilteredDB")))
([_ _ _] (throw (js/Error. "-lookup is not supported on FilteredDB"))))
IAssociative (-contains-key? [_ _] (throw (js/Error. "-contains-key? is not supported on FilteredDB")))
(-assoc [_ _ _] (throw (js/Error. "-assoc is not supported on FilteredDB")))]
:clj
[IPersistentCollection
(count [db] (count (dbi/-datoms db :eavt [])))
(equiv [db o] (equiv-db db o))
(cons [db [k v]] (throw (UnsupportedOperationException. "cons is not supported on FilteredDB")))
(empty [db] (throw (UnsupportedOperationException. "empty is not supported on FilteredDB")))
Seqable (seq [db] (dbi/-datoms db :eavt []))
clojure.lang.ILookup (valAt [db k] (throw (UnsupportedOperationException. "valAt/2 is not supported on FilteredDB")))
(valAt [db k nf] (throw (UnsupportedOperationException. "valAt/3 is not supported on FilteredDB")))
clojure.lang.IKeywordLookup (getLookupThunk [db k]
(throw (UnsupportedOperationException. "getLookupThunk is not supported on FilteredDB")))
Associative
(containsKey [e k] (throw (UnsupportedOperationException. "containsKey is not supported on FilteredDB")))
(entryAt [db k] (throw (UnsupportedOperationException. "entryAt is not supported on FilteredDB")))
(assoc [db k v] (throw (UnsupportedOperationException. "assoc is not supported on FilteredDB")))])
dbi/IDB
(-schema [db] (dbi/-schema unfiltered-db))
(-rschema [db] (dbi/-rschema unfiltered-db))
(-system-entities [db] (dbi/-system-entities unfiltered-db))
(-attrs-by [db property] (dbi/-attrs-by unfiltered-db property))
(-temporal-index? [db] (dbi/-keep-history? db))
(-keep-history? [db] (dbi/-keep-history? unfiltered-db))
(-max-tx [db] (dbi/-max-tx unfiltered-db))
(-max-eid [db] (dbi/-max-eid unfiltered-db))
(-config [db] (dbi/-config unfiltered-db))
(-ref-for [db a-ident] (dbi/-ref-for unfiltered-db a-ident))
(-ident-for [db a-ref] (dbi/-ident-for unfiltered-db a-ref))
dbi/ISearch
(-search [db pattern]
(filter (.-pred db) (dbi/-search unfiltered-db pattern)))
dbi/IIndexAccess
(-datoms [db index cs]
(filter (.-pred db) (dbi/-datoms unfiltered-db index cs)))
(-seek-datoms [db index cs]
(filter (.-pred db) (dbi/-seek-datoms unfiltered-db index cs)))
(-rseek-datoms [db index cs]
(filter (.-pred db) (dbi/-rseek-datoms unfiltered-db index cs)))
(-index-range [db attr start end]
(filter (.-pred db) (dbi/-index-range unfiltered-db attr start end))))
HistoricalDB
(defrecord-updatable HistoricalDB [origin-db]
#?@(:cljs
[IEquiv (-equiv [db other] (equiv-db db other))
ISeqable (-seq [db] (dbi/-datoms db :eavt []))
ICounted (-count [db] (count (dbi/-datoms db :eavt [])))
IPrintWithWriter (-pr-writer [db w opts] (pr-db db w opts))
IEmptyableCollection (-empty [_] (throw (js/Error. "-empty is not supported on HistoricalDB")))
IEmptyableCollection (-empty [_] (throw (js/Error. "-empty is not supported on HistoricalDB")))
ILookup (-lookup ([_ _] (throw (js/Error. "-lookup is not supported on HistoricalDB")))
([_ _ _] (throw (js/Error. "-lookup is not supported on HistoricalDB"))))
IAssociative (-contains-key? [_ _] (throw (js/Error. "-contains-key? is not supported on HistoricalDB")))
(-assoc [_ _ _] (throw (js/Error. "-assoc is not supported on HistoricalDB")))]
:clj
[IPersistentCollection
(count [db] (count (dbi/-datoms db :eavt [])))
(equiv [db o] (equiv-db db o))
(cons [db [k v]] (throw (UnsupportedOperationException. "cons is not supported on HistoricalDB")))
(empty [db] (throw (UnsupportedOperationException. "empty is not supported on HistoricalDB")))
Seqable
(seq [db] (dbi/-datoms db :eavt []))
Associative
(assoc [db k v] (throw (UnsupportedOperationException. "assoc is not supported on HistoricalDB")))])
dbi/IDB
(-schema [db] (dbi/-schema origin-db))
(-rschema [db] (dbi/-rschema origin-db))
(-system-entities [db] (dbi/-system-entities origin-db))
(-attrs-by [db property] (dbi/-attrs-by origin-db property))
(-temporal-index? [db] (dbi/-keep-history? origin-db))
(-keep-history? [db] (dbi/-keep-history? origin-db))
(-max-tx [db] (dbi/-max-tx origin-db))
(-max-eid [db] (dbi/-max-eid origin-db))
(-config [db] (dbi/-config origin-db))
(-ref-for [db a-ident] (dbi/-ref-for origin-db a-ident))
(-ident-for [db a-ref] (dbi/-ident-for origin-db a-ref))
dbi/IHistory
(-time-point [db] nil)
(-origin [db] origin-db)
dbi/ISearch
(-search [db pattern]
(dbs/temporal-search origin-db pattern))
dbi/IIndexAccess
(-datoms [db index-type cs] (dbu/temporal-datoms origin-db index-type cs))
(-seek-datoms [db index-type cs] (dbs/temporal-seek-datoms origin-db index-type cs))
(-rseek-datoms [db index-type cs] (dbs/temporal-rseek-datoms origin-db index-type cs))
(-index-range [db attr start end] (dbs/temporal-index-range origin-db db attr start end)))
AsOfDB
(defn- date? [d]
#?(:cljs (instance? js/Date d)
:clj (instance? Date d)))
(defn get-current-values [db history-datoms]
(->> history-datoms
(group-by (fn [^Datom datom] [(.-e datom) (.-a datom)]))
(mapcat
(fn [[[_ a] datoms]]
(if (dbu/multival? db a)
(->> datoms
(sort-by datom-tx)
(reduce (fn [current-datoms ^Datom datom]
(if (datom-added datom)
(assoc current-datoms (.-v datom) datom)
(dissoc current-datoms (.-v datom))))
{})
vals)
(let [last-ea-tx (apply max (map datom-tx datoms))
current-ea-datom (first (filter #(and (datom-added %) (= last-ea-tx (datom-tx %)))
datoms))]
(if current-ea-datom
[current-ea-datom]
[])))))))
(defn filter-as-of-datoms [datoms time-point db]
(let [as-of-pred (fn [^Datom d]
(if (date? time-point)
(.before ^Date (.-v d) ^Date time-point)
(<= (dd/datom-tx d) time-point)))
filtered-tx-ids (dbu/filter-txInstant datoms as-of-pred db)
filtered-datoms (->> datoms
(filter (fn [^Datom d] (contains? filtered-tx-ids (datom-tx d))))
(get-current-values db))]
filtered-datoms))
(defrecord-updatable AsOfDB [origin-db time-point]
#?@(:cljs
[IEquiv (-equiv [db other] (equiv-db db other))
ISeqable (-seq [db] (dbi/-datoms db :eavt []))
ICounted (-count [db] (count (dbi/-datoms db :eavt [])))
IPrintWithWriter (-pr-writer [db w opts] (pr-db db w opts))
IEmptyableCollection (-empty [_] (throw (js/Error. "-empty is not supported on AsOfDB")))
IEmptyableCollection (-empty [_] (throw (js/Error. "-empty is not supported on AsOfDB")))
ILookup (-lookup ([_ _] (throw (js/Error. "-lookup is not supported on AsOfDB")))
([_ _ _] (throw (js/Error. "-lookup is not supported on AsOfDB"))))
IAssociative (-contains-key? [_ _] (throw (js/Error. "-contains-key? is not supported on AsOfDB")))
(-assoc [_ _ _] (throw (js/Error. "-assoc is not supported on AsOfDB")))]
:clj
[IPersistentCollection
(count [db] (count (dbi/-datoms db :eavt [])))
(equiv [db o] (equiv-db db o))
(cons [db [k v]] (throw (UnsupportedOperationException. "cons is not supported on AsOfDB")))
(empty [db] (throw (UnsupportedOperationException. "empty is not supported on AsOfDB")))
Seqable
(seq [db] (dbi/-datoms db :eavt []))
Associative
(assoc [db k v] (throw (UnsupportedOperationException. "assoc is not supported on AsOfDB")))])
dbi/IDB
(-schema [db] (dbi/-schema origin-db))
(-rschema [db] (dbi/-rschema origin-db))
(-system-entities [db] (dbi/-system-entities origin-db))
(-attrs-by [db property] (dbi/-attrs-by origin-db property))
(-temporal-index? [db] (dbi/-keep-history? origin-db))
(-keep-history? [db] (dbi/-keep-history? origin-db))
(-max-tx [db] (dbi/-max-tx origin-db))
(-max-eid [db] (dbi/-max-eid origin-db))
(-config [db] (dbi/-config origin-db))
(-ref-for [db a-ident] (dbi/-ref-for origin-db a-ident))
(-ident-for [db a-ref] (dbi/-ident-for origin-db a-ref))
dbi/IHistory
(-time-point [db] time-point)
(-origin [db] origin-db)
dbi/ISearch
(-search [db pattern]
(-> (dbs/temporal-search origin-db pattern)
(filter-as-of-datoms time-point origin-db)))
dbi/IIndexAccess
(-datoms [db index-type cs]
(-> (dbu/temporal-datoms origin-db index-type cs)
(filter-as-of-datoms time-point origin-db)))
(-seek-datoms [db index-type cs]
(-> (dbs/temporal-seek-datoms origin-db index-type cs)
(filter-as-of-datoms time-point origin-db)))
(-rseek-datoms [db index-type cs]
(-> (dbs/temporal-rseek-datoms origin-db index-type cs)
(filter-as-of-datoms time-point origin-db)))
(-index-range [db attr start end]
(-> (dbs/temporal-index-range origin-db db attr start end)
(filter-as-of-datoms time-point origin-db))))
SinceDB
(defn- filter-since [datoms time-point db]
(let [since-pred (fn [^Datom d]
(if (date? time-point)
(.after ^Date (.-v d) ^Date time-point)
(>= (.-tx d) time-point)))
filtered-tx-ids (dbu/filter-txInstant datoms since-pred db)]
(->> datoms
(filter datom-added)
(filter (fn [^Datom d] (contains? filtered-tx-ids (datom-tx d)))))))
(defrecord-updatable SinceDB [origin-db time-point]
#?@(:cljs
[IEquiv (-equiv [db other] (equiv-db db other))
ISeqable (-seq [db] (dbi/-datoms db :eavt []))
ICounted (-count [db] (count (dbi/-datoms db :eavt [])))
IPrintWithWriter (-pr-writer [db w opts] (pr-db db w opts))
IEmptyableCollection (-empty [_] (throw (js/Error. "-empty is not supported on SinceDB")))
IEmptyableCollection (-empty [_] (throw (js/Error. "-empty is not supported on SinceDB")))
ILookup (-lookup ([_ _] (throw (js/Error. "-lookup is not supported on SinceDB")))
([_ _ _] (throw (js/Error. "-lookup is not supported on SinceDB"))))
IAssociative (-contains-key? [_ _] (throw (js/Error. "-contains-key? is not supported on SinceDB")))
(-assoc [_ _ _] (throw (js/Error. "-assoc is not supported on SinceDB")))]
:clj
[IPersistentCollection
(count [db] (count (dbi/-datoms db :eavt [])))
(equiv [db o] (equiv-db db o))
(cons [db [k v]] (throw (UnsupportedOperationException. "cons is not supported on SinceDB")))
(empty [db] (throw (UnsupportedOperationException. "empty is not supported on SinceDB")))
Seqable
(seq [db] (dbi/-datoms db :eavt []))
Associative
(assoc [db k v] (throw (UnsupportedOperationException. "assoc is not supported on SinceDB")))])
dbi/IDB
(-schema [db] (dbi/-schema origin-db))
(-rschema [db] (dbi/-rschema origin-db))
(-system-entities [db] (dbi/-system-entities origin-db))
(-attrs-by [db property] (dbi/-attrs-by origin-db property))
(-temporal-index? [db] (dbi/-keep-history? db))
(-keep-history? [db] (dbi/-keep-history? origin-db))
(-max-tx [db] (dbi/-max-tx origin-db))
(-max-eid [db] (dbi/-max-eid origin-db))
(-config [db] (dbi/-config origin-db))
(-ref-for [db a-ident] (dbi/-ref-for origin-db a-ident))
(-ident-for [db a-ref] (dbi/-ident-for origin-db a-ref))
dbi/IHistory
(-time-point [db] time-point)
(-origin [db] origin-db)
dbi/ISearch
(-search [db pattern]
(-> (dbs/temporal-search origin-db pattern)
(filter-since time-point origin-db)))
dbi/IIndexAccess
(dbi/-datoms [db index-type cs]
(-> (dbu/temporal-datoms origin-db index-type cs)
(filter-since time-point origin-db)))
(dbi/-seek-datoms [db index-type cs]
(-> (dbs/temporal-seek-datoms origin-db index-type cs)
(filter-since time-point origin-db)))
(dbi/-rseek-datoms [db index-type cs]
(-> (dbs/temporal-rseek-datoms origin-db index-type cs)
(filter-since time-point origin-db)))
(dbi/-index-range [db attr start end]
(-> (dbs/temporal-index-range origin-db db attr start end)
(filter-since time-point origin-db))))
(defn- equiv-db-index [x y]
(loop [xs (seq x)
ys (seq y)]
(cond
(nil? xs) (nil? ys)
(= (first xs) (first ys)) (recur (next xs) (next ys))
:else false)))
(defn- equiv-db [db other]
(and (or (instance? DB other) (instance? FilteredDB other))
(= (dbi/-schema db) (dbi/-schema other))
(equiv-db-index (dbi/-datoms db :eavt []) (dbi/-datoms other :eavt []))))
#?(:cljs
(defn pr-db [db w opts]
(-write w "#datahike/DB {")
(-write w (str ":max-tx " (dbi/-max-tx db) " "))
(-write w (str ":max-eid " (dbi/-max-eid db) " "))
(-write w "}")))
#?(:clj
(do
(defn pr-db [db, ^Writer w]
(.write w (str "#datahike/DB {"))
(.write w (str ":max-tx " (dbi/-max-tx db) " "))
(.write w (str ":max-eid " (dbi/-max-eid db)))
(.write w "}"))
(defn pr-hist-db [db ^Writer w flavor time-point?]
(.write w (str "#datahike/" flavor " {"))
(.write w ":origin ")
(binding [*out* w]
(pr (dbi/-origin db)))
(when time-point?
(.write w " :time-point ")
(binding [*out* w]
(pr (dbi/-time-point db))))
(.write w "}"))
(defmethod print-method DB [db w] (pr-db db w))
(defmethod print-method FilteredDB [db w] (pr-db db w)) ;; why not with "FilteredDB" ?
(defmethod print-method HistoricalDB [db w] (pr-hist-db db w "HistoricalDB" false))
(defmethod print-method AsOfDB [db w] (pr-hist-db db w "AsOfDB" true))
(defmethod print-method SinceDB [db w] (pr-hist-db db w "SinceDB" true))
(defmethod pp/simple-dispatch Datom [^Datom d]
(pp/pprint-logical-block :prefix "#datahike/Datom [" :suffix "]"
(pp/write-out (.-e d))
(.write ^Writer *out* " ")
(pp/pprint-newline :linear)
(pp/write-out (.-a d))
(.write ^Writer *out* " ")
(pp/pprint-newline :linear)
(pp/write-out (.-v d))
(.write ^Writer *out* " ")
(pp/pprint-newline :linear)
(pp/write-out (datom-tx d))))
(defn- pp-db [db ^Writer w]
(pp/pprint-logical-block :prefix "#datahike/DB {" :suffix "}"
(pp/pprint-logical-block
(pp/write-out :max-tx)
(.write ^Writer *out* " ")
(pp/pprint-newline :linear)
(pp/write-out (dbi/-max-tx db))
(.write ^Writer *out* " ")
(pp/pprint-newline :linear)
(pp/write-out :max-eid)
(.write ^Writer *out* " ")
(pp/pprint-newline :linear)
(pp/write-out (dbi/-max-eid db)))
(pp/pprint-newline :linear)))
(defmethod pp/simple-dispatch DB [db] (pp-db db *out*))
(defmethod pp/simple-dispatch FilteredDB [db] (pp-db db *out*))))
(defn- validate-schema-key [a k v expected]
(when-not (or (nil? v)
(contains? expected v))
(throw (ex-info (str "Bad attribute specification for " (pr-str {a {k v}}) ", expected one of " expected)
{:error :schema/validation
:attribute a
:key k
:value v}))))
(defn- validate-tuple-schema [a kv]
(when (= :db.type/tuple (:db/valueType kv))
(case (some #{:db/tupleAttrs :db/tupleTypes :db/tupleType} (keys kv))
:db/tupleAttrs (when (not (vector? (:db/tupleAttrs kv)))
(throw (ex-info (str "Bad attribute specification for " a ": {:db/tupleAttrs ...} should be a vector}")
{:error :schema/validation
:attribute a
:key :db/tupleAttrs})))
:db/tupleTypes (when (not (vector? (:db/tupleTypes kv)))
(throw (ex-info (str "Bad attribute specification for " a ": {:db/tupleTypes ...} should be a vector}")
{:error :schema/validation
:attribute a
:key :db/tupleTypes})))
:db/tupleType (when (not (keyword? (:db/tupleType kv)))
(throw (ex-info (str "Bad attribute specification for " a ": {:db/tupleType ...} should be a keyword}")
{:error :schema/validation
:attribute a
:key :db/tupleType}))))))
(defn- validate-schema [schema]
(doseq [[a-ident kv] schema]
(let [comp? (:db/isComponent kv false)]
(validate-schema-key a-ident :db/isComponent (:db/isComponent kv) #{true false})
(when (and comp? (not= (:db/valueType kv) :db.type/ref))
(throw (ex-info (str "Bad attribute specification for " a-ident ": {:db/isComponent true} should also have {:db/valueType :db.type/ref}")
{:error :schema/validation
:attribute a-ident
:key :db/isComponent}))))
(validate-schema-key a-ident :db/unique (:db/unique kv) #{:db.unique/value :db.unique/identity})
(validate-schema-key a-ident :db/valueType (:db/valueType kv) #{:db.type/ref :db.type/tuple})
(validate-schema-key a-ident :db/cardinality (:db/cardinality kv) #{:db.cardinality/one :db.cardinality/many})
(validate-tuple-schema a-ident kv)))
(defn to-old-schema [new-schema]
(if (or (vector? new-schema) (seq? new-schema))
(reduce
(fn [acc {:keys [:db/ident] :as schema-entity}]
(assoc acc ident schema-entity))
{}
new-schema)
new-schema))
(defn- validate-write-schema [schema]
(when-not (ds/old-schema-valid? schema)
(raise "Incomplete schema attributes, expected at least :db/valueType, :db/cardinality"
(ds/explain-old-schema schema))))
(defn init-max-eid [eavt]
solved with reverse slice first in datascript
(if-let [datoms (di/-slice
eavt
(datom e0 nil nil tx0)
(datom (dec tx0) nil nil txmax)
:eavt)]
(-> datoms vec rseq first :e) ;; :e of last datom in slice
e0))
(defn get-max-tx [eavt]
(transduce (map (fn [^Datom d] (datom-tx d))) max tx0 (di/-all eavt)))
(def ref-datoms ;; maps enums as well
(let [idents (reduce (fn [m {:keys [db/ident db/id]}]
(assoc m ident id))
{}
system-schema)]
(->> system-schema
(mapcat
(fn [{:keys [db/id] :as i}]
(reduce-kv
(fn [coll k v]
(let [v-ref (idents v)
can be system schema eid ( v - ref ) , or ident or regular ( v )
d-val (if (and (not= k :db/ident) v-ref) v-ref v)]
(conj coll (dd/datom id (idents k) d-val tx0))))
[]
(dissoc i :db/id))))
vec)))
(defn get-ident-ref-map
"Maps IDs of system entities to their names (keyword) and attribute names to the attribute's specification"
[schema]
(reduce
(fn [m [a {:keys [db/id]}]]
(when a
(assoc m a id)))
{}
schema))
(defn ^DB empty-db
"Prefer create-database in api, schema only in index for attribute reference database."
([] (empty-db nil nil nil))
([schema] (empty-db schema nil nil))
([schema user-config] (empty-db schema user-config nil))
([schema user-config store]
{:pre [(or (nil? schema) (map? schema) (coll? schema))]}
(let [complete-config (merge (dc/storeless-config) user-config)
_ (dc/validate-config complete-config)
{:keys [keep-history? index schema-flexibility attribute-refs?]} complete-config
on-read? (= :read schema-flexibility)
schema (to-old-schema schema)
_ (if on-read?
(validate-schema schema)
(validate-write-schema schema))
complete-schema (merge schema
(if attribute-refs?
c/ref-implicit-schema
c/non-ref-implicit-schema))
rschema (dbu/rschema complete-schema)
ident-ref-map (if attribute-refs? (get-ident-ref-map complete-schema) {})
ref-ident-map (if attribute-refs? (clojure.set/map-invert ident-ref-map) {})
system-entities (if attribute-refs? c/system-entities #{})
indexed (if attribute-refs?
(set (map ident-ref-map (:db/index rschema)))
(:db/index rschema))
index-config (merge (:index-config complete-config)
{:indexed indexed})
eavt (if attribute-refs?
(di/init-index index store ref-datoms :eavt 0 index-config)
(di/empty-index index store :eavt index-config))
aevt (if attribute-refs?
(di/init-index index store ref-datoms :aevt 0 index-config)
(di/empty-index index store :aevt index-config))
indexed-datoms (filter (fn [[_ a _ _]] (contains? indexed a)) ref-datoms)
avet (if attribute-refs?
(di/init-index index store indexed-datoms :avet 0 index-config)
(di/empty-index index store :avet index-config))
max-eid (if attribute-refs? ue0 e0)
max-tx (if attribute-refs? utx0 tx0)]
(map->DB
(merge
{:schema complete-schema
:rschema rschema
:config complete-config
:eavt eavt
:aevt aevt
:avet avet
:max-eid max-eid
:max-tx max-tx
:hash 0
:system-entities system-entities
:ref-ident-map ref-ident-map
:ident-ref-map ident-ref-map
:meta (tools/meta-data)
:op-count (if attribute-refs? (count ref-datoms) 0)}
(when keep-history? ;; no difference for attribute references since no update possible
{:temporal-eavt eavt
:temporal-aevt aevt
:temporal-avet avet}))))))
(defn get-max-tx [eavt]
(transduce (map (fn [^Datom d] (datom-tx d))) max tx0 (di/-all eavt)))
(defn ^DB init-db
([datoms] (init-db datoms nil nil nil))
([datoms schema] (init-db datoms schema nil nil))
([datoms schema user-config] (init-db datoms schema user-config nil))
([datoms schema user-config store]
(validate-schema schema)
(let [{:keys [index keep-history? attribute-refs?] :as complete-config} (merge (dc/storeless-config) user-config)
_ (dc/validate-config complete-config)
complete-schema (merge schema
(if attribute-refs?
c/ref-implicit-schema
c/non-ref-implicit-schema))
rschema (dbu/rschema complete-schema)
ident-ref-map (if attribute-refs? (get-ident-ref-map schema) {})
ref-ident-map (if attribute-refs? (clojure.set/map-invert ident-ref-map) {})
system-entities (if attribute-refs? c/system-entities #{})
indexed (if attribute-refs?
(set (map ident-ref-map (:db/index rschema)))
(:db/index rschema))
new-datoms (if attribute-refs? (concat ref-datoms datoms) datoms)
indexed-datoms (filter (fn [[_ a _ _]] (contains? indexed a)) new-datoms)
op-count 0
index-config (assoc (:index-config complete-config)
:indexed indexed)
avet (di/init-index index store indexed-datoms :avet op-count index-config)
eavt (di/init-index index store new-datoms :eavt op-count index-config)
aevt (di/init-index index store new-datoms :aevt op-count index-config)
max-eid (init-max-eid eavt)
max-tx (get-max-tx eavt)
op-count (count new-datoms)]
(map->DB (merge {:schema complete-schema
:rschema rschema
:config complete-config
:eavt eavt
:aevt aevt
:avet avet
:max-eid max-eid
:max-tx max-tx
:op-count op-count
:hash (reduce #(+ %1 (hash %2)) 0 datoms)
:system-entities system-entities
:meta (tools/meta-data)
:ref-ident-map ref-ident-map
:ident-ref-map ident-ref-map}
(when keep-history?
{:temporal-eavt (di/empty-index index store :eavt index-config)
:temporal-aevt (di/empty-index index store :aevt index-config)
:temporal-avet (di/empty-index index store :avet index-config)}))))))
(defn db-from-reader [{:keys [schema datoms]}]
(init-db (map (fn [[e a v tx]] (datom e a v tx)) datoms) schema))
(defn metrics [^DB db]
(let [update-count-in (fn [m ks] (update-in m ks #(if % (inc %) 1)))
counts-map (->> (di/-seq (.-eavt db))
(reduce (fn [m ^Datom datom]
(-> m
(update-count-in [:per-attr-counts (dbi/-ident-for db (.-a datom))])
(update-count-in [:per-entity-counts (.-e datom)])))
{:per-attr-counts {}
:per-entity-counts {}}))
sum-indexed-attr-counts (fn [attr-counts] (->> attr-counts
(m/filter-keys #(contains? (:db/index (.-rschema db)) %))
vals
(reduce + 0)))]
(cond-> (merge counts-map
{:count (di/-count (.-eavt db))
:avet-count (->> (:per-attr-counts counts-map)
sum-indexed-attr-counts)})
(dbi/-keep-history? db)
(merge {:temporal-count (di/-count (.-temporal-eavt db))
:temporal-avet-count (->> (di/-seq (.-temporal-eavt db))
(reduce (fn [m ^Datom datom] (update-count-in m [(dbi/-ident-for db (.-a datom))]))
{})
sum-indexed-attr-counts)}))))
| null | https://raw.githubusercontent.com/replikativ/datahike/3625234370d336aa7e4caa77ec07566d340ca910/src/datahike/db.cljc | clojure | ----------------------------------------------------------------------------
macros and funcs to support writing defrecords and updating
code taken from prismatic:
----------------------------------------------------------------------------
----------------------------------------------------------------------------
expects something like '(method-symbol [arg arg arg] ...)
if the thing matches, returns [fully-qualified-symbol arity], otherwise nil
get the interfaces list, remove any duplicates, similar to remove-nil-implements in potemkin
verified w/ deftype impl in compiler:
(deftype* tagname classname [fields] :implements [interfaces] :tag tagname methods*)
DB
FilteredDB
why not with "FilteredDB" ?
:e of last datom in slice
maps enums as well
no difference for attribute references since no update possible | (ns ^:no-doc datahike.db
(:require
[clojure.data :as data]
[clojure.walk :refer [postwalk]]
#?(:clj [clojure.pprint :as pp])
[datahike.config :as dc]
[datahike.constants :as c :refer [ue0 e0 tx0 utx0 emax txmax system-schema]]
[datahike.datom :as dd :refer [datom datom-tx datom-added]]
[datahike.db.interface :as dbi]
[datahike.db.search :as dbs]
[datahike.db.utils :as dbu]
[datahike.index :as di]
[datahike.schema :as ds]
[datahike.tools :as tools :refer [raise]]
[me.tonsky.persistent-sorted-set.arrays :as arrays]
[medley.core :as m]
[taoensso.timbre :refer [warn]])
#?(:cljs (:require-macros [datahike.db :refer [defrecord-updatable]]
[datahike.datom :refer [combine-cmp datom]]
[datahike.tools :refer [case-tree raise]]))
(:refer-clojure :exclude [seqable?])
#?(:clj (:import [clojure.lang AMapEntry ITransientCollection IEditableCollection IPersistentCollection Seqable
IHashEq Associative IKeywordLookup ILookup]
[datahike.datom Datom]
[java.io Writer]
[java.util Date])))
(declare equiv-db empty-db)
#?(:cljs (declare pr-db))
( replacing ) builtins , i.e. , Object / hashCode , IHashEq hasheq , etc .
#?(:cljs
(do
(def Exception js/Error)
(def IllegalArgumentException js/Error)
(def UnsupportedOperationException js/Error)))
(defn #?@(:clj [^Boolean seqable?]
:cljs [^boolean seqable?])
[x]
(and (not (string? x))
#?(:cljs (or (cljs.core/seqable? x)
(arrays/array? x))
:clj (or (seq? x)
(instance? Seqable x)
(nil? x)
(instance? Iterable x)
(arrays/array? x)
(instance? java.util.Map x)))))
(defn- cljs-env?
"Take the &env from a macro, and tell whether we are expanding into cljs."
[env]
(boolean (:ns env)))
#?(:clj
(defmacro if-cljs
"Return then if we are generating cljs code and else for Clojure code.
"
[then else]
(if (cljs-env? &env) then else)))
#?(:clj
(defn- get-sig [method]
(and (sequential? method)
(symbol? (first method))
(vector? (second method))
(let [sym (first method)
ns (or (some->> sym resolve meta :ns str) "clojure.core")]
[(symbol ns (name sym)) (-> method second count)]))))
#?(:clj
(defn- dedupe-interfaces [deftype-form]
(let [[deftype* tagname classname fields implements interfaces & rest] deftype-form]
(when (or (not= deftype* 'deftype*) (not= implements :implements))
(throw (IllegalArgumentException. "deftype-form mismatch")))
(list* deftype* tagname classname fields implements (vec (distinct interfaces)) rest))))
#?(:clj
(defn- make-record-updatable-clj [name fields & impls]
(let [impl-map (->> impls (map (juxt get-sig identity)) (filter first) (into {}))
body (macroexpand-1 (list* 'defrecord name fields impls))]
(postwalk
(fn [form]
(if (and (sequential? form) (= 'deftype* (first form)))
(->> form
dedupe-interfaces
(remove (fn [method]
(when-let [impl (-> method get-sig impl-map)]
(not= method impl)))))
form))
body))))
#?(:clj
(defn- make-record-updatable-cljs [name fields & impls]
`(do
(defrecord ~name ~fields)
(extend-type ~name ~@impls))))
#?(:clj
(defmacro defrecord-updatable [name fields & impls]
`(if-cljs
~(apply make-record-updatable-cljs name fields impls)
~(apply make-record-updatable-clj name fields impls))))
TxReport
(defrecord TxReport [db-before db-after tx-data tempids tx-meta])
(defn db-transient [db]
(-> db
(update :eavt di/-transient)
(update :aevt di/-transient)
(update :avet di/-transient)))
(defn db-persistent! [db]
(-> db
(update :eavt di/-persistent!)
(update :aevt di/-persistent!)
(update :avet di/-persistent!)))
(defrecord-updatable DB [schema eavt aevt avet temporal-eavt temporal-aevt temporal-avet max-eid max-tx op-count rschema hash config system-entities ident-ref-map ref-ident-map meta]
#?@(:cljs
[IHash (-hash [db] hash)
IEquiv (-equiv [db other] (equiv-db db other))
ISeqable (-seq [db] (di/-seq eavt))
IReversible (-rseq [db] (-rseq eavt))
ICounted (-count [db] (count eavt))
IEmptyableCollection (-empty [db] (empty-db (ds/get-user-schema db)))
IPrintWithWriter (-pr-writer [db w opts] (pr-db db w opts))
IEditableCollection (-as-transient [db] (db-transient db))
ITransientCollection (-conj! [db key] (throw (ex-info "datahike.DB/conj! is not supported" {})))
(-persistent! [db] (db-persistent! db))]
:clj
[Object (hashCode [db] hash)
clojure.lang.IHashEq (hasheq [db] hash)
Seqable (seq [db] (di/-seq eavt))
IPersistentCollection
(count [db] (di/-count eavt))
(equiv [db other] (equiv-db db other))
(empty [db] (empty-db (ds/get-user-schema db)))
IEditableCollection
(asTransient [db] (db-transient db))
ITransientCollection
(conj [db key] (throw (ex-info "datahike.DB/conj! is not supported" {})))
(persistent [db] (db-persistent! db))])
dbi/IDB
(-schema [db] schema)
(-rschema [db] rschema)
(-system-entities [db] system-entities)
(-attrs-by [db property] (rschema property))
(-temporal-index? [db] (dbi/-keep-history? db))
(-keep-history? [db] (:keep-history? config))
(-max-tx [db] max-tx)
(-max-eid [db] max-eid)
(-config [db] config)
(-ref-for [db a-ident]
(if (:attribute-refs? config)
(let [ref (get ident-ref-map a-ident)]
(when (nil? ref)
(warn (str "Attribute " a-ident " has not been found in database")))
ref)
a-ident))
(-ident-for [db a-ref]
(if (:attribute-refs? config)
(let [a-ident (get ref-ident-map a-ref)]
(when (nil? a-ident)
(warn (str "Attribute with reference number " a-ref " has not been found in database")))
a-ident)
a-ref))
dbi/ISearch
(-search [db pattern]
(dbs/search-current-indices db pattern))
dbi/IIndexAccess
(-datoms [db index-type cs]
(di/-slice (get db index-type)
(dbu/components->pattern db index-type cs e0 tx0)
(dbu/components->pattern db index-type cs emax txmax)
index-type))
(-seek-datoms [db index-type cs]
(di/-slice (get db index-type)
(dbu/components->pattern db index-type cs e0 tx0)
(datom emax nil nil txmax)
index-type))
(-rseek-datoms [db index-type cs]
(-> (di/-slice (get db index-type)
(dbu/components->pattern db index-type cs e0 tx0)
(datom emax nil nil txmax)
index-type)
vec
rseq))
(-index-range [db attr start end]
(when-not (dbu/indexing? db attr)
(raise "Attribute" attr "should be marked as :db/index true" {}))
(dbu/validate-attr attr (list '-index-range 'db attr start end) db)
(di/-slice avet
(dbu/resolve-datom db nil attr start nil e0 tx0)
(dbu/resolve-datom db nil attr end nil emax txmax)
:avet))
data/EqualityPartition
(equality-partition [x] :datahike/db)
data/Diff
(diff-similar [a b]
(let [datoms-a (di/-slice (:eavt a) (datom e0 nil nil tx0) (datom emax nil nil txmax) :eavt)
datoms-b (di/-slice (:eavt b) (datom e0 nil nil tx0) (datom emax nil nil txmax) :eavt)]
(dd/diff-sorted datoms-a datoms-b dd/cmp-datoms-eavt-quick))))
(defrecord-updatable FilteredDB [unfiltered-db pred]
#?@(:cljs
[IEquiv (-equiv [db other] (equiv-db db other))
ISeqable (-seq [db] (dbi/-datoms db :eavt []))
ICounted (-count [db] (count (dbi/-datoms db :eavt [])))
IPrintWithWriter (-pr-writer [db w opts] (pr-db db w opts))
IEmptyableCollection (-empty [_] (throw (js/Error. "-empty is not supported on FilteredDB")))
IEmptyableCollection (-empty [_] (throw (js/Error. "-empty is not supported on FilteredDB")))
ILookup (-lookup ([_ _] (throw (js/Error. "-lookup is not supported on FilteredDB")))
([_ _ _] (throw (js/Error. "-lookup is not supported on FilteredDB"))))
IAssociative (-contains-key? [_ _] (throw (js/Error. "-contains-key? is not supported on FilteredDB")))
(-assoc [_ _ _] (throw (js/Error. "-assoc is not supported on FilteredDB")))]
:clj
[IPersistentCollection
(count [db] (count (dbi/-datoms db :eavt [])))
(equiv [db o] (equiv-db db o))
(cons [db [k v]] (throw (UnsupportedOperationException. "cons is not supported on FilteredDB")))
(empty [db] (throw (UnsupportedOperationException. "empty is not supported on FilteredDB")))
Seqable (seq [db] (dbi/-datoms db :eavt []))
clojure.lang.ILookup (valAt [db k] (throw (UnsupportedOperationException. "valAt/2 is not supported on FilteredDB")))
(valAt [db k nf] (throw (UnsupportedOperationException. "valAt/3 is not supported on FilteredDB")))
clojure.lang.IKeywordLookup (getLookupThunk [db k]
(throw (UnsupportedOperationException. "getLookupThunk is not supported on FilteredDB")))
Associative
(containsKey [e k] (throw (UnsupportedOperationException. "containsKey is not supported on FilteredDB")))
(entryAt [db k] (throw (UnsupportedOperationException. "entryAt is not supported on FilteredDB")))
(assoc [db k v] (throw (UnsupportedOperationException. "assoc is not supported on FilteredDB")))])
dbi/IDB
(-schema [db] (dbi/-schema unfiltered-db))
(-rschema [db] (dbi/-rschema unfiltered-db))
(-system-entities [db] (dbi/-system-entities unfiltered-db))
(-attrs-by [db property] (dbi/-attrs-by unfiltered-db property))
(-temporal-index? [db] (dbi/-keep-history? db))
(-keep-history? [db] (dbi/-keep-history? unfiltered-db))
(-max-tx [db] (dbi/-max-tx unfiltered-db))
(-max-eid [db] (dbi/-max-eid unfiltered-db))
(-config [db] (dbi/-config unfiltered-db))
(-ref-for [db a-ident] (dbi/-ref-for unfiltered-db a-ident))
(-ident-for [db a-ref] (dbi/-ident-for unfiltered-db a-ref))
dbi/ISearch
(-search [db pattern]
(filter (.-pred db) (dbi/-search unfiltered-db pattern)))
dbi/IIndexAccess
(-datoms [db index cs]
(filter (.-pred db) (dbi/-datoms unfiltered-db index cs)))
(-seek-datoms [db index cs]
(filter (.-pred db) (dbi/-seek-datoms unfiltered-db index cs)))
(-rseek-datoms [db index cs]
(filter (.-pred db) (dbi/-rseek-datoms unfiltered-db index cs)))
(-index-range [db attr start end]
(filter (.-pred db) (dbi/-index-range unfiltered-db attr start end))))
HistoricalDB
(defrecord-updatable HistoricalDB [origin-db]
#?@(:cljs
[IEquiv (-equiv [db other] (equiv-db db other))
ISeqable (-seq [db] (dbi/-datoms db :eavt []))
ICounted (-count [db] (count (dbi/-datoms db :eavt [])))
IPrintWithWriter (-pr-writer [db w opts] (pr-db db w opts))
IEmptyableCollection (-empty [_] (throw (js/Error. "-empty is not supported on HistoricalDB")))
IEmptyableCollection (-empty [_] (throw (js/Error. "-empty is not supported on HistoricalDB")))
ILookup (-lookup ([_ _] (throw (js/Error. "-lookup is not supported on HistoricalDB")))
([_ _ _] (throw (js/Error. "-lookup is not supported on HistoricalDB"))))
IAssociative (-contains-key? [_ _] (throw (js/Error. "-contains-key? is not supported on HistoricalDB")))
(-assoc [_ _ _] (throw (js/Error. "-assoc is not supported on HistoricalDB")))]
:clj
[IPersistentCollection
(count [db] (count (dbi/-datoms db :eavt [])))
(equiv [db o] (equiv-db db o))
(cons [db [k v]] (throw (UnsupportedOperationException. "cons is not supported on HistoricalDB")))
(empty [db] (throw (UnsupportedOperationException. "empty is not supported on HistoricalDB")))
Seqable
(seq [db] (dbi/-datoms db :eavt []))
Associative
(assoc [db k v] (throw (UnsupportedOperationException. "assoc is not supported on HistoricalDB")))])
dbi/IDB
(-schema [db] (dbi/-schema origin-db))
(-rschema [db] (dbi/-rschema origin-db))
(-system-entities [db] (dbi/-system-entities origin-db))
(-attrs-by [db property] (dbi/-attrs-by origin-db property))
(-temporal-index? [db] (dbi/-keep-history? origin-db))
(-keep-history? [db] (dbi/-keep-history? origin-db))
(-max-tx [db] (dbi/-max-tx origin-db))
(-max-eid [db] (dbi/-max-eid origin-db))
(-config [db] (dbi/-config origin-db))
(-ref-for [db a-ident] (dbi/-ref-for origin-db a-ident))
(-ident-for [db a-ref] (dbi/-ident-for origin-db a-ref))
dbi/IHistory
(-time-point [db] nil)
(-origin [db] origin-db)
dbi/ISearch
(-search [db pattern]
(dbs/temporal-search origin-db pattern))
dbi/IIndexAccess
(-datoms [db index-type cs] (dbu/temporal-datoms origin-db index-type cs))
(-seek-datoms [db index-type cs] (dbs/temporal-seek-datoms origin-db index-type cs))
(-rseek-datoms [db index-type cs] (dbs/temporal-rseek-datoms origin-db index-type cs))
(-index-range [db attr start end] (dbs/temporal-index-range origin-db db attr start end)))
AsOfDB
(defn- date? [d]
#?(:cljs (instance? js/Date d)
:clj (instance? Date d)))
(defn get-current-values [db history-datoms]
(->> history-datoms
(group-by (fn [^Datom datom] [(.-e datom) (.-a datom)]))
(mapcat
(fn [[[_ a] datoms]]
(if (dbu/multival? db a)
(->> datoms
(sort-by datom-tx)
(reduce (fn [current-datoms ^Datom datom]
(if (datom-added datom)
(assoc current-datoms (.-v datom) datom)
(dissoc current-datoms (.-v datom))))
{})
vals)
(let [last-ea-tx (apply max (map datom-tx datoms))
current-ea-datom (first (filter #(and (datom-added %) (= last-ea-tx (datom-tx %)))
datoms))]
(if current-ea-datom
[current-ea-datom]
[])))))))
(defn filter-as-of-datoms [datoms time-point db]
(let [as-of-pred (fn [^Datom d]
(if (date? time-point)
(.before ^Date (.-v d) ^Date time-point)
(<= (dd/datom-tx d) time-point)))
filtered-tx-ids (dbu/filter-txInstant datoms as-of-pred db)
filtered-datoms (->> datoms
(filter (fn [^Datom d] (contains? filtered-tx-ids (datom-tx d))))
(get-current-values db))]
filtered-datoms))
(defrecord-updatable AsOfDB [origin-db time-point]
#?@(:cljs
[IEquiv (-equiv [db other] (equiv-db db other))
ISeqable (-seq [db] (dbi/-datoms db :eavt []))
ICounted (-count [db] (count (dbi/-datoms db :eavt [])))
IPrintWithWriter (-pr-writer [db w opts] (pr-db db w opts))
IEmptyableCollection (-empty [_] (throw (js/Error. "-empty is not supported on AsOfDB")))
IEmptyableCollection (-empty [_] (throw (js/Error. "-empty is not supported on AsOfDB")))
ILookup (-lookup ([_ _] (throw (js/Error. "-lookup is not supported on AsOfDB")))
([_ _ _] (throw (js/Error. "-lookup is not supported on AsOfDB"))))
IAssociative (-contains-key? [_ _] (throw (js/Error. "-contains-key? is not supported on AsOfDB")))
(-assoc [_ _ _] (throw (js/Error. "-assoc is not supported on AsOfDB")))]
:clj
[IPersistentCollection
(count [db] (count (dbi/-datoms db :eavt [])))
(equiv [db o] (equiv-db db o))
(cons [db [k v]] (throw (UnsupportedOperationException. "cons is not supported on AsOfDB")))
(empty [db] (throw (UnsupportedOperationException. "empty is not supported on AsOfDB")))
Seqable
(seq [db] (dbi/-datoms db :eavt []))
Associative
(assoc [db k v] (throw (UnsupportedOperationException. "assoc is not supported on AsOfDB")))])
dbi/IDB
(-schema [db] (dbi/-schema origin-db))
(-rschema [db] (dbi/-rschema origin-db))
(-system-entities [db] (dbi/-system-entities origin-db))
(-attrs-by [db property] (dbi/-attrs-by origin-db property))
(-temporal-index? [db] (dbi/-keep-history? origin-db))
(-keep-history? [db] (dbi/-keep-history? origin-db))
(-max-tx [db] (dbi/-max-tx origin-db))
(-max-eid [db] (dbi/-max-eid origin-db))
(-config [db] (dbi/-config origin-db))
(-ref-for [db a-ident] (dbi/-ref-for origin-db a-ident))
(-ident-for [db a-ref] (dbi/-ident-for origin-db a-ref))
dbi/IHistory
(-time-point [db] time-point)
(-origin [db] origin-db)
dbi/ISearch
(-search [db pattern]
(-> (dbs/temporal-search origin-db pattern)
(filter-as-of-datoms time-point origin-db)))
dbi/IIndexAccess
(-datoms [db index-type cs]
(-> (dbu/temporal-datoms origin-db index-type cs)
(filter-as-of-datoms time-point origin-db)))
(-seek-datoms [db index-type cs]
(-> (dbs/temporal-seek-datoms origin-db index-type cs)
(filter-as-of-datoms time-point origin-db)))
(-rseek-datoms [db index-type cs]
(-> (dbs/temporal-rseek-datoms origin-db index-type cs)
(filter-as-of-datoms time-point origin-db)))
(-index-range [db attr start end]
(-> (dbs/temporal-index-range origin-db db attr start end)
(filter-as-of-datoms time-point origin-db))))
SinceDB
(defn- filter-since [datoms time-point db]
(let [since-pred (fn [^Datom d]
(if (date? time-point)
(.after ^Date (.-v d) ^Date time-point)
(>= (.-tx d) time-point)))
filtered-tx-ids (dbu/filter-txInstant datoms since-pred db)]
(->> datoms
(filter datom-added)
(filter (fn [^Datom d] (contains? filtered-tx-ids (datom-tx d)))))))
(defrecord-updatable SinceDB [origin-db time-point]
#?@(:cljs
[IEquiv (-equiv [db other] (equiv-db db other))
ISeqable (-seq [db] (dbi/-datoms db :eavt []))
ICounted (-count [db] (count (dbi/-datoms db :eavt [])))
IPrintWithWriter (-pr-writer [db w opts] (pr-db db w opts))
IEmptyableCollection (-empty [_] (throw (js/Error. "-empty is not supported on SinceDB")))
IEmptyableCollection (-empty [_] (throw (js/Error. "-empty is not supported on SinceDB")))
ILookup (-lookup ([_ _] (throw (js/Error. "-lookup is not supported on SinceDB")))
([_ _ _] (throw (js/Error. "-lookup is not supported on SinceDB"))))
IAssociative (-contains-key? [_ _] (throw (js/Error. "-contains-key? is not supported on SinceDB")))
(-assoc [_ _ _] (throw (js/Error. "-assoc is not supported on SinceDB")))]
:clj
[IPersistentCollection
(count [db] (count (dbi/-datoms db :eavt [])))
(equiv [db o] (equiv-db db o))
(cons [db [k v]] (throw (UnsupportedOperationException. "cons is not supported on SinceDB")))
(empty [db] (throw (UnsupportedOperationException. "empty is not supported on SinceDB")))
Seqable
(seq [db] (dbi/-datoms db :eavt []))
Associative
(assoc [db k v] (throw (UnsupportedOperationException. "assoc is not supported on SinceDB")))])
dbi/IDB
(-schema [db] (dbi/-schema origin-db))
(-rschema [db] (dbi/-rschema origin-db))
(-system-entities [db] (dbi/-system-entities origin-db))
(-attrs-by [db property] (dbi/-attrs-by origin-db property))
(-temporal-index? [db] (dbi/-keep-history? db))
(-keep-history? [db] (dbi/-keep-history? origin-db))
(-max-tx [db] (dbi/-max-tx origin-db))
(-max-eid [db] (dbi/-max-eid origin-db))
(-config [db] (dbi/-config origin-db))
(-ref-for [db a-ident] (dbi/-ref-for origin-db a-ident))
(-ident-for [db a-ref] (dbi/-ident-for origin-db a-ref))
dbi/IHistory
(-time-point [db] time-point)
(-origin [db] origin-db)
dbi/ISearch
(-search [db pattern]
(-> (dbs/temporal-search origin-db pattern)
(filter-since time-point origin-db)))
dbi/IIndexAccess
(dbi/-datoms [db index-type cs]
(-> (dbu/temporal-datoms origin-db index-type cs)
(filter-since time-point origin-db)))
(dbi/-seek-datoms [db index-type cs]
(-> (dbs/temporal-seek-datoms origin-db index-type cs)
(filter-since time-point origin-db)))
(dbi/-rseek-datoms [db index-type cs]
(-> (dbs/temporal-rseek-datoms origin-db index-type cs)
(filter-since time-point origin-db)))
(dbi/-index-range [db attr start end]
(-> (dbs/temporal-index-range origin-db db attr start end)
(filter-since time-point origin-db))))
(defn- equiv-db-index [x y]
(loop [xs (seq x)
ys (seq y)]
(cond
(nil? xs) (nil? ys)
(= (first xs) (first ys)) (recur (next xs) (next ys))
:else false)))
(defn- equiv-db [db other]
(and (or (instance? DB other) (instance? FilteredDB other))
(= (dbi/-schema db) (dbi/-schema other))
(equiv-db-index (dbi/-datoms db :eavt []) (dbi/-datoms other :eavt []))))
#?(:cljs
(defn pr-db [db w opts]
(-write w "#datahike/DB {")
(-write w (str ":max-tx " (dbi/-max-tx db) " "))
(-write w (str ":max-eid " (dbi/-max-eid db) " "))
(-write w "}")))
#?(:clj
(do
(defn pr-db [db, ^Writer w]
(.write w (str "#datahike/DB {"))
(.write w (str ":max-tx " (dbi/-max-tx db) " "))
(.write w (str ":max-eid " (dbi/-max-eid db)))
(.write w "}"))
(defn pr-hist-db [db ^Writer w flavor time-point?]
(.write w (str "#datahike/" flavor " {"))
(.write w ":origin ")
(binding [*out* w]
(pr (dbi/-origin db)))
(when time-point?
(.write w " :time-point ")
(binding [*out* w]
(pr (dbi/-time-point db))))
(.write w "}"))
(defmethod print-method DB [db w] (pr-db db w))
(defmethod print-method HistoricalDB [db w] (pr-hist-db db w "HistoricalDB" false))
(defmethod print-method AsOfDB [db w] (pr-hist-db db w "AsOfDB" true))
(defmethod print-method SinceDB [db w] (pr-hist-db db w "SinceDB" true))
(defmethod pp/simple-dispatch Datom [^Datom d]
(pp/pprint-logical-block :prefix "#datahike/Datom [" :suffix "]"
(pp/write-out (.-e d))
(.write ^Writer *out* " ")
(pp/pprint-newline :linear)
(pp/write-out (.-a d))
(.write ^Writer *out* " ")
(pp/pprint-newline :linear)
(pp/write-out (.-v d))
(.write ^Writer *out* " ")
(pp/pprint-newline :linear)
(pp/write-out (datom-tx d))))
(defn- pp-db [db ^Writer w]
(pp/pprint-logical-block :prefix "#datahike/DB {" :suffix "}"
(pp/pprint-logical-block
(pp/write-out :max-tx)
(.write ^Writer *out* " ")
(pp/pprint-newline :linear)
(pp/write-out (dbi/-max-tx db))
(.write ^Writer *out* " ")
(pp/pprint-newline :linear)
(pp/write-out :max-eid)
(.write ^Writer *out* " ")
(pp/pprint-newline :linear)
(pp/write-out (dbi/-max-eid db)))
(pp/pprint-newline :linear)))
(defmethod pp/simple-dispatch DB [db] (pp-db db *out*))
(defmethod pp/simple-dispatch FilteredDB [db] (pp-db db *out*))))
(defn- validate-schema-key [a k v expected]
(when-not (or (nil? v)
(contains? expected v))
(throw (ex-info (str "Bad attribute specification for " (pr-str {a {k v}}) ", expected one of " expected)
{:error :schema/validation
:attribute a
:key k
:value v}))))
(defn- validate-tuple-schema [a kv]
(when (= :db.type/tuple (:db/valueType kv))
(case (some #{:db/tupleAttrs :db/tupleTypes :db/tupleType} (keys kv))
:db/tupleAttrs (when (not (vector? (:db/tupleAttrs kv)))
(throw (ex-info (str "Bad attribute specification for " a ": {:db/tupleAttrs ...} should be a vector}")
{:error :schema/validation
:attribute a
:key :db/tupleAttrs})))
:db/tupleTypes (when (not (vector? (:db/tupleTypes kv)))
(throw (ex-info (str "Bad attribute specification for " a ": {:db/tupleTypes ...} should be a vector}")
{:error :schema/validation
:attribute a
:key :db/tupleTypes})))
:db/tupleType (when (not (keyword? (:db/tupleType kv)))
(throw (ex-info (str "Bad attribute specification for " a ": {:db/tupleType ...} should be a keyword}")
{:error :schema/validation
:attribute a
:key :db/tupleType}))))))
(defn- validate-schema [schema]
(doseq [[a-ident kv] schema]
(let [comp? (:db/isComponent kv false)]
(validate-schema-key a-ident :db/isComponent (:db/isComponent kv) #{true false})
(when (and comp? (not= (:db/valueType kv) :db.type/ref))
(throw (ex-info (str "Bad attribute specification for " a-ident ": {:db/isComponent true} should also have {:db/valueType :db.type/ref}")
{:error :schema/validation
:attribute a-ident
:key :db/isComponent}))))
(validate-schema-key a-ident :db/unique (:db/unique kv) #{:db.unique/value :db.unique/identity})
(validate-schema-key a-ident :db/valueType (:db/valueType kv) #{:db.type/ref :db.type/tuple})
(validate-schema-key a-ident :db/cardinality (:db/cardinality kv) #{:db.cardinality/one :db.cardinality/many})
(validate-tuple-schema a-ident kv)))
(defn to-old-schema [new-schema]
(if (or (vector? new-schema) (seq? new-schema))
(reduce
(fn [acc {:keys [:db/ident] :as schema-entity}]
(assoc acc ident schema-entity))
{}
new-schema)
new-schema))
(defn- validate-write-schema [schema]
(when-not (ds/old-schema-valid? schema)
(raise "Incomplete schema attributes, expected at least :db/valueType, :db/cardinality"
(ds/explain-old-schema schema))))
(defn init-max-eid [eavt]
solved with reverse slice first in datascript
(if-let [datoms (di/-slice
eavt
(datom e0 nil nil tx0)
(datom (dec tx0) nil nil txmax)
:eavt)]
e0))
(defn get-max-tx [eavt]
(transduce (map (fn [^Datom d] (datom-tx d))) max tx0 (di/-all eavt)))
(let [idents (reduce (fn [m {:keys [db/ident db/id]}]
(assoc m ident id))
{}
system-schema)]
(->> system-schema
(mapcat
(fn [{:keys [db/id] :as i}]
(reduce-kv
(fn [coll k v]
(let [v-ref (idents v)
can be system schema eid ( v - ref ) , or ident or regular ( v )
d-val (if (and (not= k :db/ident) v-ref) v-ref v)]
(conj coll (dd/datom id (idents k) d-val tx0))))
[]
(dissoc i :db/id))))
vec)))
(defn get-ident-ref-map
"Maps IDs of system entities to their names (keyword) and attribute names to the attribute's specification"
[schema]
(reduce
(fn [m [a {:keys [db/id]}]]
(when a
(assoc m a id)))
{}
schema))
(defn ^DB empty-db
"Prefer create-database in api, schema only in index for attribute reference database."
([] (empty-db nil nil nil))
([schema] (empty-db schema nil nil))
([schema user-config] (empty-db schema user-config nil))
([schema user-config store]
{:pre [(or (nil? schema) (map? schema) (coll? schema))]}
(let [complete-config (merge (dc/storeless-config) user-config)
_ (dc/validate-config complete-config)
{:keys [keep-history? index schema-flexibility attribute-refs?]} complete-config
on-read? (= :read schema-flexibility)
schema (to-old-schema schema)
_ (if on-read?
(validate-schema schema)
(validate-write-schema schema))
complete-schema (merge schema
(if attribute-refs?
c/ref-implicit-schema
c/non-ref-implicit-schema))
rschema (dbu/rschema complete-schema)
ident-ref-map (if attribute-refs? (get-ident-ref-map complete-schema) {})
ref-ident-map (if attribute-refs? (clojure.set/map-invert ident-ref-map) {})
system-entities (if attribute-refs? c/system-entities #{})
indexed (if attribute-refs?
(set (map ident-ref-map (:db/index rschema)))
(:db/index rschema))
index-config (merge (:index-config complete-config)
{:indexed indexed})
eavt (if attribute-refs?
(di/init-index index store ref-datoms :eavt 0 index-config)
(di/empty-index index store :eavt index-config))
aevt (if attribute-refs?
(di/init-index index store ref-datoms :aevt 0 index-config)
(di/empty-index index store :aevt index-config))
indexed-datoms (filter (fn [[_ a _ _]] (contains? indexed a)) ref-datoms)
avet (if attribute-refs?
(di/init-index index store indexed-datoms :avet 0 index-config)
(di/empty-index index store :avet index-config))
max-eid (if attribute-refs? ue0 e0)
max-tx (if attribute-refs? utx0 tx0)]
(map->DB
(merge
{:schema complete-schema
:rschema rschema
:config complete-config
:eavt eavt
:aevt aevt
:avet avet
:max-eid max-eid
:max-tx max-tx
:hash 0
:system-entities system-entities
:ref-ident-map ref-ident-map
:ident-ref-map ident-ref-map
:meta (tools/meta-data)
:op-count (if attribute-refs? (count ref-datoms) 0)}
{:temporal-eavt eavt
:temporal-aevt aevt
:temporal-avet avet}))))))
(defn get-max-tx [eavt]
(transduce (map (fn [^Datom d] (datom-tx d))) max tx0 (di/-all eavt)))
(defn ^DB init-db
([datoms] (init-db datoms nil nil nil))
([datoms schema] (init-db datoms schema nil nil))
([datoms schema user-config] (init-db datoms schema user-config nil))
([datoms schema user-config store]
(validate-schema schema)
(let [{:keys [index keep-history? attribute-refs?] :as complete-config} (merge (dc/storeless-config) user-config)
_ (dc/validate-config complete-config)
complete-schema (merge schema
(if attribute-refs?
c/ref-implicit-schema
c/non-ref-implicit-schema))
rschema (dbu/rschema complete-schema)
ident-ref-map (if attribute-refs? (get-ident-ref-map schema) {})
ref-ident-map (if attribute-refs? (clojure.set/map-invert ident-ref-map) {})
system-entities (if attribute-refs? c/system-entities #{})
indexed (if attribute-refs?
(set (map ident-ref-map (:db/index rschema)))
(:db/index rschema))
new-datoms (if attribute-refs? (concat ref-datoms datoms) datoms)
indexed-datoms (filter (fn [[_ a _ _]] (contains? indexed a)) new-datoms)
op-count 0
index-config (assoc (:index-config complete-config)
:indexed indexed)
avet (di/init-index index store indexed-datoms :avet op-count index-config)
eavt (di/init-index index store new-datoms :eavt op-count index-config)
aevt (di/init-index index store new-datoms :aevt op-count index-config)
max-eid (init-max-eid eavt)
max-tx (get-max-tx eavt)
op-count (count new-datoms)]
(map->DB (merge {:schema complete-schema
:rschema rschema
:config complete-config
:eavt eavt
:aevt aevt
:avet avet
:max-eid max-eid
:max-tx max-tx
:op-count op-count
:hash (reduce #(+ %1 (hash %2)) 0 datoms)
:system-entities system-entities
:meta (tools/meta-data)
:ref-ident-map ref-ident-map
:ident-ref-map ident-ref-map}
(when keep-history?
{:temporal-eavt (di/empty-index index store :eavt index-config)
:temporal-aevt (di/empty-index index store :aevt index-config)
:temporal-avet (di/empty-index index store :avet index-config)}))))))
(defn db-from-reader [{:keys [schema datoms]}]
(init-db (map (fn [[e a v tx]] (datom e a v tx)) datoms) schema))
(defn metrics [^DB db]
(let [update-count-in (fn [m ks] (update-in m ks #(if % (inc %) 1)))
counts-map (->> (di/-seq (.-eavt db))
(reduce (fn [m ^Datom datom]
(-> m
(update-count-in [:per-attr-counts (dbi/-ident-for db (.-a datom))])
(update-count-in [:per-entity-counts (.-e datom)])))
{:per-attr-counts {}
:per-entity-counts {}}))
sum-indexed-attr-counts (fn [attr-counts] (->> attr-counts
(m/filter-keys #(contains? (:db/index (.-rschema db)) %))
vals
(reduce + 0)))]
(cond-> (merge counts-map
{:count (di/-count (.-eavt db))
:avet-count (->> (:per-attr-counts counts-map)
sum-indexed-attr-counts)})
(dbi/-keep-history? db)
(merge {:temporal-count (di/-count (.-temporal-eavt db))
:temporal-avet-count (->> (di/-seq (.-temporal-eavt db))
(reduce (fn [m ^Datom datom] (update-count-in m [(dbi/-ident-for db (.-a datom))]))
{})
sum-indexed-attr-counts)}))))
|
321347b3aa5a49c009905d04c8192b9f9422ef6f2877db6b3567b1a2733c174b | zippy/anansi | irc_bridge_out.clj | (ns anansi.test.streamscapes.channels.irc-bridge-out
(:use [anansi.streamscapes.channels.irc-bridge-out] :reload)
(:use [anansi.ceptr]
[anansi.receptor.scape]
[anansi.receptor.user :only [user-def]]
[anansi.streamscapes.streamscapes]
[anansi.streamscapes.contact :only [contact-def]]
[anansi.streamscapes.channel :only [channel-def]])
(:use [midje.sweet])
(:use [clojure.test]))
(deftest irc-bridge-out
(let [m (make-receptor user-def nil "eric")
r (make-receptor streamscapes-def nil {:matrice-addr (address-of m) :attributes {:_password "password" :data {:datax "x"}}})
eric (make-receptor contact-def r {:attributes {:name "Eric"}})
cc-addr (s-> matrice->make-channel r {:name :irc-stream})
cc (get-receptor r cc-addr)
b (make-receptor irc-bridge-out-def cc {})
irc-contacts (get-scape r :irc-address-contact true)]
(--> key->set b irc-contacts "zippy" (address-of eric))
(fact
(receptor-state b false) => (contains {:fingerprint :anansi.streamscapes.channels.irc-bridge-out.irc-bridge-out}))
(facts "about restoring serialized receptor"
(let [state (receptor-state b true)]
state => (receptor-state (receptor-restore state nil) true)
))
))
| null | https://raw.githubusercontent.com/zippy/anansi/881aa279e5e7836f3002fc2ef7623f2ee1860c9a/test/anansi/test/streamscapes/channels/irc_bridge_out.clj | clojure | (ns anansi.test.streamscapes.channels.irc-bridge-out
(:use [anansi.streamscapes.channels.irc-bridge-out] :reload)
(:use [anansi.ceptr]
[anansi.receptor.scape]
[anansi.receptor.user :only [user-def]]
[anansi.streamscapes.streamscapes]
[anansi.streamscapes.contact :only [contact-def]]
[anansi.streamscapes.channel :only [channel-def]])
(:use [midje.sweet])
(:use [clojure.test]))
(deftest irc-bridge-out
(let [m (make-receptor user-def nil "eric")
r (make-receptor streamscapes-def nil {:matrice-addr (address-of m) :attributes {:_password "password" :data {:datax "x"}}})
eric (make-receptor contact-def r {:attributes {:name "Eric"}})
cc-addr (s-> matrice->make-channel r {:name :irc-stream})
cc (get-receptor r cc-addr)
b (make-receptor irc-bridge-out-def cc {})
irc-contacts (get-scape r :irc-address-contact true)]
(--> key->set b irc-contacts "zippy" (address-of eric))
(fact
(receptor-state b false) => (contains {:fingerprint :anansi.streamscapes.channels.irc-bridge-out.irc-bridge-out}))
(facts "about restoring serialized receptor"
(let [state (receptor-state b true)]
state => (receptor-state (receptor-restore state nil) true)
))
))
| |
d6aa191fbe76fb2d26334da8ddb7a201838049c1dd67ab802bb9fa30f5db0eb1 | sadiqj/ocaml-esp32 | module_constraints.ml | module type S = sig val y : float end;;
module type T = sig val x : float val y : float end;;
type t = T : (module S) -> t;;
let rec x = let module M = (val m) in T (module M)
and (m : (module T)) = (module (struct let x = 10.0 and y = 20.0 end));;
| null | https://raw.githubusercontent.com/sadiqj/ocaml-esp32/33aad4ca2becb9701eb90d779c1b1183aefeb578/testsuite/tests/letrec-disallowed/module_constraints.ml | ocaml | module type S = sig val y : float end;;
module type T = sig val x : float val y : float end;;
type t = T : (module S) -> t;;
let rec x = let module M = (val m) in T (module M)
and (m : (module T)) = (module (struct let x = 10.0 and y = 20.0 end));;
| |
089f35bdf0cb019f26872074b40cc5bb2e01405b53ce4eb44dcdffc2ab379f55 | jesperes/aoc_erlang | aoc2020_day09.erl | %%%=============================================================================
%%% @doc Advent of code puzzle solution
%%% @end
%%%=============================================================================
-module(aoc2020_day09).
-behavior(aoc_puzzle).
-export([ parse/1
, solve1/1
, solve2/1
, info/0
]).
-include("aoc_puzzle.hrl").
%%------------------------------------------------------------------------------
%% @doc info/0
%% Returns info about this puzzle.
%% @end
%%------------------------------------------------------------------------------
-spec info() -> aoc_puzzle().
info() ->
#aoc_puzzle{ module = ?MODULE
, year = 2020
, day = 9
, name = "Encoding Error"
, expected = {138879426, 23761694}
, has_input_file = true
}.
%%==============================================================================
%% Types
%%==============================================================================
-type input_type() :: [integer()].
-type result1_type() :: any().
-type result2_type() :: result1_type().
%%------------------------------------------------------------------------------
%% @doc parse/1
%% Parses input file.
%% @end
%%------------------------------------------------------------------------------
-spec parse(Input :: binary()) -> input_type().
parse(Input) ->
lists:map(fun erlang:list_to_integer/1,
string:tokens(binary_to_list(Input), "\n\r")).
%%------------------------------------------------------------------------------
%% @doc solve1/1
Solves part 1 . Receives parsed input as returned from parse/1 .
%% @end
%%------------------------------------------------------------------------------
-spec solve1(Input :: input_type()) -> result1_type().
solve1(Input) ->
find_first_invalid(Input, 25).
%%------------------------------------------------------------------------------
%% @doc solve2/1
Solves part 2 . Receives parsed input as returned from parse/1 .
%% @end
%%------------------------------------------------------------------------------
-spec solve2(Prog :: input_type()) -> result2_type().
solve2(Input) ->
find_range(Input, 138879426).
%%==============================================================================
%% Internals
%%==============================================================================
find_first_invalid([_|Rest] = List, N) ->
{Preamble, [Next|_]} = lists:split(N, List),
case [Next || X <- Preamble,
Y <- Preamble,
X /= Y, X + Y == Next] of
[_|_] ->
find_first_invalid(Rest, N);
_ ->
Next
end.
find_range(List, Num) ->
find_range(List, 2, Num).
find_range(List, Len, Num) when Len < length(List) ->
case find_range0(List, Len, Num) of
N when is_integer(N) ->
N;
false ->
No range of len Num was found , try a longer one
find_range(List, Len + 1, Num)
end.
%% Find a range in `List' of length `Len' which sums up to `Num`.
Returns the sum of the first and last numbers if found , or false if
%% no such range was found.
find_range0(List, Len, _Num) when length(List) < Len ->
false;
find_range0([_|Rest] = List, Len, Num) ->
{Range, _} = lists:split(Len, List),
Sum = lists:sum(Range),
if Sum == Num ->
lists:min(Range) + lists:max(Range);
true ->
find_range0(Rest, Len, Num)
end.
%%%_* Emacs ====================================================================
%%% Local Variables:
%%% allout-layout: t
erlang - indent - level : 2
%%% End:
| null | https://raw.githubusercontent.com/jesperes/aoc_erlang/ec0786088fb9ab886ee57e17ea0149ba3e91810a/src/2020/aoc2020_day09.erl | erlang | =============================================================================
@doc Advent of code puzzle solution
@end
=============================================================================
------------------------------------------------------------------------------
@doc info/0
Returns info about this puzzle.
@end
------------------------------------------------------------------------------
==============================================================================
Types
==============================================================================
------------------------------------------------------------------------------
@doc parse/1
Parses input file.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc solve1/1
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc solve2/1
@end
------------------------------------------------------------------------------
==============================================================================
Internals
==============================================================================
Find a range in `List' of length `Len' which sums up to `Num`.
no such range was found.
_* Emacs ====================================================================
Local Variables:
allout-layout: t
End: | -module(aoc2020_day09).
-behavior(aoc_puzzle).
-export([ parse/1
, solve1/1
, solve2/1
, info/0
]).
-include("aoc_puzzle.hrl").
-spec info() -> aoc_puzzle().
info() ->
#aoc_puzzle{ module = ?MODULE
, year = 2020
, day = 9
, name = "Encoding Error"
, expected = {138879426, 23761694}
, has_input_file = true
}.
-type input_type() :: [integer()].
-type result1_type() :: any().
-type result2_type() :: result1_type().
-spec parse(Input :: binary()) -> input_type().
parse(Input) ->
lists:map(fun erlang:list_to_integer/1,
string:tokens(binary_to_list(Input), "\n\r")).
Solves part 1 . Receives parsed input as returned from parse/1 .
-spec solve1(Input :: input_type()) -> result1_type().
solve1(Input) ->
find_first_invalid(Input, 25).
Solves part 2 . Receives parsed input as returned from parse/1 .
-spec solve2(Prog :: input_type()) -> result2_type().
solve2(Input) ->
find_range(Input, 138879426).
find_first_invalid([_|Rest] = List, N) ->
{Preamble, [Next|_]} = lists:split(N, List),
case [Next || X <- Preamble,
Y <- Preamble,
X /= Y, X + Y == Next] of
[_|_] ->
find_first_invalid(Rest, N);
_ ->
Next
end.
find_range(List, Num) ->
find_range(List, 2, Num).
find_range(List, Len, Num) when Len < length(List) ->
case find_range0(List, Len, Num) of
N when is_integer(N) ->
N;
false ->
No range of len Num was found , try a longer one
find_range(List, Len + 1, Num)
end.
Returns the sum of the first and last numbers if found , or false if
find_range0(List, Len, _Num) when length(List) < Len ->
false;
find_range0([_|Rest] = List, Len, Num) ->
{Range, _} = lists:split(Len, List),
Sum = lists:sum(Range),
if Sum == Num ->
lists:min(Range) + lists:max(Range);
true ->
find_range0(Rest, Len, Num)
end.
erlang - indent - level : 2
|
ddaef702dfb6dfd104fb6dbd6ed1bf5d86c401b0d97ecafffcce9d34c6b443f2 | weavejester/flupot-pixi | core.cljs | (ns example.core
(:require [brutha.core :as br]
[flupot.pixi :as pixi]
[flupot.dom :as dom]))
(enable-console-print!)
(defn- blur-filter [blur]
(let [filter (PIXI.filters.BlurFilter.)]
(set! (.-blur filter) blur)
filter))
(def canvas
(let [filter (blur-filter 15)]
(.log js/console filter)
(br/component
(fn [{:keys [rotation]}]
(pixi/stage
{:width 400, :height 300}
(pixi/text {:x 100, :y 100, :rotation rotation, :text "Hello World"})
(pixi/sprite {:x 300, :y 100
:rotation rotation
:image "bunny.png"
:filters [filter]}))))))
(def content
(br/component
(fn [state]
(dom/div
(dom/p "Example")
(canvas state)))))
(defn bind [state component root]
(br/mount (component @state) root)
(add-watch state ::bind (fn [_ _ _ s] (br/mount (component s) root))))
(defn animate [state]
(swap! state update :rotation + 0.01)
(js/setTimeout #(animate state) 16))
(let [state (atom {:rotation 0})
app (.getElementById js/document "app")]
(bind state content app)
(animate state))
| null | https://raw.githubusercontent.com/weavejester/flupot-pixi/958e42dc5ee5121e77f8776d402c71b096e71da5/example/src/example/core.cljs | clojure | (ns example.core
(:require [brutha.core :as br]
[flupot.pixi :as pixi]
[flupot.dom :as dom]))
(enable-console-print!)
(defn- blur-filter [blur]
(let [filter (PIXI.filters.BlurFilter.)]
(set! (.-blur filter) blur)
filter))
(def canvas
(let [filter (blur-filter 15)]
(.log js/console filter)
(br/component
(fn [{:keys [rotation]}]
(pixi/stage
{:width 400, :height 300}
(pixi/text {:x 100, :y 100, :rotation rotation, :text "Hello World"})
(pixi/sprite {:x 300, :y 100
:rotation rotation
:image "bunny.png"
:filters [filter]}))))))
(def content
(br/component
(fn [state]
(dom/div
(dom/p "Example")
(canvas state)))))
(defn bind [state component root]
(br/mount (component @state) root)
(add-watch state ::bind (fn [_ _ _ s] (br/mount (component s) root))))
(defn animate [state]
(swap! state update :rotation + 0.01)
(js/setTimeout #(animate state) 16))
(let [state (atom {:rotation 0})
app (.getElementById js/document "app")]
(bind state content app)
(animate state))
| |
9b7e137ab6be44493bf790fc08c08305284c4c7f51624f742b33a223d689c72b | robrix/sequoia | Negate.hs | # LANGUAGE TypeFamilies #
module Sequoia.Calculus.Negate
( -- * Negate
NegateIntro(..)
, negateL'
, negateR'
, shiftN
, dnePK
, dniPK
, negateLK
, negateRK
, negateLK'
, negateRK'
-- * Connectives
, module Sequoia.Connective.Negate
) where
import Data.Profunctor
import Prelude hiding (init)
import Sequoia.Calculus.Context
import Sequoia.Calculus.Control
import Sequoia.Calculus.Core
import Sequoia.Calculus.Structural
import Sequoia.Conjunction
import Sequoia.Connective.Negate
import Sequoia.Connective.Negation
import Sequoia.Contextual
import Sequoia.Polarity
import Sequoia.Profunctor.Command
import Sequoia.Profunctor.Continuation as K
import Sequoia.Profunctor.Value
-- Negate
class Core s => NegateIntro s where
negateL
:: Neg a
=> _Γ ⊣s e r⊢ _Δ > a
-- ----------------------------------
-> Negate e a r < _Γ ⊣s e r⊢ _Δ
negateR
:: Neg a
=> a < _Γ ⊣s e r⊢ _Δ
-- ----------------------------------
-> _Γ ⊣s e r⊢ _Δ > Negate e a r
negateL'
:: (NegateIntro s, Weaken s, Neg a)
=> Negate e a r < _Γ ⊣s e r⊢ _Δ
-- ----------------------------------
-> _Γ ⊣s e r⊢ _Δ > a
negateL' p = negateR init >>> wkR p
negateR'
:: (NegateIntro s, Weaken s, Neg a)
=> _Γ ⊣s e r⊢ _Δ > Negate e a r
-- ----------------------------------
-> a < _Γ ⊣s e r⊢ _Δ
negateR' p = wkL p >>> negateL init
shiftN
:: (Control s, Contextual s)
=> Negate e a r < _Γ ⊣s e r⊢ _Δ > r
-- ----------------------------------
-> _Γ ⊣s e r⊢ _Δ > a
shiftN = shift . negateLK'
dnePK
:: Contextual s
=> a •• r < _Γ ⊣s e r⊢ _Δ
-- --------------------------------------
-> Negate e (a ¬ r) r < _Γ ⊣s e r⊢ _Δ
dnePK = mapL (fmap getNegateNot)
dniPK
:: Contextual s
=> _Γ ⊣s e r⊢ _Δ > a •• r
-- ------------------------------------
-> _Γ ⊣s e r⊢ _Δ > Negate e (a ¬ r) r
dniPK s = sequent (\ _Δ _Γ -> env (\ e -> appSequent s (lmap (fmap (negateNot e)) _Δ) _Γ))
negateLK
:: Contextual s
=> a • r < _Γ ⊣s e r⊢ _Δ
-- ------------------------------
-> Negate e a r < _Γ ⊣s e r⊢ _Δ
negateLK = mapL (fmap negateK)
negateRK
:: Contextual s
=> _Γ ⊣s e r⊢ _Δ > a • r
-- ------------------------------
-> _Γ ⊣s e r⊢ _Δ > Negate e a r
negateRK s = sequent (\ _Δ _Γ -> env (\ e -> appSequent s (lmap (fmap (Negate e)) _Δ) _Γ))
negateLK'
:: Contextual s
=> Negate e a r < _Γ ⊣s e r⊢ _Δ
-- ------------------------------
-> a • r < _Γ ⊣s e r⊢ _Δ
negateLK' s = sequent (\ _Δ _Γ -> env (\ e -> appSequent s _Δ (pure (Negate e (e ∘ exlF _Γ)) >∘∘< exrF _Γ)))
negateRK'
:: Contextual s
=> _Γ ⊣s e r⊢ _Δ > Negate e a r
-- ------------------------------
-> _Γ ⊣s e r⊢ _Δ > a • r
negateRK' = mapR (lmap negateK)
| null | https://raw.githubusercontent.com/robrix/sequoia/e4fae1100fa977a656f2fc654762f23d4448ad76/src/Sequoia/Calculus/Negate.hs | haskell | * Negate
* Connectives
Negate
----------------------------------
----------------------------------
----------------------------------
----------------------------------
----------------------------------
--------------------------------------
------------------------------------
------------------------------
------------------------------
------------------------------
------------------------------ | # LANGUAGE TypeFamilies #
module Sequoia.Calculus.Negate
NegateIntro(..)
, negateL'
, negateR'
, shiftN
, dnePK
, dniPK
, negateLK
, negateRK
, negateLK'
, negateRK'
, module Sequoia.Connective.Negate
) where
import Data.Profunctor
import Prelude hiding (init)
import Sequoia.Calculus.Context
import Sequoia.Calculus.Control
import Sequoia.Calculus.Core
import Sequoia.Calculus.Structural
import Sequoia.Conjunction
import Sequoia.Connective.Negate
import Sequoia.Connective.Negation
import Sequoia.Contextual
import Sequoia.Polarity
import Sequoia.Profunctor.Command
import Sequoia.Profunctor.Continuation as K
import Sequoia.Profunctor.Value
class Core s => NegateIntro s where
negateL
:: Neg a
=> _Γ ⊣s e r⊢ _Δ > a
-> Negate e a r < _Γ ⊣s e r⊢ _Δ
negateR
:: Neg a
=> a < _Γ ⊣s e r⊢ _Δ
-> _Γ ⊣s e r⊢ _Δ > Negate e a r
negateL'
:: (NegateIntro s, Weaken s, Neg a)
=> Negate e a r < _Γ ⊣s e r⊢ _Δ
-> _Γ ⊣s e r⊢ _Δ > a
negateL' p = negateR init >>> wkR p
negateR'
:: (NegateIntro s, Weaken s, Neg a)
=> _Γ ⊣s e r⊢ _Δ > Negate e a r
-> a < _Γ ⊣s e r⊢ _Δ
negateR' p = wkL p >>> negateL init
shiftN
:: (Control s, Contextual s)
=> Negate e a r < _Γ ⊣s e r⊢ _Δ > r
-> _Γ ⊣s e r⊢ _Δ > a
shiftN = shift . negateLK'
dnePK
:: Contextual s
=> a •• r < _Γ ⊣s e r⊢ _Δ
-> Negate e (a ¬ r) r < _Γ ⊣s e r⊢ _Δ
dnePK = mapL (fmap getNegateNot)
dniPK
:: Contextual s
=> _Γ ⊣s e r⊢ _Δ > a •• r
-> _Γ ⊣s e r⊢ _Δ > Negate e (a ¬ r) r
dniPK s = sequent (\ _Δ _Γ -> env (\ e -> appSequent s (lmap (fmap (negateNot e)) _Δ) _Γ))
negateLK
:: Contextual s
=> a • r < _Γ ⊣s e r⊢ _Δ
-> Negate e a r < _Γ ⊣s e r⊢ _Δ
negateLK = mapL (fmap negateK)
negateRK
:: Contextual s
=> _Γ ⊣s e r⊢ _Δ > a • r
-> _Γ ⊣s e r⊢ _Δ > Negate e a r
negateRK s = sequent (\ _Δ _Γ -> env (\ e -> appSequent s (lmap (fmap (Negate e)) _Δ) _Γ))
negateLK'
:: Contextual s
=> Negate e a r < _Γ ⊣s e r⊢ _Δ
-> a • r < _Γ ⊣s e r⊢ _Δ
negateLK' s = sequent (\ _Δ _Γ -> env (\ e -> appSequent s _Δ (pure (Negate e (e ∘ exlF _Γ)) >∘∘< exrF _Γ)))
negateRK'
:: Contextual s
=> _Γ ⊣s e r⊢ _Δ > Negate e a r
-> _Γ ⊣s e r⊢ _Δ > a • r
negateRK' = mapR (lmap negateK)
|
9c8493d59de18fd6b426a22345a9eb32aaf92113c26cf219fabaf5a6fd639dc0 | coccinelle/coccinelle | file_transform.ml |
* This file is part of Coccinelle , licensed under the terms of the GPL v2 .
* See copyright.txt in the Coccinelle source code for more information .
* The Coccinelle source code can be obtained at
* This file is part of Coccinelle, licensed under the terms of the GPL v2.
* See copyright.txt in the Coccinelle source code for more information.
* The Coccinelle source code can be obtained at
*)
module Ast0 = Ast0_cocci
(* ------------------------------------------------------------------------- *)
Transforms the original Coccinelle script and prints it .
* Prints preface and added virtual rules .
*
* Naming conventions : outch = out_channel , inch = in_channel .
*
* TODO : There are a number of edge cases that are not handled well in this
* module due to using pure string - matching without context .
* Example : @ 's inside comments within rule declarations .
* For most _ reasonable _ SmPL scripts , this should n't be a problem though .
* Prints preface and added virtual rules.
*
* Naming conventions: outch = out_channel, inch = in_channel.
*
* TODO: There are a number of edge cases that are not handled well in this
* module due to using pure string-matching without context.
* Example: @'s inside comments within rule declarations.
* For most _reasonable_ SmPL scripts, this shouldn't be a problem though.
*)
(* ------------------------------------------------------------------------- *)
(* GENERAL PURPOSE FUNCTIONS *)
let line_number = ref 0
let get_line inch = line_number := !line_number + 1; input_line inch
let nothing _ = ()
let print = output_string
let print_newline outch = output_string outch "\n"
let print_nl outch x = print outch x; print_newline outch
let print_virtuals outch virtuals =
print_newline outch;
List.iter (fun x -> print_nl outch ("virtual " ^ x)) virtuals;
print_newline outch
exception Eof_error of string
let fail_eof name =
let errmsg = "Error: Reached end of file before rule "^name^" was found." in
raise (Eof_error errmsg)
(* ------------------------------------------------------------------------- *)
(* REGEXES AND STRING MATCH FUNCTIONS *)
(* returns true if str matches the regular expression in regexp *)
let regex_match regex str = Str.string_match (Str.regexp regex) str 0
(* regex for any number of same-line whitespace *)
let sp_re = "[ \t]*"
regex for at least one space or tab
let spp_re = "[ \t]+"
(* regex for any number of /**/ comments *)
let cmnt_re = "\\(" ^ sp_re ^ "/\\*.*\\*/" ^ sp_re ^ "\\)*"
(* regex for any number of /**/ comments with arbitrary whitespace *)
let spcmnt_re = sp_re ^ cmnt_re ^ sp_re
(* we have to handle many cases since it is technically possible to have
* comments and large amounts of whitespace in rule header declarations.
* If someone actually writes a script like this, they should be punished.
*)
let escape = Str.global_replace (Str.regexp "\\$") "\\\\$"
let match_full rule_name =
regex_match ("^@"^spcmnt_re^(escape rule_name)^"\\(@\\|"^spp_re^".*@\\)")
let match_part rule_name =
regex_match ("^@"^spcmnt_re^(escape rule_name)^"\\("^spp_re^".*\\)?$")
let match_end = regex_match ".*@"
let match_nameless_rule = regex_match "\\(^\\(@@\\)\\|^@.*@$\\)"
let match_rule_start = regex_match ("^@")
let match_rule_start_arob = regex_match ("^@"^spcmnt_re^"$")
let match_rule_end = regex_match (spcmnt_re^"@@")
let match_non_empty = regex_match (spcmnt_re^"[^ \t]")
(* ------------------------------------------------------------------------- *)
IN_CHANNEL TRAVERSAL
let rec find_match ~do_this ~until inch =
let line = get_line inch in
if until line then (line, inch)
else begin
do_this line;
find_match ~do_this ~until inch
end
let find_line ~do_this ~until_line inch =
find_match ~do_this ~until:(fun _ -> until_line = !line_number) inch
(* upon a call to regex string matching, print what follows after the match *)
let print_rest outch line =
let i = Str.match_end() in
let length = String.length line in
let rest = String.sub line i (length - i) in
if i <> length then print_nl outch rest
(* prints the contents of the opened channel until finishes *)
let rec print_to_end outch inch =
(try
print_nl outch (get_line inch)
with
End_of_file -> (print_newline outch; raise End_of_file));
print_to_end outch inch
goes through the file , printing it as it goes , until finding the rule
* declaration of name , without printing the rule declaration .
* returns the line where the rule dec ends , and the in_channel at that stage .
* declaration of name, without printing the rule declaration.
* returns the line where the rule dec ends, and the in_channel at that stage.
*)
let skip_rule_dec name outch inch =
let rec traverse outch inch =
let line = get_line inch in
if match_full name line then (* "@rulename@" *)
(line, inch)
else if match_part name line then (* "@rulename" *)
find_match ~do_this:nothing ~until:match_end inch
else if match_rule_start_arob line then (* "@", next line maybe rulename *)
let (line,inch) =
find_match ~do_this:nothing ~until:match_non_empty inch in
if regex_match (sp_re^name) line then
find_match ~do_this:nothing ~until:match_end inch
else begin
print_nl outch ("@"^line);
traverse outch inch
end
else begin (* line does not contain rule dec *)
print_nl outch line;
traverse outch inch
end in
traverse outch inch
(* ------------------------------------------------------------------------- *)
(* PATCH SPECIFIC *)
outputs the rule declaration with standard patch dependencies .
* rule_name is the new name which overrules the one in the Ast0 rule .
* rule_name is the new name which overrules the one in the Ast0 rule.
*)
let print_patch_decl outch rule_name = function
| Ast0.InitialScriptRule (nm,_,_,_,_,_)
| Ast0.FinalScriptRule (nm,_,_,_,_,_)
| Ast0.ScriptRule (nm,_,_,_,_,_,_) ->
failwith ("Error: The rule " ^ nm ^ " is a script rule ...!")
| Ast0.CocciRule ((_,_,(isos,drop_isos,deps,_,exists)),_,_,_) ->
let deps = Globals.add_patch_dependency deps in
let patch_header = Rule_header.generate
~isos ~drop_isos ~deps ~rule_name ~exists ~meta_vars:[] ~meta_pos:[] in
Rule_header.print_declaration outch patch_header
(* prints the file until the declaration of the rule, which is then substituted
* with whatever handler does.
*)
let print_named_rule ~rule ~handler ~outch ~inch =
let name = Ast0.get_rule_name rule in
let (line,inch) = skip_rule_dec name outch inch in
handler line inch
(* prints the file until the rule declaration (rule_name must follow the format
* "rule starting on line <num>"), which is substituted with whatever handler
* does.
*)
let print_nameless_rule ~rule ~handler ~outch ~inch =
let rule_name = Ast0.get_rule_name rule in
let rule_line = Globals.extract_line rule_name in
let _ = assert (rule_line > !line_number) in
let (line, inch) =
find_line ~do_this:(print_nl outch) ~until_line:rule_line inch in
if match_nameless_rule line then
handler line inch
else if String.contains line '@' then
let (line, inch) = find_match ~do_this:nothing ~until:match_end inch in
handler line inch
else
failwith ("Error: Did not find a " ^rule_name^ ", instead found: " ^line)
(* Finds the declaration of the input rule ("@rulename ...@") and substitutes
* it with a patch dependent version ("@rulename depends on patch ...@").
*)
let print_rule_patch outch inch (rule, new_name) =
(* prints out patch header. If there was anything after the rule declaration,
* print that too. returns the in_channel at the point after the printing.
*)
let handler line inch =
print_patch_decl outch new_name rule;
print_rest outch line;
inch in
let old_name = Ast0.get_rule_name rule in
try
if old_name = new_name then
print_named_rule ~rule ~handler ~outch ~inch
else
print_nameless_rule ~rule ~handler ~outch ~inch
with
| End_of_file -> fail_eof old_name
| e -> raise e (* propagate exception upwards *)
let print_patch outch inch rules =
let inch = List.fold_left (print_rule_patch outch) inch rules in
print_to_end outch inch
(* ------------------------------------------------------------------------- *)
(* CONTEXT SPECIFIC *)
(* in context mode, we do not want to keep the original rules, since our
* generated versions contain the same information + added metapositions. *)
find the start of the next rule .
let next outch inch =
try
let r = find_match ~do_this:nothing ~until:match_rule_start inch in Some r
with
End_of_file -> None (* there were no more rules *)
(* find the rule with that name and skip it entirely. *)
let skip_named_rule ~rule ~last_line ~outch ~inch =
let name = Ast0.get_rule_name rule in
let (_,inch) =
if match_part name last_line
then (last_line, inch)
else begin
if String.contains last_line '@' then failwith
("Transform error: Can't currently handle this case. Don't " ^
"put newlines in your rule declarations!!!");
print_nl outch last_line;
skip_rule_dec name outch inch
end in
let (_,inch) = find_match ~do_this:nothing ~until:match_rule_end inch in
next outch inch
(* find the rule that starts on that line and skip it entirely *)
let skip_nameless_rule ~rule ~outch ~inch =
let name = Ast0.get_rule_name rule in
let rule_line = Globals.extract_line name in
let _ = assert (rule_line >= !line_number) in
let inch =
if rule_line = !line_number then
inch
else
snd (find_line ~do_this:(print_nl outch) ~until_line:rule_line inch) in
at this point , line is the line that contains the rule header . so we need
* the rule header end @@ and then the start of the next rule .
* the rule header end @@ and then the start of the next rule.
*)
let (_,inch) = find_match ~do_this:nothing ~until:match_rule_end inch in
next outch inch
(* print a context rule (that is, don't print it, but find it and skip it!)
* last_res is (the last line contents, the in_channel) from the previous call.
*
* returns Some (last line contents, in_channel) if there was another rule
* after the input rule, otherwise None.
*)
let print_rule_context outch last_res (rule, new_name) =
let old_name = Ast0.get_rule_name rule in
try
match last_res with
| None -> raise End_of_file
| Some (last_line, inch) ->
if old_name = new_name then
skip_named_rule ~rule ~last_line ~outch ~inch
else
skip_nameless_rule ~rule ~outch ~inch
with
| End_of_file -> fail_eof old_name
| e -> raise e (* propagate exception upwards *)
let print_context outch inch rules =
let res = List.fold_left (print_rule_context outch) (Some ("",inch)) rules in
match res with
| Some (_,i) -> print_to_end outch i
| None -> raise End_of_file
(* ------------------------------------------------------------------------- *)
(* ENTRY POINT *)
(* reads the file and prints it with transformations.
* assumes rules are sorted in order of when they occur in the script.
*)
let print ~context_mode ~file_name ~preface ~virtuals ~ordered_rules outch =
let _ = line_number := 0 in
let _ = print_nl outch preface in
let _ = print_virtuals outch virtuals in
let inch = open_in file_name in
try
if context_mode then
print_context outch inch ordered_rules
else
print_patch outch inch ordered_rules
with
| End_of_file -> flush outch; close_in inch (* ended safely *)
| Eof_error msg -> flush outch; close_in inch; failwith msg
| e -> close_in_noerr inch; raise e
| null | https://raw.githubusercontent.com/coccinelle/coccinelle/57cbff0c5768e22bb2d8c20e8dae74294515c6b3/tools/spgen/source/file_transform.ml | ocaml | -------------------------------------------------------------------------
-------------------------------------------------------------------------
GENERAL PURPOSE FUNCTIONS
-------------------------------------------------------------------------
REGEXES AND STRING MATCH FUNCTIONS
returns true if str matches the regular expression in regexp
regex for any number of same-line whitespace
regex for any number of /**/ comments
regex for any number of /**/ comments with arbitrary whitespace
we have to handle many cases since it is technically possible to have
* comments and large amounts of whitespace in rule header declarations.
* If someone actually writes a script like this, they should be punished.
-------------------------------------------------------------------------
upon a call to regex string matching, print what follows after the match
prints the contents of the opened channel until finishes
"@rulename@"
"@rulename"
"@", next line maybe rulename
line does not contain rule dec
-------------------------------------------------------------------------
PATCH SPECIFIC
prints the file until the declaration of the rule, which is then substituted
* with whatever handler does.
prints the file until the rule declaration (rule_name must follow the format
* "rule starting on line <num>"), which is substituted with whatever handler
* does.
Finds the declaration of the input rule ("@rulename ...@") and substitutes
* it with a patch dependent version ("@rulename depends on patch ...@").
prints out patch header. If there was anything after the rule declaration,
* print that too. returns the in_channel at the point after the printing.
propagate exception upwards
-------------------------------------------------------------------------
CONTEXT SPECIFIC
in context mode, we do not want to keep the original rules, since our
* generated versions contain the same information + added metapositions.
there were no more rules
find the rule with that name and skip it entirely.
find the rule that starts on that line and skip it entirely
print a context rule (that is, don't print it, but find it and skip it!)
* last_res is (the last line contents, the in_channel) from the previous call.
*
* returns Some (last line contents, in_channel) if there was another rule
* after the input rule, otherwise None.
propagate exception upwards
-------------------------------------------------------------------------
ENTRY POINT
reads the file and prints it with transformations.
* assumes rules are sorted in order of when they occur in the script.
ended safely |
* This file is part of Coccinelle , licensed under the terms of the GPL v2 .
* See copyright.txt in the Coccinelle source code for more information .
* The Coccinelle source code can be obtained at
* This file is part of Coccinelle, licensed under the terms of the GPL v2.
* See copyright.txt in the Coccinelle source code for more information.
* The Coccinelle source code can be obtained at
*)
module Ast0 = Ast0_cocci
Transforms the original Coccinelle script and prints it .
* Prints preface and added virtual rules .
*
* Naming conventions : outch = out_channel , inch = in_channel .
*
* TODO : There are a number of edge cases that are not handled well in this
* module due to using pure string - matching without context .
* Example : @ 's inside comments within rule declarations .
* For most _ reasonable _ SmPL scripts , this should n't be a problem though .
* Prints preface and added virtual rules.
*
* Naming conventions: outch = out_channel, inch = in_channel.
*
* TODO: There are a number of edge cases that are not handled well in this
* module due to using pure string-matching without context.
* Example: @'s inside comments within rule declarations.
* For most _reasonable_ SmPL scripts, this shouldn't be a problem though.
*)
let line_number = ref 0
let get_line inch = line_number := !line_number + 1; input_line inch
let nothing _ = ()
let print = output_string
let print_newline outch = output_string outch "\n"
let print_nl outch x = print outch x; print_newline outch
let print_virtuals outch virtuals =
print_newline outch;
List.iter (fun x -> print_nl outch ("virtual " ^ x)) virtuals;
print_newline outch
exception Eof_error of string
let fail_eof name =
let errmsg = "Error: Reached end of file before rule "^name^" was found." in
raise (Eof_error errmsg)
let regex_match regex str = Str.string_match (Str.regexp regex) str 0
let sp_re = "[ \t]*"
regex for at least one space or tab
let spp_re = "[ \t]+"
let cmnt_re = "\\(" ^ sp_re ^ "/\\*.*\\*/" ^ sp_re ^ "\\)*"
let spcmnt_re = sp_re ^ cmnt_re ^ sp_re
let escape = Str.global_replace (Str.regexp "\\$") "\\\\$"
let match_full rule_name =
regex_match ("^@"^spcmnt_re^(escape rule_name)^"\\(@\\|"^spp_re^".*@\\)")
let match_part rule_name =
regex_match ("^@"^spcmnt_re^(escape rule_name)^"\\("^spp_re^".*\\)?$")
let match_end = regex_match ".*@"
let match_nameless_rule = regex_match "\\(^\\(@@\\)\\|^@.*@$\\)"
let match_rule_start = regex_match ("^@")
let match_rule_start_arob = regex_match ("^@"^spcmnt_re^"$")
let match_rule_end = regex_match (spcmnt_re^"@@")
let match_non_empty = regex_match (spcmnt_re^"[^ \t]")
IN_CHANNEL TRAVERSAL
let rec find_match ~do_this ~until inch =
let line = get_line inch in
if until line then (line, inch)
else begin
do_this line;
find_match ~do_this ~until inch
end
let find_line ~do_this ~until_line inch =
find_match ~do_this ~until:(fun _ -> until_line = !line_number) inch
let print_rest outch line =
let i = Str.match_end() in
let length = String.length line in
let rest = String.sub line i (length - i) in
if i <> length then print_nl outch rest
let rec print_to_end outch inch =
(try
print_nl outch (get_line inch)
with
End_of_file -> (print_newline outch; raise End_of_file));
print_to_end outch inch
goes through the file , printing it as it goes , until finding the rule
* declaration of name , without printing the rule declaration .
* returns the line where the rule dec ends , and the in_channel at that stage .
* declaration of name, without printing the rule declaration.
* returns the line where the rule dec ends, and the in_channel at that stage.
*)
let skip_rule_dec name outch inch =
let rec traverse outch inch =
let line = get_line inch in
(line, inch)
find_match ~do_this:nothing ~until:match_end inch
let (line,inch) =
find_match ~do_this:nothing ~until:match_non_empty inch in
if regex_match (sp_re^name) line then
find_match ~do_this:nothing ~until:match_end inch
else begin
print_nl outch ("@"^line);
traverse outch inch
end
print_nl outch line;
traverse outch inch
end in
traverse outch inch
outputs the rule declaration with standard patch dependencies .
* rule_name is the new name which overrules the one in the Ast0 rule .
* rule_name is the new name which overrules the one in the Ast0 rule.
*)
let print_patch_decl outch rule_name = function
| Ast0.InitialScriptRule (nm,_,_,_,_,_)
| Ast0.FinalScriptRule (nm,_,_,_,_,_)
| Ast0.ScriptRule (nm,_,_,_,_,_,_) ->
failwith ("Error: The rule " ^ nm ^ " is a script rule ...!")
| Ast0.CocciRule ((_,_,(isos,drop_isos,deps,_,exists)),_,_,_) ->
let deps = Globals.add_patch_dependency deps in
let patch_header = Rule_header.generate
~isos ~drop_isos ~deps ~rule_name ~exists ~meta_vars:[] ~meta_pos:[] in
Rule_header.print_declaration outch patch_header
let print_named_rule ~rule ~handler ~outch ~inch =
let name = Ast0.get_rule_name rule in
let (line,inch) = skip_rule_dec name outch inch in
handler line inch
let print_nameless_rule ~rule ~handler ~outch ~inch =
let rule_name = Ast0.get_rule_name rule in
let rule_line = Globals.extract_line rule_name in
let _ = assert (rule_line > !line_number) in
let (line, inch) =
find_line ~do_this:(print_nl outch) ~until_line:rule_line inch in
if match_nameless_rule line then
handler line inch
else if String.contains line '@' then
let (line, inch) = find_match ~do_this:nothing ~until:match_end inch in
handler line inch
else
failwith ("Error: Did not find a " ^rule_name^ ", instead found: " ^line)
let print_rule_patch outch inch (rule, new_name) =
let handler line inch =
print_patch_decl outch new_name rule;
print_rest outch line;
inch in
let old_name = Ast0.get_rule_name rule in
try
if old_name = new_name then
print_named_rule ~rule ~handler ~outch ~inch
else
print_nameless_rule ~rule ~handler ~outch ~inch
with
| End_of_file -> fail_eof old_name
let print_patch outch inch rules =
let inch = List.fold_left (print_rule_patch outch) inch rules in
print_to_end outch inch
find the start of the next rule .
let next outch inch =
try
let r = find_match ~do_this:nothing ~until:match_rule_start inch in Some r
with
let skip_named_rule ~rule ~last_line ~outch ~inch =
let name = Ast0.get_rule_name rule in
let (_,inch) =
if match_part name last_line
then (last_line, inch)
else begin
if String.contains last_line '@' then failwith
("Transform error: Can't currently handle this case. Don't " ^
"put newlines in your rule declarations!!!");
print_nl outch last_line;
skip_rule_dec name outch inch
end in
let (_,inch) = find_match ~do_this:nothing ~until:match_rule_end inch in
next outch inch
let skip_nameless_rule ~rule ~outch ~inch =
let name = Ast0.get_rule_name rule in
let rule_line = Globals.extract_line name in
let _ = assert (rule_line >= !line_number) in
let inch =
if rule_line = !line_number then
inch
else
snd (find_line ~do_this:(print_nl outch) ~until_line:rule_line inch) in
at this point , line is the line that contains the rule header . so we need
* the rule header end @@ and then the start of the next rule .
* the rule header end @@ and then the start of the next rule.
*)
let (_,inch) = find_match ~do_this:nothing ~until:match_rule_end inch in
next outch inch
let print_rule_context outch last_res (rule, new_name) =
let old_name = Ast0.get_rule_name rule in
try
match last_res with
| None -> raise End_of_file
| Some (last_line, inch) ->
if old_name = new_name then
skip_named_rule ~rule ~last_line ~outch ~inch
else
skip_nameless_rule ~rule ~outch ~inch
with
| End_of_file -> fail_eof old_name
let print_context outch inch rules =
let res = List.fold_left (print_rule_context outch) (Some ("",inch)) rules in
match res with
| Some (_,i) -> print_to_end outch i
| None -> raise End_of_file
let print ~context_mode ~file_name ~preface ~virtuals ~ordered_rules outch =
let _ = line_number := 0 in
let _ = print_nl outch preface in
let _ = print_virtuals outch virtuals in
let inch = open_in file_name in
try
if context_mode then
print_context outch inch ordered_rules
else
print_patch outch inch ordered_rules
with
| Eof_error msg -> flush outch; close_in inch; failwith msg
| e -> close_in_noerr inch; raise e
|
b24f5f90c1d68a49867def7fab9db65aa8dc6e14df922ac184e212f0485ce370 | ghc/packages-directory | Util.hs | {-# LANGUAGE BangPatterns #-}
# LANGUAGE CPP #
-- | A rudimentary testing framework
module Util where
import Prelude ()
import System.Directory.Internal.Prelude
import System.Directory
import Data.Time.Clock (NominalDiffTime, UTCTime, diffUTCTime)
#if MIN_VERSION_base(4, 7, 0)
import System.Environment (getEnvironment, setEnv, unsetEnv)
#elif !defined(mingw32_HOST_OS)
import qualified System.Posix as Posix
#endif
import System.FilePath ((</>), normalise)
import qualified Data.List as List
modifyIORef' :: IORef a -> (a -> a) -> IO ()
modifyIORef' r f = do
x <- readIORef r
let !x' = f x in writeIORef r x'
tryAny :: IO a -> IO (Either SomeException a)
tryAny action = do
result <- newEmptyMVar
mask $ \ unmask -> do
thread <- forkIO (try (unmask action) >>= putMVar result)
unmask (readMVar result) `onException` killThread thread
timeLimit :: Double -> IO a -> IO a
timeLimit time action = do
result <- timeout (round (1000000 * time)) action
case result of
Nothing -> throwIO (userError "timed out")
Just x -> return x
data TestEnv =
TestEnv
{ testCounter :: IORef Int
, testSilent :: Bool
, testKeepDirs :: Bool
, testArgs :: [(String, String)]
}
printInfo :: TestEnv -> [String] -> IO ()
printInfo TestEnv{testSilent = True} _ = return ()
printInfo TestEnv{testSilent = False} msg = do
putStrLn (List.intercalate ": " msg)
hFlush stdout
printErr :: [String] -> IO ()
printErr msg = do
hPutStrLn stderr ("*** " <> List.intercalate ": " msg)
hFlush stderr
printFailure :: TestEnv -> [String] -> IO ()
printFailure TestEnv{testCounter = n} msg = do
modifyIORef' n (+ 1)
printErr msg
check :: TestEnv -> Bool -> [String] -> [String] -> [String] -> IO ()
check t True prefix msg _ = printInfo t (prefix <> msg)
check t False prefix _ msg = printFailure t (prefix <> msg)
checkEither :: TestEnv -> [String] -> Either [String] [String] -> IO ()
checkEither t prefix (Right msg) = printInfo t (prefix <> msg)
checkEither t prefix (Left msg) = printFailure t (prefix <> msg)
showContext :: Show a => String -> Integer -> a -> String
showContext file line context =
file <> ":" <> show line <>
case show context of
"()" -> ""
s -> ":" <> s
inform :: TestEnv -> String -> Integer -> String -> IO ()
inform t file line msg =
printInfo t [showContext file line (), msg]
expect :: Show a => TestEnv -> String -> Integer -> a -> Bool -> IO ()
expect t file line context x =
check t x
[showContext file line context]
["True"]
["False, but True was expected"]
expectEq :: (Eq a, Show a, Show b) =>
TestEnv -> String -> Integer -> b -> a -> a -> IO ()
expectEq t file line context x y =
check t (x == y)
[showContext file line context]
[show x <> " equals " <> show y]
[show x <> " is not equal to " <> show y]
expectNe :: (Eq a, Show a, Show b) =>
TestEnv -> String -> Integer -> b -> a -> a -> IO ()
expectNe t file line context x y =
check t (x /= y)
[showContext file line context]
[show x <> " is not equal to " <> show y]
[show x <> " equals " <> show y]
expectNear :: (Num a, Ord a, Show a, Show b) =>
TestEnv -> String -> Integer -> b -> a -> a -> a -> IO ()
expectNear t file line context x y diff =
check t (abs (x - y) <= diff)
[showContext file line context]
[show x <> " is near " <> show y]
[show x <> " is not near " <> show y]
expectNearTime :: Show a =>
TestEnv -> String -> Integer -> a ->
UTCTime -> UTCTime -> NominalDiffTime -> IO ()
expectNearTime t file line context x y diff =
check t (abs (diffUTCTime x y) <= diff)
[showContext file line context]
[show x <> " is near " <> show y]
[show x <> " is not near " <> show y]
expectIOErrorType :: Show a =>
TestEnv -> String -> Integer -> a
-> (IOError -> Bool) -> IO b -> IO ()
expectIOErrorType t file line context which action = do
result <- try action
checkEither t [showContext file line context] $ case result of
Left e | which e -> Right ["got expected exception (" <> show e <> ")"]
| otherwise -> Left ["got wrong exception: ", show e]
Right _ -> Left ["did not throw an exception"]
-- | Traverse the directory tree in preorder.
preprocessPathRecursive :: (FilePath -> IO ()) -> FilePath -> IO ()
preprocessPathRecursive f path = do
dirExists <- doesDirectoryExist path
if dirExists
then do
isLink <- pathIsSymbolicLink path
f path
when (not isLink) $ do
names <- listDirectory path
traverse_ (preprocessPathRecursive f) ((path </>) <$> names)
else do
f path
withNewDirectory :: Bool -> FilePath -> IO a -> IO a
withNewDirectory keep dir action = do
dir' <- makeAbsolute dir
bracket_ (createDirectoryIfMissing True dir') (cleanup dir') action
where cleanup dir' | keep = return ()
| otherwise = removePathForcibly dir'
isolateEnvironment :: IO a -> IO a
isolateEnvironment = bracket getEnvs setEnvs . const
where
getEnvs = List.sort . filter (\(k, _) -> k /= "") <$> getEnvironment
setEnvs target = do
current <- getEnvs
updateEnvs current target
new <- getEnvs
when (target /= new) $ do
-- Environment variables may be sensitive, so don't log them.
throwIO (userError "isolateEnvironment.setEnvs failed")
updateEnvs kvs1@((k1, v1) : kvs1') kvs2@((k2, v2) : kvs2') =
case compare k1 k2 of
LT -> unsetEnv k1 *> updateEnvs kvs1' kvs2
EQ | v1 == v2 -> updateEnvs kvs1' kvs2'
| otherwise -> setEnv k1 v2 *> updateEnvs kvs1' kvs2'
GT -> setEnv k2 v2 *> updateEnvs kvs1 kvs2'
updateEnvs [] [] = pure ()
updateEnvs kvs1 [] = for_ kvs1 (unsetEnv . fst)
updateEnvs [] kvs2 = for_ kvs2 (uncurry setEnv)
#if MIN_VERSION_base(4, 7, 0)
#elif !defined(mingw32_HOST_OS)
getEnvironment = Posix.getEnvironment
setEnv k v = Posix.setEnv k v True
unsetEnv = Posix.unsetEnv
#else
getEnvironment = pure []
setEnv _ _ = pure ()
unsetEnv _ = pure ()
#endif
isolateWorkingDirectory :: Bool -> FilePath -> IO a -> IO a
isolateWorkingDirectory keep dir action = do
when (normalise dir `List.elem` [".", "./"]) $
throwIO (userError ("isolateWorkingDirectory cannot be used " <>
"with current directory"))
dir' <- makeAbsolute dir
removePathForcibly dir'
withNewDirectory keep dir' $
withCurrentDirectory dir' $
action
run :: TestEnv -> String -> (TestEnv -> IO ()) -> IO ()
run t name action = do
result <- tryAny (action t)
case result of
Left e -> check t False [name] [] ["exception", show e]
Right () -> return ()
isolatedRun :: TestEnv -> String -> (TestEnv -> IO ()) -> IO ()
isolatedRun t@TestEnv{testKeepDirs = keep} name = run t name . (isolate .)
where
isolate =
isolateEnvironment .
isolateWorkingDirectory keep ("dist/test-" <> name <> ".tmp")
tryRead :: Read a => String -> Maybe a
tryRead s =
case reads s of
[(x, "")] -> Just x
_ -> Nothing
getArg :: (String -> Maybe a) -> TestEnv -> String -> String -> a -> a
getArg parse TestEnv{testArgs = args} testname name defaultValue =
fromMaybe defaultValue (List.lookup (prefix <> name) args >>= parse)
where prefix | testname == "" = ""
| otherwise = testname <> "."
readArg :: Read a => TestEnv -> String -> String -> a -> a
readArg = getArg tryRead
readBool :: String -> Maybe Bool
readBool s = Just $
case toLower <$> s of
'y' : _ -> True
't' : _ -> True
_ -> False
parseArgs :: [String] -> [(String, String)]
parseArgs = List.reverse . (second (List.drop 1) . List.span (/= '=') <$>)
testMain :: (TestEnv -> IO ()) -> IO ()
testMain action = do
args <- parseArgs <$> getArgs
counter <- newIORef 0
let t = TestEnv
{ testCounter = counter
, testSilent = getArg readBool t "" "silent" False
, testKeepDirs = getArg readBool t "" "keep-dirs" False
, testArgs = args
}
action t
n <- readIORef (counter)
unless (n == 0) $ do
putStrLn ("[" <> show n <> " failures]")
hFlush stdout
exitFailure
| null | https://raw.githubusercontent.com/ghc/packages-directory/75165a9d69bebba96e0e3a1e519ab481d1362dd2/tests/Util.hs | haskell | # LANGUAGE BangPatterns #
| A rudimentary testing framework
| Traverse the directory tree in preorder.
Environment variables may be sensitive, so don't log them. | # LANGUAGE CPP #
module Util where
import Prelude ()
import System.Directory.Internal.Prelude
import System.Directory
import Data.Time.Clock (NominalDiffTime, UTCTime, diffUTCTime)
#if MIN_VERSION_base(4, 7, 0)
import System.Environment (getEnvironment, setEnv, unsetEnv)
#elif !defined(mingw32_HOST_OS)
import qualified System.Posix as Posix
#endif
import System.FilePath ((</>), normalise)
import qualified Data.List as List
modifyIORef' :: IORef a -> (a -> a) -> IO ()
modifyIORef' r f = do
x <- readIORef r
let !x' = f x in writeIORef r x'
tryAny :: IO a -> IO (Either SomeException a)
tryAny action = do
result <- newEmptyMVar
mask $ \ unmask -> do
thread <- forkIO (try (unmask action) >>= putMVar result)
unmask (readMVar result) `onException` killThread thread
timeLimit :: Double -> IO a -> IO a
timeLimit time action = do
result <- timeout (round (1000000 * time)) action
case result of
Nothing -> throwIO (userError "timed out")
Just x -> return x
data TestEnv =
TestEnv
{ testCounter :: IORef Int
, testSilent :: Bool
, testKeepDirs :: Bool
, testArgs :: [(String, String)]
}
printInfo :: TestEnv -> [String] -> IO ()
printInfo TestEnv{testSilent = True} _ = return ()
printInfo TestEnv{testSilent = False} msg = do
putStrLn (List.intercalate ": " msg)
hFlush stdout
printErr :: [String] -> IO ()
printErr msg = do
hPutStrLn stderr ("*** " <> List.intercalate ": " msg)
hFlush stderr
printFailure :: TestEnv -> [String] -> IO ()
printFailure TestEnv{testCounter = n} msg = do
modifyIORef' n (+ 1)
printErr msg
check :: TestEnv -> Bool -> [String] -> [String] -> [String] -> IO ()
check t True prefix msg _ = printInfo t (prefix <> msg)
check t False prefix _ msg = printFailure t (prefix <> msg)
checkEither :: TestEnv -> [String] -> Either [String] [String] -> IO ()
checkEither t prefix (Right msg) = printInfo t (prefix <> msg)
checkEither t prefix (Left msg) = printFailure t (prefix <> msg)
showContext :: Show a => String -> Integer -> a -> String
showContext file line context =
file <> ":" <> show line <>
case show context of
"()" -> ""
s -> ":" <> s
inform :: TestEnv -> String -> Integer -> String -> IO ()
inform t file line msg =
printInfo t [showContext file line (), msg]
expect :: Show a => TestEnv -> String -> Integer -> a -> Bool -> IO ()
expect t file line context x =
check t x
[showContext file line context]
["True"]
["False, but True was expected"]
expectEq :: (Eq a, Show a, Show b) =>
TestEnv -> String -> Integer -> b -> a -> a -> IO ()
expectEq t file line context x y =
check t (x == y)
[showContext file line context]
[show x <> " equals " <> show y]
[show x <> " is not equal to " <> show y]
expectNe :: (Eq a, Show a, Show b) =>
TestEnv -> String -> Integer -> b -> a -> a -> IO ()
expectNe t file line context x y =
check t (x /= y)
[showContext file line context]
[show x <> " is not equal to " <> show y]
[show x <> " equals " <> show y]
expectNear :: (Num a, Ord a, Show a, Show b) =>
TestEnv -> String -> Integer -> b -> a -> a -> a -> IO ()
expectNear t file line context x y diff =
check t (abs (x - y) <= diff)
[showContext file line context]
[show x <> " is near " <> show y]
[show x <> " is not near " <> show y]
expectNearTime :: Show a =>
TestEnv -> String -> Integer -> a ->
UTCTime -> UTCTime -> NominalDiffTime -> IO ()
expectNearTime t file line context x y diff =
check t (abs (diffUTCTime x y) <= diff)
[showContext file line context]
[show x <> " is near " <> show y]
[show x <> " is not near " <> show y]
expectIOErrorType :: Show a =>
TestEnv -> String -> Integer -> a
-> (IOError -> Bool) -> IO b -> IO ()
expectIOErrorType t file line context which action = do
result <- try action
checkEither t [showContext file line context] $ case result of
Left e | which e -> Right ["got expected exception (" <> show e <> ")"]
| otherwise -> Left ["got wrong exception: ", show e]
Right _ -> Left ["did not throw an exception"]
preprocessPathRecursive :: (FilePath -> IO ()) -> FilePath -> IO ()
preprocessPathRecursive f path = do
dirExists <- doesDirectoryExist path
if dirExists
then do
isLink <- pathIsSymbolicLink path
f path
when (not isLink) $ do
names <- listDirectory path
traverse_ (preprocessPathRecursive f) ((path </>) <$> names)
else do
f path
withNewDirectory :: Bool -> FilePath -> IO a -> IO a
withNewDirectory keep dir action = do
dir' <- makeAbsolute dir
bracket_ (createDirectoryIfMissing True dir') (cleanup dir') action
where cleanup dir' | keep = return ()
| otherwise = removePathForcibly dir'
isolateEnvironment :: IO a -> IO a
isolateEnvironment = bracket getEnvs setEnvs . const
where
getEnvs = List.sort . filter (\(k, _) -> k /= "") <$> getEnvironment
setEnvs target = do
current <- getEnvs
updateEnvs current target
new <- getEnvs
when (target /= new) $ do
throwIO (userError "isolateEnvironment.setEnvs failed")
updateEnvs kvs1@((k1, v1) : kvs1') kvs2@((k2, v2) : kvs2') =
case compare k1 k2 of
LT -> unsetEnv k1 *> updateEnvs kvs1' kvs2
EQ | v1 == v2 -> updateEnvs kvs1' kvs2'
| otherwise -> setEnv k1 v2 *> updateEnvs kvs1' kvs2'
GT -> setEnv k2 v2 *> updateEnvs kvs1 kvs2'
updateEnvs [] [] = pure ()
updateEnvs kvs1 [] = for_ kvs1 (unsetEnv . fst)
updateEnvs [] kvs2 = for_ kvs2 (uncurry setEnv)
#if MIN_VERSION_base(4, 7, 0)
#elif !defined(mingw32_HOST_OS)
getEnvironment = Posix.getEnvironment
setEnv k v = Posix.setEnv k v True
unsetEnv = Posix.unsetEnv
#else
getEnvironment = pure []
setEnv _ _ = pure ()
unsetEnv _ = pure ()
#endif
isolateWorkingDirectory :: Bool -> FilePath -> IO a -> IO a
isolateWorkingDirectory keep dir action = do
when (normalise dir `List.elem` [".", "./"]) $
throwIO (userError ("isolateWorkingDirectory cannot be used " <>
"with current directory"))
dir' <- makeAbsolute dir
removePathForcibly dir'
withNewDirectory keep dir' $
withCurrentDirectory dir' $
action
run :: TestEnv -> String -> (TestEnv -> IO ()) -> IO ()
run t name action = do
result <- tryAny (action t)
case result of
Left e -> check t False [name] [] ["exception", show e]
Right () -> return ()
isolatedRun :: TestEnv -> String -> (TestEnv -> IO ()) -> IO ()
isolatedRun t@TestEnv{testKeepDirs = keep} name = run t name . (isolate .)
where
isolate =
isolateEnvironment .
isolateWorkingDirectory keep ("dist/test-" <> name <> ".tmp")
tryRead :: Read a => String -> Maybe a
tryRead s =
case reads s of
[(x, "")] -> Just x
_ -> Nothing
getArg :: (String -> Maybe a) -> TestEnv -> String -> String -> a -> a
getArg parse TestEnv{testArgs = args} testname name defaultValue =
fromMaybe defaultValue (List.lookup (prefix <> name) args >>= parse)
where prefix | testname == "" = ""
| otherwise = testname <> "."
readArg :: Read a => TestEnv -> String -> String -> a -> a
readArg = getArg tryRead
readBool :: String -> Maybe Bool
readBool s = Just $
case toLower <$> s of
'y' : _ -> True
't' : _ -> True
_ -> False
parseArgs :: [String] -> [(String, String)]
parseArgs = List.reverse . (second (List.drop 1) . List.span (/= '=') <$>)
testMain :: (TestEnv -> IO ()) -> IO ()
testMain action = do
args <- parseArgs <$> getArgs
counter <- newIORef 0
let t = TestEnv
{ testCounter = counter
, testSilent = getArg readBool t "" "silent" False
, testKeepDirs = getArg readBool t "" "keep-dirs" False
, testArgs = args
}
action t
n <- readIORef (counter)
unless (n == 0) $ do
putStrLn ("[" <> show n <> " failures]")
hFlush stdout
exitFailure
|
3e996743f556918b5abbb52b56f7e96f7421f82512aacdea4d924bddda14435d | RedHatQE/katello.auto | repositories.clj | (ns katello.repositories
(:require [webdriver :as browser]
[katello :as kt]
(katello [tasks :as tasks]
[organizations :as organization]
[navigation :as nav]
[providers :as provider] ;to load navigation
[notifications :as notification]
[ui :as ui]
[rest :as rest])))
;; Locators
(ui/defelements :katello.deployment/any [katello.ui]
{::repositories-link "//nav[@class='details-navigation']//a[contains(.,'Repositories')]"
::create-repo "//button[contains(@class,'ng-binding') and contains(.,'Create Repository')]"
::repo-name-text "//input[@name='name']"
::repo-label-text "//input[@name='label']"
::repo-type-select "//select[@name='content_type']"
::repo-url-text "//input[@name='url']"
::repo-protection-checkbox "//input[@name='unprotected']"
::repo-gpg-select "//select[@name='gpg_key_id']"
::repo-save "//form[@name='repositoryForm']//span[@class='ng-binding' and contains(.,'Create')]"
::repo-remove "//button[contains(@class,'ng-binding') and contains(.,'Remove Repository')]"
::confirm-repo-rm "//div[@alch-confirm-modal='removeRepository(repository)']//button[contains(.,'Yes')]"
::repo-list "//a[@class='ng-binding' and contains(.,'Back to Repository List')]"
::repo-gpgkey-update "//div[@selector='repository.gpg_key_id']//i[contains(@class,'icon-edit')]"
::repo-gpgkey-update-select "//div[@selector='repository.gpg_key_id']//select[@ng-model='selector']"
::save-updated-gpg-key "//div[@selector='repository.gpg_key_id']//button[contains(.,'Save')]"})
(browser/template-fns
{select-repository "//a[contains(@href,'repositories') and contains(.,'%s')]"
gpgkey-under-repo-details "//span[contains(@class,'ng-binding') and normalize-space(.)='%s']"})
(nav/defpages :katello.deployment/any katello.providers
[::provider/products-page
[::product-page (fn [ent] (browser/click (provider/select-product (:name (kt/product ent)))))
[::product-repo-page (fn [_] (browser/click ::repositories-link))
[::repo-page (fn [ent] (browser/click (select-repository (:name ent))))]]]])
;; Tasks
(defn- create
"Adds a repository under the product.
Requires a name and url be given for the repo."
[{:keys [product name url gpg-key repo-type http?]}]
{:pre [(instance? katello.Product product)
(instance? katello.Provider (kt/provider product))
(instance? katello.Organization (kt/org product))]}
(nav/go-to ::product-page product)
(browser/click ::create-repo)
(when gpg-key (browser/select-by-text ::repo-gpg-select (:name gpg-key)))
(browser/select-by-text ::repo-type-select repo-type)
(when http? (browser/click ::repo-protection-checkbox))
(browser/input-text ::repo-name-text name)
(when url (browser/input-text ::repo-url-text url))
(browser/click ::repo-save))
(defn- update
"Edits a repository. Currently the only property of a repository that
can be edited is the gpg-key associated."
[repo {:keys [gpg-key]}]
(when (not= (:gpg-key repo) gpg-key)
(nav/go-to repo)
(browser/click ::repo-gpgkey-update)
(browser/select-by-text ::repo-gpgkey-update-select gpg-key)
(browser/click ::save-updated-gpg-key)))
(defn- delete "Deletes a repository from the given product."
[repo]
{:pre [(instance? katello.Repository repo)]}
(nav/go-to repo)
(when (browser/displayed? ::repo-remove)
(browser/click ::repo-remove)
(browser/click ::confirm-repo-rm)))
(defn gpgkey-associated?
[repo]
(nav/go-to repo)
(browser/exists? (gpgkey-under-repo-details (-> repo kt/product :gpg-key :name))))
(extend katello.Repository
ui/CRUD {:create create
:update* update
:delete delete}
rest/CRUD {:create (fn [{:keys [product name url]}]
{:pre [(instance? katello.Product product)
(instance? katello.Provider (kt/provider product))
(instance? katello.Organization (kt/org product))]}
(rest/http-post (rest/api-url "api/repositories/")
{:body {:organization_id (-> product kt/org :name)
:product_id (rest/get-id product)
:name name
:url url}}))
:read (partial rest/read-impl (partial rest/url-maker [["api/repositories/%s" [identity]]]))
:id rest/id-field
:query (partial rest/query-by-name (partial rest/url-maker [["api/organizations/%s/products/%s/repositories" [kt/org kt/product]]]) )}
tasks/Uniqueable tasks/entity-uniqueable-impl
nav/Destination {:go-to (partial nav/go-to ::repo-page)})
| null | https://raw.githubusercontent.com/RedHatQE/katello.auto/79fec96581044bce5db5350d0da325e517024962/src/katello/repositories.clj | clojure | to load navigation
Locators
Tasks | (ns katello.repositories
(:require [webdriver :as browser]
[katello :as kt]
(katello [tasks :as tasks]
[organizations :as organization]
[navigation :as nav]
[notifications :as notification]
[ui :as ui]
[rest :as rest])))
(ui/defelements :katello.deployment/any [katello.ui]
{::repositories-link "//nav[@class='details-navigation']//a[contains(.,'Repositories')]"
::create-repo "//button[contains(@class,'ng-binding') and contains(.,'Create Repository')]"
::repo-name-text "//input[@name='name']"
::repo-label-text "//input[@name='label']"
::repo-type-select "//select[@name='content_type']"
::repo-url-text "//input[@name='url']"
::repo-protection-checkbox "//input[@name='unprotected']"
::repo-gpg-select "//select[@name='gpg_key_id']"
::repo-save "//form[@name='repositoryForm']//span[@class='ng-binding' and contains(.,'Create')]"
::repo-remove "//button[contains(@class,'ng-binding') and contains(.,'Remove Repository')]"
::confirm-repo-rm "//div[@alch-confirm-modal='removeRepository(repository)']//button[contains(.,'Yes')]"
::repo-list "//a[@class='ng-binding' and contains(.,'Back to Repository List')]"
::repo-gpgkey-update "//div[@selector='repository.gpg_key_id']//i[contains(@class,'icon-edit')]"
::repo-gpgkey-update-select "//div[@selector='repository.gpg_key_id']//select[@ng-model='selector']"
::save-updated-gpg-key "//div[@selector='repository.gpg_key_id']//button[contains(.,'Save')]"})
(browser/template-fns
{select-repository "//a[contains(@href,'repositories') and contains(.,'%s')]"
gpgkey-under-repo-details "//span[contains(@class,'ng-binding') and normalize-space(.)='%s']"})
(nav/defpages :katello.deployment/any katello.providers
[::provider/products-page
[::product-page (fn [ent] (browser/click (provider/select-product (:name (kt/product ent)))))
[::product-repo-page (fn [_] (browser/click ::repositories-link))
[::repo-page (fn [ent] (browser/click (select-repository (:name ent))))]]]])
(defn- create
"Adds a repository under the product.
Requires a name and url be given for the repo."
[{:keys [product name url gpg-key repo-type http?]}]
{:pre [(instance? katello.Product product)
(instance? katello.Provider (kt/provider product))
(instance? katello.Organization (kt/org product))]}
(nav/go-to ::product-page product)
(browser/click ::create-repo)
(when gpg-key (browser/select-by-text ::repo-gpg-select (:name gpg-key)))
(browser/select-by-text ::repo-type-select repo-type)
(when http? (browser/click ::repo-protection-checkbox))
(browser/input-text ::repo-name-text name)
(when url (browser/input-text ::repo-url-text url))
(browser/click ::repo-save))
(defn- update
"Edits a repository. Currently the only property of a repository that
can be edited is the gpg-key associated."
[repo {:keys [gpg-key]}]
(when (not= (:gpg-key repo) gpg-key)
(nav/go-to repo)
(browser/click ::repo-gpgkey-update)
(browser/select-by-text ::repo-gpgkey-update-select gpg-key)
(browser/click ::save-updated-gpg-key)))
(defn- delete "Deletes a repository from the given product."
[repo]
{:pre [(instance? katello.Repository repo)]}
(nav/go-to repo)
(when (browser/displayed? ::repo-remove)
(browser/click ::repo-remove)
(browser/click ::confirm-repo-rm)))
(defn gpgkey-associated?
[repo]
(nav/go-to repo)
(browser/exists? (gpgkey-under-repo-details (-> repo kt/product :gpg-key :name))))
(extend katello.Repository
ui/CRUD {:create create
:update* update
:delete delete}
rest/CRUD {:create (fn [{:keys [product name url]}]
{:pre [(instance? katello.Product product)
(instance? katello.Provider (kt/provider product))
(instance? katello.Organization (kt/org product))]}
(rest/http-post (rest/api-url "api/repositories/")
{:body {:organization_id (-> product kt/org :name)
:product_id (rest/get-id product)
:name name
:url url}}))
:read (partial rest/read-impl (partial rest/url-maker [["api/repositories/%s" [identity]]]))
:id rest/id-field
:query (partial rest/query-by-name (partial rest/url-maker [["api/organizations/%s/products/%s/repositories" [kt/org kt/product]]]) )}
tasks/Uniqueable tasks/entity-uniqueable-impl
nav/Destination {:go-to (partial nav/go-to ::repo-page)})
|
dc3df65c71f6ac9a53acf0c22f3009a8038228f8348c63c8352bdd806833fab1 | alavrik/piqi | check.ml |
Copyright 2009 , 2010 , 2011 , 2012 , 2013 , 2014 , 2015 , 2017
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS ,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
See the License for the specific language governing permissions and
limitations under the License .
Copyright 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2017 Anton Lavrik
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
" piqi check " validates input and essentially the same as " piqi convert "
* except it does n't produce any output
* except it doesn't produce any output *)
module C = Piqi_common
open C
(* command-line arguments *)
let usage = "Usage: piqi check [options] [input file]\nOptions:"
let speclist = Main.common_speclist @
[
Main.arg__strict;
Convert.arg_f;
Convert.arg__type;
Convert.arg__piq_relaxed_parsing;
Convert.arg__piq_frameless_input;
Main.arg__include_extension;
Main.arg__;
]
let check_file () =
Convert.init ();
let input_format, piq_input_format = Convert.get_input_format !Convert.input_format in
let reader = Convert.make_reader input_format piq_input_format in
(* main convert cycle *)
Convert.do_convert reader
let run () =
Main.parse_args () ~speclist ~usage ~min_arg_count:1 ~max_arg_count:1;
check_file ()
let _ =
Main.register_command run "check"
"check data validity for various file formats (piqi, piq, json, xml, pb, pib)"
| null | https://raw.githubusercontent.com/alavrik/piqi/bcea4d44997966198dc295df0609591fa787b1d2/src/check.ml | ocaml | command-line arguments
main convert cycle |
Copyright 2009 , 2010 , 2011 , 2012 , 2013 , 2014 , 2015 , 2017
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS ,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
See the License for the specific language governing permissions and
limitations under the License .
Copyright 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2017 Anton Lavrik
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
" piqi check " validates input and essentially the same as " piqi convert "
* except it does n't produce any output
* except it doesn't produce any output *)
module C = Piqi_common
open C
let usage = "Usage: piqi check [options] [input file]\nOptions:"
let speclist = Main.common_speclist @
[
Main.arg__strict;
Convert.arg_f;
Convert.arg__type;
Convert.arg__piq_relaxed_parsing;
Convert.arg__piq_frameless_input;
Main.arg__include_extension;
Main.arg__;
]
let check_file () =
Convert.init ();
let input_format, piq_input_format = Convert.get_input_format !Convert.input_format in
let reader = Convert.make_reader input_format piq_input_format in
Convert.do_convert reader
let run () =
Main.parse_args () ~speclist ~usage ~min_arg_count:1 ~max_arg_count:1;
check_file ()
let _ =
Main.register_command run "check"
"check data validity for various file formats (piqi, piq, json, xml, pb, pib)"
|
9f5f31d19812a5f8eb57a889808836c87d685d810f6daa9ecdbfee13b68b2241 | Bost/corona_cases | tables.clj | ;; (printf "Current-ns [%s] loading %s ...\n" *ns* 'corona.tables)
;; (ns corona.tables
;; (:require
;; [corona.countries :as ccr]
;; ))
;; ;; (set! *warn-on-reflection* true)
;; (def ^:const regions
;; "
;; Contains \"Americas\" which is not a standard name. Bloody hell!
;; (United_Nations)#List"
;; [
[ " China " " Asia " " Eastern Asia " " 1427647786 " " 1433783686 " " +0.43 % " ]
[ " India " " Asia " " Southern Asia " " 1352642280 " " 1366417754 " " +1.02 % " ]
[ " United States " " Americas " " Northern America " " 327096265 " " 329064917 " " +0.60 % " ]
[ " Indonesia " " Asia " " South - eastern Asia " " 267670543 " " 270625568 " " +1.10 % " ]
[ " Pakistan " " Asia " " Southern Asia " " 212228286 " " 216565318 " " +2.04 % " ]
[ " Brazil " " Americas " " South America " " 209469323 " " 211049527 " " +0.75 % " ]
[ " Nigeria " " Africa " " Western Africa " " 195874683 " " 200963599 " " +2.60 % " ]
[ " Bangladesh " " Asia " " Southern Asia " " 161376708 " " 163046161 " " +1.03 % " ]
[ " Russia " " Europe " " Eastern Europe " " 145734038 " " 145872256 " " +0.09 % " ]
[ " Mexico " " Americas " " Central America " " 126190788 " " 127575529 " " +1.10 % " ]
[ " Japan " " Asia " " Eastern Asia " " 127202192 " " 126860301 " " −0.27 % " ]
[ " Ethiopia " " Africa " " Eastern Africa " " 109224414 " " 112078730 " " +2.61 % " ]
[ " Philippines " " Asia " " South - eastern Asia " " 106651394 " " 108116615 " " +1.37 % " ]
[ " Egypt " " Africa " " Northern Africa " " 98423598 " " 100388073 " " +2.00 % " ]
[ " Vietnam " " Asia " " South - eastern Asia " " 95545962 " " 96462106 " " +0.96 % " ]
[ " DR Congo " " Africa " " Middle Africa " " 84068091 " " 86790567 " " +3.24 % " ] ; ; Congo - Brazzaville
[ " Germany " " Europe " " Western Europe " " 83124418 " " 83517045 " " +0.47 % " ]
[ " Turkey " " Asia " " Western Asia " " 82340088 " " 83429615 " " +1.32 % " ]
[ " Iran " " Asia " " Southern Asia " " 81800188 " " 82913906 " " +1.36 % " ]
[ " Thailand " " Asia " " South - eastern Asia " " 68863514 " " 69037513 " " +0.25 % " ]
[ " United Kingdom " " Europe " " Northern Europe " " 67141684 " " 67530172 " " +0.58 % " ]
[ " France " " Europe " " Western Europe " " 64990511 " " 65129728 " " +0.21 % " ]
[ " Italy " " Europe " " Southern Europe " " 60627291 " " 60550075 " " −0.13 % " ]
[ " South Africa " " Africa " " Southern Africa " " 57792518 " " 58558270 " " +1.33 % " ]
[ " Tanzania " " Africa " " Eastern Africa " " 56313438 " " 58005463 " " +3.00 % " ]
[ " Myanmar " " Asia " " South - eastern Asia " " 53708320 " " 54045420 " " +0.63 % " ]
[ " Kenya " " Africa " " Eastern Africa " " 51392565 " " 52573973 " " +2.30 % " ]
[ " South Korea " " Asia " " Eastern Asia " " 51171706 " " 51225308 " " +0.10 % " ]
[ " Colombia " " Americas " " South America " " 49661048 " " 50339443 " " +1.37 % " ]
[ " Spain " " Europe " " Southern Europe " " 46692858 " " 46736776 " " +0.09 % " ]
[ " Argentina " " Americas " " South America " " 44361150 " " 44780677 " " +0.95 % " ]
[ " Uganda " " Africa " " Eastern Africa " " 42729036 " " 44269594 " " +3.61 % " ]
[ " Ukraine " " Europe " " Eastern Europe " " 44246156 " " 43993638 " " −0.57 % " ]
[ " Algeria " " Africa " " Northern Africa " " 42228408 " " 43053054 " " +1.95 % " ]
[ " Sudan " " Africa " " Northern Africa " " 41801533 " " 42813238 " " +2.42 % " ]
[ " Iraq " " Asia " " Western Asia " " 38433600 " " 39309783 " " " ]
[ " Afghanistan " " Asia " " Southern Asia " " 37171921 " " 38041754 " " +2.34 % " ]
[ " Poland " " Europe " " Eastern Europe " " 37921592 " " 37887768 " " −0.09 % " ]
[ " Canada " " Americas " " Northern America " " 37074562 " " 37411047 " " +0.91 % " ]
[ " Morocco " " Africa " " Northern Africa " " 36029093 " " 36471769 " " +1.23 % " ]
[ " Saudi Arabia " " Asia " " Western Asia " " 33702756 " " 34268528 " " +1.68 % " ]
[ " Uzbekistan " " Asia " " Central Asia " " 32476244 " " 32981716 " " +1.56 % " ]
[ " Peru " " Americas " " South America " " 31989260 " " 32510453 " " +1.63 % " ]
[ " Malaysia " " Asia " " South - eastern Asia " " 31528033 " " 31949777 " " +1.34 % " ]
[ " Angola " " Africa " " Middle Africa " " 30809787 " " 31825295 " " +3.30 % " ]
[ " Mozambique " " Africa " " Eastern Africa " " 29496004 " " 30366036 " " % " ]
[ " Yemen " " Asia " " Western Asia " " 28498683 " " 29161922 " " +2.33 % " ]
[ " Ghana " " Africa " " Western Africa " " 28206728 " " 28833629 " " +2.22 % " ]
[ " Nepal " " Asia " " Southern Asia " " 28095714 " " 28608710 " " +1.83 % " ]
[ " Venezuela " " Americas " " South America " " 28887118 " " 28515829 " " −1.29 % " ]
[ " Madagascar " " Africa " " Eastern Africa " " 26262313 " " 26969307 " " +2.69 % " ]
[ " North Korea " " Asia " " Eastern Asia " " 25549604 " " 25666161 " " +0.46 % " ]
[ " Ivory Coast " " Africa " " Western Africa " " 25069230 " " 25716544 " " +2.58 % " ]
[ " Cameroon " " Africa " " Middle Africa " " 25216267 " " 25876380 " " +2.62 % " ]
[ " Australia " " Oceania " " Australia and New Zealand " " 24898152 " " 25203198 " " +1.23 % " ]
[ " Taiwan " " Asia " " Eastern Asia " " 23726460 " " 23773876 " " +0.20 % " ]
[ " Niger " " Africa " " Western Africa " " 22442822 " " 23310715 " " +3.87 % " ]
[ " Sri Lanka " " Asia " " Southern Asia " " 21228763 " " 21323733 " " +0.45 % " ]
[ " Burkina Faso " " Africa " " Western Africa " " 19751466 " " 20321378 " " +2.89 % " ]
[ " Mali " " Africa " " Western Africa " " 19077749 " " 19658031 " " +3.04 % " ]
[ " Romania " " Europe " " Eastern Europe " " 19506114 " " 19364557 " " −0.73 % " ]
[ " Malawi " " Africa " " Eastern Africa " " 18143217 " " 18628747 " " +2.68 % " ]
[ " Chile " " Americas " " South America " " 18729160 " " 18952038 " " +1.19 % " ]
[ " Kazakhstan " " Asia " " Central Asia " " 18319618 " " 18551427 " " +1.27 % " ]
[ " Zambia " " Africa " " Eastern Africa " " 17351708 " " 17861030 " " +2.94 % " ]
[ " Guatemala " " Americas " " Central America " " 17247849 " " 17581472 " " +1.93 % " ]
[ " Ecuador " " Americas " " South America " " 17084358 " " 17373662 " " +1.69 % " ]
[ " Netherlands " " Europe " " Western Europe " " 17059560 " " 17097130 " " +0.22 % " ]
[ " Syria " " Asia " " Western Asia " " 16945057 " " 17070135 " " +0.74 % " ]
[ " Cambodia " " Asia " " South - eastern Asia " " 16249792 " " 16486542 " " +1.46 % " ]
[ " Senegal " " Africa " " Western Africa " " 15854323 " " 16296364 " " +2.79 % " ]
[ " Chad " " Africa " " Middle Africa " " 15477729 " " 15946876 " " +3.03 % " ]
[ " Somalia " " Africa " " Eastern Africa " " 15008226 " " 15442905 " " +2.90 % " ]
[ " Zimbabwe " " Africa " " Eastern Africa " " 14438802 " " 14645468 " " +1.43 % " ]
[ " Guinea " " Africa " " Western Africa " " 12414293 " " 12771246 " " +2.88 % " ]
[ " Rwanda " " Africa " " Eastern Africa " " 12301970 " " 12626950 " " +2.64 % " ]
[ " Benin " " Africa " " Western Africa " " 11485044 " " 11801151 " " +2.75 % " ]
[ " Tunisia " " Africa " " Northern Africa " " 11565201 " " 11694719 " " +1.12 % " ]
[ " Belgium " " Europe " " Western Europe " " 11482178 " " 11539328 " " +0.50 % " ]
[ " Bolivia " " Americas " " South America " " 11353142 " " 11513100 " " +1.41 % " ]
[ " Cuba " " Americas " " Caribbean " " 11338134 " " 11333483 " " −0.04 % " ]
[ " Haiti " " Americas " " Caribbean " " 11123178 " " 11263770 " " +1.26 % " ]
[ " South Sudan " " Africa " " Eastern Africa " " 10975927 " " 11062113 " " +0.79 % " ]
[ " Burundi " " Africa " " Eastern Africa " " 10524117 " " 10864245 " " +3.23 % " ]
[ " Dominican Republic " " Americas " " Caribbean " " 10627141 " " 10738958 " " +1.05 % " ]
[ " Czech Republic " " Europe " " Eastern Europe " " 10665677 " " 10689209 " " +0.22 % " ]
[ " Greece " " Europe " " Southern Europe " " 10522246 " " 10473455 " " −0.46 % " ]
[ " Portugal " " Europe " " Southern Europe " " 10256193 " " 10226187 " " −0.29 % " ]
[ " " " Asia " " Western Asia " " 9965318 " " 10101694 " " +1.37 % " ]
[ " Azerbaijan " " Asia " " Western Asia " " 9949537 " " 10047718 " " +0.99 % " ]
[ " Sweden " " Europe " " Northern Europe " " 9971638 " " 10036379 " " +0.65 % " ]
[ " United Arab Emirates " " Asia " " Western Asia " " 9630959 " " 9770529 " " +1.45 % " ]
[ " Honduras " " Americas " " Central America " " 9587522 " " 9746117 " " +1.65 % " ]
[ " Hungary " " Europe " " Eastern Europe " " 9707499 " " 9684679 " " −0.24 % " ]
[ " Belarus " " Europe " " Eastern Europe " " 9452617 " " 9452411 " " 0.00 % " ]
[ " Tajikistan " " Asia " " Central Asia " " 9100835 " " 9321018 " " +2.42 % " ]
[ " Austria " " Europe " " Western Europe " " 8891388 " " 8955102 " " +0.72 % " ]
[ " Papua New Guinea " " Oceania " " Melanesia " " 8606323 " " 8776109 " " +1.97 % " ]
[ " Serbia " " Europe " " Southern Europe " " 8802754 " " 8772235 " " −0.35 % " ]
[ " Switzerland " " Europe " " Western Europe " " 8525611 " " 8591365 " " +0.77 % " ]
[ " Israel " " Asia " " Western Asia " " 8381516 " " 8519377 " " +1.64 % " ]
[ " Togo " " Africa " " Western Africa " " 7889093 " " 8082366 " " +2.45 % " ]
[ " Sierra Leone " " Africa " " Western Africa " " 7650150 " " 7813215 " " +2.13 % " ]
[ " Hong Kong " " Asia " " Eastern Asia " " 7371730 " " 7436154 " " +0.87 % " ]
[ " Laos " " Asia " " South - eastern Asia " " 7061507 " " 7169455 " " +1.53 % " ]
[ " Paraguay " " Americas " " South America " " 6956066 " " 7044636 " " +1.27 % " ]
[ " Bulgaria " " Europe " " Eastern Europe " " 7051608 " " 7000119 " " −0.73 % " ]
[ " Lebanon " " Asia " " Western Asia " " 6859408 " " 6855713 " " −0.05 % " ]
[ " Libya " " Africa " " Northern Africa " " 6678559 " " 6777452 " " +1.48 % " ]
[ " Nicaragua " " Americas " " Central America " " 6465501 " " 6545502 " " +1.24 % " ]
[ " El Salvador " " Americas " " Central America " " 6420746 " " 6453553 " " +0.51 % " ]
[ " Kyrgyzstan " " Asia " " Central Asia " " 6304030 " " 6415850 " " +1.77 % " ]
[ " Turkmenistan " " Asia " " Central Asia " " 5850901 " " 5942089 " " +1.56 % " ]
[ " Singapore " " Asia " " South - eastern Asia " " 5757499 " " 5804337 " " +0.81 % " ]
[ " Denmark " " Europe " " Northern Europe " " 5752126 " " 5771876 " " +0.34 % " ]
[ " Finland " " Europe " " Northern Europe " " 5522576 " " 5532156 " " +0.17 % " ]
[ " Slovakia " " Europe " " Eastern Europe " " 5453014 " " 5457013 " " +0.07 % " ]
[ " Congo " " Africa " " Middle Africa " " 5244359 " " 5380508 " " +2.60 % " ]
[ " Norway " " Europe " " Northern Europe " " 5337962 " " 5378857 " " +0.77 % " ]
[ " Costa Rica " " Americas " " Central America " " 4999441 " " 5047561 " " +0.96 % " ]
[ " Palestine " " Asia " " Western Asia " " 4862979 " " 4981420 " " +2.44 % " ]
[ " Oman " " Asia " " Western Asia " " 4829473 " " 4974986 " " +3.01 % " ]
[ " Liberia " " Africa " " Western Africa " " 4818973 " " 4937374 " " +2.46 % " ]
[ " Ireland " " Europe " " Northern Europe " " 4818690 " " 4882495 " " +1.32 % " ]
[ " New Zealand " " Oceania " " Australia and New Zealand " " 4743131 " " " " +0.84 % " ]
[ " Central African Republic " " Africa " " Middle Africa " " 4666368 " " 4745185 " " +1.69 % " ]
[ " Mauritania " " Africa " " Western Africa " " 4403313 " " 4525696 " " +2.78 % " ]
[ " Panama " " Americas " " Central America " " 4176869 " " 4246439 " " +1.67 % " ]
[ " Kuwait " " Asia " " Western Asia " " 4137312 " " 4207083 " " +1.69 % " ]
[ " Croatia " " Europe " " Southern Europe " " 4156405 " " 4130304 " " −0.63 % " ]
[ " Moldova " " Europe " " Eastern Europe " " 4051944 " " 4043263 " " −0.21 % " ]
[ " Georgia " " Asia " " Western Asia " " 4002942 " " 3996765 " " −0.15 % " ]
[ " Eritrea " " Africa " " Eastern Africa " " 3452786 " " 3497117 " " +1.28 % " ]
[ " Uruguay " " Americas " " South America " " 3449285 " " 3461734 " " +0.36 % " ]
[ " Bosnia and Herzegovina " " Europe " " Southern Europe " " 3323925 " " 3301000 " " −0.69 % " ]
[ " Mongolia " " Asia " " Eastern Asia " " 3170216 " " 3225167 " " +1.73 % " ]
[ " Armenia " " Asia " " Western Asia " " 2951745 " " 2957731 " " +0.20 % " ]
[ " Jamaica " " Americas " " Caribbean " " 2934847 " " 2948279 " " +0.46 % " ]
[ " Puerto Rico " " Americas " " Caribbean " " 3039596 " " 2933408 " " −3.49 % " ]
[ " Albania " " Europe " " Southern Europe " " 2882740 " " 2880917 " " −0.06 % " ]
[ " Qatar " " Asia " " Western Asia " " 2781682 " " 2832067 " " +1.81 % " ]
[ " Lithuania " " Europe " " Northern Europe " " 2801264 " " 2759627 " " −1.49 % " ]
[ " Namibia " " Africa " " Southern Africa " " 2448301 " " 2494530 " " +1.89 % " ]
[ " Gambia " " Africa " " Western Africa " " 2280094 " " 2347706 " " +2.97 % " ]
[ " Botswana " " Africa " " Southern Africa " " 2254068 " " 2303697 " " +2.20 % " ]
[ " Gabon " " Africa " " Middle Africa " " 2119275 " " 2172579 " " +2.52 % " ]
[ " Lesotho " " Africa " " Southern Africa " " 2108328 " " 2125268 " " +0.80 % " ]
[ " North Macedonia " " Europe " " Southern Europe " " 2082957 " " 2083459 " " +0.02 % " ]
[ " Slovenia " " Europe " " Southern Europe " " 2077837 " " 2078654 " " +0.04 % " ]
[ " Guinea - Bissau " " Africa " " Western Africa " " 1874303 " " 1920922 " " +2.49 % " ]
[ " Latvia " " Europe " " Northern Europe " " 1928459 " " 1906743 " " −1.13 % " ]
[ " Bahrain " " Asia " " Western Asia " " 1569446 " " 1641172 " " +4.57 % " ]
[ " Trinidad and Tobago " " Americas " " Caribbean " " 1389843 " " 1394973 " " +0.37 % " ]
[ " Equatorial Guinea " " Africa " " Middle Africa " " 1308975 " " 1355986 " " +3.59 % " ]
[ " Estonia " " Europe " " Northern Europe " " 1322920 " " 1325648 " " +0.21 % " ]
[ " East Timor " " Asia " " South - eastern Asia " " 1267974 " " 1293119 " " +1.98 % " ]
[ " Mauritius " " Africa " " Eastern Africa " " 1189265 " " 1198575 " " +0.78 % " ]
[ " Cyprus " " Asia " " Western Asia " " 1170125 " " 1179551 " " +0.81 % " ]
[ " Eswatini " " Africa " " Southern Africa " " 1136281 " " 1148130 " " +1.04 % " ]
[ " Djibouti " " Africa " " Eastern Africa " " 958923 " " 973560 " " +1.53 % " ]
[ " Fiji " " Oceania " " Melanesia " " 883483 " " 889953 " " +0.73 % " ]
[ " Réunion " " Africa " " Eastern Africa " " 882526 " " 888927 " " +0.73 % " ]
[ " Comoros " " Africa " " Eastern Africa " " " " 850886 " " +2.23 % " ]
[ " Guyana " " Americas " " South America " " 779006 " " 782766 " " +0.48 % " ]
[ " Bhutan " " Asia " " Southern Asia " " 754388 " " 763092 " " +1.15 % " ]
[ " Solomon Islands " " Oceania " " Melanesia " " 652857 " " 669823 " " +2.60 % " ]
[ " Macau " " Asia " " Eastern Asia " " 631636 " " 640445 " " +1.39 % " ]
[ " Montenegro " " Europe " " Southern Europe " " 627809 " " 627987 " " +0.03 % " ]
[ " Luxembourg " " Europe " " Western Europe " " 604245 " " 615729 " " +1.90 % " ]
[ " Western Sahara " " Africa " " Northern Africa " " 567402 " " 582463 " " +2.65 % " ]
[ " Suriname " " Americas " " South America " " 575990 " " 581372 " " +0.93 % " ]
[ " Cape Verde " " Africa " " Western Africa " " 543767 " " 549935 " " +1.13 % " ]
[ " Maldives " " Asia " " Southern Asia " " 515696 " " 530953 " " +2.96 % " ]
[ " Guadeloupe " " Americas " " Caribbean " " 446928 " " 447905 " " ]
[ " Malta " " Europe " " Southern Europe " " 439248 " " 440372 " " +0.26 % " ]
[ " Brunei " " Asia " " South - eastern Asia " " 428963 " " 433285 " " +1.01 % " ]
[ " Belize " " Americas " " Central America " " 383071 " " " " +1.90 % " ]
[ " Bahamas " " Americas " " Caribbean " " 385637 " " 389482 " " +1.00 % " ]
[ " Martinique " " Americas " " Caribbean " " 375673 " " 375554 " " −0.03 % " ]
[ " Iceland " " Europe " " Northern Europe " " 336713 " " 339031 " " +0.69 % " ]
[ " Vanuatu " " Oceania " " Melanesia " " 292680 " " 299882 " " +2.46 % " ]
[ " Barbados " " Americas " " Caribbean " " 286641 " " 287025 " " +0.13 % " ]
[ " New Caledonia " " Oceania " " Melanesia " " 279993 " " 282750 " " +0.98 % " ]
[ " French Guiana " " Americas " " South America " " 275713 " " 282731 " " +2.55 % " ]
[ " French Polynesia " " Oceania " " Polynesia " " 277679 " " 279287 " " ]
[ " Mayotte " " Africa " " Eastern Africa " " 259531 " " 266150 " " +2.55 % " ]
[ " São Tomé and " " Africa " " Middle Africa " " 211028 " " 215056 " " +1.91 % " ]
[ " Samoa " " Oceania " " Polynesia " " 196129 " " 197097 " " ]
[ " Saint Lucia " " Americas " " Caribbean " " 181889 " " 182790 " " +0.50 % " ]
[ " Guernsey and Jersey " " Europe " " Northern Europe " " 170499 " " 172259 " " +1.03 % " ]
[ " Guam " " Oceania " " Micronesia " " 165768 " " 167294 " " +0.92 % " ]
[ " Curaçao " " Americas " " Caribbean " " 162752 " " 163424 " " +0.41 % " ]
[ " Kiribati " " Oceania " " Micronesia " " 115847 " " 117606 " " +1.52 % " ]
[ " F.S. Micronesia " " Oceania " " Micronesia " " 112640 " " 113815 " " +1.04 % " ]
[ " Grenada " " Americas " " Caribbean " " 111454 " " 112003 " " ]
[ " Tonga " " Oceania " " Polynesia " " 110589 " " 110940 " " +0.32 % " ]
[ " Saint Vincent and the Grenadines " " Americas " " Caribbean " " 110211 " " 110589 " " +0.34 % " ]
[ " Aruba " " Americas " " Caribbean " " 105845 " " 106314 " " +0.44 % " ]
[ " U.S. Virgin Islands " " Americas " " Caribbean " " 104680 " " 104578 " " −0.10 % " ]
[ " Seychelles " " Africa " " Eastern Africa " " 97096 " " 97739 " " +0.66 % " ]
[ " Antigua and " " Americas " " Caribbean " " 96286 " " " " +0.86 % " ]
[ " Isle of Man " " Europe " " Northern Europe " " 84077 " " 84584 " " +0.60 % " ]
[ " Andorra " " Europe " " Southern Europe " " 77006 " " 77142 " " +0.18 % " ]
[ " Dominica " " Americas " " Caribbean " " 71625 " " 71808 " " +0.26 % " ]
[ " Cayman Islands " " Americas " " Caribbean " " 64174 " " 64948 " " +1.21 % " ]
[ " Bermuda " " Americas " " Northern America " " 62756 " " 62506 " " −0.40 % " ]
[ " Marshall Islands " " Oceania " " Micronesia " " 58413 " " 58791 " " +0.65 % " ]
[ " Greenland " " Americas " " Northern America " " 56564 " " 56672 " " +0.19 % " ]
[ " Northern Mariana Islands " " Oceania " " Micronesia " " 56882 " " 56188 " " −1.22 % " ]
[ " American Samoa " " Oceania " " Polynesia " " 55465 " " 55312 " " −0.28 % " ]
[ " Saint Kitts and Nevis " " Americas " " Caribbean " " 52441 " " 52823 " " +0.73 % " ]
[ " Faroe Islands " " Europe " " Northern Europe " " 48497 " " 48678 " " +0.37 % " ]
[ " Sint Maarten " " Americas " " Caribbean " " 41940 " " 42388 " " +1.07 % " ]
[ " Monaco " " Europe " " Western Europe " " 38682 " " 38964 " " +0.73 % " ]
[ " Turks and Caicos Islands " " Americas " " Caribbean " " 37665 " " 38191 " " +1.40 % " ]
[ " Liechtenstein " " Europe " " Western Europe " " 37910 " " 38019 " " +0.29 % " ]
[ " San Marino " " Europe " " Southern Europe " " 33785 " " 33860 " " +0.22 % " ]
[ " Gibraltar " " Europe " " Southern Europe " " 33718 " " 33701 " " −0.05 % " ]
[ " British Virgin Islands " " Americas " " Caribbean " " 29802 " " 30030 " " +0.77 % " ]
[ " Caribbean Netherlands " " Americas " " Caribbean " " 25711 " " 25979 " " +1.04 % " ]
[ " Palau " " Oceania " " Micronesia " " 17907 " " 18008 " " +0.56 % " ]
[ " Cook Islands " " Oceania " " Polynesia " " 17518 " " 17548 " " +0.17 % " ]
[ " Anguilla " " Americas " " Caribbean " " 14731 " " 14869 " " +0.94 % " ]
[ " Tuvalu " " Oceania " " Polynesia " " 11508 " " 11646 " " +1.20 % " ]
[ " Wallis and Futuna " " Oceania " " Polynesia " " 11661 " " 11432 " " −1.96 % " ]
[ " Nauru " " Oceania " " Micronesia " " 10670 " " 10756 " " +0.81 % " ]
[ " Saint Helena , Ascension and " " Africa " " Western Africa " " 6035 " " 6059 " " +0.40 % " ]
[ " Saint Pierre and Miquelon " " Americas " " Northern America " " 5849 " " 5822 " " −0.46 % " ]
[ " Montserrat " " Americas " " Caribbean " " 4993 " " 4989 " " −0.08 % " ]
[ " Falkland Islands " " Americas " " South America " " 3234 " " 3377 " " +4.42 % " ]
[ " Niue " " Oceania " " Polynesia " " 1620 " " 1615 " " −0.31 % " ]
[ " Tokelau " " Oceania " " Polynesia " " 1319 " " 1340 " " +1.59 % " ]
[ " Vatican City " " Europe " " Southern Europe " " 801 " " 799 " " −0.25 % " ]
;; ]
;; )
;; (def ^:const population-table
;; "
;; -population/population-by-country/
TODO create spec or use typed - clojure , e.g. Rate is percentage so it must be between 0 and 100
;; Country
;; Population
;; YearlyChangeRate
NetChange
;; Density
;; LandArea
;; Migrants
FertilityRate
MedianAge
UrbanPopulationRate
;; WorldShareRate
;; "
;; [
[ " China " 1439323776 0.39 5540090 153 9388211 -348399 1.7 38 61 18.47 ]
[ " India " 1380004385 0.99 13586631 464 2973190 -532687 2.2 28 35 17.7 ]
[ " United States " 331002651 0.59 1937734 36 9147420 954806 1.8 38 83 4.25 ]
[ " Indonesia " 273523615 1.07 2898047 151 1811570 -98955 2.3 30 56 3.51 ]
[ " Pakistan " 220892340 2.0 4327022 287 770880 -233379 3.6 23 35 2.83 ]
[ " Brazil " 212559417 0.72 1509890 25 8358140 21200 1.7 33 88 2.73 ]
[ " Nigeria " 206139589 2.58 5175990 226 910770 -60000 5.4 18 52 2.64 ]
[ " Bangladesh " 164689383 1.01 1643222 1265 130170 -369501 2.1 28 39 2.11 ]
[ " Russia " 145934462 0.04 62206 9 16376870 182456 1.8 40 74 1.87 ]
[ " Mexico " 128932753 1.06 1357224 66 1943950 -60000 2.1 29 84 1.65 ]
[ " Japan " 126476461 -0.3 -383840 347 364555 71560 1.4 48 92 1.62 ]
[ " Ethiopia " 114963588 2.57 2884858 115 1000000 30000 4.3 19 21 1.47 ]
[ " Philippines " 109581078 1.35 1464463 368 298170 -67152 2.6 26 47 1.41 ]
[ " Egypt " 102334404 1.94 1946331 103 995450 -38033 3.3 25 43 1.31 ]
[ " Vietnam " 97338579 0.91 876473 314 310070 -80000 2.1 32 38 1.25 ]
[ " DR Congo " 89561403 3.19 2770836 40 2267050 23861 6.0 17 46 1.15 ]
[ " Turkey " 84339067 1.09 909452 110 769630 283922 2.1 32 76 1.08 ]
[ " Iran " 83992949 1.3 1079043 52 1628550 -55000 2.2 32 76 1.08 ]
[ " Germany " 83783942 0.32 266897 240 348560 543822 1.6 46 76 1.07 ]
[ " Thailand " 69799978 0.25 174396 137 510890 19444 1.5 40 51 0.9 ]
[ " United Kingdom " 67886011 0.53 355839 281 241930 260650 1.8 40 83 0.87 ]
[ " France " 65273511 0.22 143783 119 547557 36527 1.9 42 82 0.84 ]
[ " Italy " 60461826 -0.15 -88249 206 294140 148943 1.3 47 69 0.78 ]
[ " Tanzania " 59734218 2.98 1728755 67 885800 -40076 4.9 18 37 0.77 ]
[ " South Africa " 59308690 1.28 750420 49 1213090 145405 2.4 28 67 0.76 ]
[ " Myanmar " 54409800 0.67 364380 83 653290 -163313 2.2 29 31 0.7 ]
[ " Kenya " 53771296 2.28 1197323 94 569140 -10000 3.5 20 28 0.69 ]
[ " South Korea " 51269185 0.09 43877 527 97230 11731 1.1 44 82 0.66 ]
[ " Colombia " 50882891 1.08 543448 46 1109500 204796 1.8 31 80 0.65 ]
[ " Spain " 46754778 0.04 18002 94 498800 40000 1.3 45 80 0.6 ]
[ " Uganda " 45741007 3.32 1471413 229 199810 168694 5.0 17 26 0.59 ]
[ " Argentina " 45195774 0.93 415097 17 2736690 4800 2.3 32 93 0.58 ]
[ " Algeria " 43851044 1.85 797990 18 2381740 -10000 3.1 29 73 0.56 ]
[ " Sudan " 43849260 2.42 1036022 25 1765048 -50000 4.4 20 35 0.56 ]
[ " Ukraine " 43733762 -0.59 -259876 75 579320 10000 1.4 41 69 0.56 ]
[ " Iraq " 40222493 2.32 912710 93 434320 7834 3.7 21 73 0.52 ]
[ " Afghanistan " 38928346 2.33 886592 60 652860 -62920 4.6 18 25 0.5 ]
[ " Poland " 37846611 -0.11 -41157 124 306230 -29395 1.4 42 60 0.49 ]
[ " Canada " 37742154 0.89 331107 4 9093510 242032 1.5 41 81 0.48 ]
[ " Morocco " 36910560 1.2 438791 83 446300 -51419 2.4 30 64 0.47 ]
[ " Saudi Arabia " 34813871 1.59 545343 16 2149690 134979 2.3 32 84 0.45 ]
[ " Uzbekistan " 33469203 1.48 487487 79 425400 -8863 2.4 28 50 0.43 ]
[ " Peru " 32971854 1.42 461401 26 1280000 99069 2.3 31 79 0.42 ]
[ " Angola " 32866272 3.27 1040977 26 1246700 6413 5.6 17 67 0.42 ]
[ " Malaysia " 32365999 1.3 416222 99 328550 50000 2.0 30 78 0.42 ]
[ " Mozambique " 31255435 2.93 889399 40 786380 -5000 4.9 18 38 0.4 ]
[ " Ghana " 31072940 2.15 655084 137 227540 -10000 3.9 22 57 0.4 ]
[ " Yemen " 29825964 2.28 664042 56 527970 -30000 3.8 20 38 0.38 ]
;; ["Nepal" 29136808 1.85 528098 203 143350 41710 1.9 25 21 0.37]
[ " Venezuela " 28435940 -0.28 -79889 32 882050 -653249 2.3 30 nil 0.36 ]
[ " Madagascar " 27691018 2.68 721711 48 581795 -1500 4.1 20 39 0.36 ]
[ " Cameroon " 26545863 2.59 669483 56 472710 -4800 4.6 19 56 0.34 ]
[ " Côte d'Ivoire " 26378274 2.57 661730 83 318000 -8000 4.7 19 51 0.34 ]
[ " North Korea " 25778816 0.44 112655 214 120410 -5403 1.9 35 63 0.33 ]
[ " Australia " 25499884 1.18 296686 3 7682300 158246 1.8 38 86 0.33 ]
[ " Niger " 24206644 3.84 895929 19 1266700 4000 7.0 15 17 0.31 ]
[ " Taiwan " 23816775 0.18 42899 673 35410 30001 1.2 42 79 0.31 ]
;; ["Sri Lanka" 21413249 0.42 89516 341 62710 -97986 2.2 34 18 0.27]
[ " Burkina Faso " 20903273 2.86 581895 76 273600 -25000 5.2 18 31 0.27 ]
[ " Mali " 20250833 3.02 592802 17 1220190 -40000 5.9 16 44 0.26 ]
[ " Romania " 19237691 -0.66 -126866 84 230170 -73999 1.6 43 55 0.25 ]
[ " Malawi " 19129952 2.69 501205 203 94280 -16053 4.3 18 18 0.25 ]
[ " Chile " 19116201 0.87 164163 26 743532 111708 1.7 35 85 0.25 ]
[ " Kazakhstan " 18776707 1.21 225280 7 2699700 -18000 2.8 31 58 0.24 ]
[ " Zambia " 18383955 2.93 522925 25 743390 -8000 4.7 18 45 0.24 ]
[ " Guatemala " 17915568 1.9 334096 167 107160 -9215 2.9 23 52 0.23 ]
[ " Ecuador " 17643054 1.55 269392 71 248360 36400 2.4 28 63 0.23 ]
[ " Syria " 17500658 2.52 430523 95 183630 -427391 2.8 26 60 0.22 ]
[ " Netherlands " 17134872 0.22 37742 508 33720 16000 1.7 43 92 0.22 ]
[ " Senegal " 16743927 2.75 447563 87 192530 -20000 4.7 19 49 0.21 ]
[ " Cambodia " 16718965 1.41 232423 95 176520 -30000 2.5 26 24 0.21 ]
[ " Chad " 16425864 3.0 478988 13 1259200 2000 5.8 17 23 0.21 ]
[ " Somalia " 15893222 2.92 450317 25 627340 -40000 6.1 17 47 0.2 ]
[ " Zimbabwe " 14862924 1.48 217456 38 386850 -116858 3.6 19 38 0.19 ]
[ " Guinea " 13132795 2.83 361549 53 245720 -4000 4.7 18 39 0.17 ]
;; ["Rwanda" 12952218 2.58 325268 525 24670 -9000 4.1 20 18 0.17]
[ " Benin " 12123200 2.73 322049 108 112760 -2000 4.9 19 48 0.16 ]
[ " Burundi " 11890784 3.12 360204 463 25680 2001 5.5 17 14 0.15 ]
[ " Tunisia " 11818619 1.06 123900 76 155360 -4000 2.2 33 70 0.15 ]
[ " Bolivia " 11673021 1.39 159921 11 1083300 -9504 2.8 26 69 0.15 ]
;; ["Belgium" 11589623 0.44 50295 383 30280 48000 1.7 42 98 0.15]
[ " Haiti " 11402528 1.24 139451 414 27560 -35000 3.0 24 57 0.15 ]
[ " Cuba " 11326616 -0.06 -6867 106 106440 -14400 1.6 42 78 0.15 ]
[ " South Sudan " 11193725 1.19 131612 18 610952 -174200 4.7 19 25 0.14 ]
[ " Dominican Republic " 10847910 1.01 108952 225 48320 -30000 2.4 28 85 0.14 ]
[ " Czech Republic " 10708981 0.18 19772 139 77240 22011 1.6 43 74 0.14 ]
[ " Greece " 10423054 -0.48 -50401 81 128900 -16000 1.3 46 85 0.13 ]
[ " Jordan " 10203134 1.0 101440 115 88780 10220 2.8 24 91 0.13 ]
[ " Portugal " 10196709 -0.29 -29478 111 91590 -6000 1.3 46 66 0.13 ]
[ " Azerbaijan " 10139177 0.91 91459 123 82658 1200 2.1 32 56 0.13 ]
[ " Sweden " 10099265 0.63 62886 25 410340 40000 1.9 41 88 0.13 ]
[ " Honduras " 9904607 1.63 158490 89 111890 -6800 2.5 24 57 0.13 ]
[ " United Arab Emirates " 9890402 1.23 119873 118 83600 40000 1.4 33 86 0.13 ]
[ " Hungary " 9660351 -0.25 -24328 107 90530 6000 1.5 43 72 0.12 ]
[ " Tajikistan " 9537645 2.32 216627 68 139960 -20000 3.6 22 27 0.12 ]
[ " Belarus " 9449323 -0.03 -3088 47 202910 8730 1.7 40 79 0.12 ]
[ " Austria " 9006398 0.57 51296 109 82409 65000 1.5 43 57 0.12 ]
[ " Papua New Guinea " 8947024 1.95 170915 20 452860 -800 3.6 22 13 0.11 ]
[ " Serbia " 8737371 -0.4 -34864 100 87460 4000 1.5 42 56 0.11 ]
[ " Israel " 8655535 1.6 136158 400 21640 10000 3.0 30 93 0.11 ]
[ " Switzerland " 8654622 0.74 63257 219 39516 52000 1.5 43 74 0.11 ]
[ " Togo " 8278724 2.43 196358 152 54390 -2000 4.4 19 43 0.11 ]
[ " Sierra Leone " 7976983 2.1 163768 111 72180 -4200 4.3 19 43 0.1 ]
[ " Hong Kong " 7496981 0.82 60827 7140 1050 29308 1.3 45 nil 0.1 ]
[ " Laos " 7275560 1.48 106105 32 230800 -14704 2.7 24 36 0.09 ]
[ " Paraguay " 7132538 1.25 87902 18 397300 -16556 2.4 26 62 0.09 ]
[ " Bulgaria " 6948445 -0.74 -51674 64 108560 -4800 1.6 45 76 0.09 ]
[ " Libya " 6871292 1.38 93840 4 1759540 -1999 2.3 29 78 0.09 ]
[ " Lebanon " 6825445 -0.44 -30268 667 10230 -30012 2.1 30 78 0.09 ]
[ " Nicaragua " 6624554 1.21 79052 55 120340 -21272 2.4 26 57 0.08 ]
[ " Kyrgyzstan " 6524195 1.69 108345 34 191800 -4000 3.0 26 36 0.08 ]
[ " El Salvador " 6486205 0.51 32652 313 20720 -40539 2.1 28 73 0.08 ]
[ " Turkmenistan " 6031200 1.5 89111 13 469930 -5000 2.8 27 53 0.08 ]
[ " Singapore " 5850342 0.79 46005 8358 700 27028 1.2 42 nil 0.08 ]
[ " Denmark " 5792202 0.35 20326 137 42430 15200 1.8 42 88 0.07 ]
[ " Finland " 5540720 0.15 8564 18 303890 14000 1.5 43 86 0.07 ]
[ " Congo " 5518087 2.56 137579 16 341500 -4000 4.5 19 70 0.07 ] ; ; Congo - Kinshasa
[ " Slovakia " 5459642 0.05 2629 114 48088 1485 1.5 41 54 0.07 ]
[ " Norway " 5421241 0.79 42384 15 365268 28000 1.7 40 83 0.07 ]
[ " Oman " 5106626 2.65 131640 16 309500 87400 2.9 31 87 0.07 ]
[ " State of Palestine " 5101414 2.41 119994 847 6020 -10563 3.7 21 80 0.07 ]
[ " Costa Rica " 5094118 0.92 46557 100 51060 4200 1.8 33 80 0.07 ]
[ " Liberia " 5057681 2.44 120307 53 96320 -5000 4.4 19 53 0.06 ]
;; ["Ireland" 4937786 1.13 55291 72 68890 23604 1.8 38 63 0.06]
[ " Central African Republic " 4829767 1.78 84582 8 622980 -40000 4.8 18 43 0.06 ]
[ " New Zealand " 4822233 0.82 39170 18 263310 14881 1.9 38 87 0.06 ]
[ " Mauritania " 4649658 2.74 123962 5 1030700 5000 4.6 20 57 0.06 ]
[ " Panama " 4314767 1.61 68328 58 74340 11200 2.5 30 68 0.06 ]
[ " Kuwait " 4270571 1.51 63488 240 17820 39520 2.1 37 nil 0.05 ]
[ " Croatia " 4105267 -0.61 -25037 73 55960 -8001 1.4 44 58 0.05 ]
[ " Moldova " 4033963 -0.23 -9300 123 32850 -1387 1.3 38 43 0.05 ]
[ " Georgia " 3989167 -0.19 -7598 57 69490 -10000 2.1 38 58 0.05 ]
[ " Eritrea " 3546421 1.41 49304 35 101000 -39858 4.1 19 63 0.05 ]
[ " Uruguay " 3473730 0.35 11996 20 175020 -3000 2.0 36 96 0.04 ]
[ " Bosnia and Herzegovina " 3280819 -0.61 -20181 64 51000 -21585 1.3 43 52 0.04 ]
[ " Mongolia " 3278290 1.65 53123 2 1553560 -852 2.9 28 67 0.04 ]
[ " Armenia " 2963243 0.19 5512 104 28470 -4998 1.8 35 63 0.04 ]
[ " Jamaica " 2961167 0.44 12888 273 10830 -11332 2.0 31 55 0.04 ]
[ " Qatar " 2881053 1.73 48986 248 11610 40000 1.9 32 96 0.04 ]
[ " Albania " 2877797 -0.11 -3120 105 27400 -14000 1.6 36 63 0.04 ]
[ " Puerto Rico " 2860853 -2.47 -72555 323 8870 -97986 1.2 44 nil 0.04 ]
[ " Lithuania " 2722289 -1.35 -37338 43 62674 -32780 1.7 45 71 0.03 ]
[ " Namibia " 2540905 1.86 46375 3 823290 -4806 3.4 22 55 0.03 ]
[ " Gambia " 2416668 2.94 68962 239 10120 -3087 5.3 18 59 0.03 ]
[ " Botswana " 2351627 2.08 47930 4 566730 3000 2.9 24 73 0.03 ]
[ " Gabon " 2225734 2.45 53155 9 257670 3260 4.0 23 87 0.03 ]
[ " Lesotho " 2142249 0.8 16981 71 30360 -10047 3.2 24 31 0.03 ]
[ " North Macedonia " 2083374 0.0 -85 83 25220 -1000 1.5 39 59 0.03 ]
[ " Slovenia " 2078938 0.01 284 103 20140 2000 1.6 45 55 0.03 ]
[ " Guinea - Bissau " 1968001 2.45 47079 70 28120 -1399 4.5 19 45 0.03 ]
[ " Latvia " 1886198 -1.08 -20545 30 62200 -14837 1.7 44 69 0.02 ]
[ " Bahrain " 1701575 3.68 60403 2239 760 47800 2.0 32 89 0.02 ]
[ " Equatorial Guinea " 1402985 3.47 46999 50 28050 16000 4.6 22 73 0.02 ]
[ " Trinidad and Tobago " 1399488 0.32 4515 273 5130 -800 1.7 36 52 0.02 ]
[ " Estonia " 1326535 0.07 887 31 42390 3911 1.6 42 68 0.02 ]
[ " Timor - Leste " 1318445 1.96 25326 89 14870 -5385 4.1 21 33 0.02 ]
[ " Mauritius " 1271768 0.17 2100 626 2030 0 1.4 37 41 0.02 ]
[ " Cyprus " 1207359 0.73 8784 131 9240 5000 1.3 37 67 0.02 ]
[ " Eswatini " 1160164 1.05 12034 67 17200 -8353 3.0 21 30 0.01 ]
[ " Djibouti " 988000 1.48 14440 43 23180 900 2.8 27 79 0.01 ]
[ " Fiji " 896445 0.73 6492 49 18270 -6202 2.8 28 59 0.01 ]
;; ["Réunion" 895312 0.72 6385 358 2500 -1256 2.3 36 100 0.01]
[ " Comoros " 869601 2.2 18715 467 1861 -2000 4.2 20 29 0.01 ]
[ " Guyana " 786552 0.48 3786 4 196850 -6000 2.5 27 27 0.01 ]
;; ["Bhutan" 771608 1.12 8516 20 38117 320 2.0 28 46 0.01]
[ " Solomon Islands " 686884 2.55 17061 25 27990 -1600 4.4 20 23 0.01 ]
[ " Macao " 649335 1.39 8890 21645 30 5000 1.2 39 nil 0.01 ]
[ " Montenegro " 628066 0.01 79 47 13450 -480 1.8 39 68 0.01 ]
[ " Luxembourg " 625978 1.66 10249 242 2590 9741 1.5 40 88 0.01 ]
[ " Western Sahara " 597339 2.55 14876 2 266000 5582 2.4 28 87 0.01 ]
[ " Suriname " 586632 0.9 5260 4 156000 -1000 2.4 29 65 0.01 ]
[ " Cabo Verde " 555987 1.1 6052 138 4030 -1342 2.3 28 68 0.01 ]
[ " Maldives " 540544 1.81 9591 1802 300 11370 1.9 30 35 0.01 ]
[ " Malta " 441543 0.27 1171 1380 320 900 1.5 43 93 0.01 ]
[ " Brunei " 437479 0.97 4194 83 5270 0 1.8 32 80 0.01 ]
[ " Guadeloupe " 400124 0.02 68 237 1690 -1440 2.2 44 nil 0.01 ]
[ " Belize " 397628 1.86 7275 17 22810 1200 2.3 25 46 0.01 ]
[ " Bahamas " 393244 0.97 3762 39 10010 1000 1.8 32 86 0.01 ]
[ " Martinique " 375265 -0.08 -289 354 1060 -960 1.9 47 92 0.0 ]
[ " Iceland " 341243 0.65 2212 3 100250 380 1.8 37 94 0.0 ]
[ " Vanuatu " 307145 2.42 7263 25 12190 120 3.8 21 24 0.0 ]
[ " French Guiana " 298682 2.7 7850 4 82200 1200 3.4 25 87 0.0 ]
[ " Barbados " 287375 0.12 350 668 430 -79 1.6 40 31 0.0 ]
[ " New Caledonia " 285498 0.97 2748 16 18280 502 2.0 34 72 0.0 ]
[ " French Polynesia " 280908 0.58 1621 77 3660 -1000 2.0 34 64 0.0 ]
[ " Mayotte " 272815 2.5 6665 728 375 0 3.7 20 46 0.0 ]
[ " Sao Tome & Principe " 219159 1.91 4103 228 960 -1680 4.4 19 74 0.0 ]
[ " Samoa " 198414 0.67 1317 70 2830 -2803 3.9 22 18 0.0 ]
[ " Saint Lucia " 183627 0.46 837 301 610 0 1.4 34 19 0.0 ]
[ " Channel Islands " 173863 0.93 1604 915 190 1351 1.5 43 30 0.0 ]
[ " Guam " 168775 0.89 1481 313 540 -506 2.3 31 95 0.0 ]
[ " Curaçao " 164093 0.41 669 370 444 515 1.8 42 89 0.0 ]
[ " Kiribati " 119449 1.57 1843 147 810 -800 3.6 23 57 0.0 ]
[ " Micronesia " 115023 1.06 1208 164 700 -600 3.1 24 21 0.0 ]
[ " Grenada " 112523 0.46 520 331 340 -200 2.1 32 35 0.0 ]
[ " St. Vincent & Grenadines " 110940 0.32 351 284 390 -200 1.9 33 53 0.0 ]
[ " Aruba " 106766 0.43 452 593 180 201 1.9 41 44 0.0 ]
[ " Tonga " 105695 1.15 1201 147 720 -800 3.6 22 24 0.0 ]
[ " U.S. Virgin Islands " 104425 -0.15 -153 298 350 -451 2.0 43 96 0.0 ]
[ " Seychelles " 98347 0.62 608 214 460 -200 2.5 34 56 0.0 ]
[ " Antigua and " 97929 0.84 811 223 440 0 2.0 34 26 0.0 ]
[ " Isle of Man " 85033 0.53 449 149 570 nil nil 53 0.0 ]
[ " Andorra " 77265 0.16 123 164 470 nil nil 88 0.0 ]
[ " Dominica " 71986 0.25 178 96 750 nil nil 74 0.0 ]
;; ["Cayman Islands" 65722 1.19 774 274 240 nil nil 97 0.0]
[ " Bermuda " 62278 -0.36 -228 1246 50 nil nil 97 0.0 ]
[ " Marshall Islands " 59190 0.68 399 329 180 nil nil 70 0.0 ]
[ " Northern Mariana Islands " 57559 0.6 343 125 460 nil nil 88 0.0 ]
[ " Greenland " 56770 0.17 98 0 410450 nil nil 87 0.0 ]
[ " American Samoa " 55191 -0.22 -121 276 200 nil nil 88 0.0 ]
[ " Saint Kitts & Nevis " 53199 0.71 376 205 260 nil nil 33 0.0 ]
[ " Faeroe Islands " 48863 0.38 185 35 1396 nil nil 43 0.0 ]
[ " Sint Maarten " 42876 1.15 488 1261 34 nil nil 96 0.0 ]
[ " Monaco " 39242 0.71 278 26337 1 nil nil nil 0.0 ]
[ " Turks and Caicos " 38717 1.38 526 41 950 nil nil 89 0.0 ]
[ " Saint Martin " 38666 1.75 664 730 53 nil nil 0 0.0 ]
[ " Liechtenstein " 38128 0.29 109 238 160 nil nil 15 0.0 ]
[ " San Marino " 33931 0.21 71 566 60 nil nil 97 0.0 ]
;; ["Gibraltar" 33691 -0.03 -10 3369 10 nil nil nil 0.0]
[ " British Virgin Islands " 30231 0.67 201 202 150 nil nil 52 0.0 ]
[ " Caribbean Netherlands " 26223 0.94 244 80 328 nil nil 75 0.0 ]
[ " Palau " 18094 0.48 86 39 460 nil nil nil 0.0 ]
[ " Cook Islands " 17564 0.09 16 73 240 nil nil 75 0.0 ]
[ " Anguilla " 15003 0.9 134 167 90 nil nil nil 0.0 ]
[ " Tuvalu " 11792 1.25 146 393 30 nil nil 62 0.0 ]
[ " Wallis & Futuna " 11239 -1.69 -193 80 140 nil nil 0 0.0 ]
[ " Nauru " 10824 0.63 68 541 20 nil nil nil 0.0 ]
[ " Saint Barthelemy " 9877 0.3 30 470 21 nil nil 0 0.0 ]
[ " Saint Helena " 6077 0.3 18 16 390 nil nil 27 0.0 ]
[ " Saint Pierre & Miquelon " 5794 -0.48 -28 25 230 nil nil 100 0.0 ]
[ " Montserrat " 4992 0.06 3 50 100 nil nil 10 0.0 ]
[ " Falkland Islands " 3480 3.05 103 0 12170 nil nil 66 0.0 ]
[ " Niue " 1626 0.68 11 6 260 nil nil 46 0.0 ]
[ " Tokelau " 1357 1.27 17 136 10 nil nil 0 0.0 ]
[ " Holy See " 801 0.25 2 2003 0 nil nil nil 0.0 ]
;; ])
;; (def population
;; (transduce
;; (map (fn [[Country Population YearlyChangeRate NetChange Density LandArea
;; Migrants FertilityRate MedianAge UrbanPopulationRate
;; WorldShareRate]]
[ ( ccr / get - country - code Country ) Population ] ) )
;; conj (sorted-map)
;; population-table))
( defn growth - per - report
" Population : number of people at the beginning of the year
YearlyChangeRate : in percents E.g. :
= > ( growth - per - report 83517045 0.3 ) ; ; 686 "
;; [Population YearlyChangeRate]
;; (int
;; (/ (* Population (/ YearlyChangeRate 100))
;; 365)))
;; (printf "Current-ns [%s] loading %s ... done\n" *ns* 'corona.tables)
| null | https://raw.githubusercontent.com/Bost/corona_cases/415d71a29d3e6864ec9095f6236bb86bdcb04647/src/corona/tables.clj | clojure | (printf "Current-ns [%s] loading %s ...\n" *ns* 'corona.tables)
(ns corona.tables
(:require
[corona.countries :as ccr]
))
;; (set! *warn-on-reflection* true)
(def ^:const regions
"
Contains \"Americas\" which is not a standard name. Bloody hell!
(United_Nations)#List"
[
; Congo - Brazzaville
]
)
(def ^:const population-table
"
-population/population-by-country/
Country
Population
YearlyChangeRate
Density
LandArea
Migrants
WorldShareRate
"
[
["Nepal" 29136808 1.85 528098 203 143350 41710 1.9 25 21 0.37]
["Sri Lanka" 21413249 0.42 89516 341 62710 -97986 2.2 34 18 0.27]
["Rwanda" 12952218 2.58 325268 525 24670 -9000 4.1 20 18 0.17]
["Belgium" 11589623 0.44 50295 383 30280 48000 1.7 42 98 0.15]
; Congo - Kinshasa
["Ireland" 4937786 1.13 55291 72 68890 23604 1.8 38 63 0.06]
["Réunion" 895312 0.72 6385 358 2500 -1256 2.3 36 100 0.01]
["Bhutan" 771608 1.12 8516 20 38117 320 2.0 28 46 0.01]
["Cayman Islands" 65722 1.19 774 274 240 nil nil 97 0.0]
["Gibraltar" 33691 -0.03 -10 3369 10 nil nil nil 0.0]
])
(def population
(transduce
(map (fn [[Country Population YearlyChangeRate NetChange Density LandArea
Migrants FertilityRate MedianAge UrbanPopulationRate
WorldShareRate]]
conj (sorted-map)
population-table))
; 686 "
[Population YearlyChangeRate]
(int
(/ (* Population (/ YearlyChangeRate 100))
365)))
(printf "Current-ns [%s] loading %s ... done\n" *ns* 'corona.tables) |
[ " China " " Asia " " Eastern Asia " " 1427647786 " " 1433783686 " " +0.43 % " ]
[ " India " " Asia " " Southern Asia " " 1352642280 " " 1366417754 " " +1.02 % " ]
[ " United States " " Americas " " Northern America " " 327096265 " " 329064917 " " +0.60 % " ]
[ " Indonesia " " Asia " " South - eastern Asia " " 267670543 " " 270625568 " " +1.10 % " ]
[ " Pakistan " " Asia " " Southern Asia " " 212228286 " " 216565318 " " +2.04 % " ]
[ " Brazil " " Americas " " South America " " 209469323 " " 211049527 " " +0.75 % " ]
[ " Nigeria " " Africa " " Western Africa " " 195874683 " " 200963599 " " +2.60 % " ]
[ " Bangladesh " " Asia " " Southern Asia " " 161376708 " " 163046161 " " +1.03 % " ]
[ " Russia " " Europe " " Eastern Europe " " 145734038 " " 145872256 " " +0.09 % " ]
[ " Mexico " " Americas " " Central America " " 126190788 " " 127575529 " " +1.10 % " ]
[ " Japan " " Asia " " Eastern Asia " " 127202192 " " 126860301 " " −0.27 % " ]
[ " Ethiopia " " Africa " " Eastern Africa " " 109224414 " " 112078730 " " +2.61 % " ]
[ " Philippines " " Asia " " South - eastern Asia " " 106651394 " " 108116615 " " +1.37 % " ]
[ " Egypt " " Africa " " Northern Africa " " 98423598 " " 100388073 " " +2.00 % " ]
[ " Vietnam " " Asia " " South - eastern Asia " " 95545962 " " 96462106 " " +0.96 % " ]
[ " Germany " " Europe " " Western Europe " " 83124418 " " 83517045 " " +0.47 % " ]
[ " Turkey " " Asia " " Western Asia " " 82340088 " " 83429615 " " +1.32 % " ]
[ " Iran " " Asia " " Southern Asia " " 81800188 " " 82913906 " " +1.36 % " ]
[ " Thailand " " Asia " " South - eastern Asia " " 68863514 " " 69037513 " " +0.25 % " ]
[ " United Kingdom " " Europe " " Northern Europe " " 67141684 " " 67530172 " " +0.58 % " ]
[ " France " " Europe " " Western Europe " " 64990511 " " 65129728 " " +0.21 % " ]
[ " Italy " " Europe " " Southern Europe " " 60627291 " " 60550075 " " −0.13 % " ]
[ " South Africa " " Africa " " Southern Africa " " 57792518 " " 58558270 " " +1.33 % " ]
[ " Tanzania " " Africa " " Eastern Africa " " 56313438 " " 58005463 " " +3.00 % " ]
[ " Myanmar " " Asia " " South - eastern Asia " " 53708320 " " 54045420 " " +0.63 % " ]
[ " Kenya " " Africa " " Eastern Africa " " 51392565 " " 52573973 " " +2.30 % " ]
[ " South Korea " " Asia " " Eastern Asia " " 51171706 " " 51225308 " " +0.10 % " ]
[ " Colombia " " Americas " " South America " " 49661048 " " 50339443 " " +1.37 % " ]
[ " Spain " " Europe " " Southern Europe " " 46692858 " " 46736776 " " +0.09 % " ]
[ " Argentina " " Americas " " South America " " 44361150 " " 44780677 " " +0.95 % " ]
[ " Uganda " " Africa " " Eastern Africa " " 42729036 " " 44269594 " " +3.61 % " ]
[ " Ukraine " " Europe " " Eastern Europe " " 44246156 " " 43993638 " " −0.57 % " ]
[ " Algeria " " Africa " " Northern Africa " " 42228408 " " 43053054 " " +1.95 % " ]
[ " Sudan " " Africa " " Northern Africa " " 41801533 " " 42813238 " " +2.42 % " ]
[ " Iraq " " Asia " " Western Asia " " 38433600 " " 39309783 " " " ]
[ " Afghanistan " " Asia " " Southern Asia " " 37171921 " " 38041754 " " +2.34 % " ]
[ " Poland " " Europe " " Eastern Europe " " 37921592 " " 37887768 " " −0.09 % " ]
[ " Canada " " Americas " " Northern America " " 37074562 " " 37411047 " " +0.91 % " ]
[ " Morocco " " Africa " " Northern Africa " " 36029093 " " 36471769 " " +1.23 % " ]
[ " Saudi Arabia " " Asia " " Western Asia " " 33702756 " " 34268528 " " +1.68 % " ]
[ " Uzbekistan " " Asia " " Central Asia " " 32476244 " " 32981716 " " +1.56 % " ]
[ " Peru " " Americas " " South America " " 31989260 " " 32510453 " " +1.63 % " ]
[ " Malaysia " " Asia " " South - eastern Asia " " 31528033 " " 31949777 " " +1.34 % " ]
[ " Angola " " Africa " " Middle Africa " " 30809787 " " 31825295 " " +3.30 % " ]
[ " Mozambique " " Africa " " Eastern Africa " " 29496004 " " 30366036 " " % " ]
[ " Yemen " " Asia " " Western Asia " " 28498683 " " 29161922 " " +2.33 % " ]
[ " Ghana " " Africa " " Western Africa " " 28206728 " " 28833629 " " +2.22 % " ]
[ " Nepal " " Asia " " Southern Asia " " 28095714 " " 28608710 " " +1.83 % " ]
[ " Venezuela " " Americas " " South America " " 28887118 " " 28515829 " " −1.29 % " ]
[ " Madagascar " " Africa " " Eastern Africa " " 26262313 " " 26969307 " " +2.69 % " ]
[ " North Korea " " Asia " " Eastern Asia " " 25549604 " " 25666161 " " +0.46 % " ]
[ " Ivory Coast " " Africa " " Western Africa " " 25069230 " " 25716544 " " +2.58 % " ]
[ " Cameroon " " Africa " " Middle Africa " " 25216267 " " 25876380 " " +2.62 % " ]
[ " Australia " " Oceania " " Australia and New Zealand " " 24898152 " " 25203198 " " +1.23 % " ]
[ " Taiwan " " Asia " " Eastern Asia " " 23726460 " " 23773876 " " +0.20 % " ]
[ " Niger " " Africa " " Western Africa " " 22442822 " " 23310715 " " +3.87 % " ]
[ " Sri Lanka " " Asia " " Southern Asia " " 21228763 " " 21323733 " " +0.45 % " ]
[ " Burkina Faso " " Africa " " Western Africa " " 19751466 " " 20321378 " " +2.89 % " ]
[ " Mali " " Africa " " Western Africa " " 19077749 " " 19658031 " " +3.04 % " ]
[ " Romania " " Europe " " Eastern Europe " " 19506114 " " 19364557 " " −0.73 % " ]
[ " Malawi " " Africa " " Eastern Africa " " 18143217 " " 18628747 " " +2.68 % " ]
[ " Chile " " Americas " " South America " " 18729160 " " 18952038 " " +1.19 % " ]
[ " Kazakhstan " " Asia " " Central Asia " " 18319618 " " 18551427 " " +1.27 % " ]
[ " Zambia " " Africa " " Eastern Africa " " 17351708 " " 17861030 " " +2.94 % " ]
[ " Guatemala " " Americas " " Central America " " 17247849 " " 17581472 " " +1.93 % " ]
[ " Ecuador " " Americas " " South America " " 17084358 " " 17373662 " " +1.69 % " ]
[ " Netherlands " " Europe " " Western Europe " " 17059560 " " 17097130 " " +0.22 % " ]
[ " Syria " " Asia " " Western Asia " " 16945057 " " 17070135 " " +0.74 % " ]
[ " Cambodia " " Asia " " South - eastern Asia " " 16249792 " " 16486542 " " +1.46 % " ]
[ " Senegal " " Africa " " Western Africa " " 15854323 " " 16296364 " " +2.79 % " ]
[ " Chad " " Africa " " Middle Africa " " 15477729 " " 15946876 " " +3.03 % " ]
[ " Somalia " " Africa " " Eastern Africa " " 15008226 " " 15442905 " " +2.90 % " ]
[ " Zimbabwe " " Africa " " Eastern Africa " " 14438802 " " 14645468 " " +1.43 % " ]
[ " Guinea " " Africa " " Western Africa " " 12414293 " " 12771246 " " +2.88 % " ]
[ " Rwanda " " Africa " " Eastern Africa " " 12301970 " " 12626950 " " +2.64 % " ]
[ " Benin " " Africa " " Western Africa " " 11485044 " " 11801151 " " +2.75 % " ]
[ " Tunisia " " Africa " " Northern Africa " " 11565201 " " 11694719 " " +1.12 % " ]
[ " Belgium " " Europe " " Western Europe " " 11482178 " " 11539328 " " +0.50 % " ]
[ " Bolivia " " Americas " " South America " " 11353142 " " 11513100 " " +1.41 % " ]
[ " Cuba " " Americas " " Caribbean " " 11338134 " " 11333483 " " −0.04 % " ]
[ " Haiti " " Americas " " Caribbean " " 11123178 " " 11263770 " " +1.26 % " ]
[ " South Sudan " " Africa " " Eastern Africa " " 10975927 " " 11062113 " " +0.79 % " ]
[ " Burundi " " Africa " " Eastern Africa " " 10524117 " " 10864245 " " +3.23 % " ]
[ " Dominican Republic " " Americas " " Caribbean " " 10627141 " " 10738958 " " +1.05 % " ]
[ " Czech Republic " " Europe " " Eastern Europe " " 10665677 " " 10689209 " " +0.22 % " ]
[ " Greece " " Europe " " Southern Europe " " 10522246 " " 10473455 " " −0.46 % " ]
[ " Portugal " " Europe " " Southern Europe " " 10256193 " " 10226187 " " −0.29 % " ]
[ " " " Asia " " Western Asia " " 9965318 " " 10101694 " " +1.37 % " ]
[ " Azerbaijan " " Asia " " Western Asia " " 9949537 " " 10047718 " " +0.99 % " ]
[ " Sweden " " Europe " " Northern Europe " " 9971638 " " 10036379 " " +0.65 % " ]
[ " United Arab Emirates " " Asia " " Western Asia " " 9630959 " " 9770529 " " +1.45 % " ]
[ " Honduras " " Americas " " Central America " " 9587522 " " 9746117 " " +1.65 % " ]
[ " Hungary " " Europe " " Eastern Europe " " 9707499 " " 9684679 " " −0.24 % " ]
[ " Belarus " " Europe " " Eastern Europe " " 9452617 " " 9452411 " " 0.00 % " ]
[ " Tajikistan " " Asia " " Central Asia " " 9100835 " " 9321018 " " +2.42 % " ]
[ " Austria " " Europe " " Western Europe " " 8891388 " " 8955102 " " +0.72 % " ]
[ " Papua New Guinea " " Oceania " " Melanesia " " 8606323 " " 8776109 " " +1.97 % " ]
[ " Serbia " " Europe " " Southern Europe " " 8802754 " " 8772235 " " −0.35 % " ]
[ " Switzerland " " Europe " " Western Europe " " 8525611 " " 8591365 " " +0.77 % " ]
[ " Israel " " Asia " " Western Asia " " 8381516 " " 8519377 " " +1.64 % " ]
[ " Togo " " Africa " " Western Africa " " 7889093 " " 8082366 " " +2.45 % " ]
[ " Sierra Leone " " Africa " " Western Africa " " 7650150 " " 7813215 " " +2.13 % " ]
[ " Hong Kong " " Asia " " Eastern Asia " " 7371730 " " 7436154 " " +0.87 % " ]
[ " Laos " " Asia " " South - eastern Asia " " 7061507 " " 7169455 " " +1.53 % " ]
[ " Paraguay " " Americas " " South America " " 6956066 " " 7044636 " " +1.27 % " ]
[ " Bulgaria " " Europe " " Eastern Europe " " 7051608 " " 7000119 " " −0.73 % " ]
[ " Lebanon " " Asia " " Western Asia " " 6859408 " " 6855713 " " −0.05 % " ]
[ " Libya " " Africa " " Northern Africa " " 6678559 " " 6777452 " " +1.48 % " ]
[ " Nicaragua " " Americas " " Central America " " 6465501 " " 6545502 " " +1.24 % " ]
[ " El Salvador " " Americas " " Central America " " 6420746 " " 6453553 " " +0.51 % " ]
[ " Kyrgyzstan " " Asia " " Central Asia " " 6304030 " " 6415850 " " +1.77 % " ]
[ " Turkmenistan " " Asia " " Central Asia " " 5850901 " " 5942089 " " +1.56 % " ]
[ " Singapore " " Asia " " South - eastern Asia " " 5757499 " " 5804337 " " +0.81 % " ]
[ " Denmark " " Europe " " Northern Europe " " 5752126 " " 5771876 " " +0.34 % " ]
[ " Finland " " Europe " " Northern Europe " " 5522576 " " 5532156 " " +0.17 % " ]
[ " Slovakia " " Europe " " Eastern Europe " " 5453014 " " 5457013 " " +0.07 % " ]
[ " Congo " " Africa " " Middle Africa " " 5244359 " " 5380508 " " +2.60 % " ]
[ " Norway " " Europe " " Northern Europe " " 5337962 " " 5378857 " " +0.77 % " ]
[ " Costa Rica " " Americas " " Central America " " 4999441 " " 5047561 " " +0.96 % " ]
[ " Palestine " " Asia " " Western Asia " " 4862979 " " 4981420 " " +2.44 % " ]
[ " Oman " " Asia " " Western Asia " " 4829473 " " 4974986 " " +3.01 % " ]
[ " Liberia " " Africa " " Western Africa " " 4818973 " " 4937374 " " +2.46 % " ]
[ " Ireland " " Europe " " Northern Europe " " 4818690 " " 4882495 " " +1.32 % " ]
[ " New Zealand " " Oceania " " Australia and New Zealand " " 4743131 " " " " +0.84 % " ]
[ " Central African Republic " " Africa " " Middle Africa " " 4666368 " " 4745185 " " +1.69 % " ]
[ " Mauritania " " Africa " " Western Africa " " 4403313 " " 4525696 " " +2.78 % " ]
[ " Panama " " Americas " " Central America " " 4176869 " " 4246439 " " +1.67 % " ]
[ " Kuwait " " Asia " " Western Asia " " 4137312 " " 4207083 " " +1.69 % " ]
[ " Croatia " " Europe " " Southern Europe " " 4156405 " " 4130304 " " −0.63 % " ]
[ " Moldova " " Europe " " Eastern Europe " " 4051944 " " 4043263 " " −0.21 % " ]
[ " Georgia " " Asia " " Western Asia " " 4002942 " " 3996765 " " −0.15 % " ]
[ " Eritrea " " Africa " " Eastern Africa " " 3452786 " " 3497117 " " +1.28 % " ]
[ " Uruguay " " Americas " " South America " " 3449285 " " 3461734 " " +0.36 % " ]
[ " Bosnia and Herzegovina " " Europe " " Southern Europe " " 3323925 " " 3301000 " " −0.69 % " ]
[ " Mongolia " " Asia " " Eastern Asia " " 3170216 " " 3225167 " " +1.73 % " ]
[ " Armenia " " Asia " " Western Asia " " 2951745 " " 2957731 " " +0.20 % " ]
[ " Jamaica " " Americas " " Caribbean " " 2934847 " " 2948279 " " +0.46 % " ]
[ " Puerto Rico " " Americas " " Caribbean " " 3039596 " " 2933408 " " −3.49 % " ]
[ " Albania " " Europe " " Southern Europe " " 2882740 " " 2880917 " " −0.06 % " ]
[ " Qatar " " Asia " " Western Asia " " 2781682 " " 2832067 " " +1.81 % " ]
[ " Lithuania " " Europe " " Northern Europe " " 2801264 " " 2759627 " " −1.49 % " ]
[ " Namibia " " Africa " " Southern Africa " " 2448301 " " 2494530 " " +1.89 % " ]
[ " Gambia " " Africa " " Western Africa " " 2280094 " " 2347706 " " +2.97 % " ]
[ " Botswana " " Africa " " Southern Africa " " 2254068 " " 2303697 " " +2.20 % " ]
[ " Gabon " " Africa " " Middle Africa " " 2119275 " " 2172579 " " +2.52 % " ]
[ " Lesotho " " Africa " " Southern Africa " " 2108328 " " 2125268 " " +0.80 % " ]
[ " North Macedonia " " Europe " " Southern Europe " " 2082957 " " 2083459 " " +0.02 % " ]
[ " Slovenia " " Europe " " Southern Europe " " 2077837 " " 2078654 " " +0.04 % " ]
[ " Guinea - Bissau " " Africa " " Western Africa " " 1874303 " " 1920922 " " +2.49 % " ]
[ " Latvia " " Europe " " Northern Europe " " 1928459 " " 1906743 " " −1.13 % " ]
[ " Bahrain " " Asia " " Western Asia " " 1569446 " " 1641172 " " +4.57 % " ]
[ " Trinidad and Tobago " " Americas " " Caribbean " " 1389843 " " 1394973 " " +0.37 % " ]
[ " Equatorial Guinea " " Africa " " Middle Africa " " 1308975 " " 1355986 " " +3.59 % " ]
[ " Estonia " " Europe " " Northern Europe " " 1322920 " " 1325648 " " +0.21 % " ]
[ " East Timor " " Asia " " South - eastern Asia " " 1267974 " " 1293119 " " +1.98 % " ]
[ " Mauritius " " Africa " " Eastern Africa " " 1189265 " " 1198575 " " +0.78 % " ]
[ " Cyprus " " Asia " " Western Asia " " 1170125 " " 1179551 " " +0.81 % " ]
[ " Eswatini " " Africa " " Southern Africa " " 1136281 " " 1148130 " " +1.04 % " ]
[ " Djibouti " " Africa " " Eastern Africa " " 958923 " " 973560 " " +1.53 % " ]
[ " Fiji " " Oceania " " Melanesia " " 883483 " " 889953 " " +0.73 % " ]
[ " Réunion " " Africa " " Eastern Africa " " 882526 " " 888927 " " +0.73 % " ]
[ " Comoros " " Africa " " Eastern Africa " " " " 850886 " " +2.23 % " ]
[ " Guyana " " Americas " " South America " " 779006 " " 782766 " " +0.48 % " ]
[ " Bhutan " " Asia " " Southern Asia " " 754388 " " 763092 " " +1.15 % " ]
[ " Solomon Islands " " Oceania " " Melanesia " " 652857 " " 669823 " " +2.60 % " ]
[ " Macau " " Asia " " Eastern Asia " " 631636 " " 640445 " " +1.39 % " ]
[ " Montenegro " " Europe " " Southern Europe " " 627809 " " 627987 " " +0.03 % " ]
[ " Luxembourg " " Europe " " Western Europe " " 604245 " " 615729 " " +1.90 % " ]
[ " Western Sahara " " Africa " " Northern Africa " " 567402 " " 582463 " " +2.65 % " ]
[ " Suriname " " Americas " " South America " " 575990 " " 581372 " " +0.93 % " ]
[ " Cape Verde " " Africa " " Western Africa " " 543767 " " 549935 " " +1.13 % " ]
[ " Maldives " " Asia " " Southern Asia " " 515696 " " 530953 " " +2.96 % " ]
[ " Guadeloupe " " Americas " " Caribbean " " 446928 " " 447905 " " ]
[ " Malta " " Europe " " Southern Europe " " 439248 " " 440372 " " +0.26 % " ]
[ " Brunei " " Asia " " South - eastern Asia " " 428963 " " 433285 " " +1.01 % " ]
[ " Belize " " Americas " " Central America " " 383071 " " " " +1.90 % " ]
[ " Bahamas " " Americas " " Caribbean " " 385637 " " 389482 " " +1.00 % " ]
[ " Martinique " " Americas " " Caribbean " " 375673 " " 375554 " " −0.03 % " ]
[ " Iceland " " Europe " " Northern Europe " " 336713 " " 339031 " " +0.69 % " ]
[ " Vanuatu " " Oceania " " Melanesia " " 292680 " " 299882 " " +2.46 % " ]
[ " Barbados " " Americas " " Caribbean " " 286641 " " 287025 " " +0.13 % " ]
[ " New Caledonia " " Oceania " " Melanesia " " 279993 " " 282750 " " +0.98 % " ]
[ " French Guiana " " Americas " " South America " " 275713 " " 282731 " " +2.55 % " ]
[ " French Polynesia " " Oceania " " Polynesia " " 277679 " " 279287 " " ]
[ " Mayotte " " Africa " " Eastern Africa " " 259531 " " 266150 " " +2.55 % " ]
[ " São Tomé and " " Africa " " Middle Africa " " 211028 " " 215056 " " +1.91 % " ]
[ " Samoa " " Oceania " " Polynesia " " 196129 " " 197097 " " ]
[ " Saint Lucia " " Americas " " Caribbean " " 181889 " " 182790 " " +0.50 % " ]
[ " Guernsey and Jersey " " Europe " " Northern Europe " " 170499 " " 172259 " " +1.03 % " ]
[ " Guam " " Oceania " " Micronesia " " 165768 " " 167294 " " +0.92 % " ]
[ " Curaçao " " Americas " " Caribbean " " 162752 " " 163424 " " +0.41 % " ]
[ " Kiribati " " Oceania " " Micronesia " " 115847 " " 117606 " " +1.52 % " ]
[ " F.S. Micronesia " " Oceania " " Micronesia " " 112640 " " 113815 " " +1.04 % " ]
[ " Grenada " " Americas " " Caribbean " " 111454 " " 112003 " " ]
[ " Tonga " " Oceania " " Polynesia " " 110589 " " 110940 " " +0.32 % " ]
[ " Saint Vincent and the Grenadines " " Americas " " Caribbean " " 110211 " " 110589 " " +0.34 % " ]
[ " Aruba " " Americas " " Caribbean " " 105845 " " 106314 " " +0.44 % " ]
[ " U.S. Virgin Islands " " Americas " " Caribbean " " 104680 " " 104578 " " −0.10 % " ]
[ " Seychelles " " Africa " " Eastern Africa " " 97096 " " 97739 " " +0.66 % " ]
[ " Antigua and " " Americas " " Caribbean " " 96286 " " " " +0.86 % " ]
[ " Isle of Man " " Europe " " Northern Europe " " 84077 " " 84584 " " +0.60 % " ]
[ " Andorra " " Europe " " Southern Europe " " 77006 " " 77142 " " +0.18 % " ]
[ " Dominica " " Americas " " Caribbean " " 71625 " " 71808 " " +0.26 % " ]
[ " Cayman Islands " " Americas " " Caribbean " " 64174 " " 64948 " " +1.21 % " ]
[ " Bermuda " " Americas " " Northern America " " 62756 " " 62506 " " −0.40 % " ]
[ " Marshall Islands " " Oceania " " Micronesia " " 58413 " " 58791 " " +0.65 % " ]
[ " Greenland " " Americas " " Northern America " " 56564 " " 56672 " " +0.19 % " ]
[ " Northern Mariana Islands " " Oceania " " Micronesia " " 56882 " " 56188 " " −1.22 % " ]
[ " American Samoa " " Oceania " " Polynesia " " 55465 " " 55312 " " −0.28 % " ]
[ " Saint Kitts and Nevis " " Americas " " Caribbean " " 52441 " " 52823 " " +0.73 % " ]
[ " Faroe Islands " " Europe " " Northern Europe " " 48497 " " 48678 " " +0.37 % " ]
[ " Sint Maarten " " Americas " " Caribbean " " 41940 " " 42388 " " +1.07 % " ]
[ " Monaco " " Europe " " Western Europe " " 38682 " " 38964 " " +0.73 % " ]
[ " Turks and Caicos Islands " " Americas " " Caribbean " " 37665 " " 38191 " " +1.40 % " ]
[ " Liechtenstein " " Europe " " Western Europe " " 37910 " " 38019 " " +0.29 % " ]
[ " San Marino " " Europe " " Southern Europe " " 33785 " " 33860 " " +0.22 % " ]
[ " Gibraltar " " Europe " " Southern Europe " " 33718 " " 33701 " " −0.05 % " ]
[ " British Virgin Islands " " Americas " " Caribbean " " 29802 " " 30030 " " +0.77 % " ]
[ " Caribbean Netherlands " " Americas " " Caribbean " " 25711 " " 25979 " " +1.04 % " ]
[ " Palau " " Oceania " " Micronesia " " 17907 " " 18008 " " +0.56 % " ]
[ " Cook Islands " " Oceania " " Polynesia " " 17518 " " 17548 " " +0.17 % " ]
[ " Anguilla " " Americas " " Caribbean " " 14731 " " 14869 " " +0.94 % " ]
[ " Tuvalu " " Oceania " " Polynesia " " 11508 " " 11646 " " +1.20 % " ]
[ " Wallis and Futuna " " Oceania " " Polynesia " " 11661 " " 11432 " " −1.96 % " ]
[ " Nauru " " Oceania " " Micronesia " " 10670 " " 10756 " " +0.81 % " ]
[ " Saint Helena , Ascension and " " Africa " " Western Africa " " 6035 " " 6059 " " +0.40 % " ]
[ " Saint Pierre and Miquelon " " Americas " " Northern America " " 5849 " " 5822 " " −0.46 % " ]
[ " Montserrat " " Americas " " Caribbean " " 4993 " " 4989 " " −0.08 % " ]
[ " Falkland Islands " " Americas " " South America " " 3234 " " 3377 " " +4.42 % " ]
[ " Niue " " Oceania " " Polynesia " " 1620 " " 1615 " " −0.31 % " ]
[ " Tokelau " " Oceania " " Polynesia " " 1319 " " 1340 " " +1.59 % " ]
[ " Vatican City " " Europe " " Southern Europe " " 801 " " 799 " " −0.25 % " ]
TODO create spec or use typed - clojure , e.g. Rate is percentage so it must be between 0 and 100
NetChange
FertilityRate
MedianAge
UrbanPopulationRate
[ " China " 1439323776 0.39 5540090 153 9388211 -348399 1.7 38 61 18.47 ]
[ " India " 1380004385 0.99 13586631 464 2973190 -532687 2.2 28 35 17.7 ]
[ " United States " 331002651 0.59 1937734 36 9147420 954806 1.8 38 83 4.25 ]
[ " Indonesia " 273523615 1.07 2898047 151 1811570 -98955 2.3 30 56 3.51 ]
[ " Pakistan " 220892340 2.0 4327022 287 770880 -233379 3.6 23 35 2.83 ]
[ " Brazil " 212559417 0.72 1509890 25 8358140 21200 1.7 33 88 2.73 ]
[ " Nigeria " 206139589 2.58 5175990 226 910770 -60000 5.4 18 52 2.64 ]
[ " Bangladesh " 164689383 1.01 1643222 1265 130170 -369501 2.1 28 39 2.11 ]
[ " Russia " 145934462 0.04 62206 9 16376870 182456 1.8 40 74 1.87 ]
[ " Mexico " 128932753 1.06 1357224 66 1943950 -60000 2.1 29 84 1.65 ]
[ " Japan " 126476461 -0.3 -383840 347 364555 71560 1.4 48 92 1.62 ]
[ " Ethiopia " 114963588 2.57 2884858 115 1000000 30000 4.3 19 21 1.47 ]
[ " Philippines " 109581078 1.35 1464463 368 298170 -67152 2.6 26 47 1.41 ]
[ " Egypt " 102334404 1.94 1946331 103 995450 -38033 3.3 25 43 1.31 ]
[ " Vietnam " 97338579 0.91 876473 314 310070 -80000 2.1 32 38 1.25 ]
[ " DR Congo " 89561403 3.19 2770836 40 2267050 23861 6.0 17 46 1.15 ]
[ " Turkey " 84339067 1.09 909452 110 769630 283922 2.1 32 76 1.08 ]
[ " Iran " 83992949 1.3 1079043 52 1628550 -55000 2.2 32 76 1.08 ]
[ " Germany " 83783942 0.32 266897 240 348560 543822 1.6 46 76 1.07 ]
[ " Thailand " 69799978 0.25 174396 137 510890 19444 1.5 40 51 0.9 ]
[ " United Kingdom " 67886011 0.53 355839 281 241930 260650 1.8 40 83 0.87 ]
[ " France " 65273511 0.22 143783 119 547557 36527 1.9 42 82 0.84 ]
[ " Italy " 60461826 -0.15 -88249 206 294140 148943 1.3 47 69 0.78 ]
[ " Tanzania " 59734218 2.98 1728755 67 885800 -40076 4.9 18 37 0.77 ]
[ " South Africa " 59308690 1.28 750420 49 1213090 145405 2.4 28 67 0.76 ]
[ " Myanmar " 54409800 0.67 364380 83 653290 -163313 2.2 29 31 0.7 ]
[ " Kenya " 53771296 2.28 1197323 94 569140 -10000 3.5 20 28 0.69 ]
[ " South Korea " 51269185 0.09 43877 527 97230 11731 1.1 44 82 0.66 ]
[ " Colombia " 50882891 1.08 543448 46 1109500 204796 1.8 31 80 0.65 ]
[ " Spain " 46754778 0.04 18002 94 498800 40000 1.3 45 80 0.6 ]
[ " Uganda " 45741007 3.32 1471413 229 199810 168694 5.0 17 26 0.59 ]
[ " Argentina " 45195774 0.93 415097 17 2736690 4800 2.3 32 93 0.58 ]
[ " Algeria " 43851044 1.85 797990 18 2381740 -10000 3.1 29 73 0.56 ]
[ " Sudan " 43849260 2.42 1036022 25 1765048 -50000 4.4 20 35 0.56 ]
[ " Ukraine " 43733762 -0.59 -259876 75 579320 10000 1.4 41 69 0.56 ]
[ " Iraq " 40222493 2.32 912710 93 434320 7834 3.7 21 73 0.52 ]
[ " Afghanistan " 38928346 2.33 886592 60 652860 -62920 4.6 18 25 0.5 ]
[ " Poland " 37846611 -0.11 -41157 124 306230 -29395 1.4 42 60 0.49 ]
[ " Canada " 37742154 0.89 331107 4 9093510 242032 1.5 41 81 0.48 ]
[ " Morocco " 36910560 1.2 438791 83 446300 -51419 2.4 30 64 0.47 ]
[ " Saudi Arabia " 34813871 1.59 545343 16 2149690 134979 2.3 32 84 0.45 ]
[ " Uzbekistan " 33469203 1.48 487487 79 425400 -8863 2.4 28 50 0.43 ]
[ " Peru " 32971854 1.42 461401 26 1280000 99069 2.3 31 79 0.42 ]
[ " Angola " 32866272 3.27 1040977 26 1246700 6413 5.6 17 67 0.42 ]
[ " Malaysia " 32365999 1.3 416222 99 328550 50000 2.0 30 78 0.42 ]
[ " Mozambique " 31255435 2.93 889399 40 786380 -5000 4.9 18 38 0.4 ]
[ " Ghana " 31072940 2.15 655084 137 227540 -10000 3.9 22 57 0.4 ]
[ " Yemen " 29825964 2.28 664042 56 527970 -30000 3.8 20 38 0.38 ]
[ " Venezuela " 28435940 -0.28 -79889 32 882050 -653249 2.3 30 nil 0.36 ]
[ " Madagascar " 27691018 2.68 721711 48 581795 -1500 4.1 20 39 0.36 ]
[ " Cameroon " 26545863 2.59 669483 56 472710 -4800 4.6 19 56 0.34 ]
[ " Côte d'Ivoire " 26378274 2.57 661730 83 318000 -8000 4.7 19 51 0.34 ]
[ " North Korea " 25778816 0.44 112655 214 120410 -5403 1.9 35 63 0.33 ]
[ " Australia " 25499884 1.18 296686 3 7682300 158246 1.8 38 86 0.33 ]
[ " Niger " 24206644 3.84 895929 19 1266700 4000 7.0 15 17 0.31 ]
[ " Taiwan " 23816775 0.18 42899 673 35410 30001 1.2 42 79 0.31 ]
[ " Burkina Faso " 20903273 2.86 581895 76 273600 -25000 5.2 18 31 0.27 ]
[ " Mali " 20250833 3.02 592802 17 1220190 -40000 5.9 16 44 0.26 ]
[ " Romania " 19237691 -0.66 -126866 84 230170 -73999 1.6 43 55 0.25 ]
[ " Malawi " 19129952 2.69 501205 203 94280 -16053 4.3 18 18 0.25 ]
[ " Chile " 19116201 0.87 164163 26 743532 111708 1.7 35 85 0.25 ]
[ " Kazakhstan " 18776707 1.21 225280 7 2699700 -18000 2.8 31 58 0.24 ]
[ " Zambia " 18383955 2.93 522925 25 743390 -8000 4.7 18 45 0.24 ]
[ " Guatemala " 17915568 1.9 334096 167 107160 -9215 2.9 23 52 0.23 ]
[ " Ecuador " 17643054 1.55 269392 71 248360 36400 2.4 28 63 0.23 ]
[ " Syria " 17500658 2.52 430523 95 183630 -427391 2.8 26 60 0.22 ]
[ " Netherlands " 17134872 0.22 37742 508 33720 16000 1.7 43 92 0.22 ]
[ " Senegal " 16743927 2.75 447563 87 192530 -20000 4.7 19 49 0.21 ]
[ " Cambodia " 16718965 1.41 232423 95 176520 -30000 2.5 26 24 0.21 ]
[ " Chad " 16425864 3.0 478988 13 1259200 2000 5.8 17 23 0.21 ]
[ " Somalia " 15893222 2.92 450317 25 627340 -40000 6.1 17 47 0.2 ]
[ " Zimbabwe " 14862924 1.48 217456 38 386850 -116858 3.6 19 38 0.19 ]
[ " Guinea " 13132795 2.83 361549 53 245720 -4000 4.7 18 39 0.17 ]
[ " Benin " 12123200 2.73 322049 108 112760 -2000 4.9 19 48 0.16 ]
[ " Burundi " 11890784 3.12 360204 463 25680 2001 5.5 17 14 0.15 ]
[ " Tunisia " 11818619 1.06 123900 76 155360 -4000 2.2 33 70 0.15 ]
[ " Bolivia " 11673021 1.39 159921 11 1083300 -9504 2.8 26 69 0.15 ]
[ " Haiti " 11402528 1.24 139451 414 27560 -35000 3.0 24 57 0.15 ]
[ " Cuba " 11326616 -0.06 -6867 106 106440 -14400 1.6 42 78 0.15 ]
[ " South Sudan " 11193725 1.19 131612 18 610952 -174200 4.7 19 25 0.14 ]
[ " Dominican Republic " 10847910 1.01 108952 225 48320 -30000 2.4 28 85 0.14 ]
[ " Czech Republic " 10708981 0.18 19772 139 77240 22011 1.6 43 74 0.14 ]
[ " Greece " 10423054 -0.48 -50401 81 128900 -16000 1.3 46 85 0.13 ]
[ " Jordan " 10203134 1.0 101440 115 88780 10220 2.8 24 91 0.13 ]
[ " Portugal " 10196709 -0.29 -29478 111 91590 -6000 1.3 46 66 0.13 ]
[ " Azerbaijan " 10139177 0.91 91459 123 82658 1200 2.1 32 56 0.13 ]
[ " Sweden " 10099265 0.63 62886 25 410340 40000 1.9 41 88 0.13 ]
[ " Honduras " 9904607 1.63 158490 89 111890 -6800 2.5 24 57 0.13 ]
[ " United Arab Emirates " 9890402 1.23 119873 118 83600 40000 1.4 33 86 0.13 ]
[ " Hungary " 9660351 -0.25 -24328 107 90530 6000 1.5 43 72 0.12 ]
[ " Tajikistan " 9537645 2.32 216627 68 139960 -20000 3.6 22 27 0.12 ]
[ " Belarus " 9449323 -0.03 -3088 47 202910 8730 1.7 40 79 0.12 ]
[ " Austria " 9006398 0.57 51296 109 82409 65000 1.5 43 57 0.12 ]
[ " Papua New Guinea " 8947024 1.95 170915 20 452860 -800 3.6 22 13 0.11 ]
[ " Serbia " 8737371 -0.4 -34864 100 87460 4000 1.5 42 56 0.11 ]
[ " Israel " 8655535 1.6 136158 400 21640 10000 3.0 30 93 0.11 ]
[ " Switzerland " 8654622 0.74 63257 219 39516 52000 1.5 43 74 0.11 ]
[ " Togo " 8278724 2.43 196358 152 54390 -2000 4.4 19 43 0.11 ]
[ " Sierra Leone " 7976983 2.1 163768 111 72180 -4200 4.3 19 43 0.1 ]
[ " Hong Kong " 7496981 0.82 60827 7140 1050 29308 1.3 45 nil 0.1 ]
[ " Laos " 7275560 1.48 106105 32 230800 -14704 2.7 24 36 0.09 ]
[ " Paraguay " 7132538 1.25 87902 18 397300 -16556 2.4 26 62 0.09 ]
[ " Bulgaria " 6948445 -0.74 -51674 64 108560 -4800 1.6 45 76 0.09 ]
[ " Libya " 6871292 1.38 93840 4 1759540 -1999 2.3 29 78 0.09 ]
[ " Lebanon " 6825445 -0.44 -30268 667 10230 -30012 2.1 30 78 0.09 ]
[ " Nicaragua " 6624554 1.21 79052 55 120340 -21272 2.4 26 57 0.08 ]
[ " Kyrgyzstan " 6524195 1.69 108345 34 191800 -4000 3.0 26 36 0.08 ]
[ " El Salvador " 6486205 0.51 32652 313 20720 -40539 2.1 28 73 0.08 ]
[ " Turkmenistan " 6031200 1.5 89111 13 469930 -5000 2.8 27 53 0.08 ]
[ " Singapore " 5850342 0.79 46005 8358 700 27028 1.2 42 nil 0.08 ]
[ " Denmark " 5792202 0.35 20326 137 42430 15200 1.8 42 88 0.07 ]
[ " Finland " 5540720 0.15 8564 18 303890 14000 1.5 43 86 0.07 ]
[ " Slovakia " 5459642 0.05 2629 114 48088 1485 1.5 41 54 0.07 ]
[ " Norway " 5421241 0.79 42384 15 365268 28000 1.7 40 83 0.07 ]
[ " Oman " 5106626 2.65 131640 16 309500 87400 2.9 31 87 0.07 ]
[ " State of Palestine " 5101414 2.41 119994 847 6020 -10563 3.7 21 80 0.07 ]
[ " Costa Rica " 5094118 0.92 46557 100 51060 4200 1.8 33 80 0.07 ]
[ " Liberia " 5057681 2.44 120307 53 96320 -5000 4.4 19 53 0.06 ]
[ " Central African Republic " 4829767 1.78 84582 8 622980 -40000 4.8 18 43 0.06 ]
[ " New Zealand " 4822233 0.82 39170 18 263310 14881 1.9 38 87 0.06 ]
[ " Mauritania " 4649658 2.74 123962 5 1030700 5000 4.6 20 57 0.06 ]
[ " Panama " 4314767 1.61 68328 58 74340 11200 2.5 30 68 0.06 ]
[ " Kuwait " 4270571 1.51 63488 240 17820 39520 2.1 37 nil 0.05 ]
[ " Croatia " 4105267 -0.61 -25037 73 55960 -8001 1.4 44 58 0.05 ]
[ " Moldova " 4033963 -0.23 -9300 123 32850 -1387 1.3 38 43 0.05 ]
[ " Georgia " 3989167 -0.19 -7598 57 69490 -10000 2.1 38 58 0.05 ]
[ " Eritrea " 3546421 1.41 49304 35 101000 -39858 4.1 19 63 0.05 ]
[ " Uruguay " 3473730 0.35 11996 20 175020 -3000 2.0 36 96 0.04 ]
[ " Bosnia and Herzegovina " 3280819 -0.61 -20181 64 51000 -21585 1.3 43 52 0.04 ]
[ " Mongolia " 3278290 1.65 53123 2 1553560 -852 2.9 28 67 0.04 ]
[ " Armenia " 2963243 0.19 5512 104 28470 -4998 1.8 35 63 0.04 ]
[ " Jamaica " 2961167 0.44 12888 273 10830 -11332 2.0 31 55 0.04 ]
[ " Qatar " 2881053 1.73 48986 248 11610 40000 1.9 32 96 0.04 ]
[ " Albania " 2877797 -0.11 -3120 105 27400 -14000 1.6 36 63 0.04 ]
[ " Puerto Rico " 2860853 -2.47 -72555 323 8870 -97986 1.2 44 nil 0.04 ]
[ " Lithuania " 2722289 -1.35 -37338 43 62674 -32780 1.7 45 71 0.03 ]
[ " Namibia " 2540905 1.86 46375 3 823290 -4806 3.4 22 55 0.03 ]
[ " Gambia " 2416668 2.94 68962 239 10120 -3087 5.3 18 59 0.03 ]
[ " Botswana " 2351627 2.08 47930 4 566730 3000 2.9 24 73 0.03 ]
[ " Gabon " 2225734 2.45 53155 9 257670 3260 4.0 23 87 0.03 ]
[ " Lesotho " 2142249 0.8 16981 71 30360 -10047 3.2 24 31 0.03 ]
[ " North Macedonia " 2083374 0.0 -85 83 25220 -1000 1.5 39 59 0.03 ]
[ " Slovenia " 2078938 0.01 284 103 20140 2000 1.6 45 55 0.03 ]
[ " Guinea - Bissau " 1968001 2.45 47079 70 28120 -1399 4.5 19 45 0.03 ]
[ " Latvia " 1886198 -1.08 -20545 30 62200 -14837 1.7 44 69 0.02 ]
[ " Bahrain " 1701575 3.68 60403 2239 760 47800 2.0 32 89 0.02 ]
[ " Equatorial Guinea " 1402985 3.47 46999 50 28050 16000 4.6 22 73 0.02 ]
[ " Trinidad and Tobago " 1399488 0.32 4515 273 5130 -800 1.7 36 52 0.02 ]
[ " Estonia " 1326535 0.07 887 31 42390 3911 1.6 42 68 0.02 ]
[ " Timor - Leste " 1318445 1.96 25326 89 14870 -5385 4.1 21 33 0.02 ]
[ " Mauritius " 1271768 0.17 2100 626 2030 0 1.4 37 41 0.02 ]
[ " Cyprus " 1207359 0.73 8784 131 9240 5000 1.3 37 67 0.02 ]
[ " Eswatini " 1160164 1.05 12034 67 17200 -8353 3.0 21 30 0.01 ]
[ " Djibouti " 988000 1.48 14440 43 23180 900 2.8 27 79 0.01 ]
[ " Fiji " 896445 0.73 6492 49 18270 -6202 2.8 28 59 0.01 ]
[ " Comoros " 869601 2.2 18715 467 1861 -2000 4.2 20 29 0.01 ]
[ " Guyana " 786552 0.48 3786 4 196850 -6000 2.5 27 27 0.01 ]
[ " Solomon Islands " 686884 2.55 17061 25 27990 -1600 4.4 20 23 0.01 ]
[ " Macao " 649335 1.39 8890 21645 30 5000 1.2 39 nil 0.01 ]
[ " Montenegro " 628066 0.01 79 47 13450 -480 1.8 39 68 0.01 ]
[ " Luxembourg " 625978 1.66 10249 242 2590 9741 1.5 40 88 0.01 ]
[ " Western Sahara " 597339 2.55 14876 2 266000 5582 2.4 28 87 0.01 ]
[ " Suriname " 586632 0.9 5260 4 156000 -1000 2.4 29 65 0.01 ]
[ " Cabo Verde " 555987 1.1 6052 138 4030 -1342 2.3 28 68 0.01 ]
[ " Maldives " 540544 1.81 9591 1802 300 11370 1.9 30 35 0.01 ]
[ " Malta " 441543 0.27 1171 1380 320 900 1.5 43 93 0.01 ]
[ " Brunei " 437479 0.97 4194 83 5270 0 1.8 32 80 0.01 ]
[ " Guadeloupe " 400124 0.02 68 237 1690 -1440 2.2 44 nil 0.01 ]
[ " Belize " 397628 1.86 7275 17 22810 1200 2.3 25 46 0.01 ]
[ " Bahamas " 393244 0.97 3762 39 10010 1000 1.8 32 86 0.01 ]
[ " Martinique " 375265 -0.08 -289 354 1060 -960 1.9 47 92 0.0 ]
[ " Iceland " 341243 0.65 2212 3 100250 380 1.8 37 94 0.0 ]
[ " Vanuatu " 307145 2.42 7263 25 12190 120 3.8 21 24 0.0 ]
[ " French Guiana " 298682 2.7 7850 4 82200 1200 3.4 25 87 0.0 ]
[ " Barbados " 287375 0.12 350 668 430 -79 1.6 40 31 0.0 ]
[ " New Caledonia " 285498 0.97 2748 16 18280 502 2.0 34 72 0.0 ]
[ " French Polynesia " 280908 0.58 1621 77 3660 -1000 2.0 34 64 0.0 ]
[ " Mayotte " 272815 2.5 6665 728 375 0 3.7 20 46 0.0 ]
[ " Sao Tome & Principe " 219159 1.91 4103 228 960 -1680 4.4 19 74 0.0 ]
[ " Samoa " 198414 0.67 1317 70 2830 -2803 3.9 22 18 0.0 ]
[ " Saint Lucia " 183627 0.46 837 301 610 0 1.4 34 19 0.0 ]
[ " Channel Islands " 173863 0.93 1604 915 190 1351 1.5 43 30 0.0 ]
[ " Guam " 168775 0.89 1481 313 540 -506 2.3 31 95 0.0 ]
[ " Curaçao " 164093 0.41 669 370 444 515 1.8 42 89 0.0 ]
[ " Kiribati " 119449 1.57 1843 147 810 -800 3.6 23 57 0.0 ]
[ " Micronesia " 115023 1.06 1208 164 700 -600 3.1 24 21 0.0 ]
[ " Grenada " 112523 0.46 520 331 340 -200 2.1 32 35 0.0 ]
[ " St. Vincent & Grenadines " 110940 0.32 351 284 390 -200 1.9 33 53 0.0 ]
[ " Aruba " 106766 0.43 452 593 180 201 1.9 41 44 0.0 ]
[ " Tonga " 105695 1.15 1201 147 720 -800 3.6 22 24 0.0 ]
[ " U.S. Virgin Islands " 104425 -0.15 -153 298 350 -451 2.0 43 96 0.0 ]
[ " Seychelles " 98347 0.62 608 214 460 -200 2.5 34 56 0.0 ]
[ " Antigua and " 97929 0.84 811 223 440 0 2.0 34 26 0.0 ]
[ " Isle of Man " 85033 0.53 449 149 570 nil nil 53 0.0 ]
[ " Andorra " 77265 0.16 123 164 470 nil nil 88 0.0 ]
[ " Dominica " 71986 0.25 178 96 750 nil nil 74 0.0 ]
[ " Bermuda " 62278 -0.36 -228 1246 50 nil nil 97 0.0 ]
[ " Marshall Islands " 59190 0.68 399 329 180 nil nil 70 0.0 ]
[ " Northern Mariana Islands " 57559 0.6 343 125 460 nil nil 88 0.0 ]
[ " Greenland " 56770 0.17 98 0 410450 nil nil 87 0.0 ]
[ " American Samoa " 55191 -0.22 -121 276 200 nil nil 88 0.0 ]
[ " Saint Kitts & Nevis " 53199 0.71 376 205 260 nil nil 33 0.0 ]
[ " Faeroe Islands " 48863 0.38 185 35 1396 nil nil 43 0.0 ]
[ " Sint Maarten " 42876 1.15 488 1261 34 nil nil 96 0.0 ]
[ " Monaco " 39242 0.71 278 26337 1 nil nil nil 0.0 ]
[ " Turks and Caicos " 38717 1.38 526 41 950 nil nil 89 0.0 ]
[ " Saint Martin " 38666 1.75 664 730 53 nil nil 0 0.0 ]
[ " Liechtenstein " 38128 0.29 109 238 160 nil nil 15 0.0 ]
[ " San Marino " 33931 0.21 71 566 60 nil nil 97 0.0 ]
[ " British Virgin Islands " 30231 0.67 201 202 150 nil nil 52 0.0 ]
[ " Caribbean Netherlands " 26223 0.94 244 80 328 nil nil 75 0.0 ]
[ " Palau " 18094 0.48 86 39 460 nil nil nil 0.0 ]
[ " Cook Islands " 17564 0.09 16 73 240 nil nil 75 0.0 ]
[ " Anguilla " 15003 0.9 134 167 90 nil nil nil 0.0 ]
[ " Tuvalu " 11792 1.25 146 393 30 nil nil 62 0.0 ]
[ " Wallis & Futuna " 11239 -1.69 -193 80 140 nil nil 0 0.0 ]
[ " Nauru " 10824 0.63 68 541 20 nil nil nil 0.0 ]
[ " Saint Barthelemy " 9877 0.3 30 470 21 nil nil 0 0.0 ]
[ " Saint Helena " 6077 0.3 18 16 390 nil nil 27 0.0 ]
[ " Saint Pierre & Miquelon " 5794 -0.48 -28 25 230 nil nil 100 0.0 ]
[ " Montserrat " 4992 0.06 3 50 100 nil nil 10 0.0 ]
[ " Falkland Islands " 3480 3.05 103 0 12170 nil nil 66 0.0 ]
[ " Niue " 1626 0.68 11 6 260 nil nil 46 0.0 ]
[ " Tokelau " 1357 1.27 17 136 10 nil nil 0 0.0 ]
[ " Holy See " 801 0.25 2 2003 0 nil nil nil 0.0 ]
[ ( ccr / get - country - code Country ) Population ] ) )
( defn growth - per - report
" Population : number of people at the beginning of the year
YearlyChangeRate : in percents E.g. :
|
d216935f69e5f25cbaebb3d8335efe6c01dd91a1d0292b201f1c9978d4477925 | arttuka/reagent-material-ui | face_6_two_tone.cljs | (ns reagent-mui.icons.face-6-two-tone
"Imports @mui/icons-material/Face6TwoTone as a Reagent component."
(:require-macros [reagent-mui.util :refer [create-svg-icon e]])
(:require [react :as react]
["@mui/material/SvgIcon" :as SvgIcon]
[reagent-mui.util]))
(def face-6-two-tone (create-svg-icon [(e "path" #js {"d" "M6.55 7.66C7.06 6.64 8.09 6 9.24 6h5.53c1.14 0 2.17.64 2.68 1.66.94 1.87 1.66 2.08 2.26 2.24C18.78 6.51 15.68 4 12 4S5.22 6.51 4.29 9.9c.68-.18 1.33-.38 2.26-2.24z", "opacity" ".3"}) (e "path" #js {"d" "M12 2C6.48 2 2 6.48 2 12s4.48 10 10 10 10-4.48 10-10S17.52 2 12 2zm0 2c3.68 0 6.78 2.51 7.71 5.9-.6-.16-1.33-.37-2.26-2.24C16.94 6.64 15.91 6 14.76 6H9.24c-1.15 0-2.18.64-2.69 1.66-.93 1.86-1.58 2.06-2.26 2.24C5.22 6.51 8.32 4 12 4zm0 16c-4.41 0-8-3.59-8-8v-.03c2.31-.22 3.43-1.59 4.34-3.41.17-.35.51-.56.9-.56h5.53c.38 0 .72.21.89.55.9 1.8 1.99 3.19 4.34 3.41v.03c0 4.42-3.59 8.01-8 8.01z"}) (e "circle" #js {"cx" "9", "cy" "13", "r" "1.25"}) (e "circle" #js {"cx" "15", "cy" "13", "r" "1.25"})]
"Face6TwoTone"))
| null | https://raw.githubusercontent.com/arttuka/reagent-material-ui/14103a696c41c0eb67fc07fc67cd8799efd88cb9/src/icons/reagent_mui/icons/face_6_two_tone.cljs | clojure | (ns reagent-mui.icons.face-6-two-tone
"Imports @mui/icons-material/Face6TwoTone as a Reagent component."
(:require-macros [reagent-mui.util :refer [create-svg-icon e]])
(:require [react :as react]
["@mui/material/SvgIcon" :as SvgIcon]
[reagent-mui.util]))
(def face-6-two-tone (create-svg-icon [(e "path" #js {"d" "M6.55 7.66C7.06 6.64 8.09 6 9.24 6h5.53c1.14 0 2.17.64 2.68 1.66.94 1.87 1.66 2.08 2.26 2.24C18.78 6.51 15.68 4 12 4S5.22 6.51 4.29 9.9c.68-.18 1.33-.38 2.26-2.24z", "opacity" ".3"}) (e "path" #js {"d" "M12 2C6.48 2 2 6.48 2 12s4.48 10 10 10 10-4.48 10-10S17.52 2 12 2zm0 2c3.68 0 6.78 2.51 7.71 5.9-.6-.16-1.33-.37-2.26-2.24C16.94 6.64 15.91 6 14.76 6H9.24c-1.15 0-2.18.64-2.69 1.66-.93 1.86-1.58 2.06-2.26 2.24C5.22 6.51 8.32 4 12 4zm0 16c-4.41 0-8-3.59-8-8v-.03c2.31-.22 3.43-1.59 4.34-3.41.17-.35.51-.56.9-.56h5.53c.38 0 .72.21.89.55.9 1.8 1.99 3.19 4.34 3.41v.03c0 4.42-3.59 8.01-8 8.01z"}) (e "circle" #js {"cx" "9", "cy" "13", "r" "1.25"}) (e "circle" #js {"cx" "15", "cy" "13", "r" "1.25"})]
"Face6TwoTone"))
| |
372719972dd6f78a861c2c3ac551f3729b0d8aefd61c08c3770ae647d9d1c602 | gogins/csound-extended-nudruz | nudruz-csound.lisp | ; C O M M O N M U S I C C F F I I N T E R F A C E T O C S O U N D
;
Copyright ( C ) 2016
;
; This file belongs to Csound.
;
; This software is free software; you can redistribute it and/or
; modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation ; either
version 2.1 of the License , or ( at your option ) any later version .
;
; This software is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
; Lesser General Public License for more details.
;
You should have received a copy of the GNU Lesser General Public
; License along with this software; if not, write to the Free Software
Foundation , Inc. , 51 Franklin St , Fifth Floor , Boston , MA 02110 - 1301 USA
;
; This file is handwritten and should be maintained by keeping it up to date
with regard to include / csound.h . This file is not intended to be complete
and essentially defines a Steel Bank Common Lisp interface to a subset of
the most useful functions in csound.h . At the present time , only pointers ,
; strings, and other primitive types are used in this interface.
(in-package :cm)
(set-dispatch-macro-character #\# #\> #'cl-heredoc:read-heredoc)
(defun event-to-istatement (event channel-offset velocity-scale arrangement)
"Translates a Common Music MIDI channel event to a Csound score event
(i-statement), which is terminated with a newline. An offset, which may
be any number, is added to the MIDI channel number. After that, if the
arrangement parameter is not nil, the existing event is remapped to a new
instrument number and the velocity is modified. MIDI events that are
not channel events are included, but as comments."
(let
((insno)
(midikey)
(velocity)
(pan 0.5))
(if (string-equal (class-name (class-of event)) "MIDI")
(progn
; (inspect event)
(setf insno (+ channel-offset (midi-channel event)))
(setf velocity (* velocity-scale (midi-amplitude event)))
(setf midikey (keynum (midi-keynum event)))
(when arrangement
(setf pan (third (gethash insno arrangement)))
(setf velocity (+ velocity (second (gethash insno arrangement))))
(setf insno (first (gethash insno arrangement))))
(format nil "i ~,6f ~,6f ~,6f ~,6f ~,6f 0 ~,6f 0 0 0 0~%" insno (object-time event)(midi-duration event)
midikey velocity pan))
(format nil "; ~a~%" event)
)))
(export 'event-to-istatement)
(defun replace-all (string part replacement &key (test #'char=))
"Replaces all occurences of the string 'part' in 'string' with 'replacement',
using 'test' for character equality."
(with-output-to-string (out)
(loop with part-length = (length part)
for old-pos = 0 then (+ pos part-length)
for pos = (search part string
:start2 old-pos
:test test)
do (write-string string out
:start old-pos
:end (or pos (length string)))
when pos do (write-string replacement out)
while pos)))
(defun seq-to-sco (seq &optional (channel-offset 1) (velocity-scale 127) &key (arrangement nil))
"Translates all MIDI channel events in a Common Music SEQ object to Csound sco text,
with an optional channel offset and velocity scaling. The arrangement
parameter, if passed, is used to reassign the instrument numbers and
add to/subtract from the MIDI velocities in the sequence. The arrangement
consists of a hashtable mapping original Csound instrument numbers
to a list '(new-inso add-velocity pan)."
(let
((score-list (list))
(score-text "")
(sco-text ""))
(progn
(format t "Building Csound sco from seq...~%")
(defun curried-event-to-istatement (event)
(event-to-istatement event channel-offset velocity-scale arrangement))
(setq score-list (mapcar 'curried-event-to-istatement (subobjects seq)))
(setq sco-text (format nil "~{~A~^~}" score-list))
)
)
)
(defun csd-to-file (name content)
"Writes the contents of a CSD to a file, replacing the file if it exists."
(with-open-file (stream name
:direction :output
:if-exists :supersede
:if-does-not-exist :create )
(write-line content stream)))
(defun seq-to-lilypond (sequence filename fomus-parts partids-for-channels voices-for-channels &key (title nil)(subtitle nil)(composer nil))
"Attempts to translate MIDI events in the sequence to a Lilypond score using
Fomus (but does not always succeed). MIDI channels must be assigned to
Lilypond part IDs and Lilypond voices in the hashtables. If the :voice
parameter is a list, it should contain as many voices for the corresponding
channel as there actually are in that channel."
(let
((fomus-events (list)))
(progn
(format t "Building Lilypond score ~A from seq...~%" filename)
(defun midi-event-to-fomus-event (event)
(new fomus:note
:partid (gethash (midi-channel event) partids-for-channels)
:off (object-time event)
:dur (midi-duration event)
:note (midi-keynum event)
:voice (gethash (midi-channel event) voices-for-channels)
)
)
(setf fomus-events (mapcar 'midi-event-to-fomus-event (subobjects sequence)))
(format t "Generated: ~d Fomus events.~%" (list-length fomus-events))
(setf fomus-events (remove-duplicates fomus-events
:test #'(lambda (x y)
(equal (format nil "~A" x) (format nil "~A" y))
)))
(format t "Removed duplicates: ~d Fomus events.~%" (list-length fomus-events))
(events (new seq :name "Lilypond" :subobjects fomus-events) filename :auto-voicing t :verbose 2 :auto-quantize t :view t :parts (list fomus-parts) :title title :subtitle subtitle :composer composer)
CMN output does not produce a usable score , see # 61 .
( events ( new seq : name " Common Music Notation " : subobjects - events ) " temp.eps " )
)
)
)
(defun seq-to-midifile (sequence filename)
"Writes a sequence containing MIDI events to a MIDI file, replacing the file if
it exists."
(events sequence filename :play nil)
)
(defun build-csd (orc &key (sco "")(options "--midi-key=4 --midi-velocity=5 -m195 -+msg_color=0 -RWdf")(output "dac"))
(let
((csd "")
(csd-template "<CsoundSynthesizer>
<CsOptions>
~A -o ~A
</CsOptions>
<CsInstruments>
~A
</CsInstruments>
<CsScore>
~A
</CsScore>
</CsoundSynthesizer>
~%")
)
(setq csd (format nil csd-template options output orc sco))
)
)
(defun render-with-orc (sequence orc &key (options "--midi-key=4 --midi-velocity=5 -m195 -+msg_color=0 -RWdf")
(output "dac")(channel-offset 1)(velocity-scale 127)(csound-instance nil)
(csd-filename "tmp-generated.csd")(arrangement nil))
(let
((csd "")
(sco-text "")
(result 0))
(progn
(setq csd (build-csd orc :options options :output output))
(setq result (render-with-csd sequence csd :channel-offset channel-offset :velocity-scale velocity-scale :csound-instance csound-instance :csd-filename csd-filename :arrangement arrangement))
)
)
)
(defun render-with-csd (seq csd &key (channel-offset 1)(velocity-scale 127)
(csound-instance nil)(csd-filename "temp-csd.csd")(arrangement nil))
"Given a Common Music 'seq', translates each of its MIDI events into a Csound
'i' statement, optionally offsetting the channel number and/or rescaling MIDI
velocity, then renders the resulting score using the Csound 'csd'. The
generated score is appended to the <CsScore> element of `csd`. It is
possible to call csoundReadScore during the performance. This function returns
the Csound object that it uses.
The csound parameter is used to call Csound if passed. This enables
render-with-csound to be run in a separate thread of execution, and for the
caller to control Csound instrument parameters during real time performance,
e.g.
(setq csound (csoundCreate 0))
(setq my-thread (bt:make-thread (lambda () (render-with-csound cs csd 1 127 csound))))
(csoundSetControlChannel csound 'mychannel' myvalue)
(bt:join-thread my-thread)
A copy of the .csd file that is rendered is saved for archival purposes."
(let
((score-list (list))
(cs 0)
(sco-text "")
(result 0)
(new-csd-text "")
(csd-pointer 0))
(progn
(setq sco-text (seq-to-sco seq channel-offset velocity-scale :arrangement arrangement))
(setq new-csd-text (replace-all csd "</CsScore>" (concatenate 'string sco-text "</CsScore>")))
; (format t "new-csd-text:~%~A~%" new-csd-text)
(csd-to-file csd-filename new-csd-text)
(setq csd-pointer (cffi:foreign-string-alloc new-csd-text))
(if csound-instance
(setq cs csound-instance)
(progn
(setq result (csound:csoundInitialize 3))
(setq cs (csound:csoundCreate (cffi:null-pointer)))
(format t "csoundCreate returned: ~S.~%" cs)
)
)
; Not sure why, but cffi:with-foreign-string doesn't seem to work.
(setq result (csound:csoundCompileCsdText cs csd-pointer))
(format t "csoundCompileCsdText returned: ~D.~%" result)
(setq result (csound:csoundStart cs))
(format t "csoundStart returned: ~D.~%" result)
(loop
(setq result (csound:csoundPerformKsmps cs))
(when (not (equal result 0)) (return))
)
(setq result (csound:csoundCleanup cs))
(format t "csoundCleanup returned: ~D.~%" result)
(sleep 5)
(if (not csound-instance)
(csound:csoundDestroy cs)
(format t "csoundDestroy was called.~%")
)
(cffi:foreign-string-free csd-pointer)
(format t "The Csound performance has ended: ~D.~%" result)
)
)
)
(defun midifile-to-seq (midi-filename)
"Import a standard midi file to a Common Music seq object such that notes
are not CM::MIDI-EVENT binary objects, but rather regular CM::MIDI objects
that can be processed in Common Music."
(let
((raw-seq)
(cooked-seq))
(setf raw-seq (import-events midi-filename :tracks true :meta-exclude true))
(setf cooked-seq (new seq :name "csound-seq"))
(events raw-seq cooked-seq)
)
)
(defun cope-events-to-seq (cope-events)
"Translates an event list produced by David Cope's 'Computer Models of Musical
Creativity' software into a CM::SEQ object."
(let
((midi-events))
(defun cope-event-to-midi-event (event)
(new midi
:time(/ (first event) 1000.)
:keynum (second event)
:amplitude (/ (fifth event) 127.)
:channel (- (fourth event) 1)
:duration (/ (third event) 1000.)
)
)
(setf midi-events (mapcar 'cope-event-to-midi-event cope-events))
;(print midi-events)
(format t "Translated: ~d MIDI events.~%" (list-length midi-events))
(new seq :name "cm-seq" :subobjects midi-events)
)
)
| null | https://raw.githubusercontent.com/gogins/csound-extended-nudruz/4551d54890f4adbadc5db8f46cc24af8e92fb9e9/sources/nudruz-csound.lisp | lisp | C O M M O N M U S I C C F F I I N T E R F A C E T O C S O U N D
This file belongs to Csound.
This software is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
either
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
License along with this software; if not, write to the Free Software
This file is handwritten and should be maintained by keeping it up to date
strings, and other primitive types are used in this interface.
(inspect event)
(format t "new-csd-text:~%~A~%" new-csd-text)
Not sure why, but cffi:with-foreign-string doesn't seem to work.
(print midi-events) | Copyright ( C ) 2016
version 2.1 of the License , or ( at your option ) any later version .
You should have received a copy of the GNU Lesser General Public
Foundation , Inc. , 51 Franklin St , Fifth Floor , Boston , MA 02110 - 1301 USA
with regard to include / csound.h . This file is not intended to be complete
and essentially defines a Steel Bank Common Lisp interface to a subset of
the most useful functions in csound.h . At the present time , only pointers ,
(in-package :cm)
(set-dispatch-macro-character #\# #\> #'cl-heredoc:read-heredoc)
(defun event-to-istatement (event channel-offset velocity-scale arrangement)
"Translates a Common Music MIDI channel event to a Csound score event
(i-statement), which is terminated with a newline. An offset, which may
be any number, is added to the MIDI channel number. After that, if the
arrangement parameter is not nil, the existing event is remapped to a new
instrument number and the velocity is modified. MIDI events that are
not channel events are included, but as comments."
(let
((insno)
(midikey)
(velocity)
(pan 0.5))
(if (string-equal (class-name (class-of event)) "MIDI")
(progn
(setf insno (+ channel-offset (midi-channel event)))
(setf velocity (* velocity-scale (midi-amplitude event)))
(setf midikey (keynum (midi-keynum event)))
(when arrangement
(setf pan (third (gethash insno arrangement)))
(setf velocity (+ velocity (second (gethash insno arrangement))))
(setf insno (first (gethash insno arrangement))))
(format nil "i ~,6f ~,6f ~,6f ~,6f ~,6f 0 ~,6f 0 0 0 0~%" insno (object-time event)(midi-duration event)
midikey velocity pan))
(format nil "; ~a~%" event)
)))
(export 'event-to-istatement)
(defun replace-all (string part replacement &key (test #'char=))
"Replaces all occurences of the string 'part' in 'string' with 'replacement',
using 'test' for character equality."
(with-output-to-string (out)
(loop with part-length = (length part)
for old-pos = 0 then (+ pos part-length)
for pos = (search part string
:start2 old-pos
:test test)
do (write-string string out
:start old-pos
:end (or pos (length string)))
when pos do (write-string replacement out)
while pos)))
(defun seq-to-sco (seq &optional (channel-offset 1) (velocity-scale 127) &key (arrangement nil))
"Translates all MIDI channel events in a Common Music SEQ object to Csound sco text,
with an optional channel offset and velocity scaling. The arrangement
parameter, if passed, is used to reassign the instrument numbers and
add to/subtract from the MIDI velocities in the sequence. The arrangement
consists of a hashtable mapping original Csound instrument numbers
to a list '(new-inso add-velocity pan)."
(let
((score-list (list))
(score-text "")
(sco-text ""))
(progn
(format t "Building Csound sco from seq...~%")
(defun curried-event-to-istatement (event)
(event-to-istatement event channel-offset velocity-scale arrangement))
(setq score-list (mapcar 'curried-event-to-istatement (subobjects seq)))
(setq sco-text (format nil "~{~A~^~}" score-list))
)
)
)
(defun csd-to-file (name content)
"Writes the contents of a CSD to a file, replacing the file if it exists."
(with-open-file (stream name
:direction :output
:if-exists :supersede
:if-does-not-exist :create )
(write-line content stream)))
(defun seq-to-lilypond (sequence filename fomus-parts partids-for-channels voices-for-channels &key (title nil)(subtitle nil)(composer nil))
"Attempts to translate MIDI events in the sequence to a Lilypond score using
Fomus (but does not always succeed). MIDI channels must be assigned to
Lilypond part IDs and Lilypond voices in the hashtables. If the :voice
parameter is a list, it should contain as many voices for the corresponding
channel as there actually are in that channel."
(let
((fomus-events (list)))
(progn
(format t "Building Lilypond score ~A from seq...~%" filename)
(defun midi-event-to-fomus-event (event)
(new fomus:note
:partid (gethash (midi-channel event) partids-for-channels)
:off (object-time event)
:dur (midi-duration event)
:note (midi-keynum event)
:voice (gethash (midi-channel event) voices-for-channels)
)
)
(setf fomus-events (mapcar 'midi-event-to-fomus-event (subobjects sequence)))
(format t "Generated: ~d Fomus events.~%" (list-length fomus-events))
(setf fomus-events (remove-duplicates fomus-events
:test #'(lambda (x y)
(equal (format nil "~A" x) (format nil "~A" y))
)))
(format t "Removed duplicates: ~d Fomus events.~%" (list-length fomus-events))
(events (new seq :name "Lilypond" :subobjects fomus-events) filename :auto-voicing t :verbose 2 :auto-quantize t :view t :parts (list fomus-parts) :title title :subtitle subtitle :composer composer)
CMN output does not produce a usable score , see # 61 .
( events ( new seq : name " Common Music Notation " : subobjects - events ) " temp.eps " )
)
)
)
(defun seq-to-midifile (sequence filename)
"Writes a sequence containing MIDI events to a MIDI file, replacing the file if
it exists."
(events sequence filename :play nil)
)
(defun build-csd (orc &key (sco "")(options "--midi-key=4 --midi-velocity=5 -m195 -+msg_color=0 -RWdf")(output "dac"))
(let
((csd "")
(csd-template "<CsoundSynthesizer>
<CsOptions>
~A -o ~A
</CsOptions>
<CsInstruments>
~A
</CsInstruments>
<CsScore>
~A
</CsScore>
</CsoundSynthesizer>
~%")
)
(setq csd (format nil csd-template options output orc sco))
)
)
(defun render-with-orc (sequence orc &key (options "--midi-key=4 --midi-velocity=5 -m195 -+msg_color=0 -RWdf")
(output "dac")(channel-offset 1)(velocity-scale 127)(csound-instance nil)
(csd-filename "tmp-generated.csd")(arrangement nil))
(let
((csd "")
(sco-text "")
(result 0))
(progn
(setq csd (build-csd orc :options options :output output))
(setq result (render-with-csd sequence csd :channel-offset channel-offset :velocity-scale velocity-scale :csound-instance csound-instance :csd-filename csd-filename :arrangement arrangement))
)
)
)
(defun render-with-csd (seq csd &key (channel-offset 1)(velocity-scale 127)
(csound-instance nil)(csd-filename "temp-csd.csd")(arrangement nil))
"Given a Common Music 'seq', translates each of its MIDI events into a Csound
'i' statement, optionally offsetting the channel number and/or rescaling MIDI
velocity, then renders the resulting score using the Csound 'csd'. The
generated score is appended to the <CsScore> element of `csd`. It is
possible to call csoundReadScore during the performance. This function returns
the Csound object that it uses.
The csound parameter is used to call Csound if passed. This enables
render-with-csound to be run in a separate thread of execution, and for the
caller to control Csound instrument parameters during real time performance,
e.g.
(setq csound (csoundCreate 0))
(setq my-thread (bt:make-thread (lambda () (render-with-csound cs csd 1 127 csound))))
(csoundSetControlChannel csound 'mychannel' myvalue)
(bt:join-thread my-thread)
A copy of the .csd file that is rendered is saved for archival purposes."
(let
((score-list (list))
(cs 0)
(sco-text "")
(result 0)
(new-csd-text "")
(csd-pointer 0))
(progn
(setq sco-text (seq-to-sco seq channel-offset velocity-scale :arrangement arrangement))
(setq new-csd-text (replace-all csd "</CsScore>" (concatenate 'string sco-text "</CsScore>")))
(csd-to-file csd-filename new-csd-text)
(setq csd-pointer (cffi:foreign-string-alloc new-csd-text))
(if csound-instance
(setq cs csound-instance)
(progn
(setq result (csound:csoundInitialize 3))
(setq cs (csound:csoundCreate (cffi:null-pointer)))
(format t "csoundCreate returned: ~S.~%" cs)
)
)
(setq result (csound:csoundCompileCsdText cs csd-pointer))
(format t "csoundCompileCsdText returned: ~D.~%" result)
(setq result (csound:csoundStart cs))
(format t "csoundStart returned: ~D.~%" result)
(loop
(setq result (csound:csoundPerformKsmps cs))
(when (not (equal result 0)) (return))
)
(setq result (csound:csoundCleanup cs))
(format t "csoundCleanup returned: ~D.~%" result)
(sleep 5)
(if (not csound-instance)
(csound:csoundDestroy cs)
(format t "csoundDestroy was called.~%")
)
(cffi:foreign-string-free csd-pointer)
(format t "The Csound performance has ended: ~D.~%" result)
)
)
)
(defun midifile-to-seq (midi-filename)
"Import a standard midi file to a Common Music seq object such that notes
are not CM::MIDI-EVENT binary objects, but rather regular CM::MIDI objects
that can be processed in Common Music."
(let
((raw-seq)
(cooked-seq))
(setf raw-seq (import-events midi-filename :tracks true :meta-exclude true))
(setf cooked-seq (new seq :name "csound-seq"))
(events raw-seq cooked-seq)
)
)
(defun cope-events-to-seq (cope-events)
"Translates an event list produced by David Cope's 'Computer Models of Musical
Creativity' software into a CM::SEQ object."
(let
((midi-events))
(defun cope-event-to-midi-event (event)
(new midi
:time(/ (first event) 1000.)
:keynum (second event)
:amplitude (/ (fifth event) 127.)
:channel (- (fourth event) 1)
:duration (/ (third event) 1000.)
)
)
(setf midi-events (mapcar 'cope-event-to-midi-event cope-events))
(format t "Translated: ~d MIDI events.~%" (list-length midi-events))
(new seq :name "cm-seq" :subobjects midi-events)
)
)
|
accc49fcc5424ca9f930d7b5638c848f9b89fc019b1a1701b34be1c9374b561d | hawkir/calispel | null-queue.lisp | (in-package #:calispel)
(defclass null-queue (jpl-queues:queue)
()
(:documentation "The null queue. Used for unbuffered CHANNELs.
Think of it as the NULL class, but for queues."))
(defmethod jpl-queues:empty? ((queue null-queue))
t)
(defmethod jpl-queues:full? ((queue null-queue))
t)
(defmethod jpl-queues:size ((queue null-queue))
0)
(defmethod jpl-queues:capacity ((queue null-queue))
0)
(defmethod jpl-queues:enqueue (object (queue null-queue))
(error "It is an error to ENQUEUE to a NULL-QUEUE."))
(defmethod jpl-queues:dequeue ((queue null-queue))
(error "It is an error to DEQUEUE from a NULL-QUEUE."))
(defmethod jpl-queues:dequeue-object-if (predicate (queue null-queue) &key &allow-other-keys)
;; We can guarantee that no matching OBJECT is in this QUEUE.
(values))
;;; Since NULL-QUEUE has no state, we can keep a single instance.
Do n't think of it as the queue analog to NIL ( of lists ) , because
;;; (EQ +NULL-QUEUE+ (MAKE-INSTANCE 'NULL-QUEUE)) is false.
(defparameter +null-queue+ (make-instance 'null-queue))
| null | https://raw.githubusercontent.com/hawkir/calispel/e9f2f9c1af97f4d7bb4c8ac25fb2a8f3e8fada7a/null-queue.lisp | lisp | We can guarantee that no matching OBJECT is in this QUEUE.
Since NULL-QUEUE has no state, we can keep a single instance.
(EQ +NULL-QUEUE+ (MAKE-INSTANCE 'NULL-QUEUE)) is false. | (in-package #:calispel)
(defclass null-queue (jpl-queues:queue)
()
(:documentation "The null queue. Used for unbuffered CHANNELs.
Think of it as the NULL class, but for queues."))
(defmethod jpl-queues:empty? ((queue null-queue))
t)
(defmethod jpl-queues:full? ((queue null-queue))
t)
(defmethod jpl-queues:size ((queue null-queue))
0)
(defmethod jpl-queues:capacity ((queue null-queue))
0)
(defmethod jpl-queues:enqueue (object (queue null-queue))
(error "It is an error to ENQUEUE to a NULL-QUEUE."))
(defmethod jpl-queues:dequeue ((queue null-queue))
(error "It is an error to DEQUEUE from a NULL-QUEUE."))
(defmethod jpl-queues:dequeue-object-if (predicate (queue null-queue) &key &allow-other-keys)
(values))
Do n't think of it as the queue analog to NIL ( of lists ) , because
(defparameter +null-queue+ (make-instance 'null-queue))
|
72db66393d7f7849650567936aaf9460c051b6647681d086307e7c8a84447149 | ninjudd/jiraph | typed.clj | (ns flatland.jiraph.typed
(:use [flatland.jiraph.core :only [layer]]
[flatland.jiraph.layer :only [Basic Optimized Schema
get-node schema update-in-node query-fn]]
[flatland.jiraph.wrapped-layer :only [defwrapped update-wrap-read forward-reads]]
[clojure.core.match :only [match]]
[flatland.useful.map :only [map-vals-with-keys update update-in*]]
[flatland.useful.fn :only [fix fixing]]
[flatland.useful.utils :only [adjoin]]
[flatland.useful.seq :only [assert-length]]
[flatland.useful.experimental :only [prefix-lookup]]
[flatland.useful.datatypes :only [assoc-record]])
(:require [flatland.jiraph.layer :as layer]))
(defn edge-validator [layer id]
(or ((:type-lookup layer) id)
(throw (IllegalArgumentException. (format "%s is not a valid node on layer %s"
id (pr-str layer))))))
(defn validate-edges [layer from-id to-ids]
(let [valid? (edge-validator layer from-id)]
(when-let [broken-edges (seq (remove valid? to-ids))]
(throw (IllegalArgumentException.
(format "%s can't have edges to %s on layer %s"
from-id (pr-str broken-edges) (pr-str layer)))))))
(defn writable-area? [layer keyseq]
(or (empty? keyseq)
(let [lookup (:type-lookup layer)
edge-checker (lookup (first keyseq))]
(and edge-checker
(let [keys (next keyseq)]
is [ i d ]
(not= :edges (first keys)) ;; something not under edges
(let [edge-path (next keys)]
(or (not edge-path) ;; just [id :edges]
(edge-checker (first edge-path))))))))))
the is for bookkeeping / reference only ; the type - lookup function is derived from it at
;; construction time, and is always used instead because it is much faster. type-lookup is a
;; function taking a node-id and returning (if the node's type is valid as a from-edge on this
layer ) another function . That function takes in a node - id and returns iff it is a valid
destination node for an edge from the first node - id .
(defwrapped TypedLayer [layer type-multimap type-lookup]
Basic
(get-node [this id not-found]
(if (writable-area? this [id])
(get-node layer id not-found)
not-found))
(update-in-node [this keyseq f args]
(do (if (empty? keyseq)
(condp = f
dissoc nil
assoc (let [[id attrs] (assert-length 2 args)]
(validate-edges this id (keys (:edges attrs))))
(throw (IllegalArgumentException. (format "Can't apply function %s at top level"
f))))
(if-not (#{assoc adjoin} f)
(throw (IllegalArgumentException.
(format "Can't guarantee typing of %s on typed layer" f)))
(let [from-id (first keyseq)
[attrs] (assert-length 1 args)]
(validate-edges this from-id
(match (rest keyseq)
([] :seq) (keys (:edges attrs))
([:edges] :seq) (keys attrs)
([:edges to-id & _] :seq) [to-id])))))
(-> (update-in-node layer keyseq f args)
(update-wrap-read forward-reads this layer))))
Optimized
(query-fn [this keyseq not-found f]
(if (writable-area? this keyseq)
(query-fn layer keyseq not-found f)
(fn [& args]
(apply f not-found args))))
Schema
(schema [this node-id]
(when (type-lookup node-id)
(schema layer node-id))))
(defn typed-layer [base-layer types]
(TypedLayer. base-layer types
(prefix-lookup (for [[from-type to-types] types]
[from-type (prefix-lookup (for [to-type to-types]
[to-type true]))]))))
(defn without-typing [^TypedLayer typed-layer]
(.layer typed-layer))
| null | https://raw.githubusercontent.com/ninjudd/jiraph/e2897cf4770ead40e574261cd294d2c6701703e8/src/flatland/jiraph/typed.clj | clojure | something not under edges
just [id :edges]
the type - lookup function is derived from it at
construction time, and is always used instead because it is much faster. type-lookup is a
function taking a node-id and returning (if the node's type is valid as a from-edge on this | (ns flatland.jiraph.typed
(:use [flatland.jiraph.core :only [layer]]
[flatland.jiraph.layer :only [Basic Optimized Schema
get-node schema update-in-node query-fn]]
[flatland.jiraph.wrapped-layer :only [defwrapped update-wrap-read forward-reads]]
[clojure.core.match :only [match]]
[flatland.useful.map :only [map-vals-with-keys update update-in*]]
[flatland.useful.fn :only [fix fixing]]
[flatland.useful.utils :only [adjoin]]
[flatland.useful.seq :only [assert-length]]
[flatland.useful.experimental :only [prefix-lookup]]
[flatland.useful.datatypes :only [assoc-record]])
(:require [flatland.jiraph.layer :as layer]))
(defn edge-validator [layer id]
(or ((:type-lookup layer) id)
(throw (IllegalArgumentException. (format "%s is not a valid node on layer %s"
id (pr-str layer))))))
(defn validate-edges [layer from-id to-ids]
(let [valid? (edge-validator layer from-id)]
(when-let [broken-edges (seq (remove valid? to-ids))]
(throw (IllegalArgumentException.
(format "%s can't have edges to %s on layer %s"
from-id (pr-str broken-edges) (pr-str layer)))))))
(defn writable-area? [layer keyseq]
(or (empty? keyseq)
(let [lookup (:type-lookup layer)
edge-checker (lookup (first keyseq))]
(and edge-checker
(let [keys (next keyseq)]
is [ i d ]
(let [edge-path (next keys)]
(edge-checker (first edge-path))))))))))
layer ) another function . That function takes in a node - id and returns iff it is a valid
destination node for an edge from the first node - id .
(defwrapped TypedLayer [layer type-multimap type-lookup]
Basic
(get-node [this id not-found]
(if (writable-area? this [id])
(get-node layer id not-found)
not-found))
(update-in-node [this keyseq f args]
(do (if (empty? keyseq)
(condp = f
dissoc nil
assoc (let [[id attrs] (assert-length 2 args)]
(validate-edges this id (keys (:edges attrs))))
(throw (IllegalArgumentException. (format "Can't apply function %s at top level"
f))))
(if-not (#{assoc adjoin} f)
(throw (IllegalArgumentException.
(format "Can't guarantee typing of %s on typed layer" f)))
(let [from-id (first keyseq)
[attrs] (assert-length 1 args)]
(validate-edges this from-id
(match (rest keyseq)
([] :seq) (keys (:edges attrs))
([:edges] :seq) (keys attrs)
([:edges to-id & _] :seq) [to-id])))))
(-> (update-in-node layer keyseq f args)
(update-wrap-read forward-reads this layer))))
Optimized
(query-fn [this keyseq not-found f]
(if (writable-area? this keyseq)
(query-fn layer keyseq not-found f)
(fn [& args]
(apply f not-found args))))
Schema
(schema [this node-id]
(when (type-lookup node-id)
(schema layer node-id))))
(defn typed-layer [base-layer types]
(TypedLayer. base-layer types
(prefix-lookup (for [[from-type to-types] types]
[from-type (prefix-lookup (for [to-type to-types]
[to-type true]))]))))
(defn without-typing [^TypedLayer typed-layer]
(.layer typed-layer))
|
42845542ff03ae9ae2874867be3142be92740a409542086169ebe455069e4736 | janestreet/bonsai | bonsai_web_ui_reorderable_list.mli | open! Core
open! Bonsai_web
(** A vertical list component which moves items into their proper place during
drag and drop. Items use absolute positioning for explicit layout; that is,
the nth item is [n * item_height] pixels from the top of the container.
Items outside the list may be dragged into the list to extend it. *)
val list
: ('source, 'cmp) Bonsai.comparator
-> dnd:('source, int) Bonsai_web_ui_drag_and_drop.t Bonsai.Value.t
(** The drag-and-drop universe the list should operate in; other items in the
universe may be dragged into the list *)
-> ?enable_debug_overlay:bool
(** Display a transparent overlay on targets to make it clear where an item
may be dropped. *)
-> ?extra_item_attrs:Vdom.Attr.t Bonsai.Value.t
(** Extra attributes to put on the wrapper div for each item in the list. For
example, you might want to make each item animate into and out of
position. *)
-> ?left:Css_gen.Length.t
(** The space between the left edge of an item and the list container *)
-> ?right:Css_gen.Length.t
(** The space between the right edge of an item and the list container *)
-> ?empty_list_placeholder:(item_is_hovered:bool Value.t -> Vdom.Node.t Computation.t)
(** What to display when there are no items in the list. [item_is_hovered] is
provided in case you wish to change the placeholder based on whether an
item is being hovered above the empty list. *)
-> ?default_item_height:int
(** The items and drop targets are spaced evenly every item_height. In order
to look natural, each item should have height [item_height]. *)
-> ?override_last_target_rank:int option Value.t
(** The target that should be given to the place where you can drag an element so
that it's at the end of the list. If optional parameter is not given or the
value contains [None], then the [(maximum id of given items) + 1] is used. *)
-> ('source, Vdom.Node.t * int, 'cmp) Map.t Value.t
(** The items that should be displayed in the list. Each item should have its
view and its current rank. Updating the rank of an item must be done via
the [on_drop] callback of the drag-and-drop universe. *)
-> Vdom.Node.t Bonsai.Computation.t
(** Similar to [list], but creates the drag-and-drop universe and handles the
[on_drop] event, making it fully self-contained. *)
val simple
: ('key, 'cmp) Bonsai.comparator
-> ?sentinel_name:string
-> ?enable_debug_overlay:bool
-> ?extra_item_attrs:Vdom.Attr.t Value.t
-> ?left:Css_gen.Length.t
-> ?right:Css_gen.Length.t
-> ?empty_list_placeholder:(item_is_hovered:bool Value.t -> Vdom.Node.t Computation.t)
-> ?default_item_height:int
-> render:
(index:int Value.t
-> source:Vdom.Attr.t Value.t
-> 'key Value.t
-> ('data * Vdom.Node.t) Computation.t)
-> ('key, 'cmp) Set.t Value.t
-> (('key * 'data) list * Vdom.Node.t) Computation.t
module Action : sig
type 'a item =
| Move of 'a * int
| Set of 'a
| Remove of 'a
| Overwrite of 'a list
[@@deriving sexp]
type 'a t = 'a item list [@@deriving sexp]
end
(** Similar to [simple], but exposes the components injection function. This is
used by the [Bonsai_web_ui_form] wrapper of this library. *)
val with_inject
: ('key, 'cmp) Bonsai.comparator
-> ?sentinel_name:string
-> ?enable_debug_overlay:bool
-> ?extra_item_attrs:Vdom.Attr.t Value.t
-> ?left:Css_gen.Length.t
-> ?right:Css_gen.Length.t
-> ?empty_list_placeholder:(item_is_hovered:bool Value.t -> Vdom.Node.t Computation.t)
-> ?default_item_height:int
-> (index:int Value.t
-> source:Vdom.Attr.t Value.t
-> 'key Value.t
-> ('data * Vdom.Node.t) Computation.t)
-> (('key * 'data) list * Vdom.Node.t * ('key Action.t -> unit Effect.t)) Computation.t
| null | https://raw.githubusercontent.com/janestreet/bonsai/33e9a58fc55ec12095959dc5ef4fd681021c1083/web_ui/reorderable_list/src/bonsai_web_ui_reorderable_list.mli | ocaml | * A vertical list component which moves items into their proper place during
drag and drop. Items use absolute positioning for explicit layout; that is,
the nth item is [n * item_height] pixels from the top of the container.
Items outside the list may be dragged into the list to extend it.
* The drag-and-drop universe the list should operate in; other items in the
universe may be dragged into the list
* Display a transparent overlay on targets to make it clear where an item
may be dropped.
* Extra attributes to put on the wrapper div for each item in the list. For
example, you might want to make each item animate into and out of
position.
* The space between the left edge of an item and the list container
* The space between the right edge of an item and the list container
* What to display when there are no items in the list. [item_is_hovered] is
provided in case you wish to change the placeholder based on whether an
item is being hovered above the empty list.
* The items and drop targets are spaced evenly every item_height. In order
to look natural, each item should have height [item_height].
* The target that should be given to the place where you can drag an element so
that it's at the end of the list. If optional parameter is not given or the
value contains [None], then the [(maximum id of given items) + 1] is used.
* The items that should be displayed in the list. Each item should have its
view and its current rank. Updating the rank of an item must be done via
the [on_drop] callback of the drag-and-drop universe.
* Similar to [list], but creates the drag-and-drop universe and handles the
[on_drop] event, making it fully self-contained.
* Similar to [simple], but exposes the components injection function. This is
used by the [Bonsai_web_ui_form] wrapper of this library. | open! Core
open! Bonsai_web
val list
: ('source, 'cmp) Bonsai.comparator
-> dnd:('source, int) Bonsai_web_ui_drag_and_drop.t Bonsai.Value.t
-> ?enable_debug_overlay:bool
-> ?extra_item_attrs:Vdom.Attr.t Bonsai.Value.t
-> ?left:Css_gen.Length.t
-> ?right:Css_gen.Length.t
-> ?empty_list_placeholder:(item_is_hovered:bool Value.t -> Vdom.Node.t Computation.t)
-> ?default_item_height:int
-> ?override_last_target_rank:int option Value.t
-> ('source, Vdom.Node.t * int, 'cmp) Map.t Value.t
-> Vdom.Node.t Bonsai.Computation.t
val simple
: ('key, 'cmp) Bonsai.comparator
-> ?sentinel_name:string
-> ?enable_debug_overlay:bool
-> ?extra_item_attrs:Vdom.Attr.t Value.t
-> ?left:Css_gen.Length.t
-> ?right:Css_gen.Length.t
-> ?empty_list_placeholder:(item_is_hovered:bool Value.t -> Vdom.Node.t Computation.t)
-> ?default_item_height:int
-> render:
(index:int Value.t
-> source:Vdom.Attr.t Value.t
-> 'key Value.t
-> ('data * Vdom.Node.t) Computation.t)
-> ('key, 'cmp) Set.t Value.t
-> (('key * 'data) list * Vdom.Node.t) Computation.t
module Action : sig
type 'a item =
| Move of 'a * int
| Set of 'a
| Remove of 'a
| Overwrite of 'a list
[@@deriving sexp]
type 'a t = 'a item list [@@deriving sexp]
end
val with_inject
: ('key, 'cmp) Bonsai.comparator
-> ?sentinel_name:string
-> ?enable_debug_overlay:bool
-> ?extra_item_attrs:Vdom.Attr.t Value.t
-> ?left:Css_gen.Length.t
-> ?right:Css_gen.Length.t
-> ?empty_list_placeholder:(item_is_hovered:bool Value.t -> Vdom.Node.t Computation.t)
-> ?default_item_height:int
-> (index:int Value.t
-> source:Vdom.Attr.t Value.t
-> 'key Value.t
-> ('data * Vdom.Node.t) Computation.t)
-> (('key * 'data) list * Vdom.Node.t * ('key Action.t -> unit Effect.t)) Computation.t
|
b5ad93f76970a714c222371cb629ec28a1e0ac427f9e15019a35f1e85a8081e6 | tolysz/ghcjs-stack | Tree.hs | # LANGUAGE DeriveFunctor , , DeriveTraversable #
module Distribution.Client.Dependency.Modular.Tree
( FailReason(..)
, POption(..)
, Tree(..)
, TreeF(..)
, ana
, cata
, choices
, dchoices
, inn
, innM
, para
, trav
, zeroOrOneChoices
) where
import Control.Monad hiding (mapM, sequence)
import Data.Foldable
import Data.Traversable
import Prelude hiding (foldr, mapM, sequence)
import Distribution.Client.Dependency.Modular.Dependency
import Distribution.Client.Dependency.Modular.Flag
import Distribution.Client.Dependency.Modular.Package
import Distribution.Client.Dependency.Modular.PSQ (PSQ)
import qualified Distribution.Client.Dependency.Modular.PSQ as P
import Distribution.Client.Dependency.Modular.Version
import Distribution.Client.Dependency.Types ( ConstraintSource(..) )
-- | Type of the search tree. Inlining the choice nodes for now.
data Tree a =
PChoice QPN a (PSQ POption (Tree a))
Bool indicates whether it 's weak / trivial , second whether it 's manual
| SChoice QSN a Bool (PSQ Bool (Tree a)) -- Bool indicates whether it's trivial
PSQ should never be empty
| Done RevDepMap
| Fail (ConflictSet QPN) FailReason
deriving (Eq, Show, Functor)
-- Above, a choice is called trivial if it clearly does not matter. The
-- special case of triviality we actually consider is if there are no new
-- dependencies introduced by this node.
--
-- A (flag) choice is called weak if we do want to defer it. This is the
-- case for flags that should be implied by what's currently installed on
-- the system, as opposed to flags that are used to explicitly enable or
-- disable some functionality.
-- | A package option is a package instance with an optional linking annotation
--
-- The modular solver has a number of package goals to solve for, and can only
-- pick a single package version for a single goal. In order to allow to
-- install multiple versions of the same package as part of a single solution
-- the solver uses qualified goals. For example, @0.P@ and @1.P@ might both
be qualified goals for , allowing to pick a difference version of package
-- @P@ for @0.P@ and @1.P@.
--
-- Linking is an essential part of this story. In addition to picking a specific
-- version for @1.P@, the solver can also decide to link @1.P@ to @0.P@ (or
vice versa ) . Teans that @1.P@ and @0.P@ really must be the very same package
-- (and hence must have the same build time configuration, and their
-- dependencies must also be the exact same).
--
-- See <-typed.com/blog/2015/03/qualified-goals/> for details.
data POption = POption I (Maybe PP)
deriving (Eq, Show)
data FailReason = InconsistentInitialConstraints
| Conflicting [Dep QPN]
| CannotInstall
| CannotReinstall
| Shadowed
| Broken
| GlobalConstraintVersion VR ConstraintSource
| GlobalConstraintInstalled ConstraintSource
| GlobalConstraintSource ConstraintSource
| GlobalConstraintFlag ConstraintSource
| ManualFlag
| MalformedFlagChoice QFN
| MalformedStanzaChoice QSN
| EmptyGoalChoice
| Backjump
| MultipleInstances
| DependenciesNotLinked String
| CyclicDependencies
deriving (Eq, Show)
-- | Functor for the tree type.
data TreeF a b =
PChoiceF QPN a (PSQ POption b)
| FChoiceF QFN a Bool Bool (PSQ Bool b)
| SChoiceF QSN a Bool (PSQ Bool b)
| GoalChoiceF (PSQ (OpenGoal ()) b)
| DoneF RevDepMap
| FailF (ConflictSet QPN) FailReason
deriving (Functor, Foldable, Traversable)
out :: Tree a -> TreeF a (Tree a)
out (PChoice p i ts) = PChoiceF p i ts
out (FChoice p i b m ts) = FChoiceF p i b m ts
out (SChoice p i b ts) = SChoiceF p i b ts
out (GoalChoice ts) = GoalChoiceF ts
out (Done x ) = DoneF x
out (Fail c x ) = FailF c x
inn :: TreeF a (Tree a) -> Tree a
inn (PChoiceF p i ts) = PChoice p i ts
inn (FChoiceF p i b m ts) = FChoice p i b m ts
inn (SChoiceF p i b ts) = SChoice p i b ts
inn (GoalChoiceF ts) = GoalChoice ts
inn (DoneF x ) = Done x
inn (FailF c x ) = Fail c x
innM :: Monad m => TreeF a (m (Tree a)) -> m (Tree a)
innM (PChoiceF p i ts) = liftM (PChoice p i ) (sequence ts)
innM (FChoiceF p i b m ts) = liftM (FChoice p i b m) (sequence ts)
innM (SChoiceF p i b ts) = liftM (SChoice p i b ) (sequence ts)
innM (GoalChoiceF ts) = liftM (GoalChoice ) (sequence ts)
innM (DoneF x ) = return $ Done x
innM (FailF c x ) = return $ Fail c x
-- | Determines whether a tree is active, i.e., isn't a failure node.
active :: Tree a -> Bool
active (Fail _ _) = False
active _ = True
-- | Determines how many active choices are available in a node. Note that we
count goal choices as having one choice , always .
choices :: Tree a -> Int
choices (PChoice _ _ ts) = P.length (P.filter active ts)
choices (FChoice _ _ _ _ ts) = P.length (P.filter active ts)
choices (SChoice _ _ _ ts) = P.length (P.filter active ts)
choices (GoalChoice _ ) = 1
choices (Done _ ) = 1
choices (Fail _ _ ) = 0
-- | Variant of 'choices' that only approximates the number of choices.
dchoices :: Tree a -> P.Degree
dchoices (PChoice _ _ ts) = P.degree (P.filter active ts)
dchoices (FChoice _ _ _ _ ts) = P.degree (P.filter active ts)
dchoices (SChoice _ _ _ ts) = P.degree (P.filter active ts)
dchoices (GoalChoice _ ) = P.ZeroOrOne
dchoices (Done _ ) = P.ZeroOrOne
dchoices (Fail _ _ ) = P.ZeroOrOne
-- | Variant of 'choices' that only approximates the number of choices.
zeroOrOneChoices :: Tree a -> Bool
zeroOrOneChoices (PChoice _ _ ts) = P.isZeroOrOne (P.filter active ts)
zeroOrOneChoices (FChoice _ _ _ _ ts) = P.isZeroOrOne (P.filter active ts)
zeroOrOneChoices (SChoice _ _ _ ts) = P.isZeroOrOne (P.filter active ts)
zeroOrOneChoices (GoalChoice _ ) = True
zeroOrOneChoices (Done _ ) = True
zeroOrOneChoices (Fail _ _ ) = True
-- | Catamorphism on trees.
cata :: (TreeF a b -> b) -> Tree a -> b
cata phi x = (phi . fmap (cata phi) . out) x
trav :: (TreeF a (Tree b) -> TreeF b (Tree b)) -> Tree a -> Tree b
trav psi x = cata (inn . psi) x
-- | Paramorphism on trees.
para :: (TreeF a (b, Tree a) -> b) -> Tree a -> b
para phi = phi . fmap (\ x -> (para phi x, x)) . out
-- | Anamorphism on trees.
ana :: (b -> TreeF a b) -> b -> Tree a
ana psi = inn . fmap (ana psi) . psi
| null | https://raw.githubusercontent.com/tolysz/ghcjs-stack/83d5be83e87286d984e89635d5926702c55b9f29/special/cabal-next/cabal-install/Distribution/Client/Dependency/Modular/Tree.hs | haskell | | Type of the search tree. Inlining the choice nodes for now.
Bool indicates whether it's trivial
Above, a choice is called trivial if it clearly does not matter. The
special case of triviality we actually consider is if there are no new
dependencies introduced by this node.
A (flag) choice is called weak if we do want to defer it. This is the
case for flags that should be implied by what's currently installed on
the system, as opposed to flags that are used to explicitly enable or
disable some functionality.
| A package option is a package instance with an optional linking annotation
The modular solver has a number of package goals to solve for, and can only
pick a single package version for a single goal. In order to allow to
install multiple versions of the same package as part of a single solution
the solver uses qualified goals. For example, @0.P@ and @1.P@ might both
@P@ for @0.P@ and @1.P@.
Linking is an essential part of this story. In addition to picking a specific
version for @1.P@, the solver can also decide to link @1.P@ to @0.P@ (or
(and hence must have the same build time configuration, and their
dependencies must also be the exact same).
See <-typed.com/blog/2015/03/qualified-goals/> for details.
| Functor for the tree type.
| Determines whether a tree is active, i.e., isn't a failure node.
| Determines how many active choices are available in a node. Note that we
| Variant of 'choices' that only approximates the number of choices.
| Variant of 'choices' that only approximates the number of choices.
| Catamorphism on trees.
| Paramorphism on trees.
| Anamorphism on trees. | # LANGUAGE DeriveFunctor , , DeriveTraversable #
module Distribution.Client.Dependency.Modular.Tree
( FailReason(..)
, POption(..)
, Tree(..)
, TreeF(..)
, ana
, cata
, choices
, dchoices
, inn
, innM
, para
, trav
, zeroOrOneChoices
) where
import Control.Monad hiding (mapM, sequence)
import Data.Foldable
import Data.Traversable
import Prelude hiding (foldr, mapM, sequence)
import Distribution.Client.Dependency.Modular.Dependency
import Distribution.Client.Dependency.Modular.Flag
import Distribution.Client.Dependency.Modular.Package
import Distribution.Client.Dependency.Modular.PSQ (PSQ)
import qualified Distribution.Client.Dependency.Modular.PSQ as P
import Distribution.Client.Dependency.Modular.Version
import Distribution.Client.Dependency.Types ( ConstraintSource(..) )
data Tree a =
PChoice QPN a (PSQ POption (Tree a))
Bool indicates whether it 's weak / trivial , second whether it 's manual
PSQ should never be empty
| Done RevDepMap
| Fail (ConflictSet QPN) FailReason
deriving (Eq, Show, Functor)
be qualified goals for , allowing to pick a difference version of package
vice versa ) . Teans that @1.P@ and @0.P@ really must be the very same package
data POption = POption I (Maybe PP)
deriving (Eq, Show)
data FailReason = InconsistentInitialConstraints
| Conflicting [Dep QPN]
| CannotInstall
| CannotReinstall
| Shadowed
| Broken
| GlobalConstraintVersion VR ConstraintSource
| GlobalConstraintInstalled ConstraintSource
| GlobalConstraintSource ConstraintSource
| GlobalConstraintFlag ConstraintSource
| ManualFlag
| MalformedFlagChoice QFN
| MalformedStanzaChoice QSN
| EmptyGoalChoice
| Backjump
| MultipleInstances
| DependenciesNotLinked String
| CyclicDependencies
deriving (Eq, Show)
data TreeF a b =
PChoiceF QPN a (PSQ POption b)
| FChoiceF QFN a Bool Bool (PSQ Bool b)
| SChoiceF QSN a Bool (PSQ Bool b)
| GoalChoiceF (PSQ (OpenGoal ()) b)
| DoneF RevDepMap
| FailF (ConflictSet QPN) FailReason
deriving (Functor, Foldable, Traversable)
out :: Tree a -> TreeF a (Tree a)
out (PChoice p i ts) = PChoiceF p i ts
out (FChoice p i b m ts) = FChoiceF p i b m ts
out (SChoice p i b ts) = SChoiceF p i b ts
out (GoalChoice ts) = GoalChoiceF ts
out (Done x ) = DoneF x
out (Fail c x ) = FailF c x
inn :: TreeF a (Tree a) -> Tree a
inn (PChoiceF p i ts) = PChoice p i ts
inn (FChoiceF p i b m ts) = FChoice p i b m ts
inn (SChoiceF p i b ts) = SChoice p i b ts
inn (GoalChoiceF ts) = GoalChoice ts
inn (DoneF x ) = Done x
inn (FailF c x ) = Fail c x
innM :: Monad m => TreeF a (m (Tree a)) -> m (Tree a)
innM (PChoiceF p i ts) = liftM (PChoice p i ) (sequence ts)
innM (FChoiceF p i b m ts) = liftM (FChoice p i b m) (sequence ts)
innM (SChoiceF p i b ts) = liftM (SChoice p i b ) (sequence ts)
innM (GoalChoiceF ts) = liftM (GoalChoice ) (sequence ts)
innM (DoneF x ) = return $ Done x
innM (FailF c x ) = return $ Fail c x
active :: Tree a -> Bool
active (Fail _ _) = False
active _ = True
count goal choices as having one choice , always .
choices :: Tree a -> Int
choices (PChoice _ _ ts) = P.length (P.filter active ts)
choices (FChoice _ _ _ _ ts) = P.length (P.filter active ts)
choices (SChoice _ _ _ ts) = P.length (P.filter active ts)
choices (GoalChoice _ ) = 1
choices (Done _ ) = 1
choices (Fail _ _ ) = 0
dchoices :: Tree a -> P.Degree
dchoices (PChoice _ _ ts) = P.degree (P.filter active ts)
dchoices (FChoice _ _ _ _ ts) = P.degree (P.filter active ts)
dchoices (SChoice _ _ _ ts) = P.degree (P.filter active ts)
dchoices (GoalChoice _ ) = P.ZeroOrOne
dchoices (Done _ ) = P.ZeroOrOne
dchoices (Fail _ _ ) = P.ZeroOrOne
zeroOrOneChoices :: Tree a -> Bool
zeroOrOneChoices (PChoice _ _ ts) = P.isZeroOrOne (P.filter active ts)
zeroOrOneChoices (FChoice _ _ _ _ ts) = P.isZeroOrOne (P.filter active ts)
zeroOrOneChoices (SChoice _ _ _ ts) = P.isZeroOrOne (P.filter active ts)
zeroOrOneChoices (GoalChoice _ ) = True
zeroOrOneChoices (Done _ ) = True
zeroOrOneChoices (Fail _ _ ) = True
cata :: (TreeF a b -> b) -> Tree a -> b
cata phi x = (phi . fmap (cata phi) . out) x
trav :: (TreeF a (Tree b) -> TreeF b (Tree b)) -> Tree a -> Tree b
trav psi x = cata (inn . psi) x
para :: (TreeF a (b, Tree a) -> b) -> Tree a -> b
para phi = phi . fmap (\ x -> (para phi x, x)) . out
ana :: (b -> TreeF a b) -> b -> Tree a
ana psi = inn . fmap (ana psi) . psi
|
e3d67a40f60f4cc6e67b444f43b88db7e02fbd1b4c7564b083a0f6578a306a67 | dmitryvk/sbcl-win32-threads | physenvanal.lisp | ;;;; This file implements the environment analysis phase for the
;;;; compiler. This phase annotates IR1 with a hierarchy environment
;;;; structures, determining the physical environment that each LAMBDA
;;;; allocates its variables and finding what values are closed over
;;;; by each physical environment.
This software is part of the SBCL system . See the README file for
;;;; more information.
;;;;
This software is derived from the CMU CL system , which was
written at Carnegie Mellon University and released into the
;;;; public domain. The software is in the public domain and is
;;;; provided with absolutely no warranty. See the COPYING and CREDITS
;;;; files for more information.
(in-package "SB!C")
;;; Do environment analysis on the code in COMPONENT. This involves
;;; various things:
1 . Make a PHYSENV structure for each non - LET LAMBDA , assigning
the LAMBDA - PHYSENV for all LAMBDAs .
2 . Find all values that need to be closed over by each
;;; physical environment.
3 . Scan the blocks in the component closing over non - local - exit
;;; continuations.
4 . Delete all non - top - level functions with no references . This
;;; should only get functions with non-NULL kinds, since normal
functions are deleted when their references go to zero .
(defun physenv-analyze (component)
(declare (type component component))
(aver (every (lambda (x)
(eq (functional-kind x) :deleted))
(component-new-functionals component)))
(setf (component-new-functionals component) ())
(dolist (clambda (component-lambdas component))
(reinit-lambda-physenv clambda))
(mapc #'add-lambda-vars-and-let-vars-to-closures
(component-lambdas component))
(find-non-local-exits component)
(recheck-dynamic-extent-lvars component)
(find-cleanup-points component)
(tail-annotate component)
(analyze-indirect-lambda-vars component)
(dolist (fun (component-lambdas component))
(when (null (leaf-refs fun))
(let ((kind (functional-kind fun)))
(unless (or (eq kind :toplevel)
(functional-has-external-references-p fun))
(aver (member kind '(:optional :cleanup :escape)))
(setf (functional-kind fun) nil)
(delete-functional fun)))))
(setf (component-nlx-info-generated-p component) t)
(values))
;;; This is to be called on a COMPONENT with top level LAMBDAs before
;;; the compilation of the associated non-top-level code to detect
;;; closed over top level variables. We just do COMPUTE-CLOSURE on all
;;; the lambdas. This will pre-allocate environments for all the
;;; functions with closed-over top level variables. The post-pass will
;;; use the existing structure, rather than allocating a new one. We
;;; return true if we discover any possible closure vars.
(defun pre-physenv-analyze-toplevel (component)
(declare (type component component))
(let ((found-it nil))
(dolist (lambda (component-lambdas component))
(when (add-lambda-vars-and-let-vars-to-closures lambda)
(setq found-it t)))
found-it))
If CLAMBDA has a PHYSENV , return it , otherwise assign an empty one
;;; and return that.
(defun get-lambda-physenv (clambda)
(declare (type clambda clambda))
(let ((homefun (lambda-home clambda)))
(or (lambda-physenv homefun)
(let ((res (make-physenv :lambda homefun)))
(setf (lambda-physenv homefun) res)
All the LETLAMBDAs belong to HOMEFUN , and share the same
PHYSENV . Thus , ( 1 ) since HOMEFUN 's PHYSENV was NIL ,
theirs should be NIL too , and ( 2 ) since we 're modifying
HOMEFUN 's PHYSENV , we should modify theirs , too .
(dolist (letlambda (lambda-lets homefun))
(aver (eql (lambda-home letlambda) homefun))
(aver (null (lambda-physenv letlambda)))
(setf (lambda-physenv letlambda) res))
res))))
If FUN has no physical environment , assign one , otherwise clean up
;;; the old physical environment and the INDIRECT flag on LAMBDA-VARs.
;;; This is necessary because pre-analysis is done before
;;; optimization.
(defun reinit-lambda-physenv (fun)
  ;; Reset FUN's home environment for re-analysis: wipe the recorded
  ;; closure and the INDIRECT flags on all variables, or allocate a
  ;; fresh environment if none exists yet.
  (let ((existing (lambda-physenv (lambda-home fun))))
    (if existing
        (progn
          (setf (physenv-closure existing) nil)
          (labels ((wipe-indirect-flags (clambda)
                     (dolist (var (lambda-vars clambda))
                       (setf (lambda-var-indirect var) nil))))
            (wipe-indirect-flags fun)
            (mapc #'wipe-indirect-flags (lambda-lets fun))))
        (get-lambda-physenv fun)))
  (values))
;;; Get NODE's environment, assigning one if necessary.
(defun get-node-physenv (node)
  (declare (type node node))
  ;; A node's environment is that of the lambda it lives in.
  (let ((home (node-home-lambda node)))
    (get-lambda-physenv home)))
;;; private guts of ADD-LAMBDA-VARS-AND-LET-VARS-TO-CLOSURES
;;;
;;; This is the old CMU CL COMPUTE-CLOSURE, which only works on
;;; LAMBDA-VARS directly, not on the LAMBDA-VARS of LAMBDA-LETS. It
;;; seems never to be valid to use this operation alone, so in SBCL,
;;; it's private, and the public interface,
;;; ADD-LAMBDA-VARS-AND-LET-VARS-TO-CLOSURES, always runs over all the
;;; variables, not only the LAMBDA-VARS of CLAMBDA itself but also
;;; the LAMBDA-VARS of CLAMBDA's LAMBDA-LETS.
(defun %add-lambda-vars-to-closures (clambda)
  ;; Close over each of CLAMBDA's own variables that is referenced or
  ;; set from a different physical environment.  Returns true if
  ;; anything new was closed over.
  (let ((physenv (get-lambda-physenv clambda))
        (did-something nil))
    (note-unreferenced-vars clambda)
    (dolist (var (lambda-vars clambda))
      ;; A reference from a foreign environment forces VAR into the
      ;; closure; if VAR is also assigned anywhere, mark it INDIRECT
      ;; so readers and writers share a value cell.
      (dolist (ref (leaf-refs var))
        (let ((ref-physenv (get-node-physenv ref)))
          (unless (eq ref-physenv physenv)
            (when (lambda-var-sets var)
              (setf (lambda-var-indirect var) t))
            (setq did-something t)
            (close-over var ref-physenv physenv))))
      ;; Likewise for assignments made from a foreign environment.
      (dolist (set (basic-var-sets var))
        ;; Variables which are set but never referenced can be
        ;; optimized away, and closing over them here would just
        ;; interfere with that. (In bug 147, it *did* interfere with
        ;; that, causing confusion later. This UNLESS solves that
        ;; problem, but I (WHN) am not 100% sure it's best to solve
        ;; the problem this way instead of somehow solving it
        ;; somewhere upstream and just doing (AVER (LEAF-REFS VAR))
        ;; here.)
        (unless (null (leaf-refs var))
          (let ((set-physenv (get-node-physenv set)))
            (unless (eq set-physenv physenv)
              (setf did-something t
                    (lambda-var-indirect var) t)
              (close-over var set-physenv physenv))))))
    did-something))
;;; Find any variables in CLAMBDA -- either directly in LAMBDA-VARS or
;;; in the LAMBDA-VARS of elements of LAMBDA-LETS -- with references
;;; outside of the home environment and close over them. If a
;;; closed-over variable is set, then we set the INDIRECT flag so that
;;; we will know the closed over value is really a pointer to the
;;; value cell. We also warn about unreferenced variables here, just
;;; because it's a convenient place to do it. We return true if we
;;; close over anything.
(defun add-lambda-vars-and-let-vars-to-closures (clambda)
  (declare (type clambda clambda))
  ;; Process CLAMBDA's own variables first, then the variables of each
  ;; LET bound in it.  Return true if anything was closed over.
  (let ((closed-over-p (and (%add-lambda-vars-to-closures clambda) t)))
    (dolist (lambda-let (lambda-lets clambda) closed-over-p)
      ;; LETs only nest one layer deep, so no full recursion is
      ;; required here.
      (aver (null (lambda-lets lambda-let)))
      (when (%add-lambda-vars-to-closures lambda-let)
        (setq closed-over-p t)))))
(defun xep-allocator (xep)
  ;; The allocator of an external entry point is recorded on its
  ;; underlying function.
  (functional-allocator (functional-entry-fun xep)))
;;; Make sure that THING is closed over in REF-PHYSENV and in all
;;; PHYSENVs for the functions that reference REF-PHYSENV's function
;;; (not just calls). HOME-PHYSENV is THING's home environment. When we
;;; reach the home environment, we stop propagating the closure.
(defun close-over (thing ref-physenv home-physenv)
  (declare (type physenv ref-physenv home-physenv))
  ;; Flood-fill THING into the closure of every physical environment
  ;; reachable between REF-PHYSENV and HOME-PHYSENV by following
  ;; references to the functions involved.  FLOODED-PHYSENVS guards
  ;; against cycles; reaching HOME-PHYSENV stops propagation.
  (let ((flooded-physenvs nil))
    (labels ((flood (flooded-physenv)
               (unless (or (eql flooded-physenv home-physenv)
                           (member flooded-physenv flooded-physenvs))
                 (push flooded-physenv flooded-physenvs)
                 (unless (memq thing (physenv-closure flooded-physenv))
                   (push thing (physenv-closure flooded-physenv))
                   (let ((lambda (physenv-lambda flooded-physenv)))
                     (cond ((eq (functional-kind lambda) :external)
                            ;; An XEP's closure is set up by its
                            ;; allocating function: flood that
                            ;; function's environment, then close the
                            ;; XEP itself over in every environment
                            ;; that references it.
                            (let* ((alloc-node (xep-allocator lambda))
                                   (alloc-lambda (node-home-lambda alloc-node))
                                   (alloc-physenv (get-lambda-physenv alloc-lambda)))
                              (flood alloc-physenv)
                              (dolist (ref (leaf-refs lambda))
                                (close-over lambda
                                            (get-node-physenv ref) alloc-physenv))))
                           (t (dolist (ref (leaf-refs lambda))
                                ;; FIXME: This assertion looks
                                ;; reasonable, but does not work for
                                ;; :CLEANUPs.
                                #+nil
                                (let ((dest (node-dest ref)))
                                  (aver (basic-combination-p dest))
                                  (aver (eq (basic-combination-kind dest) :local)))
                                (flood (get-node-physenv ref))))))))))
      (flood ref-physenv)))
  (values))
;;; Find LAMBDA-VARs that are marked as needing to support indirect
;;; access (SET at some point after initial creation) that are present
;;; in CLAMBDAs not marked as being DYNAMIC-EXTENT (meaning that the
;;; value-cell involved must be able to survive past the extent of the
;;; allocating frame), and mark them (the LAMBDA-VARs) as needing
;;; explicit value-cells. Because they are already closed-over, the
;;; LAMBDA-VARs already appear in the closures of all of the CLAMBDAs
;;; that need checking.
(defun analyze-indirect-lambda-vars (component)
  (dolist (fun (component-lambdas component))
    (let ((entry-fun (functional-entry-fun fun)))
      ;; We also check the ENTRY-FUN, as XEPs for LABELS or FLET
      ;; functions aren't set to be DX even if their underlying
      ;; CLAMBDAs are, and if we ever get LET-bound anonymous function
      ;; DX working, it would mark the XEP as being DX but not the
      ;; "real" CLAMBDA. This works because a FUNCTIONAL-ENTRY-FUN is
      ;; either NULL, a self-pointer (for :TOPLEVEL functions), a
      ;; pointer from an XEP to its underlying function (for :EXTERNAL
      ;; functions), or a pointer from an underlying function to its
      ;; XEP (for non-:TOPLEVEL functions with XEPs).
      (unless (or (leaf-dynamic-extent fun)
                  (and entry-fun
                       (leaf-dynamic-extent entry-fun)))
        ;; FUN's frame may be exited while a closed-over SET variable
        ;; is still live, so any indirect variable in its closure
        ;; needs an explicit (heap-allocated) value cell.
        (let ((closure (physenv-closure (lambda-physenv fun))))
          (dolist (var closure)
            (when (and (lambda-var-p var)
                       (lambda-var-indirect var))
              (setf (lambda-var-explicit-value-cell var) t))))))))
;;;; non-local exit
;;; Return true if EXIT's compilation policy asks for a runtime check
;;; that the exit's tag (dynamic extent) still exists.
;;;
;;; BUG FIX: the DECLAIM previously named SHOULD-EXIT-CHECK-TAG-P,
;;; a function that does not exist, so the intended INLINE
;;; declaration for EXIT-SHOULD-CHECK-TAG-P never took effect.
#!-sb-fluid (declaim (inline exit-should-check-tag-p))
(defun exit-should-check-tag-p (exit)
  (declare (type exit exit))
  (not (zerop (policy exit check-tag-existence))))
;;; Insert the entry stub before the original exit target, and add a
;;; new entry to the PHYSENV-NLX-INFO. The %NLX-ENTRY call in the
;;; stub is passed the NLX-INFO as an argument so that the back end
;;; knows what entry is being done.
;;;
;;; The link from the EXIT block to the entry stub is changed to be a
;;; link from the component head. Similarly, the EXIT block is linked
;;; to the component tail. This leaves the entry stub reachable, but
;;; makes the flow graph less confusing to flow analysis.
;;;
;;; If a CATCH or an UNWIND-protect, then we set the LEXENV for the
;;; last node in the cleanup code to be the enclosing environment, to
;;; represent the fact that the binding was undone as a side effect of
;;; the exit. This will cause a lexical exit to be broken up if we are
;;; actually exiting the scope (i.e. a BLOCK), and will also do any
;;; other cleanups that may have to be done on the way.
(defun insert-nlx-entry-stub (exit env)
  (declare (type physenv env) (type exit exit))
  (let* ((exit-block (node-block exit))
         (next-block (first (block-succ exit-block)))
         (entry (exit-entry exit))
         (cleanup (entry-cleanup entry))
         (info (make-nlx-info cleanup exit))
         ;; The stub block evaluates the %NLX-ENTRY marker, telling
         ;; the back end which NLX entry point is being entered.
         (new-block (insert-cleanup-code exit-block next-block
                                         entry
                                         `(%nlx-entry ',info)
                                         cleanup))
         (component (block-component new-block)))
    ;; Rewire the flow graph: the exit block now falls off the
    ;; component tail and the stub is entered from the component
    ;; head, keeping the stub reachable without confusing flow
    ;; analysis.
    (unlink-blocks exit-block new-block)
    (link-blocks exit-block (component-tail component))
    (link-blocks (component-head component) new-block)
    (setf (exit-nlx-info exit) info)
    (setf (nlx-info-target info) new-block)
    (setf (nlx-info-safe-p info) (exit-should-check-tag-p exit))
    (push info (physenv-nlx-info env))
    (push info (cleanup-info cleanup))
    ;; For CATCH/UNWIND-PROTECT the binding is undone as a side
    ;; effect of the exit, so the stub's last node gets the LEXENV of
    ;; the enclosing ENTRY.
    (when (member (cleanup-kind cleanup) '(:catch :unwind-protect))
      (setf (node-lexenv (block-last new-block))
            (node-lexenv entry))))
  (values))
;;; Do stuff necessary to represent a non-local exit from the node
;;; EXIT into ENV. This is called for each non-local exit node, of
;;; which there may be several per exit continuation. This is what we
;;; do:
;;; -- If there isn't any NLX-INFO entry in the environment, make
;;; an entry stub, otherwise just move the exit block link to
;;; the component tail.
;;; -- Close over the NLX-INFO in the exit environment.
;;; -- If the exit is from an :ESCAPE function, then substitute a
;;; constant reference to NLX-INFO structure for the escape
;;; function reference. This will cause the escape function to
;;;    be deleted (although not removed from the DFO.) The escape
;;; function is no longer needed, and we don't want to emit code
;;; for it.
;;; -- Change the %NLX-ENTRY call to use the NLX lvar so that 1) there
;;;    will be a use to represent the NLX use; 2) make life easier for
;;; the stack analysis.
(defun note-non-local-exit (env exit)
  (declare (type physenv env) (type exit exit))
  (let ((lvar (node-lvar exit))
        (exit-fun (node-home-lambda exit))
        (info (find-nlx-info exit)))
    (cond (info
           ;; There is already an entry stub for this exit point, so
           ;; just divert the exit block to the component tail and
           ;; reuse the existing NLX-INFO.
           (let ((block (node-block exit)))
             (aver (= (length (block-succ block)) 1))
             (unlink-blocks block (first (block-succ block)))
             (link-blocks block (component-tail (block-component block)))
             (setf (exit-nlx-info exit) info)
             ;; Upgrade to a checked ("safe") exit if this exit's
             ;; policy demands it; never downgrade.
             (unless (nlx-info-safe-p info)
               (setf (nlx-info-safe-p info)
                     (exit-should-check-tag-p exit)))))
          (t
           (insert-nlx-entry-stub exit env)
           (setq info (exit-nlx-info exit))
           (aver info)))
    ;; The NLX-INFO must be available in the exiting environment.
    (close-over info (node-physenv exit) env)
    ;; An :ESCAPE function is no longer needed once its references
    ;; are replaced by a constant reference to the NLX-INFO; this
    ;; will cause it to be deleted (though not removed from the DFO).
    (when (eq (functional-kind exit-fun) :escape)
      (mapc (lambda (x)
              (setf (node-derived-type x) *wild-type*))
            (leaf-refs exit-fun))
      (substitute-leaf (find-constant info) exit-fun))
    ;; Make the %NLX-ENTRY node use the exit's lvar, so there is a
    ;; use representing the NLX value; this simplifies stack
    ;; analysis.
    (when lvar
      (let ((node (block-last (nlx-info-target info))))
        (unless (node-lvar node)
          (aver (eq lvar (node-lvar exit)))
          (setf (node-derived-type node) (lvar-derived-type lvar))
          (add-lvar-use node lvar)))))
  (values))
;;; Iterate over the EXITs in COMPONENT, calling NOTE-NON-LOCAL-EXIT
;;; when we find a block that ends in a non-local EXIT node. We also
;;; ensure that all EXIT nodes are either non-local or degenerate by
;;; calling IR1-OPTIMIZE-EXIT on local exits. This makes life simpler
;;; for later phases.
(defun find-non-local-exits (component)
  (declare (type component component))
  (dolist (clambda (component-lambdas component))
    (dolist (entry (lambda-entries clambda))
      ;; The entry's environment is the same for every exit, so bind
      ;; it once per ENTRY.
      (let ((entry-physenv (node-physenv entry)))
        (dolist (exit (entry-exits entry))
          (if (eq (node-physenv exit) entry-physenv)
              ;; A local exit is degenerate: let the optimizer delete
              ;; it if possible.
              (maybe-delete-exit exit)
              (note-non-local-exit entry-physenv exit))))))
  (values))
;;;; final decision on stack allocation of dynamic-extent structures
;;; NOTE: line "DX closure" in the source was corrupted -- the
;;; ETYPECASE clause head "(node" had been lost, leaving the form
;;; unbalanced.  Restored below.
(defun recheck-dynamic-extent-lvars (component)
  (declare (type component component))
  (dolist (lambda (component-lambdas component))
    (loop for entry in (lambda-entries lambda)
          for cleanup = (entry-cleanup entry)
          do (when (eq (cleanup-kind cleanup) :dynamic-extent)
               (collect ((real-dx-lvars))
                 ;; Each item of the cleanup's info is either a
                 ;; (DX . LVAR) pair for a DX-declared value, or a
                 ;; combination node naming a potentially DX closure.
                 (loop for what in (cleanup-info cleanup)
                       do (etypecase what
                            (cons
                             (let ((dx (car what))
                                   (lvar (cdr what)))
                               (cond ((lvar-good-for-dx-p lvar dx component)
                                      ;; Since the above check does deep
                                      ;; checks. we need to deal with the deep
                                      ;; results in here as well.
                                      (dolist (cell (handle-nested-dynamic-extent-lvars
                                                     dx lvar component))
                                        (let ((real (principal-lvar (cdr cell))))
                                          (setf (lvar-dynamic-extent real) cleanup)
                                          (real-dx-lvars real))))
                                     (t
                                      (note-no-stack-allocation lvar)
                                      (setf (lvar-dynamic-extent lvar) nil)))))
                            (node ; DX closure
                             ;; A closure is only worth stack-allocating
                             ;; if it actually closes over something;
                             ;; otherwise drop its DX marking.
                             (let* ((call what)
                                    (arg (first (basic-combination-args call)))
                                    (funs (lvar-value arg))
                                    (dx nil))
                               (dolist (fun funs)
                                 (binding* ((() (leaf-dynamic-extent fun)
                                             :exit-if-null)
                                            (xep (functional-entry-fun fun)
                                                 :exit-if-null)
                                            (closure (physenv-closure
                                                      (get-lambda-physenv xep))))
                                   (cond (closure
                                          (setq dx t))
                                         (t
                                          (setf (leaf-dynamic-extent fun) nil)))))
                               (when dx
                                 (setf (lvar-dynamic-extent arg) cleanup)
                                 (real-dx-lvars arg))))))
                 ;; Record the lvars that survived re-checking, both on
                 ;; the cleanup and on the component.
                 (let ((real-dx-lvars (delete-duplicates (real-dx-lvars))))
                   (setf (cleanup-info cleanup) real-dx-lvars)
                   (setf (component-dx-lvars component)
                         (append real-dx-lvars (component-dx-lvars component))))))))
  (values))
;;;; cleanup emission
;;; Zoom up the cleanup nesting until we hit CLEANUP1, accumulating
;;; cleanup code as we go. When we are done, convert the cleanup code
;;; in an implicit MV-PROG1. We have to force local call analysis of
;;; new references to UNWIND-PROTECT cleanup functions. If we don't
;;; actually have to do anything, then we don't insert any cleanup
;;; code. (FIXME: There's some confusion here, left over from CMU CL
;;; comments. CLEANUP1 isn't mentioned in the code of this function.
;;; It is in code elsewhere, but if the comments for this function
;;; mention it they should explain the relationship to the other code.)
;;;
;;; If we do insert cleanup code, we check that BLOCK1 doesn't end in
;;; a "tail" local call.
;;;
;;; We don't need to adjust the ending cleanup of the cleanup block,
;;; since the cleanup blocks are inserted at the start of the DFO, and
;;; are thus never scanned.
(defun emit-cleanups (block1 block2)
  (declare (type cblock block1 block2))
  (collect ((code)
            (reanalyze-funs))
    ;; Walk outward from BLOCK1's ending cleanup toward BLOCK2's
    ;; starting cleanup, accumulating the unwinding code for each
    ;; intervening cleanup, innermost first.
    (let ((cleanup2 (block-start-cleanup block2)))
      (do ((cleanup (block-end-cleanup block1)
                    (node-enclosing-cleanup (cleanup-mess-up cleanup))))
          ((eq cleanup cleanup2))
        (let* ((node (cleanup-mess-up cleanup))
               (args (when (basic-combination-p node)
                       (basic-combination-args node))))
          (ecase (cleanup-kind cleanup)
            (:special-bind
             (code `(%special-unbind ',(lvar-value (first args)))))
            (:catch
             (code `(%catch-breakup)))
            (:unwind-protect
             (code `(%unwind-protect-breakup))
             ;; The UNWIND-PROTECT cleanup function gains a new local
             ;; call here, so it must be re-analyzed afterwards.
             (let ((fun (ref-leaf (lvar-uses (second args)))))
               (reanalyze-funs fun)
               (code `(%funcall ,fun))))
            ((:block :tagbody)
             (dolist (nlx (cleanup-info cleanup))
               (code `(%lexical-exit-breakup ',nlx))))
            (:dynamic-extent
             (when (not (null (cleanup-info cleanup)))
               (code `(%cleanup-point)))))))
      (when (code)
        ;; A block ending in a tail call cannot have cleanup code
        ;; spliced after it.
        (aver (not (node-tail-p (block-last block1))))
        (insert-cleanup-code block1 block2
                             (block-last block1)
                             `(progn ,@(code)))
        (dolist (fun (reanalyze-funs))
          (locall-analyze-fun-1 fun)))))
  (values))
;;; Loop over the blocks in COMPONENT, calling EMIT-CLEANUPS when we
;;; see a successor in the same environment with a different cleanup.
;;; We ignore the cleanup transition if it is to a cleanup enclosed by
;;; the current cleanup, since in that case we are just messing up the
;;; environment, hence this is not the place to clean it.
(defun find-cleanup-points (component)
  (declare (type component component))
  (do-blocks (block1 component)
    (let ((env1 (block-physenv block1))
          (cleanup1 (block-end-cleanup block1)))
      (dolist (block2 (block-succ block1))
        (when (block-start block2)
          (let ((env2 (block-physenv block2))
                (cleanup2 (block-start-cleanup block2)))
            ;; Emit cleanups only for transitions that stay within one
            ;; physical environment and actually leave a cleanup
            ;; scope.  A transition into a cleanup directly enclosed
            ;; by the current one merely enters a new scope, so it is
            ;; skipped.
            (unless (or (not (eq env2 env1))
                        (eq cleanup1 cleanup2)
                        (and cleanup2
                             (eq (node-enclosing-cleanup
                                  (cleanup-mess-up cleanup2))
                                 cleanup1)))
              (emit-cleanups block1 block2)))))))
  (values))
;;; Mark optimizable tail-recursive uses of function result
;;; continuations with the corresponding TAIL-SET.
(defun tail-annotate (component)
  (declare (type component component))
  (dolist (fun (component-lambdas component))
    (let ((ret (lambda-return fun)))
      ;; Nodes whose type is NIL (i.e. don't return) such as calls to
      ;; ERROR are never annotated as TAIL-P, in order to preserve
      ;; debugging information.
      ;;
      ;; FIXME: It might be better to add another DEFKNOWN property
      ;; (e.g. NO-TAIL-RECURSION) and use it for error-handling
      ;; functions like ERROR, instead of spreading this special case
      ;; net so widely. --WHN?
      ;;
      ;; Why is that bad? Because this non-elimination of
      ;; non-returning tail calls causes the XEP for FOO to appear in
      ;; the backtrace for (defun foo (x) (error "foo ~S" x)), which
      ;; seems less than optimal. --NS 2005-02-28
      (when ret
        (let ((result (return-result ret)))
          (do-uses (use result)
            ;; A use is a tail call when policy allows tail merging,
            ;; it is a combination that immediately feeds the return
            ;; value, and it either can return or is a local call.
            (when (and (policy use merge-tail-calls)
                       (basic-combination-p use)
                       (immediately-used-p result use)
                       (or (not (eq (node-derived-type use) *empty-type*))
                           (eq (basic-combination-kind use) :local)))
              (setf (node-tail-p use) t)))))))
  (values))
| null | https://raw.githubusercontent.com/dmitryvk/sbcl-win32-threads/5abfd64b00a0937ba2df2919f177697d1d91bde4/src/compiler/physenvanal.lisp | lisp | This file implements the environment analysis phase for the
compiler. This phase annotates IR1 with a hierarchy environment
structures, determining the physical environment that each LAMBDA
allocates its variables and finding what values are closed over
by each physical environment.
more information.
public domain. The software is in the public domain and is
provided with absolutely no warranty. See the COPYING and CREDITS
files for more information.
Do environment analysis on the code in COMPONENT. This involves
various things:
physical environment.
continuations.
should only get functions with non-NULL kinds, since normal
This is to be called on a COMPONENT with top level LAMBDAs before
the compilation of the associated non-top-level code to detect
closed over top level variables. We just do COMPUTE-CLOSURE on all
the lambdas. This will pre-allocate environments for all the
functions with closed-over top level variables. The post-pass will
use the existing structure, rather than allocating a new one. We
return true if we discover any possible closure vars.
and return that.
the old physical environment and the INDIRECT flag on LAMBDA-VARs.
This is necessary because pre-analysis is done before
optimization.
private guts of ADD-LAMBDA-VARS-AND-LET-VARS-TO-CLOSURES
LAMBDA-VARS directly, not on the LAMBDA-VARS of LAMBDA-LETS. It
it's private, and the public interface,
ADD-LAMBDA-VARS-AND-LET-VARS-TO-CLOSURES, always runs over all the
Variables which are set but never referenced can be
optimized away, and closing over them here would just
that, causing confusion later. This UNLESS solves that
the problem this way instead of somehow solving it
here.)
in the LAMBDA-VARS of elements of LAMBDA-LETS -- with references
outside of the home environment and close over them. If a
closed-over variable is set, then we set the INDIRECT flag so that
we will know the closed over value is really a pointer to the
value cell. We also warn about unreferenced variables here, just
because it's a convenient place to do it. We return true if we
close over anything.
There's no need to recurse through full COMPUTE-CLOSURE
(not just calls). HOME-PHYSENV is THING's home environment. When we
reach the home environment, we stop propagating the closure.
FIXME: This assertion looks
reasonable, but does not work for
:CLEANUPs.
Find LAMBDA-VARs that are marked as needing to support indirect
access (SET at some point after initial creation) that are present
in CLAMBDAs not marked as being DYNAMIC-EXTENT (meaning that the
value-cell involved must be able to survive past the extent of the
allocating frame), and mark them (the LAMBDA-VARs) as needing
explicit value-cells. Because they are already closed-over, the
LAMBDA-VARs already appear in the closures of all of the CLAMBDAs
that need checking.
We also check the ENTRY-FUN, as XEPs for LABELS or FLET
CLAMBDAs are, and if we ever get LET-bound anonymous function
either NULL, a self-pointer (for :TOPLEVEL functions), a
pointer from an XEP to its underlying function (for :EXTERNAL
functions), or a pointer from an underlying function to its
non-local exit
Insert the entry stub before the original exit target, and add a
knows what entry is being done.
to the component tail. This leaves the entry stub reachable, but
makes the flow graph less confusing to flow analysis.
If a CATCH or an UNWIND-protect, then we set the LEXENV for the
last node in the cleanup code to be the enclosing environment, to
represent the fact that the binding was undone as a side effect of
the exit. This will cause a lexical exit to be broken up if we are
actually exiting the scope (i.e. a BLOCK), and will also do any
other cleanups that may have to be done on the way.
Do stuff necessary to represent a non-local exit from the node
which there may be several per exit continuation. This is what we
do:
-- If there isn't any NLX-INFO entry in the environment, make
an entry stub, otherwise just move the exit block link to
the component tail.
-- If the exit is from an :ESCAPE function, then substitute a
constant reference to NLX-INFO structure for the escape
function reference. This will cause the escape function to
function is no longer needed, and we don't want to emit code
for it.
2 ) make life easier for
the stack analysis.
for later phases.
final decision on stack allocation of dynamic-extent structures
Since the above check does deep
checks. we need to deal with the deep
results in here as well.
cleanup emission
cleanup code as we go. When we are done, convert the cleanup code
new references to UNWIND-PROTECT cleanup functions. If we don't
actually have to do anything, then we don't insert any cleanup
It is in code elsewhere, but if the comments for this function
mention it they should explain the relationship to the other code.)
If we do insert cleanup code, we check that BLOCK1 doesn't end in
a "tail" local call.
We don't need to adjust the ending cleanup of the cleanup block,
are thus never scanned.
Loop over the blocks in COMPONENT, calling EMIT-CLEANUPS when we
see a successor in the same environment with a different cleanup.
We ignore the cleanup transition if it is to a cleanup enclosed by
the current cleanup, since in that case we are just messing up the
environment, hence this is not the place to clean it.
continuations with the corresponding TAIL-SET.
Nodes whose type is NIL (i.e. don't return) such as calls to
ERROR are never annotated as TAIL-P, in order to preserve
debugging information.
(e.g. NO-TAIL-RECURSION) and use it for error-handling
functions like ERROR, instead of spreading this special case
net so widely. --WHN?
Why is that bad? Because this non-elimination of |
This software is part of the SBCL system . See the README file for
This software is derived from the CMU CL system , which was
written at Carnegie Mellon University and released into the
(in-package "SB!C")
1 . Make a PHYSENV structure for each non - LET LAMBDA , assigning
the LAMBDA - PHYSENV for all LAMBDAs .
2 . Find all values that need to be closed over by each
3 . Scan the blocks in the component closing over non - local - exit
4 . Delete all non - top - level functions with no references . This
functions are deleted when their references go to zero .
(defun physenv-analyze (component)
(declare (type component component))
(aver (every (lambda (x)
(eq (functional-kind x) :deleted))
(component-new-functionals component)))
(setf (component-new-functionals component) ())
(dolist (clambda (component-lambdas component))
(reinit-lambda-physenv clambda))
(mapc #'add-lambda-vars-and-let-vars-to-closures
(component-lambdas component))
(find-non-local-exits component)
(recheck-dynamic-extent-lvars component)
(find-cleanup-points component)
(tail-annotate component)
(analyze-indirect-lambda-vars component)
(dolist (fun (component-lambdas component))
(when (null (leaf-refs fun))
(let ((kind (functional-kind fun)))
(unless (or (eq kind :toplevel)
(functional-has-external-references-p fun))
(aver (member kind '(:optional :cleanup :escape)))
(setf (functional-kind fun) nil)
(delete-functional fun)))))
(setf (component-nlx-info-generated-p component) t)
(values))
(defun pre-physenv-analyze-toplevel (component)
(declare (type component component))
(let ((found-it nil))
(dolist (lambda (component-lambdas component))
(when (add-lambda-vars-and-let-vars-to-closures lambda)
(setq found-it t)))
found-it))
If CLAMBDA has a PHYSENV , return it , otherwise assign an empty one
(defun get-lambda-physenv (clambda)
(declare (type clambda clambda))
(let ((homefun (lambda-home clambda)))
(or (lambda-physenv homefun)
(let ((res (make-physenv :lambda homefun)))
(setf (lambda-physenv homefun) res)
All the LETLAMBDAs belong to HOMEFUN , and share the same
PHYSENV . Thus , ( 1 ) since HOMEFUN 's PHYSENV was NIL ,
theirs should be NIL too , and ( 2 ) since we 're modifying
HOMEFUN 's PHYSENV , we should modify theirs , too .
(dolist (letlambda (lambda-lets homefun))
(aver (eql (lambda-home letlambda) homefun))
(aver (null (lambda-physenv letlambda)))
(setf (lambda-physenv letlambda) res))
res))))
If FUN has no physical environment , assign one , otherwise clean up
(defun reinit-lambda-physenv (fun)
(let ((old (lambda-physenv (lambda-home fun))))
(cond (old
(setf (physenv-closure old) nil)
(flet ((clear (fun)
(dolist (var (lambda-vars fun))
(setf (lambda-var-indirect var) nil))))
(clear fun)
(map nil #'clear (lambda-lets fun))))
(t
(get-lambda-physenv fun))))
(values))
Get NODE 's environment , assigning one if necessary .
(defun get-node-physenv (node)
(declare (type node node))
(get-lambda-physenv (node-home-lambda node)))
This is the old CMU CL COMPUTE - CLOSURE , which only works on
seems never to be valid to use this operation alone , so in SBCL ,
variables , not only the LAMBDA - VARS of CLAMBDA itself but also
the LAMBDA - VARS of CLAMBDA 's LAMBDA - LETS .
(defun %add-lambda-vars-to-closures (clambda)
(let ((physenv (get-lambda-physenv clambda))
(did-something nil))
(note-unreferenced-vars clambda)
(dolist (var (lambda-vars clambda))
(dolist (ref (leaf-refs var))
(let ((ref-physenv (get-node-physenv ref)))
(unless (eq ref-physenv physenv)
(when (lambda-var-sets var)
(setf (lambda-var-indirect var) t))
(setq did-something t)
(close-over var ref-physenv physenv))))
(dolist (set (basic-var-sets var))
interfere with that . ( In bug 147 , it * did * interfere with
problem , but I ( WHN ) am not 100 % sure it 's best to solve
somewhere upstream and just doing ( AVER ( LEAF - REFS VAR ) )
(unless (null (leaf-refs var))
(let ((set-physenv (get-node-physenv set)))
(unless (eq set-physenv physenv)
(setf did-something t
(lambda-var-indirect var) t)
(close-over var set-physenv physenv))))))
did-something))
Find any variables in CLAMBDA -- either directly in LAMBDA - VARS or
(defun add-lambda-vars-and-let-vars-to-closures (clambda)
(declare (type clambda clambda))
(let ((did-something nil))
(when (%add-lambda-vars-to-closures clambda)
(setf did-something t))
(dolist (lambda-let (lambda-lets clambda))
here , since LETS only go one layer deep .
(aver (null (lambda-lets lambda-let)))
(when (%add-lambda-vars-to-closures lambda-let)
(setf did-something t)))
did-something))
(defun xep-allocator (xep)
(let ((entry (functional-entry-fun xep)))
(functional-allocator entry)))
Make sure that THING is closed over in REF - PHYSENV and in all
PHYSENVs for the functions that reference REF - PHYSENV 's function
(defun close-over (thing ref-physenv home-physenv)
(declare (type physenv ref-physenv home-physenv))
(let ((flooded-physenvs nil))
(labels ((flood (flooded-physenv)
(unless (or (eql flooded-physenv home-physenv)
(member flooded-physenv flooded-physenvs))
(push flooded-physenv flooded-physenvs)
(unless (memq thing (physenv-closure flooded-physenv))
(push thing (physenv-closure flooded-physenv))
(let ((lambda (physenv-lambda flooded-physenv)))
(cond ((eq (functional-kind lambda) :external)
(let* ((alloc-node (xep-allocator lambda))
(alloc-lambda (node-home-lambda alloc-node))
(alloc-physenv (get-lambda-physenv alloc-lambda)))
(flood alloc-physenv)
(dolist (ref (leaf-refs lambda))
(close-over lambda
(get-node-physenv ref) alloc-physenv))))
(t (dolist (ref (leaf-refs lambda))
#+nil
(let ((dest (node-dest ref)))
(aver (basic-combination-p dest))
(aver (eq (basic-combination-kind dest) :local)))
(flood (get-node-physenv ref))))))))))
(flood ref-physenv)))
(values))
(defun analyze-indirect-lambda-vars (component)
(dolist (fun (component-lambdas component))
(let ((entry-fun (functional-entry-fun fun)))
functions are n't set to be DX even if their underlying
DX working , it would mark the XEP as being DX but not the
" real " CLAMBDA . This works because a FUNCTIONAL - ENTRY - FUN is
XEP ( for non-:TOPLEVEL functions with XEPs ) .
(unless (or (leaf-dynamic-extent fun)
(and entry-fun
(leaf-dynamic-extent entry-fun)))
(let ((closure (physenv-closure (lambda-physenv fun))))
(dolist (var closure)
(when (and (lambda-var-p var)
(lambda-var-indirect var))
(setf (lambda-var-explicit-value-cell var) t))))))))
#!-sb-fluid (declaim (inline should-exit-check-tag-p))
(defun exit-should-check-tag-p (exit)
(declare (type exit exit))
(not (zerop (policy exit check-tag-existence))))
new entry to the PHYSENV - NLX - INFO . The % NLX - ENTRY call in the
stub is passed the NLX - INFO as an argument so that the back end
The link from the EXIT block to the entry stub is changed to be a
link from the component head . Similarly , the EXIT block is linked
(defun insert-nlx-entry-stub (exit env)
(declare (type physenv env) (type exit exit))
(let* ((exit-block (node-block exit))
(next-block (first (block-succ exit-block)))
(entry (exit-entry exit))
(cleanup (entry-cleanup entry))
(info (make-nlx-info cleanup exit))
(new-block (insert-cleanup-code exit-block next-block
entry
`(%nlx-entry ',info)
cleanup))
(component (block-component new-block)))
(unlink-blocks exit-block new-block)
(link-blocks exit-block (component-tail component))
(link-blocks (component-head component) new-block)
(setf (exit-nlx-info exit) info)
(setf (nlx-info-target info) new-block)
(setf (nlx-info-safe-p info) (exit-should-check-tag-p exit))
(push info (physenv-nlx-info env))
(push info (cleanup-info cleanup))
(when (member (cleanup-kind cleanup) '(:catch :unwind-protect))
(setf (node-lexenv (block-last new-block))
(node-lexenv entry))))
(values))
EXIT into ENV . This is called for each non - local exit node , of
-- Close over the NLX - INFO in the exit environment .
be deleted ( although not removed from the DFO . ) The escape
-- Change the % NLX - ENTRY call to use the NLX lvar so that 1 ) there
(defun note-non-local-exit (env exit)
(declare (type physenv env) (type exit exit))
(let ((lvar (node-lvar exit))
(exit-fun (node-home-lambda exit))
(info (find-nlx-info exit)))
(cond (info
(let ((block (node-block exit)))
(aver (= (length (block-succ block)) 1))
(unlink-blocks block (first (block-succ block)))
(link-blocks block (component-tail (block-component block)))
(setf (exit-nlx-info exit) info)
(unless (nlx-info-safe-p info)
(setf (nlx-info-safe-p info)
(exit-should-check-tag-p exit)))))
(t
(insert-nlx-entry-stub exit env)
(setq info (exit-nlx-info exit))
(aver info)))
(close-over info (node-physenv exit) env)
(when (eq (functional-kind exit-fun) :escape)
(mapc (lambda (x)
(setf (node-derived-type x) *wild-type*))
(leaf-refs exit-fun))
(substitute-leaf (find-constant info) exit-fun))
(when lvar
(let ((node (block-last (nlx-info-target info))))
(unless (node-lvar node)
(aver (eq lvar (node-lvar exit)))
(setf (node-derived-type node) (lvar-derived-type lvar))
(add-lvar-use node lvar)))))
(values))
Iterate over the EXITs in COMPONENT , calling NOTE - NON - LOCAL - EXIT
when we find a block that ends in a non - local EXIT node . We also
ensure that all EXIT nodes are either non - local or degenerate by
calling IR1 - OPTIMIZE - EXIT on local exits . This makes life simpler
(defun find-non-local-exits (component)
(declare (type component component))
(dolist (lambda (component-lambdas component))
(dolist (entry (lambda-entries lambda))
(dolist (exit (entry-exits entry))
(let ((target-physenv (node-physenv entry)))
(if (eq (node-physenv exit) target-physenv)
(maybe-delete-exit exit)
(note-non-local-exit target-physenv exit))))))
(values))
(defun recheck-dynamic-extent-lvars (component)
(declare (type component component))
(dolist (lambda (component-lambdas component))
(loop for entry in (lambda-entries lambda)
for cleanup = (entry-cleanup entry)
do (when (eq (cleanup-kind cleanup) :dynamic-extent)
(collect ((real-dx-lvars))
(loop for what in (cleanup-info cleanup)
do (etypecase what
(cons
(let ((dx (car what))
(lvar (cdr what)))
(cond ((lvar-good-for-dx-p lvar dx component)
(dolist (cell (handle-nested-dynamic-extent-lvars
dx lvar component))
(let ((real (principal-lvar (cdr cell))))
(setf (lvar-dynamic-extent real) cleanup)
(real-dx-lvars real))))
(t
(note-no-stack-allocation lvar)
(setf (lvar-dynamic-extent lvar) nil)))))
DX closure
(let* ((call what)
(arg (first (basic-combination-args call)))
(funs (lvar-value arg))
(dx nil))
(dolist (fun funs)
(binding* ((() (leaf-dynamic-extent fun)
:exit-if-null)
(xep (functional-entry-fun fun)
:exit-if-null)
(closure (physenv-closure
(get-lambda-physenv xep))))
(cond (closure
(setq dx t))
(t
(setf (leaf-dynamic-extent fun) nil)))))
(when dx
(setf (lvar-dynamic-extent arg) cleanup)
(real-dx-lvars arg))))))
(let ((real-dx-lvars (delete-duplicates (real-dx-lvars))))
(setf (cleanup-info cleanup) real-dx-lvars)
(setf (component-dx-lvars component)
(append real-dx-lvars (component-dx-lvars component))))))))
(values))
Zoom up the cleanup nesting until we hit CLEANUP1 , accumulating
in an implicit MV - PROG1 . We have to force local call analysis of
code . ( FIXME : There 's some confusion here , left over from CMU CL
comments . CLEANUP1 is n't mentioned in the code of this function .
since the cleanup blocks are inserted at the start of the DFO , and
(defun emit-cleanups (block1 block2)
(declare (type cblock block1 block2))
(collect ((code)
(reanalyze-funs))
(let ((cleanup2 (block-start-cleanup block2)))
(do ((cleanup (block-end-cleanup block1)
(node-enclosing-cleanup (cleanup-mess-up cleanup))))
((eq cleanup cleanup2))
(let* ((node (cleanup-mess-up cleanup))
(args (when (basic-combination-p node)
(basic-combination-args node))))
(ecase (cleanup-kind cleanup)
(:special-bind
(code `(%special-unbind ',(lvar-value (first args)))))
(:catch
(code `(%catch-breakup)))
(:unwind-protect
(code `(%unwind-protect-breakup))
(let ((fun (ref-leaf (lvar-uses (second args)))))
(reanalyze-funs fun)
(code `(%funcall ,fun))))
((:block :tagbody)
(dolist (nlx (cleanup-info cleanup))
(code `(%lexical-exit-breakup ',nlx))))
(:dynamic-extent
(when (not (null (cleanup-info cleanup)))
(code `(%cleanup-point)))))))
(when (code)
(aver (not (node-tail-p (block-last block1))))
(insert-cleanup-code block1 block2
(block-last block1)
`(progn ,@(code)))
(dolist (fun (reanalyze-funs))
(locall-analyze-fun-1 fun)))))
(values))
(defun find-cleanup-points (component)
(declare (type component component))
(do-blocks (block1 component)
(let ((env1 (block-physenv block1))
(cleanup1 (block-end-cleanup block1)))
(dolist (block2 (block-succ block1))
(when (block-start block2)
(let ((env2 (block-physenv block2))
(cleanup2 (block-start-cleanup block2)))
(unless (or (not (eq env2 env1))
(eq cleanup1 cleanup2)
(and cleanup2
(eq (node-enclosing-cleanup
(cleanup-mess-up cleanup2))
cleanup1)))
(emit-cleanups block1 block2)))))))
(values))
optimizable tail - recursive uses of function result
(defun tail-annotate (component)
(declare (type component component))
(dolist (fun (component-lambdas component))
(let ((ret (lambda-return fun)))
FIXME : It might be better to add another DEFKNOWN property
non - returning tail calls causes the XEP for FOO appear in
backtrace for ( defun foo ( x ) ( error " foo ~S " x ) ) seems
less then optimal . --NS 2005 - 02 - 28
(when ret
(let ((result (return-result ret)))
(do-uses (use result)
(when (and (policy use merge-tail-calls)
(basic-combination-p use)
(immediately-used-p result use)
(or (not (eq (node-derived-type use) *empty-type*))
(eq (basic-combination-kind use) :local)))
(setf (node-tail-p use) t)))))))
(values))
|
f0178fb80fd59a9d5cf4c9c9fd19cc710b8cf91648f843af753f5e2618a300f1 | xvw/preface | monoid.mli | (** A [Monoid] is a type [t] which provides a binary associative operation
[combine] and a neutral element ([neutral]). In other words, a [Monoid] is a
{!module:Semigroup} with a neutral element. *)
(** {2 Laws}
To ensure that the derived combiners work properly, a functor should respect
these laws:
+ [combine (combine a b) c = combine a (combine b c)] (from
{!module:Semigroup})
+ [combine x neutral = combine neutral x = x] *)
(** {1 Minimal definition} *)
(** A type [t] with a neutral element. This signature is mainly used to enrich a
[Semigroup] with a neutral element. *)
module type WITH_NEUTRAL = sig
type t
* the type held by the [ Monoid ] .
val neutral : t
(** The neutral element of the [Monoid]. *)
end
module type WITH_NEUTRAL_AND_COMBINE = sig
type t
* the type held by the [ Monoid ] .
include Semigroup.CORE with type t := t
* @inline
include WITH_NEUTRAL with type t := t
* @inline
end
(** {1 Structure anatomy} *)
module type CORE = WITH_NEUTRAL_AND_COMBINE
(** Basis operations.*)
(** Additional operations. *)
module type OPERATION = sig
include Semigroup.OPERATION
* @inline
val times : int -> t -> t
* [ times n x ] apply [ combine ] on [ x ] [ n ] times . If [ n ] is lower than [ 1 ] the
function will returns [ neutral ] .
function will returns [neutral]. *)
val reduce : t list -> t
(** Reduce a [List.t] using [combine]. *)
end
(** Infix operators. *)
module type INFIX = sig
include Semigroup.INFIX
* @inline
end
(** {1 Complete API} *)
(** The complete interface of a [Monoid]. *)
module type API = sig
* { 1 Type }
type t
* the type held by the [ Monoid ] .
* { 1 Functions }
include CORE with type t := t
* @inline
include OPERATION with type t := t
* @inline
(** {1 Infix operators} *)
module Infix : INFIX with type t := t
include INFIX with type t := t
* @inline
end
* { 1 Additional references }
- { { : -4.14.0.0/docs/Data-Monoid.html }
's documentation of a Monoid }
- {{:-4.14.0.0/docs/Data-Monoid.html}
Haskell's documentation of a Monoid} *)
| null | https://raw.githubusercontent.com/xvw/preface/51892a7ce2ddfef69de963265da3617968cdb7ad/lib/preface_specs/monoid.mli | ocaml | * A [Monoid] is a type [t] which provides a binary associative operation
[combine] and a neutral element ([neutral]). In other words, a [Monoid] is a
{!module:Semigroup} with a neutral element.
* {2 Laws}
To ensure that the derived combiners work properly, a functor should respect
these laws:
+ [combine (combine a b) c = combine a (combine b c)] (from
{!module:Semigroup})
+ [combine x neutral = combine neutral x = x]
* A type [t] with a neutral element. This signature is mainly used to enrich a
[Semigroup] with a neutral element.
* The neutral element of the [Monoid].
* {1 Structure anatomy}
* Basis operations.
* Additional operations.
* Reduce a [List.t] using [combine].
* Infix operators.
* {1 Complete API}
* The complete interface of a [Monoid].
* {1 Infix operators} |
* { 1 Minimal definition }
module type WITH_NEUTRAL = sig
type t
* the type held by the [ Monoid ] .
val neutral : t
end
module type WITH_NEUTRAL_AND_COMBINE = sig
type t
* the type held by the [ Monoid ] .
include Semigroup.CORE with type t := t
* @inline
include WITH_NEUTRAL with type t := t
* @inline
end
module type CORE = WITH_NEUTRAL_AND_COMBINE
module type OPERATION = sig
include Semigroup.OPERATION
* @inline
val times : int -> t -> t
* [ times n x ] apply [ combine ] on [ x ] [ n ] times . If [ n ] is lower than [ 1 ] the
function will returns [ neutral ] .
function will returns [neutral]. *)
val reduce : t list -> t
end
module type INFIX = sig
include Semigroup.INFIX
* @inline
end
module type API = sig
* { 1 Type }
type t
* the type held by the [ Monoid ] .
* { 1 Functions }
include CORE with type t := t
* @inline
include OPERATION with type t := t
* @inline
module Infix : INFIX with type t := t
include INFIX with type t := t
* @inline
end
* { 1 Additional references }
- { { : -4.14.0.0/docs/Data-Monoid.html }
's documentation of a Monoid }
- {{:-4.14.0.0/docs/Data-Monoid.html}
Haskell's documentation of a Monoid} *)
|
516421a1568a5ebfd0812a287888fe7efc85f71112575f9c1f68fee2234c2d18 | cognitect-labs/test-runner | samples_test.clj | (ns cognitect.test-runner.samples-test
(:require [clojure.test :as t :refer [deftest is testing]]))
(deftest math-works
(testing "basic addition and subtraction"
(is (= 42 (+ 40 2)))
(is (= 42 (- 44 2)))))
(deftest ^:integration test-i
(is (= 1 1)))
| null | https://raw.githubusercontent.com/cognitect-labs/test-runner/b6b3193fcc42659d7e46ecd1884a228993441182/test/cognitect/test_runner/samples_test.clj | clojure | (ns cognitect.test-runner.samples-test
(:require [clojure.test :as t :refer [deftest is testing]]))
(deftest math-works
(testing "basic addition and subtraction"
(is (= 42 (+ 40 2)))
(is (= 42 (- 44 2)))))
(deftest ^:integration test-i
(is (= 1 1)))
| |
a47f9e4fad8f6dda475af725435934e04cf860b2c0841d74e2185b1f347f0cfa | incoherentsoftware/defect-process | AI.hs | module Enemy.All.Spear.AI
( thinkAI
) where
import Control.Monad.State (execState, modify, unless)
import Attack
import Configs
import Configs.All.Enemy
import Configs.All.Enemy.Spear
import Configs.All.Settings
import Configs.All.Settings.Debug
import Constants
import Enemy as E
import Enemy.All.Spear.AI.Run
import Enemy.All.Spear.AttackDescriptions
import Enemy.All.Spear.AttackType
import Enemy.All.Spear.Behavior
import Enemy.All.Spear.Data
import Msg
import Util
import Window.Graphics
projReleaseFrameTagName = FrameTagName "projRelease" :: FrameTagName
thinkAI :: ConfigsRead m => EnemyThinkAI SpearEnemyData m
thinkAI enemy =
let
enemyData = _data enemy
gravityVel = enemyGravityVel enemyData
enemyId = _msgId enemy
behavior = _behavior enemyData
inWallSplat = isWallSplatBehavior behavior || isWallHurtBehavior behavior
inHangtimeVel = enemyInHangtimeVel enemy (_config enemyData)
inHangtime = inHangtimeVel && case behavior of
LaunchedBehavior hangtimeTtl
| hangtimeTtl > 0.0 -> True
HurtBehavior _ AirHurt -> True
_ -> False
in do
aiEnabled <- not <$> readSettingsConfig _debug _disableAI
return . flip execState [] $ do
unless (behavior `elem` [SpawnBehavior, DeathBehavior] || inWallSplat || inHangtime) $
modify (mkMsgToEx (EnemyMsgUpdateVelocity $ vecAdd gravityVel) enemyId MsgEndOrder:)
let
runBehaviorInstr' = \cmd -> runBehaviorInstr aiEnabled cmd enemy
behaviorInstrs = thinkBehaviorInstrs enemy
modify (++ concatMap runBehaviorInstr' behaviorInstrs)
modify (++ mkEnemyUpdateDataMsgs enemy)
enemyGravityVel :: SpearEnemyData -> Vel2
enemyGravityVel enemyData = Vel2 0.0 (gravity * timeStep)
where gravity = _gravity $ _config enemyData
mkEnemyUpdateDataMsgs :: Enemy SpearEnemyData -> [Msg ThinkEnemyMsgsPhase]
mkEnemyUpdateDataMsgs enemy = mkEnemyUpdateMsg enemy $ \e ->
let eData = _data e
in e
{ _data = eData
{ _throwAtkCooldownTtl = max 0.0 (_throwAtkCooldownTtl eData - timeStep)
, _shoveAtkCooldownTtl = max 0.0 (_shoveAtkCooldownTtl eData - timeStep)
, _prevBehavior = prevBehavior
}
}
where prevBehavior = _behavior $ _data enemy
isAttackableBehavior :: Enemy SpearEnemyData -> Bool
isAttackableBehavior enemy = case _behavior (_data enemy) of
IdleBehavior _ -> True
WalkBehavior _ -> True
RetreatBehavior _ -> True
_ -> False
canAttackPlayerAtDistanceX :: Distance -> Enemy SpearEnemyData -> Bool
canAttackPlayerAtDistanceX atkDistX enemy = case enemyKnownPlayerPos enemy of
Just (Pos2 playerX _)
| isAttackableBehavior enemy ->
let
x = vecX $ E._pos enemy
dir = E._dir enemy
onGround = enemyTouchingGround enemy
facingPlayer
| playerX > x = dir == RightDir
| otherwise = dir == LeftDir
inAtkRange = abs (playerX - x) <= atkDistX
in onGround && facingPlayer && inAtkRange
_ -> False
canAttackShovePlayer :: Enemy SpearEnemyData -> Bool
canAttackShovePlayer enemy = offCooldown && canAttackPlayerAtDistanceX shoveAtkRange enemy
where
enemyData = _data enemy
offCooldown = _shoveAtkCooldownTtl enemyData <= 0.0
shoveAtkRange = _shoveAtkRange $ _spear (_config enemyData)
canAttackThrowPlayer :: Enemy SpearEnemyData -> Bool
canAttackThrowPlayer enemy = offCooldown && canAttackPlayerAtDistanceX throwAtkRange enemy
where
enemyData = _data enemy
offCooldown = _throwAtkCooldownTtl enemyData <= 0.0
throwAtkRange = _throwAtkRange $ _spear (_config enemyData)
thinkAttackBehaviorInstrs :: Enemy SpearEnemyData -> [SpearEnemyBehaviorInstr]
thinkAttackBehaviorInstrs enemy = case _attack enemy of
Just atk
| _done atk ->
let throwAtkDesc = _throw $ _attackDescs (E._data enemy)
in if
| _description atk == throwAtkDesc -> [StartRetreatInstr]
| otherwise -> [StartIdleInstr]
| projReleaseFrameTagName `isAttackFrameTag` atk && attackFrameChanged atk ->
[CreateAttackProjInstr, SetThrowAtkCooldownInstr]
_ -> []
thinkHurtBehaviorInstrs :: Secs -> HurtType -> Enemy SpearEnemyData -> [SpearEnemyBehaviorInstr]
thinkHurtBehaviorInstrs hurtTtl hurtType enemy = case hurtType of
FallenHurt
| sprFinished -> [StartFallenInstr hurtTtl']
KnockDownHurt
| sprFinished -> [StartFallenInstr hurtTtl']
WallHurt
| sprFinished -> [StartLaunchedInstr 0.0]
_
| inAir && touchingWall && abs velX >= minWallSplatImpactSpeed -> [StartWallSplatInstr]
| inAir && (sprFinished || (inHangtimeVel && hurtType == LaunchUpHurt)) -> [StartLaunchedInstr minHangtimeSecs]
| sprFinished && hurtTtl <= 0.0 -> [StartIdleInstr]
| otherwise -> [UpdateHurtInstr hurtTtl hurtType]
where
enemyData = _data enemy
cfg = _config enemyData
minWallSplatImpactSpeed = _minWallSplatImpactSpeed cfg
minHangtimeSecs = _minHangtimeSecs cfg
inAir = not $ enemyTouchingGround enemy
touchingWall = enemyTouchingWall enemy
velX = vecX $ _vel enemy
sprFinished = enemySpriteFinished enemy
hurtTtl' = hurtTtl - timeStep
inHangtimeVel = enemyInHangtimeVel enemy (_config enemyData)
thinkBehaviorInstrs :: Enemy SpearEnemyData -> [SpearEnemyBehaviorInstr]
thinkBehaviorInstrs enemy = case _behavior enemyData of
behavior
| isHealthZero health && behavior /= DeathBehavior -> [StartDeathInstr]
| canAttackShovePlayer enemy -> [StartAttackInstr ShoveAttackType]
| canAttackThrowPlayer enemy -> [StartAttackInstr ThrowAttackType]
IdleBehavior idleTtl
| not (isIdleBehavior prevBehavior) -> [StartIdleInstr]
| idleTtl > 0.0 -> [UpdateIdleInstr idleTtl]
| onGround -> [StartWalkInstr]
WalkBehavior walkTtl
| not (isWalkBehavior prevBehavior) -> [StartWalkInstr]
| walkTtl > 0.0 -> [UpdateWalkInstr walkTtl]
| otherwise -> [StartIdleInstr]
RetreatBehavior retreatTtl
| not (isRetreatBehavior prevBehavior) -> [StartRetreatInstr]
| retreatTtl > 0.0 -> [UpdateRetreatInstr retreatTtl]
| otherwise -> [StartIdleInstr]
AttackBehavior -> thinkAttackBehaviorInstrs enemy
HurtBehavior hurtTtl hurtType -> thinkHurtBehaviorInstrs hurtTtl hurtType enemy
LaunchedBehavior hangtimeTtl
| not (isLaunchedBehavior prevBehavior) -> [StartLaunchedInstr hangtimeTtl]
| hangtimeTtl > 0.0 && inHangtimeVel -> [LaunchedHangtimeInstr hangtimeTtl]
FallenBehavior fallenTtl
| not (isFallenBehavior prevBehavior) -> [StartFallenInstr fallenTtl]
| fallenTtl > 0.0 -> [UpdateFallenInstr fallenTtl]
| otherwise -> [StartGetUpInstr]
GetUpBehavior
| prevBehavior /= GetUpBehavior -> [StartGetUpInstr]
| sprFinished -> [StartIdleInstr]
WallSplatBehavior wallSplatTtl
| not (isWallSplatBehavior prevBehavior) -> [StartWallSplatInstr]
| wallSplatTtl > 0.0 -> [UpdateWallSplatInstr wallSplatTtl]
| otherwise -> [StartLaunchedInstr 0.0]
SpawnBehavior
| sprFinished -> [StartIdleInstr]
| otherwise -> [UpdateSpawnInstr]
DeathBehavior
| sprFinished -> [SetDeadInstr]
_ -> []
where
health = E._health enemy
enemyData = _data enemy
prevBehavior = _prevBehavior enemyData
sprFinished = enemySpriteFinished enemy
cfg = _config enemyData
inHangtimeVel = enemyInHangtimeVel enemy cfg
onGround = enemyTouchingGround enemy
| null | https://raw.githubusercontent.com/incoherentsoftware/defect-process/8797aad1d93bff5aadd7226c39a48f45cf76746e/src/Enemy/All/Spear/AI.hs | haskell | module Enemy.All.Spear.AI
( thinkAI
) where
import Control.Monad.State (execState, modify, unless)
import Attack
import Configs
import Configs.All.Enemy
import Configs.All.Enemy.Spear
import Configs.All.Settings
import Configs.All.Settings.Debug
import Constants
import Enemy as E
import Enemy.All.Spear.AI.Run
import Enemy.All.Spear.AttackDescriptions
import Enemy.All.Spear.AttackType
import Enemy.All.Spear.Behavior
import Enemy.All.Spear.Data
import Msg
import Util
import Window.Graphics
projReleaseFrameTagName = FrameTagName "projRelease" :: FrameTagName
thinkAI :: ConfigsRead m => EnemyThinkAI SpearEnemyData m
thinkAI enemy =
let
enemyData = _data enemy
gravityVel = enemyGravityVel enemyData
enemyId = _msgId enemy
behavior = _behavior enemyData
inWallSplat = isWallSplatBehavior behavior || isWallHurtBehavior behavior
inHangtimeVel = enemyInHangtimeVel enemy (_config enemyData)
inHangtime = inHangtimeVel && case behavior of
LaunchedBehavior hangtimeTtl
| hangtimeTtl > 0.0 -> True
HurtBehavior _ AirHurt -> True
_ -> False
in do
aiEnabled <- not <$> readSettingsConfig _debug _disableAI
return . flip execState [] $ do
unless (behavior `elem` [SpawnBehavior, DeathBehavior] || inWallSplat || inHangtime) $
modify (mkMsgToEx (EnemyMsgUpdateVelocity $ vecAdd gravityVel) enemyId MsgEndOrder:)
let
runBehaviorInstr' = \cmd -> runBehaviorInstr aiEnabled cmd enemy
behaviorInstrs = thinkBehaviorInstrs enemy
modify (++ concatMap runBehaviorInstr' behaviorInstrs)
modify (++ mkEnemyUpdateDataMsgs enemy)
enemyGravityVel :: SpearEnemyData -> Vel2
enemyGravityVel enemyData = Vel2 0.0 (gravity * timeStep)
where gravity = _gravity $ _config enemyData
mkEnemyUpdateDataMsgs :: Enemy SpearEnemyData -> [Msg ThinkEnemyMsgsPhase]
mkEnemyUpdateDataMsgs enemy = mkEnemyUpdateMsg enemy $ \e ->
let eData = _data e
in e
{ _data = eData
{ _throwAtkCooldownTtl = max 0.0 (_throwAtkCooldownTtl eData - timeStep)
, _shoveAtkCooldownTtl = max 0.0 (_shoveAtkCooldownTtl eData - timeStep)
, _prevBehavior = prevBehavior
}
}
where prevBehavior = _behavior $ _data enemy
isAttackableBehavior :: Enemy SpearEnemyData -> Bool
isAttackableBehavior enemy = case _behavior (_data enemy) of
IdleBehavior _ -> True
WalkBehavior _ -> True
RetreatBehavior _ -> True
_ -> False
canAttackPlayerAtDistanceX :: Distance -> Enemy SpearEnemyData -> Bool
canAttackPlayerAtDistanceX atkDistX enemy = case enemyKnownPlayerPos enemy of
Just (Pos2 playerX _)
| isAttackableBehavior enemy ->
let
x = vecX $ E._pos enemy
dir = E._dir enemy
onGround = enemyTouchingGround enemy
facingPlayer
| playerX > x = dir == RightDir
| otherwise = dir == LeftDir
inAtkRange = abs (playerX - x) <= atkDistX
in onGround && facingPlayer && inAtkRange
_ -> False
canAttackShovePlayer :: Enemy SpearEnemyData -> Bool
canAttackShovePlayer enemy = offCooldown && canAttackPlayerAtDistanceX shoveAtkRange enemy
where
enemyData = _data enemy
offCooldown = _shoveAtkCooldownTtl enemyData <= 0.0
shoveAtkRange = _shoveAtkRange $ _spear (_config enemyData)
canAttackThrowPlayer :: Enemy SpearEnemyData -> Bool
canAttackThrowPlayer enemy = offCooldown && canAttackPlayerAtDistanceX throwAtkRange enemy
where
enemyData = _data enemy
offCooldown = _throwAtkCooldownTtl enemyData <= 0.0
throwAtkRange = _throwAtkRange $ _spear (_config enemyData)
thinkAttackBehaviorInstrs :: Enemy SpearEnemyData -> [SpearEnemyBehaviorInstr]
thinkAttackBehaviorInstrs enemy = case _attack enemy of
Just atk
| _done atk ->
let throwAtkDesc = _throw $ _attackDescs (E._data enemy)
in if
| _description atk == throwAtkDesc -> [StartRetreatInstr]
| otherwise -> [StartIdleInstr]
| projReleaseFrameTagName `isAttackFrameTag` atk && attackFrameChanged atk ->
[CreateAttackProjInstr, SetThrowAtkCooldownInstr]
_ -> []
thinkHurtBehaviorInstrs :: Secs -> HurtType -> Enemy SpearEnemyData -> [SpearEnemyBehaviorInstr]
thinkHurtBehaviorInstrs hurtTtl hurtType enemy = case hurtType of
FallenHurt
| sprFinished -> [StartFallenInstr hurtTtl']
KnockDownHurt
| sprFinished -> [StartFallenInstr hurtTtl']
WallHurt
| sprFinished -> [StartLaunchedInstr 0.0]
_
| inAir && touchingWall && abs velX >= minWallSplatImpactSpeed -> [StartWallSplatInstr]
| inAir && (sprFinished || (inHangtimeVel && hurtType == LaunchUpHurt)) -> [StartLaunchedInstr minHangtimeSecs]
| sprFinished && hurtTtl <= 0.0 -> [StartIdleInstr]
| otherwise -> [UpdateHurtInstr hurtTtl hurtType]
where
enemyData = _data enemy
cfg = _config enemyData
minWallSplatImpactSpeed = _minWallSplatImpactSpeed cfg
minHangtimeSecs = _minHangtimeSecs cfg
inAir = not $ enemyTouchingGround enemy
touchingWall = enemyTouchingWall enemy
velX = vecX $ _vel enemy
sprFinished = enemySpriteFinished enemy
hurtTtl' = hurtTtl - timeStep
inHangtimeVel = enemyInHangtimeVel enemy (_config enemyData)
thinkBehaviorInstrs :: Enemy SpearEnemyData -> [SpearEnemyBehaviorInstr]
thinkBehaviorInstrs enemy = case _behavior enemyData of
behavior
| isHealthZero health && behavior /= DeathBehavior -> [StartDeathInstr]
| canAttackShovePlayer enemy -> [StartAttackInstr ShoveAttackType]
| canAttackThrowPlayer enemy -> [StartAttackInstr ThrowAttackType]
IdleBehavior idleTtl
| not (isIdleBehavior prevBehavior) -> [StartIdleInstr]
| idleTtl > 0.0 -> [UpdateIdleInstr idleTtl]
| onGround -> [StartWalkInstr]
WalkBehavior walkTtl
| not (isWalkBehavior prevBehavior) -> [StartWalkInstr]
| walkTtl > 0.0 -> [UpdateWalkInstr walkTtl]
| otherwise -> [StartIdleInstr]
RetreatBehavior retreatTtl
| not (isRetreatBehavior prevBehavior) -> [StartRetreatInstr]
| retreatTtl > 0.0 -> [UpdateRetreatInstr retreatTtl]
| otherwise -> [StartIdleInstr]
AttackBehavior -> thinkAttackBehaviorInstrs enemy
HurtBehavior hurtTtl hurtType -> thinkHurtBehaviorInstrs hurtTtl hurtType enemy
LaunchedBehavior hangtimeTtl
| not (isLaunchedBehavior prevBehavior) -> [StartLaunchedInstr hangtimeTtl]
| hangtimeTtl > 0.0 && inHangtimeVel -> [LaunchedHangtimeInstr hangtimeTtl]
FallenBehavior fallenTtl
| not (isFallenBehavior prevBehavior) -> [StartFallenInstr fallenTtl]
| fallenTtl > 0.0 -> [UpdateFallenInstr fallenTtl]
| otherwise -> [StartGetUpInstr]
GetUpBehavior
| prevBehavior /= GetUpBehavior -> [StartGetUpInstr]
| sprFinished -> [StartIdleInstr]
WallSplatBehavior wallSplatTtl
| not (isWallSplatBehavior prevBehavior) -> [StartWallSplatInstr]
| wallSplatTtl > 0.0 -> [UpdateWallSplatInstr wallSplatTtl]
| otherwise -> [StartLaunchedInstr 0.0]
SpawnBehavior
| sprFinished -> [StartIdleInstr]
| otherwise -> [UpdateSpawnInstr]
DeathBehavior
| sprFinished -> [SetDeadInstr]
_ -> []
where
health = E._health enemy
enemyData = _data enemy
prevBehavior = _prevBehavior enemyData
sprFinished = enemySpriteFinished enemy
cfg = _config enemyData
inHangtimeVel = enemyInHangtimeVel enemy cfg
onGround = enemyTouchingGround enemy
| |
9d50970bb4d156f79e32c5068b54f1ed7d2c61977478bdd970212b10f356e9dc | fgatherlet/cl-webdriver | base.lisp | (in-package :webdriver)
;;; ------------------------------ condition
(define-condition protocol-error (error)
((http-status :initarg :http-status :initform nil)
(oss-status :initarg :oss-status :initform nil)
(response :initarg :response :initform nil)
(response-source :initarg :response-source :initform nil)
))
(defmethod print-object ((error protocol-error) stream)
(with-slots (http-status oss-status response response-source) error
(format stream "~a" (type-of error))
(if *wd-session-w3c-p*
(when http-status
(format stream "~%(http-status:~s)" http-status))
(when oss-status
(format stream "~%(oss-status:~s)" oss-status)))
(when response
(format stream "~%(response:~s)" response))
(when response-source
(if (<= (length response-source) 512)
(format stream "~%(response-source:~s)" response-source)
(format stream
"~%(response-source:~s...)"
(subseq response-source 0 512))))))
;;; ------------------------------ wd-obj
(defun camelize (x)
"simple camelize function"
(if (keywordp x)
(with-output-to-string (*standard-output*)
(let (prev-is-dash-p)
(iterate ((char (scan (symbol-name x))))
(cond
((char= #\- char) (setq prev-is-dash-p t))
(prev-is-dash-p
(write-char (char-upcase char))
(setq prev-is-dash-p nil))
(t
(write-char (char-downcase char)))))))
x))
(defun wd-obj-p (x)
(and (consp x)
(eql :obj (car x))))
(deftype wd-obj ()
`(satisfies wd-obj-p))
(defun wd-obj (&rest rest)
"Short hand to make jsown object"
(cons :obj
(collect (mapping (((k v) (chunk 2 2 (scan rest))))
(cons (camelize k)
(camelize v)
)))))
(defun wd-ref (obj key)
;; from jsown::val-safe
(handler-case
(values (jsown:val obj (camelize key)) t)
(error () nil nil)))
(defun (setf wd-ref) (val obj key)
(setf (jsown:val obj (camelize key))
(camelize val)))
;;; ------------------------------
(defun wd-make-cookie (name value &key path domain secure expiry)
(wd-obj
:name name
:value value
:path path
:domain domain
:secure secure
:expiry expiry))
(defun wd-make-rect (&key (x 32) (y 32) (width 1024) (height 1024))
(wd-obj :x x :y y :width width :height height))
(defun wd-element-to-id (obj)
"extract element-string from the yes-element like (:obj (<element-key> . <element-string>),,,).
wd-ref obj \"ELEMENT\" is better. but geckodriver's key would be \"element-xxx\"..."
(cdr (car (cdr obj))))
;;; ------------------------------ short hand to handle capabilities.
(defun wd-make-capabilities-for-chrome (&key
(headlessp nil)
(user-data-dir nil)
(user-agent nil)
(w3c-p nil)
)
(let* ((sub-obj-args '())
(sub-obj (wd-obj))
(obj (wd-obj
:browser-name "chrome")))
(when w3c-p
(setf (wd-ref sub-obj :w3c) t))
(when headlessp
(push "--headless" sub-obj-args))
(when user-agent
(push (format nil "--user-agent='~a'" (compile-user-agent user-agent)) sub-obj-args))
(when user-data-dir
(push (format nil "--user-data-dir=~a" user-data-dir) sub-obj-args))
(setf (wd-ref sub-obj "args") sub-obj-args
(wd-ref obj "goog:chromeOptions") sub-obj)
obj))
(defun wd-make-capabilities-for-firefox (&key
(headlessp nil)
(profile nil)
(user-agent nil)
)
"you can specify profile name with :profile argument.
you can create firefox profile with `firefox-bin -CreateProfile \"profile_name profile_dir\".
ref: -US/docs/Mozilla/Command_Line_Options
"
(let* ((sub-obj-args '())
(sub-obj-prefs (wd-obj))
(sub-obj (wd-obj))
(obj (wd-obj
"browserName" "firefox")))
(when headlessp
(push "-headless" sub-obj-args))
(when profile
( push ( format nil " -P " " profile ) sub - obj - args )
(push (format nil "--profile \"~s\"" profile) sub-obj-args))
(when user-agent
( setf ( wd - ref sub - obj - prefs " general.useragent.extra.firefox " ) ( compile - user - agent user - agent ) ) )
(setf (wd-ref sub-obj-prefs "general.useragent.override") (compile-user-agent user-agent)
(wd-ref sub-obj-prefs "general.useragent.updates.enabled") t))
(setf (wd-ref sub-obj "args") sub-obj-args
(wd-ref sub-obj "prefs") sub-obj-prefs
(wd-ref obj "moz:firefoxOptions") sub-obj)
obj))
| null | https://raw.githubusercontent.com/fgatherlet/cl-webdriver/80acda412f8da718b3379c78dc5f59199836ea7d/src/base.lisp | lisp | ------------------------------ condition
------------------------------ wd-obj
from jsown::val-safe
------------------------------
------------------------------ short hand to handle capabilities. | (in-package :webdriver)
(define-condition protocol-error (error)
((http-status :initarg :http-status :initform nil)
(oss-status :initarg :oss-status :initform nil)
(response :initarg :response :initform nil)
(response-source :initarg :response-source :initform nil)
))
(defmethod print-object ((error protocol-error) stream)
(with-slots (http-status oss-status response response-source) error
(format stream "~a" (type-of error))
(if *wd-session-w3c-p*
(when http-status
(format stream "~%(http-status:~s)" http-status))
(when oss-status
(format stream "~%(oss-status:~s)" oss-status)))
(when response
(format stream "~%(response:~s)" response))
(when response-source
(if (<= (length response-source) 512)
(format stream "~%(response-source:~s)" response-source)
(format stream
"~%(response-source:~s...)"
(subseq response-source 0 512))))))
(defun camelize (x)
"simple camelize function"
(if (keywordp x)
(with-output-to-string (*standard-output*)
(let (prev-is-dash-p)
(iterate ((char (scan (symbol-name x))))
(cond
((char= #\- char) (setq prev-is-dash-p t))
(prev-is-dash-p
(write-char (char-upcase char))
(setq prev-is-dash-p nil))
(t
(write-char (char-downcase char)))))))
x))
(defun wd-obj-p (x)
(and (consp x)
(eql :obj (car x))))
(deftype wd-obj ()
`(satisfies wd-obj-p))
(defun wd-obj (&rest rest)
"Short hand to make jsown object"
(cons :obj
(collect (mapping (((k v) (chunk 2 2 (scan rest))))
(cons (camelize k)
(camelize v)
)))))
(defun wd-ref (obj key)
(handler-case
(values (jsown:val obj (camelize key)) t)
(error () nil nil)))
(defun (setf wd-ref) (val obj key)
(setf (jsown:val obj (camelize key))
(camelize val)))
(defun wd-make-cookie (name value &key path domain secure expiry)
(wd-obj
:name name
:value value
:path path
:domain domain
:secure secure
:expiry expiry))
(defun wd-make-rect (&key (x 32) (y 32) (width 1024) (height 1024))
(wd-obj :x x :y y :width width :height height))
(defun wd-element-to-id (obj)
"extract element-string from the yes-element like (:obj (<element-key> . <element-string>),,,).
wd-ref obj \"ELEMENT\" is better. but geckodriver's key would be \"element-xxx\"..."
(cdr (car (cdr obj))))
(defun wd-make-capabilities-for-chrome (&key
(headlessp nil)
(user-data-dir nil)
(user-agent nil)
(w3c-p nil)
)
(let* ((sub-obj-args '())
(sub-obj (wd-obj))
(obj (wd-obj
:browser-name "chrome")))
(when w3c-p
(setf (wd-ref sub-obj :w3c) t))
(when headlessp
(push "--headless" sub-obj-args))
(when user-agent
(push (format nil "--user-agent='~a'" (compile-user-agent user-agent)) sub-obj-args))
(when user-data-dir
(push (format nil "--user-data-dir=~a" user-data-dir) sub-obj-args))
(setf (wd-ref sub-obj "args") sub-obj-args
(wd-ref obj "goog:chromeOptions") sub-obj)
obj))
(defun wd-make-capabilities-for-firefox (&key
(headlessp nil)
(profile nil)
(user-agent nil)
)
"you can specify profile name with :profile argument.
you can create firefox profile with `firefox-bin -CreateProfile \"profile_name profile_dir\".
ref: -US/docs/Mozilla/Command_Line_Options
"
(let* ((sub-obj-args '())
(sub-obj-prefs (wd-obj))
(sub-obj (wd-obj))
(obj (wd-obj
"browserName" "firefox")))
(when headlessp
(push "-headless" sub-obj-args))
(when profile
( push ( format nil " -P " " profile ) sub - obj - args )
(push (format nil "--profile \"~s\"" profile) sub-obj-args))
(when user-agent
( setf ( wd - ref sub - obj - prefs " general.useragent.extra.firefox " ) ( compile - user - agent user - agent ) ) )
(setf (wd-ref sub-obj-prefs "general.useragent.override") (compile-user-agent user-agent)
(wd-ref sub-obj-prefs "general.useragent.updates.enabled") t))
(setf (wd-ref sub-obj "args") sub-obj-args
(wd-ref sub-obj "prefs") sub-obj-prefs
(wd-ref obj "moz:firefoxOptions") sub-obj)
obj))
|
cd0d89336f85c85fa7fa36b639593c757c66eae312e8783d03fb9fd0d35dd29f | AlexKnauth/debug | typed-with-reader.rkt | #lang debug typed/racket
(require typed/debug/report)
(module+ test
(require typed/rackunit))
(define x 2)
(define (f) #R x #R (+ x 4))
(module+ test
(define p (open-output-string))
(parameterize ([current-error-port p])
#R (f))
(check-equal? (get-output-string p) "x = 2\n(+ x 4) = 6\n(f) = 6\n"))
| null | https://raw.githubusercontent.com/AlexKnauth/debug/4f0fb0b018221ed0bb3216d580f33af389954cde/typed/debug/test/typed-with-reader.rkt | racket | #lang debug typed/racket
(require typed/debug/report)
(module+ test
(require typed/rackunit))
(define x 2)
(define (f) #R x #R (+ x 4))
(module+ test
(define p (open-output-string))
(parameterize ([current-error-port p])
#R (f))
(check-equal? (get-output-string p) "x = 2\n(+ x 4) = 6\n(f) = 6\n"))
| |
1868ae3bf23c28957bdba9b9413401741c03fc0fc3a3e615443e564e4a66c1a2 | OCamlPro/ocp-build | hello_cppo.ml |
let () =
#ifdef TOTO
print_endline "TODO is defined"
#else
print_endline "TODO is not defined"
#endif
| null | https://raw.githubusercontent.com/OCamlPro/ocp-build/56aff560bb438c12b2929feaf8379bc6f31b9840/docs/examples/03_cppo/hello_cppo.ml | ocaml |
let () =
#ifdef TOTO
print_endline "TODO is defined"
#else
print_endline "TODO is not defined"
#endif
| |
14f203a8691dee4021501cc072f7c799d5361fda10de7ab33308391ce9859818 | CyberCat-Institute/open-game-engine | ChooseReservePrice.hs | # LANGUAGE DataKinds #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TupleSections #
# LANGUAGE MultiParamTypeClasses , FlexibleInstances , FlexibleContexts , TemplateHaskell #
# LANGUAGE DeriveGeneric #
# LANGUAGE QuasiQuotes #
module Examples.Auctions.ChooseReservePrice where
import OpenGames
import OpenGames.Preprocessor
import Examples.Auctions.AuctionSupportFunctions
import Examples.Auctions.SimultaneousBidAuction
----------
-- A Model
----------
-- 0. Auxiliary function
revenueAuctioneer :: Num v => [(n, v)] -> v
revenueAuctioneer ls = sum $ fmap snd ls
---------------------
1 The actual games
-- Draws a value and creates a pair of _value_ _name_
setReservePrice kPrice kSlots = [opengame|
inputs : ;
feedback : ;
:-----:
inputs : ;
feedback : ;
operation : dependentDecision "auctioneer" (const [0,20..100]) ;
outputs : reservePrice ;
returns : revenueAuctioneer payments ;
inputs : reservePrice ;
feedback : ;
operation : bidding2ReservePrice kPrice kSlots;
outputs : payments ;
returns : ;
:-----:
outputs : ;
returns : ;
|]
-- B Analysis
----------------
-- 0. Strategies
stratAuctioneer x = pureAction x
stratTuple x = stratAuctioneer x :- truthfulStrat
---------------
1 Equilibria
1.0 Eq . game with 3 players
equilibriumSetReservePrice kPrice kSlots strat = evaluate (setReservePrice kPrice kSlots) strat void
------------------------
2 Interactive session
One object being auctioned off Once we exclude slots via lottery , and just auction off one slot , truthful bidding becomes an equilibrium
testReservePrice p = generateIsEq $ equilibriumSetReservePrice 2 1 (stratTuple p)
| null | https://raw.githubusercontent.com/CyberCat-Institute/open-game-engine/86031c42bf13178554c21cd7ab9e9d18f1ca6963/src/Examples/Auctions/ChooseReservePrice.hs | haskell | # LANGUAGE OverloadedStrings #
--------
A Model
--------
0. Auxiliary function
-------------------
Draws a value and creates a pair of _value_ _name_
---:
---:
B Analysis
--------------
0. Strategies
-------------
---------------------- | # LANGUAGE DataKinds #
# LANGUAGE TupleSections #
# LANGUAGE MultiParamTypeClasses , FlexibleInstances , FlexibleContexts , TemplateHaskell #
# LANGUAGE DeriveGeneric #
# LANGUAGE QuasiQuotes #
module Examples.Auctions.ChooseReservePrice where
import OpenGames
import OpenGames.Preprocessor
import Examples.Auctions.AuctionSupportFunctions
import Examples.Auctions.SimultaneousBidAuction
revenueAuctioneer :: Num v => [(n, v)] -> v
revenueAuctioneer ls = sum $ fmap snd ls
1 The actual games
setReservePrice kPrice kSlots = [opengame|
inputs : ;
feedback : ;
inputs : ;
feedback : ;
operation : dependentDecision "auctioneer" (const [0,20..100]) ;
outputs : reservePrice ;
returns : revenueAuctioneer payments ;
inputs : reservePrice ;
feedback : ;
operation : bidding2ReservePrice kPrice kSlots;
outputs : payments ;
returns : ;
outputs : ;
returns : ;
|]
stratAuctioneer x = pureAction x
stratTuple x = stratAuctioneer x :- truthfulStrat
1 Equilibria
1.0 Eq . game with 3 players
equilibriumSetReservePrice kPrice kSlots strat = evaluate (setReservePrice kPrice kSlots) strat void
2 Interactive session
One object being auctioned off Once we exclude slots via lottery , and just auction off one slot , truthful bidding becomes an equilibrium
testReservePrice p = generateIsEq $ equilibriumSetReservePrice 2 1 (stratTuple p)
|
4077aac511dd499646a106ad3c0b35c43d85fa7cbae10643ba041829b7682dd7 | dag/all-about-monads | example8.hs | Author :
Maintainer : < >
Time - stamp : < Fri Aug 15 17:54:59 2003 >
License : GPL
Maintainer: Jeff Newbern <>
Time-stamp: <Fri Aug 15 17:54:59 2003>
License: GPL
-}
DESCRIPTION
Example 8 - Using the ap function
Usage : Compile the code and execute the resulting program .
The first argument is an integer , the second argument is
a space - delimited list of commands to apply to the number .
The program will print a result value , or Nothing if any
command is unknown .
Try : ./ex8 7 ' double '
./ex8 10 ' halve square negate '
./ex8 0 ' incr double square '
./ex8 2 ' square square square decr '
./ex8 45 ' halve cube negate '
Example 8 - Using the ap function
Usage: Compile the code and execute the resulting program.
The first argument is an integer, the second argument is
a space-delimited list of commands to apply to the number.
The program will print a result value, or Nothing if any
command is unknown.
Try: ./ex8 7 'double'
./ex8 10 'halve square negate'
./ex8 0 'incr double square'
./ex8 2 'square square square decr'
./ex8 45 'halve cube negate'
-}
import System
import Monad
-- lookup the commands and fold ap into the command list to
-- compute a result.
main :: IO ()
main = do let fns = [("double",(2*)), ("halve",(`div`2)),
("square",(\x->x*x)), ("negate", negate),
("incr",(+1)), ("decr",(+(-1)))
]
args <- getArgs
let val = read (args!!0)
cmds = map ((flip lookup) fns) (words (args!!1))
print $ foldl (flip ap) (Just val) cmds
END OF FILE | null | https://raw.githubusercontent.com/dag/all-about-monads/f5efbe7276a090cb1a3ce7ebb97ac28b1b13572e/examples/example8.hs | haskell | lookup the commands and fold ap into the command list to
compute a result. | Author :
Maintainer : < >
Time - stamp : < Fri Aug 15 17:54:59 2003 >
License : GPL
Maintainer: Jeff Newbern <>
Time-stamp: <Fri Aug 15 17:54:59 2003>
License: GPL
-}
DESCRIPTION
Example 8 - Using the ap function
Usage : Compile the code and execute the resulting program .
The first argument is an integer , the second argument is
a space - delimited list of commands to apply to the number .
The program will print a result value , or Nothing if any
command is unknown .
Try : ./ex8 7 ' double '
./ex8 10 ' halve square negate '
./ex8 0 ' incr double square '
./ex8 2 ' square square square decr '
./ex8 45 ' halve cube negate '
Example 8 - Using the ap function
Usage: Compile the code and execute the resulting program.
The first argument is an integer, the second argument is
a space-delimited list of commands to apply to the number.
The program will print a result value, or Nothing if any
command is unknown.
Try: ./ex8 7 'double'
./ex8 10 'halve square negate'
./ex8 0 'incr double square'
./ex8 2 'square square square decr'
./ex8 45 'halve cube negate'
-}
import System
import Monad
main :: IO ()
main = do let fns = [("double",(2*)), ("halve",(`div`2)),
("square",(\x->x*x)), ("negate", negate),
("incr",(+1)), ("decr",(+(-1)))
]
args <- getArgs
let val = read (args!!0)
cmds = map ((flip lookup) fns) (words (args!!1))
print $ foldl (flip ap) (Just val) cmds
END OF FILE |
36abe89fac6e96eb72f0f571b79fed24acf9629582a637959d8f1d7ac80cc625 | metasoarous/datsys | utils.cljc | (ns dat.sys.utils)
(defn deref-or-value
[val-or-atom]
(if (satisfies? #?(:cljs IDeref :clj clojure.lang.IDeref) val-or-atom) @val-or-atom val-or-atom))
(defn deep-merge
"Like merge, but merges maps recursively."
[& maps]
(if (every? #(or (map? %) (nil? %)) maps)
(apply merge-with deep-merge maps)
(last maps)))
| null | https://raw.githubusercontent.com/metasoarous/datsys/67f506525d66887737ef058c866adb0c847be925/src/cljc/dat/sys/utils.cljc | clojure | (ns dat.sys.utils)
(defn deref-or-value
[val-or-atom]
(if (satisfies? #?(:cljs IDeref :clj clojure.lang.IDeref) val-or-atom) @val-or-atom val-or-atom))
(defn deep-merge
"Like merge, but merges maps recursively."
[& maps]
(if (every? #(or (map? %) (nil? %)) maps)
(apply merge-with deep-merge maps)
(last maps)))
| |
a4011d3c9c0b7afa2466cfe1cc971938dd3c329221e1f89d28b3e47d780152a5 | ermine/sulci | plugin_1april.ml |
* ( c ) 2008 - 2010
* (c) 2008-2010 Anastasia Gornostaeva
*)
open XMPP
open JID
open Hooks
type rate = {
lastrate: float;
lasttime: float
}
let maxrate = ref 400.0
let init_rate () =
{ lastrate = 0.0; lasttime = Unix.gettimeofday ()}
from evgs
S(n)=a*C / T+(1 - a)*S(n-1 )
S(0)=0 ; 0 < a<1
S(n)=a*C/T+(1-a)*S(n-1)
S(0)=0; 0<a<1
*)
let update_rate data size =
let a = 0.5 in
let now = Unix.gettimeofday () in
let t = now -. data.lasttime in
let s = a *. float size /. t +. (1.0 -. a) *. data.lastrate in
{lasttime = now; lastrate = s}
type user = {
reso: string;
prio: int;
group: int;
}
module User =
struct
type t = (string * string * string)
let compare = compare
end
module UserGroup = Set.Make(User)
type group = {
rate: rate;
participants: UserGroup.t
}
let ht = Hashtbl.create 100
let groups = Hashtbl.create 10
let active = ref (1, 0.0)
let new_group =
let n = ref 1 in
fun () -> incr n; !n
let log = open_out_gen [Open_append; Open_creat] 0o664 "1april.log"
let ltime () =
Strftime.strftime "%d/%m/%Y %T" ~tm:(Unix.localtime (Unix.gettimeofday ()))
let get_activeless_group exgroup =
Hashtbl.fold (fun gno group (g, min) ->
if gno <> exgroup && group.rate.lastrate < min then
(gno, group.rate.lastrate)
else
(g, min)
) groups (0, 1000.0)
let set_of_list list =
List.fold_left (fun set user -> UserGroup.add user set) UserGroup.empty list
let update_group group users =
List.iter (fun (user, server, _) ->
let data = Hashtbl.find ht (user, server) in
Hashtbl.replace ht (user, server) {data with group = group}
) users
let divide_group xmpp group =
let g = Hashtbl.find groups group in
let plen = UserGroup.cardinal g.participants in
if plen > 1 then
let participants = UserGroup.elements g.participants in
let rec aux_divide acc1 acc2 = function
| 0 -> (acc1, acc2)
| i -> aux_divide (List.tl acc1) (List.hd acc1 :: acc2) (i-1)
in
let g1, g2 = aux_divide participants [] (plen / 2) in
let newgroup = new_group () in
Hashtbl.replace groups group
{rate = init_rate (); participants = set_of_list g1};
Hashtbl.add groups newgroup
{rate = init_rate (); participants = set_of_list g2};
update_group newgroup g2;
Printf.fprintf log "%s Divide group %d -> %d (%d) and %d (%d)\n"
(ltime ()) group group (List.length g1) newgroup (List.length g2);
flush log
let union_groups xmpp group1 group2 =
let g1 = Hashtbl.find groups group1 in
let g2 = Hashtbl.find groups group2 in
Hashtbl.replace groups group1
{rate = init_rate ();
participants = UserGroup.union g1.participants g2.participants};
Hashtbl.remove groups group2;
update_group group1 (UserGroup.elements g2.participants);
Printf.fprintf log "%s Union groups %d (%d) and %d (%d)\n"
(ltime ()) group1 (UserGroup.cardinal g1.participants)
group2 (UserGroup.cardinal g2.participants);
flush log
let add_group xmpp (group:int) ((u, s, r) as user) =
if Hashtbl.length groups >= 2 then (
let activeless, _ = get_activeless_group group in
let (gno, _) = !active in
union_groups xmpp gno activeless
);
try
let g = Hashtbl.find groups group in
Hashtbl.replace groups group
{g with participants = UserGroup.add user g.participants}
with Not_found ->
Hashtbl.add groups group
{rate = init_rate ();
participants = UserGroup.add user UserGroup.empty}
let remove_group xmpp (group:int) user =
let g = Hashtbl.find groups group in
let participants = UserGroup.remove user g.participants in
if UserGroup.is_empty participants then
Hashtbl.remove groups group
else
Hashtbl.replace groups group {g with participants = participants}
let add_user xmpp (jid:jid) prio =
try
let data = Hashtbl.find ht (jid.lnode, jid.ldomain) in
if data.reso = jid.lresource then (
if data.prio <> prio then (
Hashtbl.replace ht (jid.lnode, jid.ldomain)
{data with prio = prio};
)
)
else if prio > data.prio then
let old = data.reso in
Printf.fprintf log "%s Replaced user [%d] %s@%s: (%s) -> (%s)\n"
(ltime ()) data.group jid.node jid.domain old jid.resource;
flush log;
Hashtbl.replace ht (jid.lnode, jid.ldomain)
{data with reso = jid.lresource; prio = prio};
XMPP.send_presence xmpp
~jid_to:(replace_resource jid old)
~kind:Unavailable ();
remove_group xmpp data.group (jid.lnode, jid.ldomain, old);
add_group xmpp data.group (jid.lnode, jid.ldomain, jid.lresource);
with Not_found ->
let group, _ = !active in
Printf.fprintf log "%s New participant: [%d] (%s@%s/%s)\n"
(ltime ()) group jid.node jid.domain jid.resource;
flush log;
Hashtbl.add ht (jid.lnode, jid.ldomain)
{ reso = jid.lresource; prio = prio;
group = group };
add_group xmpp group (jid.lnode, jid.ldomain, jid.lresource)
let remove_user xmpp jid =
try
let data = Hashtbl.find ht (jid.lnode, jid.ldomain) in
if data.reso = jid.lresource then (
Printf.fprintf log "%s Remove participant: [%d] (%s@%s/%s)\n"
(ltime ()) data.group jid.lnode jid.ldomain jid.lresource;
flush log;
Hashtbl.remove ht (jid.lnode, jid.ldomain);
remove_group xmpp data.group (jid.lnode, jid.ldomain, jid.lresource);
XMPP.send_presence xmpp
~jid_to:jid
~kind:Unavailable ()
)
with Not_found ->
()
let dispatch xmpp from body =
try
let data = Hashtbl.find ht (from.lnode, from.ldomain) in
let group = Hashtbl.find groups data.group in
let newrate = update_rate group.rate 100 in
Printf.fprintf log "%s Message [%d] (%g) (%s@%s/%s)\n%s\n\n"
(ltime ()) data.group group.rate.lastrate
from.node from.domain from.resource body;
flush log;
Hashtbl.replace groups data.group {group with rate = newrate};
if newrate.lastrate >= !maxrate then
divide_group xmpp data.group;
let gno, grate = !active in
if newrate.lastrate > grate then
active := (data.group, newrate.lastrate);
let group = Hashtbl.find groups data.group in
UserGroup.iter
(fun (lnode, ldomain, lresource) ->
if (lnode, ldomain, lresource) =
(from.lnode, from.ldomain, from.lresource) then
()
else
XMPP.send_message xmpp
~jid_to:(make_jid lnode ldomain lresource)
~kind:Chat
~body ()
) group.participants
with Not_found ->
Printf.fprintf log "%s Message from not-logged in user (%s@%s/%s)\n"
(ltime ()) from.lnode from.ldomain from.lresource;
flush log
let process_presence_error xmpp ?id ?jid_from ?jid_to ?lang error =
match jid_from with
| None -> ()
| Some from ->
Printf.fprintf log "%s Presence error (%s@%s/%s)\n"
(ltime ()) from.node from.domain from.resource;
flush log;
remove_user xmpp from
let process_presence xmpp env stanza hooks =
match stanza.jid_from with
| None -> do_hook xmpp env stanza hooks
| Some from ->
let () =
if from.lnode = xmpp.myjid.lnode &&
from.ldomain = xmpp.myjid.ldomain then
()
else
match stanza.content.presence_type with
| Some Subscribe ->
Printf.fprintf log "%s Subscribe (%s@%s)\n"
(ltime ()) from.node from.domain;
flush log;
XMPP.send_presence xmpp
~jid_to:(replace_resource from "")
~kind:Subscribed ();
XMPP.send_presence xmpp
~jid_to:(replace_resource from "")
~kind:Subscribe ();
| Some Subscribed ->
Printf.fprintf log "%s Unsubscribe (%s@%s)\n"
(ltime ()) from.node from.domain;
flush log;
XMPP.send_presence xmpp
~jid_to:(replace_resource from "")
~kind:Unsubscribed ()
| None ->
Printf.fprintf log
"%s Presence available (%s@%s/%s)\n"
(ltime ()) from.node from.domain from.resource;
flush log;
let prio =
match stanza.content.XMPP.priority with
| None -> 0
| Some i -> i
in
add_user xmpp from prio
| Some Unavailable ->
Printf.fprintf log
"%s Presence unavailable (%s@%s/%s)\n"
(ltime ()) from.node from.domain from.resource;
flush log;
remove_user xmpp from
| _ ->
()
in
do_hook xmpp env stanza hooks
let process_message_error xmpp ?id ?jid_from ?jid_to ?lang error =
match jid_from with
| None -> ()
| Some from ->
Printf.fprintf log "%s Presence error (%s@%s/%s)\n"
(ltime ()) from.node from.domain from.resource;
flush log;
remove_user xmpp from
let process_message xmpp env stanza hooks =
match stanza.jid_from with
| None -> do_hook xmpp env stanza hooks
| Some from ->
let () =
match stanza.content.message_type, stanza.content.body with
| Some Chat, Some body ->
if body <> "" && String.length body < 1024 then
dispatch xmpp from body
| _ ->
()
in
do_hook xmpp env stanza hooks
let plugin opts =
Hooks.add_for_token
(fun _opts xmpp ->
Hooks.add_message_hook xmpp 30 "1april" process_message;
Hooks.add_presence_hook xmpp 30 "1april" process_presence
)
let () =
Plugin.add_plugin "1april" plugin
| null | https://raw.githubusercontent.com/ermine/sulci/3ee4bd609b01e2093a6d37bf74579728d0a93b70/src/plugin_1april.ml | ocaml |
* ( c ) 2008 - 2010
* (c) 2008-2010 Anastasia Gornostaeva
*)
open XMPP
open JID
open Hooks
type rate = {
lastrate: float;
lasttime: float
}
let maxrate = ref 400.0
let init_rate () =
{ lastrate = 0.0; lasttime = Unix.gettimeofday ()}
from evgs
S(n)=a*C / T+(1 - a)*S(n-1 )
S(0)=0 ; 0 < a<1
S(n)=a*C/T+(1-a)*S(n-1)
S(0)=0; 0<a<1
*)
let update_rate data size =
let a = 0.5 in
let now = Unix.gettimeofday () in
let t = now -. data.lasttime in
let s = a *. float size /. t +. (1.0 -. a) *. data.lastrate in
{lasttime = now; lastrate = s}
type user = {
reso: string;
prio: int;
group: int;
}
module User =
struct
type t = (string * string * string)
let compare = compare
end
module UserGroup = Set.Make(User)
type group = {
rate: rate;
participants: UserGroup.t
}
let ht = Hashtbl.create 100
let groups = Hashtbl.create 10
let active = ref (1, 0.0)
let new_group =
let n = ref 1 in
fun () -> incr n; !n
let log = open_out_gen [Open_append; Open_creat] 0o664 "1april.log"
let ltime () =
Strftime.strftime "%d/%m/%Y %T" ~tm:(Unix.localtime (Unix.gettimeofday ()))
let get_activeless_group exgroup =
Hashtbl.fold (fun gno group (g, min) ->
if gno <> exgroup && group.rate.lastrate < min then
(gno, group.rate.lastrate)
else
(g, min)
) groups (0, 1000.0)
let set_of_list list =
List.fold_left (fun set user -> UserGroup.add user set) UserGroup.empty list
let update_group group users =
List.iter (fun (user, server, _) ->
let data = Hashtbl.find ht (user, server) in
Hashtbl.replace ht (user, server) {data with group = group}
) users
let divide_group xmpp group =
let g = Hashtbl.find groups group in
let plen = UserGroup.cardinal g.participants in
if plen > 1 then
let participants = UserGroup.elements g.participants in
let rec aux_divide acc1 acc2 = function
| 0 -> (acc1, acc2)
| i -> aux_divide (List.tl acc1) (List.hd acc1 :: acc2) (i-1)
in
let g1, g2 = aux_divide participants [] (plen / 2) in
let newgroup = new_group () in
Hashtbl.replace groups group
{rate = init_rate (); participants = set_of_list g1};
Hashtbl.add groups newgroup
{rate = init_rate (); participants = set_of_list g2};
update_group newgroup g2;
Printf.fprintf log "%s Divide group %d -> %d (%d) and %d (%d)\n"
(ltime ()) group group (List.length g1) newgroup (List.length g2);
flush log
let union_groups xmpp group1 group2 =
let g1 = Hashtbl.find groups group1 in
let g2 = Hashtbl.find groups group2 in
Hashtbl.replace groups group1
{rate = init_rate ();
participants = UserGroup.union g1.participants g2.participants};
Hashtbl.remove groups group2;
update_group group1 (UserGroup.elements g2.participants);
Printf.fprintf log "%s Union groups %d (%d) and %d (%d)\n"
(ltime ()) group1 (UserGroup.cardinal g1.participants)
group2 (UserGroup.cardinal g2.participants);
flush log
let add_group xmpp (group:int) ((u, s, r) as user) =
if Hashtbl.length groups >= 2 then (
let activeless, _ = get_activeless_group group in
let (gno, _) = !active in
union_groups xmpp gno activeless
);
try
let g = Hashtbl.find groups group in
Hashtbl.replace groups group
{g with participants = UserGroup.add user g.participants}
with Not_found ->
Hashtbl.add groups group
{rate = init_rate ();
participants = UserGroup.add user UserGroup.empty}
let remove_group xmpp (group:int) user =
let g = Hashtbl.find groups group in
let participants = UserGroup.remove user g.participants in
if UserGroup.is_empty participants then
Hashtbl.remove groups group
else
Hashtbl.replace groups group {g with participants = participants}
let add_user xmpp (jid:jid) prio =
try
let data = Hashtbl.find ht (jid.lnode, jid.ldomain) in
if data.reso = jid.lresource then (
if data.prio <> prio then (
Hashtbl.replace ht (jid.lnode, jid.ldomain)
{data with prio = prio};
)
)
else if prio > data.prio then
let old = data.reso in
Printf.fprintf log "%s Replaced user [%d] %s@%s: (%s) -> (%s)\n"
(ltime ()) data.group jid.node jid.domain old jid.resource;
flush log;
Hashtbl.replace ht (jid.lnode, jid.ldomain)
{data with reso = jid.lresource; prio = prio};
XMPP.send_presence xmpp
~jid_to:(replace_resource jid old)
~kind:Unavailable ();
remove_group xmpp data.group (jid.lnode, jid.ldomain, old);
add_group xmpp data.group (jid.lnode, jid.ldomain, jid.lresource);
with Not_found ->
let group, _ = !active in
Printf.fprintf log "%s New participant: [%d] (%s@%s/%s)\n"
(ltime ()) group jid.node jid.domain jid.resource;
flush log;
Hashtbl.add ht (jid.lnode, jid.ldomain)
{ reso = jid.lresource; prio = prio;
group = group };
add_group xmpp group (jid.lnode, jid.ldomain, jid.lresource)
let remove_user xmpp jid =
try
let data = Hashtbl.find ht (jid.lnode, jid.ldomain) in
if data.reso = jid.lresource then (
Printf.fprintf log "%s Remove participant: [%d] (%s@%s/%s)\n"
(ltime ()) data.group jid.lnode jid.ldomain jid.lresource;
flush log;
Hashtbl.remove ht (jid.lnode, jid.ldomain);
remove_group xmpp data.group (jid.lnode, jid.ldomain, jid.lresource);
XMPP.send_presence xmpp
~jid_to:jid
~kind:Unavailable ()
)
with Not_found ->
()
let dispatch xmpp from body =
try
let data = Hashtbl.find ht (from.lnode, from.ldomain) in
let group = Hashtbl.find groups data.group in
let newrate = update_rate group.rate 100 in
Printf.fprintf log "%s Message [%d] (%g) (%s@%s/%s)\n%s\n\n"
(ltime ()) data.group group.rate.lastrate
from.node from.domain from.resource body;
flush log;
Hashtbl.replace groups data.group {group with rate = newrate};
if newrate.lastrate >= !maxrate then
divide_group xmpp data.group;
let gno, grate = !active in
if newrate.lastrate > grate then
active := (data.group, newrate.lastrate);
let group = Hashtbl.find groups data.group in
UserGroup.iter
(fun (lnode, ldomain, lresource) ->
if (lnode, ldomain, lresource) =
(from.lnode, from.ldomain, from.lresource) then
()
else
XMPP.send_message xmpp
~jid_to:(make_jid lnode ldomain lresource)
~kind:Chat
~body ()
) group.participants
with Not_found ->
Printf.fprintf log "%s Message from not-logged in user (%s@%s/%s)\n"
(ltime ()) from.lnode from.ldomain from.lresource;
flush log
let process_presence_error xmpp ?id ?jid_from ?jid_to ?lang error =
match jid_from with
| None -> ()
| Some from ->
Printf.fprintf log "%s Presence error (%s@%s/%s)\n"
(ltime ()) from.node from.domain from.resource;
flush log;
remove_user xmpp from
let process_presence xmpp env stanza hooks =
match stanza.jid_from with
| None -> do_hook xmpp env stanza hooks
| Some from ->
let () =
if from.lnode = xmpp.myjid.lnode &&
from.ldomain = xmpp.myjid.ldomain then
()
else
match stanza.content.presence_type with
| Some Subscribe ->
Printf.fprintf log "%s Subscribe (%s@%s)\n"
(ltime ()) from.node from.domain;
flush log;
XMPP.send_presence xmpp
~jid_to:(replace_resource from "")
~kind:Subscribed ();
XMPP.send_presence xmpp
~jid_to:(replace_resource from "")
~kind:Subscribe ();
| Some Subscribed ->
Printf.fprintf log "%s Unsubscribe (%s@%s)\n"
(ltime ()) from.node from.domain;
flush log;
XMPP.send_presence xmpp
~jid_to:(replace_resource from "")
~kind:Unsubscribed ()
| None ->
Printf.fprintf log
"%s Presence available (%s@%s/%s)\n"
(ltime ()) from.node from.domain from.resource;
flush log;
let prio =
match stanza.content.XMPP.priority with
| None -> 0
| Some i -> i
in
add_user xmpp from prio
| Some Unavailable ->
Printf.fprintf log
"%s Presence unavailable (%s@%s/%s)\n"
(ltime ()) from.node from.domain from.resource;
flush log;
remove_user xmpp from
| _ ->
()
in
do_hook xmpp env stanza hooks
let process_message_error xmpp ?id ?jid_from ?jid_to ?lang error =
match jid_from with
| None -> ()
| Some from ->
Printf.fprintf log "%s Presence error (%s@%s/%s)\n"
(ltime ()) from.node from.domain from.resource;
flush log;
remove_user xmpp from
let process_message xmpp env stanza hooks =
match stanza.jid_from with
| None -> do_hook xmpp env stanza hooks
| Some from ->
let () =
match stanza.content.message_type, stanza.content.body with
| Some Chat, Some body ->
if body <> "" && String.length body < 1024 then
dispatch xmpp from body
| _ ->
()
in
do_hook xmpp env stanza hooks
let plugin opts =
Hooks.add_for_token
(fun _opts xmpp ->
Hooks.add_message_hook xmpp 30 "1april" process_message;
Hooks.add_presence_hook xmpp 30 "1april" process_presence
)
let () =
Plugin.add_plugin "1april" plugin
| |
216d56890e0421ab8e70cdaf499283e7117fbceacca2b5aa764dc13bdeeb436a | racket/rhombus-prototype | repetition.rkt | #lang racket/base
(require (for-syntax racket/base
syntax/parse/pre
"operator-parse.rkt"
enforest
enforest/property
enforest/syntax-local
enforest/operator
enforest/transformer
enforest/property
enforest/name-parse
enforest/proc-name
"introducer.rkt"
(for-syntax racket/base))
"enforest.rkt"
"expression.rkt"
"binding.rkt"
"static-info.rkt"
"ref-result-key.rkt"
"parse.rkt")
(provide define-repetition-syntax)
(begin-for-syntax
(provide (property-out repetition-prefix-operator)
(property-out repetition-infix-operator)
(struct-out repetition-prefix+infix-operator)
repetition-transformer
make-expression+repetition
repetition-as-list
repetition-as-deeper-repetition
flatten-repetition
:repetition
:repetition-info
in-repetition-space
repet-quote
identifier-repetition-use
make-repetition-info
repetition-static-info-lookup))
(begin-for-syntax
(define-syntax-class :repetition-info
#:datum-literals (parens group)
(pattern (rep-expr
name
seq-expr
bind-depth:exact-nonnegative-integer
use-depth:exact-nonnegative-integer
element-static-infos
immediate?)))
(define (make-repetition-info rep-expr name seq-expr bind-depth use-depth element-static-infos immediate?)
;; `element-static-infos` can be an identifier, which means both that static
;; information can be looked up on demand
#`(#,rep-expr #,name #,seq-expr #,bind-depth #,use-depth #,element-static-infos #,immediate?))
(define (check-repetition-result form proc)
(syntax-parse (if (syntax? form) form #'#f)
[_::repetition-info form]
[_ (raise-result-error (proc-name proc) "repetition-info?" form)]))
(property repetition-prefix-operator prefix-operator)
(property repetition-infix-operator infix-operator)
(struct repetition-prefix+infix-operator (prefix infix)
#:property prop:repetition-prefix-operator (lambda (self) (repetition-prefix+infix-operator-prefix self))
#:property prop:repetition-infix-operator (lambda (self) (repetition-prefix+infix-operator-infix self)))
(define in-repetition-space (make-interned-syntax-introducer/add 'rhombus/repet))
(define-syntax (repet-quote stx)
(syntax-case stx ()
[(_ id) #`(quote-syntax #,((make-interned-syntax-introducer 'rhombus/repet) #'id))]))
(define (identifier-repetition-use id)
(make-repetition-info id
id
id
0
0
id
#t))
(define (identifier-repetition-use/maybe id)
(make-repetition-info id
id
#`(rhombus-expression (group #,id))
0
0
id
#t))
;; Form in a repetition context:
(define-rhombus-enforest
#:syntax-class :repetition
#:prefix-more-syntax-class :prefix-op+repetition-use+tail
#:infix-more-syntax-class :infix-op+repetition-use+tail
#:desc "repetition"
#:operator-desc "repetition operator"
#:in-space in-repetition-space
#:prefix-operator-ref repetition-prefix-operator-ref
#:infix-operator-ref repetition-infix-operator-ref
#:check-result check-repetition-result
#:make-identifier-form identifier-repetition-use/maybe)
(define (make-expression+repetition name seq-expr element-static-infos
#:depth [depth 1]
#:expr-handler [expr-handler (lambda (stx fail) (fail))]
#:repet-handler [repet-handler (lambda (stx next) (next))])
(values
(expression-transformer
(lambda (stx)
(expr-handler stx (lambda ()
(syntax-parse stx
[(self . _)
(raise-syntax-error #f
"cannot use repetition binding as an expression"
#'self)])))))
(repetition-transformer
(lambda (stx)
(repet-handler stx (lambda ()
(syntax-parse stx
[(id . tail)
(values (make-repetition-info stx
name
seq-expr
depth
#'0
element-static-infos
#f)
#'tail)])))))))
(define (repetition-transformer proc)
(repetition-prefix-operator (quote-syntax ignored) '((default . stronger)) 'macro proc))
(define (repetition-static-info-lookup element-static-infos key)
(if (identifier? element-static-infos)
(syntax-local-static-info element-static-infos key)
(static-info-lookup element-static-infos key))))
(define-for-syntax repetition-as-list
(case-lambda
[(ellipses stx depth)
(repetition-as-list ellipses stx depth 0)]
[(ellipses stx depth extra-ellipses)
(syntax-parse stx
[rep::repetition
(repetition-as-list (flatten-repetition #'rep.parsed extra-ellipses) depth)]
[_
(raise-syntax-error (syntax-e ellipses)
"not preceded by a repetition"
stx)])]
[(rep-parsed depth)
(syntax-parse rep-parsed
[rep-info::repetition-info
(define want-depth (syntax-e #'rep-info.bind-depth))
(define use-depth (+ depth (syntax-e #'rep-info.use-depth)))
(unless (= use-depth want-depth)
(raise-syntax-error #f
"used with wrong repetition depth"
#'rep-info.rep-expr
#f
null
(format "\n expected: ~a\n actual: ~a"
want-depth
use-depth)))
(define infos (if (identifier? #'rep-info.element-static-infos)
(datum->syntax #f (extract-static-infos #'rep-info.element-static-infos))
#'rep-info.element-static-infos))
(if (= depth 0)
(wrap-static-info* #'rep-info.seq-expr infos)
(wrap-static-info #'rep-info.seq-expr #'#%ref-result infos))])]))
(define-for-syntax (repetition-as-deeper-repetition rep-parsed static-infos)
(syntax-parse rep-parsed
[rep-info::repetition-info
(make-repetition-info #'rep-info.rep-expr
#'rep-info.name
#'rep-info.seq-expr
#'rep-info.bind-depth
(+ 1 (syntax-e #'rep-info.use-depth))
#`((#%ref-result rep-info.element-static-infos)
. #,static-infos)
#'rep-info.immediate?)]))
(define-for-syntax (flatten-repetition rep-parsed count)
(cond
[(= 0 count) rep-parsed]
[else
(syntax-parse rep-parsed
[rep-info::repetition-info
(make-repetition-info #'rep-info.rep-expr
#'rep-info.name
#`(flatten rep-info.seq-expr #,count)
#'rep-info.bind-depth
(+ count (syntax-e #'rep-info.use-depth))
#'rep-info.element-static-infos
#f)])]))
(define (flatten lists count)
(if (zero? count)
lists
(flatten (apply append lists) (sub1 count))))
(define-syntax (define-repetition-syntax stx)
(syntax-parse stx
[(_ id:identifier rhs)
#`(define-syntax #,(in-repetition-space #'id)
rhs)]))
| null | https://raw.githubusercontent.com/racket/rhombus-prototype/4e66c1361bdde51c2df9332644800baead49e86f/rhombus/private/repetition.rkt | racket | `element-static-infos` can be an identifier, which means both that static
information can be looked up on demand
Form in a repetition context: | #lang racket/base
(require (for-syntax racket/base
syntax/parse/pre
"operator-parse.rkt"
enforest
enforest/property
enforest/syntax-local
enforest/operator
enforest/transformer
enforest/property
enforest/name-parse
enforest/proc-name
"introducer.rkt"
(for-syntax racket/base))
"enforest.rkt"
"expression.rkt"
"binding.rkt"
"static-info.rkt"
"ref-result-key.rkt"
"parse.rkt")
(provide define-repetition-syntax)
(begin-for-syntax
(provide (property-out repetition-prefix-operator)
(property-out repetition-infix-operator)
(struct-out repetition-prefix+infix-operator)
repetition-transformer
make-expression+repetition
repetition-as-list
repetition-as-deeper-repetition
flatten-repetition
:repetition
:repetition-info
in-repetition-space
repet-quote
identifier-repetition-use
make-repetition-info
repetition-static-info-lookup))
(begin-for-syntax
(define-syntax-class :repetition-info
#:datum-literals (parens group)
(pattern (rep-expr
name
seq-expr
bind-depth:exact-nonnegative-integer
use-depth:exact-nonnegative-integer
element-static-infos
immediate?)))
(define (make-repetition-info rep-expr name seq-expr bind-depth use-depth element-static-infos immediate?)
#`(#,rep-expr #,name #,seq-expr #,bind-depth #,use-depth #,element-static-infos #,immediate?))
(define (check-repetition-result form proc)
(syntax-parse (if (syntax? form) form #'#f)
[_::repetition-info form]
[_ (raise-result-error (proc-name proc) "repetition-info?" form)]))
(property repetition-prefix-operator prefix-operator)
(property repetition-infix-operator infix-operator)
(struct repetition-prefix+infix-operator (prefix infix)
#:property prop:repetition-prefix-operator (lambda (self) (repetition-prefix+infix-operator-prefix self))
#:property prop:repetition-infix-operator (lambda (self) (repetition-prefix+infix-operator-infix self)))
(define in-repetition-space (make-interned-syntax-introducer/add 'rhombus/repet))
(define-syntax (repet-quote stx)
(syntax-case stx ()
[(_ id) #`(quote-syntax #,((make-interned-syntax-introducer 'rhombus/repet) #'id))]))
(define (identifier-repetition-use id)
(make-repetition-info id
id
id
0
0
id
#t))
(define (identifier-repetition-use/maybe id)
(make-repetition-info id
id
#`(rhombus-expression (group #,id))
0
0
id
#t))
(define-rhombus-enforest
#:syntax-class :repetition
#:prefix-more-syntax-class :prefix-op+repetition-use+tail
#:infix-more-syntax-class :infix-op+repetition-use+tail
#:desc "repetition"
#:operator-desc "repetition operator"
#:in-space in-repetition-space
#:prefix-operator-ref repetition-prefix-operator-ref
#:infix-operator-ref repetition-infix-operator-ref
#:check-result check-repetition-result
#:make-identifier-form identifier-repetition-use/maybe)
(define (make-expression+repetition name seq-expr element-static-infos
#:depth [depth 1]
#:expr-handler [expr-handler (lambda (stx fail) (fail))]
#:repet-handler [repet-handler (lambda (stx next) (next))])
(values
(expression-transformer
(lambda (stx)
(expr-handler stx (lambda ()
(syntax-parse stx
[(self . _)
(raise-syntax-error #f
"cannot use repetition binding as an expression"
#'self)])))))
(repetition-transformer
(lambda (stx)
(repet-handler stx (lambda ()
(syntax-parse stx
[(id . tail)
(values (make-repetition-info stx
name
seq-expr
depth
#'0
element-static-infos
#f)
#'tail)])))))))
(define (repetition-transformer proc)
(repetition-prefix-operator (quote-syntax ignored) '((default . stronger)) 'macro proc))
(define (repetition-static-info-lookup element-static-infos key)
(if (identifier? element-static-infos)
(syntax-local-static-info element-static-infos key)
(static-info-lookup element-static-infos key))))
(define-for-syntax repetition-as-list
(case-lambda
[(ellipses stx depth)
(repetition-as-list ellipses stx depth 0)]
[(ellipses stx depth extra-ellipses)
(syntax-parse stx
[rep::repetition
(repetition-as-list (flatten-repetition #'rep.parsed extra-ellipses) depth)]
[_
(raise-syntax-error (syntax-e ellipses)
"not preceded by a repetition"
stx)])]
[(rep-parsed depth)
(syntax-parse rep-parsed
[rep-info::repetition-info
(define want-depth (syntax-e #'rep-info.bind-depth))
(define use-depth (+ depth (syntax-e #'rep-info.use-depth)))
(unless (= use-depth want-depth)
(raise-syntax-error #f
"used with wrong repetition depth"
#'rep-info.rep-expr
#f
null
(format "\n expected: ~a\n actual: ~a"
want-depth
use-depth)))
(define infos (if (identifier? #'rep-info.element-static-infos)
(datum->syntax #f (extract-static-infos #'rep-info.element-static-infos))
#'rep-info.element-static-infos))
(if (= depth 0)
(wrap-static-info* #'rep-info.seq-expr infos)
(wrap-static-info #'rep-info.seq-expr #'#%ref-result infos))])]))
(define-for-syntax (repetition-as-deeper-repetition rep-parsed static-infos)
(syntax-parse rep-parsed
[rep-info::repetition-info
(make-repetition-info #'rep-info.rep-expr
#'rep-info.name
#'rep-info.seq-expr
#'rep-info.bind-depth
(+ 1 (syntax-e #'rep-info.use-depth))
#`((#%ref-result rep-info.element-static-infos)
. #,static-infos)
#'rep-info.immediate?)]))
(define-for-syntax (flatten-repetition rep-parsed count)
(cond
[(= 0 count) rep-parsed]
[else
(syntax-parse rep-parsed
[rep-info::repetition-info
(make-repetition-info #'rep-info.rep-expr
#'rep-info.name
#`(flatten rep-info.seq-expr #,count)
#'rep-info.bind-depth
(+ count (syntax-e #'rep-info.use-depth))
#'rep-info.element-static-infos
#f)])]))
(define (flatten lists count)
(if (zero? count)
lists
(flatten (apply append lists) (sub1 count))))
(define-syntax (define-repetition-syntax stx)
(syntax-parse stx
[(_ id:identifier rhs)
#`(define-syntax #,(in-repetition-space #'id)
rhs)]))
|
142074237f48b381baf24ccfc93a7a7805c15f9e96c4d26ab128d1893a8d6d53 | Psi-Prod/ppx_system | system.ml | type t = Darwin | FreeBSD | NetBSD | OpenBSD | Unix | Win32
let to_string = function
| Darwin -> "Darwin"
| FreeBSD -> "FreeBSD"
| NetBSD -> "NetBSD"
| OpenBSD -> "OpenBSD"
| Unix -> "Unix"
| Win32 -> "Win32"
let get () =
if Sys.win32 then Ok Win32
else
match Uname.sysname () with
| "Darwin" -> Ok Darwin
| "FreeBSD" -> Ok FreeBSD
| "Linux" -> Ok Unix
| "NetBSD" -> Ok NetBSD
| "OpenBSD" -> Ok OpenBSD
| sys_name -> Error sys_name
| null | https://raw.githubusercontent.com/Psi-Prod/ppx_system/ed75ae15f07ee0002145671821c9fc2288e04ba8/lib/system.ml | ocaml | type t = Darwin | FreeBSD | NetBSD | OpenBSD | Unix | Win32
let to_string = function
| Darwin -> "Darwin"
| FreeBSD -> "FreeBSD"
| NetBSD -> "NetBSD"
| OpenBSD -> "OpenBSD"
| Unix -> "Unix"
| Win32 -> "Win32"
let get () =
if Sys.win32 then Ok Win32
else
match Uname.sysname () with
| "Darwin" -> Ok Darwin
| "FreeBSD" -> Ok FreeBSD
| "Linux" -> Ok Unix
| "NetBSD" -> Ok NetBSD
| "OpenBSD" -> Ok OpenBSD
| sys_name -> Error sys_name
| |
2cbfa24a50a7a2b60febb3ee8d7ed709b587cfcc7c3410c9325c0e4fc5d42643 | snapframework/snap-server | TestSuite.hs | # LANGUAGE CPP #
# LANGUAGE ScopedTypeVariables #
module Main where
import Control.Concurrent (killThread, takeMVar)
import qualified Control.Exception as E
import Control.Monad (liftM)
import Data.Maybe (maybeToList)
#if !MIN_VERSION_network(2,7,0)
import Network (withSocketsDo)
#endif
import System.Environment
import Test.Framework (defaultMain, testGroup)
------------------------------------------------------------------------------
import qualified Snap.Internal.Http.Server.TLS as TLS
------------------------------------------------------------------------------
import qualified Snap.Internal.Http.Server.Address.Tests as Address
import qualified Snap.Internal.Http.Server.Parser.Tests as Parser
import qualified Snap.Internal.Http.Server.Session.Tests as Session
import qualified Snap.Internal.Http.Server.Socket.Tests as Socket
import qualified Snap.Internal.Http.Server.TimeoutManager.Tests as TimeoutManager
import Snap.Test.Common (eatException)
#ifdef HAS_SENDFILE
import qualified System.SendFile.Tests as SendFile
#endif
import qualified Test.Blackbox
#if MIN_VERSION_network(2,7,0)
-- network >= 2.7 dropped 'withSocketsDo' from the Network module, so
-- provide a no-op shim with the same type.
withSocketsDo :: IO a -> IO a
withSocketsDo = id
#endif
------------------------------------------------------------------------------
-- | Test-suite entry point: start the blackbox test servers, run the
-- unit-test groups together with the blackbox groups under
-- test-framework's 'defaultMain', and tear the servers down afterwards
-- ('E.bracket' guarantees cleanup even if the tests throw).
main :: IO ()
main = withSocketsDo $ TLS.withTLS $ eatException $
E.bracket (Test.Blackbox.startTestServers)
cleanup
(\tinfos -> do
let blackboxTests = bbox tinfos
defaultMain $ tests ++ blackboxTests
)
where
-- Kill each backend's server thread, then wait on its MVar so we do
-- not exit while a server is mid-shutdown.  The third tuple element
-- is the optional (SSL) backend.
cleanup (x, y, m) = do
let backends = [x, y] ++ maybeToList m
mapM_ (killThread . (\(a, _, _) -> a)) backends
mapM_ (takeMVar . (\(_, _, a) -> a)) backends
-- Blackbox groups built from the ports carried in the server infos;
-- the SSL tests receive 'Nothing' when no SSL backend was started.
bbox ((_, port, _), (_, port2, _), m) =
[ testGroup "Blackbox" $
concat [ Test.Blackbox.tests port
, Test.Blackbox.haTests port2
, Test.Blackbox.ssltests $ fmap (\(_,x,_) -> x) m
]
]
-- Unit-test groups; SendFile only when compiled in (HAS_SENDFILE).
tests = [ testGroup "Address" Address.tests
, testGroup "Parser" Parser.tests
#ifdef HAS_SENDFILE
, testGroup "SendFile" SendFile.tests
#endif
, testGroup "Server" Session.tests
, testGroup "Socket" Socket.tests
, testGroup "TimeoutManager" TimeoutManager.tests
]
------------------------------------------------------------------------------
-- | Derive the SSL port for a base port: base + 100 when built with
-- OpenSSL support, otherwise no SSL port at all.
sslPort :: Int -> Maybe Int
#ifdef OPENSSL
sslPort sp = Just (sp + 100)
#else
sslPort _ = Nothing
#endif
-- | Pair a base port with its (optional) derived SSL port.
ports :: Int -> (Int, Maybe Int)
ports sp = (sp, sslPort sp)
-- | Read the test servers' base port from the STARTPORT environment
-- variable, falling back to 8111 when the variable is unset or its
-- value does not parse as an Int (any exception raised by the lookup
-- or by forcing the parse is swallowed by the handler).
getStartPort :: IO Int
getStartPort = E.catch readFromEnv fallback
  where
    readFromEnv = getEnv "STARTPORT" >>= E.evaluate . read
    fallback (_ :: E.SomeException) = return 8111
| null | https://raw.githubusercontent.com/snapframework/snap-server/f9c6e00630a8a78705aceafa0ac046ae70e1310e/test/TestSuite.hs | haskell | ----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
---------------------------------------------------------------------------- | # LANGUAGE CPP #
# LANGUAGE ScopedTypeVariables #
module Main where
import Control.Concurrent (killThread, takeMVar)
import qualified Control.Exception as E
import Control.Monad (liftM)
import Data.Maybe (maybeToList)
#if !MIN_VERSION_network(2,7,0)
import Network (withSocketsDo)
#endif
import System.Environment
import Test.Framework (defaultMain, testGroup)
import qualified Snap.Internal.Http.Server.TLS as TLS
import qualified Snap.Internal.Http.Server.Address.Tests as Address
import qualified Snap.Internal.Http.Server.Parser.Tests as Parser
import qualified Snap.Internal.Http.Server.Session.Tests as Session
import qualified Snap.Internal.Http.Server.Socket.Tests as Socket
import qualified Snap.Internal.Http.Server.TimeoutManager.Tests as TimeoutManager
import Snap.Test.Common (eatException)
#ifdef HAS_SENDFILE
import qualified System.SendFile.Tests as SendFile
#endif
import qualified Test.Blackbox
#if MIN_VERSION_network(2,7,0)
withSocketsDo :: IO a -> IO a
withSocketsDo = id
#endif
main :: IO ()
main = withSocketsDo $ TLS.withTLS $ eatException $
E.bracket (Test.Blackbox.startTestServers)
cleanup
(\tinfos -> do
let blackboxTests = bbox tinfos
defaultMain $ tests ++ blackboxTests
)
where
cleanup (x, y, m) = do
let backends = [x, y] ++ maybeToList m
mapM_ (killThread . (\(a, _, _) -> a)) backends
mapM_ (takeMVar . (\(_, _, a) -> a)) backends
bbox ((_, port, _), (_, port2, _), m) =
[ testGroup "Blackbox" $
concat [ Test.Blackbox.tests port
, Test.Blackbox.haTests port2
, Test.Blackbox.ssltests $ fmap (\(_,x,_) -> x) m
]
]
tests = [ testGroup "Address" Address.tests
, testGroup "Parser" Parser.tests
#ifdef HAS_SENDFILE
, testGroup "SendFile" SendFile.tests
#endif
, testGroup "Server" Session.tests
, testGroup "Socket" Socket.tests
, testGroup "TimeoutManager" TimeoutManager.tests
]
sslPort :: Int -> Maybe Int
#ifdef OPENSSL
sslPort sp = Just (sp + 100)
#else
sslPort _ = Nothing
#endif
ports :: Int -> (Int, Maybe Int)
ports sp = (sp, sslPort sp)
getStartPort :: IO Int
getStartPort = (liftM read (getEnv "STARTPORT") >>= E.evaluate)
`E.catch` \(_::E.SomeException) -> return 8111
|
8d9e774824e13673017c054a3df1436951a3ba651e72ae7da08c59ee89865d1f | oblivia-simplex/roper | ropush-vars.lisp | (in-package :ropush)
;; some dynamic defs, to prevent unbound variable errors
;; Interpreter plumbing: these specials are rebound dynamically at run
;; time elsewhere in the system (most start unbound on purpose).
(defvar $$push nil)
(defvar $$pop)
(defvar $$peek)
(defvar $$depth)
(defvar $$stack-of)
;(defun $pop ())
;(defun $push ())
;(defun $depth ())
;(defun $peek ())
;; Per-run machine state: stacks, step counter, emulator handle, halt
;; flag and active type list.
(defvar $stacks)
(defvar $counter)
(defvar $unicorn)
(defvar $halt)
(defvar $types)
;; A byte vector, as produced/consumed by the :bytes stack.
(deftype bytes () '(vector (unsigned-byte 8)))
;; Toggle the :debugging feature flag, announcing the change.
;; (debugging t) switches it on; (debugging) switches it off when the
;; flag is currently set.  NOTE(review): when called with no argument
;; while the flag is absent, the else-branch still turns debugging ON
;; -- confirm this asymmetry is intended.
(defun debugging (&optional on)
(if (and (not on) (member :debugging *features*))
(progn
(format t "[-] DEBUGGING OFF~%")
(setq *features* (remove :debugging *features*)))
(progn
(format t "[+] DEBUGGING ON~%")
(if (not (member :debugging *features*))
(push :debugging *features*)))))
;; Debugging is on by default at load time.
(debugging t)
(export '*operations*)
;; Registry of push operations, populated elsewhere.
(defvar *operations* ())
;; the list provided in ropush-params is meant to be editable.
;; this list furnishes the defaults.
(export '*stack-types*)
(defparameter *stack-types* '(:input!
:output!
:exec
:code
:womb
:gadget
:bool
:int
:ratio
:bytes))
(export '*immutable-stacks*)
;; Stacks that operations may read but never pop/overwrite.
(defparameter *immutable-stacks* '(:input!))
;; Stacks that hold arbitrary (untyped) code-like items.
(defparameter *untyped-stacks* '(:exec
:code
:womb))
;;;;
inspired by spector 's autoconstructive systems ( pushpop , etc ) ,
;; the :womb stack can be used as both a secondary exec stack, to
;; facilitate the rearrangement of instructions, and also as a
;; reproductive organ. some potential reproductive strategies:
;; * clone original exec, pre-run, with mutations
;; * clone womb stack
;; * crossover exec x exec
;; * crossover womb x womb
;; * crossover womb x exec
;; The last seems like the most fruitful, a fortiori. Simple womb
;; cloning could risk having creature sizes dwindle, or explode,
;; depending on the early frequency of dup-like instructions.
| null | https://raw.githubusercontent.com/oblivia-simplex/roper/7714ccf677359126ca82446843030fac89c6655a/lisp/roper/ropush-vars.lisp | lisp | some dynamic defs, to prevent unbound variable errors
(defun $pop ())
(defun $push ())
(defun $depth ())
(defun $peek ())
the list provided in ropush-params is meant to be editable.
this list furnishes the defaults.
the :womb stack can be used as both a secondary exec stack, to
facilitate the rearrangement of instructions, and also as a
reproductive organ. some potential reproductive strategies:
* clone original exec, pre-run, with mutations
* clone womb stack
* crossover exec x exec
* crossover womb x womb
* crossover womb x exec
The last seems like the most fruitful, a fortiori. Simple womb
cloning could risk having creature sizes dwindle, or explode,
depending on the early frequency of dup-like instructions. | (in-package :ropush)
(defvar $$push nil)
(defvar $$pop)
(defvar $$peek)
(defvar $$depth)
(defvar $$stack-of)
(defvar $stacks)
(defvar $counter)
(defvar $unicorn)
(defvar $halt)
(defvar $types)
(deftype bytes () '(vector (unsigned-byte 8)))
(defun debugging (&optional on)
(if (and (not on) (member :debugging *features*))
(progn
(format t "[-] DEBUGGING OFF~%")
(setq *features* (remove :debugging *features*)))
(progn
(format t "[+] DEBUGGING ON~%")
(if (not (member :debugging *features*))
(push :debugging *features*)))))
(debugging t)
(export '*operations*)
(defvar *operations* ())
(export '*stack-types*)
(defparameter *stack-types* '(:input!
:output!
:exec
:code
:womb
:gadget
:bool
:int
:ratio
:bytes))
(export '*immutable-stacks*)
(defparameter *immutable-stacks* '(:input!))
(defparameter *untyped-stacks* '(:exec
:code
:womb))
inspired by spector 's autoconstructive systems ( pushpop , etc ) ,
|
888ad6049b7bf9ad3dc234333be75420c41d81f74dbda52455305199a1478aaa | startalkIM/ejabberd | mod_private_riak.erl | %%%-------------------------------------------------------------------
@author < >
( C ) 2016 ,
%%% @doc
%%%
%%% @end
Created : 13 Apr 2016 by < >
%%%-------------------------------------------------------------------
-module(mod_private_riak).
-behaviour(mod_private).
%% API
-export([init/2, set_data/3, get_data/3, get_all_data/2, remove_user/2,
import/2]).
-include("jlib.hrl").
-include("mod_private.hrl").
%%%===================================================================
%%% API
%%%===================================================================
%% @doc Backend initialization hook; the Riak backend needs no setup.
init(_Host, _Opts) ->
ok.
%% @doc Store each {XMLNS, El} pair as its own private_storage record,
%% keyed by {User, Server, XMLNS} and secondary-indexed ('2i') by
%% {User, Server} so get_all_data/2 can fetch every namespace for a
%% user in one index query.  Always returns {atomic, ok}; individual
%% put results are not inspected.
set_data(LUser, LServer, Data) ->
lists:foreach(
fun({XMLNS, El}) ->
ejabberd_riak:put(#private_storage{usns = {LUser, LServer, XMLNS},
xml = El},
private_storage_schema(),
[{'2i', [{<<"us">>, {LUser, LServer}}]}])
end, Data),
{atomic, ok}.
%% @doc Fetch the private-storage XML element stored for this
%% user/server under the given namespace.  Returns {ok, El} on a hit
%% and the atom 'error' on a miss or any backend failure.
get_data(LUser, LServer, XMLNS) ->
    Key = {LUser, LServer, XMLNS},
    Result = ejabberd_riak:get(private_storage, private_storage_schema(), Key),
    case Result of
        {ok, #private_storage{xml = El}} ->
            {ok, El};
        _ ->
            error
    end.
%% @doc Return every stored XML element for the user across all
%% namespaces, via the {User, Server} secondary index.  Any backend
%% failure yields the empty list.
get_all_data(LUser, LServer) ->
case ejabberd_riak:get_by_index(
private_storage, private_storage_schema(),
<<"us">>, {LUser, LServer}) of
{ok, Res} ->
[El || #private_storage{xml = El} <- Res];
_ ->
[]
end.
%% @doc Delete all private storage for the user (all namespaces),
%% wrapping the backend result in an mnesia-style {atomic, _} tuple.
remove_user(LUser, LServer) ->
{atomic, ejabberd_riak:delete_by_index(private_storage,
<<"us">>, {LUser, LServer})}.
%% @doc Import one already-built private_storage record (e.g. from a
%% migration), re-deriving the {User, Server} index from its key.
import(_LServer, #private_storage{usns = {LUser, LServer, _}} = PS) ->
ejabberd_riak:put(PS, private_storage_schema(),
[{'2i', [{<<"us">>, {LUser, LServer}}]}]).
%%%===================================================================
Internal functions
%%%===================================================================
%% @doc Riak schema descriptor for private_storage records: the record
%% field list plus a default instance, as the ejabberd_riak API expects.
private_storage_schema() ->
{record_info(fields, private_storage), #private_storage{}}.
| null | https://raw.githubusercontent.com/startalkIM/ejabberd/718d86cd2f5681099fad14dab5f2541ddc612c8b/src/mod_private_riak.erl | erlang | -------------------------------------------------------------------
@doc
@end
-------------------------------------------------------------------
API
===================================================================
API
===================================================================
===================================================================
=================================================================== | @author < >
( C ) 2016 ,
Created : 13 Apr 2016 by < >
-module(mod_private_riak).
-behaviour(mod_private).
-export([init/2, set_data/3, get_data/3, get_all_data/2, remove_user/2,
import/2]).
-include("jlib.hrl").
-include("mod_private.hrl").
init(_Host, _Opts) ->
ok.
set_data(LUser, LServer, Data) ->
lists:foreach(
fun({XMLNS, El}) ->
ejabberd_riak:put(#private_storage{usns = {LUser, LServer, XMLNS},
xml = El},
private_storage_schema(),
[{'2i', [{<<"us">>, {LUser, LServer}}]}])
end, Data),
{atomic, ok}.
get_data(LUser, LServer, XMLNS) ->
case ejabberd_riak:get(private_storage, private_storage_schema(),
{LUser, LServer, XMLNS}) of
{ok, #private_storage{xml = El}} ->
{ok, El};
_ ->
error
end.
get_all_data(LUser, LServer) ->
case ejabberd_riak:get_by_index(
private_storage, private_storage_schema(),
<<"us">>, {LUser, LServer}) of
{ok, Res} ->
[El || #private_storage{xml = El} <- Res];
_ ->
[]
end.
remove_user(LUser, LServer) ->
{atomic, ejabberd_riak:delete_by_index(private_storage,
<<"us">>, {LUser, LServer})}.
import(_LServer, #private_storage{usns = {LUser, LServer, _}} = PS) ->
ejabberd_riak:put(PS, private_storage_schema(),
[{'2i', [{<<"us">>, {LUser, LServer}}]}]).
Internal functions
private_storage_schema() ->
{record_info(fields, private_storage), #private_storage{}}.
|
be1549e482edf4de9beb5902446ca71651923f01f851d47816dbc7b851d3c527 | hyperfiddle/electric | monad_state_joinr.clj | (ns dustin.y2020.monad-state-joinr)
;; State-threading macro.  `bindings` is a flat vector of
;; [name (op args...)] pairs; each pair expands into a let step that
;; calls (op current-state args...), expects an [new-state result]
;; vector back, stores new-state into the `state-symb` atom, and binds
;; result to `name`.  The whole form evaluates to
;; [final-state body...].
(defmacro run-state [init state-symb bindings & body]
(let [steps
(->> (for [[l expr] (partition 2 bindings)]
[l `(let [state# (deref ~state-symb)
[new-state# res#] (~(first expr) state# ~@(rest expr))
~'_ (reset! ~state-symb new-state#)]
res#)])
(reduce (fn [acc [l r]]
(conj acc l r)) []))]
`(let [~state-symb (atom ~init)
~@steps]
[(deref ~state-symb) ~@body])))
;; State op: leave the state untouched and produce a random integer in
;; [0, n).  Nondeterministic by design (rand-int).
(defn random [state n]
[state (rand-int n)])
;; State op: add n to the value stored under id, returning
;; [updated-state new-value].
(defn increment [state id n]
  (let [state' (update state id + n)]
    [state' (get state' id)]))
;; State op: subtract n from the value stored under id, returning
;; [updated-state new-value].
(defn decrement [state id n]
  (let [state' (update state id - n)]
    [state' (get state' id)]))
;; Move `amount` between the entries keyed `from` and `to` in the
;; balances map, threading state through run-state; evaluates to
;; [new-state amount].
(defn transfer [init-state from to amount]
(run-state init-state state
[l (decrement from amount)
r (increment to amount)]
amount))
;; Pick a random amount below 10 and transfer it, reporting
;; [final-state summary-map].  Nondeterministic via `random`.
(defn demo [init-state from to]
(run-state init-state state
[amount (random 10)
res (transfer from to amount)]
{:transferred res :from from :to to}))
(comment
(demo {:a 10 :b 20} :a :b)
:= [{:a 7, :b 23} {:transferred 3, :from :a, :to :b}]
(demo {:a 10 :b 20} :a :b)
:= [{:a 8, :b 22} {:transferred 2, :from :a, :to :b}]
(demo {:a 10 :b 20} :a :b)
:= [{:a 10, :b 20} {:transferred 0, :from :a, :to :b}]
(demo {:a 10 :b 20} :a :b)
:= [{:a 10, :b 20} {:transferred 0, :from :a, :to :b}]
)
| null | https://raw.githubusercontent.com/hyperfiddle/electric/1c6c3891cbf13123fef8d33e6555d300f0dac134/scratch/dustin/y2020/monad_state_joinr.clj | clojure | (ns dustin.y2020.monad-state-joinr)
(defmacro run-state [init state-symb bindings & body]
(let [steps
(->> (for [[l expr] (partition 2 bindings)]
[l `(let [state# (deref ~state-symb)
[new-state# res#] (~(first expr) state# ~@(rest expr))
~'_ (reset! ~state-symb new-state#)]
res#)])
(reduce (fn [acc [l r]]
(conj acc l r)) []))]
`(let [~state-symb (atom ~init)
~@steps]
[(deref ~state-symb) ~@body])))
(defn random [state n]
[state (rand-int n)])
(defn increment [state id n]
(let [res (+ (get state id) n)]
[(assoc state id res) res]))
(defn decrement [state id n]
(let [res (- (get state id) n)]
[(assoc state id res) res]))
(defn transfer [init-state from to amount]
(run-state init-state state
[l (decrement from amount)
r (increment to amount)]
amount))
(defn demo [init-state from to]
(run-state init-state state
[amount (random 10)
res (transfer from to amount)]
{:transferred res :from from :to to}))
(comment
(demo {:a 10 :b 20} :a :b)
:= [{:a 7, :b 23} {:transferred 3, :from :a, :to :b}]
(demo {:a 10 :b 20} :a :b)
:= [{:a 8, :b 22} {:transferred 2, :from :a, :to :b}]
(demo {:a 10 :b 20} :a :b)
:= [{:a 10, :b 20} {:transferred 0, :from :a, :to :b}]
(demo {:a 10 :b 20} :a :b)
:= [{:a 10, :b 20} {:transferred 0, :from :a, :to :b}]
)
| |
40d463a8affd9cca4341b2163b30d2940e0c658e008378ffdcac29fbc1b55824 | ekmett/coda | Trie.hs | {-# language CPP #-}
# language GADTs #
# language RankNTypes #
# language LambdaCase #
-- |
--
-- Well-Quasi-Orders using tries
module Termination.Trie
( Trie(..)
, runTrie
, finite
, finiteOrd
, finiteHash
, history
) where
import Data.Functor
import Data.Functor.Identity
import Data.Functor.Compose
import Data.Functor.Contravariant
import Data.Functor.Contravariant.Divisible
import Data.Functor.Product
import Data.Coerce
import Data.Map as Map
import Data.Hashable
import Data.HashMap.Strict as HashMap
import Data.Maybe
import Data.Proxy
#if __GLASGOW_HASKELL__ < 804
import Data.Semigroup
#endif
import Data.Void
import Termination.History
-- | A well quasi-order: A reflexive, transitive relation such that
-- every infinite set xs has i < j such that (xs!i) <= (xs!j)
-- encoded as a procedure for maintaining 'xs' in an easily testable form
-- This is an experiment to see if we can use a trie-based encoding.
-- How to handle orders?
-- 'Trie' packs (1) a builder seeding an empty store from a default
-- cell and (2) an insertion/lookup step: given an element, a default
-- cell, and a cell-update continuation, it updates the store or
-- returns Nothing to signal "stop".
data Trie a where Trie :: (forall x. x -> f x) -> (forall x. a -> x -> (x -> Maybe x) -> f x -> Maybe (f x)) -> Trie a
-- Contramapping just pre-composes the element projection.
instance Contravariant Trie where
contramap f (Trie h g) = Trie h (g . f)
-- 'divide' nests the two component stores via 'Compose', feeding the
-- left component a continuation that recurses into the right;
-- 'conquer' is the trivial one-cell trie.
instance Divisible Trie where
conquer = Trie Identity (\_ _ -> coerce :: (x -> Maybe x) -> Identity x -> Maybe (Identity x))
divide f (Trie p g) (Trie q h) = Trie (Compose . p . q) $ \a d k -> case f a of
(b, c) -> fmap Compose . g b (q d) (h c d k) . getCompose
-- 'choose' keeps both component stores side by side ('Pair') and only
-- touches the side selected by the discriminator; 'lose' is the
-- impossible trie.
instance Decidable Trie where
lose f = Trie (const Proxy) (absurd . f)
choose f (Trie p g) (Trie q h) = Trie (\a -> Pair (p a) (q a)) $ \a d k (Pair x y) -> case f a of
Left b -> (`Pair` y) <$> g b d k x
Right c -> Pair x <$> h c d k y
-- '<>' runs both tries over nested stores, like 'divide' with the
-- element duplicated into both components.
instance Semigroup (Trie a) where
Trie p g <> Trie q h = Trie (Compose . p . q) $ \a d k -> fmap Compose . g a (q d) (h a d k) . getCompose
instance Monoid (Trie a) where
mempty = conquer
mappend = (<>)
-- | Cell-update step used with 'runTrie'/'history': 'Nothing' means
-- the element was already present (stop); 'Just True' records a first
-- sighting.
seen :: Bool -> Maybe Bool
seen alreadyThere
  | alreadyThere = Nothing
  | otherwise    = Just True
-- | Insert 'a' into a fresh store, then attempt to insert 'b'; the
-- result is 'True' iff the second insertion still succeeds, i.e. 'b'
-- was not already flagged as seen after inserting 'a'.
runTrie :: Trie a -> a -> a -> Bool
runTrie (Trie p f) a b = isJust $ f a False seen (p False) >>= f b False seen
-- Association-list store used by 'finite'.
newtype V a b = V [(a,b)]
-- side-condition: needs 'a' to be finitely enumerable -- linear time
-- 'step' walks the alist: on a miss it returns @Left (k d)@ (either a
-- new cell or an abort), on a hit it applies the continuation to the
-- stored cell.  'fini' rebuilds the V, prepending the freshly created
-- cell on a miss and propagating Nothing as an abort.
finite :: Eq a => Trie a
finite = Trie (const $ V []) $ \a d k (V xs) -> fini a xs $ step a d k xs where
fini :: a -> [(a,x)] -> Either (Maybe x) [(a,x)] -> Maybe (V a x)
fini _ _ (Left Nothing) = Nothing
fini a xs (Left (Just d')) = Just $ V $ (a,d'):xs
fini _ _ (Right ys) = Just $ V ys
step :: Eq a => a -> x -> (x -> Maybe x) -> [(a,x)] -> Either (Maybe x) [(a,x)]
step _ d k [] = Left (k d)
step a d k ((b,x):xs)
| a /= b = ((b,x):) <$> step a d k xs
| otherwise = case k x of
Nothing -> Left Nothing
Just x' -> Right ((b,x'):xs)
-- side-condition: needs 'a' to be finitely enumerable and have an 'Ord' instance -- log time
-- Map-backed variant: a single 'Map.alterF' performs lookup + update
-- in one pass, with the functor (Maybe) carrying the abort.
finiteOrd :: Ord a => Trie a
finiteOrd = Trie (const mempty) $ \ a d k -> Map.alterF (fmap Just . k . fromMaybe d) a
-- 'at'-style lens helper for HashMap: focus the value under k and
-- insert/delete according to the functor result.
atH :: (Functor f, Hashable k, Eq k) => k -> (Maybe a -> f (Maybe a)) -> HashMap k a -> f (HashMap k a)
atH k f m = f mv <&> \case
Nothing -> maybe m (const (HashMap.delete k m)) mv
Just v' -> HashMap.insert k v' m
where mv = HashMap.lookup k m
-- HashMap-backed variant of 'finiteOrd' for Hashable keys.
finiteHash :: (Hashable a, Eq a) => Trie a
finiteHash = Trie (const mempty) $ \a d k -> atH a (fmap Just . k . fromMaybe d)
-- can I handle orders? I can't compile down to a test, can I incorporate tests as another constructor?
-- or handle a mix of test and non-test parts?
-- Fold a Trie into a 'History': elements are fed one at a time through
-- 'seen', with Nothing signalling that an element was already present.
history :: Trie a -> History a
history (Trie p f) = History step (p False) where
step xs a = f a False seen xs
| null | https://raw.githubusercontent.com/ekmett/coda/bca7e36ab00036f92d94eb86298712ab1dbf9b8d/src/termination/Termination/Trie.hs | haskell | # language CPP #
|
Well-Quasi-Orders using tries
| A well quasi-order: A reflexive, transitive relation such that
encoded as a procedure for maintaining 'xs' in an easily testable form
This is an experiment to see if we can use a trie-based encoding.
How to handle orders?
side-condition: needs 'a' to be finitely enumerable -- linear time
side-condition: needs 'a' to be finitely enumerable and have an 'Ord' instance -- log time
can I handle orders? I can't compile down to a test, can I incorporate tests as another constructor?
or handle a mix of test and non-test parts? | # language GADTs #
# language RankNTypes #
# language LambdaCase #
module Termination.Trie
( Trie(..)
, runTrie
, finite
, finiteOrd
, finiteHash
, history
) where
import Data.Functor
import Data.Functor.Identity
import Data.Functor.Compose
import Data.Functor.Contravariant
import Data.Functor.Contravariant.Divisible
import Data.Functor.Product
import Data.Coerce
import Data.Map as Map
import Data.Hashable
import Data.HashMap.Strict as HashMap
import Data.Maybe
import Data.Proxy
#if __GLASGOW_HASKELL__ < 804
import Data.Semigroup
#endif
import Data.Void
import Termination.History
and every infinite set xs has i < j such that ( xs!i ) < = ( )
data Trie a where Trie :: (forall x. x -> f x) -> (forall x. a -> x -> (x -> Maybe x) -> f x -> Maybe (f x)) -> Trie a
instance Contravariant Trie where
contramap f (Trie h g) = Trie h (g . f)
instance Divisible Trie where
conquer = Trie Identity (\_ _ -> coerce :: (x -> Maybe x) -> Identity x -> Maybe (Identity x))
divide f (Trie p g) (Trie q h) = Trie (Compose . p . q) $ \a d k -> case f a of
(b, c) -> fmap Compose . g b (q d) (h c d k) . getCompose
instance Decidable Trie where
lose f = Trie (const Proxy) (absurd . f)
choose f (Trie p g) (Trie q h) = Trie (\a -> Pair (p a) (q a)) $ \a d k (Pair x y) -> case f a of
Left b -> (`Pair` y) <$> g b d k x
Right c -> Pair x <$> h c d k y
instance Semigroup (Trie a) where
Trie p g <> Trie q h = Trie (Compose . p . q) $ \a d k -> fmap Compose . g a (q d) (h a d k) . getCompose
instance Monoid (Trie a) where
mempty = conquer
mappend = (<>)
seen :: Bool -> Maybe Bool
seen True = Nothing
seen False = Just True
runTrie :: Trie a -> a -> a -> Bool
runTrie (Trie p f) a b = isJust $ f a False seen (p False) >>= f b False seen
newtype V a b = V [(a,b)]
finite :: Eq a => Trie a
finite = Trie (const $ V []) $ \a d k (V xs) -> fini a xs $ step a d k xs where
fini :: a -> [(a,x)] -> Either (Maybe x) [(a,x)] -> Maybe (V a x)
fini _ _ (Left Nothing) = Nothing
fini a xs (Left (Just d')) = Just $ V $ (a,d'):xs
fini _ _ (Right ys) = Just $ V ys
step :: Eq a => a -> x -> (x -> Maybe x) -> [(a,x)] -> Either (Maybe x) [(a,x)]
step _ d k [] = Left (k d)
step a d k ((b,x):xs)
| a /= b = ((b,x):) <$> step a d k xs
| otherwise = case k x of
Nothing -> Left Nothing
Just x' -> Right ((b,x'):xs)
finiteOrd :: Ord a => Trie a
finiteOrd = Trie (const mempty) $ \ a d k -> Map.alterF (fmap Just . k . fromMaybe d) a
atH :: (Functor f, Hashable k, Eq k) => k -> (Maybe a -> f (Maybe a)) -> HashMap k a -> f (HashMap k a)
atH k f m = f mv <&> \case
Nothing -> maybe m (const (HashMap.delete k m)) mv
Just v' -> HashMap.insert k v' m
where mv = HashMap.lookup k m
finiteHash :: (Hashable a, Eq a) => Trie a
finiteHash = Trie (const mempty) $ \a d k -> atH a (fmap Just . k . fromMaybe d)
history :: Trie a -> History a
history (Trie p f) = History step (p False) where
step xs a = f a False seen xs
|
69eb915bb40e11726c5a85648ea607a5c85f25f41f859caf1996bac8499c6b2d | namin/staged-miniKanren | tests-dl-strict.scm | (load "staged-load.scm")
Adapted from the nnf code in ' The Semantic Web Explained ' by
Szeredi , , and . Cambridge University
Press , 2014 .
;; Build (as a quoted s-expression) a program that computes the
;; negation normal form (NNF) of the description-logic `concept`, for
;; evaluation by the staged/unstaged relational interpreters.  The
;; generated program carries its own Peano-number helpers and a
;; `concept?` well-formedness checker used by the fallback clause.
(define nnf
  (lambda (concept)
    `(letrec ((number?
               (lambda (n)
                 (match n
                   [`z #t]
                   [`(s . ,n-1)
                    (number? n-1)]
                   [else #f]))))
       (letrec ((positive?
                 (lambda (n)
                   (match n
                     [`z #f]
                     [`(s . ,n-1) #t]))))
         (letrec ((add1
                   (lambda (n)
                     (cons 's n))))
           (letrec ((sub1
                     (lambda (n)
                       (match n
                         [`(s . ,n-1) n-1]))))
             ;; Syntactic well-formedness check for concepts.
             (letrec ((concept?
                       (lambda (c)
                         (match c
                           [`Top #t]
                           [`,(? symbol? concept-name)
                            #t]
                           [`(Not ,c) (concept? c)]
                           ;; BUG FIX: the And/Or clauses previously
                           ;; tested (concept? c1) twice and never
                           ;; validated c2.
                           [`(And ,c1 ,c2)
                            (and (concept? c1) (concept? c2))]
                           [`(Or ,c1 ,c2)
                            (and (concept? c1) (concept? c2))]
                           [`(Exists ,(? symbol? r) ,c)
                            (concept? c)]
                           [`(All ,(? symbol? r) ,c)
                            (concept? c)]
                           [`(AtLeast ,k ,(? symbol? r))
                            (number? k)]
                           [`(AtMost ,k ,(? symbol? r))
                            (number? k)]
                           [else #f]))))
               ;; Push negations inward, using De Morgan duals for
               ;; And/Or, quantifier duals for Exists/All, and the
               ;; counting duals for AtLeast/AtMost.
               (letrec ((nnf
                         (lambda (concept)
                           (match concept
                             [`(Not (Not ,c)) (nnf c)]
                             [`(Not (And ,c1 ,c2))
                              (list 'Or (nnf (list 'Not c1)) (nnf (list 'Not c2)))]
                             [`(Not (Or ,c1 ,c2))
                              (list 'And (nnf (list 'Not c1)) (nnf (list 'Not c2)))]
                             [`(Not (Exists ,(? symbol? r) ,c))
                              (list 'All r (nnf (list 'Not c)))]
                             [`(Not (All ,(? symbol? r) ,c))
                              (list 'Exists r (nnf (list 'Not c)))]
                             [`(Not (AtMost ,k ,(? symbol? r)))
                              (list 'AtLeast (add1 k) r)]
                             [`(Not (AtLeast ,k ,(? symbol? r)))
                              (if (positive? k)
                                  (list 'AtMost (sub1 k) r)
                                  ;; (Not (AtLeast 0 r)) is unsatisfiable
                                  (list 'Not 'Top))]
                             [`(And ,c1 ,c2)
                              (list 'And (nnf c1) (nnf c2))]
                             [`(Or ,c1 ,c2)
                              (list 'Or (nnf c1) (nnf c2))]
                             [`(Exists ,(? symbol? r) ,c)
                              (list 'Exists r (nnf c))]
                             [`(All ,(? symbol? r) ,c)
                              (list 'All r (nnf c))]
                             [concept
                              (if (concept? concept)
                                  concept
                                  ;; simulate an error
                                  (car '())
                                  ;; #f ;; really an error
                                  )]))))
                 (nnf ',concept)))))))))
;; Stage the interpreter specialized to the nnf program: `nnfo` relates
;; a concept to its negation normal form.  The record-bench call tags
;; the staging time for the benchmark harness.
(record-bench 'staging 'nnf)
(define-staged-relation (nnfo concept nnf-concept)
(evalo-staged
(nnf concept)
nnf-concept))
(record-bench 'staged 'nnf 0)
(time-test
(run 10 (concept)
(nnfo concept '(Not Top)))
'((Not Top) (Not (Not (Not Top)))
((Not (AtLeast z _.0))
$$
(=/= ((_.0 call))
((_.0 call-code))
((_.0 closure))
((_.0 dynamic))
((_.0 prim)))
(sym _.0))
(Not (Not (Not (Not (Not Top)))))
((Not (Not (Not (AtLeast z _.0))))
$$
(=/= ((_.0 call))
((_.0 call-code))
((_.0 closure))
((_.0 dynamic))
((_.0 prim)))
(sym _.0))
(Not (Not (Not (Not (Not (Not (Not Top)))))))
((Not (Not (Not (Not (Not (AtLeast z _.0))))))
$$
(=/= ((_.0 call))
((_.0 call-code))
((_.0 closure))
((_.0 dynamic))
((_.0 prim)))
(sym _.0))
(Not (Not (Not (Not (Not (Not (Not (Not (Not Top)))))))))
((Not (Not (Not (Not (Not (Not (Not (AtLeast z _.0))))))))
$$
(=/= ((_.0 call))
((_.0 call-code))
((_.0 closure))
((_.0 dynamic))
((_.0 prim)))
(sym _.0))
(Not (Not (Not (Not (Not (Not (Not (Not (Not (Not (Not Top)))))))))))))
(record-bench 'unstaged 'nnf 0)
(time-test
(run 10 (concept)
(evalo-unstaged
(nnf concept)
'(Not Top)))
'((Not Top)
(Not (Not (Not Top)))
(Not (Not (Not (Not (Not Top)))))
(Not (Not (Not (Not (Not (Not (Not Top)))))))
(Not (Not (Not (Not (Not (Not (Not (Not (Not Top)))))))))
(Not (Not (Not (Not (Not (Not (Not (Not (Not (Not (Not Top)))))))))))
((Not (AtLeast z _.0))
$$
(=/= ((_.0 call)) ((_.0 call-code)) ((_.0 closure)) ((_.0 dynamic)) ((_.0 prim)))
(sym _.0))
(Not (Not (Not (Not (Not (Not (Not (Not (Not (Not (Not (Not (Not Top)))))))))))))
((Not (Not (Not (AtLeast z _.0))))
$$
(=/= ((_.0 call)) ((_.0 call-code)) ((_.0 closure)) ((_.0 dynamic)) ((_.0 prim)))
(sym _.0))
(Not (Not (Not (Not (Not (Not (Not (Not (Not (Not (Not (Not (Not (Not (Not Top)))))))))))))))))
;; Note the difference in order w.r.t. the unstaged version.
(record-bench 'run-staged 'nnf 0)
(time-test
(run-staged 10 (concept)
(evalo-staged
(nnf concept)
'(Not Top)))
'((Not Top) (Not (Not (Not Top)))
((Not (AtLeast z _.0))
$$
(=/= ((_.0 call)) ((_.0 call-code))
((_.0 closure))
((_.0 dynamic))
((_.0 prim)))
(sym _.0))
(Not (Not (Not (Not (Not Top)))))
((Not (Not (Not (AtLeast z _.0))))
$$
(=/= ((_.0 call)) ((_.0 call-code))
((_.0 closure))
((_.0 dynamic))
((_.0 prim)))
(sym _.0))
(Not (Not (Not (Not (Not (Not (Not Top)))))))
((Not (Not (Not (Not (Not (AtLeast z _.0))))))
$$
(=/= ((_.0 call)) ((_.0 call-code))
((_.0 closure))
((_.0 dynamic))
((_.0 prim)))
(sym _.0))
(Not (Not (Not (Not (Not (Not (Not (Not (Not Top)))))))))
((Not (Not (Not (Not (Not (Not (Not (AtLeast z _.0))))))))
$$
(=/= ((_.0 call)) ((_.0 call-code))
((_.0 closure))
((_.0 dynamic))
((_.0 prim)))
(sym _.0))
(Not (Not (Not (Not (Not (Not (Not (Not (Not (Not (Not Top)))))))))))))
(record-bench 'unstaged 'nnf 1)
(time-test
(run* (nnf-concept)
(evalo-unstaged
(nnf '(Not (AtLeast z hasChild)))
nnf-concept))
'((Not Top)))
(record-bench 'run-staged 'nnf 1)
(time-test
(run-staged #f (nnf-concept)
(evalo-staged
(nnf '(Not (AtLeast z hasChild)))
nnf-concept))
'((Not Top)))
(record-bench 'staged 'nnf 1)
(time-test
(run* (nnf-concept)
(nnfo '(Not (AtLeast z hasChild)) nnf-concept))
'((Not Top)))
(record-bench 'unstaged 'nnf 2)
(time-test
(run* (nnf-concept)
(evalo-unstaged
(nnf '(Not (AtMost (s . z) hasChild)))
nnf-concept))
'((AtLeast (s s . z) hasChild)))
(record-bench 'run-staged 'nnf 2)
(time-test
(run-staged #f (nnf-concept)
(evalo-staged
(nnf '(Not (AtMost (s . z) hasChild)))
nnf-concept))
'((AtLeast (s s . z) hasChild)))
(record-bench 'staged 'nnf 2)
(time-test
(run* (nnf-concept)
(nnfo '(Not (AtMost (s . z) hasChild)) nnf-concept))
'((AtLeast (s s . z) hasChild)))
(record-bench 'unstaged 'nnf 3)
(time-test
(run* (nnf-concept)
(evalo-unstaged
(nnf '(Not (AtLeast (s s s . z) hasChild)))
nnf-concept))
'((AtMost (s s . z) hasChild)))
(record-bench 'run-staged 'nnf 3)
(time-test
(run-staged #f (nnf-concept)
(evalo-staged
(nnf '(Not (AtLeast (s s s . z) hasChild))) nnf-concept))
'((AtMost (s s . z) hasChild)))
(record-bench 'staged 'nnf 3)
(time-test
(run* (nnf-concept)
(nnfo '(Not (AtLeast (s s s . z) hasChild)) nnf-concept))
'((AtMost (s s . z) hasChild)))
| null | https://raw.githubusercontent.com/namin/staged-miniKanren/019d0e68fd1fe49589a845a695a4d6fd39c9e4a2/tests-dl-strict.scm | scheme | simulate an error
#f ;; really an error
Note the difference in order w.r.t. the unstaged version. | (load "staged-load.scm")
Adapted from the nnf code in ' The Semantic Web Explained ' by
Szeredi , , and . Cambridge University
Press , 2014 .
(define nnf
(lambda (concept)
`(letrec ((number?
(lambda (n)
(match n
[`z #t]
[`(s . ,n-1)
(number? n-1)]
[else #f]))))
(letrec ((positive?
(lambda (n)
(match n
[`z #f]
[`(s . ,n-1) #t]))))
(letrec ((add1
(lambda (n)
(cons 's n))))
(letrec ((sub1
(lambda (n)
(match n
[`(s . ,n-1) n-1]))))
(letrec ((concept?
(lambda (c)
(match c
[`Top #t]
[`,(? symbol? concept-name)
#t]
[`(Not ,c) (concept? c)]
[`(And ,c1 ,c2)
(and (concept? c1) (concept? c1))]
[`(Or ,c1 ,c2)
(and (concept? c1) (concept? c1))]
[`(Exists ,(? symbol? r) ,c)
(concept? c)]
[`(All ,(? symbol? r) ,c)
(concept? c)]
[`(AtLeast ,k ,(? symbol? r))
(number? k)]
[`(AtMost ,k ,(? symbol? r))
(number? k)]
[else #f]))))
(letrec ((nnf
(lambda (concept)
(match concept
[`(Not (Not ,c)) (nnf c)]
[`(Not (And ,c1 ,c2))
(list 'Or (nnf (list 'Not c1)) (nnf (list 'Not c2)))]
[`(Not (Or ,c1 ,c2))
(list 'And (nnf (list 'Not c1)) (nnf (list 'Not c2)))]
[`(Not (Exists ,(? symbol? r) ,c))
(list 'All r (nnf (list 'Not c)))]
[`(Not (All ,(? symbol? r) ,c))
(list 'Exists r (nnf (list 'Not c)))]
[`(Not (AtMost ,k ,(? symbol? r)))
(list 'AtLeast (add1 k) r)]
[`(Not (AtLeast ,k ,(? symbol? r)))
(if (positive? k)
(list 'AtMost (sub1 k) r)
(list 'Not 'Top))]
[`(And ,c1 ,c2)
(list 'And (nnf c1) (nnf c2))]
[`(Or ,c1 ,c2)
(list 'Or (nnf c1) (nnf c2))]
[`(Exists ,(? symbol? r) ,c)
(list 'Exists r (nnf c))]
[`(All ,(? symbol? r) ,c)
(list 'All r (nnf c))]
[concept
(if (concept? concept)
concept
(car '())
)]))))
(nnf ',concept)))))))))
(record-bench 'staging 'nnf)
(define-staged-relation (nnfo concept nnf-concept)
(evalo-staged
(nnf concept)
nnf-concept))
(record-bench 'staged 'nnf 0)
(time-test
(run 10 (concept)
(nnfo concept '(Not Top)))
'((Not Top) (Not (Not (Not Top)))
((Not (AtLeast z _.0))
$$
(=/= ((_.0 call))
((_.0 call-code))
((_.0 closure))
((_.0 dynamic))
((_.0 prim)))
(sym _.0))
(Not (Not (Not (Not (Not Top)))))
((Not (Not (Not (AtLeast z _.0))))
$$
(=/= ((_.0 call))
((_.0 call-code))
((_.0 closure))
((_.0 dynamic))
((_.0 prim)))
(sym _.0))
(Not (Not (Not (Not (Not (Not (Not Top)))))))
((Not (Not (Not (Not (Not (AtLeast z _.0))))))
$$
(=/= ((_.0 call))
((_.0 call-code))
((_.0 closure))
((_.0 dynamic))
((_.0 prim)))
(sym _.0))
(Not (Not (Not (Not (Not (Not (Not (Not (Not Top)))))))))
((Not (Not (Not (Not (Not (Not (Not (AtLeast z _.0))))))))
$$
(=/= ((_.0 call))
((_.0 call-code))
((_.0 closure))
((_.0 dynamic))
((_.0 prim)))
(sym _.0))
(Not (Not (Not (Not (Not (Not (Not (Not (Not (Not (Not Top)))))))))))))
(record-bench 'unstaged 'nnf 0)
(time-test
(run 10 (concept)
(evalo-unstaged
(nnf concept)
'(Not Top)))
'((Not Top)
(Not (Not (Not Top)))
(Not (Not (Not (Not (Not Top)))))
(Not (Not (Not (Not (Not (Not (Not Top)))))))
(Not (Not (Not (Not (Not (Not (Not (Not (Not Top)))))))))
(Not (Not (Not (Not (Not (Not (Not (Not (Not (Not (Not Top)))))))))))
((Not (AtLeast z _.0))
$$
(=/= ((_.0 call)) ((_.0 call-code)) ((_.0 closure)) ((_.0 dynamic)) ((_.0 prim)))
(sym _.0))
(Not (Not (Not (Not (Not (Not (Not (Not (Not (Not (Not (Not (Not Top)))))))))))))
((Not (Not (Not (AtLeast z _.0))))
$$
(=/= ((_.0 call)) ((_.0 call-code)) ((_.0 closure)) ((_.0 dynamic)) ((_.0 prim)))
(sym _.0))
(Not (Not (Not (Not (Not (Not (Not (Not (Not (Not (Not (Not (Not (Not (Not Top)))))))))))))))))
(record-bench 'run-staged 'nnf 0)
(time-test
(run-staged 10 (concept)
(evalo-staged
(nnf concept)
'(Not Top)))
'((Not Top) (Not (Not (Not Top)))
((Not (AtLeast z _.0))
$$
(=/= ((_.0 call)) ((_.0 call-code))
((_.0 closure))
((_.0 dynamic))
((_.0 prim)))
(sym _.0))
(Not (Not (Not (Not (Not Top)))))
((Not (Not (Not (AtLeast z _.0))))
$$
(=/= ((_.0 call)) ((_.0 call-code))
((_.0 closure))
((_.0 dynamic))
((_.0 prim)))
(sym _.0))
(Not (Not (Not (Not (Not (Not (Not Top)))))))
((Not (Not (Not (Not (Not (AtLeast z _.0))))))
$$
(=/= ((_.0 call)) ((_.0 call-code))
((_.0 closure))
((_.0 dynamic))
((_.0 prim)))
(sym _.0))
(Not (Not (Not (Not (Not (Not (Not (Not (Not Top)))))))))
((Not (Not (Not (Not (Not (Not (Not (AtLeast z _.0))))))))
$$
(=/= ((_.0 call)) ((_.0 call-code))
((_.0 closure))
((_.0 dynamic))
((_.0 prim)))
(sym _.0))
(Not (Not (Not (Not (Not (Not (Not (Not (Not (Not (Not Top)))))))))))))
(record-bench 'unstaged 'nnf 1)
(time-test
(run* (nnf-concept)
(evalo-unstaged
(nnf '(Not (AtLeast z hasChild)))
nnf-concept))
'((Not Top)))
(record-bench 'run-staged 'nnf 1)
(time-test
(run-staged #f (nnf-concept)
(evalo-staged
(nnf '(Not (AtLeast z hasChild)))
nnf-concept))
'((Not Top)))
(record-bench 'staged 'nnf 1)
(time-test
(run* (nnf-concept)
(nnfo '(Not (AtLeast z hasChild)) nnf-concept))
'((Not Top)))
(record-bench 'unstaged 'nnf 2)
(time-test
(run* (nnf-concept)
(evalo-unstaged
(nnf '(Not (AtMost (s . z) hasChild)))
nnf-concept))
'((AtLeast (s s . z) hasChild)))
(record-bench 'run-staged 'nnf 2)
(time-test
(run-staged #f (nnf-concept)
(evalo-staged
(nnf '(Not (AtMost (s . z) hasChild)))
nnf-concept))
'((AtLeast (s s . z) hasChild)))
(record-bench 'staged 'nnf 2)
(time-test
(run* (nnf-concept)
(nnfo '(Not (AtMost (s . z) hasChild)) nnf-concept))
'((AtLeast (s s . z) hasChild)))
(record-bench 'unstaged 'nnf 3)
(time-test
(run* (nnf-concept)
(evalo-unstaged
(nnf '(Not (AtLeast (s s s . z) hasChild)))
nnf-concept))
'((AtMost (s s . z) hasChild)))
(record-bench 'run-staged 'nnf 3)
(time-test
(run-staged #f (nnf-concept)
(evalo-staged
(nnf '(Not (AtLeast (s s s . z) hasChild))) nnf-concept))
'((AtMost (s s . z) hasChild)))
(record-bench 'staged 'nnf 3)
(time-test
(run* (nnf-concept)
(nnfo '(Not (AtLeast (s s s . z) hasChild)) nnf-concept))
'((AtMost (s s . z) hasChild)))
|
25be60a17cb2757f2a12b9bf64f03c3b7ee3a7dc70ab0398a1d7648c43df60a5 | kellino/microML | ParseTree.hs | module Repl.ParseTree (showTree) where
import Data.Tree
import Data.Tree.Pretty
import MicroML.Syntax
-- | pretty print the parse tree of an expression in the repl
exprToTree :: (String, Expr) -> Tree String
exprToTree (nm, ex) =
Node nm $ etoT ex
etoT :: Expr -> [Tree Name]
etoT (FixPoint e1) = [Node "rec" (etoT e1)]
etoT Nil = [Node "[]" []]
etoT (App e1 e2) = etoT e1 ++ etoT e2
etoT (UnaryOp op e1) = [Node (ppUnop op) (etoT e1)]
etoT (If cond tr fls) = [Node "if" (etoT cond ++ etoT tr ++ etoT fls)]
etoT (Var x) = [Node x []]
etoT (Let nm e1 e2) = [Node nm (etoT e1 ++ etoT e2)]
etoT (Lit (LInt x)) = [Node (show x) []]
etoT (Lit (LDouble x)) = [Node (show x) []]
etoT (Lit (LChar x)) = [Node (show x) []]
etoT (Lit (LString x)) = [Node x []]
etoT (Lit (LBoolean x)) = [Node (show x) []]
etoT (Lit (LTup x)) = [Node (show x) []]
etoT (PrimitiveErr _) = [Node "error" []]
etoT (Lam nm ex) = [Node nm $ etoT ex]
etoT (BinOp op e1 e2) = [Node (pp op) (etoT e1 ++ etoT e2)]
etoT x = [Node (show x) []]
ppUnop :: UnaryOp -> String
ppUnop OpLog = "log"
ppUnop Car = "head"
ppUnop Cdr = "tail"
ppUnop Read = "read"
ppUnop Show = "show"
ppUnop Not = "not"
ppUnop Minus = "-"
ppUnop Chr = "char"
ppUnop Ord = "ord"
pp :: Binop -> String
pp OpAdd = "+"
pp OpMul = "*"
pp OpEq = "=="
pp OpAppend = "++"
pp OpNotEq = "≠"
pp OpOr = "or"
pp OpExp = "^"
pp OpDiv = "÷"
pp OpIntDiv = "÷"
pp OpPipe = ">>"
pp OpMod = "%"
pp OpAnd = "and"
pp OpXor = "xor"
pp OpSub = "-"
pp OpLe = "<="
pp OpLt = "<"
pp OpGe = ">="
pp OpGt = ">"
pp OpCons = ":"
showTree :: (String, Expr) -> IO ()
showTree tr = putStrLn $ drawVerticalTreeWith 5 (exprToTree tr)
| null | https://raw.githubusercontent.com/kellino/microML/26a4e0ad7542e26f51945eb92db19f63f69b6962/src/Repl/ParseTree.hs | haskell | | pretty print the parse tree of an expression in the repl | module Repl.ParseTree (showTree) where
import Data.Tree
import Data.Tree.Pretty
import MicroML.Syntax
exprToTree :: (String, Expr) -> Tree String
exprToTree (nm, ex) =
Node nm $ etoT ex
etoT :: Expr -> [Tree Name]
etoT (FixPoint e1) = [Node "rec" (etoT e1)]
etoT Nil = [Node "[]" []]
etoT (App e1 e2) = etoT e1 ++ etoT e2
etoT (UnaryOp op e1) = [Node (ppUnop op) (etoT e1)]
etoT (If cond tr fls) = [Node "if" (etoT cond ++ etoT tr ++ etoT fls)]
etoT (Var x) = [Node x []]
etoT (Let nm e1 e2) = [Node nm (etoT e1 ++ etoT e2)]
etoT (Lit (LInt x)) = [Node (show x) []]
etoT (Lit (LDouble x)) = [Node (show x) []]
etoT (Lit (LChar x)) = [Node (show x) []]
etoT (Lit (LString x)) = [Node x []]
etoT (Lit (LBoolean x)) = [Node (show x) []]
etoT (Lit (LTup x)) = [Node (show x) []]
etoT (PrimitiveErr _) = [Node "error" []]
etoT (Lam nm ex) = [Node nm $ etoT ex]
etoT (BinOp op e1 e2) = [Node (pp op) (etoT e1 ++ etoT e2)]
etoT x = [Node (show x) []]
ppUnop :: UnaryOp -> String
ppUnop OpLog = "log"
ppUnop Car = "head"
ppUnop Cdr = "tail"
ppUnop Read = "read"
ppUnop Show = "show"
ppUnop Not = "not"
ppUnop Minus = "-"
ppUnop Chr = "char"
ppUnop Ord = "ord"
pp :: Binop -> String
pp OpAdd = "+"
pp OpMul = "*"
pp OpEq = "=="
pp OpAppend = "++"
pp OpNotEq = "≠"
pp OpOr = "or"
pp OpExp = "^"
pp OpDiv = "÷"
pp OpIntDiv = "÷"
pp OpPipe = ">>"
pp OpMod = "%"
pp OpAnd = "and"
pp OpXor = "xor"
pp OpSub = "-"
pp OpLe = "<="
pp OpLt = "<"
pp OpGe = ">="
pp OpGt = ">"
pp OpCons = ":"
showTree :: (String, Expr) -> IO ()
showTree tr = putStrLn $ drawVerticalTreeWith 5 (exprToTree tr)
|
c47e8f56d90a117bc06a9faa708236b07aa3c0a5404242f9c53305c79e85ce2b | janestreet/bonsai | bonsai_web_ui_file_from_web_file.ml | open Core
open Async_kernel
open Js_of_ocaml
let create file =
let read =
Ui_effect.of_sync_fun (fun on_progress ->
let file_reader = new%js File.fileReader in
let result = Ivar.create () in
let result =
Bonsai_web.Effect.of_deferred_fun
(fun () ->
let call_on_progress ev =
if Js.to_bool ev##.lengthComputable
then
on_progress
{ Bonsai_web_ui_file.Progress.loaded = ev##.loaded
; total = ev##.total
}
|> Ui_effect.Expert.handle
in
file_reader##.onprogress
:= Dom.handler (fun ev ->
call_on_progress ev;
Js._true);
file_reader##.onerror
:= Dom.handler (fun _ev ->
let error =
Error.create_s
[%message
"Error reading file"
~code:(file_reader##.error##.code : int)
~message:
(Js.to_string
(Js.Unsafe.get
file_reader##.error
(Js.string "message")))]
in
Ivar.fill_if_empty
result
(Error (Bonsai_web_ui_file.Read_error.Error error));
Js._true);
file_reader##.onload
:= Dom.handler (fun ev ->
call_on_progress ev;
(match
file_reader##.result
|> File.CoerceTo.arrayBuffer
|> Js.Opt.to_option
with
| None ->
raise_s
[%message
"BUG: could not coerce fileReader result to arrayBuffer"]
| Some array_buffer ->
let contents = Typed_array.String.of_arrayBuffer array_buffer in
Ivar.fill_if_empty result (Ok contents));
Js._true);
file_reader##readAsArrayBuffer file;
Ivar.read result)
()
in
let abort = Ui_effect.of_sync_fun (fun () -> file_reader##abort) () in
{ Bonsai_web_ui_file.Expert.result; abort })
in
Bonsai_web_ui_file.Expert.create ~read ~filename:(File.filename file |> Js.to_string)
;;
| null | https://raw.githubusercontent.com/janestreet/bonsai/782fecd000a1f97b143a3f24b76efec96e36a398/web_ui/file/from_web_file/bonsai_web_ui_file_from_web_file.ml | ocaml | open Core
open Async_kernel
open Js_of_ocaml
let create file =
let read =
Ui_effect.of_sync_fun (fun on_progress ->
let file_reader = new%js File.fileReader in
let result = Ivar.create () in
let result =
Bonsai_web.Effect.of_deferred_fun
(fun () ->
let call_on_progress ev =
if Js.to_bool ev##.lengthComputable
then
on_progress
{ Bonsai_web_ui_file.Progress.loaded = ev##.loaded
; total = ev##.total
}
|> Ui_effect.Expert.handle
in
file_reader##.onprogress
:= Dom.handler (fun ev ->
call_on_progress ev;
Js._true);
file_reader##.onerror
:= Dom.handler (fun _ev ->
let error =
Error.create_s
[%message
"Error reading file"
~code:(file_reader##.error##.code : int)
~message:
(Js.to_string
(Js.Unsafe.get
file_reader##.error
(Js.string "message")))]
in
Ivar.fill_if_empty
result
(Error (Bonsai_web_ui_file.Read_error.Error error));
Js._true);
file_reader##.onload
:= Dom.handler (fun ev ->
call_on_progress ev;
(match
file_reader##.result
|> File.CoerceTo.arrayBuffer
|> Js.Opt.to_option
with
| None ->
raise_s
[%message
"BUG: could not coerce fileReader result to arrayBuffer"]
| Some array_buffer ->
let contents = Typed_array.String.of_arrayBuffer array_buffer in
Ivar.fill_if_empty result (Ok contents));
Js._true);
file_reader##readAsArrayBuffer file;
Ivar.read result)
()
in
let abort = Ui_effect.of_sync_fun (fun () -> file_reader##abort) () in
{ Bonsai_web_ui_file.Expert.result; abort })
in
Bonsai_web_ui_file.Expert.create ~read ~filename:(File.filename file |> Js.to_string)
;;
| |
266c561f98c7f728aa939fa3340862009d4095c1647464488df6e86f918e943a | srid/ema | Ex00_Hello.hs | {-# LANGUAGE DeriveAnyClass #-}
# LANGUAGE UndecidableInstances #
| Most trivial Ema program
module Ema.Example.Ex00_Hello where
import Ema
Let 's newtype the unit route , because we have only one page to generate .
newtype Route = Route ()
deriving newtype
(Show, Eq, Ord, Generic, IsRoute)
instance EmaSite Route where
siteInput _ _ =
pure $ pure ()
siteOutput _ _ _ =
pure $ Ema.AssetGenerated Ema.Html "<b>Hello</b>, Ema"
main :: IO ()
main = void $ Ema.runSite @Route ()
| null | https://raw.githubusercontent.com/srid/ema/61faae56aa0f3c6ca815f344684cc566f6341662/ema-examples/src/Ema/Example/Ex00_Hello.hs | haskell | # LANGUAGE DeriveAnyClass # | # LANGUAGE UndecidableInstances #
| Most trivial Ema program
module Ema.Example.Ex00_Hello where
import Ema
Let 's newtype the unit route , because we have only one page to generate .
newtype Route = Route ()
deriving newtype
(Show, Eq, Ord, Generic, IsRoute)
instance EmaSite Route where
siteInput _ _ =
pure $ pure ()
siteOutput _ _ _ =
pure $ Ema.AssetGenerated Ema.Html "<b>Hello</b>, Ema"
main :: IO ()
main = void $ Ema.runSite @Route ()
|
66ff55bd5b37989eb9edbf920272ab8b3c93beb8dcb8a42da1f1399c438de382 | aowens-21/racket-formatting | file-level.rkt | #lang racket/base
(require rackunit
racket/runtime-path
custom-syntax-format
racket/port
racket/file)
(define-runtime-path file-level-conds.rkt "file-level/01-conds.rkt")
(define-runtime-path file-level-expected.rkt "file-level/01-expected.rkt")
(define-runtime-path file-level-02-comments.rkt "file-level/02-comments.rkt")
(define-runtime-path file-level-02-expected.rkt "file-level/02-expected.rkt")
(define-runtime-path file-level-03-nested-contracts.rkt "file-level/03-nested-contracts.rkt")
(define-runtime-path file-level-03-expected.rkt "file-level/03-expected.rkt")
(define-runtime-path file-level-04-single-conds.rkt "file-level/99-conds.rkt")
(define-runtime-path file-level-04-expected.rkt "file-level/99-expected.rkt")
(module+ main
(require racket/pretty)
(pretty-write
(get-format-instructions file-level-04-single-conds.rkt))
)
(module+ test
(require rackunit)
(check-equal? (with-output-to-string
(λ () (dynamic-require file-level-conds.rkt #f)))
(with-output-to-string
(λ () (dynamic-require file-level-expected.rkt #f))))
(check-equal? (format-file file-level-conds.rkt)
(file->string file-level-expected.rkt))
(check-equal? (with-output-to-string
(λ () (dynamic-require file-level-02-comments.rkt #f)))
(with-output-to-string
(λ () (dynamic-require file-level-02-expected.rkt #f))))
(check-equal? (with-output-to-string
(λ () (dynamic-require file-level-03-nested-contracts.rkt #f)))
(with-output-to-string
(λ () (dynamic-require file-level-03-expected.rkt #f))))
(check-equal? (format-file file-level-03-nested-contracts.rkt)
(file->string file-level-03-expected.rkt))
(check-equal? (with-output-to-string
(λ () (dynamic-require file-level-04-single-conds.rkt #f)))
(with-output-to-string
(λ () (dynamic-require file-level-04-expected.rkt #f))))
(check-equal? (format-file file-level-04-single-conds.rkt)
(file->string file-level-04-expected.rkt))
;; fails
#;(check-equal? (format-file file-level-02-comments.rkt)
(file->string file-level-02-expected.rkt)))
| null | https://raw.githubusercontent.com/aowens-21/racket-formatting/88c60c53edbfe2d88c26c8e45e11387e98bd6213/custom-syntax-format/tests/custom-syntax-format/file-level.rkt | racket | fails
(check-equal? (format-file file-level-02-comments.rkt) | #lang racket/base
(require rackunit
racket/runtime-path
custom-syntax-format
racket/port
racket/file)
(define-runtime-path file-level-conds.rkt "file-level/01-conds.rkt")
(define-runtime-path file-level-expected.rkt "file-level/01-expected.rkt")
(define-runtime-path file-level-02-comments.rkt "file-level/02-comments.rkt")
(define-runtime-path file-level-02-expected.rkt "file-level/02-expected.rkt")
(define-runtime-path file-level-03-nested-contracts.rkt "file-level/03-nested-contracts.rkt")
(define-runtime-path file-level-03-expected.rkt "file-level/03-expected.rkt")
(define-runtime-path file-level-04-single-conds.rkt "file-level/99-conds.rkt")
(define-runtime-path file-level-04-expected.rkt "file-level/99-expected.rkt")
(module+ main
(require racket/pretty)
(pretty-write
(get-format-instructions file-level-04-single-conds.rkt))
)
(module+ test
(require rackunit)
(check-equal? (with-output-to-string
(λ () (dynamic-require file-level-conds.rkt #f)))
(with-output-to-string
(λ () (dynamic-require file-level-expected.rkt #f))))
(check-equal? (format-file file-level-conds.rkt)
(file->string file-level-expected.rkt))
(check-equal? (with-output-to-string
(λ () (dynamic-require file-level-02-comments.rkt #f)))
(with-output-to-string
(λ () (dynamic-require file-level-02-expected.rkt #f))))
(check-equal? (with-output-to-string
(λ () (dynamic-require file-level-03-nested-contracts.rkt #f)))
(with-output-to-string
(λ () (dynamic-require file-level-03-expected.rkt #f))))
(check-equal? (format-file file-level-03-nested-contracts.rkt)
(file->string file-level-03-expected.rkt))
(check-equal? (with-output-to-string
(λ () (dynamic-require file-level-04-single-conds.rkt #f)))
(with-output-to-string
(λ () (dynamic-require file-level-04-expected.rkt #f))))
(check-equal? (format-file file-level-04-single-conds.rkt)
(file->string file-level-04-expected.rkt))
(file->string file-level-02-expected.rkt)))
|
dc1ac5c0be1d4e814f66ff4da9481f31d3381e9840c978e17bea5453cbaf8a35 | weavery/clarity.ml | lexer.mli | (* This is free and unencumbered software released into the public domain. *)
val read_token : Lexing.lexbuf -> token
| null | https://raw.githubusercontent.com/weavery/clarity.ml/20e48b275eaacd7fa71a7b9b7796977f0aba95cb/src/lexer.mli | ocaml | This is free and unencumbered software released into the public domain. |
val read_token : Lexing.lexbuf -> token
|
1b3d5e4526b738083972fcf0d2d8ac21ec892537d00bc67cd94def6a83746c86 | freckle/stackctl | VerboseOption.hs | module Stackctl.VerboseOption
( Verbosity
, verbositySetLogLevels
, HasVerboseOption(..)
, verboseOption
) where
import Stackctl.Prelude
import Blammo.Logging.LogSettings.LogLevels
import Options.Applicative
newtype Verbosity = Verbosity [()]
deriving newtype (Semigroup, Monoid)
verbositySetLogLevels :: Verbosity -> (LogSettings -> LogSettings)
verbositySetLogLevels (Verbosity bs) = case bs of
[] -> id
[_] -> setLogSettingsLevels v
[_, _] -> setLogSettingsLevels vv
_ -> setLogSettingsLevels vvv
where
v = newLogLevels LevelDebug [("Amazonka", LevelInfo)]
vv = newLogLevels LevelDebug []
vvv = newLogLevels (LevelOther "trace") []
class HasVerboseOption env where
verboseOptionL :: Lens' env Verbosity
instance HasVerboseOption Verbosity where
verboseOptionL = id
verboseOption :: Parser Verbosity
verboseOption = fmap Verbosity $ many $ flag' () $ mconcat
[ short 'v'
, long "verbose"
, help "Increase verbosity (can be passed multiple times)"
]
| null | https://raw.githubusercontent.com/freckle/stackctl/480255ddafd865ba8cfd06ed8afe67c22ff78020/src/Stackctl/VerboseOption.hs | haskell | module Stackctl.VerboseOption
( Verbosity
, verbositySetLogLevels
, HasVerboseOption(..)
, verboseOption
) where
import Stackctl.Prelude
import Blammo.Logging.LogSettings.LogLevels
import Options.Applicative
newtype Verbosity = Verbosity [()]
deriving newtype (Semigroup, Monoid)
verbositySetLogLevels :: Verbosity -> (LogSettings -> LogSettings)
verbositySetLogLevels (Verbosity bs) = case bs of
[] -> id
[_] -> setLogSettingsLevels v
[_, _] -> setLogSettingsLevels vv
_ -> setLogSettingsLevels vvv
where
v = newLogLevels LevelDebug [("Amazonka", LevelInfo)]
vv = newLogLevels LevelDebug []
vvv = newLogLevels (LevelOther "trace") []
class HasVerboseOption env where
verboseOptionL :: Lens' env Verbosity
instance HasVerboseOption Verbosity where
verboseOptionL = id
verboseOption :: Parser Verbosity
verboseOption = fmap Verbosity $ many $ flag' () $ mconcat
[ short 'v'
, long "verbose"
, help "Increase verbosity (can be passed multiple times)"
]
| |
c11a8ad8c9a5d60c4c8f58894787de74d37c1d5c83e6f303a6138e3bbf77acb3 | Clojure2D/clojure2d-examples | interval.clj | (ns rt4.in-one-weekend.ch12a.interval
(:refer-clojure :exclude [empty])
(:require [fastmath.core :as m]))
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
(m/use-primitive-operators)
(defprotocol IntervalProto
(contains [interval x]) ;; a <= x <= b
;; introduced due to the bug in the book (in the time of writing this code), a < x <= b
(contains- [interval x])
(clamp [interval x]))
(defrecord Interval [^double mn ^double mx]
IntervalProto
(contains [_ x] (m/between? mn mx ^double x))
(contains- [_ x] (m/between-? mn mx ^double x))
(clamp [_ x] (m/constrain ^double x mn mx)))
(defn interval
([] (->Interval ##Inf ##-Inf))
([m] (map->Interval m))
([^double mn ^double mx] (->Interval mn mx)))
(def empty (interval))
(def universe (interval ##-Inf ##Inf))
| null | https://raw.githubusercontent.com/Clojure2D/clojure2d-examples/ead92d6f17744b91070e6308157364ad4eab8a1b/src/rt4/in_one_weekend/ch12a/interval.clj | clojure | a <= x <= b
introduced due to the bug in the book (in the time of writing this code), a < x <= b | (ns rt4.in-one-weekend.ch12a.interval
(:refer-clojure :exclude [empty])
(:require [fastmath.core :as m]))
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
(m/use-primitive-operators)
(defprotocol IntervalProto
(contains- [interval x])
(clamp [interval x]))
(defrecord Interval [^double mn ^double mx]
IntervalProto
(contains [_ x] (m/between? mn mx ^double x))
(contains- [_ x] (m/between-? mn mx ^double x))
(clamp [_ x] (m/constrain ^double x mn mx)))
(defn interval
([] (->Interval ##Inf ##-Inf))
([m] (map->Interval m))
([^double mn ^double mx] (->Interval mn mx)))
(def empty (interval))
(def universe (interval ##-Inf ##Inf))
|
8bdfed25160fa84bf87e4ac97e5313da67c9809a653d673591d87f4abb121af0 | mathiasbourgoin/SPOC | belote.ml | open Spoc
ktype color = Spades | Hearts | Diamonds | Clubs ;;
ktype colval = {c:color; v : int32} ;;
ktype card =
Ace of color
| King of color
| Queen of color
| Jack of color
| Other of colval;;
let compute = kern cards trump values n ->
let value = fun a trump->
match a with
| Ace c -> 11
| King c -> 4
| Queen c -> 3
| Jack c -> if c = trump then 20 else 2
| Other cv ->
if cv.v = 10 then 10 else if (cv.c = trump) && (cv.v = 9) then 14 else 0
in
let open Std in
let i = thread_idx_x + block_dim_x * block_idx_x in
if i < n then
values.[<i>] <- value cards.[<i>] trump.[<0>]
let n = 5_000_0000
let cpt = ref 0
let tot_time = ref 0.
let measure_time f s =
let t0 = Unix.gettimeofday () in
let a = f () in
let t1 = Unix.gettimeofday () in
Printf.printf "%s : time %d : %Fs\n%!" s !cpt (t1 -. t0);
tot_time := !tot_time +. (t1 -. t0);
incr cpt;
a;;
let _ =
Random.self_init ();
let devs = Spoc.Devices.init () in
let dev = devs.(2) in
let cards = Vector.create (Vector.Custom customCard) n in
let values = Vector.create Vector.int32 n in
let trump = Vector.create (Vector.Custom customColor) 1 in
(* let cards_c = Array.create n (King Spades)
and values_c = Array.create n 0 in*)
for i = 0 to n - 1 do
let c = (let j = Random.int 12 + 1 in
let c =
match Random.int 3 with
| 0 -> Spades
| 1 -> Hearts
| 2 -> Diamonds
| 3 -> Clubs
| _ -> assert false in
match j with
| 11 -> Jack c
| 12 -> Queen c
| 13 -> King c
| 1 -> Ace c
| a -> Other {c = c; v = Int32.of_int a}) in
Mem.set cards i c;
cards_c.(i ) < - c ;
done;
Mem.set trump 0 Spades;
ignore(Kirc.gen ~only:Devices.OpenCL compute);
let threadsPerBlock = match dev.Devices.specific_info with
| Devices.OpenCLInfo clI ->
(match clI.Devices.device_type with
| Devices.CL_DEVICE_TYPE_CPU -> 1
| _ -> 256)
| _ -> 256
in
let blocksPerGrid = (n + threadsPerBlock -1) / threadsPerBlock in
let block = {Spoc.Kernel.blockX = threadsPerBlock;
Spoc.Kernel.blockY = 1;
Spoc.Kernel.blockZ = 1;} in
let grid = {Spoc.Kernel.gridX = blocksPerGrid;
Spoc.Kernel.gridY = 1;
Spoc.Kernel.gridZ = 1;} in
let name = dev.Spoc.Devices.general_info.Spoc.Devices.name in
measure_time (fun () ->
for i = 0 to 9 do
Kirc.run compute (cards, trump, values, n) (block,grid) 0 dev;
Mem.to_cpu values ();
Devices.flush dev ();
done) ("GPU "^name);
let string_of_card c =
let string_of_color c =
match c with
| Spades - > " Spades "
| Hearts - > " Hearts "
| Diamonds - > " Diamonds "
| Clubs - > " Clubs "
in
match c with
| Ace c - > " Ace of " ^string_of_color c
| King c - > " King of " ^string_of_color c
| Queen c - > " Queen of " ^string_of_color c
| > " of " ^string_of_color c
| Other { c;v } - > ( " Other of " ^string_of_color c^ " of " ^Int32.to_string v )
in
for i = 0 to 100 do
Printf.printf " % s : % ld\n " ( string_of_card ( Mem.get cards i ) ) ( Mem.get values i )
done
let string_of_color c =
match c with
| Spades -> "Spades"
| Hearts -> "Hearts"
| Diamonds -> "Diamonds"
| Clubs -> "Clubs"
in
match c with
| Ace c -> "Ace of "^string_of_color c
| King c -> "King of " ^string_of_color c
| Queen c -> "Queen of "^string_of_color c
| Jack c -> "Jack of "^string_of_color c
| Other {c;v} -> ("Other of "^string_of_color c^" of val "^Int32.to_string v)
in
for i = 0 to 100 do
Printf.printf "%s val : %ld\n" (string_of_card (Mem.get cards i)) (Mem.get values i)
done
*)
let compute cards trump =
let value = fun a - >
match a with
| Ace c - > 11
| King c - > 4
| Queen c - > 3
| if c = trump then 20 else 2
| Other cv - >
if = 10l then 10 else if ( = trump ) & & ( cv.v = 9l ) then 14 else 0
in
for i = 0 to n -1 do
values_c.(i ) < - value cards.(i )
done ;
in
measure_time ( fun ( ) - >
for i = 0 to 9 do
ignore(compute ) ;
done ) " CPU " ;
let compute cards trump =
let value = fun a ->
match a with
| Ace c -> 11
| King c -> 4
| Queen c -> 3
| Jack c -> if c = trump then 20 else 2
| Other cv ->
if cv.v = 10l then 10 else if (cv.c = trump) && (cv.v = 9l) then 14 else 0
in
for i = 0 to n -1 do
values_c.(i) <- value cards.(i)
done;
in
measure_time (fun () ->
for i = 0 to 9 do
ignore(compute cards_c Spades);
done) "CPU";
*)
| null | https://raw.githubusercontent.com/mathiasbourgoin/SPOC/db8ac84fce7077caba1b2c33e9b2a01c1989620b/SpocLibs/Sarek/extension/tests/belote.ml | ocaml | let cards_c = Array.create n (King Spades)
and values_c = Array.create n 0 in | open Spoc
ktype color = Spades | Hearts | Diamonds | Clubs ;;
ktype colval = {c:color; v : int32} ;;
ktype card =
Ace of color
| King of color
| Queen of color
| Jack of color
| Other of colval;;
let compute = kern cards trump values n ->
let value = fun a trump->
match a with
| Ace c -> 11
| King c -> 4
| Queen c -> 3
| Jack c -> if c = trump then 20 else 2
| Other cv ->
if cv.v = 10 then 10 else if (cv.c = trump) && (cv.v = 9) then 14 else 0
in
let open Std in
let i = thread_idx_x + block_dim_x * block_idx_x in
if i < n then
values.[<i>] <- value cards.[<i>] trump.[<0>]
let n = 5_000_0000
let cpt = ref 0
let tot_time = ref 0.
let measure_time f s =
let t0 = Unix.gettimeofday () in
let a = f () in
let t1 = Unix.gettimeofday () in
Printf.printf "%s : time %d : %Fs\n%!" s !cpt (t1 -. t0);
tot_time := !tot_time +. (t1 -. t0);
incr cpt;
a;;
let _ =
Random.self_init ();
let devs = Spoc.Devices.init () in
let dev = devs.(2) in
let cards = Vector.create (Vector.Custom customCard) n in
let values = Vector.create Vector.int32 n in
let trump = Vector.create (Vector.Custom customColor) 1 in
for i = 0 to n - 1 do
let c = (let j = Random.int 12 + 1 in
let c =
match Random.int 3 with
| 0 -> Spades
| 1 -> Hearts
| 2 -> Diamonds
| 3 -> Clubs
| _ -> assert false in
match j with
| 11 -> Jack c
| 12 -> Queen c
| 13 -> King c
| 1 -> Ace c
| a -> Other {c = c; v = Int32.of_int a}) in
Mem.set cards i c;
cards_c.(i ) < - c ;
done;
Mem.set trump 0 Spades;
ignore(Kirc.gen ~only:Devices.OpenCL compute);
let threadsPerBlock = match dev.Devices.specific_info with
| Devices.OpenCLInfo clI ->
(match clI.Devices.device_type with
| Devices.CL_DEVICE_TYPE_CPU -> 1
| _ -> 256)
| _ -> 256
in
let blocksPerGrid = (n + threadsPerBlock -1) / threadsPerBlock in
let block = {Spoc.Kernel.blockX = threadsPerBlock;
Spoc.Kernel.blockY = 1;
Spoc.Kernel.blockZ = 1;} in
let grid = {Spoc.Kernel.gridX = blocksPerGrid;
Spoc.Kernel.gridY = 1;
Spoc.Kernel.gridZ = 1;} in
let name = dev.Spoc.Devices.general_info.Spoc.Devices.name in
measure_time (fun () ->
for i = 0 to 9 do
Kirc.run compute (cards, trump, values, n) (block,grid) 0 dev;
Mem.to_cpu values ();
Devices.flush dev ();
done) ("GPU "^name);
let string_of_card c =
let string_of_color c =
match c with
| Spades - > " Spades "
| Hearts - > " Hearts "
| Diamonds - > " Diamonds "
| Clubs - > " Clubs "
in
match c with
| Ace c - > " Ace of " ^string_of_color c
| King c - > " King of " ^string_of_color c
| Queen c - > " Queen of " ^string_of_color c
| > " of " ^string_of_color c
| Other { c;v } - > ( " Other of " ^string_of_color c^ " of " ^Int32.to_string v )
in
for i = 0 to 100 do
Printf.printf " % s : % ld\n " ( string_of_card ( Mem.get cards i ) ) ( Mem.get values i )
done
let string_of_color c =
match c with
| Spades -> "Spades"
| Hearts -> "Hearts"
| Diamonds -> "Diamonds"
| Clubs -> "Clubs"
in
match c with
| Ace c -> "Ace of "^string_of_color c
| King c -> "King of " ^string_of_color c
| Queen c -> "Queen of "^string_of_color c
| Jack c -> "Jack of "^string_of_color c
| Other {c;v} -> ("Other of "^string_of_color c^" of val "^Int32.to_string v)
in
for i = 0 to 100 do
Printf.printf "%s val : %ld\n" (string_of_card (Mem.get cards i)) (Mem.get values i)
done
*)
let compute cards trump =
let value = fun a - >
match a with
| Ace c - > 11
| King c - > 4
| Queen c - > 3
| if c = trump then 20 else 2
| Other cv - >
if = 10l then 10 else if ( = trump ) & & ( cv.v = 9l ) then 14 else 0
in
for i = 0 to n -1 do
values_c.(i ) < - value cards.(i )
done ;
in
measure_time ( fun ( ) - >
for i = 0 to 9 do
ignore(compute ) ;
done ) " CPU " ;
let compute cards trump =
let value = fun a ->
match a with
| Ace c -> 11
| King c -> 4
| Queen c -> 3
| Jack c -> if c = trump then 20 else 2
| Other cv ->
if cv.v = 10l then 10 else if (cv.c = trump) && (cv.v = 9l) then 14 else 0
in
for i = 0 to n -1 do
values_c.(i) <- value cards.(i)
done;
in
measure_time (fun () ->
for i = 0 to 9 do
ignore(compute cards_c Spades);
done) "CPU";
*)
|
adcc756c5b08eb9d8de325dbdc9b2070dedb72936cd12a746b1eae75476d06b1 | wdebeaum/step | youth.lisp | ;;;;
w::youth
;;;;
(define-words :pos W::n
:words (
(w::youth
(senses
((LF-PARENT ONT::lifecycle-stage)
(TEMPL bare-pred-TEMPL)
)
)
)
))
(define-words :pos W::n
:words (
(w::youth
(senses
((LF-PARENT ONT::person)
(TEMPL count-pred-TEMPL)
)
)
)
))
| null | https://raw.githubusercontent.com/wdebeaum/step/f38c07d9cd3a58d0e0183159d4445de9a0eafe26/src/LexiconManager/Data/new/youth.lisp | lisp | w::youth
(define-words :pos W::n
:words (
(w::youth
(senses
((LF-PARENT ONT::lifecycle-stage)
(TEMPL bare-pred-TEMPL)
)
)
)
))
(define-words :pos W::n
:words (
(w::youth
(senses
((LF-PARENT ONT::person)
(TEMPL count-pred-TEMPL)
)
)
)
))
| |
f2e4da29d9fc568ec32c021c9506011b47591174d51adb72e5738ed8b73ecba8 | exercism/haskell | Hexadecimal.hs | module Hexadecimal (hexToInt) where
{-# ANN digitToInt "HLint: ignore Use isDigit" #-}
digitToInt :: Char -> Maybe Int
digitToInt c
| c >= '0' && c <= '9' = Just $ n - fromEnum '0'
| c >= 'a' && c <= 'f' = Just $ n - fromEnum 'a' + 10
| c >= 'A' && c <= 'F' = Just $ n - fromEnum 'A' + 10
| otherwise = Nothing
where n = fromEnum c
hexToInt :: String -> Int
hexToInt = go 0
where
go acc (c:cs) = case digitToInt c of
Just n -> (go $! acc * 16 + n) cs
_ -> 0
go acc [] = acc
| null | https://raw.githubusercontent.com/exercism/haskell/ae17e9fc5ca736a228db6dda5e3f3b057fa6f3d0/exercises/practice/hexadecimal/.meta/examples/success-standard/src/Hexadecimal.hs | haskell | # ANN digitToInt "HLint: ignore Use isDigit" # | module Hexadecimal (hexToInt) where
digitToInt :: Char -> Maybe Int
digitToInt c
| c >= '0' && c <= '9' = Just $ n - fromEnum '0'
| c >= 'a' && c <= 'f' = Just $ n - fromEnum 'a' + 10
| c >= 'A' && c <= 'F' = Just $ n - fromEnum 'A' + 10
| otherwise = Nothing
where n = fromEnum c
hexToInt :: String -> Int
hexToInt = go 0
where
go acc (c:cs) = case digitToInt c of
Just n -> (go $! acc * 16 + n) cs
_ -> 0
go acc [] = acc
|
e1d0ac78cc376d18698b901b0eb2e415e62d5b997933ca5a5e3e751d2b3a6078 | stepcut/plugins | Main.hs | import System.Plugins
import API
main = do
let plist = ["../Plugin1.o", "../Plugin2.o", "../Plugin3.o"]
plugins <- mapM (\p -> load p ["../api"] [] "resource") plist
let functions = map (valueOf . fromLoadSuc) plugins
-- apply the function from each plugin in turn
mapM_ (\f -> putStrLn $ f "haskell is for hackers") functions
fromLoadSuc (LoadFailure _) = error "load failed"
fromLoadSuc (LoadSuccess _ v) = v
| null | https://raw.githubusercontent.com/stepcut/plugins/52c660b5bc71182627d14c1d333d0234050cac01/testsuite/multi/3plugins/prog/Main.hs | haskell | apply the function from each plugin in turn | import System.Plugins
import API
main = do
let plist = ["../Plugin1.o", "../Plugin2.o", "../Plugin3.o"]
plugins <- mapM (\p -> load p ["../api"] [] "resource") plist
let functions = map (valueOf . fromLoadSuc) plugins
mapM_ (\f -> putStrLn $ f "haskell is for hackers") functions
fromLoadSuc (LoadFailure _) = error "load failed"
fromLoadSuc (LoadSuccess _ v) = v
|
c8cafa9d6f603c19233445912ae9ab6628104688ba985cf49d3503ae1a36710c | lispgames/glkit | macros.lisp | (in-package :kit.gl.shader)
;; SHADER-DICTIONARIES
(defvar *shader-dictionaries* (make-hash-table))
(defun find-dictionary (name)
(or (gethash name *shader-dictionaries*)
(error "Shader dictionary not found: ~S" name)))
(defun define-dictionary (name programs &key (path *default-pathname-defaults*)
shaders)
(setf (gethash name *shader-dictionaries*)
(make-instance 'shader-dictionary-definition
:name name
:path path
:shaders shaders
:programs programs)))
(defmacro dict (name)
`(find-dictionary ',name))
PARSE - SHADER - SOURCE
(defgeneric parse-shader-source (source shader-type shader-list)
(:documentation "Specialize on `SOURCE` and return a string.
`SHADER-TYPE` is the type (e.g., `:fragment-shader`). Specializations
are predefined for *string*, *list*, and *symbol*; do not redefine
these.
`SHADER-LIST` is an optional ALIST of existing \"named\" shader
definitions in the form `(NAME . (TYPE VALUE))`. Note that `VALUE`
may not be a string, and `PARSE-SHADER-SOURCE` must be called
recursively to resolve it."))
(defgeneric parse-shader-source-complex (key params shader-type shader-list)
(:documentation "Much like `PARSE-SHADER-SOURCE`, except called when
the source is a list. In this case, `KEY` is the car of that list,
`PARAMS` is the cdr, and `SHADER-TYPE` and `SHADER-LIST` are as per
`PARSE-SHADER-SOURCE`."))
(defmethod parse-shader-source ((source string) shader-type shader-list)
(declare (ignore shader-type shader-list))
source)
(defmethod parse-shader-source ((source list) shader-type shader-list)
(parse-shader-source-complex (car source) (cdr source) shader-type shader-list))
(defmethod parse-shader-source ((source symbol) shader-type shader-list)
(declare (ignore shader-type))
(let ((shader (assoc source shader-list)))
(if shader
(parse-shader-source (caddr shader) (cadr shader) shader-list)
(error "Shader not found: ~S" source))))
(defmethod parse-shader-source-complex ((key (eql :file)) params shader-type shader-list)
(declare (ignore shader-type shader-list))
(read-file-into-string (car params)))
DEFDICT
(defmacro defdict (name (&key shader-path (uniform-style :underscore)) &body options)
(let ((shaders) (programs))
(loop for option in options
do (alexandria:switch ((car option) :test 'equalp
:key 'symbol-name)
("shader"
(destructuring-bind (name type value) (cdr option)
(push (list name type value) shaders)))
("program"
(destructuring-bind (&rest options) (cdr option)
(if (listp (car options))
(destructuring-bind ((name &key attrs uniforms)
&rest shaders)
options
(push `(make-instance 'program-source
:name ',name
:uniform-style ',uniform-style
:uniforms ',uniforms
:attrs ',attrs
:shaders ',shaders)
programs))
(destructuring-bind (name uniform-list &rest shaders)
options
(push `(make-instance 'program-source
:name ',name
:uniform-style ',uniform-style
:uniforms ',uniform-list
:shaders ',shaders)
programs)))))))
`(define-dictionary ',name (list ,@programs)
:path (or ,shader-path
*default-pathname-defaults*)
:shaders ',shaders)))
| null | https://raw.githubusercontent.com/lispgames/glkit/0d8e7c5fed4231f2177afcf0f3ff66f196ed6a46/src/shader-dict/macros.lisp | lisp | SHADER-DICTIONARIES
do not redefine | (in-package :kit.gl.shader)
(defvar *shader-dictionaries* (make-hash-table))
(defun find-dictionary (name)
(or (gethash name *shader-dictionaries*)
(error "Shader dictionary not found: ~S" name)))
(defun define-dictionary (name programs &key (path *default-pathname-defaults*)
shaders)
(setf (gethash name *shader-dictionaries*)
(make-instance 'shader-dictionary-definition
:name name
:path path
:shaders shaders
:programs programs)))
(defmacro dict (name)
`(find-dictionary ',name))
PARSE - SHADER - SOURCE
(defgeneric parse-shader-source (source shader-type shader-list)
(:documentation "Specialize on `SOURCE` and return a string.
`SHADER-TYPE` is the type (e.g., `:fragment-shader`). Specializations
these.
`SHADER-LIST` is an optional ALIST of existing \"named\" shader
definitions in the form `(NAME . (TYPE VALUE))`. Note that `VALUE`
may not be a string, and `PARSE-SHADER-SOURCE` must be called
recursively to resolve it."))
(defgeneric parse-shader-source-complex (key params shader-type shader-list)
(:documentation "Much like `PARSE-SHADER-SOURCE`, except called when
the source is a list. In this case, `KEY` is the car of that list,
`PARAMS` is the cdr, and `SHADER-TYPE` and `SHADER-LIST` are as per
`PARSE-SHADER-SOURCE`."))
(defmethod parse-shader-source ((source string) shader-type shader-list)
(declare (ignore shader-type shader-list))
source)
(defmethod parse-shader-source ((source list) shader-type shader-list)
(parse-shader-source-complex (car source) (cdr source) shader-type shader-list))
(defmethod parse-shader-source ((source symbol) shader-type shader-list)
(declare (ignore shader-type))
(let ((shader (assoc source shader-list)))
(if shader
(parse-shader-source (caddr shader) (cadr shader) shader-list)
(error "Shader not found: ~S" source))))
(defmethod parse-shader-source-complex ((key (eql :file)) params shader-type shader-list)
(declare (ignore shader-type shader-list))
(read-file-into-string (car params)))
DEFDICT
(defmacro defdict (name (&key shader-path (uniform-style :underscore)) &body options)
(let ((shaders) (programs))
(loop for option in options
do (alexandria:switch ((car option) :test 'equalp
:key 'symbol-name)
("shader"
(destructuring-bind (name type value) (cdr option)
(push (list name type value) shaders)))
("program"
(destructuring-bind (&rest options) (cdr option)
(if (listp (car options))
(destructuring-bind ((name &key attrs uniforms)
&rest shaders)
options
(push `(make-instance 'program-source
:name ',name
:uniform-style ',uniform-style
:uniforms ',uniforms
:attrs ',attrs
:shaders ',shaders)
programs))
(destructuring-bind (name uniform-list &rest shaders)
options
(push `(make-instance 'program-source
:name ',name
:uniform-style ',uniform-style
:uniforms ',uniform-list
:shaders ',shaders)
programs)))))))
`(define-dictionary ',name (list ,@programs)
:path (or ,shader-path
*default-pathname-defaults*)
:shaders ',shaders)))
|
c1fa1d508a35ffdbda1bbf81ff8f78843dfd07a9f60a406f6a89ff9cad025b5c | deadpendency/deadpendency | DetermineDependenciesGitHubC.hs | # LANGUAGE DataKinds #
module DD.Effect.DetermineDependencies.Carrier.DetermineDependenciesGitHubC
( DetermineDependenciesGitHubIOC (..),
)
where
import Common.Effect.AppEventEmit.AppEventEmit
import Common.Effect.AppEventEmit.Model.AppEventAdditional
import Common.Effect.AppEventEmit.Model.AppEventMessage
import Common.Effect.GitHub.FetchRepoFiles.FetchRepoFiles
import Common.Effect.GitHub.FetchRepoFiles.Model.RepoFilesRequest
import Common.Effect.GitHub.FetchRepoFiles.Model.RepoFilesResult
import Common.Effect.GitHub.SearchRepoDirectoryFiles.Model.SearchRepoDirectoryFilesRequest
import Common.Effect.GitHub.SearchRepoDirectoryFiles.SearchRepoDirectoryFiles
import Common.Effect.GitHub.SearchRepoFiles.Model.SearchRepoFilesRequest
import Common.Effect.GitHub.SearchRepoFiles.SearchRepoFiles
import Common.Effect.Util
import Common.Model.Dependency.Basic.BasicDependency
import Common.Model.Dependency.Basic.BasicRepoDependencies
import Common.Model.Dependency.File.DependenciesFileLoad
import Common.Model.Dependency.File.DependenciesFileLoadDetails
import Common.Model.Dependency.File.DependenciesFileType
import Common.Model.Dependency.Ignored.IgnoredRepoDependencies
import Common.Model.Ecosystem.ProgrammingLanguage
import Common.Model.Error.CommonError
import Common.Model.Git.GitPath
import Common.Model.Git.GitSha
import Common.Model.Git.QualifiedRepo
import Common.Model.GitHub.GHRepoFile
import Common.Model.RepoConfig.FileLoadPlan
import Common.Model.RepoConfig.IgnoreDependenciesConfig (IgnoreDependenciesConfig)
import Common.Model.RepoConfig.RepoConfig
import Control.Algebra (Algebra (..), Has, (:+:) (..))
import Control.Effect.State (State)
import Control.Effect.Throw (Throw, liftEither, throwError)
import DD.Effect.DetermineDependencies.Backend.DependencyLanguageFilesBackend
import DD.Effect.DetermineDependencies.Backend.DetermineDependencyBackend
import DD.Effect.DetermineDependencies.Backend.Model.RawDepFileContent
import DD.Effect.DetermineDependencies.DetermineDependencies (DetermineDependencies (..))
import DD.Effect.DetermineDependencies.Model.DetermineDependenciesError
import DD.Effect.DetermineDependencies.Model.DetermineDependenciesResult
import Data.Map.Strict qualified as M
import Data.Text qualified as Text
import Data.Vector qualified as V
import Data.Vector.NonEmpty qualified as NV
-- | Carrier that interprets the 'DetermineDependencies' effect against the
-- GitHub-backed search/fetch effects ('SearchRepoFiles',
-- 'SearchRepoDirectoryFiles', 'FetchRepoFiles').
newtype DetermineDependenciesGitHubIOC m a = DetermineDependenciesGitHubIOC {runDetermineDependenciesGitHubIOC :: m a}
  deriving newtype (Functor, Applicative, Monad)
-- | Interpret a 'DetermineDependencies' request: work out which dependency
-- files exist for the repo's languages (plus user-configured extras), fetch
-- and parse them, then merge, de-duplicate and apply the ignore config.
-- Guards reject runs with more than 100 files to fetch or more than 500
-- resulting dependencies.  (Fix: the inline comment below had lost its
-- @--@ marker in extraction, which broke the syntax.)
instance
  ( Algebra sig m,
    Has AppEventEmit sig m,
    Has (Throw DetermineDependenciesError) sig m,
    Has (Throw CommonError) sig m,
    Has FetchRepoFiles sig m,
    Has SearchRepoFiles sig m,
    Has SearchRepoDirectoryFiles sig m,
    Has (State (Maybe RepoConfig)) sig m
  ) =>
  Algebra (DetermineDependencies :+: sig) (DetermineDependenciesGitHubIOC m)
  where
  alg hdl sig ctx = case sig of
    (L (DetermineDependencies request)) -> do
      emitAppEventInfoA (AppEventMessage "Started: Determine dependencies") (AppEventAdditional request)
      repoConfig <- getRepoConfig
      let fileLoadPlan = repoConfig ^. #_fileLoadPlan
          programmingLanguages = request ^. #_programmingLanguages
          toLoadLanguages = factorLoadPlan fileLoadPlan programmingLanguages
          -- similar languages can produce the same dependency file load ie. java + kotlin
          dependenciesFileLoads = ordNubV $ V.concatMap determineDependencyLanguageFiles toLoadLanguages
          userFileLoads = request ^. #_additionalDependencyFiles
          qualifiedRepo = request ^. #_qualifiedRepo
          repoCommitSha = request ^. #_commitSha
      -- determine exact dep files to load
      depFileTypeLoads <- determineDepFiles qualifiedRepo repoCommitSha userFileLoads dependenciesFileLoads
      let filesToLoadCount = countDepFileLoads depFileTypeLoads
      when
        (filesToLoadCount > 100)
        (throwError $ TooManyDependencyFiles filesToLoadCount)
      -- fetch files
      rawDepFiles <- join <$> for depFileTypeLoads (loadDepTypesContent qualifiedRepo repoCommitSha)
      -- convert files into the actual dependencies
      initialDeps <- liftEither $ join <$> traverse loadFileDeps rawDepFiles
      let additionalDeps = request ^. #_additionalDependencies
          toIgnoreLanguageDeps = request ^. #_ignoreDependenciesConfig
      -- apply final de-dup, ignore and validity checks
      (ignoredRepoDependencies, nvFinalDeps) <- mungeFinalDeps additionalDeps toIgnoreLanguageDeps initialDeps
      let finalDepCount = NV.length nvFinalDeps
      when
        (finalDepCount > 500)
        (throwError $ TooManyDependencies finalDepCount)
      let result =
            DetermineDependenciesResult
              { _basicRepoDependencies = BasicRepoDependencies nvFinalDeps,
                _ignoredRepoDependencies = ignoredRepoDependencies
              }
      emitAppEventInfoA (AppEventMessage "Finished: Determine dependencies") (AppEventAdditional result)
      DetermineDependenciesGitHubIOC $ pure (ctx $> result)
    (R other) -> DetermineDependenciesGitHubIOC $ alg (runDetermineDependenciesGitHubIOC . hdl) other ctx
-- TYPES
{-
This is highly complex and untested.. It is probably the 'worst' code in the codebase.
It is something that has grown over time and towards the end of the project, where Deadpendency
was probably not going to survive. So it is a middle ground of effort vs maintainability.
Why is it so complex?
Firstly, the code in essence is quite simple:
1. Search for dependency files based on languages in use in the repository.
2. Fetch those files and parse them for dependencies.
There are 2 key requirements which escalate things:
1. The files we search and find ('system' files), we expect to never fail to load with a 404. If they do
this can be considered a failure with the app logic and the code should go down the
'oops we had an error' path.
However, the files loaded can also include dependency files included by the user in their
Deadpendency config ('user' files). If these files are missing, we consider that a user error and need to
go down the 'hey you tried to load this dep file, but it doesn't exist path'.
2. We want to fetch all these files in parallel.
The tension is we fetch files with a simple FetchRepoFilesEffect which takes a [File], so if we
fetch these naively in parallel, we lose the information about which file is a system or a user file.
Thus we do not know how to fail in this case.
The ideal solution is a way to keep some tag for each file so we still know and can fetch them fully
in parallel. This is probably the most effort to implement as FetchRepoFilesEffect has no knowledge of
the wider application. Instead this code splits the files into each category and loads them each with
an independent call to FetchRepoFilesEffect, then collates the results.
-}
-- | One file type's worth of pending fetches.  (Fix: the two field notes
-- below had lost their comment markers in extraction, breaking the syntax.)
data DepFileTypeToLoad = DepFileTypeToLoad
  { _depFileType :: DependenciesFileType,
    -- | Paths to fetch, split by provenance:
    --   1. stuff we searched for and found to exist, just need to be fetched
    --   2. stuff the user said to load specifically, is user error if not exist
    _paths :: These (NV.NonEmptyVector GitPath) (NV.NonEmptyVector GitPath)
  }
  deriving stock (Eq, Show, Generic)
-- DETERMINE
-- | Resolve user-configured and language-derived file loads into concrete
-- per-file-type load requests.  The two groups are resolved separately so a
-- later fetch failure can be attributed correctly (user error vs. system
-- error) — the Bool passed to 'loadDepLoads' records that provenance.
determineDepFiles ::
  ( Has SearchRepoFiles sig m,
    Has SearchRepoDirectoryFiles sig m
  ) =>
  QualifiedRepo ->
  GitSha ->
  V.Vector DependenciesFileLoad ->
  V.Vector DependenciesFileLoad ->
  m (V.Vector DepFileTypeToLoad)
determineDepFiles qualifiedRepo gitSha userFileLoads languageFileLoads = do
  let userGroupedDepFileLoads = groupDepFileLoads userFileLoads
      languageGroupedDepFileLoads = groupDepFileLoads languageFileLoads
  -- True: these are user loads, so a missing file is a user error
  userDepFileTypeLoads <-
    concatMaybeV
      <$> for userGroupedDepFileLoads (uncurry (loadDepLoads qualifiedRepo gitSha True))
  languageDepFileTypeLoads <-
    concatMaybeV
      <$> for languageGroupedDepFileLoads (uncurry (loadDepLoads qualifiedRepo gitSha False))
  pure $ userDepFileTypeLoads V.++ languageDepFileTypeLoads
-- | Total number of individual files that will be fetched across all
-- pending loads (used by the \"too many dependency files\" guard).
--
-- Fix: the previous version took 'length' of the 'This'/'That' partitions,
-- counting each entry as ONE file even though an entry holds a non-empty
-- vector of paths — under-counting and weakening the >100 guard.  Now every
-- path is counted in all three 'These' cases.
countDepFileLoads :: V.Vector DepFileTypeToLoad -> Int
countDepFileLoads loads =
  let (justSystem, justUser, bothSystemUserList) = partitionThese $ V.toList (fmap _paths loads)
      systemLength = sum $ fmap NV.length justSystem
      userLength = sum $ fmap NV.length justUser
      bothLength = sum $ fmap (\(system, user) -> NV.length system + NV.length user) bothSystemUserList
   in systemLength + userLength + bothLength
-- | Bucket file loads by their 'DependenciesFileType', collecting each
-- type's load details into a non-empty vector.
groupDepFileLoads :: V.Vector DependenciesFileLoad -> V.Vector (DependenciesFileType, NV.NonEmptyVector DependenciesFileLoadDetails)
groupDepFileLoads fileLoads =
  let insertLoad grouped (DependenciesFileLoad fileType details) =
        M.insertWith (NV.++) fileType (NV.singleton details) grouped
   in V.fromList . M.toList $ V.foldl' insertLoad M.empty fileLoads
-- | Resolve every load-detail for a single file type into concrete repo
-- paths, then bundle them as one 'DepFileTypeToLoad'.  Returns 'Nothing'
-- when nothing was found in either the system or the user slot.
loadDepLoads ::
  ( Has SearchRepoFiles sig m,
    Has SearchRepoDirectoryFiles sig m
  ) =>
  QualifiedRepo ->
  GitSha ->
  Bool ->
  DependenciesFileType ->
  NV.NonEmptyVector DependenciesFileLoadDetails ->
  m (Maybe DepFileTypeToLoad)
loadDepLoads qualifiedRepo gitSha isUserLoad depFileType fileLoadDetails = do
  nvLoads <- for fileLoadDetails (getFilesToLoad qualifiedRepo gitSha isUserLoad)
  -- concatenate the per-detail (system, user) pairs into two flat vectors
  let (allSystem, allUser) = NV.foldl' (\(system, user) (system', user') -> (system V.++ system', user V.++ user')) (V.empty, V.empty) nvLoads
      maybeNvSystem = NV.fromVector allSystem
      maybeNvUser = NV.fromVector allUser
  pure $
    DepFileTypeToLoad depFileType <$> maybesToThese maybeNvSystem maybeNvUser
-- FILE FETCH
-- | Turn one load-detail into concrete repo paths, split as
-- (system paths, user paths).  Only a specific user-configured path can land
-- in the user slot; search and directory-search results are always system
-- paths, since we just observed them to exist.
getFilesToLoad ::
  ( Has SearchRepoFiles sig m,
    Has SearchRepoDirectoryFiles sig m
  ) =>
  QualifiedRepo ->
  GitSha ->
  Bool ->
  DependenciesFileLoadDetails ->
  m (V.Vector GitPath, V.Vector GitPath)
getFilesToLoad qualifiedRepo repoCommitSha isUserLoad loadDetails = do
  case loadDetails of
    DFLDSpecific filePath -> do
      let specificLoads = V.singleton (GitPath filePath)
      pure $
        if isUserLoad
          then (V.empty, specificLoads)
          else (specificLoads, V.empty)
    DFLDSearch fileMatch -> do
      let searchRequest =
            SearchRepoFilesRequest
              { _filesMatch = fileMatch,
                _qualifiedRepo = qualifiedRepo,
                _commitSha = repoCommitSha
              }
      searchResult <- repoFilesSearch searchRequest
      let resultPaths = searchResult ^. #_repoFilePaths
      pure
        (resultPaths, V.empty)
    DFLDDirectorySearch directory fileMatch -> do
      let searchRequest =
            SearchRepoDirectoryFilesRequest
              { _filesMatch = fileMatch,
                _directoryPath = directory,
                _qualifiedRepo = qualifiedRepo,
                _commitSha = repoCommitSha
              }
      searchResult <- repoDirectoryFilesSearch searchRequest
      let resultPaths = searchResult ^. #_repoFilePaths
      pure
        (resultPaths, V.empty)
-- | Fetch the contents of every file behind one 'DepFileTypeToLoad', using
-- the system or user loader (or both) as dictated by '_paths'.  Files that
-- look like symlinks are filtered out before being tagged with the file
-- type.
loadDepTypesContent ::
  ( Has FetchRepoFiles sig m,
    Has (Throw DetermineDependenciesError) sig m
  ) =>
  QualifiedRepo ->
  GitSha ->
  DepFileTypeToLoad ->
  m (V.Vector RawDepFileContent)
loadDepTypesContent qualifiedRepo repoCommitSha depFileToLoad = do
  let depFileType = depFileToLoad ^. #_depFileType
      theseLoads = depFileToLoad ^. #_paths
  nvGHRepoFiles <-
    case theseLoads of
      (This systemLoads) -> loadSystemFilesContent qualifiedRepo repoCommitSha systemLoads
      (That userLoads) -> loadUserFilesContent qualifiedRepo repoCommitSha userLoads
      (These systemLoads userLoads) -> do
        systemFiles <- loadSystemFilesContent qualifiedRepo repoCommitSha systemLoads
        userFiles <- loadUserFilesContent qualifiedRepo repoCommitSha userLoads
        pure $
          systemFiles NV.++ userFiles
  let filteredSymLinks = NV.filter notSymLink nvGHRepoFiles
  pure $
    filteredSymLinks <&> RawDepFileContent depFileType
-- | Fetch files we previously discovered via search.  A missing file here
-- indicates an internal inconsistency between search and fetch, so it is
-- reported as 'DependencyFilesMismatch' (with the diagnostic message).
loadSystemFilesContent ::
  ( Has FetchRepoFiles sig m,
    Has (Throw DetermineDependenciesError) sig m
  ) =>
  QualifiedRepo ->
  GitSha ->
  NV.NonEmptyVector GitPath ->
  m (NV.NonEmptyVector GHRepoFile)
loadSystemFilesContent qualifiedRepo repoCommitSha filePaths = do
  let filesRequest =
        RepoFilesRequest
          { _qualifiedRepo = qualifiedRepo,
            _commitSha = repoCommitSha,
            _filePaths = filePaths
          }
  repoFilesResult <- repoFilesFetch filesRequest
  let fileResults = repoFilesResult ^. #_repoFiles
  results <- liftEither $ first (DependencyFilesMismatch . snd) $ traverse getResult fileResults
  pure $
    results <&> snd
-- | Fetch files the user explicitly configured.  A missing file here is a
-- user mistake, so it is reported as 'UserSpecificedMissingFile' carrying
-- the offending path.  (Fix: the TODO below had lost its @--@ marker in
-- extraction, which broke the syntax.)
loadUserFilesContent ::
  ( Has FetchRepoFiles sig m,
    Has (Throw DetermineDependenciesError) sig m
  ) =>
  QualifiedRepo ->
  GitSha ->
  NV.NonEmptyVector GitPath ->
  m (NV.NonEmptyVector GHRepoFile)
loadUserFilesContent qualifiedRepo repoCommitSha filePaths = do
  -- TODO: use these so we know we have gitpaths to avoid a possible error
  let filesRequest =
        RepoFilesRequest
          { _qualifiedRepo = qualifiedRepo,
            _commitSha = repoCommitSha,
            _filePaths = filePaths
          }
  repoFilesResult <- repoFilesFetch filesRequest
  let fileResults = repoFilesResult ^. #_repoFiles
  results <- liftEither $ first (UserSpecificedMissingFile . fst) $ traverse getResult fileResults
  pure $
    results <&> snd
-- FINAL MASSAGE
-- | Produce a final list of dependencies to be loaded: merge in the
-- user-configured additional dependencies, drop duplicates, then apply the
-- ignore config.  Fails with 'NoDependenciesFound' when nothing remains
-- before ignoring, and 'AllDependenciesIgnored' when everything was ignored.
mungeFinalDeps ::
  (Has (Throw DetermineDependenciesError) sig m) =>
  V.Vector BasicDependency ->
  IgnoreDependenciesConfig ->
  V.Vector BasicDependency ->
  m (IgnoredRepoDependencies, NV.NonEmptyVector BasicDependency)
mungeFinalDeps additionalDeps ignoreDepConfig foundDeps = do
  let withAdditional = addAdditionalDeps additionalDeps foundDeps
      duplicatesIgnored = removeDuplicates withAdditional
  nvAllDeps <- maybeToErrorM NoDependenciesFound (NV.fromVector duplicatesIgnored)
  let (resultIgnoredDeps, maybeResultBasicRepoDeps) = ignoreDeps ignoreDepConfig nvAllDeps
  resultBasicRepoDeps <- maybeToErrorM AllDependenciesIgnored maybeResultBasicRepoDeps
  pure
    (resultIgnoredDeps, resultBasicRepoDeps)
-- UTILITY
-- this is an ugly way to detect a symlink eg. a file whose body is "../elsewhere"
-- will not match symlinks from the root to subdirs, but there is apparently no way to actually
-- detect a symlink using the graphql api
-- (Fix: the first comment line above had lost its marker in extraction,
-- which broke the syntax.)
notSymLink :: GHRepoFile -> Bool
notSymLink (GHRepoFile _ fileContents) = not $ Text.isPrefixOf "../" fileContents
-- | Pair a fetch result with its path, failing with a diagnostic message
-- when the file body is absent.
getResult :: RepoFileResult -> Either (GitPath, Text) (GitPath, GHRepoFile)
getResult (RepoFileResult filePath maybeRepoFile) =
  case maybeRepoFile of
    Just repoFile -> Right (filePath, repoFile)
    Nothing -> Left (filePath, "Unexpected missing file from search: " <> show filePath)
-- | Apply the repo's file-load plan to the detected languages: keep all,
-- keep none, or drop just the disabled ones.
factorLoadPlan :: FileLoadPlan -> V.Vector ProgrammingLanguage -> V.Vector ProgrammingLanguage
factorLoadPlan plan languages =
  case plan of
    FileLoadEnabled -> languages
    FileLoadDisabled -> V.empty
    FileLoadDisabledForLangs nvDisabledLangs -> V.filter (\lang -> NV.notElem lang nvDisabledLangs) languages
| null | https://raw.githubusercontent.com/deadpendency/deadpendency/170d6689658f81842168b90aa3d9e235d416c8bd/apps/dependency-determiner/src/DD/Effect/DetermineDependencies/Carrier/DetermineDependenciesGitHubC.hs | haskell | determine exact dep files to load
fetch files
convert files into the actual dependencies
apply final de-dup, ignore and validity checks
DETERMINE
FILE FETCH
FINAL MASSAGE
| Produce a final list of dependencies to be loaded
UTILITY
will not match symlinks from the root to subdirs, but there is apparently no way to actually
detect a symlink using the graphql api | # LANGUAGE DataKinds #
module DD.Effect.DetermineDependencies.Carrier.DetermineDependenciesGitHubC
( DetermineDependenciesGitHubIOC (..),
)
where
import Common.Effect.AppEventEmit.AppEventEmit
import Common.Effect.AppEventEmit.Model.AppEventAdditional
import Common.Effect.AppEventEmit.Model.AppEventMessage
import Common.Effect.GitHub.FetchRepoFiles.FetchRepoFiles
import Common.Effect.GitHub.FetchRepoFiles.Model.RepoFilesRequest
import Common.Effect.GitHub.FetchRepoFiles.Model.RepoFilesResult
import Common.Effect.GitHub.SearchRepoDirectoryFiles.Model.SearchRepoDirectoryFilesRequest
import Common.Effect.GitHub.SearchRepoDirectoryFiles.SearchRepoDirectoryFiles
import Common.Effect.GitHub.SearchRepoFiles.Model.SearchRepoFilesRequest
import Common.Effect.GitHub.SearchRepoFiles.SearchRepoFiles
import Common.Effect.Util
import Common.Model.Dependency.Basic.BasicDependency
import Common.Model.Dependency.Basic.BasicRepoDependencies
import Common.Model.Dependency.File.DependenciesFileLoad
import Common.Model.Dependency.File.DependenciesFileLoadDetails
import Common.Model.Dependency.File.DependenciesFileType
import Common.Model.Dependency.Ignored.IgnoredRepoDependencies
import Common.Model.Ecosystem.ProgrammingLanguage
import Common.Model.Error.CommonError
import Common.Model.Git.GitPath
import Common.Model.Git.GitSha
import Common.Model.Git.QualifiedRepo
import Common.Model.GitHub.GHRepoFile
import Common.Model.RepoConfig.FileLoadPlan
import Common.Model.RepoConfig.IgnoreDependenciesConfig (IgnoreDependenciesConfig)
import Common.Model.RepoConfig.RepoConfig
import Control.Algebra (Algebra (..), Has, (:+:) (..))
import Control.Effect.State (State)
import Control.Effect.Throw (Throw, liftEither, throwError)
import DD.Effect.DetermineDependencies.Backend.DependencyLanguageFilesBackend
import DD.Effect.DetermineDependencies.Backend.DetermineDependencyBackend
import DD.Effect.DetermineDependencies.Backend.Model.RawDepFileContent
import DD.Effect.DetermineDependencies.DetermineDependencies (DetermineDependencies (..))
import DD.Effect.DetermineDependencies.Model.DetermineDependenciesError
import DD.Effect.DetermineDependencies.Model.DetermineDependenciesResult
import Data.Map.Strict qualified as M
import Data.Text qualified as Text
import Data.Vector qualified as V
import Data.Vector.NonEmpty qualified as NV
newtype DetermineDependenciesGitHubIOC m a = DetermineDependenciesGitHubIOC {runDetermineDependenciesGitHubIOC :: m a}
deriving newtype (Functor, Applicative, Monad)
instance
( Algebra sig m,
Has AppEventEmit sig m,
Has (Throw DetermineDependenciesError) sig m,
Has (Throw CommonError) sig m,
Has FetchRepoFiles sig m,
Has SearchRepoFiles sig m,
Has SearchRepoDirectoryFiles sig m,
Has (State (Maybe RepoConfig)) sig m
) =>
Algebra (DetermineDependencies :+: sig) (DetermineDependenciesGitHubIOC m)
where
alg hdl sig ctx = case sig of
(L (DetermineDependencies request)) -> do
emitAppEventInfoA (AppEventMessage "Started: Determine dependencies") (AppEventAdditional request)
repoConfig <- getRepoConfig
let fileLoadPlan = repoConfig ^. #_fileLoadPlan
programmingLanguages = request ^. #_programmingLanguages
toLoadLanguages = factorLoadPlan fileLoadPlan programmingLanguages
-- similar languages can produce the same dependency file load ie. java + kotlin
dependenciesFileLoads = ordNubV $ V.concatMap determineDependencyLanguageFiles toLoadLanguages
userFileLoads = request ^. #_additionalDependencyFiles
qualifiedRepo = request ^. #_qualifiedRepo
repoCommitSha = request ^. #_commitSha
depFileTypeLoads <- determineDepFiles qualifiedRepo repoCommitSha userFileLoads dependenciesFileLoads
let filesToLoadCount = countDepFileLoads depFileTypeLoads
when
(filesToLoadCount > 100)
(throwError $ TooManyDependencyFiles filesToLoadCount)
rawDepFiles <- join <$> for depFileTypeLoads (loadDepTypesContent qualifiedRepo repoCommitSha)
initialDeps <- liftEither $ join <$> traverse loadFileDeps rawDepFiles
let additionalDeps = request ^. #_additionalDependencies
toIgnoreLanguageDeps = request ^. #_ignoreDependenciesConfig
(ignoredRepoDependencies, nvFinalDeps) <- mungeFinalDeps additionalDeps toIgnoreLanguageDeps initialDeps
let finalDepCount = NV.length nvFinalDeps
when
(finalDepCount > 500)
(throwError $ TooManyDependencies finalDepCount)
let result =
DetermineDependenciesResult
{ _basicRepoDependencies = BasicRepoDependencies nvFinalDeps,
_ignoredRepoDependencies = ignoredRepoDependencies
}
emitAppEventInfoA (AppEventMessage "Finished: Determine dependencies") (AppEventAdditional result)
DetermineDependenciesGitHubIOC $ pure (ctx $> result)
(R other) -> DetermineDependenciesGitHubIOC $ alg (runDetermineDependenciesGitHubIOC . hdl) other ctx
-- TYPES
{-
This is highly complex and untested.. It is probably the 'worst' code in the codebase.
It is something that has grown over time and towards the end of the project, where Deadpendency
was probably not going to survive. So it is a middle ground of effort vs maintainability.
Why is it so complex?
Firstly, the code in essence is quite simple:
1. Search for dependency files based on languages in use in the repository.
2. Fetch those files and parse them for dependencies.
There are 2 key requirements which escalate things:
1. The files we search and find ('system' files), we expect to never fail to load with a 404. If they do
this can be considered a failure with the app logic and the code should go down the
'oops we had an error' path.
However, the files loaded can also include dependency files included by the user in their
Deadpendency config ('user' files). If these files are missing, we consider that a user error and need to
go down the 'hey you tried to load this dep file, but it doesn't exist path'.
2. We want to fetch all these files in parallel.
The tension is we fetch files with a simple FetchRepoFilesEffect which takes a [File], so if we
fetch these naively in parallel, we lose the information about which file is a system or a user file.
Thus we do not know how to fail in this case.
The ideal solution is a way to keep some tag for each file so we still know and can fetch them fully
in parallel. This is probably the most effort to implement as FetchRepoFilesEffect has no knowledge of
the wider application. Instead this code splits the files into each category and loads them each with
an independent call to FetchRepoFilesEffect, then collates the results.
-}
-- | One file type's worth of pending fetches.  (Fix: the two field notes
-- below had lost their comment markers in extraction, breaking the syntax.)
data DepFileTypeToLoad = DepFileTypeToLoad
  { _depFileType :: DependenciesFileType,
    -- | Paths to fetch, split by provenance:
    --   1. stuff we searched for and found to exist, just need to be fetched
    --   2. stuff the user said to load specifically, is user error if not exist
    _paths :: These (NV.NonEmptyVector GitPath) (NV.NonEmptyVector GitPath)
  }
  deriving stock (Eq, Show, Generic)
determineDepFiles ::
( Has SearchRepoFiles sig m,
Has SearchRepoDirectoryFiles sig m
) =>
QualifiedRepo ->
GitSha ->
V.Vector DependenciesFileLoad ->
V.Vector DependenciesFileLoad ->
m (V.Vector DepFileTypeToLoad)
determineDepFiles qualifiedRepo gitSha userFileLoads languageFileLoads = do
let userGroupedDepFileLoads = groupDepFileLoads userFileLoads
languageGroupedDepFileLoads = groupDepFileLoads languageFileLoads
userDepFileTypeLoads <-
concatMaybeV
<$> for userGroupedDepFileLoads (uncurry (loadDepLoads qualifiedRepo gitSha True))
languageDepFileTypeLoads <-
concatMaybeV
<$> for languageGroupedDepFileLoads (uncurry (loadDepLoads qualifiedRepo gitSha False))
pure $ userDepFileTypeLoads V.++ languageDepFileTypeLoads
-- | Total number of individual files that will be fetched across all
-- pending loads (used by the \"too many dependency files\" guard).
--
-- Fix: the previous version took 'length' of the 'This'/'That' partitions,
-- counting each entry as ONE file even though an entry holds a non-empty
-- vector of paths — under-counting and weakening the >100 guard.  Now every
-- path is counted in all three 'These' cases.
countDepFileLoads :: V.Vector DepFileTypeToLoad -> Int
countDepFileLoads loads =
  let (justSystem, justUser, bothSystemUserList) = partitionThese $ V.toList (fmap _paths loads)
      systemLength = sum $ fmap NV.length justSystem
      userLength = sum $ fmap NV.length justUser
      bothLength = sum $ fmap (\(system, user) -> NV.length system + NV.length user) bothSystemUserList
   in systemLength + userLength + bothLength
groupDepFileLoads :: V.Vector DependenciesFileLoad -> V.Vector (DependenciesFileType, NV.NonEmptyVector DependenciesFileLoadDetails)
groupDepFileLoads depFileLoads =
let accLoadsToMap acc (DependenciesFileLoad loadType details) = M.insertWith (NV.++) loadType (NV.singleton details) acc
resultAsMap = V.foldl' accLoadsToMap M.empty depFileLoads
in V.fromList $ M.toList resultAsMap
loadDepLoads ::
( Has SearchRepoFiles sig m,
Has SearchRepoDirectoryFiles sig m
) =>
QualifiedRepo ->
GitSha ->
Bool ->
DependenciesFileType ->
NV.NonEmptyVector DependenciesFileLoadDetails ->
m (Maybe DepFileTypeToLoad)
loadDepLoads qualifiedRepo gitSha isUserLoad depFileType fileLoadDetails = do
nvLoads <- for fileLoadDetails (getFilesToLoad qualifiedRepo gitSha isUserLoad)
let (allSystem, allUser) = NV.foldl' (\(system, user) (system', user') -> (system V.++ system', user V.++ user')) (V.empty, V.empty) nvLoads
maybeNvSystem = NV.fromVector allSystem
maybeNvUser = NV.fromVector allUser
pure $
DepFileTypeToLoad depFileType <$> maybesToThese maybeNvSystem maybeNvUser
getFilesToLoad ::
( Has SearchRepoFiles sig m,
Has SearchRepoDirectoryFiles sig m
) =>
QualifiedRepo ->
GitSha ->
Bool ->
DependenciesFileLoadDetails ->
m (V.Vector GitPath, V.Vector GitPath)
getFilesToLoad qualifiedRepo repoCommitSha isUserLoad loadDetails = do
case loadDetails of
DFLDSpecific filePath -> do
let specificLoads = V.singleton (GitPath filePath)
pure $
if isUserLoad
then (V.empty, specificLoads)
else (specificLoads, V.empty)
DFLDSearch fileMatch -> do
let searchRequest =
SearchRepoFilesRequest
{ _filesMatch = fileMatch,
_qualifiedRepo = qualifiedRepo,
_commitSha = repoCommitSha
}
searchResult <- repoFilesSearch searchRequest
let resultPaths = searchResult ^. #_repoFilePaths
pure
(resultPaths, V.empty)
DFLDDirectorySearch directory fileMatch -> do
let searchRequest =
SearchRepoDirectoryFilesRequest
{ _filesMatch = fileMatch,
_directoryPath = directory,
_qualifiedRepo = qualifiedRepo,
_commitSha = repoCommitSha
}
searchResult <- repoDirectoryFilesSearch searchRequest
let resultPaths = searchResult ^. #_repoFilePaths
pure
(resultPaths, V.empty)
loadDepTypesContent ::
( Has FetchRepoFiles sig m,
Has (Throw DetermineDependenciesError) sig m
) =>
QualifiedRepo ->
GitSha ->
DepFileTypeToLoad ->
m (V.Vector RawDepFileContent)
loadDepTypesContent qualifiedRepo repoCommitSha depFileToLoad = do
let depFileType = depFileToLoad ^. #_depFileType
theseLoads = depFileToLoad ^. #_paths
nvGHRepoFiles <-
case theseLoads of
(This systemLoads) -> loadSystemFilesContent qualifiedRepo repoCommitSha systemLoads
(That userLoads) -> loadUserFilesContent qualifiedRepo repoCommitSha userLoads
(These systemLoads userLoads) -> do
systemFiles <- loadSystemFilesContent qualifiedRepo repoCommitSha systemLoads
userFiles <- loadUserFilesContent qualifiedRepo repoCommitSha userLoads
pure $
systemFiles NV.++ userFiles
let filteredSymLinks = NV.filter notSymLink nvGHRepoFiles
pure $
filteredSymLinks <&> RawDepFileContent depFileType
loadSystemFilesContent ::
( Has FetchRepoFiles sig m,
Has (Throw DetermineDependenciesError) sig m
) =>
QualifiedRepo ->
GitSha ->
NV.NonEmptyVector GitPath ->
m (NV.NonEmptyVector GHRepoFile)
loadSystemFilesContent qualifiedRepo repoCommitSha filePaths = do
let filesRequest =
RepoFilesRequest
{ _qualifiedRepo = qualifiedRepo,
_commitSha = repoCommitSha,
_filePaths = filePaths
}
repoFilesResult <- repoFilesFetch filesRequest
let fileResults = repoFilesResult ^. #_repoFiles
results <- liftEither $ first (DependencyFilesMismatch . snd) $ traverse getResult fileResults
pure $
results <&> snd
-- | Fetch files the user explicitly configured.  A missing file here is a
-- user mistake, so it is reported as 'UserSpecificedMissingFile' carrying
-- the offending path.  (Fix: the TODO below had lost its @--@ marker in
-- extraction, which broke the syntax.)
loadUserFilesContent ::
  ( Has FetchRepoFiles sig m,
    Has (Throw DetermineDependenciesError) sig m
  ) =>
  QualifiedRepo ->
  GitSha ->
  NV.NonEmptyVector GitPath ->
  m (NV.NonEmptyVector GHRepoFile)
loadUserFilesContent qualifiedRepo repoCommitSha filePaths = do
  -- TODO: use these so we know we have gitpaths to avoid a possible error
  let filesRequest =
        RepoFilesRequest
          { _qualifiedRepo = qualifiedRepo,
            _commitSha = repoCommitSha,
            _filePaths = filePaths
          }
  repoFilesResult <- repoFilesFetch filesRequest
  let fileResults = repoFilesResult ^. #_repoFiles
  results <- liftEither $ first (UserSpecificedMissingFile . fst) $ traverse getResult fileResults
  pure $
    results <&> snd
-- | Combine found and user-supplied dependencies into the final list:
-- append the additional deps, drop duplicates, then apply the ignore
-- configuration.
--
-- Throws 'NoDependenciesFound' when nothing remains after de-duplication,
-- and 'AllDependenciesIgnored' when the ignore rules remove everything.
mungeFinalDeps ::
  (Has (Throw DetermineDependenciesError) sig m) =>
  V.Vector BasicDependency ->
  IgnoreDependenciesConfig ->
  V.Vector BasicDependency ->
  m (IgnoredRepoDependencies, NV.NonEmptyVector BasicDependency)
mungeFinalDeps additionalDeps ignoreDepConfig foundDeps = do
  let withAdditional = addAdditionalDeps additionalDeps foundDeps
      duplicatesIgnored = removeDuplicates withAdditional
  -- NV.fromVector is Nothing on an empty vector: no deps at all.
  nvAllDeps <- maybeToErrorM NoDependenciesFound (NV.fromVector duplicatesIgnored)
  let (resultIgnoredDeps, maybeResultBasicRepoDeps) = ignoreDeps ignoreDepConfig nvAllDeps
  -- Nothing here means the ignore config filtered out every dependency.
  resultBasicRepoDeps <- maybeToErrorM AllDependenciesIgnored maybeResultBasicRepoDeps
  pure
    (resultIgnoredDeps, resultBasicRepoDeps)
-- | This is an ugly way to detect a symlink: treat any file whose
-- contents begin with "../" as a symlink (presumably the blob holds the
-- link's relative target path — confirm against the fetch layer).
notSymLink :: GHRepoFile -> Bool
notSymLink (GHRepoFile _ fileContents) = not $ Text.isPrefixOf "../" fileContents
-- | Pair a repo-file lookup result with its path, turning an absent file
-- into a Left carrying the path and a diagnostic message.
getResult :: RepoFileResult -> Either (GitPath, Text) (GitPath, GHRepoFile)
getResult (RepoFileResult filePath Nothing) = Left (filePath, "Unexpected missing file from search: " <> show filePath)
getResult (RepoFileResult filePath (Just repoFile)) = Right (filePath, repoFile)
-- | Restrict the language set according to the file-load plan: keep
-- everything when loading is enabled, nothing when disabled, and all but
-- the listed languages when loading is disabled for specific languages.
factorLoadPlan :: FileLoadPlan -> V.Vector ProgrammingLanguage -> V.Vector ProgrammingLanguage
factorLoadPlan FileLoadEnabled input = input
factorLoadPlan FileLoadDisabled _ = V.empty
factorLoadPlan (FileLoadDisabledForLangs nvDisabledLangs) input = V.filter (`NV.notElem` nvDisabledLangs) input
|
348933db9ae4232d9a8206d349d187e0cbd532a3f7de4bca8acb6f905003e012 | IvanIvanov/fp2013 | solution.scm | (define (prepend l value)
  ;; Prepend the elements of VALUE onto list L: the result is VALUE's
  ;; elements, in order, followed by L.  A non-list VALUE is consed onto L.
  ;; NOTE(review): when L is '() the raw VALUE is returned unchanged, so a
  ;; non-list VALUE with an empty L yields an atom, not a one-element
  ;; list — confirm this asymmetry is intended.
  (cond ((null? value) l)
        ((null? l) value)
        ((list? value) (cons (car value) (prepend l (cdr value))))
        (else (cons value l))))
;; Convert a non-negative integer N into the list of its decimal digits,
;; most significant first (e.g. 408 -> (4 0 8)).  A single-digit number
;; yields a one-element list.
(define (number->list n)
  (define (collect number digits)
    (if (< number 10)
        (cons number digits)
        (collect (quotient number 10)
                 (cons (remainder number 10) digits))))
  (collect n '()))
;; Return #t when X occurs in list L (compared with equal?), #f otherwise.
(define (member? x l)
  (and (not (null? l))
       (or (equal? x (car l))
           (member? x (cdr l)))))
;; Return the elements of L with duplicates removed, keeping the first
;; occurrence of each element in its original relative order.
;; The accumulator is built with cons and reversed once at the end,
;; avoiding the original's O(n^2) list appends (one full append per
;; unique element); output is identical.
(define (distinct l)
  (let loop ((items l)
             (seen '()))
    (cond ((null? items) (reverse seen))
          ((member? (car items) seen) (loop (cdr items) seen))
          (else (loop (cdr items) (cons (car items) seen))))))
;; Return #t when every adjacent pair of L satisfies the binary predicate
;; ORDER (i.e. L is sorted with respect to ORDER).  Lists of length < 2
;; are trivially ordered.
(define (sequence-order? order l)
  (if (or (null? l) (null? (cdr l)))
      #t
      (and (order (car l) (cadr l))
           (sequence-order? order (cdr l)))))
| null | https://raw.githubusercontent.com/IvanIvanov/fp2013/2ac1bb1102cb65e0ecbfa8d2fb3ca69953ae4ecf/lab4/homeworks/04/solution.scm | scheme | (define (prepend l value)
(cond ((null? value) l)
((null? l) value)
((list? value) (cons (car value) (prepend l (cdr value))))
(else (cons value l))))
(define (number->list n)
(let loop ((number n)
(digits '()))
(if (< number 10)
(cons number digits)
(loop (quotient number 10)
(cons (remainder number 10) digits)))))
(define (member? x l)
(cond ((null? l) #f)
((equal? x (car l)) #t)
(else (member? x (cdr l)))))
(define (distinct l)
(let loop ((items l)
(unique-items '()))
(cond ((null? items) unique-items)
((not (member? (car items) unique-items)) (loop (cdr items) (append unique-items (list (car items)))))
(else (loop (cdr items) unique-items)))))
(define (sequence-order? order l)
(cond ((or (null? l) (null? (cdr l))) #t)
((order (car l) (cadr l)) (sequence-order? order (cdr l)))
(else #f)))
| |
14dd1bedbd3c1c4fe3c8cfa056ce7f3c96ade0c2a0f2909ce78a705be1bfbede | esl/MongooseIM | mam_lookup_sql.erl | %% Makes a SELECT SQL query
-module(mam_lookup_sql).
-export([lookup_query/5]).
-include("mongoose_logger.hrl").
-include("mongoose_mam.hrl").
-type offset_limit() :: all | {Offset :: non_neg_integer(), Limit :: non_neg_integer()}.
-type sql_part() :: iolist() | binary().
-type env_vars() :: mod_mam_rdbms_arch:env_vars().
-type query_type() :: atom().
-type column() :: atom().
-type lookup_query_fn() :: fun((QueryType :: atom(), Env :: map(), Filters :: list(),
Order :: atom(), OffsetLimit :: offset_limit()) -> term()).
-export_type([sql_part/0]).
-export_type([query_type/0]).
-export_type([column/0]).
-export_type([lookup_query_fn/0]).
%% The ONLY usage of Env is in these functions:
%% The rest of code should treat Env as opaque (i.e. the code just passes Env around).
%% Host type owning the archive; passed to mongoose_rdbms:execute_successfully/3.
-spec host_type(env_vars()) -> mongooseim:host_type().
host_type(#{host_type := HostType}) -> HostType.
%% Table the lookup reads from (also part of the prepared-statement name).
-spec table(env_vars()) -> atom().
table(#{table := Table}) -> Table.
%% Index hint fragment, produced by a per-environment callback.
-spec index_hint_sql(env_vars()) -> sql_part().
index_hint_sql(Env = #{index_hint_fn := F}) -> F(Env).
%% Column list fragment for the given query type.
-spec columns_sql(env_vars(), query_type()) -> sql_part().
columns_sql(#{columns_sql_fn := F}, QueryType) -> F(QueryType).
%% Short identifier for a column, used when naming prepared statements.
-spec column_to_id(env_vars(), column()) -> string().
column_to_id(#{column_to_id_fn := F}, Col) -> F(Col).
%% This function uses some fields from Env:
%% - host_type
%% - table
%% - index_hint_fn
%% - columns_sql_fn
%% - column_to_id_fn
%%
%% Filters are in format {Op, Column, Value}
%% QueryType should be an atom, that we pass into the function.
%% Execute a MAM lookup as a prepared statement.
%% The statement name encodes every component that affects the generated
%% SQL (table, query type, filters, order, pagination), so each distinct
%% query shape is prepared once and reused afterwards.
-spec lookup_query(QueryType :: atom(), Env :: map(), Filters :: list(),
                   Order :: atom(), OffsetLimit :: offset_limit()) -> term().
lookup_query(QueryType, Env, Filters, Order, OffsetLimit) ->
    Table = table(Env),
    HostType = host_type(Env),
    StmtName = filters_to_statement_name(Env, QueryType, Table, Filters, Order, OffsetLimit),
    case mongoose_rdbms:prepared(StmtName) of
        false ->
            %% Create a new type of a query
            SQL = lookup_sql_binary(QueryType, Table, Env, Filters, Order, OffsetLimit),
            Columns = filters_to_columns(Filters, OffsetLimit),
            mongoose_rdbms:prepare(StmtName, Table, Columns, SQL);
        true ->
            ok
    end,
    %% Filter values (plus any limit/offset values) become the statement
    %% arguments, in the same order as the placeholders in the SQL.
    Args = filters_to_args(Filters, OffsetLimit),
    mongoose_rdbms:execute_successfully(HostType, StmtName, Args).
%% Render the final SELECT statement as a binary.
lookup_sql_binary(QueryType, Table, Env, Filters, Order, OffsetLimit) ->
    iolist_to_binary(lookup_sql(QueryType, Table, Env, Filters, Order, OffsetLimit)).
%% Assemble the SELECT statement as an iolist from its fragments:
%% optional TOP/LIMIT clauses, selected columns, table name, index hint,
%% WHERE filters and ORDER BY.
lookup_sql(QueryType, Table, Env, Filters, Order, OffsetLimit) ->
    IndexHintSQL = index_hint_sql(Env),
    FilterSQL = filters_to_sql(Filters),
    OrderSQL = order_to_sql(Order),
    {LimitSQL, TopSQL} = limit_sql(OffsetLimit),
    ["SELECT ", TopSQL, " ", columns_sql(Env, QueryType),
     " FROM ", atom_to_list(Table), " ",
     IndexHintSQL, FilterSQL, OrderSQL, LimitSQL].
%% Pagination fragments as {LimitSQL, TopSQL}: no pagination yields empty
%% fragments; a plain limit delegates to the DB-specific fragments (which
%% may fill either slot); a real offset uses the LIMIT/OFFSET form.
limit_sql(all) -> {"", ""};
limit_sql({0, _Limit}) -> rdbms_queries:get_db_specific_limits();
limit_sql({_Offset, _Limit}) -> {rdbms_queries:limit_offset_sql(), ""}.
%% Prepared-statement column names for the filter list, with the
%% pagination columns implied by OffsetLimit appended.
filters_to_columns(Filters, OffsetLimit) ->
    Columns = lists:map(fun({_Op, Column, _Value}) -> Column end, Filters),
    offset_limit_to_columns(OffsetLimit, Columns).

%% Statement argument values for the filter list, with the pagination
%% values implied by OffsetLimit appended.
filters_to_args(Filters, OffsetLimit) ->
    Args = lists:map(fun({_Op, _Column, Value}) -> Value end, Filters),
    offset_limit_to_args(OffsetLimit, Args).

%% Append limit/offset argument values to the argument list.
offset_limit_to_args(OffsetLimit, Args) ->
    case OffsetLimit of
        all -> Args;
        {0, Limit} -> rdbms_queries:add_limit_arg(Limit, Args);
        {Offset, Limit} -> Args ++ rdbms_queries:limit_offset_args(Limit, Offset)
    end.

%% Append limit/offset column placeholders to the column list.
offset_limit_to_columns(OffsetLimit, Columns) ->
    case OffsetLimit of
        all -> Columns;
        {0, _Limit} -> rdbms_queries:add_limit_arg(limit, Columns);
        {_Offset, _Limit} -> Columns ++ rdbms_queries:limit_offset_args(limit, offset)
    end.
%% Derive a unique prepared-statement name from every component that
%% affects the generated SQL: table, query type, ordering, the
%% operator/column pairs of the filters, and the pagination mode.
%% Layout: <table>_<query>_<order>_<filter-ids>_<limit-id>.
filters_to_statement_name(Env, QueryType, Table, Filters, Order, OffsetLimit) ->
    FilterIds = [op_to_id(Op) ++ column_to_id(Env, Col) || {Op, Col, _Val} <- Filters],
    Parts = [atom_to_list(Table),
             query_type_to_id(QueryType),
             order_type_to_id(Order),
             lists:append(FilterIds),
             offset_limit_to_id(OffsetLimit)],
    list_to_atom(lists:append(lists:join("_", Parts))).
%% Identifier fragment for the query type, used in statement names.
query_type_to_id(QueryType) ->
    atom_to_list(QueryType).

%% One-letter identifier fragment for the ordering mode.
order_type_to_id(Order) ->
    case Order of
        desc -> "d";
        asc -> "a";
        unordered -> "u"
    end.

%% ORDER BY fragment of the SELECT statement.
order_to_sql(Order) ->
    case Order of
        asc -> " ORDER BY id ";
        desc -> " ORDER BY id DESC ";
        unordered -> " "
    end.

%% Identifier fragment describing the pagination mode.
offset_limit_to_id(OffsetLimit) ->
    case OffsetLimit of
        {0, _Limit} -> "limit";
        {_Offset, _Limit} -> "offlim";
        all -> "all"
    end.
%% WHERE clause for the given filters: the empty string when there are no
%% filters, otherwise the rendered conditions joined with AND.
filters_to_sql([]) ->
    "";
filters_to_sql(Filters) ->
    Conditions = lists:map(fun filter_to_sql/1, Filters),
    [" WHERE ", rdbms_queries:join(Conditions, " AND ")].
%% Render a single filter triple as an SQL condition with a placeholder.
-spec filter_to_sql(mam_filter:filter_field()) -> sql_part().
filter_to_sql({Op, Column, _Value}) ->
    filter_to_sql(atom_to_list(Column), Op).

%% Two-letter identifier for a comparison operator, used in statement names.
op_to_id(Op) ->
    case Op of
        equal -> "eq";
        less -> "lt";      %% less than
        greater -> "gt";   %% greater than
        le -> "le";        %% less or equal
        ge -> "ge";        %% greater or equal
        like -> "lk"
    end.

%% SQL comparison fragment for a column name and operator.
filter_to_sql(Column, Op) ->
    Suffix =
        case Op of
            equal -> " = ?";
            less -> " < ?";
            greater -> " > ?";
            le -> " <= ?";
            ge -> " >= ?";
            like -> " LIKE ?"
        end,
    Column ++ Suffix.
| null | https://raw.githubusercontent.com/esl/MongooseIM/da0d32d2d3d68ac387fd66c7e22c3740743a2beb/src/mam/mam_lookup_sql.erl | erlang | Makes a SELECT SQL query
The ONLY usage of Env is in these functions:
The rest of code should treat Env as opaque (i.e. the code just passes Env around).
This function uses some fields from Env:
- host_type
- table
- index_hint_fn
- columns_sql_fn
- column_to_id_fn
Filters are in format {Op, Column, Value}
Create a new type of a query
less than
greater than
less or equal
greater or equal | -module(mam_lookup_sql).
-export([lookup_query/5]).
-include("mongoose_logger.hrl").
-include("mongoose_mam.hrl").
-type offset_limit() :: all | {Offset :: non_neg_integer(), Limit :: non_neg_integer()}.
-type sql_part() :: iolist() | binary().
-type env_vars() :: mod_mam_rdbms_arch:env_vars().
-type query_type() :: atom().
-type column() :: atom().
-type lookup_query_fn() :: fun((QueryType :: atom(), Env :: map(), Filters :: list(),
Order :: atom(), OffsetLimit :: offset_limit()) -> term()).
-export_type([sql_part/0]).
-export_type([query_type/0]).
-export_type([column/0]).
-export_type([lookup_query_fn/0]).
-spec host_type(env_vars()) -> mongooseim:host_type().
host_type(#{host_type := HostType}) -> HostType.
-spec table(env_vars()) -> atom().
table(#{table := Table}) -> Table.
-spec index_hint_sql(env_vars()) -> sql_part().
index_hint_sql(Env = #{index_hint_fn := F}) -> F(Env).
-spec columns_sql(env_vars(), query_type()) -> sql_part().
columns_sql(#{columns_sql_fn := F}, QueryType) -> F(QueryType).
-spec column_to_id(env_vars(), column()) -> string().
column_to_id(#{column_to_id_fn := F}, Col) -> F(Col).
QueryType should be an atom , that we pass into the function .
-spec lookup_query(QueryType :: atom(), Env :: map(), Filters :: list(),
Order :: atom(), OffsetLimit :: offset_limit()) -> term().
lookup_query(QueryType, Env, Filters, Order, OffsetLimit) ->
Table = table(Env),
HostType = host_type(Env),
StmtName = filters_to_statement_name(Env, QueryType, Table, Filters, Order, OffsetLimit),
case mongoose_rdbms:prepared(StmtName) of
false ->
SQL = lookup_sql_binary(QueryType, Table, Env, Filters, Order, OffsetLimit),
Columns = filters_to_columns(Filters, OffsetLimit),
mongoose_rdbms:prepare(StmtName, Table, Columns, SQL);
true ->
ok
end,
Args = filters_to_args(Filters, OffsetLimit),
mongoose_rdbms:execute_successfully(HostType, StmtName, Args).
lookup_sql_binary(QueryType, Table, Env, Filters, Order, OffsetLimit) ->
iolist_to_binary(lookup_sql(QueryType, Table, Env, Filters, Order, OffsetLimit)).
lookup_sql(QueryType, Table, Env, Filters, Order, OffsetLimit) ->
IndexHintSQL = index_hint_sql(Env),
FilterSQL = filters_to_sql(Filters),
OrderSQL = order_to_sql(Order),
{LimitSQL, TopSQL} = limit_sql(OffsetLimit),
["SELECT ", TopSQL, " ", columns_sql(Env, QueryType),
" FROM ", atom_to_list(Table), " ",
IndexHintSQL, FilterSQL, OrderSQL, LimitSQL].
limit_sql(all) -> {"", ""};
limit_sql({0, _Limit}) -> rdbms_queries:get_db_specific_limits();
limit_sql({_Offset, _Limit}) -> {rdbms_queries:limit_offset_sql(), ""}.
filters_to_columns(Filters, OffsetLimit) ->
offset_limit_to_columns(OffsetLimit, [Column || {_Op, Column, _Value} <- Filters]).
filters_to_args(Filters, OffsetLimit) ->
offset_limit_to_args(OffsetLimit, [Value || {_Op, _Column, Value} <- Filters]).
offset_limit_to_args(all, Args) ->
Args;
offset_limit_to_args({0, Limit}, Args) ->
rdbms_queries:add_limit_arg(Limit, Args);
offset_limit_to_args({Offset, Limit}, Args) ->
Args ++ rdbms_queries:limit_offset_args(Limit, Offset).
offset_limit_to_columns(all, Columns) ->
Columns;
offset_limit_to_columns({0, _Limit}, Columns) ->
rdbms_queries:add_limit_arg(limit, Columns);
offset_limit_to_columns({_Offset, _Limit}, Columns) ->
Columns ++ rdbms_queries:limit_offset_args(limit, offset).
filters_to_statement_name(Env, QueryType, Table, Filters, Order, OffsetLimit) ->
QueryId = query_type_to_id(QueryType),
Ids = [op_to_id(Op) ++ column_to_id(Env, Col) || {Op, Col, _Val} <- Filters],
OrderId = order_type_to_id(Order),
LimitId = offset_limit_to_id(OffsetLimit),
list_to_atom(atom_to_list(Table) ++ "_" ++ QueryId ++ "_" ++ OrderId ++ "_" ++ lists:append(Ids) ++ "_" ++ LimitId).
query_type_to_id(QueryType) -> atom_to_list(QueryType).
order_type_to_id(desc) -> "d";
order_type_to_id(asc) -> "a";
order_type_to_id(unordered) -> "u".
order_to_sql(asc) -> " ORDER BY id ";
order_to_sql(desc) -> " ORDER BY id DESC ";
order_to_sql(unordered) -> " ".
offset_limit_to_id({0, _Limit}) -> "limit";
offset_limit_to_id({_Offset, _Limit}) -> "offlim";
offset_limit_to_id(all) -> "all".
filters_to_sql(Filters) ->
SQLs = [filter_to_sql(Filter) || Filter <- Filters],
case SQLs of
[] -> "";
Defined -> [" WHERE ", rdbms_queries:join(Defined, " AND ")]
end.
-spec filter_to_sql(mam_filter:filter_field()) -> sql_part().
filter_to_sql({Op, Column, _Value}) -> filter_to_sql(atom_to_list(Column), Op).
op_to_id(equal) -> "eq";
op_to_id(like) -> "lk".
filter_to_sql(Column, equal) -> Column ++ " = ?";
filter_to_sql(Column, less) -> Column ++ " < ?";
filter_to_sql(Column, greater) -> Column ++ " > ?";
filter_to_sql(Column, le) -> Column ++ " <= ?";
filter_to_sql(Column, ge) -> Column ++ " >= ?";
filter_to_sql(Column, like) -> Column ++ " LIKE ?".
|
26d00893786339a5f25c9359605d1b6148698dea0bc100f1b191c0fa6189baca | INRIA/zelus | node_base.ml | (**************************************************************************)
(* *)
(* Zelus *)
(* A synchronous language for hybrid systems *)
(* *)
(* *)
and
(* *)
(*  Copyright 2012-2019. All rights reserved.                             *)
(* *)
(* This file is distributed under the terms of the CeCILL-C licence *)
(* *)
(* Zelus is developed in the INRIA PARKAS team. *)
(* *)
(**************************************************************************)
(* This module provides functions for lifting a hybrid function into *)
(* a discrete one. This is the so-called "co-simulation" of a *)
(* continuous or hybrid model in which the numerical solver *)
(* and zero-crossing detection mechanism are embedded into the step function *)
(* Lift a hybrid node into a discrete node *)
(* [solve f (input, t) = next_t, result]
   - f : 'a -C-> 'b is the hybrid node;
   - stop_time : float is the stop time (end) of the simulation;
   - input : 'a is a stream;
   - t : float is a stream of horizons that must be increasing
     (forall n in Nat. t(n) <= t(n+1))
   - result : 'b return is a stream of results;
   - next_t : float is a stream of achieved horizons *)
(* compile with:
   ocamlfind ocamlc bigarray.cma sundials.cma ztypes.ml node.ml *)
(*- ocamlfind ocamlc bigarray.cma -package sundialsml sundials.cma
zls.cmo -I solvers solvers/illinois.cmo solvers/sundials_cvode.cmo
ztypes.ml node.ml *)
open Ztypes
(* Global debug switch for solver tracing. *)
let debug = ref false

(* When [debug] is set, print a label followed by a float value on one
   line, flushing stdout so traces survive an abort. *)
let log_info s i =
  if !debug then begin
    print_string s;
    print_float i;
    print_newline ();
    flush stdout
  end
type status =
| Interpolate (* no integration was necessary *)
| Success of float (* the integration succeed; limit time for correctness *)
| RootsFound (* a root has been found *)
| Horizon of float (* returns the next horizon (time event) *)
| Cascade (* a cascade *)
| StopTimeReached (* the end of simulation time is reached *)
| TimeHasPassed (* an output at time [h] is expected but *)
(* [h < start] where [start] *)
(* is the last restart time of the solver *)
| Error (* something went wrong during integration *)
(* output *)
type 'b return = { time: float; status: status; result: 'b }
module Make (SSolver: Zls.STATE_SOLVER) (ZSolver: Zls.ZEROC_SOLVER) =
struct
(* the state of the solver. Either never called (allocated) or running *)
type ('a, 'b) solver =
| Init (* initial state of the simulation *)
| Running of ('a, 'b) solver_state
and ('a, 'b) solver_state =
{ zstate: ZSolver.t;
(* the solver state *)
sstate: SSolver.t;
      (* the zero-crossing solver state *)
roots: Ztypes.zinvec;
      (* the vector of zero-crossings *)
nvec: SSolver.nvec;
(* the vector of positions *)
cvec: Ztypes.cvec;
      (* in two forms *)
mutable t_start: float;
(* time of the previous reset or mesh point *)
mutable t_limit: float;
(* time for the limit of the next solver step, i.e., next time event *)
mutable t_mesh: float;
(* horizon reached by the solver. *)
(* No zero-crossing in [t_start, t_mesh] *)
mutable t_time: float;
(* current time *)
minput: 'a ref; (* the input is read at discrete-time instants *)
mutable output: 'b; (* the current output *)
mutable next: simulation_state; (* state of the simulation *)
}
and simulation_state =
| Integrate (* integrate the signal *)
    | Discrete of bool (* true means zero-crossing; false a cascade *)
| End (* end of the simulation; stop_time has been reached *)
type ('a, 'b) state = { state: 'a; mutable solver: 'b }
(* Increment a horizon [h] by a tiny relative margin (two ulps' worth),
   so the solver is asked to integrate strictly past it. *)
let add_margin h =
  let margin = 2.0 *. epsilon_float *. h in
  h +. margin
(* the main lifting function *)
let solve f (stop_time: float) =
(* convert the internal representation of a hybrid node *)
(* into one that can be used by an ODE/Zero-crossing solver *)
let Hnode
{ state; zsize; csize; derivative; crossing;
output; setroots; majorstep; reset; horizon } = Lift.lift f in
(* the allocation function *)
let alloc () =
(* At this point, we do not allocate the solver states yet. *)
(* this is due to the way we compile which expect the derivative *)
    (* and zero-crossing function to expect an input. *)
    (* We will change this once those two functions are obtained *)
(* through slicing and will not need an input *)
{ state = state; solver = Init } in
let reset { state; solver } =
reset state;
match solver with
| Init -> ()
| Running ({ nvec; cvec; zstate; sstate; t_time } as s) ->
        (* reset the ODE solver and zero-crossing solver *)
let _ = SSolver.get_dky sstate nvec 0.0 0 in
SSolver.reinitialize sstate 0.0 nvec;
ZSolver.reinitialize zstate 0.0 cvec;
s.t_start <- 0.0;
s.t_time <- 0.0;
s.t_mesh <- 0.0 in
(* the step function *)
let step ({ state; solver } as s) (expected_time, input) =
try
(* make a step *)
match solver with
| Init ->
(* allocate the vectors for continuous state variables *)
         (* and that for the zero-crossing detection *)
let nvec = SSolver.cmake csize in
let cvec = SSolver.unvec nvec in
let roots = Zls.zmake zsize in
log_info "Init: start = " 0.0;
(* initial major step *)
let result = majorstep state 0.0 cvec input in
let minput = ref input in
let derivative time cvec dvec =
derivative state !minput time cvec dvec in
let crossing time cvec dvec =
crossing state !minput time cvec dvec in
(* Allocate the solver *)
let sstate =
SSolver.initialize derivative nvec in
(* Allocate the zsolver *)
let zstate =
ZSolver.initialize zsize crossing cvec in
SSolver.set_stop_time sstate stop_time;
let horizon = horizon state in
let t_limit = min stop_time horizon in
let next, status =
if horizon = 0.0 then Discrete(false), Cascade
else
if stop_time <= 0.0 then End, StopTimeReached
else Integrate, Horizon(t_limit) in
s.solver <- Running { sstate = sstate; zstate = zstate;
t_start = 0.0; t_limit = t_limit;
t_mesh = 0.0; t_time = 0.0;
minput = minput; output = result;
roots = roots; cvec = cvec; nvec = nvec;
next = next };
log_info "horizon = " t_limit;
{ time = 0.0; status = status; result = result }
| Running({ next; sstate; zstate; t_start; t_mesh; t_limit;
t_time; minput; nvec; cvec; roots } as s) ->
log_info "Expected time = " expected_time;
if expected_time < t_start then
{ time = t_start; status = TimeHasPassed; result = s.output }
else
(* if the input did not change since the last reset *)
(* of the solvers and expected_time is less than t_mesh *)
(* interpolate the state at the expected_time *)
let input_change =
(expected_time > t_time) && not (!minput = input) in
s.t_time <- expected_time;
if expected_time <= t_mesh then
if not input_change then
(* interpolation *)
let _ = SSolver.get_dky sstate nvec expected_time 0 in
let result = output state input cvec in
s.output <- result;
log_info "Interpolate: time = " expected_time;
{ time = expected_time; status = Interpolate;
result = result }
else (* if the input has changed since the last step *)
(* the solution estimated by the solver is wrong and *)
(* must be cancelled *)
let _ = SSolver.get_dky sstate nvec t_time 0 in
log_info "Change of the input at t_time = " t_time;
let result = majorstep state t_time cvec input in
s.t_start <- t_time;
s.t_mesh <- t_time;
s.minput := input;
s.output <- result;
let status =
let horizon = horizon state in
if horizon = 0.0
then begin s.next <- Discrete(false); Cascade end
else
let _ = SSolver.reinitialize sstate t_mesh nvec in
let _ = ZSolver.reinitialize zstate t_mesh cvec in
let t_limit = min stop_time horizon in
s.t_start <- t_mesh;
s.t_limit <- t_limit;
s.next <- Integrate;
Horizon(t_limit) in
{ time = t_mesh; status = status; result = s.output }
else
match next with
| Integrate ->
log_info "Integrate: t_mesh = " t_mesh;
(* the new start point [t_start] is now [t_mesh] *)
s.t_start <- t_mesh;
if t_mesh >= stop_time then
begin
s.next <- End;
s.t_time <- expected_time;
{ time = stop_time; status = StopTimeReached;
result = s.output }
end
else
let t_limit_with_margin = add_margin t_limit in
log_info "t_limit_with_margin = " t_limit_with_margin;
let t_nextmesh =
(* integrate *)
SSolver.step sstate t_limit_with_margin nvec in
(* interpolate if the mesh point has passed the *)
(* time limit *)
log_info "t_nextmesh = " t_nextmesh;
let t =
if t_limit < t_nextmesh
then
(SSolver.get_dky sstate nvec t_limit 0; t_limit)
else t_nextmesh in
log_info "t_nextmesh = " t;
               (* is there a zero-crossing? *)
ZSolver.step zstate t cvec;
let has_roots = ZSolver.has_roots zstate in
let status =
if has_roots then
let t =
ZSolver.find
zstate (SSolver.get_dky sstate nvec, cvec)
roots in
log_info "root found at time = " t;
s.t_mesh <- t;
s.next <- Discrete(true);
Success(t)
else
let next =
if t = t_limit then Discrete(false)
else Integrate in
s.t_mesh <- t;
s.next <- next;
Success(t) in
s.t_time <- expected_time;
{ time = s.t_start; status = status; result = s.output }
| Discrete(is_zero_crossing) ->
log_info "StepRootsFound or StepCascade: time = " t_mesh;
if is_zero_crossing
then setroots state input cvec roots;
let result = majorstep state t_mesh cvec input in
s.output <- result;
let status =
let horizon = horizon state in
if horizon = 0.0
then begin s.next <- Discrete(false); Cascade end
else
let _ = SSolver.reinitialize sstate t_mesh nvec in
let _ = ZSolver.reinitialize zstate t_mesh cvec in
let t_limit = min stop_time horizon in
s.t_start <- t_mesh;
s.t_limit <- t_limit;
s.next <- Integrate;
Horizon(t_limit) in
{ time = t_mesh; status = status; result = s.output }
| End ->
log_info "End: stop_time = " stop_time;
{ time = s.t_start; status = StopTimeReached;
result = s.output }
with
| x -> raise x in
Node { alloc = alloc; step = step; reset = reset }
end
module Ode23Solver = Make (Solvers.Ode23) (Illinois)
module Ode45Solver = Make (Solvers.Ode45) (Illinois)
(* module SundialsSolver = Make (Solvers.Sundials_cvode) (Illinois) *)
let solve_ode23 = Ode23Solver.solve
let solve_ode45 = Ode45Solver.solve
(* let solve = SundialsSolver.solve *)
| null | https://raw.githubusercontent.com/INRIA/zelus/685428574b0f9100ad5a41bbaa416cd7a2506d5e/lib/std/node_base.ml | ocaml | ************************************************************************
Zelus
A synchronous language for hybrid systems
This file is distributed under the terms of the CeCILL-C licence
Zelus is developed in the INRIA PARKAS team.
************************************************************************
This module provides functions for lifting a hybrid function into
a discrete one. This is the so-called "co-simulation" of a
continuous or hybrid model in which the numerical solver
Lift a hybrid node into a discrete node
- ocamlfind ocamlc bigarray.cma -package sundialsml sundials.cma
zls.cmo -I solvers solvers/illinois.cmo solvers/sundials_cvode.cmo
ztypes.ml node.ml
no integration was necessary
the integration succeed; limit time for correctness
a root has been found
returns the next horizon (time event)
a cascade
the end of simulation time is reached
an output at time [h] is expected but
[h < start] where [start]
is the last restart time of the solver
something went wrong during integration
output
the state of the solver. Either never called (allocated) or running
initial state of the simulation
the solver state
the vector of positions
time of the previous reset or mesh point
time for the limit of the next solver step, i.e., next time event
horizon reached by the solver.
No zero-crossing in [t_start, t_mesh]
current time
the input is read at discrete-time instants
the current output
state of the simulation
integrate the signal
end of the simulation; stop_time has been reached
increment a given horizon by a small margin
the main lifting function
convert the internal representation of a hybrid node
into one that can be used by an ODE/Zero-crossing solver
the allocation function
At this point, we do not allocate the solver states yet.
this is due to the way we compile which expect the derivative
through slicing and will not need an input
the step function
make a step
allocate the vectors for continuous state variables
initial major step
Allocate the solver
Allocate the zsolver
if the input did not change since the last reset
of the solvers and expected_time is less than t_mesh
interpolate the state at the expected_time
interpolation
if the input has changed since the last step
the solution estimated by the solver is wrong and
must be cancelled
the new start point [t_start] is now [t_mesh]
integrate
interpolate if the mesh point has passed the
time limit
let solve = SundialsSolver.solve | and
Copyright 2012 - 2019 . All rights reserved .
and zero - crossing detection mechanism are embedded into the step function
[ solve f ( input , t ) = next_t , result ]
- f : ' a -C- > ' b is the hybrid node ;
- stop_time : float is the stop time ( end ) of the simulation ;
- input : ' a is a stream ;
- t : float is a stream of horizons that must be increasing
( forall n in . t(n ) < = ) )
- result : ' b return is a stream of results ;
- next_t : float is a stream of achieved horizons
- f : 'a -C-> 'b is the hybrid node;
- stop_time : float is the stop time (end) of the simulation;
- input : 'a is a stream;
- t : float is a stream of horizons that must be increasing
(forall n in Nat. t(n) <= t(n+1))
- result : 'b return is a stream of results;
- next_t : float is a stream of achieved horizons *)
compile with :
* - ocamlfind ocamlc bigarray.cma sundials.cma ztypes.ml node.ml
*- ocamlfind ocamlc bigarray.cma sundials.cma ztypes.ml node.ml *)
open Ztypes
let debug = ref false
let log_info s i =
if !debug then
begin print_string s; print_float i; print_newline (); flush stdout end
type status =
type 'b return = { time: float; status: status; result: 'b }
module Make (SSolver: Zls.STATE_SOLVER) (ZSolver: Zls.ZEROC_SOLVER) =
struct
type ('a, 'b) solver =
| Running of ('a, 'b) solver_state
and ('a, 'b) solver_state =
{ zstate: ZSolver.t;
sstate: SSolver.t;
the zero - crossing solver state
roots: Ztypes.zinvec;
the vector of zero - crossing
nvec: SSolver.nvec;
cvec: Ztypes.cvec;
in two forms
mutable t_start: float;
mutable t_limit: float;
mutable t_mesh: float;
mutable t_time: float;
}
and simulation_state =
true means zero - crossing ; false a cascade
type ('a, 'b) state = { state: 'a; mutable solver: 'b }
let add_margin h = h +. (2.0 *. epsilon_float *. h)
let solve f (stop_time: float) =
let Hnode
{ state; zsize; csize; derivative; crossing;
output; setroots; majorstep; reset; horizon } = Lift.lift f in
let alloc () =
and zero - crossing function to expect an input
We will change this once those two functions are obtained
{ state = state; solver = Init } in
let reset { state; solver } =
reset state;
match solver with
| Init -> ()
| Running ({ nvec; cvec; zstate; sstate; t_time } as s) ->
reset the ODE solver and Zero - crossing solver
let _ = SSolver.get_dky sstate nvec 0.0 0 in
SSolver.reinitialize sstate 0.0 nvec;
ZSolver.reinitialize zstate 0.0 cvec;
s.t_start <- 0.0;
s.t_time <- 0.0;
s.t_mesh <- 0.0 in
let step ({ state; solver } as s) (expected_time, input) =
try
match solver with
| Init ->
and that for the zero - crossing detection
let nvec = SSolver.cmake csize in
let cvec = SSolver.unvec nvec in
let roots = Zls.zmake zsize in
log_info "Init: start = " 0.0;
let result = majorstep state 0.0 cvec input in
let minput = ref input in
let derivative time cvec dvec =
derivative state !minput time cvec dvec in
let crossing time cvec dvec =
crossing state !minput time cvec dvec in
let sstate =
SSolver.initialize derivative nvec in
let zstate =
ZSolver.initialize zsize crossing cvec in
SSolver.set_stop_time sstate stop_time;
let horizon = horizon state in
let t_limit = min stop_time horizon in
let next, status =
if horizon = 0.0 then Discrete(false), Cascade
else
if stop_time <= 0.0 then End, StopTimeReached
else Integrate, Horizon(t_limit) in
s.solver <- Running { sstate = sstate; zstate = zstate;
t_start = 0.0; t_limit = t_limit;
t_mesh = 0.0; t_time = 0.0;
minput = minput; output = result;
roots = roots; cvec = cvec; nvec = nvec;
next = next };
log_info "horizon = " t_limit;
{ time = 0.0; status = status; result = result }
| Running({ next; sstate; zstate; t_start; t_mesh; t_limit;
t_time; minput; nvec; cvec; roots } as s) ->
log_info "Expected time = " expected_time;
if expected_time < t_start then
{ time = t_start; status = TimeHasPassed; result = s.output }
else
let input_change =
(expected_time > t_time) && not (!minput = input) in
s.t_time <- expected_time;
if expected_time <= t_mesh then
if not input_change then
let _ = SSolver.get_dky sstate nvec expected_time 0 in
let result = output state input cvec in
s.output <- result;
log_info "Interpolate: time = " expected_time;
{ time = expected_time; status = Interpolate;
result = result }
let _ = SSolver.get_dky sstate nvec t_time 0 in
log_info "Change of the input at t_time = " t_time;
let result = majorstep state t_time cvec input in
s.t_start <- t_time;
s.t_mesh <- t_time;
s.minput := input;
s.output <- result;
let status =
let horizon = horizon state in
if horizon = 0.0
then begin s.next <- Discrete(false); Cascade end
else
let _ = SSolver.reinitialize sstate t_mesh nvec in
let _ = ZSolver.reinitialize zstate t_mesh cvec in
let t_limit = min stop_time horizon in
s.t_start <- t_mesh;
s.t_limit <- t_limit;
s.next <- Integrate;
Horizon(t_limit) in
{ time = t_mesh; status = status; result = s.output }
else
match next with
| Integrate ->
log_info "Integrate: t_mesh = " t_mesh;
s.t_start <- t_mesh;
if t_mesh >= stop_time then
begin
s.next <- End;
s.t_time <- expected_time;
{ time = stop_time; status = StopTimeReached;
result = s.output }
end
else
let t_limit_with_margin = add_margin t_limit in
log_info "t_limit_with_margin = " t_limit_with_margin;
let t_nextmesh =
SSolver.step sstate t_limit_with_margin nvec in
log_info "t_nextmesh = " t_nextmesh;
let t =
if t_limit < t_nextmesh
then
(SSolver.get_dky sstate nvec t_limit 0; t_limit)
else t_nextmesh in
log_info "t_nextmesh = " t;
is there a zero - crossing ?
ZSolver.step zstate t cvec;
let has_roots = ZSolver.has_roots zstate in
let status =
if has_roots then
let t =
ZSolver.find
zstate (SSolver.get_dky sstate nvec, cvec)
roots in
log_info "root found at time = " t;
s.t_mesh <- t;
s.next <- Discrete(true);
Success(t)
else
let next =
if t = t_limit then Discrete(false)
else Integrate in
s.t_mesh <- t;
s.next <- next;
Success(t) in
s.t_time <- expected_time;
{ time = s.t_start; status = status; result = s.output }
| Discrete(is_zero_crossing) ->
log_info "StepRootsFound or StepCascade: time = " t_mesh;
if is_zero_crossing
then setroots state input cvec roots;
let result = majorstep state t_mesh cvec input in
s.output <- result;
let status =
let horizon = horizon state in
if horizon = 0.0
then begin s.next <- Discrete(false); Cascade end
else
let _ = SSolver.reinitialize sstate t_mesh nvec in
let _ = ZSolver.reinitialize zstate t_mesh cvec in
let t_limit = min stop_time horizon in
s.t_start <- t_mesh;
s.t_limit <- t_limit;
s.next <- Integrate;
Horizon(t_limit) in
{ time = t_mesh; status = status; result = s.output }
| End ->
log_info "End: stop_time = " stop_time;
{ time = s.t_start; status = StopTimeReached;
result = s.output }
with
| x -> raise x in
Node { alloc = alloc; step = step; reset = reset }
end
module Ode23Solver = Make (Solvers.Ode23) (Illinois)
module Ode45Solver = Make (Solvers.Ode45) (Illinois)
module SundialsSolver = Make ( Solvers . Sundials_cvode ) ( Illinois )
let solve_ode23 = Ode23Solver.solve
let solve_ode45 = Ode45Solver.solve
|
f86dfb5d24d70373eee9150ae1790e808bef055e2b628d4c35a8bf8cd520d5ad | ucsd-progsys/liquid-fixpoint | Simplify.hs | --------------------------------------------------------------------------------
-- | This module contains common functions used in the implementations of
in both and PLE.hs .
--------------------------------------------------------------------------------
{-# LANGUAGE PartialTypeSignatures #-}
# LANGUAGE FlexibleInstances #
# LANGUAGE ExistentialQuantification #
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -Wno-name-shadowing #-}
module Language.Fixpoint.Solver.Simplify (applyBooleanFolding, applyConstantFolding, applySetFolding, isSetPred) where
import Language.Fixpoint.Types hiding (simplify)
import Language.Fixpoint.Smt.Theories
import Data.Hashable
import qualified Data.HashSet as S
import qualified Data.Maybe as Mb
-- | Decide the relational atom @PAtom brel e1 e2@ at simplification time
-- when both sides are numeric literals; mixed Int/Real operands are
-- promoted to Real before comparing.  Non-literal atoms fall back to the
-- generic tautology/contradiction checks.
applyBooleanFolding :: Brel -> Expr -> Expr -> Expr
applyBooleanFolding brel e1 e2 =
  case (e1, e2) of
    (ECon (R l), ECon (R r)) -> verdict (rel l r)
    (ECon (R l), ECon (I r)) -> verdict (rel l (fromIntegral r))
    (ECon (I l), ECon (R r)) -> verdict (rel (fromIntegral l) r)
    (ECon (I l), ECon (I r)) -> verdict (relI l r)
    _ | isTautoPred atom  -> PTrue
      | isContraPred atom -> PFalse
      | otherwise         -> atom
  where
    -- The unfolded atom, returned when no decision can be made.
    atom = PAtom brel e1 e2

    verdict True  = PTrue
    verdict False = PFalse

    -- Comparison at type Real (Double) and at type Int (Integer); the
    -- monomorphic aliases keep 'fromIntegral' defaulting unambiguous.
    rel :: Double -> Double -> Bool
    rel = cmp brel

    relI :: Integer -> Integer -> Bool
    relI = cmp brel

    -- Interpret a relation symbol as an 'Ord' comparison; the "unsorted"
    -- (in)equalities Ueq/Une behave like Eq/Ne on literals.
    cmp :: Ord a => Brel -> a -> a -> Bool
    cmp Gt  = (>)
    cmp Ge  = (>=)
    cmp Lt  = (<)
    cmp Le  = (<=)
    cmp Eq  = (==)
    cmp Ne  = (/=)
    cmp Ueq = (==)
    cmp Une = (/=)
-- | Replace constant integer and floating point expressions by constant
-- values where possible.
--
-- Two situations are folded:
--
--   1. Both operands are numeric literals: the operation is evaluated
--      outright (mixed Int/Real operands are promoted to Real).
--   2. The left operand is an application of the same operator with a
--      literal right argument, e.g. @(x - c1) - c2@: the two constants
--      are combined via the residual operator 'rop', giving
--      @x - (c1 + c2)@ (similarly @(x / c1) / c2 ==> x / (c1 * c2)@).
--
-- Division by zero (and @mod 0@) is never folded: the expression is
-- returned unchanged so the solver's own semantics for it apply, instead
-- of raising a divide-by-zero exception or minting an IEEE Infinity
-- literal during simplification.
applyConstantFolding :: Bop -> Expr -> Expr -> Expr
applyConstantFolding bop e1 e2 =
  case (dropECst e1, dropECst e2) of
    (ECon (R left), ECon (R right)) ->
      Mb.fromMaybe e (cfR bop left right)
    (ECon (R left), ECon (I right)) ->
      Mb.fromMaybe e (cfR bop left (fromIntegral right))
    (ECon (I left), ECon (R right)) ->
      Mb.fromMaybe e (cfR bop (fromIntegral left) right)
    (ECon (I left), ECon (I right)) ->
      Mb.fromMaybe e (cfI bop left right)
    -- 'mod' does not reassociate: never combine constants under it.
    (EBin Mod _ _ , _) -> e
    (EBin bop1 e11 (dropECst -> ECon (R left)), ECon (R right))
      | bop == bop1 -> maybe e (EBin bop e11) (cfR (rop bop) left right)
      | otherwise -> e
    (EBin bop1 e11 (dropECst -> ECon (R left)), ECon (I right))
      | bop == bop1 -> maybe e (EBin bop e11) (cfR (rop bop) left (fromIntegral right))
      | otherwise -> e
    (EBin bop1 e11 (dropECst -> ECon (I left)), ECon (R right))
      | bop == bop1 -> maybe e (EBin bop e11) (cfR (rop bop) (fromIntegral left) right)
      | otherwise -> e
    (EBin bop1 e11 (dropECst -> ECon (I left)), ECon (I right))
      | bop == bop1 -> maybe e (EBin bop e11) (cfI (rop bop) left right)
      | otherwise -> e
    _ -> e
  where
    -- Residual operator combining the two nested constants, i.e.
    -- (x `bop` c1) `bop` c2 == x `bop` (c1 `rop bop` c2).
    rop :: Bop -> Bop
    rop Plus = Plus
    rop Minus = Plus
    rop Times = Times
    rop Div = Times
    rop RTimes = RTimes
    rop RDiv = RTimes
    rop Mod = Mod

    -- The original, unfolded expression (returned whenever folding fails).
    e = EBin bop e1 e2

    -- Total operators shared by the Int and Real folds; division and
    -- 'mod' are handled separately below because they are partial.
    getOp :: Num a => Bop -> Maybe (a -> a -> a)
    getOp Minus = Just (-)
    getOp Plus = Just (+)
    getOp Times = Just (*)
    getOp RTimes = Just (*)
    getOp _ = Nothing

    -- Fold at type Real (Double).  A zero divisor is left unfolded
    -- rather than producing an IEEE Infinity/NaN literal.
    cfR :: Bop -> Double -> Double -> Maybe Expr
    cfR bop left right = fmap go (getOp' bop)
      where
        go f = ECon $ R $ f left right
        getOp' Div | right == 0 = Nothing
                   | otherwise = Just (/)
        getOp' RDiv | right == 0 = Nothing
                    | otherwise = Just (/)
        getOp' op = getOp op

    -- Fold at type Int (Integer); integer division is never folded, and
    -- @_ `mod` 0@ is left unfolded instead of raising an ArithException.
    cfI :: Bop -> Integer -> Integer -> Maybe Expr
    cfI bop left right = fmap go (getOp' bop)
      where
        go f = ECon $ I $ f left right
        getOp' Mod | right == 0 = Nothing
                   | otherwise = Just mod
        getOp' op = getOp op
-- | Does this expression denote one of the built-in set predicates?
-- True for the bare 'setEmp' symbol and for any application whose head
-- is 'setMem' or 'setSub'.
isSetPred :: Expr -> Bool
isSetPred expr =
  case expr of
    EVar s   -> s == setEmp
    EApp h _ -> isSetPredHead h
    _        -> False
  where
    -- Only the head symbol of the application is inspected.
    isSetPredHead (EVar s) = s == setMem || s == setSub
    isSetPredHead _        = False
-- Note: this is currently limited to sets of integer constants
-- | Fold an application @EApp e1 e2@ of a set predicate to 'PTrue' /
-- 'PFalse' when its set arguments evaluate to concrete sets of integer
-- literals:
--
--   * @setEmp s@     -- emptiness of a concrete set @s@;
--   * @setMem x s@   -- membership of an integer literal @x@ in @s@;
--   * @(setEmp a) b@ -- computes @S.null (S.difference a b)@,
--                       i.e. whether @a@ is a subset of @b@.
--
-- If any argument is symbolic, the application is returned unchanged.
applySetFolding :: Expr -> Expr -> Expr
applySetFolding e1 e2 = case e1 of
    (EVar s) | s == setEmp
      -> maybe e (fromBool . S.null) (evalSetI e2)
    (EApp (EVar s) e1')
      | s == setMem
      -> maybe e fromBool (S.member <$> getInt e1' <*> evalSetI e2)
      | s == setEmp
      -> maybe e (fromBool . S.null) (S.difference <$> evalSetI e1' <*> evalSetI e2)
      | otherwise
      -> e
    _ -> e
  where
    -- Fallback: the original, unfolded application.
    e = EApp e1 e2

    fromBool True = PTrue
    fromBool False = PFalse

    -- Only integer literals are recognised as set elements.
    getInt :: Expr -> Maybe Integer
    getInt (ECon (I n)) = Just n
    getInt _ = Nothing

    -- Concrete interpretations of the binary set operators.
    getOp :: (Eq a, Hashable a) => Symbol -> Maybe (S.HashSet a -> S.HashSet a -> S.HashSet a)
    getOp s | s == setCup = Just S.union
            | s == setCap = Just S.intersection
            | s == setDif = Just S.difference
            | otherwise = Nothing

    -- Evaluate a set-valued expression to a concrete set of integers:
    -- 'setEmpty', 'setSng' of an integer literal, and any 'setCup' /
    -- 'setCap' / 'setDif' combination thereof.  'Nothing' as soon as a
    -- symbolic sub-expression is encountered.
    evalSetI :: Expr -> Maybe (S.HashSet Integer)
    evalSetI (EApp e1 e2) = case e1 of
      (EVar s) | s == setEmpty -> Just S.empty
               | s == setSng -> case e2 of
                    (ECon (I n)) -> Just $ S.singleton n
                    _ -> Nothing
      (EApp (EVar f) e1') -> getOp f <*> evalSetI e1' <*> evalSetI e2
      _ -> Nothing
    evalSetI _ = Nothing
| null | https://raw.githubusercontent.com/ucsd-progsys/liquid-fixpoint/a8f3f05cd9a99a56afa64a6699d7ba12417ef5b1/src/Language/Fixpoint/Solver/Simplify.hs | haskell | ------------------------------------------------------------------------------
| This module contains common functions used in the implementations of
------------------------------------------------------------------------------
# LANGUAGE PartialTypeSignatures #
# LANGUAGE ViewPatterns #
# OPTIONS_GHC -Wno-name-shadowing #
| Replace constant integer and floating point expressions by constant values
where possible.
Note: this is currently limited to sets of integer constants | in both and PLE.hs .
# LANGUAGE FlexibleInstances #
# LANGUAGE ExistentialQuantification #
module Language.Fixpoint.Solver.Simplify (applyBooleanFolding, applyConstantFolding, applySetFolding, isSetPred) where
import Language.Fixpoint.Types hiding (simplify)
import Language.Fixpoint.Smt.Theories
import Data.Hashable
import qualified Data.HashSet as S
import qualified Data.Maybe as Mb
-- | Decide the atom @PAtom brel e1 e2@ when both sides are numeric
-- literals (mixed Int/Real operands are promoted to Real); otherwise
-- fall back to the generic tautology/contradiction checks.
applyBooleanFolding :: Brel -> Expr -> Expr -> Expr
applyBooleanFolding brel e1 e2 =
  case (e1, e2) of
    (ECon (R left), ECon (R right)) ->
      Mb.fromMaybe e (bfR brel left right)
    (ECon (R left), ECon (I right)) ->
      Mb.fromMaybe e (bfR brel left (fromIntegral right))
    (ECon (I left), ECon (R right)) ->
      Mb.fromMaybe e (bfR brel (fromIntegral left) right)
    (ECon (I left), ECon (I right)) ->
      Mb.fromMaybe e (bfI brel left right)
    _ -> if isTautoPred e then PTrue else
         if isContraPred e then PFalse else e
  where
    -- The unfolded atom, used when no literal folding applies.
    e = PAtom brel e1 e2

    -- Relation symbol as an 'Ord' comparison; the "unsorted"
    -- (in)equalities Ueq/Une behave like Eq/Ne on literals.
    getOp :: Ord a => Brel -> (a -> a -> Bool)
    getOp Gt = (>)
    getOp Ge = (>=)
    getOp Lt = (<)
    getOp Le = (<=)
    getOp Eq = (==)
    getOp Ne = (/=)
    getOp Ueq = (==)
    getOp Une = (/=)

    -- Always 'Just': the comparison itself is total.
    bfR :: Brel -> Double -> Double -> Maybe Expr
    bfR brel left right = if getOp brel left right then Just PTrue else Just PFalse

    bfI :: Brel -> Integer -> Integer -> Maybe Expr
    bfI brel left right = if getOp brel left right then Just PTrue else Just PFalse
-- | Replace constant integer and floating point expressions by constant
-- values where possible: literal/literal applications are evaluated
-- outright, and @(x `bop` c1) `bop` c2@ is refolded to
-- @x `bop` (c1 `rop bop` c2)@ (e.g. @(x - c1) - c2 ==> x - (c1 + c2)@).
applyConstantFolding :: Bop -> Expr -> Expr -> Expr
applyConstantFolding bop e1 e2 =
  case (dropECst e1, dropECst e2) of
    (ECon (R left), ECon (R right)) ->
      Mb.fromMaybe e (cfR bop left right)
    (ECon (R left), ECon (I right)) ->
      Mb.fromMaybe e (cfR bop left (fromIntegral right))
    (ECon (I left), ECon (R right)) ->
      Mb.fromMaybe e (cfR bop (fromIntegral left) right)
    (ECon (I left), ECon (I right)) ->
      Mb.fromMaybe e (cfI bop left right)
    -- 'mod' does not reassociate: never combine constants under it.
    (EBin Mod _ _ , _) -> e
    (EBin bop1 e11 (dropECst -> ECon (R left)), ECon (R right))
      | bop == bop1 -> maybe e (EBin bop e11) (cfR (rop bop) left right)
      | otherwise -> e
    (EBin bop1 e11 (dropECst -> ECon (R left)), ECon (I right))
      | bop == bop1 -> maybe e (EBin bop e11) (cfR (rop bop) left (fromIntegral right))
      | otherwise -> e
    (EBin bop1 e11 (dropECst -> ECon (I left)), ECon (R right))
      | bop == bop1 -> maybe e (EBin bop e11) (cfR (rop bop) (fromIntegral left) right)
      | otherwise -> e
    (EBin bop1 e11 (dropECst -> ECon (I left)), ECon (I right))
      | bop == bop1 -> maybe e (EBin bop e11) (cfI (rop bop) left right)
      | otherwise -> e
    _ -> e
  where
    -- Residual operator combining the two nested constants, i.e.
    -- (x `bop` c1) `bop` c2 == x `bop` (c1 `rop bop` c2).
    rop :: Bop -> Bop
    rop Plus = Plus
    rop Minus = Plus
    rop Times = Times
    rop Div = Times
    rop RTimes = RTimes
    rop RDiv = RTimes
    rop Mod = Mod

    -- The original, unfolded expression.
    e = EBin bop e1 e2

    -- Total operators shared by the Int and Real folds.
    getOp :: Num a => Bop -> Maybe (a -> a -> a)
    getOp Minus = Just (-)
    getOp Plus = Just (+)
    getOp Times = Just (*)
    getOp RTimes = Just (*)
    getOp _ = Nothing

    -- Fold at type Real (Double).
    -- NOTE(review): division folds even when right == 0, producing an
    -- IEEE Infinity/NaN literal -- consider leaving x/0 unfolded.
    cfR :: Bop -> Double -> Double -> Maybe Expr
    cfR bop left right = fmap go (getOp' bop)
      where
        go f = ECon $ R $ f left right
        getOp' Div = Just (/)
        getOp' RDiv = Just (/)
        getOp' op = getOp op

    -- Fold at type Int (Integer); integer division is never folded.
    -- NOTE(review): 'mod' folds even when right == 0, which raises a
    -- divide-by-zero exception at simplification time.
    cfI :: Bop -> Integer -> Integer -> Maybe Expr
    cfI bop left right = fmap go (getOp' bop)
      where
        go f = ECon $ I $ f left right
        getOp' Mod = Just mod
        getOp' op = getOp op
-- | Recognise the built-in set predicates: the bare 'setEmp' symbol, or
-- an application headed by 'setMem' / 'setSub'.
isSetPred :: Expr -> Bool
isSetPred (EVar s) | s == setEmp = True
isSetPred (EApp e1 _) = case e1 of
  -- Only the head symbol of the application matters here.
  (EVar s) | s == setMem || s == setSub -> True
  _ -> False
isSetPred _ = False
-- | Fold an application @EApp e1 e2@ of a set predicate to 'PTrue' /
-- 'PFalse' when its arguments evaluate to concrete sets of integer
-- literals ('setEmp' emptiness, 'setMem' membership of an integer
-- literal, and @(setEmp a) b@ as the subset test
-- @S.null (S.difference a b)@).  Symbolic arguments leave the
-- application unchanged.
applySetFolding :: Expr -> Expr -> Expr
applySetFolding e1 e2 = case e1 of
    (EVar s) | s == setEmp
      -> maybe e (fromBool . S.null) (evalSetI e2)
    (EApp (EVar s) e1')
      | s == setMem
      -> maybe e fromBool (S.member <$> getInt e1' <*> evalSetI e2)
      | s == setEmp
      -> maybe e (fromBool . S.null) (S.difference <$> evalSetI e1' <*> evalSetI e2)
      | otherwise
      -> e
    _ -> e
  where
    -- Fallback: the original, unfolded application.
    e = EApp e1 e2

    fromBool True = PTrue
    fromBool False = PFalse

    -- Only integer literals are recognised as set elements.
    getInt :: Expr -> Maybe Integer
    getInt (ECon (I n)) = Just n
    getInt _ = Nothing

    -- Concrete interpretations of the binary set operators.
    getOp :: (Eq a, Hashable a) => Symbol -> Maybe (S.HashSet a -> S.HashSet a -> S.HashSet a)
    getOp s | s == setCup = Just S.union
            | s == setCap = Just S.intersection
            | s == setDif = Just S.difference
            | otherwise = Nothing

    -- Evaluate a set-valued expression to a concrete set of integers;
    -- 'Nothing' as soon as a symbolic sub-expression is encountered.
    evalSetI :: Expr -> Maybe (S.HashSet Integer)
    evalSetI (EApp e1 e2) = case e1 of
      (EVar s) | s == setEmpty -> Just S.empty
               | s == setSng -> case e2 of
                    (ECon (I n)) -> Just $ S.singleton n
                    _ -> Nothing
      (EApp (EVar f) e1') -> getOp f <*> evalSetI e1' <*> evalSetI e2
      _ -> Nothing
    evalSetI _ = Nothing
|
74258a8e7c7b9208fea788e14f07c1514bcff621a54713daad3c43d694514165 | clojure-interop/aws-api | AWSStepFunctions.clj | (ns com.amazonaws.services.stepfunctions.AWSStepFunctions
"Interface for accessing AWS SFN.
Note: Do not directly implement this interface, new methods are added to it regularly. Extend from
AbstractAWSStepFunctions instead.
AWS Step Functions
AWS Step Functions is a service that lets you coordinate the components of distributed applications and microservices
using visual workflows.
You can use Step Functions to build applications from individual components, each of which performs a discrete
function, or task, allowing you to scale and change applications quickly. Step Functions provides a console
that helps visualize the components of your application as a series of steps. Step Functions automatically triggers
and tracks each step, and retries steps when there are errors, so your application executes predictably and in the
right order every time. Step Functions logs the state of each step, so you can quickly diagnose and debug any issues.
Step Functions manages operations and underlying infrastructure to ensure your application is available at any scale.
You can run tasks on AWS, your own servers, or any system that has access to AWS. You can access and use Step
Functions using the console, the AWS SDKs, or an HTTP API. For more information about Step Functions, see the AWS Step Functions Developer Guide ."
(:refer-clojure :only [require comment defn ->])
(:import [com.amazonaws.services.stepfunctions AWSStepFunctions]))
(defn send-task-failure
"Used by workers to report that the task identified by the taskToken failed.
send-task-failure-request - `com.amazonaws.services.stepfunctions.model.SendTaskFailureRequest`
returns: Result of the SendTaskFailure operation returned by the service. - `com.amazonaws.services.stepfunctions.model.SendTaskFailureResult`
throws: com.amazonaws.services.stepfunctions.model.TaskDoesNotExistException"
(^com.amazonaws.services.stepfunctions.model.SendTaskFailureResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.SendTaskFailureRequest send-task-failure-request]
(-> this (.sendTaskFailure send-task-failure-request))))
(defn create-state-machine
"Creates a state machine. A state machine consists of a collection of states that can do work (Task
states), determine to which states to transition next (Choice states), stop an execution with an
error (Fail states), and so on. State machines are specified using a JSON-based, structured
language.
This operation is eventually consistent. The results are best effort and may not reflect very recent updates and
changes.
create-state-machine-request - `com.amazonaws.services.stepfunctions.model.CreateStateMachineRequest`
returns: Result of the CreateStateMachine operation returned by the service. - `com.amazonaws.services.stepfunctions.model.CreateStateMachineResult`
throws: com.amazonaws.services.stepfunctions.model.InvalidArnException - The provided Amazon Resource Name (ARN) is invalid."
(^com.amazonaws.services.stepfunctions.model.CreateStateMachineResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.CreateStateMachineRequest create-state-machine-request]
(-> this (.createStateMachine create-state-machine-request))))
(defn list-tags-for-resource
"List tags for a given resource.
list-tags-for-resource-request - `com.amazonaws.services.stepfunctions.model.ListTagsForResourceRequest`
returns: Result of the ListTagsForResource operation returned by the service. - `com.amazonaws.services.stepfunctions.model.ListTagsForResourceResult`
throws: com.amazonaws.services.stepfunctions.model.InvalidArnException - The provided Amazon Resource Name (ARN) is invalid."
(^com.amazonaws.services.stepfunctions.model.ListTagsForResourceResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.ListTagsForResourceRequest list-tags-for-resource-request]
(-> this (.listTagsForResource list-tags-for-resource-request))))
(defn set-region
"Deprecated. use AwsClientBuilder#setRegion(String)
region - The region this client will communicate with. See Region.getRegion(com.amazonaws.regions.Regions) for accessing a given region. Must not be null and must be a region where the service is available. - `com.amazonaws.regions.Region`"
([^AWSStepFunctions this ^com.amazonaws.regions.Region region]
(-> this (.setRegion region))))
(defn list-activities
"Lists the existing activities.
If nextToken is returned, there are more results available. The value of nextToken is a
unique pagination token for each page. Make the call again using the returned token to retrieve the next page.
Keep all other arguments unchanged. Each pagination token expires after 24 hours. Using an expired pagination
token will return an HTTP 400 InvalidToken error.
This operation is eventually consistent. The results are best effort and may not reflect very recent updates and
changes.
list-activities-request - `com.amazonaws.services.stepfunctions.model.ListActivitiesRequest`
returns: Result of the ListActivities operation returned by the service. - `com.amazonaws.services.stepfunctions.model.ListActivitiesResult`
throws: com.amazonaws.services.stepfunctions.model.InvalidTokenException - The provided token is invalid."
(^com.amazonaws.services.stepfunctions.model.ListActivitiesResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.ListActivitiesRequest list-activities-request]
(-> this (.listActivities list-activities-request))))
(defn untag-resource
"Remove a tag from a Step Functions resource
untag-resource-request - `com.amazonaws.services.stepfunctions.model.UntagResourceRequest`
returns: Result of the UntagResource operation returned by the service. - `com.amazonaws.services.stepfunctions.model.UntagResourceResult`
throws: com.amazonaws.services.stepfunctions.model.InvalidArnException - The provided Amazon Resource Name (ARN) is invalid."
(^com.amazonaws.services.stepfunctions.model.UntagResourceResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.UntagResourceRequest untag-resource-request]
(-> this (.untagResource untag-resource-request))))
(defn send-task-success
"Used by workers to report that the task identified by the taskToken completed successfully.
send-task-success-request - `com.amazonaws.services.stepfunctions.model.SendTaskSuccessRequest`
returns: Result of the SendTaskSuccess operation returned by the service. - `com.amazonaws.services.stepfunctions.model.SendTaskSuccessResult`
throws: com.amazonaws.services.stepfunctions.model.TaskDoesNotExistException"
(^com.amazonaws.services.stepfunctions.model.SendTaskSuccessResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.SendTaskSuccessRequest send-task-success-request]
(-> this (.sendTaskSuccess send-task-success-request))))
(defn list-state-machines
"Lists the existing state machines.
If nextToken is returned, there are more results available. The value of nextToken is a
unique pagination token for each page. Make the call again using the returned token to retrieve the next page.
Keep all other arguments unchanged. Each pagination token expires after 24 hours. Using an expired pagination
token will return an HTTP 400 InvalidToken error.
This operation is eventually consistent. The results are best effort and may not reflect very recent updates and
changes.
list-state-machines-request - `com.amazonaws.services.stepfunctions.model.ListStateMachinesRequest`
returns: Result of the ListStateMachines operation returned by the service. - `com.amazonaws.services.stepfunctions.model.ListStateMachinesResult`
throws: com.amazonaws.services.stepfunctions.model.InvalidTokenException - The provided token is invalid."
(^com.amazonaws.services.stepfunctions.model.ListStateMachinesResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.ListStateMachinesRequest list-state-machines-request]
(-> this (.listStateMachines list-state-machines-request))))
(defn update-state-machine
"Updates an existing state machine by modifying its definition and/or roleArn. Running
executions will continue to use the previous definition and roleArn. You must include
at least one of definition or roleArn or you will receive a
MissingRequiredParameter error.
All StartExecution calls within a few seconds will use the updated definition and
roleArn. Executions started immediately after calling UpdateStateMachine may use the
previous state machine definition and roleArn.
update-state-machine-request - `com.amazonaws.services.stepfunctions.model.UpdateStateMachineRequest`
returns: Result of the UpdateStateMachine operation returned by the service. - `com.amazonaws.services.stepfunctions.model.UpdateStateMachineResult`
throws: com.amazonaws.services.stepfunctions.model.InvalidArnException - The provided Amazon Resource Name (ARN) is invalid."
(^com.amazonaws.services.stepfunctions.model.UpdateStateMachineResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.UpdateStateMachineRequest update-state-machine-request]
(-> this (.updateStateMachine update-state-machine-request))))
(defn delete-activity
"Deletes an activity.
delete-activity-request - `com.amazonaws.services.stepfunctions.model.DeleteActivityRequest`
returns: Result of the DeleteActivity operation returned by the service. - `com.amazonaws.services.stepfunctions.model.DeleteActivityResult`
throws: com.amazonaws.services.stepfunctions.model.InvalidArnException - The provided Amazon Resource Name (ARN) is invalid."
(^com.amazonaws.services.stepfunctions.model.DeleteActivityResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.DeleteActivityRequest delete-activity-request]
(-> this (.deleteActivity delete-activity-request))))
(defn send-task-heartbeat
"Used by workers to report to the service that the task represented by the specified taskToken is
still making progress. This action resets the Heartbeat clock. The Heartbeat threshold
is specified in the state machine's Amazon States Language definition. This action does not in itself create an
event in the execution history. However, if the task times out, the execution history contains an
ActivityTimedOut event.
The Timeout of a task, defined in the state machine's Amazon States Language definition, is its
maximum allowed duration, regardless of the number of SendTaskHeartbeat requests received.
This operation is only useful for long-lived tasks to report the liveliness of the task.
send-task-heartbeat-request - `com.amazonaws.services.stepfunctions.model.SendTaskHeartbeatRequest`
returns: Result of the SendTaskHeartbeat operation returned by the service. - `com.amazonaws.services.stepfunctions.model.SendTaskHeartbeatResult`
throws: com.amazonaws.services.stepfunctions.model.TaskDoesNotExistException"
(^com.amazonaws.services.stepfunctions.model.SendTaskHeartbeatResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.SendTaskHeartbeatRequest send-task-heartbeat-request]
(-> this (.sendTaskHeartbeat send-task-heartbeat-request))))
(defn list-executions
"Lists the executions of a state machine that meet the filtering criteria. Results are sorted by time, with the
most recent execution first.
If nextToken is returned, there are more results available. The value of nextToken is a
unique pagination token for each page. Make the call again using the returned token to retrieve the next page.
Keep all other arguments unchanged. Each pagination token expires after 24 hours. Using an expired pagination
token will return an HTTP 400 InvalidToken error.
This operation is eventually consistent. The results are best effort and may not reflect very recent updates and
changes.
list-executions-request - `com.amazonaws.services.stepfunctions.model.ListExecutionsRequest`
returns: Result of the ListExecutions operation returned by the service. - `com.amazonaws.services.stepfunctions.model.ListExecutionsResult`
throws: com.amazonaws.services.stepfunctions.model.InvalidArnException - The provided Amazon Resource Name (ARN) is invalid."
(^com.amazonaws.services.stepfunctions.model.ListExecutionsResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.ListExecutionsRequest list-executions-request]
(-> this (.listExecutions list-executions-request))))
(defn delete-state-machine
"Deletes a state machine. This is an asynchronous operation: It sets the state machine's status to
DELETING and begins the deletion process. Each state machine execution is deleted the next time it
makes a state transition.
The state machine itself is deleted after all executions are completed or deleted.
delete-state-machine-request - `com.amazonaws.services.stepfunctions.model.DeleteStateMachineRequest`
returns: Result of the DeleteStateMachine operation returned by the service. - `com.amazonaws.services.stepfunctions.model.DeleteStateMachineResult`
throws: com.amazonaws.services.stepfunctions.model.InvalidArnException - The provided Amazon Resource Name (ARN) is invalid."
(^com.amazonaws.services.stepfunctions.model.DeleteStateMachineResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.DeleteStateMachineRequest delete-state-machine-request]
(-> this (.deleteStateMachine delete-state-machine-request))))
(defn shutdown
"Shuts down this client object, releasing any resources that might be held open. This is an optional method, and
callers are not expected to call it, but can if they want to explicitly release any open resources. Once a client
has been shutdown, it should not be used to make any more requests."
([^AWSStepFunctions this]
(-> this (.shutdown))))
(defn describe-activity
"Describes an activity.
This operation is eventually consistent. The results are best effort and may not reflect very recent updates and
changes.
describe-activity-request - `com.amazonaws.services.stepfunctions.model.DescribeActivityRequest`
returns: Result of the DescribeActivity operation returned by the service. - `com.amazonaws.services.stepfunctions.model.DescribeActivityResult`
throws: com.amazonaws.services.stepfunctions.model.ActivityDoesNotExistException - The specified activity does not exist."
(^com.amazonaws.services.stepfunctions.model.DescribeActivityResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.DescribeActivityRequest describe-activity-request]
(-> this (.describeActivity describe-activity-request))))
(defn describe-state-machine
"Describes a state machine.
This operation is eventually consistent. The results are best effort and may not reflect very recent updates and
changes.
describe-state-machine-request - `com.amazonaws.services.stepfunctions.model.DescribeStateMachineRequest`
returns: Result of the DescribeStateMachine operation returned by the service. - `com.amazonaws.services.stepfunctions.model.DescribeStateMachineResult`
throws: com.amazonaws.services.stepfunctions.model.InvalidArnException - The provided Amazon Resource Name (ARN) is invalid."
(^com.amazonaws.services.stepfunctions.model.DescribeStateMachineResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.DescribeStateMachineRequest describe-state-machine-request]
(-> this (.describeStateMachine describe-state-machine-request))))
(defn get-execution-history
"Returns the history of the specified execution as a list of events. By default, the results are returned in
ascending order of the timeStamp of the events. Use the reverseOrder parameter to get
the latest events first.
If nextToken is returned, there are more results available. The value of nextToken is a
unique pagination token for each page. Make the call again using the returned token to retrieve the next page.
Keep all other arguments unchanged. Each pagination token expires after 24 hours. Using an expired pagination
token will return an HTTP 400 InvalidToken error.
get-execution-history-request - `com.amazonaws.services.stepfunctions.model.GetExecutionHistoryRequest`
returns: Result of the GetExecutionHistory operation returned by the service. - `com.amazonaws.services.stepfunctions.model.GetExecutionHistoryResult`
throws: com.amazonaws.services.stepfunctions.model.ExecutionDoesNotExistException - The specified execution does not exist."
(^com.amazonaws.services.stepfunctions.model.GetExecutionHistoryResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.GetExecutionHistoryRequest get-execution-history-request]
(-> this (.getExecutionHistory get-execution-history-request))))
(defn stop-execution
"Stops an execution.
stop-execution-request - `com.amazonaws.services.stepfunctions.model.StopExecutionRequest`
returns: Result of the StopExecution operation returned by the service. - `com.amazonaws.services.stepfunctions.model.StopExecutionResult`
throws: com.amazonaws.services.stepfunctions.model.ExecutionDoesNotExistException - The specified execution does not exist."
(^com.amazonaws.services.stepfunctions.model.StopExecutionResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.StopExecutionRequest stop-execution-request]
(-> this (.stopExecution stop-execution-request))))
(defn set-endpoint
"Deprecated. use AwsClientBuilder#setEndpointConfiguration(AwsClientBuilder.EndpointConfiguration) for
example:
builder.setEndpointConfiguration(new EndpointConfiguration(endpoint, signingRegion));
endpoint - The endpoint (ex: \"states.us-east-1.amazonaws.com\") or a full URL, including the protocol (ex: \"states.us-east-1.amazonaws.com\") of the region specific AWS endpoint this client will communicate with. - `java.lang.String`"
([^AWSStepFunctions this ^java.lang.String endpoint]
(-> this (.setEndpoint endpoint))))
(defn create-activity
"Creates an activity. An activity is a task that you write in any programming language and host on any machine
that has access to AWS Step Functions. Activities must poll Step Functions using the GetActivityTask
API action and respond using SendTask* API actions. This function lets Step Functions know the
existence of your activity and returns an identifier for use in a state machine and when polling from the
activity.
This operation is eventually consistent. The results are best effort and may not reflect very recent updates and
changes.
create-activity-request - `com.amazonaws.services.stepfunctions.model.CreateActivityRequest`
returns: Result of the CreateActivity operation returned by the service. - `com.amazonaws.services.stepfunctions.model.CreateActivityResult`
throws: com.amazonaws.services.stepfunctions.model.ActivityLimitExceededException - The maximum number of activities has been reached. Existing activities must be deleted before a new activity can be created."
(^com.amazonaws.services.stepfunctions.model.CreateActivityResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.CreateActivityRequest create-activity-request]
(-> this (.createActivity create-activity-request))))
(defn get-activity-task
"Used by workers to retrieve a task (with the specified activity ARN) which has been scheduled for execution by a
running state machine. This initiates a long poll, where the service holds the HTTP connection open and responds
as soon as a task becomes available (i.e. an execution of a task of this type is needed.) The maximum time the
service holds on to the request before responding is 60 seconds. If no task is available within 60 seconds, the
poll returns a taskToken with a null string.
Workers should set their client side socket timeout to at least 65 seconds (5 seconds higher than the maximum
time the service may hold the poll request).
Polling with GetActivityTask can cause latency in some implementations. See Avoid Latency When Polling
for Activity Tasks in the Step Functions Developer Guide.
get-activity-task-request - `com.amazonaws.services.stepfunctions.model.GetActivityTaskRequest`
returns: Result of the GetActivityTask operation returned by the service. - `com.amazonaws.services.stepfunctions.model.GetActivityTaskResult`
throws: com.amazonaws.services.stepfunctions.model.ActivityDoesNotExistException - The specified activity does not exist."
(^com.amazonaws.services.stepfunctions.model.GetActivityTaskResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.GetActivityTaskRequest get-activity-task-request]
(-> this (.getActivityTask get-activity-task-request))))
(defn describe-state-machine-for-execution
"Describes the state machine associated with a specific execution.
This operation is eventually consistent. The results are best effort and may not reflect very recent updates and
changes.
describe-state-machine-for-execution-request - `com.amazonaws.services.stepfunctions.model.DescribeStateMachineForExecutionRequest`
returns: Result of the DescribeStateMachineForExecution operation returned by the service. - `com.amazonaws.services.stepfunctions.model.DescribeStateMachineForExecutionResult`
throws: com.amazonaws.services.stepfunctions.model.ExecutionDoesNotExistException - The specified execution does not exist."
(^com.amazonaws.services.stepfunctions.model.DescribeStateMachineForExecutionResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.DescribeStateMachineForExecutionRequest describe-state-machine-for-execution-request]
(-> this (.describeStateMachineForExecution describe-state-machine-for-execution-request))))
(defn get-cached-response-metadata
"Returns additional metadata for a previously executed successful request, typically used for debugging issues
where a service isn't acting as expected. This data isn't considered part of the result data returned by an
operation, so it's available through this separate, diagnostic interface.
Response metadata is only cached for a limited period of time, so if you need to access this extra diagnostic
information for an executed request, you should use this method to retrieve it as soon as possible after
executing a request.
request - The originally executed request. - `com.amazonaws.AmazonWebServiceRequest`
returns: The response metadata for the specified request, or null if none is available. - `com.amazonaws.ResponseMetadata`"
(^com.amazonaws.ResponseMetadata [^AWSStepFunctions this ^com.amazonaws.AmazonWebServiceRequest request]
(-> this (.getCachedResponseMetadata request))))
(defn describe-execution
"Describes an execution.
This operation is eventually consistent. The results are best effort and may not reflect very recent updates and
changes.
describe-execution-request - `com.amazonaws.services.stepfunctions.model.DescribeExecutionRequest`
returns: Result of the DescribeExecution operation returned by the service. - `com.amazonaws.services.stepfunctions.model.DescribeExecutionResult`
throws: com.amazonaws.services.stepfunctions.model.ExecutionDoesNotExistException - The specified execution does not exist."
(^com.amazonaws.services.stepfunctions.model.DescribeExecutionResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.DescribeExecutionRequest describe-execution-request]
(-> this (.describeExecution describe-execution-request))))
(defn tag-resource
"Add a tag to a Step Functions resource.
tag-resource-request - `com.amazonaws.services.stepfunctions.model.TagResourceRequest`
returns: Result of the TagResource operation returned by the service. - `com.amazonaws.services.stepfunctions.model.TagResourceResult`
throws: com.amazonaws.services.stepfunctions.model.InvalidArnException - The provided Amazon Resource Name (ARN) is invalid."
(^com.amazonaws.services.stepfunctions.model.TagResourceResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.TagResourceRequest tag-resource-request]
(-> this (.tagResource tag-resource-request))))
(defn start-execution
"Starts a state machine execution.
StartExecution is idempotent. If StartExecution is called with the same name and input
as a running execution, the call will succeed and return the same response as the original request. If the
execution is closed or if the input is different, it will return a 400 ExecutionAlreadyExists error.
Names can be reused after 90 days.
start-execution-request - `com.amazonaws.services.stepfunctions.model.StartExecutionRequest`
returns: Result of the StartExecution operation returned by the service. - `com.amazonaws.services.stepfunctions.model.StartExecutionResult`
throws: com.amazonaws.services.stepfunctions.model.ExecutionLimitExceededException - The maximum number of running executions has been reached. Running executions must end or be stopped before a new execution can be started."
(^com.amazonaws.services.stepfunctions.model.StartExecutionResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.StartExecutionRequest start-execution-request]
(-> this (.startExecution start-execution-request))))
| null | https://raw.githubusercontent.com/clojure-interop/aws-api/59249b43d3bfaff0a79f5f4f8b7bc22518a3bf14/com.amazonaws.services.stepfunctions/src/com/amazonaws/services/stepfunctions/AWSStepFunctions.clj | clojure | (ns com.amazonaws.services.stepfunctions.AWSStepFunctions
"Interface for accessing AWS SFN.
Note: Do not directly implement this interface, new methods are added to it regularly. Extend from
AbstractAWSStepFunctions instead.
AWS Step Functions
AWS Step Functions is a service that lets you coordinate the components of distributed applications and microservices
using visual workflows.
You can use Step Functions to build applications from individual components, each of which performs a discrete
function, or task, allowing you to scale and change applications quickly. Step Functions provides a console
that helps visualize the components of your application as a series of steps. Step Functions automatically triggers
and tracks each step, and retries steps when there are errors, so your application executes predictably and in the
right order every time. Step Functions logs the state of each step, so you can quickly diagnose and debug any issues.
Step Functions manages operations and underlying infrastructure to ensure your application is available at any scale.
You can run tasks on AWS, your own servers, or any system that has access to AWS. You can access and use Step
Functions using the console, the AWS SDKs, or an HTTP API. For more information about Step Functions, see the AWS Step Functions Developer Guide ."
(:refer-clojure :only [require comment defn ->])
(:import [com.amazonaws.services.stepfunctions AWSStepFunctions]))
(defn send-task-failure
"Used by workers to report that the task identified by the taskToken failed.
send-task-failure-request - `com.amazonaws.services.stepfunctions.model.SendTaskFailureRequest`
returns: Result of the SendTaskFailure operation returned by the service. - `com.amazonaws.services.stepfunctions.model.SendTaskFailureResult`
throws: com.amazonaws.services.stepfunctions.model.TaskDoesNotExistException"
(^com.amazonaws.services.stepfunctions.model.SendTaskFailureResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.SendTaskFailureRequest send-task-failure-request]
(-> this (.sendTaskFailure send-task-failure-request))))
(defn create-state-machine
"Creates a state machine. A state machine consists of a collection of states that can do work (Task
states), determine to which states to transition next (Choice states), stop an execution with an
error (Fail states), and so on. State machines are specified using a JSON-based, structured
language.
This operation is eventually consistent. The results are best effort and may not reflect very recent updates and
changes.
create-state-machine-request - `com.amazonaws.services.stepfunctions.model.CreateStateMachineRequest`
returns: Result of the CreateStateMachine operation returned by the service. - `com.amazonaws.services.stepfunctions.model.CreateStateMachineResult`
throws: com.amazonaws.services.stepfunctions.model.InvalidArnException - The provided Amazon Resource Name (ARN) is invalid."
(^com.amazonaws.services.stepfunctions.model.CreateStateMachineResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.CreateStateMachineRequest create-state-machine-request]
(-> this (.createStateMachine create-state-machine-request))))
(defn list-tags-for-resource
"List tags for a given resource.
list-tags-for-resource-request - `com.amazonaws.services.stepfunctions.model.ListTagsForResourceRequest`
returns: Result of the ListTagsForResource operation returned by the service. - `com.amazonaws.services.stepfunctions.model.ListTagsForResourceResult`
throws: com.amazonaws.services.stepfunctions.model.InvalidArnException - The provided Amazon Resource Name (ARN) is invalid."
(^com.amazonaws.services.stepfunctions.model.ListTagsForResourceResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.ListTagsForResourceRequest list-tags-for-resource-request]
(-> this (.listTagsForResource list-tags-for-resource-request))))
(defn set-region
"Deprecated. use AwsClientBuilder#setRegion(String)
region - The region this client will communicate with. See Region.getRegion(com.amazonaws.regions.Regions) for accessing a given region. Must not be null and must be a region where the service is available. - `com.amazonaws.regions.Region`"
([^AWSStepFunctions this ^com.amazonaws.regions.Region region]
(-> this (.setRegion region))))
(defn list-activities
"Lists the existing activities.
If nextToken is returned, there are more results available. The value of nextToken is a
unique pagination token for each page. Make the call again using the returned token to retrieve the next page.
Keep all other arguments unchanged. Each pagination token expires after 24 hours. Using an expired pagination
token will return an HTTP 400 InvalidToken error.
This operation is eventually consistent. The results are best effort and may not reflect very recent updates and
changes.
list-activities-request - `com.amazonaws.services.stepfunctions.model.ListActivitiesRequest`
returns: Result of the ListActivities operation returned by the service. - `com.amazonaws.services.stepfunctions.model.ListActivitiesResult`
throws: com.amazonaws.services.stepfunctions.model.InvalidTokenException - The provided token is invalid."
(^com.amazonaws.services.stepfunctions.model.ListActivitiesResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.ListActivitiesRequest list-activities-request]
(-> this (.listActivities list-activities-request))))
(defn untag-resource
"Remove a tag from a Step Functions resource
untag-resource-request - `com.amazonaws.services.stepfunctions.model.UntagResourceRequest`
returns: Result of the UntagResource operation returned by the service. - `com.amazonaws.services.stepfunctions.model.UntagResourceResult`
throws: com.amazonaws.services.stepfunctions.model.InvalidArnException - The provided Amazon Resource Name (ARN) is invalid."
(^com.amazonaws.services.stepfunctions.model.UntagResourceResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.UntagResourceRequest untag-resource-request]
(-> this (.untagResource untag-resource-request))))
(defn send-task-success
"Used by workers to report that the task identified by the taskToken completed successfully.
send-task-success-request - `com.amazonaws.services.stepfunctions.model.SendTaskSuccessRequest`
returns: Result of the SendTaskSuccess operation returned by the service. - `com.amazonaws.services.stepfunctions.model.SendTaskSuccessResult`
throws: com.amazonaws.services.stepfunctions.model.TaskDoesNotExistException"
(^com.amazonaws.services.stepfunctions.model.SendTaskSuccessResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.SendTaskSuccessRequest send-task-success-request]
(-> this (.sendTaskSuccess send-task-success-request))))
(defn list-state-machines
"Lists the existing state machines.
If nextToken is returned, there are more results available. The value of nextToken is a
unique pagination token for each page. Make the call again using the returned token to retrieve the next page.
Keep all other arguments unchanged. Each pagination token expires after 24 hours. Using an expired pagination
token will return an HTTP 400 InvalidToken error.
This operation is eventually consistent. The results are best effort and may not reflect very recent updates and
changes.
list-state-machines-request - `com.amazonaws.services.stepfunctions.model.ListStateMachinesRequest`
returns: Result of the ListStateMachines operation returned by the service. - `com.amazonaws.services.stepfunctions.model.ListStateMachinesResult`
throws: com.amazonaws.services.stepfunctions.model.InvalidTokenException - The provided token is invalid."
(^com.amazonaws.services.stepfunctions.model.ListStateMachinesResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.ListStateMachinesRequest list-state-machines-request]
(-> this (.listStateMachines list-state-machines-request))))
(defn update-state-machine
"Updates an existing state machine by modifying its definition and/or roleArn. Running
executions will continue to use the previous definition and roleArn. You must include
at least one of definition or roleArn or you will receive a
MissingRequiredParameter error.
All StartExecution calls within a few seconds will use the updated definition and
roleArn. Executions started immediately after calling UpdateStateMachine may use the
previous state machine definition and roleArn.
update-state-machine-request - `com.amazonaws.services.stepfunctions.model.UpdateStateMachineRequest`
returns: Result of the UpdateStateMachine operation returned by the service. - `com.amazonaws.services.stepfunctions.model.UpdateStateMachineResult`
throws: com.amazonaws.services.stepfunctions.model.InvalidArnException - The provided Amazon Resource Name (ARN) is invalid."
(^com.amazonaws.services.stepfunctions.model.UpdateStateMachineResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.UpdateStateMachineRequest update-state-machine-request]
(-> this (.updateStateMachine update-state-machine-request))))
(defn delete-activity
"Deletes an activity.
delete-activity-request - `com.amazonaws.services.stepfunctions.model.DeleteActivityRequest`
returns: Result of the DeleteActivity operation returned by the service. - `com.amazonaws.services.stepfunctions.model.DeleteActivityResult`
throws: com.amazonaws.services.stepfunctions.model.InvalidArnException - The provided Amazon Resource Name (ARN) is invalid."
(^com.amazonaws.services.stepfunctions.model.DeleteActivityResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.DeleteActivityRequest delete-activity-request]
(-> this (.deleteActivity delete-activity-request))))
(defn send-task-heartbeat
"Used by workers to report to the service that the task represented by the specified taskToken is
still making progress. This action resets the Heartbeat clock. The Heartbeat threshold
is specified in the state machine's Amazon States Language definition. This action does not in itself create an
event in the execution history. However, if the task times out, the execution history contains an
ActivityTimedOut event.
The Timeout of a task, defined in the state machine's Amazon States Language definition, is its
maximum allowed duration, regardless of the number of SendTaskHeartbeat requests received.
This operation is only useful for long-lived tasks to report the liveliness of the task.
send-task-heartbeat-request - `com.amazonaws.services.stepfunctions.model.SendTaskHeartbeatRequest`
returns: Result of the SendTaskHeartbeat operation returned by the service. - `com.amazonaws.services.stepfunctions.model.SendTaskHeartbeatResult`
throws: com.amazonaws.services.stepfunctions.model.TaskDoesNotExistException"
(^com.amazonaws.services.stepfunctions.model.SendTaskHeartbeatResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.SendTaskHeartbeatRequest send-task-heartbeat-request]
(-> this (.sendTaskHeartbeat send-task-heartbeat-request))))
(defn list-executions
"Lists the executions of a state machine that meet the filtering criteria. Results are sorted by time, with the
most recent execution first.
If nextToken is returned, there are more results available. The value of nextToken is a
unique pagination token for each page. Make the call again using the returned token to retrieve the next page.
Keep all other arguments unchanged. Each pagination token expires after 24 hours. Using an expired pagination
token will return an HTTP 400 InvalidToken error.
This operation is eventually consistent. The results are best effort and may not reflect very recent updates and
changes.
list-executions-request - `com.amazonaws.services.stepfunctions.model.ListExecutionsRequest`
returns: Result of the ListExecutions operation returned by the service. - `com.amazonaws.services.stepfunctions.model.ListExecutionsResult`
throws: com.amazonaws.services.stepfunctions.model.InvalidArnException - The provided Amazon Resource Name (ARN) is invalid."
(^com.amazonaws.services.stepfunctions.model.ListExecutionsResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.ListExecutionsRequest list-executions-request]
(-> this (.listExecutions list-executions-request))))
(defn delete-state-machine
"Deletes a state machine. This is an asynchronous operation: It sets the state machine's status to
DELETING and begins the deletion process. Each state machine execution is deleted the next time it
makes a state transition.
The state machine itself is deleted after all executions are completed or deleted.
delete-state-machine-request - `com.amazonaws.services.stepfunctions.model.DeleteStateMachineRequest`
returns: Result of the DeleteStateMachine operation returned by the service. - `com.amazonaws.services.stepfunctions.model.DeleteStateMachineResult`
throws: com.amazonaws.services.stepfunctions.model.InvalidArnException - The provided Amazon Resource Name (ARN) is invalid."
(^com.amazonaws.services.stepfunctions.model.DeleteStateMachineResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.DeleteStateMachineRequest delete-state-machine-request]
(-> this (.deleteStateMachine delete-state-machine-request))))
(defn shutdown
"Shuts down this client object, releasing any resources that might be held open. This is an optional method, and
callers are not expected to call it, but can if they want to explicitly release any open resources. Once a client
has been shutdown, it should not be used to make any more requests."
([^AWSStepFunctions this]
(-> this (.shutdown))))
(defn describe-activity
"Describes an activity.
This operation is eventually consistent. The results are best effort and may not reflect very recent updates and
changes.
describe-activity-request - `com.amazonaws.services.stepfunctions.model.DescribeActivityRequest`
returns: Result of the DescribeActivity operation returned by the service. - `com.amazonaws.services.stepfunctions.model.DescribeActivityResult`
throws: com.amazonaws.services.stepfunctions.model.ActivityDoesNotExistException - The specified activity does not exist."
(^com.amazonaws.services.stepfunctions.model.DescribeActivityResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.DescribeActivityRequest describe-activity-request]
(-> this (.describeActivity describe-activity-request))))
(defn describe-state-machine
"Describes a state machine.
This operation is eventually consistent. The results are best effort and may not reflect very recent updates and
changes.
describe-state-machine-request - `com.amazonaws.services.stepfunctions.model.DescribeStateMachineRequest`
returns: Result of the DescribeStateMachine operation returned by the service. - `com.amazonaws.services.stepfunctions.model.DescribeStateMachineResult`
throws: com.amazonaws.services.stepfunctions.model.InvalidArnException - The provided Amazon Resource Name (ARN) is invalid."
(^com.amazonaws.services.stepfunctions.model.DescribeStateMachineResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.DescribeStateMachineRequest describe-state-machine-request]
(-> this (.describeStateMachine describe-state-machine-request))))
(defn get-execution-history
"Returns the history of the specified execution as a list of events. By default, the results are returned in
ascending order of the timeStamp of the events. Use the reverseOrder parameter to get
the latest events first.
If nextToken is returned, there are more results available. The value of nextToken is a
unique pagination token for each page. Make the call again using the returned token to retrieve the next page.
Keep all other arguments unchanged. Each pagination token expires after 24 hours. Using an expired pagination
token will return an HTTP 400 InvalidToken error.
get-execution-history-request - `com.amazonaws.services.stepfunctions.model.GetExecutionHistoryRequest`
returns: Result of the GetExecutionHistory operation returned by the service. - `com.amazonaws.services.stepfunctions.model.GetExecutionHistoryResult`
throws: com.amazonaws.services.stepfunctions.model.ExecutionDoesNotExistException - The specified execution does not exist."
(^com.amazonaws.services.stepfunctions.model.GetExecutionHistoryResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.GetExecutionHistoryRequest get-execution-history-request]
(-> this (.getExecutionHistory get-execution-history-request))))
(defn stop-execution
"Stops an execution.
stop-execution-request - `com.amazonaws.services.stepfunctions.model.StopExecutionRequest`
returns: Result of the StopExecution operation returned by the service. - `com.amazonaws.services.stepfunctions.model.StopExecutionResult`
throws: com.amazonaws.services.stepfunctions.model.ExecutionDoesNotExistException - The specified execution does not exist."
(^com.amazonaws.services.stepfunctions.model.StopExecutionResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.StopExecutionRequest stop-execution-request]
(-> this (.stopExecution stop-execution-request))))
(defn set-endpoint
"Deprecated. use AwsClientBuilder#setEndpointConfiguration(AwsClientBuilder.EndpointConfiguration) for
example:
endpoint - The endpoint (ex: \"states.us-east-1.amazonaws.com\") or a full URL, including the protocol (ex: \"states.us-east-1.amazonaws.com\") of the region specific AWS endpoint this client will communicate with. - `java.lang.String`"
([^AWSStepFunctions this ^java.lang.String endpoint]
(-> this (.setEndpoint endpoint))))
(defn create-activity
"Creates an activity. An activity is a task that you write in any programming language and host on any machine
that has access to AWS Step Functions. Activities must poll Step Functions using the GetActivityTask
API action and respond using SendTask* API actions. This function lets Step Functions know the
existence of your activity and returns an identifier for use in a state machine and when polling from the
activity.
This operation is eventually consistent. The results are best effort and may not reflect very recent updates and
changes.
create-activity-request - `com.amazonaws.services.stepfunctions.model.CreateActivityRequest`
returns: Result of the CreateActivity operation returned by the service. - `com.amazonaws.services.stepfunctions.model.CreateActivityResult`
throws: com.amazonaws.services.stepfunctions.model.ActivityLimitExceededException - The maximum number of activities has been reached. Existing activities must be deleted before a new activity can be created."
(^com.amazonaws.services.stepfunctions.model.CreateActivityResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.CreateActivityRequest create-activity-request]
(-> this (.createActivity create-activity-request))))
(defn get-activity-task
"Used by workers to retrieve a task (with the specified activity ARN) which has been scheduled for execution by a
running state machine. This initiates a long poll, where the service holds the HTTP connection open and responds
as soon as a task becomes available (i.e. an execution of a task of this type is needed.) The maximum time the
service holds on to the request before responding is 60 seconds. If no task is available within 60 seconds, the
poll returns a taskToken with a null string.
Workers should set their client side socket timeout to at least 65 seconds (5 seconds higher than the maximum
time the service may hold the poll request).
Polling with GetActivityTask can cause latency in some implementations. See Avoid Latency When Polling
for Activity Tasks in the Step Functions Developer Guide.
get-activity-task-request - `com.amazonaws.services.stepfunctions.model.GetActivityTaskRequest`
returns: Result of the GetActivityTask operation returned by the service. - `com.amazonaws.services.stepfunctions.model.GetActivityTaskResult`
throws: com.amazonaws.services.stepfunctions.model.ActivityDoesNotExistException - The specified activity does not exist."
(^com.amazonaws.services.stepfunctions.model.GetActivityTaskResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.GetActivityTaskRequest get-activity-task-request]
(-> this (.getActivityTask get-activity-task-request))))
(defn describe-state-machine-for-execution
"Describes the state machine associated with a specific execution.
This operation is eventually consistent. The results are best effort and may not reflect very recent updates and
changes.
describe-state-machine-for-execution-request - `com.amazonaws.services.stepfunctions.model.DescribeStateMachineForExecutionRequest`
returns: Result of the DescribeStateMachineForExecution operation returned by the service. - `com.amazonaws.services.stepfunctions.model.DescribeStateMachineForExecutionResult`
throws: com.amazonaws.services.stepfunctions.model.ExecutionDoesNotExistException - The specified execution does not exist."
(^com.amazonaws.services.stepfunctions.model.DescribeStateMachineForExecutionResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.DescribeStateMachineForExecutionRequest describe-state-machine-for-execution-request]
(-> this (.describeStateMachineForExecution describe-state-machine-for-execution-request))))
(defn get-cached-response-metadata
"Returns additional metadata for a previously executed successful request, typically used for debugging issues
where a service isn't acting as expected. This data isn't considered part of the result data returned by an
operation, so it's available through this separate, diagnostic interface.
Response metadata is only cached for a limited period of time, so if you need to access this extra diagnostic
information for an executed request, you should use this method to retrieve it as soon as possible after
executing a request.
request - The originally executed request. - `com.amazonaws.AmazonWebServiceRequest`
returns: The response metadata for the specified request, or null if none is available. - `com.amazonaws.ResponseMetadata`"
(^com.amazonaws.ResponseMetadata [^AWSStepFunctions this ^com.amazonaws.AmazonWebServiceRequest request]
(-> this (.getCachedResponseMetadata request))))
(defn describe-execution
"Describes an execution.
This operation is eventually consistent. The results are best effort and may not reflect very recent updates and
changes.
describe-execution-request - `com.amazonaws.services.stepfunctions.model.DescribeExecutionRequest`
returns: Result of the DescribeExecution operation returned by the service. - `com.amazonaws.services.stepfunctions.model.DescribeExecutionResult`
throws: com.amazonaws.services.stepfunctions.model.ExecutionDoesNotExistException - The specified execution does not exist."
(^com.amazonaws.services.stepfunctions.model.DescribeExecutionResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.DescribeExecutionRequest describe-execution-request]
(-> this (.describeExecution describe-execution-request))))
(defn tag-resource
"Add a tag to a Step Functions resource.
tag-resource-request - `com.amazonaws.services.stepfunctions.model.TagResourceRequest`
returns: Result of the TagResource operation returned by the service. - `com.amazonaws.services.stepfunctions.model.TagResourceResult`
throws: com.amazonaws.services.stepfunctions.model.InvalidArnException - The provided Amazon Resource Name (ARN) is invalid."
(^com.amazonaws.services.stepfunctions.model.TagResourceResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.TagResourceRequest tag-resource-request]
(-> this (.tagResource tag-resource-request))))
(defn start-execution
"Starts a state machine execution.
StartExecution is idempotent. If StartExecution is called with the same name and input
as a running execution, the call will succeed and return the same response as the original request. If the
execution is closed or if the input is different, it will return a 400 ExecutionAlreadyExists error.
Names can be reused after 90 days.
start-execution-request - `com.amazonaws.services.stepfunctions.model.StartExecutionRequest`
returns: Result of the StartExecution operation returned by the service. - `com.amazonaws.services.stepfunctions.model.StartExecutionResult`
throws: com.amazonaws.services.stepfunctions.model.ExecutionLimitExceededException - The maximum number of running executions has been reached. Running executions must end or be stopped before a new execution can be started."
(^com.amazonaws.services.stepfunctions.model.StartExecutionResult [^AWSStepFunctions this ^com.amazonaws.services.stepfunctions.model.StartExecutionRequest start-execution-request]
(-> this (.startExecution start-execution-request))))
| |
695e8d850a4ee4095bc9dce8cd2cab623ebeeae34d0a51e48fcbae5af78a2e71 | mirleft/ocaml-tls | x509_lwt.ml | open Lwt
let failure msg = fail @@ Failure msg
let catch_invalid_arg th h =
Lwt.catch (fun () -> th)
(function
| Invalid_argument msg -> h msg
| exn -> fail exn)
let (</>) a b = a ^ "/" ^ b
let o f g x = f (g x)
let read_file path =
let open Lwt_io in
open_file ~mode:Input path >>= fun file ->
read file >|= Cstruct.of_string >>= fun cs ->
close file >|= fun () ->
cs
let read_dir path =
let open Lwt_unix in
let rec collect acc d =
readdir_n d 10 >>= function
| [||] -> return acc
| xs -> collect (Array.to_list xs @ acc) d in
opendir path >>= fun dir ->
collect [] dir >>= fun entries ->
closedir dir >|= fun () ->
entries
let extension str =
let n = String.length str in
let rec scan = function
| i when i = 0 -> None
| i when str.[i - 1] = '.' ->
Some (String.sub str i (n - i))
| i -> scan (pred i) in
scan n
let private_of_pems ~cert ~priv_key =
catch_invalid_arg
(read_file cert >|= fun pem ->
match X509.Certificate.decode_pem_multiple pem with
| Ok cs -> cs
| Error (`Msg m) -> invalid_arg ("failed to parse certificates " ^ m))
(o failure @@ Printf.sprintf "Private certificates (%s): %s" cert) >>= fun certs ->
catch_invalid_arg
(read_file priv_key >|= fun pem ->
match X509.Private_key.decode_pem pem with
| Ok key -> key
| Error (`Msg m) -> invalid_arg ("failed to parse private key " ^ m))
(o failure @@ Printf.sprintf "Private key (%s): %s" priv_key) >>= fun pk ->
return (certs, pk)
let certs_of_pem path =
catch_invalid_arg
(read_file path >|= fun pem ->
match X509.Certificate.decode_pem_multiple pem with
| Ok cs -> cs
| Error (`Msg m) -> invalid_arg ("failed to parse certificates " ^ m))
(o failure @@ Printf.sprintf "Certificates in %s: %s" path)
let certs_of_pem_dir path =
read_dir path
>|= List.filter (fun file -> extension file = Some "crt")
>>= Lwt_list.map_p (fun file -> certs_of_pem (path </> file))
>|= List.concat
let crl_of_pem path =
catch_invalid_arg
(read_file path >|= fun data ->
match X509.CRL.decode_der data with
| Ok cs -> cs
| Error (`Msg m) -> invalid_arg ("failed to parse CRL " ^ m))
(o failure @@ Printf.sprintf "CRL in %s: %s" path)
let crls_of_pem_dir = function
| None -> Lwt.return None
| Some path ->
read_dir path >>= fun files ->
Lwt_list.map_p (fun file -> crl_of_pem (path </> file)) files >|= fun crls ->
Some crls
let authenticator ?allowed_hashes ?crls param =
let time () = Some (Ptime_clock.now ()) in
let of_cas cas =
crls_of_pem_dir crls >|= fun crls ->
X509.Authenticator.chain_of_trust ?allowed_hashes ?crls ~time cas
and dotted_hex_to_cs hex =
Cstruct.of_hex (String.map (function ':' -> ' ' | x -> x) hex)
and fingerp hash fingerprint =
X509.Authenticator.server_key_fingerprint ~time ~hash ~fingerprint
and cert_fingerp hash fingerprint =
X509.Authenticator.server_cert_fingerprint ~time ~hash ~fingerprint
in
match param with
| `Ca_file path -> certs_of_pem path >>= of_cas
| `Ca_dir path -> certs_of_pem_dir path >>= of_cas
| `Key_fingerprint (hash, fp) -> return (fingerp hash fp)
| `Hex_key_fingerprint (hash, fp) ->
let fp = dotted_hex_to_cs fp in
return (fingerp hash fp)
| `Cert_fingerprint (hash, fp) -> return (cert_fingerp hash fp)
| `Hex_cert_fingerprint (hash, fp) ->
let fp = dotted_hex_to_cs fp in
return (cert_fingerp hash fp)
| null | https://raw.githubusercontent.com/mirleft/ocaml-tls/3b7736f61c684bb11170e444126fea7df1ec7d69/lwt/x509_lwt.ml | ocaml | open Lwt
let failure msg = fail @@ Failure msg
let catch_invalid_arg th h =
Lwt.catch (fun () -> th)
(function
| Invalid_argument msg -> h msg
| exn -> fail exn)
let (</>) a b = a ^ "/" ^ b
let o f g x = f (g x)
let read_file path =
let open Lwt_io in
open_file ~mode:Input path >>= fun file ->
read file >|= Cstruct.of_string >>= fun cs ->
close file >|= fun () ->
cs
let read_dir path =
let open Lwt_unix in
let rec collect acc d =
readdir_n d 10 >>= function
| [||] -> return acc
| xs -> collect (Array.to_list xs @ acc) d in
opendir path >>= fun dir ->
collect [] dir >>= fun entries ->
closedir dir >|= fun () ->
entries
let extension str =
let n = String.length str in
let rec scan = function
| i when i = 0 -> None
| i when str.[i - 1] = '.' ->
Some (String.sub str i (n - i))
| i -> scan (pred i) in
scan n
let private_of_pems ~cert ~priv_key =
catch_invalid_arg
(read_file cert >|= fun pem ->
match X509.Certificate.decode_pem_multiple pem with
| Ok cs -> cs
| Error (`Msg m) -> invalid_arg ("failed to parse certificates " ^ m))
(o failure @@ Printf.sprintf "Private certificates (%s): %s" cert) >>= fun certs ->
catch_invalid_arg
(read_file priv_key >|= fun pem ->
match X509.Private_key.decode_pem pem with
| Ok key -> key
| Error (`Msg m) -> invalid_arg ("failed to parse private key " ^ m))
(o failure @@ Printf.sprintf "Private key (%s): %s" priv_key) >>= fun pk ->
return (certs, pk)
let certs_of_pem path =
catch_invalid_arg
(read_file path >|= fun pem ->
match X509.Certificate.decode_pem_multiple pem with
| Ok cs -> cs
| Error (`Msg m) -> invalid_arg ("failed to parse certificates " ^ m))
(o failure @@ Printf.sprintf "Certificates in %s: %s" path)
let certs_of_pem_dir path =
read_dir path
>|= List.filter (fun file -> extension file = Some "crt")
>>= Lwt_list.map_p (fun file -> certs_of_pem (path </> file))
>|= List.concat
let crl_of_pem path =
catch_invalid_arg
(read_file path >|= fun data ->
match X509.CRL.decode_der data with
| Ok cs -> cs
| Error (`Msg m) -> invalid_arg ("failed to parse CRL " ^ m))
(o failure @@ Printf.sprintf "CRL in %s: %s" path)
let crls_of_pem_dir = function
| None -> Lwt.return None
| Some path ->
read_dir path >>= fun files ->
Lwt_list.map_p (fun file -> crl_of_pem (path </> file)) files >|= fun crls ->
Some crls
let authenticator ?allowed_hashes ?crls param =
let time () = Some (Ptime_clock.now ()) in
let of_cas cas =
crls_of_pem_dir crls >|= fun crls ->
X509.Authenticator.chain_of_trust ?allowed_hashes ?crls ~time cas
and dotted_hex_to_cs hex =
Cstruct.of_hex (String.map (function ':' -> ' ' | x -> x) hex)
and fingerp hash fingerprint =
X509.Authenticator.server_key_fingerprint ~time ~hash ~fingerprint
and cert_fingerp hash fingerprint =
X509.Authenticator.server_cert_fingerprint ~time ~hash ~fingerprint
in
match param with
| `Ca_file path -> certs_of_pem path >>= of_cas
| `Ca_dir path -> certs_of_pem_dir path >>= of_cas
| `Key_fingerprint (hash, fp) -> return (fingerp hash fp)
| `Hex_key_fingerprint (hash, fp) ->
let fp = dotted_hex_to_cs fp in
return (fingerp hash fp)
| `Cert_fingerprint (hash, fp) -> return (cert_fingerp hash fp)
| `Hex_cert_fingerprint (hash, fp) ->
let fp = dotted_hex_to_cs fp in
return (cert_fingerp hash fp)
| |
c29cbc8a24daea70d457d955a85c181485c73dd1de3cc25e7583329bf5a31a68 | alexandergunnarson/quantum | auth.cljc | (ns quantum.test.apis.amazon.cloud-drive.auth
(:require [quantum.apis.amazon.cloud-drive.auth :as ns]))
(defn test:retrieve-authorization-code [user])
(defn test:initial-auth-tokens-from-code [user code])
(defn test:refresh-token!
[user])
| null | https://raw.githubusercontent.com/alexandergunnarson/quantum/0c655af439734709566110949f9f2f482e468509/test/quantum/test/apis/amazon/cloud_drive/auth.cljc | clojure | (ns quantum.test.apis.amazon.cloud-drive.auth
(:require [quantum.apis.amazon.cloud-drive.auth :as ns]))
(defn test:retrieve-authorization-code [user])
(defn test:initial-auth-tokens-from-code [user code])
(defn test:refresh-token!
[user])
| |
d14c37128330dd9296a79eb5c4bcbe299eb51c619f676204db09927eb4d69976 | yuce/teacup_nats | nats.erl | Copyright 2016 < >
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
% -2.0
% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
-module(nats).
-export([new/0,
new/1]).
-export([connect/2,
connect/3,
pub/2,
pub/3,
sub/2,
sub/3,
unsub/2,
unsub/3,
disconnect/1,
is_ready/1]).
-include("teacup_nats_common.hrl").
%% == API
new() ->
new(#{}).
new(Opts) ->
teacup:new(?HANDLER, Opts).
connect(Host, Port) ->
connect(Host, Port, #{}).
connect(Host, Port, #{verbose := true} = Opts) ->
{ok, Conn} = teacup:new(?HANDLER, Opts),
case teacup:call(Conn, {connect, Host, Port}) of
ok ->
{ok, Conn};
{error, _Reason} = Error ->
Error
end;
connect(Host, Port, Opts) ->
{ok, Conn} = teacup:new(?HANDLER, Opts),
teacup:connect(Conn, Host, Port),
{ok, Conn}.
pub(Ref, Subject) ->
pub(Ref, Subject, #{}).
-spec pub(Ref :: teacup:teacup_ref(), Subject :: binary(), Opts :: map()) ->
ok | {error, Reason :: term()}.
pub({teacup@ref, ?VERBOSE_SIGNATURE, _} = Ref, Subject, Opts) ->
teacup:call(Ref, {pub, Subject, Opts});
pub({teacup@ref, ?SIGNATURE, _} = Ref, Subject, Opts) ->
teacup:cast(Ref, {pub, Subject, Opts}).
sub(Ref, Subject) ->
sub(Ref, Subject, #{}).
-spec sub(Ref :: teacup:teacup_ref(), Subject :: binary(), Opts :: map()) ->
ok | {error, Reason :: term()}.
sub({teacup@ref, ?VERBOSE_SIGNATURE, _} = Ref, Subject, Opts) ->
teacup:call(Ref, {sub, Subject, Opts, self()});
sub({teacup@ref, ?SIGNATURE, _} = Ref, Subject, Opts) ->
teacup:cast(Ref, {sub, Subject, Opts, self()}).
unsub(Ref, Subject) ->
unsub(Ref, Subject, #{}).
-spec unsub(Ref :: teacup:teacup_ref(), Subject :: binary(), Opts :: map()) ->
ok | {error, Reason :: term()}.
unsub({teacup@ref, ?VERBOSE_SIGNATURE, _} = Ref, Subject, Opts) ->
teacup:call(Ref, {unsub, Subject, Opts, self()});
unsub({teacup@ref, ?SIGNATURE, _} = Ref, Subject, Opts) ->
teacup:cast(Ref, {unsub, Subject, Opts, self()}).
-spec disconnect(Ref :: teacup:teacup_ref()) ->
ok | {error, Reason :: term()}.
disconnect({teacup@ref, _, _} = Ref) ->
teacup:call(Ref, {disconnect, self()}).
-spec is_ready(Ref :: teacup:teacup_ref()) ->
true | false.
is_ready({teacup@ref, ?SIGNATURE, _} = Ref) ->
teacup:call(Ref, is_ready);
is_ready({teacup@ref, ?VERBOSE_SIGNATURE, _} = Ref) ->
teacup:call(Ref, is_ready). | null | https://raw.githubusercontent.com/yuce/teacup_nats/5f25180f0b664085ccf5c7f4657726c688f8d5c4/src/nats.erl | erlang | you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
== API | Copyright 2016 < >
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(nats).
-export([new/0,
new/1]).
-export([connect/2,
connect/3,
pub/2,
pub/3,
sub/2,
sub/3,
unsub/2,
unsub/3,
disconnect/1,
is_ready/1]).
-include("teacup_nats_common.hrl").
new() ->
new(#{}).
new(Opts) ->
teacup:new(?HANDLER, Opts).
connect(Host, Port) ->
connect(Host, Port, #{}).
connect(Host, Port, #{verbose := true} = Opts) ->
{ok, Conn} = teacup:new(?HANDLER, Opts),
case teacup:call(Conn, {connect, Host, Port}) of
ok ->
{ok, Conn};
{error, _Reason} = Error ->
Error
end;
connect(Host, Port, Opts) ->
{ok, Conn} = teacup:new(?HANDLER, Opts),
teacup:connect(Conn, Host, Port),
{ok, Conn}.
pub(Ref, Subject) ->
pub(Ref, Subject, #{}).
-spec pub(Ref :: teacup:teacup_ref(), Subject :: binary(), Opts :: map()) ->
ok | {error, Reason :: term()}.
pub({teacup@ref, ?VERBOSE_SIGNATURE, _} = Ref, Subject, Opts) ->
teacup:call(Ref, {pub, Subject, Opts});
pub({teacup@ref, ?SIGNATURE, _} = Ref, Subject, Opts) ->
teacup:cast(Ref, {pub, Subject, Opts}).
sub(Ref, Subject) ->
sub(Ref, Subject, #{}).
-spec sub(Ref :: teacup:teacup_ref(), Subject :: binary(), Opts :: map()) ->
ok | {error, Reason :: term()}.
sub({teacup@ref, ?VERBOSE_SIGNATURE, _} = Ref, Subject, Opts) ->
teacup:call(Ref, {sub, Subject, Opts, self()});
sub({teacup@ref, ?SIGNATURE, _} = Ref, Subject, Opts) ->
teacup:cast(Ref, {sub, Subject, Opts, self()}).
unsub(Ref, Subject) ->
unsub(Ref, Subject, #{}).
-spec unsub(Ref :: teacup:teacup_ref(), Subject :: binary(), Opts :: map()) ->
ok | {error, Reason :: term()}.
unsub({teacup@ref, ?VERBOSE_SIGNATURE, _} = Ref, Subject, Opts) ->
teacup:call(Ref, {unsub, Subject, Opts, self()});
unsub({teacup@ref, ?SIGNATURE, _} = Ref, Subject, Opts) ->
teacup:cast(Ref, {unsub, Subject, Opts, self()}).
-spec disconnect(Ref :: teacup:teacup_ref()) ->
ok | {error, Reason :: term()}.
disconnect({teacup@ref, _, _} = Ref) ->
teacup:call(Ref, {disconnect, self()}).
-spec is_ready(Ref :: teacup:teacup_ref()) ->
true | false.
is_ready({teacup@ref, ?SIGNATURE, _} = Ref) ->
teacup:call(Ref, is_ready);
is_ready({teacup@ref, ?VERBOSE_SIGNATURE, _} = Ref) ->
teacup:call(Ref, is_ready). |
67767a779bb3089a541af722bd4632bd36d2bd5a859405b004d9386f58892170 | bendyworks/api-server | UserSpec.hs | # LANGUAGE QuasiQuotes #
module Api.Types.UserSpec (main, spec) where
import Data.Aeson (Result (..), fromJSON, toJSON)
import Data.Aeson.QQ (aesonQQ)
import Api.Types.Fields
import Api.Types.User
import Test.Hspec
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "Login" $ do
let login = Login (UserID 1) (UserToken "token")
json = [aesonQQ| { user_id: 1, api_token: "token" } |]
it "can be serialized to JSON" $
toJSON login `shouldBe` json
it "can be deserialized from JSON" $
fromJSON json `shouldBe` Success login
describe "User" $ do
let user = User (UserID 1) (Just $ ResourceID 2)
json = [aesonQQ| { user_id: 1, resource_id: 2 } |]
it "can be serialized to JSON" $
toJSON user `shouldBe` json
| null | https://raw.githubusercontent.com/bendyworks/api-server/9dd6d7c2599bd1c5a7e898a417a7aeb319415dd2/test/Api/Types/UserSpec.hs | haskell | # LANGUAGE QuasiQuotes #
module Api.Types.UserSpec (main, spec) where
import Data.Aeson (Result (..), fromJSON, toJSON)
import Data.Aeson.QQ (aesonQQ)
import Api.Types.Fields
import Api.Types.User
import Test.Hspec
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "Login" $ do
let login = Login (UserID 1) (UserToken "token")
json = [aesonQQ| { user_id: 1, api_token: "token" } |]
it "can be serialized to JSON" $
toJSON login `shouldBe` json
it "can be deserialized from JSON" $
fromJSON json `shouldBe` Success login
describe "User" $ do
let user = User (UserID 1) (Just $ ResourceID 2)
json = [aesonQQ| { user_id: 1, resource_id: 2 } |]
it "can be serialized to JSON" $
toJSON user `shouldBe` json
| |
eed2cec0391774ff3d8c1b2c5c4291ff33396b43aeda818cc8e7374d204959c5 | fpco/unliftio | Concurrent.hs | {-# LANGUAGE RankNTypes #-}
-- | Unlifted "Control.Concurrent".
--
-- This module is not reexported by "UnliftIO",
use it only if " UnliftIO.Async " is not enough .
--
-- @since 0.1.1.0
module UnliftIO.Concurrent
(
-- * Concurrent Haskell
ThreadId,
-- * Basic concurrency operations
myThreadId, forkIO, forkWithUnmask, forkIOWithUnmask, forkFinally, killThread, throwTo,
-- ** Threads with affinity
forkOn, forkOnWithUnmask, getNumCapabilities, setNumCapabilities,
threadCapability,
-- * Scheduling
yield,
-- ** Waiting
threadDelay, threadWaitRead, threadWaitWrite,
-- * Communication abstractions
module UnliftIO.MVar, module UnliftIO.Chan,
-- * Bound Threads
C.rtsSupportsBoundThreads, forkOS, isCurrentThreadBound, runInBoundThread,
runInUnboundThread,
-- * Weak references to ThreadIds
mkWeakThreadId
) where
import Control.Monad.IO.Class (MonadIO, liftIO)
import System.Posix.Types (Fd)
import System.Mem.Weak (Weak)
import Control.Concurrent (ThreadId)
import qualified Control.Concurrent as C
import Control.Monad.IO.Unlift
import UnliftIO.MVar
import UnliftIO.Chan
import UnliftIO.Exception (throwTo, SomeException)
-- | Lifted version of 'C.myThreadId'.
--
-- @since 0.1.1.0
myThreadId :: MonadIO m => m ThreadId
myThreadId = liftIO C.myThreadId
# INLINABLE myThreadId #
-- | Unlifted version of 'C.forkIO'.
--
-- @since 0.1.1.0
forkIO :: MonadUnliftIO m => m () -> m ThreadId
forkIO m = withRunInIO $ \run -> C.forkIO $ run m
# INLINABLE forkIO #
-- | Unlifted version of 'C.forkIOWithUnmask'.
--
@since 0.2.11
forkIOWithUnmask :: MonadUnliftIO m => ((forall a. m a -> m a) -> m ()) -> m ThreadId
forkIOWithUnmask m =
withRunInIO $ \run -> C.forkIOWithUnmask $ \unmask -> run $ m $ liftIO . unmask . run
# INLINABLE forkIOWithUnmask #
-- | Please use 'forkIOWithUnmask' instead. This function has been deprecated
in release 0.2.11 and will be removed in the next major release .
--
-- @since 0.1.1.0
forkWithUnmask :: MonadUnliftIO m => ((forall a. m a -> m a) -> m ()) -> m ThreadId
forkWithUnmask = forkIOWithUnmask
# INLINABLE forkWithUnmask #
# DEPRECATED forkWithUnmask " forkWithUnmask has been renamed to forkIOWithUnmask " #
| Unlifted version of ' C.forkFinally ' .
--
-- @since 0.1.1.0
forkFinally :: MonadUnliftIO m => m a -> (Either SomeException a -> m ()) -> m ThreadId
forkFinally m1 m2 = withRunInIO $ \run -> C.forkFinally (run m1) $ run . m2
# INLINABLE forkFinally #
-- | Lifted version of 'C.killThread'.
--
-- @since 0.1.1.0
killThread :: MonadIO m => ThreadId -> m ()
killThread = liftIO . C.killThread
# INLINABLE killThread #
-- | Unlifted version of 'C.forkOn'.
--
-- @since 0.1.1.0
forkOn :: MonadUnliftIO m => Int -> m () -> m ThreadId
forkOn i m = withRunInIO $ \run -> C.forkOn i $ run m
# INLINABLE forkOn #
| Unlifted version of ' C.forkOnWithUnmask ' .
--
-- @since 0.1.1.0
forkOnWithUnmask :: MonadUnliftIO m => Int -> ((forall a. m a -> m a) -> m ()) -> m ThreadId
forkOnWithUnmask i m =
withRunInIO $ \run -> C.forkOnWithUnmask i $ \unmask -> run $ m $ liftIO . unmask . run
# INLINABLE forkOnWithUnmask #
-- | Lifted version of 'C.getNumCapabilities'.
--
-- @since 0.1.1.0
getNumCapabilities :: MonadIO m => m Int
getNumCapabilities = liftIO C.getNumCapabilities
# INLINABLE getNumCapabilities #
-- | Lifted version of 'C.setNumCapabilities'.
--
-- @since 0.1.1.0
setNumCapabilities :: MonadIO m => Int -> m ()
setNumCapabilities = liftIO . C.setNumCapabilities
# INLINABLE setNumCapabilities #
-- | Lifted version of 'C.threadCapability'.
--
-- @since 0.1.1.0
threadCapability :: MonadIO m => ThreadId -> m (Int, Bool)
threadCapability = liftIO . C.threadCapability
# INLINABLE threadCapability #
-- | Lifted version of 'C.yield'.
--
-- @since 0.1.1.0
yield :: MonadIO m => m ()
yield = liftIO C.yield
# INLINABLE yield #
| Lifted version of ' C.threadDelay ' .
--
-- @since 0.1.1.0
threadDelay :: MonadIO m => Int -> m ()
threadDelay = liftIO . C.threadDelay
# INLINABLE threadDelay #
| Lifted version of ' ' .
--
-- @since 0.1.1.0
threadWaitRead :: MonadIO m => Fd -> m ()
threadWaitRead = liftIO . C.threadWaitRead
# INLINABLE threadWaitRead #
-- | Lifted version of 'C.threadWaitWrite'.
--
-- @since 0.1.1.0
threadWaitWrite :: MonadIO m => Fd -> m ()
threadWaitWrite = liftIO . C.threadWaitWrite
# INLINABLE threadWaitWrite #
| Unflifted version of ' ' .
--
-- @since 0.1.1.0
forkOS :: MonadUnliftIO m => m () -> m ThreadId
forkOS m = withRunInIO $ \run -> C.forkOS $ run m
{-# INLINABLE forkOS #-}
-- | Lifted version of 'C.isCurrentThreadBound'.
--
-- @since 0.1.1.0
isCurrentThreadBound :: MonadIO m => m Bool
isCurrentThreadBound = liftIO C.isCurrentThreadBound
{-# INLINABLE isCurrentThreadBound #-}
-- | Unlifted version of 'C.runInBoundThread'.
--
-- @since 0.1.1.0
runInBoundThread :: MonadUnliftIO m => m a -> m a
runInBoundThread m = withRunInIO $ \run -> C.runInBoundThread $ run m
# INLINABLE runInBoundThread #
-- | Unlifted version of 'C.runInUnboundThread'.
--
-- @since 0.1.1.0
runInUnboundThread :: MonadUnliftIO m => m a -> m a
runInUnboundThread m = withRunInIO $ \run -> C.runInUnboundThread $ run m
# INLINABLE runInUnboundThread #
-- | Lifted version of 'C.mkWeakThreadId'.
--
-- @since 0.1.1.0
mkWeakThreadId :: MonadIO m => ThreadId -> m (Weak ThreadId)
mkWeakThreadId = liftIO . C.mkWeakThreadId
{-# INLINABLE mkWeakThreadId #-}
| null | https://raw.githubusercontent.com/fpco/unliftio/d7ac43b9ae69efea0ca911aa556852e9f95af128/unliftio/src/UnliftIO/Concurrent.hs | haskell | # LANGUAGE RankNTypes #
| Unlifted "Control.Concurrent".
This module is not reexported by "UnliftIO",
@since 0.1.1.0
* Concurrent Haskell
* Basic concurrency operations
** Threads with affinity
* Scheduling
** Waiting
* Communication abstractions
* Bound Threads
* Weak references to ThreadIds
| Lifted version of 'C.myThreadId'.
@since 0.1.1.0
| Unlifted version of 'C.forkIO'.
@since 0.1.1.0
| Unlifted version of 'C.forkIOWithUnmask'.
| Please use 'forkIOWithUnmask' instead. This function has been deprecated
@since 0.1.1.0
@since 0.1.1.0
| Lifted version of 'C.killThread'.
@since 0.1.1.0
| Unlifted version of 'C.forkOn'.
@since 0.1.1.0
@since 0.1.1.0
| Lifted version of 'C.getNumCapabilities'.
@since 0.1.1.0
| Lifted version of 'C.setNumCapabilities'.
@since 0.1.1.0
| Lifted version of 'C.threadCapability'.
@since 0.1.1.0
| Lifted version of 'C.yield'.
@since 0.1.1.0
@since 0.1.1.0
@since 0.1.1.0
| Lifted version of 'C.threadWaitWrite'.
@since 0.1.1.0
@since 0.1.1.0
# INLINABLE forkOS #
| Lifted version of 'C.isCurrentThreadBound'.
@since 0.1.1.0
# INLINABLE isCurrentThreadBound #
| Unlifted version of 'C.runInBoundThread'.
@since 0.1.1.0
| Unlifted version of 'C.runInUnboundThread'.
@since 0.1.1.0
| Lifted version of 'C.mkWeakThreadId'.
@since 0.1.1.0
# INLINABLE mkWeakThreadId # | use it only if " UnliftIO.Async " is not enough .
module UnliftIO.Concurrent
(
ThreadId,
myThreadId, forkIO, forkWithUnmask, forkIOWithUnmask, forkFinally, killThread, throwTo,
forkOn, forkOnWithUnmask, getNumCapabilities, setNumCapabilities,
threadCapability,
yield,
threadDelay, threadWaitRead, threadWaitWrite,
module UnliftIO.MVar, module UnliftIO.Chan,
C.rtsSupportsBoundThreads, forkOS, isCurrentThreadBound, runInBoundThread,
runInUnboundThread,
mkWeakThreadId
) where
import Control.Monad.IO.Class (MonadIO, liftIO)
import System.Posix.Types (Fd)
import System.Mem.Weak (Weak)
import Control.Concurrent (ThreadId)
import qualified Control.Concurrent as C
import Control.Monad.IO.Unlift
import UnliftIO.MVar
import UnliftIO.Chan
import UnliftIO.Exception (throwTo, SomeException)
myThreadId :: MonadIO m => m ThreadId
myThreadId = liftIO C.myThreadId
# INLINABLE myThreadId #
forkIO :: MonadUnliftIO m => m () -> m ThreadId
forkIO m = withRunInIO $ \run -> C.forkIO $ run m
# INLINABLE forkIO #
@since 0.2.11
forkIOWithUnmask :: MonadUnliftIO m => ((forall a. m a -> m a) -> m ()) -> m ThreadId
forkIOWithUnmask m =
withRunInIO $ \run -> C.forkIOWithUnmask $ \unmask -> run $ m $ liftIO . unmask . run
# INLINABLE forkIOWithUnmask #
in release 0.2.11 and will be removed in the next major release .
forkWithUnmask :: MonadUnliftIO m => ((forall a. m a -> m a) -> m ()) -> m ThreadId
forkWithUnmask = forkIOWithUnmask
# INLINABLE forkWithUnmask #
# DEPRECATED forkWithUnmask " forkWithUnmask has been renamed to forkIOWithUnmask " #
| Unlifted version of ' C.forkFinally ' .
forkFinally :: MonadUnliftIO m => m a -> (Either SomeException a -> m ()) -> m ThreadId
forkFinally m1 m2 = withRunInIO $ \run -> C.forkFinally (run m1) $ run . m2
# INLINABLE forkFinally #
killThread :: MonadIO m => ThreadId -> m ()
killThread = liftIO . C.killThread
# INLINABLE killThread #
forkOn :: MonadUnliftIO m => Int -> m () -> m ThreadId
forkOn i m = withRunInIO $ \run -> C.forkOn i $ run m
# INLINABLE forkOn #
| Unlifted version of ' C.forkOnWithUnmask ' .
forkOnWithUnmask :: MonadUnliftIO m => Int -> ((forall a. m a -> m a) -> m ()) -> m ThreadId
forkOnWithUnmask i m =
withRunInIO $ \run -> C.forkOnWithUnmask i $ \unmask -> run $ m $ liftIO . unmask . run
# INLINABLE forkOnWithUnmask #
getNumCapabilities :: MonadIO m => m Int
getNumCapabilities = liftIO C.getNumCapabilities
# INLINABLE getNumCapabilities #
setNumCapabilities :: MonadIO m => Int -> m ()
setNumCapabilities = liftIO . C.setNumCapabilities
# INLINABLE setNumCapabilities #
threadCapability :: MonadIO m => ThreadId -> m (Int, Bool)
threadCapability = liftIO . C.threadCapability
# INLINABLE threadCapability #
yield :: MonadIO m => m ()
yield = liftIO C.yield
# INLINABLE yield #
| Lifted version of ' C.threadDelay ' .
threadDelay :: MonadIO m => Int -> m ()
threadDelay = liftIO . C.threadDelay
# INLINABLE threadDelay #
| Lifted version of ' ' .
threadWaitRead :: MonadIO m => Fd -> m ()
threadWaitRead = liftIO . C.threadWaitRead
# INLINABLE threadWaitRead #
threadWaitWrite :: MonadIO m => Fd -> m ()
threadWaitWrite = liftIO . C.threadWaitWrite
# INLINABLE threadWaitWrite #
| Unflifted version of ' ' .
forkOS :: MonadUnliftIO m => m () -> m ThreadId
forkOS m = withRunInIO $ \run -> C.forkOS $ run m
isCurrentThreadBound :: MonadIO m => m Bool
isCurrentThreadBound = liftIO C.isCurrentThreadBound
runInBoundThread :: MonadUnliftIO m => m a -> m a
runInBoundThread m = withRunInIO $ \run -> C.runInBoundThread $ run m
# INLINABLE runInBoundThread #
runInUnboundThread :: MonadUnliftIO m => m a -> m a
runInUnboundThread m = withRunInIO $ \run -> C.runInUnboundThread $ run m
# INLINABLE runInUnboundThread #
mkWeakThreadId :: MonadIO m => ThreadId -> m (Weak ThreadId)
mkWeakThreadId = liftIO . C.mkWeakThreadId
|
563e1a225dcd27f33f74d1854964f4bcf28661fdd6fdb59ff5e24c0b3db42c0a | hellonico/origami-fun | writing.clj | (ns opencv4.video.writing
(:require [opencv4.core :refer :all])
(:require [opencv4.utils :as u])
(:import
[org.opencv.videoio Videoio VideoCapture VideoWriter]
[org.opencv.video Video]))
(def capture (VideoCapture.))
(def outputVideo (VideoWriter.))
( VideoWriter / fourcc \M \P \E \G )
( VideoWriter / fourcc \X )
(.open
outputVideo
"hello.mp4"
(VideoWriter/fourcc \M \J \P \G)
30
(new-size 400 300))
(def buffer
(new-mat 400 300 CV_8UC3))
(.open capture 0)
(.set capture Videoio/CAP_PROP_FRAME_WIDTH 400)
(.set capture Videoio/CAP_PROP_FRAME_HEIGHT 300)
(dotimes [i 150]
(.read capture buffer)
; (cvt-color! buffer COLOR_RGB2GRAY)
; (rotate! buffer ROTATE_90_CLOCKWISE)
; (put-text buffer "Funny text inside the box"
( new - point ( / ( .rows buffer ) 2 ) ( / ( .cols buffer ) 2 ) )
FONT_ITALIC 1.0 ( new - scalar 255 ) )
; (flip! buffer -1)
(resize! buffer (new-size 400 300))
( println ( .size buffer ) )
(.write outputVideo buffer))
(.release capture)
; (.release outputVideo)
| null | https://raw.githubusercontent.com/hellonico/origami-fun/80117788530d942eaa9a80e2995b37409fa24889/test/opencv4/video/writing.clj | clojure | (cvt-color! buffer COLOR_RGB2GRAY)
(rotate! buffer ROTATE_90_CLOCKWISE)
(put-text buffer "Funny text inside the box"
(flip! buffer -1)
(.release outputVideo) | (ns opencv4.video.writing
(:require [opencv4.core :refer :all])
(:require [opencv4.utils :as u])
(:import
[org.opencv.videoio Videoio VideoCapture VideoWriter]
[org.opencv.video Video]))
(def capture (VideoCapture.))
(def outputVideo (VideoWriter.))
( VideoWriter / fourcc \M \P \E \G )
( VideoWriter / fourcc \X )
(.open
outputVideo
"hello.mp4"
(VideoWriter/fourcc \M \J \P \G)
30
(new-size 400 300))
(def buffer
(new-mat 400 300 CV_8UC3))
(.open capture 0)
(.set capture Videoio/CAP_PROP_FRAME_WIDTH 400)
(.set capture Videoio/CAP_PROP_FRAME_HEIGHT 300)
(dotimes [i 150]
(.read capture buffer)
( new - point ( / ( .rows buffer ) 2 ) ( / ( .cols buffer ) 2 ) )
FONT_ITALIC 1.0 ( new - scalar 255 ) )
(resize! buffer (new-size 400 300))
( println ( .size buffer ) )
(.write outputVideo buffer))
(.release capture)
|
f4858b96afa5a9d14f279e78d575da9a6c46b0eccf66424f45d6ec9f90555e56 | Haskell-OpenAPI-Code-Generator/Stripe-Haskell-Library | SourceTypeP24.hs | {-# LANGUAGE MultiWayIf #-}
CHANGE WITH CAUTION : This is a generated code file generated by -OpenAPI-Code-Generator/Haskell-OpenAPI-Client-Code-Generator .
{-# LANGUAGE OverloadedStrings #-}
| Contains the types generated from the schema SourceTypeP24
module StripeAPI.Types.SourceTypeP24 where
import qualified Control.Monad.Fail
import qualified Data.Aeson
import qualified Data.Aeson as Data.Aeson.Encoding.Internal
import qualified Data.Aeson as Data.Aeson.Types
import qualified Data.Aeson as Data.Aeson.Types.FromJSON
import qualified Data.Aeson as Data.Aeson.Types.Internal
import qualified Data.Aeson as Data.Aeson.Types.ToJSON
import qualified Data.ByteString.Char8
import qualified Data.ByteString.Char8 as Data.ByteString.Internal
import qualified Data.Foldable
import qualified Data.Functor
import qualified Data.Maybe
import qualified Data.Scientific
import qualified Data.Text
import qualified Data.Text.Internal
import qualified Data.Time.Calendar as Data.Time.Calendar.Days
import qualified Data.Time.LocalTime as Data.Time.LocalTime.Internal.ZonedTime
import qualified GHC.Base
import qualified GHC.Classes
import qualified GHC.Int
import qualified GHC.Show
import qualified GHC.Types
import qualified StripeAPI.Common
import StripeAPI.TypeAlias
import qualified Prelude as GHC.Integer.Type
import qualified Prelude as GHC.Maybe
| Defines the object schema located at @components.schemas.source_type_p24@ in the specification .
data SourceTypeP24 = SourceTypeP24
{ -- | reference
sourceTypeP24Reference :: (GHC.Maybe.Maybe (StripeAPI.Common.Nullable Data.Text.Internal.Text))
}
deriving
( GHC.Show.Show,
GHC.Classes.Eq
)
instance Data.Aeson.Types.ToJSON.ToJSON SourceTypeP24 where
toJSON obj = Data.Aeson.Types.Internal.object (Data.Foldable.concat (Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("reference" Data.Aeson.Types.ToJSON..=)) (sourceTypeP24Reference obj) : GHC.Base.mempty))
toEncoding obj = Data.Aeson.Encoding.Internal.pairs (GHC.Base.mconcat (Data.Foldable.concat (Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("reference" Data.Aeson.Types.ToJSON..=)) (sourceTypeP24Reference obj) : GHC.Base.mempty)))
instance Data.Aeson.Types.FromJSON.FromJSON SourceTypeP24 where
parseJSON = Data.Aeson.Types.FromJSON.withObject "SourceTypeP24" (\obj -> GHC.Base.pure SourceTypeP24 GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..:! "reference"))
-- | Create a new 'SourceTypeP24' with all required fields.
mkSourceTypeP24 :: SourceTypeP24
mkSourceTypeP24 = SourceTypeP24 {sourceTypeP24Reference = GHC.Maybe.Nothing}
| null | https://raw.githubusercontent.com/Haskell-OpenAPI-Code-Generator/Stripe-Haskell-Library/ba4401f083ff054f8da68c741f762407919de42f/src/StripeAPI/Types/SourceTypeP24.hs | haskell | # LANGUAGE MultiWayIf #
# LANGUAGE OverloadedStrings #
| reference
| Create a new 'SourceTypeP24' with all required fields. | CHANGE WITH CAUTION : This is a generated code file generated by -OpenAPI-Code-Generator/Haskell-OpenAPI-Client-Code-Generator .
| Contains the types generated from the schema SourceTypeP24
module StripeAPI.Types.SourceTypeP24 where
import qualified Control.Monad.Fail
import qualified Data.Aeson
import qualified Data.Aeson as Data.Aeson.Encoding.Internal
import qualified Data.Aeson as Data.Aeson.Types
import qualified Data.Aeson as Data.Aeson.Types.FromJSON
import qualified Data.Aeson as Data.Aeson.Types.Internal
import qualified Data.Aeson as Data.Aeson.Types.ToJSON
import qualified Data.ByteString.Char8
import qualified Data.ByteString.Char8 as Data.ByteString.Internal
import qualified Data.Foldable
import qualified Data.Functor
import qualified Data.Maybe
import qualified Data.Scientific
import qualified Data.Text
import qualified Data.Text.Internal
import qualified Data.Time.Calendar as Data.Time.Calendar.Days
import qualified Data.Time.LocalTime as Data.Time.LocalTime.Internal.ZonedTime
import qualified GHC.Base
import qualified GHC.Classes
import qualified GHC.Int
import qualified GHC.Show
import qualified GHC.Types
import qualified StripeAPI.Common
import StripeAPI.TypeAlias
import qualified Prelude as GHC.Integer.Type
import qualified Prelude as GHC.Maybe
| Defines the object schema located at @components.schemas.source_type_p24@ in the specification .
data SourceTypeP24 = SourceTypeP24
sourceTypeP24Reference :: (GHC.Maybe.Maybe (StripeAPI.Common.Nullable Data.Text.Internal.Text))
}
deriving
( GHC.Show.Show,
GHC.Classes.Eq
)
instance Data.Aeson.Types.ToJSON.ToJSON SourceTypeP24 where
toJSON obj = Data.Aeson.Types.Internal.object (Data.Foldable.concat (Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("reference" Data.Aeson.Types.ToJSON..=)) (sourceTypeP24Reference obj) : GHC.Base.mempty))
toEncoding obj = Data.Aeson.Encoding.Internal.pairs (GHC.Base.mconcat (Data.Foldable.concat (Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("reference" Data.Aeson.Types.ToJSON..=)) (sourceTypeP24Reference obj) : GHC.Base.mempty)))
instance Data.Aeson.Types.FromJSON.FromJSON SourceTypeP24 where
parseJSON = Data.Aeson.Types.FromJSON.withObject "SourceTypeP24" (\obj -> GHC.Base.pure SourceTypeP24 GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..:! "reference"))
mkSourceTypeP24 :: SourceTypeP24
mkSourceTypeP24 = SourceTypeP24 {sourceTypeP24Reference = GHC.Maybe.Nothing}
|
a17f75d3efbe0a347caa5aab74159158d17c82ae89fc92794d26b00157f877a8 | nyu-acsys/drift | a-mapi.ml |
let make_array (n:int) (i:int) = assert (0 <= i && i < n); 0
let update (i:int) (n:int) a (x:int) =
a i;
let ap j = assert (0 <= i && i < n); if i = j then x else a j in ap
let rec mapi_helper (hf: int -> int) hi hn (ha: int -> int) (hb: int -> int) : int -> int =
if (hi < hn) then
let hb2 = update hi hn hb (hf hi (ha(hi))) in
mapi_helper hf (hi + 1) hn ha hb2
else hb
let mapi (mf: int -> int) ma mn : int -> int =
let mb = make_array mn in
mapi_helper mf 0 mn ma mb
let add_idx (sidx: int) (si: int) = sidx + si
let main (n:int) (k:int) =
let a = make_array n in
let b = mapi add_idx a n in
if k >= 0 && k < n then
(b(k); ())
else () | null | https://raw.githubusercontent.com/nyu-acsys/drift/51a3160d74b761626180da4f7dd0bb950cfe40c0/tests/benchmarks/r_type/array/a-mapi.ml | ocaml |
let make_array (n:int) (i:int) = assert (0 <= i && i < n); 0
let update (i:int) (n:int) a (x:int) =
a i;
let ap j = assert (0 <= i && i < n); if i = j then x else a j in ap
let rec mapi_helper (hf: int -> int) hi hn (ha: int -> int) (hb: int -> int) : int -> int =
if (hi < hn) then
let hb2 = update hi hn hb (hf hi (ha(hi))) in
mapi_helper hf (hi + 1) hn ha hb2
else hb
let mapi (mf: int -> int) ma mn : int -> int =
let mb = make_array mn in
mapi_helper mf 0 mn ma mb
let add_idx (sidx: int) (si: int) = sidx + si
let main (n:int) (k:int) =
let a = make_array n in
let b = mapi add_idx a n in
if k >= 0 && k < n then
(b(k); ())
else () | |
df2e14e75d4e65344e2b1280a447421fc6f1d5da47a91738d96ad76f9b906684 | input-output-hk/cardano-wallet | TxIn.hs | # LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
-- |
Copyright : © 2018 - 2022 IOHK
-- License: Apache-2.0
--
This module defines the ' ' type .
--
module Cardano.Wallet.Primitive.Types.Tx.TxIn
( TxIn (..)
) where
import Prelude
import Cardano.Wallet.Primitive.Types.Hash
( Hash (..) )
import Control.DeepSeq
( NFData (..) )
import Data.Word
( Word32 )
import Fmt
( Buildable (..), ordinalF )
import GHC.Generics
( Generic )
data TxIn = TxIn
{ inputId
:: !(Hash "Tx")
, inputIx
:: !Word32
}
deriving (Read, Show, Generic, Eq, Ord)
instance NFData TxIn
instance Buildable TxIn where
build txin = mempty
<> ordinalF (inputIx txin + 1)
<> " "
<> build (inputId txin)
| null | https://raw.githubusercontent.com/input-output-hk/cardano-wallet/157a6d5f977f4600373596b7cfa9700138e8e140/lib/primitive/lib/Cardano/Wallet/Primitive/Types/Tx/TxIn.hs | haskell | |
License: Apache-2.0
| # LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
Copyright : © 2018 - 2022 IOHK
This module defines the ' ' type .
module Cardano.Wallet.Primitive.Types.Tx.TxIn
( TxIn (..)
) where
import Prelude
import Cardano.Wallet.Primitive.Types.Hash
( Hash (..) )
import Control.DeepSeq
( NFData (..) )
import Data.Word
( Word32 )
import Fmt
( Buildable (..), ordinalF )
import GHC.Generics
( Generic )
data TxIn = TxIn
{ inputId
:: !(Hash "Tx")
, inputIx
:: !Word32
}
deriving (Read, Show, Generic, Eq, Ord)
instance NFData TxIn
instance Buildable TxIn where
build txin = mempty
<> ordinalF (inputIx txin + 1)
<> " "
<> build (inputId txin)
|
c6507edc30a61eb6e77a06d350991ba5a815403d86aad1405782f3728c2e830c | haskell/haskell-language-server | AutoForallClassMethod.hs | {-# LANGUAGE ExplicitForAll #-}
{-# LANGUAGE FlexibleContexts #-}
# LANGUAGE MultiParamTypeClasses #
import Data.Functor.Contravariant
class Semigroupal cat t1 t2 to f where
combine :: cat (to (f x y) (f x' y')) (f (t1 x x') (t2 y y'))
comux :: forall p a b c d. Semigroupal Op (,) (,) (,) p => p (a, c) (b, d) -> (p a b, p c d)
comux = _
| null | https://raw.githubusercontent.com/haskell/haskell-language-server/f3ad27ba1634871b2240b8cd7de9f31b91a2e502/plugins/hls-tactics-plugin/new/test/golden/AutoForallClassMethod.hs | haskell | # LANGUAGE ExplicitForAll #
# LANGUAGE FlexibleContexts # | # LANGUAGE MultiParamTypeClasses #
import Data.Functor.Contravariant
class Semigroupal cat t1 t2 to f where
combine :: cat (to (f x y) (f x' y')) (f (t1 x x') (t2 y y'))
comux :: forall p a b c d. Semigroupal Op (,) (,) (,) p => p (a, c) (b, d) -> (p a b, p c d)
comux = _
|
4d922433009409235fb0048ad2fd56f8c601c8d6203122623a38c4394ed4906e | jordanthayer/ocaml-search | actionListener.ml | (** Action Listener for the visualization tool *)
(** Input events the visualization tool reacts to; produced from
    keystrokes by [keyToEvent] below. *)
type events =
  | StepForward (* bound to 'e' in [keyToEvent] *)
  | StepBack (* bound to 'a' *)
  | CycleSelected (* no key bound in this file's [keyToEvent] *)
  | CycleContext (* bound to 'o' *)
  | Kill (* bound to 'k' *)
  | StoreImage (* no key bound in this file's [keyToEvent] *)
  | NotRecognized (* fallback for any unmapped key *)
(** Translate a keystroke into the event it is bound to.  Keys without a
    binding are reported on the console via [Verb.pe] and mapped to
    [NotRecognized]. *)
let keyToEvent = function
  | 'a' -> StepBack
  | 'e' -> StepForward
  | 'k' -> Kill
  | 'o' -> CycleContext
  | k ->
      Verb.pe Verb.always "%s" (Wrutils.str "|%c| not recognized\n%!" k);
      NotRecognized
(*
let get_event () =
(** Returns the current keystroke as an event *)
keyToEvent (Graphics.read_key())
*)
EOF
| null | https://raw.githubusercontent.com/jordanthayer/ocaml-search/57cfc85417aa97ee5d8fbcdb84c333aae148175f/search_vis/actionListener.ml | ocaml | * Action Listener for the visualization tool
* converts the keystroke [k] into the appropriate event.
let get_event () =
(** Returns the current keystroke as an event |
type events =
| StepForward
| StepBack
| CycleSelected
| CycleContext
| Kill
| StoreImage
| NotRecognized
let keyToEvent k =
match k with
| 'k' -> Kill
| 'e' -> StepForward
| 'a' -> StepBack
| 'o' -> CycleContext
| _ -> (Verb.pe Verb.always "%s" (Wrutils.str "|%c| not recognized\n%!" k);
NotRecognized)
keyToEvent (Graphics.read_key())
*)
EOF
|
451b9374a71c9fd9b2497d9f7b8f79c31d1069141ad7b715fc45c44df6ab3025 | andrewmcloud/consimilo | minhash.clj | (ns consimilo.minhash
(:require [consimilo.random-seed :as rseed]
[consimilo.sha1 :as sha]
[consimilo.config :as config]
[consimilo.minhash-util :as util]
[clojure.core :exclude [rand-int]]
[clojure.tools.logging :as log]))
;; Modulus for the permutation hashes: per the original author a prime,
;; and in any case larger than the largest SHA-1 digest value (2^160),
;; so reduced hash values keep the digest's full range.
(def large-prime 3064991081731777716716694054300618367237478244367416721N)
(defn- init-hashvalues
  "Initializes a minhash signature: a vector of config/perms entries, each
   set to large-prime — the sentinel that plays the role of +infinity for
   the later elementwise-min updates."
  []
  (vec (repeat config/perms large-prime)))
(defn- build-permutations
  "Builds the two random coefficient vectors :a and :b (one coefficient
   pair per hash permutation) that simulate vector permutations. The RNG
   is seeded with config/seed first, so the result is deterministic
   across runs."
  []
  (rseed/set-random-seed! config/seed)
  {:a (rseed/rand-vec config/perms large-prime)
   :b (rseed/rand-vec config/perms large-prime)})
;; Build the seeded permutation coefficients once and share them across
;; every minhash: identical coefficients are what make incremental,
;; one-feature-at-a-time minhashing (and later merging) meaningful.
(defonce permutations (build-permutations))
(defn update-minhash
  "Folds one document feature (token, shingle, n-gram, ...) into the
   signature: the feature's string form is SHA-1 hashed to a bigint,
   pushed through each universal hash (a*h + b) mod large-prime, and the
   signature keeps the elementwise minimum. One minhash should be built
   per document."
  [hashvalues feature]
  (let [hv (sha/get-hash-bigint (str feature))
        a (:a permutations)
        b (:b permutations)]
    (-> (util/scalar-mul a hv)
        (util/elementwise-add b)
        (util/scalar-mod large-prime)
        (util/elementwise-min hashvalues))))
(defn build-minhash
  "Builds a complete minhash signature from a document's feature
   collection, e.g. [\"token-1\" \"token-2\" ...], folding each feature in
   with update-minhash. NOTE: the head/tail destructuring stops at the
   first nil feature, so feature collections are expected to be nil-free."
  ([feature-coll]
   (build-minhash feature-coll (init-hashvalues)))
  ([[feature & features] hashvalues]
   (if (nil? feature)
     (vec hashvalues)
     (recur features (update-minhash hashvalues feature)))))
(defn merge-minhash
  "Merges two minhash signatures into one by taking the elementwise
   minimum of the two vectors."
  [mh-a mh-b]
  (util/elementwise-min mh-a mh-b))
| null | https://raw.githubusercontent.com/andrewmcloud/consimilo/db96c1695248c3486e1d23de5589b39f0e0bd49f/src/consimilo/minhash.clj | clojure | prime number larger than sha1 hash
build seeded vector permutations once. They are the same for every minhash
which allows incremental minhashing a single vector at a time. | (ns consimilo.minhash
(:require [consimilo.random-seed :as rseed]
[consimilo.sha1 :as sha]
[consimilo.config :as config]
[consimilo.minhash-util :as util]
[clojure.core :exclude [rand-int]]
[clojure.tools.logging :as log]))
(def large-prime 3064991081731777716716694054300618367237478244367416721N)
(defn- init-hashvalues
"initializes minhash signature to infinity"
[]
(vec (repeat config/perms large-prime)))
(defn- build-permutations
"builds seeded random number populated vectors to simulate
the vector permutations a and b"
[]
(rseed/set-random-seed! config/seed)
{:a (rseed/rand-vec config/perms large-prime)
:b (rseed/rand-vec config/perms large-prime)})
(defonce permutations (build-permutations))
(defn update-minhash
"updates minhash with each document feature (token, shingle, n-gram, etc...)
Tokens are hashed using sha1 hash and truncated at max-hash to allow hashing
of documents with varying feature sizes. One minhash should be created for
each document"
[hashvalues feature]
(let [hv (sha/get-hash-bigint (str feature))
a (:a permutations)
b (:b permutations)]
(-> (util/scalar-mul a hv)
(util/elementwise-add b)
(util/scalar-mod large-prime)
(util/elementwise-min hashvalues))))
(defn build-minhash
"iterates through a document feature collection: ['token-1' token-2' ... 'token-n],
updating the minhash with each feature. Complete minhash is returned."
([feature-coll]
(build-minhash feature-coll (init-hashvalues)))
([[feature & features] hashvalues]
(if (nil? feature)
(vec hashvalues)
(recur features (update-minhash hashvalues feature)))))
(defn merge-minhash
"merges two minhashes together by taking the elementwise minimum between the two
minhash vectors"
[minhash1 minhash2]
(util/elementwise-min minhash1 minhash2))
|
a30d440cbeac2920f1a623cf12ac0b50a4c82711d4c3e36862f6d570677b22c1 | favonia/ocaml-objdump | ExamplesWithPointers.ml | let test x = Format.printf "%a@." Objdump.pp x
(* Each [let ()] below feeds a value to [test] (defined above), which
   prints the value's runtime representation via [Objdump.pp]. *)
let () = test (fun () -> 1)
let () = test (("fst", "snd"), 2, 3.0, (), fun () -> ())
(* A closure capturing [x]. *)
let f x = (42, fun () -> x)
let () = test (f 1000)
(* Mutually recursive closures. *)
let rec f x = g x
and g x = f x
let () = test f
let () = test g
(* Dump a continuation: perform effect [F]; the handler prints the
   captured continuation [k] before resuming it. *)
type _ Effect.t += F : unit Effect.t
let () = Effect.Deep.try_with Effect.perform F
    { effc = fun (type a) (e : a Effect.t) ->
        match e with
        | F -> Some (fun (k : (a, _) Effect.Deep.continuation) -> test k; Effect.Deep.continue k ())
        | _ -> None }
| null | https://raw.githubusercontent.com/favonia/ocaml-objdump/11ba435afabb5eb0866a79225dad548b06a964cb/test/ExamplesWithPointers.ml | ocaml | let test x = Format.printf "%a@." Objdump.pp x
let () = test (fun () -> 1)
let () = test (("fst", "snd"), 2, 3.0, (), fun () -> ())
let f x = (42, fun () -> x)
let () = test (f 1000)
let rec f x = g x
and g x = f x
let () = test f
let () = test g
type _ Effect.t += F : unit Effect.t
let () = Effect.Deep.try_with Effect.perform F
{ effc = fun (type a) (e : a Effect.t) ->
match e with
| F -> Some (fun (k : (a, _) Effect.Deep.continuation) -> test k; Effect.Deep.continue k ())
| _ -> None }
| |
3ebbcdca387cb8e46f38220692dfddddca0e02fda5ac5bc644ad8474a6003a70 | tommaisey/aeon | pow.help.scm | ;; (pow a b)
;; Exponentiation, written ** in sclang. When the signal is negative
;; this function extends the usual definition of exponentiation and
;; returns neg(neg(a) ** b). This allows exponentiation of negative
;; signal values by noninteger exponents.
;; Example: left channel is a quiet 100 Hz sine; right channel is the same
;; signal raised to the 10th power (pow keeps the sign, per the note above).
(audition
 (out 0 (let ((a (mul (f-sin-osc ar 100 0) 0.1)))
          (mce2 a (pow a 10)))))
;; Reference: sc-users mailing-list archive, post 2006-December/029998.html
;; (original link truncated).
;; Example: sample-and-hold "bit crush". [s] is a band-limited pulse with
;; wandering frequency (n0) and harmonic count (n1); mouse-x sets the
;; re-sample (latch) rate, mouse-y the quantization step (0.5 ^ y).
;; Left channel: latched signal [d]; right channel: quantized version [b].
(let* ((n0 (mul-add (lf-noise2 kr 8) 200 300))
       (n1 (mul-add (lf-noise2 kr 3) 10 20))
       (s (blip ar n0 n1))
       (x (mouse-x kr 1000 (mul sample-rate 0.5) 1 0.1))
       (y (mouse-y kr 1 24 1 0.1))
       (d (latch s (impulse ar x 0)))
       (b (u:round d (pow 0.5 y))))
  (audition
   (out 0 (mce2 d b))))
| null | https://raw.githubusercontent.com/tommaisey/aeon/80744a7235425c47a061ec8324d923c53ebedf15/libs/third-party/sc3/rsc3/help/ugen/binary-ops/pow.help.scm | scheme | (pow a b)
Exponentiation, written ** in sclang. When the signal is negative
this function extends the usual definition of exponentiation and
returns neg(neg(a) ** b). This allows exponentiation of negative
-users/2006-December/029998.html |
signal values by noninteger exponents .
(audition
(out 0 (let ((a (mul (f-sin-osc ar 100 0) 0.1)))
(mce2 a (pow a 10)))))
(let* ((n0 (mul-add (lf-noise2 kr 8) 200 300))
(n1 (mul-add (lf-noise2 kr 3) 10 20))
(s (blip ar n0 n1))
(x (mouse-x kr 1000 (mul sample-rate 0.5) 1 0.1))
(y (mouse-y kr 1 24 1 0.1))
(d (latch s (impulse ar x 0)))
(b (u:round d (pow 0.5 y))))
(audition
(out 0 (mce2 d b))))
|
e37a60d9686f0a24ddef9fc938d2861ec83a7bef61ba631a4f1d50797d85a014 | brendanhay/amazonka | WAFV2.hs | # OPTIONS_GHC -fno - warn - orphans #
# OPTIONS_GHC -fno - warn - unused - imports #
Derived from AWS service descriptions , licensed under Apache 2.0 .
-- |
-- Module : Test.Amazonka.Gen.WAFV2
Copyright : ( c ) 2013 - 2023
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
-- Stability : auto-generated
Portability : non - portable ( GHC extensions )
module Test.Amazonka.Gen.WAFV2 where
import Amazonka.WAFV2
import qualified Data.Proxy as Proxy
import Test.Amazonka.Fixture
import Test.Amazonka.Prelude
import Test.Amazonka.WAFV2.Internal
import Test.Tasty
-- Auto-generated: the actual test selection needs to be manually placed into
-- the top-level so that real test data can be incrementally added.
--
-- This commented snippet is what the entire set should look like:
-- fixtures :: TestTree
-- fixtures =
-- [ testGroup "request"
-- [ requestAssociateWebACL $
-- newAssociateWebACL
--
-- , requestCheckCapacity $
-- newCheckCapacity
--
-- , requestCreateIPSet $
-- newCreateIPSet
--
-- , requestCreateRegexPatternSet $
-- newCreateRegexPatternSet
--
-- , requestCreateRuleGroup $
-- newCreateRuleGroup
--
-- , requestCreateWebACL $
-- newCreateWebACL
--
-- , requestDeleteFirewallManagerRuleGroups $
newDeleteFirewallManagerRuleGroups
--
-- , requestDeleteIPSet $
-- newDeleteIPSet
--
-- , requestDeleteLoggingConfiguration $
-- newDeleteLoggingConfiguration
--
-- , requestDeletePermissionPolicy $
--
-- , requestDeleteRegexPatternSet $
-- newDeleteRegexPatternSet
--
-- , requestDeleteRuleGroup $
-- newDeleteRuleGroup
--
-- , requestDeleteWebACL $
-- newDeleteWebACL
--
-- , requestDescribeManagedRuleGroup $
-- newDescribeManagedRuleGroup
--
-- , requestDisassociateWebACL $
-- newDisassociateWebACL
--
-- , requestGenerateMobileSdkReleaseUrl $
-- newGenerateMobileSdkReleaseUrl
--
-- , requestGetIPSet $
newGetIPSet
--
-- , requestGetLoggingConfiguration $
-- newGetLoggingConfiguration
--
-- , requestGetManagedRuleSet $
newGetManagedRuleSet
--
-- , requestGetMobileSdkRelease $
-- newGetMobileSdkRelease
--
,
--
-- , requestGetRateBasedStatementManagedKeys $
-- newGetRateBasedStatementManagedKeys
--
-- , requestGetRegexPatternSet $
-- newGetRegexPatternSet
--
-- , requestGetRuleGroup $
-- newGetRuleGroup
--
-- , requestGetSampledRequests $
-- newGetSampledRequests
--
-- , requestGetWebACL $
-- newGetWebACL
--
-- , requestGetWebACLForResource $
-- newGetWebACLForResource
--
-- , requestListAvailableManagedRuleGroupVersions $
newListAvailableManagedRuleGroupVersions
--
-- , requestListAvailableManagedRuleGroups $
-- newListAvailableManagedRuleGroups
--
-- , requestListIPSets $
-- newListIPSets
--
-- , requestListLoggingConfigurations $
-- newListLoggingConfigurations
--
-- , requestListManagedRuleSets $
newListManagedRuleSets
--
-- , requestListMobileSdkReleases $
-- newListMobileSdkReleases
--
-- , requestListRegexPatternSets $
-- newListRegexPatternSets
--
-- , requestListResourcesForWebACL $
-- newListResourcesForWebACL
--
-- , requestListRuleGroups $
-- newListRuleGroups
--
-- , requestListTagsForResource $
--
-- , requestListWebACLs $
-- newListWebACLs
--
-- , requestPutLoggingConfiguration $
-- newPutLoggingConfiguration
--
-- , requestPutManagedRuleSetVersions $
-- newPutManagedRuleSetVersions
--
-- , requestPutPermissionPolicy $
-- newPutPermissionPolicy
--
-- , requestTagResource $
newTagResource
--
-- , requestUntagResource $
-- newUntagResource
--
-- , requestUpdateIPSet $
-- newUpdateIPSet
--
-- , requestUpdateManagedRuleSetVersionExpiryDate $
--
-- , requestUpdateRegexPatternSet $
-- newUpdateRegexPatternSet
--
-- , requestUpdateRuleGroup $
-- newUpdateRuleGroup
--
-- , requestUpdateWebACL $
-- newUpdateWebACL
--
-- ]
-- , testGroup "response"
-- [ responseAssociateWebACL $
-- newAssociateWebACLResponse
--
-- , responseCheckCapacity $
-- newCheckCapacityResponse
--
-- , responseCreateIPSet $
-- newCreateIPSetResponse
--
-- , responseCreateRegexPatternSet $
-- newCreateRegexPatternSetResponse
--
-- , responseCreateRuleGroup $
-- newCreateRuleGroupResponse
--
-- , responseCreateWebACL $
newCreateWebACLResponse
--
-- , responseDeleteFirewallManagerRuleGroups $
-- newDeleteFirewallManagerRuleGroupsResponse
--
-- , responseDeleteIPSet $
-- newDeleteIPSetResponse
--
-- , responseDeleteLoggingConfiguration $
newDeleteLoggingConfigurationResponse
--
-- , responseDeletePermissionPolicy $
-- newDeletePermissionPolicyResponse
--
-- , responseDeleteRegexPatternSet $
-- newDeleteRegexPatternSetResponse
--
-- , responseDeleteRuleGroup $
newDeleteRuleGroupResponse
--
-- , responseDeleteWebACL $
-- newDeleteWebACLResponse
--
-- , responseDescribeManagedRuleGroup $
-- newDescribeManagedRuleGroupResponse
--
-- , responseDisassociateWebACL $
-- newDisassociateWebACLResponse
--
-- , responseGenerateMobileSdkReleaseUrl $
-- newGenerateMobileSdkReleaseUrlResponse
--
-- , responseGetIPSet $
-- newGetIPSetResponse
--
-- , responseGetLoggingConfiguration $
-- newGetLoggingConfigurationResponse
--
-- , responseGetManagedRuleSet $
-- newGetManagedRuleSetResponse
--
-- , responseGetMobileSdkRelease $
-- newGetMobileSdkReleaseResponse
--
-- , responseGetPermissionPolicy $
-- newGetPermissionPolicyResponse
--
, responseGetRateBasedStatementManagedKeys $
-- newGetRateBasedStatementManagedKeysResponse
--
-- , responseGetRegexPatternSet $
-- newGetRegexPatternSetResponse
--
-- , responseGetRuleGroup $
-- newGetRuleGroupResponse
--
-- , responseGetSampledRequests $
-- newGetSampledRequestsResponse
--
-- , responseGetWebACL $
-- newGetWebACLResponse
--
-- , responseGetWebACLForResource $
-- newGetWebACLForResourceResponse
--
-- , responseListAvailableManagedRuleGroupVersions $
-- newListAvailableManagedRuleGroupVersionsResponse
--
-- , responseListAvailableManagedRuleGroups $
-- newListAvailableManagedRuleGroupsResponse
--
-- , responseListIPSets $
newListIPSetsResponse
--
-- , responseListLoggingConfigurations $
-- newListLoggingConfigurationsResponse
--
-- , responseListManagedRuleSets $
-- newListManagedRuleSetsResponse
--
-- , responseListMobileSdkReleases $
-- newListMobileSdkReleasesResponse
--
-- , responseListRegexPatternSets $
-- newListRegexPatternSetsResponse
--
-- , responseListResourcesForWebACL $
-- newListResourcesForWebACLResponse
--
-- , responseListRuleGroups $
-- newListRuleGroupsResponse
--
-- , responseListTagsForResource $
-- newListTagsForResourceResponse
--
-- , responseListWebACLs $
-- newListWebACLsResponse
--
-- , responsePutLoggingConfiguration $
-- newPutLoggingConfigurationResponse
--
-- , responsePutManagedRuleSetVersions $
newPutManagedRuleSetVersionsResponse
--
-- , responsePutPermissionPolicy $
newPutPermissionPolicyResponse
--
-- , responseTagResource $
-- newTagResourceResponse
--
-- , responseUntagResource $
-- newUntagResourceResponse
--
-- , responseUpdateIPSet $
--
-- , responseUpdateManagedRuleSetVersionExpiryDate $
-- newUpdateManagedRuleSetVersionExpiryDateResponse
--
-- , responseUpdateRegexPatternSet $
-- newUpdateRegexPatternSetResponse
--
-- , responseUpdateRuleGroup $
-- newUpdateRuleGroupResponse
--
-- , responseUpdateWebACL $
-- newUpdateWebACLResponse
--
-- ]
-- ]
-- Requests
requestAssociateWebACL :: AssociateWebACL -> TestTree
requestAssociateWebACL =
req
"AssociateWebACL"
"fixture/AssociateWebACL.yaml"
requestCheckCapacity :: CheckCapacity -> TestTree
requestCheckCapacity =
req
"CheckCapacity"
"fixture/CheckCapacity.yaml"
requestCreateIPSet :: CreateIPSet -> TestTree
requestCreateIPSet =
req
"CreateIPSet"
"fixture/CreateIPSet.yaml"
requestCreateRegexPatternSet :: CreateRegexPatternSet -> TestTree
requestCreateRegexPatternSet =
req
"CreateRegexPatternSet"
"fixture/CreateRegexPatternSet.yaml"
requestCreateRuleGroup :: CreateRuleGroup -> TestTree
requestCreateRuleGroup =
req
"CreateRuleGroup"
"fixture/CreateRuleGroup.yaml"
requestCreateWebACL :: CreateWebACL -> TestTree
requestCreateWebACL =
req
"CreateWebACL"
"fixture/CreateWebACL.yaml"
requestDeleteFirewallManagerRuleGroups :: DeleteFirewallManagerRuleGroups -> TestTree
requestDeleteFirewallManagerRuleGroups =
req
"DeleteFirewallManagerRuleGroups"
"fixture/DeleteFirewallManagerRuleGroups.yaml"
requestDeleteIPSet :: DeleteIPSet -> TestTree
requestDeleteIPSet =
req
"DeleteIPSet"
"fixture/DeleteIPSet.yaml"
requestDeleteLoggingConfiguration :: DeleteLoggingConfiguration -> TestTree
requestDeleteLoggingConfiguration =
req
"DeleteLoggingConfiguration"
"fixture/DeleteLoggingConfiguration.yaml"
requestDeletePermissionPolicy :: DeletePermissionPolicy -> TestTree
requestDeletePermissionPolicy =
req
"DeletePermissionPolicy"
"fixture/DeletePermissionPolicy.yaml"
requestDeleteRegexPatternSet :: DeleteRegexPatternSet -> TestTree
requestDeleteRegexPatternSet =
req
"DeleteRegexPatternSet"
"fixture/DeleteRegexPatternSet.yaml"
requestDeleteRuleGroup :: DeleteRuleGroup -> TestTree
requestDeleteRuleGroup =
req
"DeleteRuleGroup"
"fixture/DeleteRuleGroup.yaml"
requestDeleteWebACL :: DeleteWebACL -> TestTree
requestDeleteWebACL =
req
"DeleteWebACL"
"fixture/DeleteWebACL.yaml"
requestDescribeManagedRuleGroup :: DescribeManagedRuleGroup -> TestTree
requestDescribeManagedRuleGroup =
req
"DescribeManagedRuleGroup"
"fixture/DescribeManagedRuleGroup.yaml"
requestDisassociateWebACL :: DisassociateWebACL -> TestTree
requestDisassociateWebACL =
req
"DisassociateWebACL"
"fixture/DisassociateWebACL.yaml"
requestGenerateMobileSdkReleaseUrl :: GenerateMobileSdkReleaseUrl -> TestTree
requestGenerateMobileSdkReleaseUrl =
req
"GenerateMobileSdkReleaseUrl"
"fixture/GenerateMobileSdkReleaseUrl.yaml"
requestGetIPSet :: GetIPSet -> TestTree
requestGetIPSet =
req
"GetIPSet"
"fixture/GetIPSet.yaml"
requestGetLoggingConfiguration :: GetLoggingConfiguration -> TestTree
requestGetLoggingConfiguration =
req
"GetLoggingConfiguration"
"fixture/GetLoggingConfiguration.yaml"
requestGetManagedRuleSet :: GetManagedRuleSet -> TestTree
requestGetManagedRuleSet =
req
"GetManagedRuleSet"
"fixture/GetManagedRuleSet.yaml"
requestGetMobileSdkRelease :: GetMobileSdkRelease -> TestTree
requestGetMobileSdkRelease =
req
"GetMobileSdkRelease"
"fixture/GetMobileSdkRelease.yaml"
requestGetPermissionPolicy :: GetPermissionPolicy -> TestTree
requestGetPermissionPolicy =
req
"GetPermissionPolicy"
"fixture/GetPermissionPolicy.yaml"
requestGetRateBasedStatementManagedKeys :: GetRateBasedStatementManagedKeys -> TestTree
requestGetRateBasedStatementManagedKeys =
req
"GetRateBasedStatementManagedKeys"
"fixture/GetRateBasedStatementManagedKeys.yaml"
requestGetRegexPatternSet :: GetRegexPatternSet -> TestTree
requestGetRegexPatternSet =
req
"GetRegexPatternSet"
"fixture/GetRegexPatternSet.yaml"
requestGetRuleGroup :: GetRuleGroup -> TestTree
requestGetRuleGroup =
req
"GetRuleGroup"
"fixture/GetRuleGroup.yaml"
requestGetSampledRequests :: GetSampledRequests -> TestTree
requestGetSampledRequests =
req
"GetSampledRequests"
"fixture/GetSampledRequests.yaml"
requestGetWebACL :: GetWebACL -> TestTree
requestGetWebACL =
req
"GetWebACL"
"fixture/GetWebACL.yaml"
requestGetWebACLForResource :: GetWebACLForResource -> TestTree
requestGetWebACLForResource =
req
"GetWebACLForResource"
"fixture/GetWebACLForResource.yaml"
requestListAvailableManagedRuleGroupVersions :: ListAvailableManagedRuleGroupVersions -> TestTree
requestListAvailableManagedRuleGroupVersions =
req
"ListAvailableManagedRuleGroupVersions"
"fixture/ListAvailableManagedRuleGroupVersions.yaml"
requestListAvailableManagedRuleGroups :: ListAvailableManagedRuleGroups -> TestTree
requestListAvailableManagedRuleGroups =
req
"ListAvailableManagedRuleGroups"
"fixture/ListAvailableManagedRuleGroups.yaml"
requestListIPSets :: ListIPSets -> TestTree
requestListIPSets =
req
"ListIPSets"
"fixture/ListIPSets.yaml"
requestListLoggingConfigurations :: ListLoggingConfigurations -> TestTree
requestListLoggingConfigurations =
req
"ListLoggingConfigurations"
"fixture/ListLoggingConfigurations.yaml"
requestListManagedRuleSets :: ListManagedRuleSets -> TestTree
requestListManagedRuleSets =
req
"ListManagedRuleSets"
"fixture/ListManagedRuleSets.yaml"
requestListMobileSdkReleases :: ListMobileSdkReleases -> TestTree
requestListMobileSdkReleases =
req
"ListMobileSdkReleases"
"fixture/ListMobileSdkReleases.yaml"
requestListRegexPatternSets :: ListRegexPatternSets -> TestTree
requestListRegexPatternSets =
req
"ListRegexPatternSets"
"fixture/ListRegexPatternSets.yaml"
requestListResourcesForWebACL :: ListResourcesForWebACL -> TestTree
requestListResourcesForWebACL =
req
"ListResourcesForWebACL"
"fixture/ListResourcesForWebACL.yaml"
requestListRuleGroups :: ListRuleGroups -> TestTree
requestListRuleGroups =
req
"ListRuleGroups"
"fixture/ListRuleGroups.yaml"
requestListTagsForResource :: ListTagsForResource -> TestTree
requestListTagsForResource =
req
"ListTagsForResource"
"fixture/ListTagsForResource.yaml"
requestListWebACLs :: ListWebACLs -> TestTree
requestListWebACLs =
req
"ListWebACLs"
"fixture/ListWebACLs.yaml"
requestPutLoggingConfiguration :: PutLoggingConfiguration -> TestTree
requestPutLoggingConfiguration =
req
"PutLoggingConfiguration"
"fixture/PutLoggingConfiguration.yaml"
requestPutManagedRuleSetVersions :: PutManagedRuleSetVersions -> TestTree
requestPutManagedRuleSetVersions =
req
"PutManagedRuleSetVersions"
"fixture/PutManagedRuleSetVersions.yaml"
requestPutPermissionPolicy :: PutPermissionPolicy -> TestTree
requestPutPermissionPolicy =
req
"PutPermissionPolicy"
"fixture/PutPermissionPolicy.yaml"
requestTagResource :: TagResource -> TestTree
requestTagResource =
req
"TagResource"
"fixture/TagResource.yaml"
requestUntagResource :: UntagResource -> TestTree
requestUntagResource =
req
"UntagResource"
"fixture/UntagResource.yaml"
requestUpdateIPSet :: UpdateIPSet -> TestTree
requestUpdateIPSet =
req
"UpdateIPSet"
"fixture/UpdateIPSet.yaml"
requestUpdateManagedRuleSetVersionExpiryDate :: UpdateManagedRuleSetVersionExpiryDate -> TestTree
requestUpdateManagedRuleSetVersionExpiryDate =
req
"UpdateManagedRuleSetVersionExpiryDate"
"fixture/UpdateManagedRuleSetVersionExpiryDate.yaml"
requestUpdateRegexPatternSet :: UpdateRegexPatternSet -> TestTree
requestUpdateRegexPatternSet =
req
"UpdateRegexPatternSet"
"fixture/UpdateRegexPatternSet.yaml"
requestUpdateRuleGroup :: UpdateRuleGroup -> TestTree
requestUpdateRuleGroup =
req
"UpdateRuleGroup"
"fixture/UpdateRuleGroup.yaml"
requestUpdateWebACL :: UpdateWebACL -> TestTree
requestUpdateWebACL =
req
"UpdateWebACL"
"fixture/UpdateWebACL.yaml"
-- Responses
responseAssociateWebACL :: AssociateWebACLResponse -> TestTree
responseAssociateWebACL =
res
"AssociateWebACLResponse"
"fixture/AssociateWebACLResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy AssociateWebACL)
responseCheckCapacity :: CheckCapacityResponse -> TestTree
responseCheckCapacity =
res
"CheckCapacityResponse"
"fixture/CheckCapacityResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy CheckCapacity)
responseCreateIPSet :: CreateIPSetResponse -> TestTree
responseCreateIPSet =
res
"CreateIPSetResponse"
"fixture/CreateIPSetResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy CreateIPSet)
responseCreateRegexPatternSet :: CreateRegexPatternSetResponse -> TestTree
responseCreateRegexPatternSet =
res
"CreateRegexPatternSetResponse"
"fixture/CreateRegexPatternSetResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy CreateRegexPatternSet)
responseCreateRuleGroup :: CreateRuleGroupResponse -> TestTree
responseCreateRuleGroup =
res
"CreateRuleGroupResponse"
"fixture/CreateRuleGroupResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy CreateRuleGroup)
responseCreateWebACL :: CreateWebACLResponse -> TestTree
responseCreateWebACL =
res
"CreateWebACLResponse"
"fixture/CreateWebACLResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy CreateWebACL)
responseDeleteFirewallManagerRuleGroups :: DeleteFirewallManagerRuleGroupsResponse -> TestTree
responseDeleteFirewallManagerRuleGroups =
res
"DeleteFirewallManagerRuleGroupsResponse"
"fixture/DeleteFirewallManagerRuleGroupsResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy DeleteFirewallManagerRuleGroups)
responseDeleteIPSet :: DeleteIPSetResponse -> TestTree
responseDeleteIPSet =
res
"DeleteIPSetResponse"
"fixture/DeleteIPSetResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy DeleteIPSet)
responseDeleteLoggingConfiguration :: DeleteLoggingConfigurationResponse -> TestTree
responseDeleteLoggingConfiguration =
res
"DeleteLoggingConfigurationResponse"
"fixture/DeleteLoggingConfigurationResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy DeleteLoggingConfiguration)
responseDeletePermissionPolicy :: DeletePermissionPolicyResponse -> TestTree
responseDeletePermissionPolicy =
res
"DeletePermissionPolicyResponse"
"fixture/DeletePermissionPolicyResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy DeletePermissionPolicy)
responseDeleteRegexPatternSet :: DeleteRegexPatternSetResponse -> TestTree
responseDeleteRegexPatternSet =
res
"DeleteRegexPatternSetResponse"
"fixture/DeleteRegexPatternSetResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy DeleteRegexPatternSet)
responseDeleteRuleGroup :: DeleteRuleGroupResponse -> TestTree
responseDeleteRuleGroup =
res
"DeleteRuleGroupResponse"
"fixture/DeleteRuleGroupResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy DeleteRuleGroup)
responseDeleteWebACL :: DeleteWebACLResponse -> TestTree
responseDeleteWebACL =
res
"DeleteWebACLResponse"
"fixture/DeleteWebACLResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy DeleteWebACL)
responseDescribeManagedRuleGroup :: DescribeManagedRuleGroupResponse -> TestTree
responseDescribeManagedRuleGroup =
res
"DescribeManagedRuleGroupResponse"
"fixture/DescribeManagedRuleGroupResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy DescribeManagedRuleGroup)
responseDisassociateWebACL :: DisassociateWebACLResponse -> TestTree
responseDisassociateWebACL =
res
"DisassociateWebACLResponse"
"fixture/DisassociateWebACLResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy DisassociateWebACL)
responseGenerateMobileSdkReleaseUrl :: GenerateMobileSdkReleaseUrlResponse -> TestTree
responseGenerateMobileSdkReleaseUrl =
res
"GenerateMobileSdkReleaseUrlResponse"
"fixture/GenerateMobileSdkReleaseUrlResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy GenerateMobileSdkReleaseUrl)
responseGetIPSet :: GetIPSetResponse -> TestTree
responseGetIPSet =
res
"GetIPSetResponse"
"fixture/GetIPSetResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy GetIPSet)
responseGetLoggingConfiguration :: GetLoggingConfigurationResponse -> TestTree
responseGetLoggingConfiguration =
res
"GetLoggingConfigurationResponse"
"fixture/GetLoggingConfigurationResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy GetLoggingConfiguration)
responseGetManagedRuleSet :: GetManagedRuleSetResponse -> TestTree
responseGetManagedRuleSet =
res
"GetManagedRuleSetResponse"
"fixture/GetManagedRuleSetResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy GetManagedRuleSet)
responseGetMobileSdkRelease :: GetMobileSdkReleaseResponse -> TestTree
responseGetMobileSdkRelease =
res
"GetMobileSdkReleaseResponse"
"fixture/GetMobileSdkReleaseResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy GetMobileSdkRelease)
responseGetPermissionPolicy :: GetPermissionPolicyResponse -> TestTree
responseGetPermissionPolicy =
res
"GetPermissionPolicyResponse"
"fixture/GetPermissionPolicyResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy GetPermissionPolicy)
responseGetRateBasedStatementManagedKeys :: GetRateBasedStatementManagedKeysResponse -> TestTree
responseGetRateBasedStatementManagedKeys =
res
"GetRateBasedStatementManagedKeysResponse"
"fixture/GetRateBasedStatementManagedKeysResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy GetRateBasedStatementManagedKeys)
responseGetRegexPatternSet :: GetRegexPatternSetResponse -> TestTree
responseGetRegexPatternSet =
res
"GetRegexPatternSetResponse"
"fixture/GetRegexPatternSetResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy GetRegexPatternSet)
responseGetRuleGroup :: GetRuleGroupResponse -> TestTree
responseGetRuleGroup =
res
"GetRuleGroupResponse"
"fixture/GetRuleGroupResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy GetRuleGroup)
responseGetSampledRequests :: GetSampledRequestsResponse -> TestTree
responseGetSampledRequests =
res
"GetSampledRequestsResponse"
"fixture/GetSampledRequestsResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy GetSampledRequests)
responseGetWebACL :: GetWebACLResponse -> TestTree
responseGetWebACL =
res
"GetWebACLResponse"
"fixture/GetWebACLResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy GetWebACL)
responseGetWebACLForResource :: GetWebACLForResourceResponse -> TestTree
responseGetWebACLForResource =
res
"GetWebACLForResourceResponse"
"fixture/GetWebACLForResourceResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy GetWebACLForResource)
responseListAvailableManagedRuleGroupVersions :: ListAvailableManagedRuleGroupVersionsResponse -> TestTree
responseListAvailableManagedRuleGroupVersions =
res
"ListAvailableManagedRuleGroupVersionsResponse"
"fixture/ListAvailableManagedRuleGroupVersionsResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy ListAvailableManagedRuleGroupVersions)
responseListAvailableManagedRuleGroups :: ListAvailableManagedRuleGroupsResponse -> TestTree
responseListAvailableManagedRuleGroups =
res
"ListAvailableManagedRuleGroupsResponse"
"fixture/ListAvailableManagedRuleGroupsResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy ListAvailableManagedRuleGroups)
responseListIPSets :: ListIPSetsResponse -> TestTree
responseListIPSets =
res
"ListIPSetsResponse"
"fixture/ListIPSetsResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy ListIPSets)
responseListLoggingConfigurations :: ListLoggingConfigurationsResponse -> TestTree
responseListLoggingConfigurations =
res
"ListLoggingConfigurationsResponse"
"fixture/ListLoggingConfigurationsResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy ListLoggingConfigurations)
responseListManagedRuleSets :: ListManagedRuleSetsResponse -> TestTree
responseListManagedRuleSets =
res
"ListManagedRuleSetsResponse"
"fixture/ListManagedRuleSetsResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy ListManagedRuleSets)
responseListMobileSdkReleases :: ListMobileSdkReleasesResponse -> TestTree
responseListMobileSdkReleases =
res
"ListMobileSdkReleasesResponse"
"fixture/ListMobileSdkReleasesResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy ListMobileSdkReleases)
responseListRegexPatternSets :: ListRegexPatternSetsResponse -> TestTree
responseListRegexPatternSets =
res
"ListRegexPatternSetsResponse"
"fixture/ListRegexPatternSetsResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy ListRegexPatternSets)
responseListResourcesForWebACL :: ListResourcesForWebACLResponse -> TestTree
responseListResourcesForWebACL =
res
"ListResourcesForWebACLResponse"
"fixture/ListResourcesForWebACLResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy ListResourcesForWebACL)
responseListRuleGroups :: ListRuleGroupsResponse -> TestTree
responseListRuleGroups =
res
"ListRuleGroupsResponse"
"fixture/ListRuleGroupsResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy ListRuleGroups)
responseListTagsForResource :: ListTagsForResourceResponse -> TestTree
responseListTagsForResource =
res
"ListTagsForResourceResponse"
"fixture/ListTagsForResourceResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy ListTagsForResource)
responseListWebACLs :: ListWebACLsResponse -> TestTree
responseListWebACLs =
res
"ListWebACLsResponse"
"fixture/ListWebACLsResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy ListWebACLs)
responsePutLoggingConfiguration :: PutLoggingConfigurationResponse -> TestTree
responsePutLoggingConfiguration =
res
"PutLoggingConfigurationResponse"
"fixture/PutLoggingConfigurationResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy PutLoggingConfiguration)
responsePutManagedRuleSetVersions :: PutManagedRuleSetVersionsResponse -> TestTree
responsePutManagedRuleSetVersions =
res
"PutManagedRuleSetVersionsResponse"
"fixture/PutManagedRuleSetVersionsResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy PutManagedRuleSetVersions)
responsePutPermissionPolicy :: PutPermissionPolicyResponse -> TestTree
responsePutPermissionPolicy =
res
"PutPermissionPolicyResponse"
"fixture/PutPermissionPolicyResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy PutPermissionPolicy)
responseTagResource :: TagResourceResponse -> TestTree
responseTagResource =
res
"TagResourceResponse"
"fixture/TagResourceResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy TagResource)
responseUntagResource :: UntagResourceResponse -> TestTree
responseUntagResource =
res
"UntagResourceResponse"
"fixture/UntagResourceResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy UntagResource)
responseUpdateIPSet :: UpdateIPSetResponse -> TestTree
responseUpdateIPSet =
res
"UpdateIPSetResponse"
"fixture/UpdateIPSetResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy UpdateIPSet)
responseUpdateManagedRuleSetVersionExpiryDate :: UpdateManagedRuleSetVersionExpiryDateResponse -> TestTree
responseUpdateManagedRuleSetVersionExpiryDate =
res
"UpdateManagedRuleSetVersionExpiryDateResponse"
"fixture/UpdateManagedRuleSetVersionExpiryDateResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy UpdateManagedRuleSetVersionExpiryDate)
responseUpdateRegexPatternSet :: UpdateRegexPatternSetResponse -> TestTree
responseUpdateRegexPatternSet =
res
"UpdateRegexPatternSetResponse"
"fixture/UpdateRegexPatternSetResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy UpdateRegexPatternSet)
responseUpdateRuleGroup :: UpdateRuleGroupResponse -> TestTree
responseUpdateRuleGroup =
res
"UpdateRuleGroupResponse"
"fixture/UpdateRuleGroupResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy UpdateRuleGroup)
responseUpdateWebACL :: UpdateWebACLResponse -> TestTree
responseUpdateWebACL =
res
"UpdateWebACLResponse"
"fixture/UpdateWebACLResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy UpdateWebACL)
| null | https://raw.githubusercontent.com/brendanhay/amazonka/09f52b75d2cfdff221b439280d3279d22690d6a6/lib/services/amazonka-wafv2/test/Test/Amazonka/Gen/WAFV2.hs | haskell | |
Module : Test.Amazonka.Gen.WAFV2
Stability : auto-generated
Auto-generated: the actual test selection needs to be manually placed into
the top-level so that real test data can be incrementally added.
This commented snippet is what the entire set should look like:
fixtures :: TestTree
fixtures =
[ testGroup "request"
[ requestAssociateWebACL $
newAssociateWebACL
, requestCheckCapacity $
newCheckCapacity
, requestCreateIPSet $
newCreateIPSet
, requestCreateRegexPatternSet $
newCreateRegexPatternSet
, requestCreateRuleGroup $
newCreateRuleGroup
, requestCreateWebACL $
newCreateWebACL
, requestDeleteFirewallManagerRuleGroups $
, requestDeleteIPSet $
newDeleteIPSet
, requestDeleteLoggingConfiguration $
newDeleteLoggingConfiguration
, requestDeletePermissionPolicy $
, requestDeleteRegexPatternSet $
newDeleteRegexPatternSet
, requestDeleteRuleGroup $
newDeleteRuleGroup
, requestDeleteWebACL $
newDeleteWebACL
, requestDescribeManagedRuleGroup $
newDescribeManagedRuleGroup
, requestDisassociateWebACL $
newDisassociateWebACL
, requestGenerateMobileSdkReleaseUrl $
newGenerateMobileSdkReleaseUrl
, requestGetIPSet $
, requestGetLoggingConfiguration $
newGetLoggingConfiguration
, requestGetManagedRuleSet $
, requestGetMobileSdkRelease $
newGetMobileSdkRelease
, requestGetRateBasedStatementManagedKeys $
newGetRateBasedStatementManagedKeys
, requestGetRegexPatternSet $
newGetRegexPatternSet
, requestGetRuleGroup $
newGetRuleGroup
, requestGetSampledRequests $
newGetSampledRequests
, requestGetWebACL $
newGetWebACL
, requestGetWebACLForResource $
newGetWebACLForResource
, requestListAvailableManagedRuleGroupVersions $
, requestListAvailableManagedRuleGroups $
newListAvailableManagedRuleGroups
, requestListIPSets $
newListIPSets
, requestListLoggingConfigurations $
newListLoggingConfigurations
, requestListManagedRuleSets $
, requestListMobileSdkReleases $
newListMobileSdkReleases
, requestListRegexPatternSets $
newListRegexPatternSets
, requestListResourcesForWebACL $
newListResourcesForWebACL
, requestListRuleGroups $
newListRuleGroups
, requestListTagsForResource $
, requestListWebACLs $
newListWebACLs
, requestPutLoggingConfiguration $
newPutLoggingConfiguration
, requestPutManagedRuleSetVersions $
newPutManagedRuleSetVersions
, requestPutPermissionPolicy $
newPutPermissionPolicy
, requestTagResource $
, requestUntagResource $
newUntagResource
, requestUpdateIPSet $
newUpdateIPSet
, requestUpdateManagedRuleSetVersionExpiryDate $
, requestUpdateRegexPatternSet $
newUpdateRegexPatternSet
, requestUpdateRuleGroup $
newUpdateRuleGroup
, requestUpdateWebACL $
newUpdateWebACL
]
, testGroup "response"
[ responseAssociateWebACL $
newAssociateWebACLResponse
, responseCheckCapacity $
newCheckCapacityResponse
, responseCreateIPSet $
newCreateIPSetResponse
, responseCreateRegexPatternSet $
newCreateRegexPatternSetResponse
, responseCreateRuleGroup $
newCreateRuleGroupResponse
, responseCreateWebACL $
, responseDeleteFirewallManagerRuleGroups $
newDeleteFirewallManagerRuleGroupsResponse
, responseDeleteIPSet $
newDeleteIPSetResponse
, responseDeleteLoggingConfiguration $
, responseDeletePermissionPolicy $
newDeletePermissionPolicyResponse
, responseDeleteRegexPatternSet $
newDeleteRegexPatternSetResponse
, responseDeleteRuleGroup $
, responseDeleteWebACL $
newDeleteWebACLResponse
, responseDescribeManagedRuleGroup $
newDescribeManagedRuleGroupResponse
, responseDisassociateWebACL $
newDisassociateWebACLResponse
, responseGenerateMobileSdkReleaseUrl $
newGenerateMobileSdkReleaseUrlResponse
, responseGetIPSet $
newGetIPSetResponse
, responseGetLoggingConfiguration $
newGetLoggingConfigurationResponse
, responseGetManagedRuleSet $
newGetManagedRuleSetResponse
, responseGetMobileSdkRelease $
newGetMobileSdkReleaseResponse
, responseGetPermissionPolicy $
newGetPermissionPolicyResponse
newGetRateBasedStatementManagedKeysResponse
, responseGetRegexPatternSet $
newGetRegexPatternSetResponse
, responseGetRuleGroup $
newGetRuleGroupResponse
, responseGetSampledRequests $
newGetSampledRequestsResponse
, responseGetWebACL $
newGetWebACLResponse
, responseGetWebACLForResource $
newGetWebACLForResourceResponse
, responseListAvailableManagedRuleGroupVersions $
newListAvailableManagedRuleGroupVersionsResponse
, responseListAvailableManagedRuleGroups $
newListAvailableManagedRuleGroupsResponse
, responseListIPSets $
, responseListLoggingConfigurations $
newListLoggingConfigurationsResponse
, responseListManagedRuleSets $
newListManagedRuleSetsResponse
, responseListMobileSdkReleases $
newListMobileSdkReleasesResponse
, responseListRegexPatternSets $
newListRegexPatternSetsResponse
, responseListResourcesForWebACL $
newListResourcesForWebACLResponse
, responseListRuleGroups $
newListRuleGroupsResponse
, responseListTagsForResource $
newListTagsForResourceResponse
, responseListWebACLs $
newListWebACLsResponse
, responsePutLoggingConfiguration $
newPutLoggingConfigurationResponse
, responsePutManagedRuleSetVersions $
, responsePutPermissionPolicy $
, responseTagResource $
newTagResourceResponse
, responseUntagResource $
newUntagResourceResponse
, responseUpdateIPSet $
, responseUpdateManagedRuleSetVersionExpiryDate $
newUpdateManagedRuleSetVersionExpiryDateResponse
, responseUpdateRegexPatternSet $
newUpdateRegexPatternSetResponse
, responseUpdateRuleGroup $
newUpdateRuleGroupResponse
, responseUpdateWebACL $
newUpdateWebACLResponse
]
]
Requests
Responses | # OPTIONS_GHC -fno - warn - orphans #
# OPTIONS_GHC -fno - warn - unused - imports #
Derived from AWS service descriptions , licensed under Apache 2.0 .
Copyright : ( c ) 2013 - 2023
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
Portability : non - portable ( GHC extensions )
module Test.Amazonka.Gen.WAFV2 where
import Amazonka.WAFV2
import qualified Data.Proxy as Proxy
import Test.Amazonka.Fixture
import Test.Amazonka.Prelude
import Test.Amazonka.WAFV2.Internal
import Test.Tasty
newDeleteFirewallManagerRuleGroups
newGetIPSet
newGetManagedRuleSet
,
newListAvailableManagedRuleGroupVersions
newListManagedRuleSets
newTagResource
newCreateWebACLResponse
newDeleteLoggingConfigurationResponse
newDeleteRuleGroupResponse
, responseGetRateBasedStatementManagedKeys $
newListIPSetsResponse
newPutManagedRuleSetVersionsResponse
newPutPermissionPolicyResponse
requestAssociateWebACL :: AssociateWebACL -> TestTree
requestAssociateWebACL =
req
"AssociateWebACL"
"fixture/AssociateWebACL.yaml"
requestCheckCapacity :: CheckCapacity -> TestTree
requestCheckCapacity =
req
"CheckCapacity"
"fixture/CheckCapacity.yaml"
requestCreateIPSet :: CreateIPSet -> TestTree
requestCreateIPSet =
req
"CreateIPSet"
"fixture/CreateIPSet.yaml"
requestCreateRegexPatternSet :: CreateRegexPatternSet -> TestTree
requestCreateRegexPatternSet =
req
"CreateRegexPatternSet"
"fixture/CreateRegexPatternSet.yaml"
requestCreateRuleGroup :: CreateRuleGroup -> TestTree
requestCreateRuleGroup =
req
"CreateRuleGroup"
"fixture/CreateRuleGroup.yaml"
requestCreateWebACL :: CreateWebACL -> TestTree
requestCreateWebACL =
req
"CreateWebACL"
"fixture/CreateWebACL.yaml"
requestDeleteFirewallManagerRuleGroups :: DeleteFirewallManagerRuleGroups -> TestTree
requestDeleteFirewallManagerRuleGroups =
req
"DeleteFirewallManagerRuleGroups"
"fixture/DeleteFirewallManagerRuleGroups.yaml"
requestDeleteIPSet :: DeleteIPSet -> TestTree
requestDeleteIPSet =
req
"DeleteIPSet"
"fixture/DeleteIPSet.yaml"
requestDeleteLoggingConfiguration :: DeleteLoggingConfiguration -> TestTree
requestDeleteLoggingConfiguration =
req
"DeleteLoggingConfiguration"
"fixture/DeleteLoggingConfiguration.yaml"
requestDeletePermissionPolicy :: DeletePermissionPolicy -> TestTree
requestDeletePermissionPolicy =
req
"DeletePermissionPolicy"
"fixture/DeletePermissionPolicy.yaml"
requestDeleteRegexPatternSet :: DeleteRegexPatternSet -> TestTree
requestDeleteRegexPatternSet =
req
"DeleteRegexPatternSet"
"fixture/DeleteRegexPatternSet.yaml"
requestDeleteRuleGroup :: DeleteRuleGroup -> TestTree
requestDeleteRuleGroup =
req
"DeleteRuleGroup"
"fixture/DeleteRuleGroup.yaml"
requestDeleteWebACL :: DeleteWebACL -> TestTree
requestDeleteWebACL =
req
"DeleteWebACL"
"fixture/DeleteWebACL.yaml"
requestDescribeManagedRuleGroup :: DescribeManagedRuleGroup -> TestTree
requestDescribeManagedRuleGroup =
req
"DescribeManagedRuleGroup"
"fixture/DescribeManagedRuleGroup.yaml"
requestDisassociateWebACL :: DisassociateWebACL -> TestTree
requestDisassociateWebACL =
req
"DisassociateWebACL"
"fixture/DisassociateWebACL.yaml"
requestGenerateMobileSdkReleaseUrl :: GenerateMobileSdkReleaseUrl -> TestTree
requestGenerateMobileSdkReleaseUrl =
req
"GenerateMobileSdkReleaseUrl"
"fixture/GenerateMobileSdkReleaseUrl.yaml"
requestGetIPSet :: GetIPSet -> TestTree
requestGetIPSet =
req
"GetIPSet"
"fixture/GetIPSet.yaml"
requestGetLoggingConfiguration :: GetLoggingConfiguration -> TestTree
requestGetLoggingConfiguration =
req
"GetLoggingConfiguration"
"fixture/GetLoggingConfiguration.yaml"
requestGetManagedRuleSet :: GetManagedRuleSet -> TestTree
requestGetManagedRuleSet =
req
"GetManagedRuleSet"
"fixture/GetManagedRuleSet.yaml"
requestGetMobileSdkRelease :: GetMobileSdkRelease -> TestTree
requestGetMobileSdkRelease =
req
"GetMobileSdkRelease"
"fixture/GetMobileSdkRelease.yaml"
requestGetPermissionPolicy :: GetPermissionPolicy -> TestTree
requestGetPermissionPolicy =
req
"GetPermissionPolicy"
"fixture/GetPermissionPolicy.yaml"
requestGetRateBasedStatementManagedKeys :: GetRateBasedStatementManagedKeys -> TestTree
requestGetRateBasedStatementManagedKeys =
req
"GetRateBasedStatementManagedKeys"
"fixture/GetRateBasedStatementManagedKeys.yaml"
requestGetRegexPatternSet :: GetRegexPatternSet -> TestTree
requestGetRegexPatternSet =
req
"GetRegexPatternSet"
"fixture/GetRegexPatternSet.yaml"
requestGetRuleGroup :: GetRuleGroup -> TestTree
requestGetRuleGroup =
req
"GetRuleGroup"
"fixture/GetRuleGroup.yaml"
requestGetSampledRequests :: GetSampledRequests -> TestTree
requestGetSampledRequests =
req
"GetSampledRequests"
"fixture/GetSampledRequests.yaml"
requestGetWebACL :: GetWebACL -> TestTree
requestGetWebACL =
req
"GetWebACL"
"fixture/GetWebACL.yaml"
requestGetWebACLForResource :: GetWebACLForResource -> TestTree
requestGetWebACLForResource =
req
"GetWebACLForResource"
"fixture/GetWebACLForResource.yaml"
requestListAvailableManagedRuleGroupVersions :: ListAvailableManagedRuleGroupVersions -> TestTree
requestListAvailableManagedRuleGroupVersions =
req
"ListAvailableManagedRuleGroupVersions"
"fixture/ListAvailableManagedRuleGroupVersions.yaml"
requestListAvailableManagedRuleGroups :: ListAvailableManagedRuleGroups -> TestTree
requestListAvailableManagedRuleGroups =
req
"ListAvailableManagedRuleGroups"
"fixture/ListAvailableManagedRuleGroups.yaml"
requestListIPSets :: ListIPSets -> TestTree
requestListIPSets =
req
"ListIPSets"
"fixture/ListIPSets.yaml"
requestListLoggingConfigurations :: ListLoggingConfigurations -> TestTree
requestListLoggingConfigurations =
req
"ListLoggingConfigurations"
"fixture/ListLoggingConfigurations.yaml"
requestListManagedRuleSets :: ListManagedRuleSets -> TestTree
requestListManagedRuleSets =
req
"ListManagedRuleSets"
"fixture/ListManagedRuleSets.yaml"
requestListMobileSdkReleases :: ListMobileSdkReleases -> TestTree
requestListMobileSdkReleases =
req
"ListMobileSdkReleases"
"fixture/ListMobileSdkReleases.yaml"
requestListRegexPatternSets :: ListRegexPatternSets -> TestTree
requestListRegexPatternSets =
req
"ListRegexPatternSets"
"fixture/ListRegexPatternSets.yaml"
requestListResourcesForWebACL :: ListResourcesForWebACL -> TestTree
requestListResourcesForWebACL =
req
"ListResourcesForWebACL"
"fixture/ListResourcesForWebACL.yaml"
requestListRuleGroups :: ListRuleGroups -> TestTree
requestListRuleGroups =
req
"ListRuleGroups"
"fixture/ListRuleGroups.yaml"
requestListTagsForResource :: ListTagsForResource -> TestTree
requestListTagsForResource =
req
"ListTagsForResource"
"fixture/ListTagsForResource.yaml"
requestListWebACLs :: ListWebACLs -> TestTree
requestListWebACLs =
req
"ListWebACLs"
"fixture/ListWebACLs.yaml"
requestPutLoggingConfiguration :: PutLoggingConfiguration -> TestTree
requestPutLoggingConfiguration =
req
"PutLoggingConfiguration"
"fixture/PutLoggingConfiguration.yaml"
requestPutManagedRuleSetVersions :: PutManagedRuleSetVersions -> TestTree
requestPutManagedRuleSetVersions =
req
"PutManagedRuleSetVersions"
"fixture/PutManagedRuleSetVersions.yaml"
requestPutPermissionPolicy :: PutPermissionPolicy -> TestTree
requestPutPermissionPolicy =
req
"PutPermissionPolicy"
"fixture/PutPermissionPolicy.yaml"
requestTagResource :: TagResource -> TestTree
requestTagResource =
req
"TagResource"
"fixture/TagResource.yaml"
requestUntagResource :: UntagResource -> TestTree
requestUntagResource =
req
"UntagResource"
"fixture/UntagResource.yaml"
requestUpdateIPSet :: UpdateIPSet -> TestTree
requestUpdateIPSet =
req
"UpdateIPSet"
"fixture/UpdateIPSet.yaml"
requestUpdateManagedRuleSetVersionExpiryDate :: UpdateManagedRuleSetVersionExpiryDate -> TestTree
requestUpdateManagedRuleSetVersionExpiryDate =
req
"UpdateManagedRuleSetVersionExpiryDate"
"fixture/UpdateManagedRuleSetVersionExpiryDate.yaml"
requestUpdateRegexPatternSet :: UpdateRegexPatternSet -> TestTree
requestUpdateRegexPatternSet =
req
"UpdateRegexPatternSet"
"fixture/UpdateRegexPatternSet.yaml"
requestUpdateRuleGroup :: UpdateRuleGroup -> TestTree
requestUpdateRuleGroup =
req
"UpdateRuleGroup"
"fixture/UpdateRuleGroup.yaml"
requestUpdateWebACL :: UpdateWebACL -> TestTree
requestUpdateWebACL =
req
"UpdateWebACL"
"fixture/UpdateWebACL.yaml"
responseAssociateWebACL :: AssociateWebACLResponse -> TestTree
responseAssociateWebACL =
res
"AssociateWebACLResponse"
"fixture/AssociateWebACLResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy AssociateWebACL)
responseCheckCapacity :: CheckCapacityResponse -> TestTree
responseCheckCapacity =
res
"CheckCapacityResponse"
"fixture/CheckCapacityResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy CheckCapacity)
responseCreateIPSet :: CreateIPSetResponse -> TestTree
responseCreateIPSet =
res
"CreateIPSetResponse"
"fixture/CreateIPSetResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy CreateIPSet)
responseCreateRegexPatternSet :: CreateRegexPatternSetResponse -> TestTree
responseCreateRegexPatternSet =
res
"CreateRegexPatternSetResponse"
"fixture/CreateRegexPatternSetResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy CreateRegexPatternSet)
responseCreateRuleGroup :: CreateRuleGroupResponse -> TestTree
responseCreateRuleGroup =
res
"CreateRuleGroupResponse"
"fixture/CreateRuleGroupResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy CreateRuleGroup)
responseCreateWebACL :: CreateWebACLResponse -> TestTree
responseCreateWebACL =
res
"CreateWebACLResponse"
"fixture/CreateWebACLResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy CreateWebACL)
responseDeleteFirewallManagerRuleGroups :: DeleteFirewallManagerRuleGroupsResponse -> TestTree
responseDeleteFirewallManagerRuleGroups =
res
"DeleteFirewallManagerRuleGroupsResponse"
"fixture/DeleteFirewallManagerRuleGroupsResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy DeleteFirewallManagerRuleGroups)
responseDeleteIPSet :: DeleteIPSetResponse -> TestTree
responseDeleteIPSet =
res
"DeleteIPSetResponse"
"fixture/DeleteIPSetResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy DeleteIPSet)
responseDeleteLoggingConfiguration :: DeleteLoggingConfigurationResponse -> TestTree
responseDeleteLoggingConfiguration =
res
"DeleteLoggingConfigurationResponse"
"fixture/DeleteLoggingConfigurationResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy DeleteLoggingConfiguration)
responseDeletePermissionPolicy :: DeletePermissionPolicyResponse -> TestTree
responseDeletePermissionPolicy =
res
"DeletePermissionPolicyResponse"
"fixture/DeletePermissionPolicyResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy DeletePermissionPolicy)
responseDeleteRegexPatternSet :: DeleteRegexPatternSetResponse -> TestTree
responseDeleteRegexPatternSet =
res
"DeleteRegexPatternSetResponse"
"fixture/DeleteRegexPatternSetResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy DeleteRegexPatternSet)
responseDeleteRuleGroup :: DeleteRuleGroupResponse -> TestTree
responseDeleteRuleGroup =
res
"DeleteRuleGroupResponse"
"fixture/DeleteRuleGroupResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy DeleteRuleGroup)
responseDeleteWebACL :: DeleteWebACLResponse -> TestTree
responseDeleteWebACL =
res
"DeleteWebACLResponse"
"fixture/DeleteWebACLResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy DeleteWebACL)
responseDescribeManagedRuleGroup :: DescribeManagedRuleGroupResponse -> TestTree
responseDescribeManagedRuleGroup =
res
"DescribeManagedRuleGroupResponse"
"fixture/DescribeManagedRuleGroupResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy DescribeManagedRuleGroup)
responseDisassociateWebACL :: DisassociateWebACLResponse -> TestTree
responseDisassociateWebACL =
res
"DisassociateWebACLResponse"
"fixture/DisassociateWebACLResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy DisassociateWebACL)
responseGenerateMobileSdkReleaseUrl :: GenerateMobileSdkReleaseUrlResponse -> TestTree
responseGenerateMobileSdkReleaseUrl =
res
"GenerateMobileSdkReleaseUrlResponse"
"fixture/GenerateMobileSdkReleaseUrlResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy GenerateMobileSdkReleaseUrl)
responseGetIPSet :: GetIPSetResponse -> TestTree
responseGetIPSet =
res
"GetIPSetResponse"
"fixture/GetIPSetResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy GetIPSet)
responseGetLoggingConfiguration :: GetLoggingConfigurationResponse -> TestTree
responseGetLoggingConfiguration =
res
"GetLoggingConfigurationResponse"
"fixture/GetLoggingConfigurationResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy GetLoggingConfiguration)
responseGetManagedRuleSet :: GetManagedRuleSetResponse -> TestTree
responseGetManagedRuleSet =
res
"GetManagedRuleSetResponse"
"fixture/GetManagedRuleSetResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy GetManagedRuleSet)
responseGetMobileSdkRelease :: GetMobileSdkReleaseResponse -> TestTree
responseGetMobileSdkRelease =
res
"GetMobileSdkReleaseResponse"
"fixture/GetMobileSdkReleaseResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy GetMobileSdkRelease)
responseGetPermissionPolicy :: GetPermissionPolicyResponse -> TestTree
responseGetPermissionPolicy =
res
"GetPermissionPolicyResponse"
"fixture/GetPermissionPolicyResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy GetPermissionPolicy)
responseGetRateBasedStatementManagedKeys :: GetRateBasedStatementManagedKeysResponse -> TestTree
responseGetRateBasedStatementManagedKeys =
res
"GetRateBasedStatementManagedKeysResponse"
"fixture/GetRateBasedStatementManagedKeysResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy GetRateBasedStatementManagedKeys)
responseGetRegexPatternSet :: GetRegexPatternSetResponse -> TestTree
responseGetRegexPatternSet =
res
"GetRegexPatternSetResponse"
"fixture/GetRegexPatternSetResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy GetRegexPatternSet)
responseGetRuleGroup :: GetRuleGroupResponse -> TestTree
responseGetRuleGroup =
res
"GetRuleGroupResponse"
"fixture/GetRuleGroupResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy GetRuleGroup)
responseGetSampledRequests :: GetSampledRequestsResponse -> TestTree
responseGetSampledRequests =
res
"GetSampledRequestsResponse"
"fixture/GetSampledRequestsResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy GetSampledRequests)
responseGetWebACL :: GetWebACLResponse -> TestTree
responseGetWebACL =
res
"GetWebACLResponse"
"fixture/GetWebACLResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy GetWebACL)
responseGetWebACLForResource :: GetWebACLForResourceResponse -> TestTree
responseGetWebACLForResource =
res
"GetWebACLForResourceResponse"
"fixture/GetWebACLForResourceResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy GetWebACLForResource)
responseListAvailableManagedRuleGroupVersions :: ListAvailableManagedRuleGroupVersionsResponse -> TestTree
responseListAvailableManagedRuleGroupVersions =
res
"ListAvailableManagedRuleGroupVersionsResponse"
"fixture/ListAvailableManagedRuleGroupVersionsResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy ListAvailableManagedRuleGroupVersions)
responseListAvailableManagedRuleGroups :: ListAvailableManagedRuleGroupsResponse -> TestTree
responseListAvailableManagedRuleGroups =
res
"ListAvailableManagedRuleGroupsResponse"
"fixture/ListAvailableManagedRuleGroupsResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy ListAvailableManagedRuleGroups)
responseListIPSets :: ListIPSetsResponse -> TestTree
responseListIPSets =
res
"ListIPSetsResponse"
"fixture/ListIPSetsResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy ListIPSets)
responseListLoggingConfigurations :: ListLoggingConfigurationsResponse -> TestTree
responseListLoggingConfigurations =
res
"ListLoggingConfigurationsResponse"
"fixture/ListLoggingConfigurationsResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy ListLoggingConfigurations)
responseListManagedRuleSets :: ListManagedRuleSetsResponse -> TestTree
responseListManagedRuleSets =
res
"ListManagedRuleSetsResponse"
"fixture/ListManagedRuleSetsResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy ListManagedRuleSets)
responseListMobileSdkReleases :: ListMobileSdkReleasesResponse -> TestTree
responseListMobileSdkReleases =
res
"ListMobileSdkReleasesResponse"
"fixture/ListMobileSdkReleasesResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy ListMobileSdkReleases)
responseListRegexPatternSets :: ListRegexPatternSetsResponse -> TestTree
responseListRegexPatternSets =
res
"ListRegexPatternSetsResponse"
"fixture/ListRegexPatternSetsResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy ListRegexPatternSets)
responseListResourcesForWebACL :: ListResourcesForWebACLResponse -> TestTree
responseListResourcesForWebACL =
res
"ListResourcesForWebACLResponse"
"fixture/ListResourcesForWebACLResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy ListResourcesForWebACL)
responseListRuleGroups :: ListRuleGroupsResponse -> TestTree
responseListRuleGroups =
res
"ListRuleGroupsResponse"
"fixture/ListRuleGroupsResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy ListRuleGroups)
responseListTagsForResource :: ListTagsForResourceResponse -> TestTree
responseListTagsForResource =
res
"ListTagsForResourceResponse"
"fixture/ListTagsForResourceResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy ListTagsForResource)
responseListWebACLs :: ListWebACLsResponse -> TestTree
responseListWebACLs =
res
"ListWebACLsResponse"
"fixture/ListWebACLsResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy ListWebACLs)
responsePutLoggingConfiguration :: PutLoggingConfigurationResponse -> TestTree
responsePutLoggingConfiguration =
res
"PutLoggingConfigurationResponse"
"fixture/PutLoggingConfigurationResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy PutLoggingConfiguration)
responsePutManagedRuleSetVersions :: PutManagedRuleSetVersionsResponse -> TestTree
responsePutManagedRuleSetVersions =
res
"PutManagedRuleSetVersionsResponse"
"fixture/PutManagedRuleSetVersionsResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy PutManagedRuleSetVersions)
responsePutPermissionPolicy :: PutPermissionPolicyResponse -> TestTree
responsePutPermissionPolicy =
res
"PutPermissionPolicyResponse"
"fixture/PutPermissionPolicyResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy PutPermissionPolicy)
responseTagResource :: TagResourceResponse -> TestTree
responseTagResource =
res
"TagResourceResponse"
"fixture/TagResourceResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy TagResource)
responseUntagResource :: UntagResourceResponse -> TestTree
responseUntagResource =
res
"UntagResourceResponse"
"fixture/UntagResourceResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy UntagResource)
responseUpdateIPSet :: UpdateIPSetResponse -> TestTree
responseUpdateIPSet =
res
"UpdateIPSetResponse"
"fixture/UpdateIPSetResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy UpdateIPSet)
responseUpdateManagedRuleSetVersionExpiryDate :: UpdateManagedRuleSetVersionExpiryDateResponse -> TestTree
responseUpdateManagedRuleSetVersionExpiryDate =
res
"UpdateManagedRuleSetVersionExpiryDateResponse"
"fixture/UpdateManagedRuleSetVersionExpiryDateResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy UpdateManagedRuleSetVersionExpiryDate)
responseUpdateRegexPatternSet :: UpdateRegexPatternSetResponse -> TestTree
responseUpdateRegexPatternSet =
res
"UpdateRegexPatternSetResponse"
"fixture/UpdateRegexPatternSetResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy UpdateRegexPatternSet)
responseUpdateRuleGroup :: UpdateRuleGroupResponse -> TestTree
responseUpdateRuleGroup =
res
"UpdateRuleGroupResponse"
"fixture/UpdateRuleGroupResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy UpdateRuleGroup)
responseUpdateWebACL :: UpdateWebACLResponse -> TestTree
responseUpdateWebACL =
res
"UpdateWebACLResponse"
"fixture/UpdateWebACLResponse.proto"
defaultService
(Proxy.Proxy :: Proxy.Proxy UpdateWebACL)
|
624b731a73f17167739fd3ae90f1500beb6d173352f7042d5be3ed7d3cad9845 | letmaik/monadiccp | PriorityQueue.hs | Copyright ( c ) 2008 the authors listed at the following URL , and/or
the authors of referenced articles or incorporated external code :
(Haskell)?action=history&offset=20080608152146
Permission is hereby granted , free of charge , to any person obtaining
a copy of this software and associated documentation files ( the
" Software " ) , to deal in the Software without restriction , including
without limitation the rights to use , copy , modify , merge , publish ,
distribute , sublicense , and/or sell copies of the Software , and to
permit persons to whom the Software is furnished to do so , subject to
the following conditions :
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT .
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT ,
TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE .
Retrieved from : (Haskell)?oldid=13634
the authors of referenced articles or incorporated external code:
(Haskell)?action=history&offset=20080608152146
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Retrieved from: (Haskell)?oldid=13634
-}
# LANGUAGE DatatypeContexts #
module Control.CP.PriorityQueue (
PriorityQueue,
empty,
is_empty,
minKey,
minKeyValue,
insert,
deleteMin,
deleteMinAndInsert
) where
import Prelude
-- Declare the data type constructors.
-- A heap-ordered binary tree: either empty (Nil) or a Branch carrying the
-- minimum key, its value, and two sub-queues.  Heap order is established by
-- 'union'/'link' below, which always keep the smaller root on top.
-- NOTE(review): the 'Ord k =>' datatype context needs the deprecated
-- DatatypeContexts extension (see the LANGUAGE pragma above) -- confirm the
-- target GHC version still accepts it.
data Ord k => PriorityQueue k a = Nil | Branch k a (PriorityQueue k a) (PriorityQueue k a)
-- Declare the exported interface functions.
-- Return an empty priority queue.
-- | True iff the queue holds no elements.
-- An explicit type signature is added for consistency: every other exported
-- binding in this module carries one.
is_empty :: Ord k => PriorityQueue k a -> Bool
is_empty Nil = True
is_empty _ = False
-- | The empty queue: no keys, no values.
empty :: Ord k => PriorityQueue k a
empty = Nil
-- Return the highest-priority key.
-- | The smallest key in the queue.
-- Errors on an empty queue (via 'minKeyValue').
minKey :: Ord k => PriorityQueue k a -> k
minKey q = fst (minKeyValue q)
-- Return the highest-priority key plus its associated value.
-- | The smallest key together with its associated value.
-- The root of a Branch is the minimum because 'union'/'link' always place
-- the smaller root on top.  Errors with "empty queue" on Nil.
minKeyValue :: Ord k => PriorityQueue k a -> (k, a)
minKeyValue Nil = error "empty queue"
minKeyValue (Branch k a _ _) = (k, a)
-- Insert a key/value pair into a queue.
-- | Add one key/value binding by merging a singleton queue into q.
-- Duplicate keys are kept (no binding is ever overwritten here).
insert :: Ord k => k -> a -> PriorityQueue k a -> PriorityQueue k a
insert k a q = union (singleton k a) q
-- | Remove the minimum binding, returning it together with the remaining
-- queue (the union of the root's two sub-queues).
-- Fix: the original had no Nil equation, so an empty queue died with an
-- opaque non-exhaustive-pattern error; it now reports "empty queue", the
-- same message 'minKeyValue' uses.
deleteMin :: Ord k => PriorityQueue k a -> ((k,a), PriorityQueue k a)
deleteMin Nil = error "empty queue"
deleteMin (Branch k a l r) = ((k,a), union l r)
-- Delete the highest-priority key/value pair and insert a new key/value pair into the queue.
-- | Drop the minimum binding (if any) and insert a new one in a single pass:
-- the root is discarded, the new binding is inserted into the left sub-queue,
-- and the result is merged with the right sub-queue.
-- On an empty queue this is just a plain insert.
deleteMinAndInsert :: Ord k => k -> a -> PriorityQueue k a -> PriorityQueue k a
deleteMinAndInsert k a Nil = singleton k a
deleteMinAndInsert k a (Branch _ _ l r) = union (insert k a l) r
-- Declare the private helper functions.
-- Join two queues in sorted order.
-- | Merge two queues, keeping heap order: the queue with the smaller
-- (or equal) root key becomes the new root via 'link'.
union :: Ord k => PriorityQueue k a -> PriorityQueue k a -> PriorityQueue k a
union l Nil = l
union Nil r = r
union l@(Branch kl _ _ _) r@(Branch kr _ _ _)
| kl <= kr = link l r
| otherwise = link r l
-- Join two queues without regard to order.
-- (This is a helper to the union helper.)
-- | Attach queue r below the root of the first argument; 'union' only calls
-- this with the smaller-rooted queue first, so heap order is preserved.
-- If the left child slot is free, r goes there; otherwise r takes the left
-- slot and the old left child is merged into the right one.
-- Fixes/additions: an explicit type signature (consistent with the rest of
-- the module) and a Nil equation so a direct call on an empty queue no
-- longer crashes with a pattern-match failure ('union' never passes Nil).
link :: Ord k => PriorityQueue k a -> PriorityQueue k a -> PriorityQueue k a
link Nil r = r
link (Branch k a Nil m) r = Branch k a r m
link (Branch k a ll lr) r = Branch k a lr (union ll r)
-- Return a queue with a single item from a key/value pair.
-- | A one-element queue holding the single binding k -> a.
singleton :: Ord k => k -> a -> PriorityQueue k a
singleton k a = Branch k a Nil Nil
| null | https://raw.githubusercontent.com/letmaik/monadiccp/fe4498e46a7b9d9e387fd5e4ed5d0749a89d0188/src/Control/CP/PriorityQueue.hs | haskell | Declare the data type constructors.
Declare the exported interface functions.
Return an empty priority queue.
Return the highest-priority key.
Return the highest-priority key plus its associated value.
Insert a key/value pair into a queue.
Delete the highest-priority key/value pair and insert a new key/value pair into the queue.
Declare the private helper functions.
(This is a helper to the union helper.)
Return a queue with a single item from a key/value pair. | Copyright ( c ) 2008 the authors listed at the following URL , and/or
the authors of referenced articles or incorporated external code :
(Haskell)?action=history&offset=20080608152146
Permission is hereby granted , free of charge , to any person obtaining
a copy of this software and associated documentation files ( the
" Software " ) , to deal in the Software without restriction , including
without limitation the rights to use , copy , modify , merge , publish ,
distribute , sublicense , and/or sell copies of the Software , and to
permit persons to whom the Software is furnished to do so , subject to
the following conditions :
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
EXPRESS OR IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT .
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT ,
TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE .
Retrieved from : (Haskell)?oldid=13634
the authors of referenced articles or incorporated external code:
(Haskell)?action=history&offset=20080608152146
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Retrieved from: (Haskell)?oldid=13634
-}
{-# LANGUAGE DatatypeContexts #-}
module Control.CP.PriorityQueue (
PriorityQueue,
empty,
is_empty,
minKey,
minKeyValue,
insert,
deleteMin,
deleteMinAndInsert
) where
import Prelude
data Ord k => PriorityQueue k a = Nil | Branch k a (PriorityQueue k a) (PriorityQueue k a)
is_empty Nil = True
is_empty _ = False
empty :: Ord k => PriorityQueue k a
empty = Nil
minKey :: Ord k => PriorityQueue k a -> k
minKey = fst . minKeyValue
minKeyValue :: Ord k => PriorityQueue k a -> (k, a)
minKeyValue Nil = error "empty queue"
minKeyValue (Branch k a _ _) = (k, a)
insert :: Ord k => k -> a -> PriorityQueue k a -> PriorityQueue k a
insert k a q = union (singleton k a) q
-- | Remove the minimum binding, returning it together with the remaining
-- queue (the union of the root's two sub-queues).
-- Fix: an explicit Nil equation so an empty queue reports "empty queue"
-- instead of an opaque non-exhaustive-pattern failure.
deleteMin :: Ord k => PriorityQueue k a -> ((k,a), PriorityQueue k a)
deleteMin Nil = error "empty queue"
deleteMin (Branch k a l r) = ((k,a), union l r)
deleteMinAndInsert :: Ord k => k -> a -> PriorityQueue k a -> PriorityQueue k a
deleteMinAndInsert k a Nil = singleton k a
deleteMinAndInsert k a (Branch _ _ l r) = union (insert k a l) r
Join two queues in sorted order .
union :: Ord k => PriorityQueue k a -> PriorityQueue k a -> PriorityQueue k a
union l Nil = l
union Nil r = r
union l@(Branch kl _ _ _) r@(Branch kr _ _ _)
| kl <= kr = link l r
| otherwise = link r l
Join two queues without regard to order .
link (Branch k a Nil m) r = Branch k a r m
link (Branch k a ll lr) r = Branch k a lr (union ll r)
singleton :: Ord k => k -> a -> PriorityQueue k a
singleton k a = Branch k a Nil Nil
|
aea8c9a94c85ababe8a5c780d03b02a6ae945860d298d5889b0c94231107a0e3 | weavejester/ittyon | test_runner.cljs | (ns ittyon.test-runner
(:require [doo.runner :refer-macros [doo-tests]]
ittyon.client-server-test
ittyon.core-test))
;; Entry point for the doo JS test runner: run every test in the two ittyon
;; test namespaces (both are loaded by the :require in the ns form above).
(doo-tests 'ittyon.client-server-test
'ittyon.core-test)
| null | https://raw.githubusercontent.com/weavejester/ittyon/6776dac6f5f63060130226a2602cc6a5640575da/test/ittyon/test_runner.cljs | clojure | (ns ittyon.test-runner
(:require [doo.runner :refer-macros [doo-tests]]
ittyon.client-server-test
ittyon.core-test))
(doo-tests 'ittyon.client-server-test
'ittyon.core-test)
| |
b602b37113cf8b0f2e5e768ec2fa6fedf7b1d982ee9497b8617a1c4b5c133306 | mauny/the-functional-approach-to-programming | tree.ml | (* *)
(* Projet Formel *)
(* *)
CAML - light :
(* *)
(*************************************************************************)
(* *)
(* LIENS *)
45 rue d'Ulm
75005 PARIS
France
(* *)
(*************************************************************************)
$ I d : tree.mlp , v 1.1 1997/08/14 11:34:25
(* tree.ml Tree drawing algorithms for mlPicTex *)
Guy Cousineau &
Tue Jun 30 1992
Author :
(* Creation: 30/6/92 *)
Updates : 4/5/92 ( ) btrees = > trees and proof trees
(* This file contains functions to draw trees *)
(* The main function is makeTreePicture *)
#open "MLgraph";;
#open "compatibility";;
#open "prelude";;
#open "geometry";;
#open "frames";;
#open "paint";;
#open "fonts";;
#open "texts";;
#open "sketches";;
#open "pictures";;
#open "option";;
#open "graph";;
(* fold f acc l : thread an accumulator through l from left to right while
   collecting a result per element (a "map with accumulator").
   f acc x must return (new_acc, mapped_x); the final accumulator is returned
   together with the mapped values, in their original order. *)
let fold f =
  let rec walk acc = function
      [] -> acc, []
    | x :: rest ->
        let acc1, y = f acc x in
        let acc2, ys = walk acc1 rest in
        acc2, y :: ys
  in walk;;
(* tree_it f t x : fold over a tree.  A Nil tree leaves the accumulator x
   untouched; for a Node, f is applied as  f info acc label  and the result
   is then threaded (with list_it) through all the sons. *)
let rec tree_it f t x =
  match t with
    Nil -> x
  | Node {info=v; sons=children; label=lbl} ->
      list_it (tree_it f) children (f v x lbl);;
(* tree_map f t : rebuild t with every node's info replaced by f info;
   the tree shape and the labels are kept unchanged. *)
let rec tree_map f = function
    Nil -> Nil
  | Node {info=v; sons=children; label=lbl} ->
      Node {info = f v; sons = map (tree_map f) children; label = lbl};;
(* function drawTree requires parameters *)
(* drn : the function for drawing nodes (drawTree assumes that this *)
(* function operates in "fill" mode) *)
(* drl : the function for drawing labels (drawTree assumes that this         *)
(* function operates in "fill" mode) *)
(* h: the height (distance between tree levels) *)
(* d : the distance between 2 brother nodes at level 1                       *)
(* cl : the coefficient list which gives for each level the ratio *)
(* between distance at level (n+1) and at level n *)
(* pt: the point where the root should be placed *)
(* drawTree opts (drn,drl) (h,d,cl,pt) t
   Render tree t as a picture: nodes through drn, optional edge labels
   through drl (which receives the parent position and the node position),
   edges as segments drawn with sketchGen opts.
   h  = vertical distance between levels,
   d  = distance between brother nodes at level 1,
   cl = per-level shrink coefficients (hd cl is consumed at each level),
   pt = where the root is placed.
   Raises Failure on an empty (Nil) tree. *)
let drawTree opts (drn,drl) (h,d,cl,pt) =
let draw = sketchGen opts d
in let rec drawR (d,cl,({xc=x; yc=y} as pt),ori) = function
Nil -> failwith "Cannot draw an empty tree"
| Node{info=a;sons=[];label=lab}
-> (center_picture (drn a) pt::
(match lab with Nolabel -> []|Label b -> [drl (ori,pt) b]))
| Node{info=a;sons=sons;label=lab}
(* shrink sibling spacing for this level and centre the sons around x *)
-> let d=d*.(hd cl) and coef = float_of_int(1-list_length sons)/.2.0
in let pts,_ = list_it
(fun t (l,c) -> ((t,{xc=x+.c*.d;yc=y-.h})::l),c+.1.0)
(rev sons) ([],coef)
in
(* one edge plus a recursive drawing per non-Nil son, then the node itself *)
it_list append [] (map (function (Nil,_) -> []
| (t1,pt1) ->
draw (make_sketch [Seg [pt;pt1]])::
drawR (d,tl cl,pt1,pt) t1) pts)
@(center_picture (drn a) pt::
(match lab with Nolabel -> []|Label b -> [drl (ori,pt) b]))
in compose group_pictures (drawR (d,cl,pt,{xc=pt.xc;yc=pt.yc+.h}));;
(* function drawProofTree requires parameters *)
(* h: the height (distance between tree levels) *)
(* d : the distance between 2 brother nodes at level 1                       *)
(* cl : the coefficient list which gives for each level the ratio *)
(* between distance at level (n+1) and at level n *)
(* pt: the point where the root should be placed *)
(* Width (in picture units) of the picture stored at the root of a tree;
   raises Failure "widthOfInfo" on Nil, where there is nothing to measure. *)
let widthOfInfo = function
    Nil -> raise (Failure "widthOfInfo")
  | Node {info=pict; sons=_; label=_} ->
      let fr = picture_frame pict in
      fr.xmax -. fr.xmin;;
(* drawProofTree opts sep (h,d,cl,pt) tree
   Render a proof tree: premises are placed above their conclusion
   (sons are laid out at y+.h), a horizontal inference bar separates them,
   and an optional rule label is set beside the bar -- on the right when the
   "treeLabelPos" option is > 0, on the left otherwise; sep is the gap
   between bar and label.  h, d, cl and pt play the same roles as in
   drawTree.  Raises Failure on a Nil tree. *)
let drawProofTree opts sep (h,d,cl,pt) tree =
let pos = lt_float 0. (theFloat opts "treeLabelPos" 0.5)
and draw = sketchGen opts ((h+.d)/.2.0)
and h2 = h/.2.0
in let rec drawR (d',cl) (t,({xc=x; yc=y} as pt)) = match t with
Nil -> failwith "Cannot draw an empty tree"
| Node{info=a;sons=[];label=lab}
-> [center_picture a pt]
| Node{info=a;sons=sons;label=lab}
-> let d=d'*.(hd cl) and coef = float_of_int(list_length sons-1)/.2.0
(* position every non-Nil premise above the conclusion, centred around x *)
in match fst(list_it
(fun arg1 arg2 -> match (arg1,arg2) with
Nil ,(l,c) -> l,c-.1.0
| t , (l,c) -> ((t,{xc=x+.c*.d;yc=y+.h})::l),c-.1.0)
sons ([],coef))
with [] -> [center_picture a pt]
| pts ->
(* the bar must span both the conclusion and the outermost premises *)
let lw = widthOfInfo t/.2.0 in
let wLeft = max_float (coef*.d+.widthOfInfo (fst(hd pts))/.2.0) lw
and wRight = max_float (coef*.d+.widthOfInfo (fst(hd (rev pts)))/.2.0) lw in
it_list append [] (map (drawR (d,tl cl)) pts)
@(draw
(make_sketch [Seg [
{xc=x-.wLeft;yc=y+.h2};
(* {xc=x;yc=y+.h2};{xc=x;yc=y+.h2-.10.0*.LS.linewidth};{xc=x;yc=y+.h2}; *)
{xc=x+.wRight;yc=y+.h2}]])::
center_picture a pt::
(match lab with Nolabel -> []|Label b ->
[center_picture b
{xc=x+.(if pos then wRight+.sep else -.(wLeft+.sep));
yc=y+.h2}]))
in group_pictures(drawR (d,cl) (tree,pt));;
Various functions for drawing nodes
let drawNode r a =
make_fill_picture ( Nzfill , white )
( make_sketch [ Arc(origin , r,0.0,360.0 ) ] ) ; ;
let drawStringNode r a =
let s= center_picture
( make_text_picture ( make_font Helvetica r ) black a )
origin
and f= make_fill_picture ( Nzfill , white )
( make_sketch [ Arc(origin , r,0.0,360.0 ) ] )
and c= make_draw_picture ( { linewidth= r*.0.1;linecap = Buttcap ;
linejoin = Miterjoin;dashpattern= [ ] }
, black )
( make_sketch [ Arc(origin , r,0.0,360.0 ) ] )
in group_pictures [ f;c;s ] ; ;
let r n = drawStringNode r ( string_of_int n ) ; ;
(* Sample node renderers.  NOTE(review): the closing comment bracket just
   below suggests this whole group may be commented out in the original
   sources -- confirm before relying on these definitions. *)
(* drawNode r a : a white disc of radius r centred at the origin
   (the argument a is ignored). *)
let drawNode r a =
make_fill_picture (Nzfill,white)
(make_sketch [Arc(origin,r,0.0,360.0)]);;
(* drawStringNode r a : the string a set in a Helvetica font built with
   parameter r (presumably the point size), centred inside a white disc of
   radius r outlined with a thin black circle. *)
let drawStringNode r a =
let s= center_picture
(make_text_picture (make_font Helvetica r) black a)
origin
and f= make_fill_picture (Nzfill,white)
(make_sketch [Arc(origin,r,0.0,360.0)])
and c= make_draw_picture ({linewidth= r*.0.1;linecap=Buttcap;
linejoin=Miterjoin;dashpattern=[]}
,black)
(make_sketch [Arc(origin,r,0.0,360.0)])
in group_pictures [f;c;s];;
(* drawIntNode r n : drawStringNode applied to the decimal image of n. *)
let drawIntNode r n = drawStringNode r (string_of_int n);;
*)
(* How to compute the coefficient list *)
(* The function "computeCoefList" recursively computes for each subtree *)
an information which has shape ( cl , )
(* cl is the list of reduction coefficients to be applied at each level *)
(* it is the information which will finally by used by "drawTree" *)
(* trl is a list of triples (l,r,c) where *)
(* l is the horiz distance between tree root and leftmost node *)
(* at the given level *)
(* r is the horiz distance between tree root and rightmost node *)
(* at the given level *)
(* c is the ration between distance between brother node at the *)
given level and the same distance at level 1
For a given binary tree t= N(t1,t2 ) the function " computeCoefList "
first computes ( cl1,trl1 ) and ( cl2,trl2 ) for t1 and t2
(* Then cl1 and cl2 are combined by taken the minimum coefficient at each *)
(* level giving a new list cl *)
Then , using this new list , trl1 and trl2 are recomputed by
(* function "recomputeTriples" giving trl1' and trl2' *)
(* Then, the function "computeHeadCoef" computes for each level what *)
(* should be the reduction coefficient to be applied at the root of tree *)
(* in order to have the rightmost node of t1 and the leftmost node of t2 *)
(* be separated by distance c and takes the minimum of all these *)
(* coefficients *)
(* The method is the following: *)
(* If t1 and t2 where drawn using cl1 and cl2, then the distance between *)
their roots should be at least r1 - l2+c for t1 and t2 to behave nicely
(* at the given level. Therefore the root coefficient should be *)
(* 1/(r1-l2+c) *)
The function " makeTreePicture " uses the final list by
dividing by their product dMin , the minimal distance between
nodes to obtain the distance between the two sons of the root
(* minl : pointwise minimum of a list of float lists.
   The result is as long as the longest input list; at each position the
   minimum over all the lists that still have an element there is kept
   (exhausted lists simply stop contributing).  Used by computeCoefList to
   merge the per-level coefficient lists of brother subtrees. *)
let rec minl = function
[] -> []
| [l] -> l
| ([]::ll) -> minl ll
| ((x::l)::ll) ->
let c,ll' = list_it
(fun x y -> match x,y with
[] , b -> b | (x::l), (c,ll) -> min_float x c,(l::ll))
ll (x,[l])
in c::minl ll';;
(* recomputeTriples cl trl : re-express a list of (left,right,coef) extent
   triples (one per level, see the comment block above) relative to a new
   coefficient list cl.  Level by level, n is the running product of the new
   coefficients; each stored extent is rescaled by n/.c, i.e. its old
   cumulated coefficient c is replaced by the new one n. *)
let recomputeTriples cl =
let rec recomp (n,cl) =
function [] -> []
| ((l,r,c)::ll) -> (l*.n/.c,r*.n/.c,n):: recomp (n*.(hd cl),tl cl) ll
in recomp (hd cl,tl cl);;
(* computeHeadCoef (trl1,trl2) : the root coefficient (capped at 1.0) that
   keeps two adjacent subtrees from colliding.  Per level, r1 is how far the
   left subtree reaches to the right, l2 how far the right subtree reaches to
   the left, and c the required separation; 1/(r1-l2+c) is the scale making
   the two fit at that level (see the method described in the comment block
   above), and the minimum over all levels and 1.0 is returned. *)
let computeHeadCoef (trl1,trl2) =
let rec compCoef =
function ([],_) -> []
| (_,[]) -> []
| ((_,r1,c)::ll1,(l2,_,_)::ll2)
-> abs_float(1.0/.(r1-.l2+.c)) :: compCoef (ll1,ll2)
in it_list min_float 1.0 (compCoef (trl1,trl2));;
(* combineTriples coef x (trl1,trl2) : merge the triple lists of two
   adjacent subtrees into the triple list of their common parent.
   coef is the horizontal position of the left subtree's root (the right
   one sits one unit further, at coef+1) and x the scale chosen by
   computeHeadCoef.  Level by level the new extent runs from the left
   list's left edge to the right list's right edge, both shifted into the
   parent's coordinates; when one list is exhausted the survivor is kept
   alone, suitably shifted. *)
let combineTriples coef x (trl1,trl2) =
let rec comb =
function [],[] -> []
| (l1,r1,c)::ll1 , [] -> (coef/.x+.l1,coef/.x+.r1,c) :: comb(ll1,[])
| [] , (l2,r2,c)::ll2 ->
((coef+.1.0)/.x+.l2,(coef+.1.0)/.x+.r2,c) :: comb([],ll2)
| (l1,r1,c)::ll1 , (l2,r2,_)::ll2 ->
(coef/.x+.l1,(coef+.1.0)/.x+.r2,c) :: comb(ll1,ll2)
in (*(-.0.5,0.5,1.0)::*)comb (trl1,trl2) ;;
(* Scale the three components of an extent triple by the same factor. *)
let scaleTriple k (l, r, c) = (k *. l, k *. r, k *. c);;
(* computeCoefList t : the per-level reduction coefficient list for t,
   following the method described in the long comment block above.
   comp returns, for every subtree, the pair (cl,trl) where cl is its
   coefficient list and trl its (left,right,coef) extent triples; leaves
   and Nil need no reduction ([1.0]).  For a node, the sons' coefficient
   lists are merged with minl, their triples re-expressed against the merged
   list, and then folded two by two, each step keeping the smallest scale x
   allowed by computeHeadCoef.  Only the coefficient list of the root is
   kept in the end. *)
let computeCoefList t =
let rec comp = function
Nil -> [1.0],[]
| Node {info=_;sons=[];label=_} -> [1.0],[]
| Node {info=_;sons=[t1];label=_}
-> let (cl,trl) = comp t1
in (1.0::cl,(0.0,0.0,1.0)::trl)
| Node {info=_;sons=sons;label=_}
-> let trls = map comp sons in
let cl = minl(map fst trls)
and coef = float_of_int(1-list_length trls)/.2.0 in
(* fold the sons' triple lists pairwise, left to right *)
let rec compSons (pos,x) = function
[] -> failwith "tree__compSons : empty list"
| [trl1] -> (1.0::x::tl cl,
(coef,-.coef,1.0)::map (scaleTriple x) trl1)
| (trl1::trl2::ll) ->
let x' = (computeHeadCoef (trl1,trl2))
in compSons (pos+.1.0,(min_float x x'))
(combineTriples pos x' (trl1,trl2)::ll)
in compSons (coef,1.0) (map (compose (recomputeTriples cl) snd) trls)
in (compose fst comp) t;;
(* Draw tree t with drawTree, deriving the per-level coefficient list from
   the tree itself and the level-1 sibling distance from dMin, the minimal
   distance wanted between neighbouring nodes anywhere in the tree. *)
let makeTreePictureGen opts drn (height,dMin,root) t =
  let cl = computeCoefList t in
  let product = it_list mult_float 1.0 cl in
  drawTree opts drn (height, dMin /. product, cl, root) t;;
(* makeTreePictureGen specialised to an empty option list. *)
let makeTreePicture drn hdr t = makeTreePictureGen [] drn hdr t;;
(* treeLabelPos (pos,sep) (p1,p2) : anchor point for a label on the edge
   from p1 to p2.  The point sits at fraction pos of the way along the edge
   (clamped to 1.0), offset horizontally by sep towards p2's side; note the
   extra +.0.1 applies to the x interpolation only (the matching +.0.1 in
   the y component is disabled below). *)
let treeLabelPos (pos,sep) (p1,p2) =
({xc=p1.xc+.(p2.xc-.p1.xc)*.(min_float (pos+.0.1) 1.0)+.
(if le_float p1.xc p2.xc then sep else -.sep);
yc=p1.yc+.(p2.yc-.p1.yc)*.(min_float (pos(* +.0.1 *)) 1.0)});;
(* treeGen opts t : draw a tree of pictures with defaults derived from the
   tree itself.  The level height and sibling distance come from the
   tallest/widest node picture in t, scaled by the "treeHeightCoef" and
   "treeWidthCoef" options; labels are centred along their edge according to
   the "treeLabelPos" and "sep" options.  Node pictures are used as given
   (identity drawing function). *)
let treeGen opts t =
let height,width =
tree_it (fun p (h,w) _ -> let fr=picture_frame p in
max_float (fr.ymax-.fr.ymin) h,max_float (fr.xmax-.fr.xmin) w)
t (0.0,0.0) in
let h = theFloat opts "treeHeightCoef" 1.0*.2.0*.height
and w = theFloat opts "treeWidthCoef" 1.0*.1.65*.width in
let pos = theFloat opts "treeLabelPos" 0.5
and sep = theFloat opts "sep" 1.0*.0.2*.w in
makeTreePictureGen opts
((fun x->x),
(fun arg x -> center_picture x (treeLabelPos(pos,sep) arg)))
(h,w,origin) t;;
(* treeGen with no options. *)
let tree = treeGen [];;
(* As makeTreePictureGen, but every node is first translated with drn and
   the result is laid out by drawProofTree (premises above conclusions,
   separated by an inference bar; sep is the bar/label gap). *)
let makeProofTreePictureGen opts drn sep (height,dMin,root) t' =
  let t = tree_map drn t' in
  let cl = computeCoefList t in
  let product = it_list mult_float 1.0 cl in
  drawProofTree opts sep (height, dMin /. product, cl, root) t;;
(* makeProofTreePictureGen specialised to an empty option list. *)
let makeProofTreePicture drn sep hdr t = makeProofTreePictureGen [] drn sep hdr t;;
(* proofTreeGen opts t : draw a proof tree of pictures with defaults derived
   from the tree itself: level height from the tallest node picture, width
   from the widest node picture plus the widest rule label, and the
   bar/label gap from the "sep" option. *)
let proofTreeGen opts t =
let height,width,lWidth =
tree_it (fun p (h,w,lw) lab -> let fr=picture_frame p in
max_float (fr.ymax-.fr.ymin) h,max_float (fr.xmax-.fr.xmin) w,
max_float (match lab with Nolabel -> 0.0
| Label(p) -> let fr = picture_frame p in (fr.xmax-.fr.xmin)) lw)
t (0.0,0.0,0.0) in
let sep = theFloat opts "sep" 1.0*.0.2*.width in
makeProofTreePictureGen opts (fun x->x) sep
(1.5*.height,max_float (1.1*.width) (width+.lWidth+.sep),origin) t;;
(* proofTreeGen with no options. *)
let proofTree = proofTreeGen [];;
(* treeGraphGen opts name t : turn a tree whose node infos are strings into
   a point graph (PGraph) called name, positioning every node with the same
   coefficient machinery as the drawing functions: one unit of height per
   level, root at the origin.  When the "treeLines" option is set, the
   parent-child segments are attached as graph lines (LGraph) through
   addLines; otherwise the bare point graph is returned.  Raises
   Failure "treeGraph" when a node/coefficient pattern cannot be matched. *)
let treeGraphGen = fun opts name t ->
let coefList = computeCoefList t in
let d = 1./.list_it mult_float coefList 1. in
(* position of every node, indexed by its info string *)
let rec pointsOfTree =
(fun x y -> match x,y with
Nil , b -> ([] : (string * point ) list)
| (Node{info=name;sons=sons;label=lab}) ,
((d',(c::coefList)),({xc=x;yc=y} as ori)) ->
let pos = float_of_int(1-list_length sons)/.2.0 and d = d'*.c in
(name,ori)::it_list append []
(snd(fold (fun (d,p) t ->
(d,(p+.1.0)),pointsOfTree t ((d,coefList),{xc=x+.p*.d;yc=y-.1.0}))
(d,pos) sons))
| _, _ -> raise (Failure "treeGraph")) in
let points = pointsOfTree t ((d,coefList),origin) in
let g1 = PGraph(Graph name,points) in
if theOption opts "treeLines" then
(* chains of parent-child points, one chain per branch of the tree *)
let rec lines =
function Nil -> []
| (Node{info=a;sons=sons;label=lab}) ->
it_list append [] (map (function Nil -> [] | (Node{info=b;sons=ssons;label=slabs} as t) ->
(match lines t with
[] -> [[assoc a points;assoc b points]]
| (l1::l) -> (assoc a points::l1)::l)) sons)
in addLines (LGraph(g1,[opts,map (fun l -> [Seg l]) (lines t)]))
else addLines g1;;
(* treeGraphGen with no options. *)
let treeGraph name t = treeGraphGen [] name t;;
Ex
let f s = Node{info = s;sons=[];label = Nolabel } ; ;
let b s l = Node{info = s;sons = l;label = Nolabel } ; ;
let t1 = b " a " [ b " e " [ f"f"];b " b " [ b " d " [ b " c " [ f " h " ] ] ] ;
b " b " [ b " d " [ b " c " [ f " h " ] ] ] ] ; ;
eps_file(makeTreePicture(circPict o mkText,(fun _ x ) ) ( 30.,30.,origin ) t1 ) " t1 " ; ;
let f s = Node{info = 12 . s;sons=[];label = Label(mkText(string_of_int s ) ) } ; ;
let b s l = Node{info = drawIntNode 12 . s;sons = l;label = Label(mkText(string_of_int s ) ) } ; ;
let t2 = b 30 [ b 12 [ b 8 [ b 4 [ f 1;f 2;f 6;f 7];f 10 ] ;
b 20 [ b 16 [ f 14;f 18];b 24 [ f 22;f 25;b 28 [ f 26;Nil]]]];f 31 ;
b 46 [ b 38 [ b 34 [ f 32;f 36];b 42 [ f 40;f 44 ] ] ;
b 50 [ f 48;b 52 [ Nil;f 54 ] ] ] ] ; ;
eps_file(scale_picture ( 0.8,0.8 ) ( treeGen [ ] t2 ) ) " t2 " ; ;
let t3 = b 4 [ f 2;b 40 [ b 20 [ b 12 [ b 8 [ f 6;f 10];b 16 [ f 14;f 18 ] ] ;
b 30 [ f 24;f 34 ] ] ;
f 21 ] ] ; ;
eps_file(scale_picture ( 0.8,0.8 ) ( treeGen [ ] t2 ) ) " t3 " ; ;
let =
( let f s = Node{info=(rotate deg ( mkText s));sons=[Nil];label = ( mkText s ) ) }
and b s l = Node{info=(rotate deg ( mkText s));sons = l;label = ( mkText s ) ) } in
b " e " [ f " 0 " ; b " 1 "
[ b " 10 " [ f " 100 " ; f " 101 " ] ; f"11";f"11 " ;
b " 12 " [ f"120 " ; f"121 " ] ] ] ) ; ;
eps_file(translate(100.,100 . ) ( rotate ( 90 . ) ( proofTree ( ( -.90 . ) ) ) ) )
" " ; ;
let t5 = b"a"[b"b"[f"c";b"d"[Nil;Nil;f"e"];f"f"];b"g"[f"h";Nil;Nil ] ] ; ;
eps_file(tree ( tree_map ( circPict o mkText ) t5 ) ) " t5 " ; ;
( treeGraphGen[option " treeLines " ] " t " t5
[ GLine([],["h";"e " ] ) ] )
( tree_it ( fun a l _ - > ( a , circPict(mkText a))::l ) t5 [ ] ) ) " t6 " ; ;
let f s = Node{info=s;sons=[];label=Nolabel};;
let b s l = Node{info=s;sons=l;label=Nolabel};;
let t1 = b "a" [b "e" [f"f"];b "b" [b "d" [b "c" [f "h"]]];
b "b" [b "d" [b "c" [f "h"]]]];;
eps_file(makeTreePicture(circPict o mkText,(fun _ x ->x)) (30.,30.,origin) t1) "t1";;
let f s = Node{info=drawIntNode 12. s;sons=[];label=Label(mkText(string_of_int s))};;
let b s l = Node{info=drawIntNode 12. s;sons=l;label=Label(mkText(string_of_int s))};;
let t2 = b 30 [b 12 [b 8 [b 4 [f 1;f 2;f 6;f 7];f 10];
b 20 [b 16 [f 14;f 18];b 24 [f 22;f 25;b 28 [f 26;Nil]]]];f 31;
b 46 [b 38 [b 34 [f 32;f 36];b 42 [f 40;f 44]];
b 50 [f 48;b 52 [Nil;f 54]]]];;
eps_file(scale_picture (0.8,0.8) (treeGen[] t2)) "t2";;
let t3 = b 4 [f 2;b 40 [b 20 [b 12 [b 8 [f 6;f 10];b 16 [f 14;f 18]];
b 30 [f 24;f 34]];
f 21]];;
eps_file(scale_picture (0.8,0.8) (treeGen[] t2)) "t3";;
let t4 deg =
(let f s = Node{info=(rotate deg (mkText s));sons=[Nil];label=Label(rotate deg (mkText s))}
and b s l = Node{info=(rotate deg (mkText s));sons=l;label=Label(rotate deg (mkText s))} in
b "e" [f "0"; b "1"
[b "10" [f "100"; f "101"]; f"11";f"11";
b "12" [f"120"; f"121"]]]);;
eps_file(translate(100.,100.) (rotate (90.) (proofTree (t4 (-.90.)))))
"t4";;
let t5 = b"a"[b"b"[f"c";b"d"[Nil;Nil;f"e"];f"f"];b"g"[f"h";Nil;Nil]];;
eps_file(tree (tree_map (circPict o mkText) t5)) "t5";;
eps_file(makeGraphPictureGen
(treeGraphGen[option "treeLines"] "t" t5
[GLine([],["h";"e"])])
(tree_it (fun a l _ -> (a,circPict(mkText a))::l) t5 [])) "t6";;
*)
| null | https://raw.githubusercontent.com/mauny/the-functional-approach-to-programming/1ec8bed5d33d3a67bbd67d09afb3f5c3c8978838/cl-75/MLGRAPH.DIR/tree.ml | ocaml |
Projet Formel
***********************************************************************
LIENS
***********************************************************************
tree.ml Tree drawing algorithms for mlPicTex
Creation: 30/6/92
This file contains functions to draw trees
The main function is makeTreePicture
function drawTree requires parameters
drn : the function for drawing nodes (drawTree assumes that this
function operates in "fill" mode)
function operates in "fill" mode)
h: the height (distance between tree levels)
cl : the coefficient list which gives for each level the ratio
between distance at level (n+1) and at level n
pt: the point where the root should be placed
function drawProofTree requires parameters
h: the height (distance between tree levels)
cl : the coefficient list which gives for each level the ratio
between distance at level (n+1) and at level n
pt: the point where the root should be placed
{xc=x;yc=y+.h2};{xc=x;yc=y+.h2-.10.0*.LS.linewidth};{xc=x;yc=y+.h2};
How to compute the coefficient list
The function "computeCoefList" recursively computes for each subtree
cl is the list of reduction coefficients to be applied at each level
it is the information which will finally by used by "drawTree"
trl is a list of triples (l,r,c) where
l is the horiz distance between tree root and leftmost node
at the given level
r is the horiz distance between tree root and rightmost node
at the given level
c is the ration between distance between brother node at the
Then cl1 and cl2 are combined by taken the minimum coefficient at each
level giving a new list cl
function "recomputeTriples" giving trl1' and trl2'
Then, the function "computeHeadCoef" computes for each level what
should be the reduction coefficient to be applied at the root of tree
in order to have the rightmost node of t1 and the leftmost node of t2
be separated by distance c and takes the minimum of all these
coefficients
The method is the following:
If t1 and t2 where drawn using cl1 and cl2, then the distance between
at the given level. Therefore the root coefficient should be
1/(r1-l2+c)
(-.0.5,0.5,1.0)::
+.0.1 | CAML - light :
45 rue d'Ulm
75005 PARIS
France
$ I d : tree.mlp , v 1.1 1997/08/14 11:34:25
Guy Cousineau &
Tue Jun 30 1992
Author :
Updates : 4/5/92 ( ) btrees = > trees and proof trees
#open "MLgraph";;
#open "compatibility";;
#open "prelude";;
#open "geometry";;
#open "frames";;
#open "paint";;
#open "fonts";;
#open "texts";;
#open "sketches";;
#open "pictures";;
#open "option";;
#open "graph";;
let fold f =
let rec fold_f a1 = function
[] -> a1,[]
| (b1::bl) -> let a2,c2 = f a1 b1 in let a,cl = fold_f a2 bl in a,c2::cl
in fold_f;;
let rec tree_it f t x =
match t with
Nil -> x
| Node {info=a;sons=sons;label=lab}
-> list_it (tree_it f) sons (f a x lab);;
let rec tree_map f t =
match t with
Nil -> Nil
| Node {info=a;sons=sons;label=lab}
-> Node{info=f a;sons=map (tree_map f) sons;label=lab};;
drl : the function for drawing labels ( drawTree assumes that this
d : the distance between 2 brother nodes at level 1
let drawTree opts (drn,drl) (h,d,cl,pt) =
let draw = sketchGen opts d
in let rec drawR (d,cl,({xc=x; yc=y} as pt),ori) = function
Nil -> failwith "Cannot draw an empty tree"
| Node{info=a;sons=[];label=lab}
-> (center_picture (drn a) pt::
(match lab with Nolabel -> []|Label b -> [drl (ori,pt) b]))
| Node{info=a;sons=sons;label=lab}
-> let d=d*.(hd cl) and coef = float_of_int(1-list_length sons)/.2.0
in let pts,_ = list_it
(fun t (l,c) -> ((t,{xc=x+.c*.d;yc=y-.h})::l),c+.1.0)
(rev sons) ([],coef)
in
it_list append [] (map (function (Nil,_) -> []
| (t1,pt1) ->
draw (make_sketch [Seg [pt;pt1]])::
drawR (d,tl cl,pt1,pt) t1) pts)
@(center_picture (drn a) pt::
(match lab with Nolabel -> []|Label b -> [drl (ori,pt) b]))
in compose group_pictures (drawR (d,cl,pt,{xc=pt.xc;yc=pt.yc+.h}));;
d : the distance between 2 brother nodes at level 1
let widthOfInfo = function
Nil -> raise (Failure "widthOfInfo")
| Node{info=a;sons=sons;label=lab} -> let f=picture_frame a in (f.xmax-.f.xmin);;
let drawProofTree opts sep (h,d,cl,pt) tree =
let pos = lt_float 0. (theFloat opts "treeLabelPos" 0.5)
and draw = sketchGen opts ((h+.d)/.2.0)
and h2 = h/.2.0
in let rec drawR (d',cl) (t,({xc=x; yc=y} as pt)) = match t with
Nil -> failwith "Cannot draw an empty tree"
| Node{info=a;sons=[];label=lab}
-> [center_picture a pt]
| Node{info=a;sons=sons;label=lab}
-> let d=d'*.(hd cl) and coef = float_of_int(list_length sons-1)/.2.0
in match fst(list_it
(fun arg1 arg2 -> match (arg1,arg2) with
Nil ,(l,c) -> l,c-.1.0
| t , (l,c) -> ((t,{xc=x+.c*.d;yc=y+.h})::l),c-.1.0)
sons ([],coef))
with [] -> [center_picture a pt]
| pts ->
let lw = widthOfInfo t/.2.0 in
let wLeft = max_float (coef*.d+.widthOfInfo (fst(hd pts))/.2.0) lw
and wRight = max_float (coef*.d+.widthOfInfo (fst(hd (rev pts)))/.2.0) lw in
it_list append [] (map (drawR (d,tl cl)) pts)
@(draw
(make_sketch [Seg [
{xc=x-.wLeft;yc=y+.h2};
{xc=x+.wRight;yc=y+.h2}]])::
center_picture a pt::
(match lab with Nolabel -> []|Label b ->
[center_picture b
{xc=x+.(if pos then wRight+.sep else -.(wLeft+.sep));
yc=y+.h2}]))
in group_pictures(drawR (d,cl) (tree,pt));;
Various functions for drawing nodes
let drawNode r a =
make_fill_picture ( Nzfill , white )
( make_sketch [ Arc(origin , r,0.0,360.0 ) ] ) ; ;
let drawStringNode r a =
let s= center_picture
( make_text_picture ( make_font Helvetica r ) black a )
origin
and f= make_fill_picture ( Nzfill , white )
( make_sketch [ Arc(origin , r,0.0,360.0 ) ] )
and c= make_draw_picture ( { linewidth= r*.0.1;linecap = Buttcap ;
linejoin = Miterjoin;dashpattern= [ ] }
, black )
( make_sketch [ Arc(origin , r,0.0,360.0 ) ] )
in group_pictures [ f;c;s ] ; ;
let r n = drawStringNode r ( string_of_int n ) ; ;
let drawNode r a =
make_fill_picture (Nzfill,white)
(make_sketch [Arc(origin,r,0.0,360.0)]);;
let drawStringNode r a =
let s= center_picture
(make_text_picture (make_font Helvetica r) black a)
origin
and f= make_fill_picture (Nzfill,white)
(make_sketch [Arc(origin,r,0.0,360.0)])
and c= make_draw_picture ({linewidth= r*.0.1;linecap=Buttcap;
linejoin=Miterjoin;dashpattern=[]}
,black)
(make_sketch [Arc(origin,r,0.0,360.0)])
in group_pictures [f;c;s];;
let drawIntNode r n = drawStringNode r (string_of_int n);;
*)
an information which has shape ( cl , )
given level and the same distance at level 1
For a given binary tree t= N(t1,t2 ) the function " computeCoefList "
first computes ( cl1,trl1 ) and ( cl2,trl2 ) for t1 and t2
Then , using this new list , trl1 and trl2 are recomputed by
their roots should be at least r1 - l2+c for t1 and t2 to behave nicely
The function " makeTreePicture " uses the final list by
dividing by their product dMin , the minimal distance between
nodes to obtain the distance between the two sons of the root
let rec minl = function
[] -> []
| [l] -> l
| ([]::ll) -> minl ll
| ((x::l)::ll) ->
let c,ll' = list_it
(fun x y -> match x,y with
[] , b -> b | (x::l), (c,ll) -> min_float x c,(l::ll))
ll (x,[l])
in c::minl ll';;
let recomputeTriples cl =
let rec recomp (n,cl) =
function [] -> []
| ((l,r,c)::ll) -> (l*.n/.c,r*.n/.c,n):: recomp (n*.(hd cl),tl cl) ll
in recomp (hd cl,tl cl);;
let computeHeadCoef (trl1,trl2) =
let rec compCoef =
function ([],_) -> []
| (_,[]) -> []
| ((_,r1,c)::ll1,(l2,_,_)::ll2)
-> abs_float(1.0/.(r1-.l2+.c)) :: compCoef (ll1,ll2)
in it_list min_float 1.0 (compCoef (trl1,trl2));;
let combineTriples coef x (trl1,trl2) =
let rec comb =
function [],[] -> []
| (l1,r1,c)::ll1 , [] -> (coef/.x+.l1,coef/.x+.r1,c) :: comb(ll1,[])
| [] , (l2,r2,c)::ll2 ->
((coef+.1.0)/.x+.l2,(coef+.1.0)/.x+.r2,c) :: comb([],ll2)
| (l1,r1,c)::ll1 , (l2,r2,_)::ll2 ->
(coef/.x+.l1,(coef+.1.0)/.x+.r2,c) :: comb(ll1,ll2)
let scaleTriple x (l1,r1,c) = (l1*.x,r1*.x,c*.x);;
let computeCoefList t =
let rec comp = function
Nil -> [1.0],[]
| Node {info=_;sons=[];label=_} -> [1.0],[]
| Node {info=_;sons=[t1];label=_}
-> let (cl,trl) = comp t1
in (1.0::cl,(0.0,0.0,1.0)::trl)
| Node {info=_;sons=sons;label=_}
-> let trls = map comp sons in
let cl = minl(map fst trls)
and coef = float_of_int(1-list_length trls)/.2.0 in
let rec compSons (pos,x) = function
[] -> failwith "tree__compSons : empty list"
| [trl1] -> (1.0::x::tl cl,
(coef,-.coef,1.0)::map (scaleTriple x) trl1)
| (trl1::trl2::ll) ->
let x' = (computeHeadCoef (trl1,trl2))
in compSons (pos+.1.0,(min_float x x'))
(combineTriples pos x' (trl1,trl2)::ll)
in compSons (coef,1.0) (map (compose (recomputeTriples cl) snd) trls)
in (compose fst comp) t;;
let makeTreePictureGen opts drn (height,dMin,root) t =
let coefList = computeCoefList t
in let totalCoef = it_list mult_float 1.0 coefList
in let d= dMin/.totalCoef
in drawTree opts drn (height,d,coefList,root) t;;
let makeTreePicture drn hdr t = makeTreePictureGen [] drn hdr t;;
let treeLabelPos (pos,sep) (p1,p2) =
({xc=p1.xc+.(p2.xc-.p1.xc)*.(min_float (pos+.0.1) 1.0)+.
(if le_float p1.xc p2.xc then sep else -.sep);
let treeGen opts t =
let height,width =
tree_it (fun p (h,w) _ -> let fr=picture_frame p in
max_float (fr.ymax-.fr.ymin) h,max_float (fr.xmax-.fr.xmin) w)
t (0.0,0.0) in
let h = theFloat opts "treeHeightCoef" 1.0*.2.0*.height
and w = theFloat opts "treeWidthCoef" 1.0*.1.65*.width in
let pos = theFloat opts "treeLabelPos" 0.5
and sep = theFloat opts "sep" 1.0*.0.2*.w in
makeTreePictureGen opts
((fun x->x),
(fun arg x -> center_picture x (treeLabelPos(pos,sep) arg)))
(h,w,origin) t;;
let tree = treeGen [];;
let makeProofTreePictureGen opts drn sep (height,dMin,root) t' =
let t = tree_map drn t' in
let coefList = computeCoefList t
in let totalCoef = it_list mult_float 1.0 coefList
in let d= dMin/.totalCoef
in drawProofTree opts sep (height,d,coefList,root) t;;
let makeProofTreePicture drn sep hdr t = makeProofTreePictureGen [] drn sep hdr t;;
(* [proofTreeGen opts t] draws [t] as a proof tree.  The fold over [t]
   computes three maxima: node picture height, node picture width, and
   label picture width (labels tagged [Nolabel] contribute 0.0).  The
   level height is 1.5*.height and the horizontal spacing is the larger of
   1.1*.width and width+.lWidth+.sep, so that labels never overlap their
   neighbouring subtree.  "sep" defaults to 1.0 and is scaled by
   0.2*.width. *)
let proofTreeGen opts t =
 let height,width,lWidth =
 tree_it (fun p (h,w,lw) lab -> let fr=picture_frame p in
 max_float (fr.ymax-.fr.ymin) h,max_float (fr.xmax-.fr.xmin) w,
 max_float (match lab with Nolabel -> 0.0
 | Label(p) -> let fr = picture_frame p in (fr.xmax-.fr.xmin)) lw)
 t (0.0,0.0,0.0) in
 let sep = theFloat opts "sep" 1.0*.0.2*.width in
 makeProofTreePictureGen opts (fun x->x) sep
 (1.5*.height,max_float (1.1*.width) (width+.lWidth+.sep),origin) t;;
(* [proofTree] is [proofTreeGen] with default options. *)
let proofTree = proofTreeGen [];;
(* [treeGraphGen opts name t] converts a tree [t] whose nodes carry string
   names into a named graph of points laid out with the same coefficient
   scheme as the tree-drawing functions: [pointsOfTree] walks the tree,
   consuming one coefficient per level, placing each node's sons one unit
   below it ([yc=y-.1.0]) and spreading them horizontally around the parent
   (starting offset [pos], step [d]).  If the "treeLines" option is set,
   polyline segments linking each node to its sons are attached via
   [addLines]/[LGraph]; otherwise only the point graph is returned.
   Raises [Failure "treeGraph"] on a shape the match does not cover
   (e.g. more nodes than coefficients). *)
let treeGraphGen = fun opts name t ->
 let coefList = computeCoefList t in
 let d = 1./.list_it mult_float coefList 1. in
 let rec pointsOfTree =
 (fun x y -> match x,y with
 Nil , b -> ([] : (string * point ) list)
 | (Node{info=name;sons=sons;label=lab}) ,
 ((d',(c::coefList)),({xc=x;yc=y} as ori)) ->
 (* pos centres the sons around the parent's x coordinate. *)
 let pos = float_of_int(1-list_length sons)/.2.0 and d = d'*.c in
 (name,ori)::it_list append []
 (snd(fold (fun (d,p) t ->
 (d,(p+.1.0)),pointsOfTree t ((d,coefList),{xc=x+.p*.d;yc=y-.1.0}))
 (d,pos) sons))
 | _, _ -> raise (Failure "treeGraph")) in
 let points = pointsOfTree t ((d,coefList),origin) in
 let g1 = PGraph(Graph name,points) in
 if theOption opts "treeLines" then
 (* Build one polyline per root-to-node chain; a node with no drawn sons
    contributes the single segment from its parent. *)
 let rec lines =
 function Nil -> []
 | (Node{info=a;sons=sons;label=lab}) ->
 it_list append [] (map (function Nil -> [] | (Node{info=b;sons=ssons;label=slabs} as t) ->
 (match lines t with
 [] -> [[assoc a points;assoc b points]]
 | (l1::l) -> (assoc a points::l1)::l)) sons)
 in addLines (LGraph(g1,[opts,map (fun l -> [Seg l]) (lines t)]))
 else addLines g1;;
(* Convenience wrapper: same as [treeGraphGen] with no options. *)
let treeGraph name t = treeGraphGen [] name t;;
Ex
let f s = Node{info = s;sons=[];label = Nolabel } ; ;
let b s l = Node{info = s;sons = l;label = Nolabel } ; ;
let t1 = b " a " [ b " e " [ f"f"];b " b " [ b " d " [ b " c " [ f " h " ] ] ] ;
b " b " [ b " d " [ b " c " [ f " h " ] ] ] ] ; ;
eps_file(makeTreePicture(circPict o mkText,(fun _ x ) ) ( 30.,30.,origin ) t1 ) " t1 " ; ;
let f s = Node{info = 12 . s;sons=[];label = Label(mkText(string_of_int s ) ) } ; ;
let b s l = Node{info = drawIntNode 12 . s;sons = l;label = Label(mkText(string_of_int s ) ) } ; ;
let t2 = b 30 [ b 12 [ b 8 [ b 4 [ f 1;f 2;f 6;f 7];f 10 ] ;
b 20 [ b 16 [ f 14;f 18];b 24 [ f 22;f 25;b 28 [ f 26;Nil]]]];f 31 ;
b 46 [ b 38 [ b 34 [ f 32;f 36];b 42 [ f 40;f 44 ] ] ;
b 50 [ f 48;b 52 [ Nil;f 54 ] ] ] ] ; ;
eps_file(scale_picture ( 0.8,0.8 ) ( treeGen [ ] t2 ) ) " t2 " ; ;
let t3 = b 4 [ f 2;b 40 [ b 20 [ b 12 [ b 8 [ f 6;f 10];b 16 [ f 14;f 18 ] ] ;
b 30 [ f 24;f 34 ] ] ;
f 21 ] ] ; ;
eps_file(scale_picture ( 0.8,0.8 ) ( treeGen [ ] t2 ) ) " t3 " ; ;
let =
( let f s = Node{info=(rotate deg ( mkText s));sons=[Nil];label = ( mkText s ) ) }
and b s l = Node{info=(rotate deg ( mkText s));sons = l;label = ( mkText s ) ) } in
b " e " [ f " 0 " ; b " 1 "
[ b " 10 " [ f " 100 " ; f " 101 " ] ; f"11";f"11 " ;
b " 12 " [ f"120 " ; f"121 " ] ] ] ) ; ;
eps_file(translate(100.,100 . ) ( rotate ( 90 . ) ( proofTree ( ( -.90 . ) ) ) ) )
" " ; ;
let t5 = b"a"[b"b"[f"c";b"d"[Nil;Nil;f"e"];f"f"];b"g"[f"h";Nil;Nil ] ] ; ;
eps_file(tree ( tree_map ( circPict o mkText ) t5 ) ) " t5 " ; ;
( treeGraphGen[option " treeLines " ] " t " t5
[ GLine([],["h";"e " ] ) ] )
( tree_it ( fun a l _ - > ( a , circPict(mkText a))::l ) t5 [ ] ) ) " t6 " ; ;
let f s = Node{info=s;sons=[];label=Nolabel};;
let b s l = Node{info=s;sons=l;label=Nolabel};;
let t1 = b "a" [b "e" [f"f"];b "b" [b "d" [b "c" [f "h"]]];
b "b" [b "d" [b "c" [f "h"]]]];;
eps_file(makeTreePicture(circPict o mkText,(fun _ x ->x)) (30.,30.,origin) t1) "t1";;
let f s = Node{info=drawIntNode 12. s;sons=[];label=Label(mkText(string_of_int s))};;
let b s l = Node{info=drawIntNode 12. s;sons=l;label=Label(mkText(string_of_int s))};;
let t2 = b 30 [b 12 [b 8 [b 4 [f 1;f 2;f 6;f 7];f 10];
b 20 [b 16 [f 14;f 18];b 24 [f 22;f 25;b 28 [f 26;Nil]]]];f 31;
b 46 [b 38 [b 34 [f 32;f 36];b 42 [f 40;f 44]];
b 50 [f 48;b 52 [Nil;f 54]]]];;
eps_file(scale_picture (0.8,0.8) (treeGen[] t2)) "t2";;
let t3 = b 4 [f 2;b 40 [b 20 [b 12 [b 8 [f 6;f 10];b 16 [f 14;f 18]];
b 30 [f 24;f 34]];
f 21]];;
eps_file(scale_picture (0.8,0.8) (treeGen[] t2)) "t3";;
let t4 deg =
(let f s = Node{info=(rotate deg (mkText s));sons=[Nil];label=Label(rotate deg (mkText s))}
and b s l = Node{info=(rotate deg (mkText s));sons=l;label=Label(rotate deg (mkText s))} in
b "e" [f "0"; b "1"
[b "10" [f "100"; f "101"]; f"11";f"11";
b "12" [f"120"; f"121"]]]);;
eps_file(translate(100.,100.) (rotate (90.) (proofTree (t4 (-.90.)))))
"t4";;
let t5 = b"a"[b"b"[f"c";b"d"[Nil;Nil;f"e"];f"f"];b"g"[f"h";Nil;Nil]];;
eps_file(tree (tree_map (circPict o mkText) t5)) "t5";;
eps_file(makeGraphPictureGen
(treeGraphGen[option "treeLines"] "t" t5
[GLine([],["h";"e"])])
(tree_it (fun a l _ -> (a,circPict(mkText a))::l) t5 [])) "t6";;
*)
|
a41fff5b46557089ea74714c014e1946c2586afb66bcc7d8dd6849e0424297dd | rescript-lang/rescript-compiler | flow_ast_mapper.ml |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
(* Short alias so the rest of this mapper can refer to the Flow AST concisely. *)
module Ast = Flow_ast
(** Apply [map] to the payload of an option.  Returns the ORIGINAL option
    (physically) when the payload comes back physically unchanged, so that
    the [==]-based change detection used throughout this mapper keeps
    maximal sharing. *)
let map_opt : 'node. ('node -> 'node) -> 'node option -> 'node option =
 fun map opt ->
  match opt with
  | None -> opt
  | Some item ->
    let mapped = map item in
    if mapped == item then
      opt
    else
      Some mapped
(** Rebuild step with sharing: map [item] with [map loc]; if the result is
    physically identical, return [same] (the untouched parent), otherwise
    hand the new value to [diff] to build the updated parent. *)
let id_loc : 'node 'a. ('loc -> 'node -> 'node) -> 'loc -> 'node -> 'a -> ('node -> 'a) -> 'a =
 fun map loc item same diff ->
  let mapped = map loc item in
  if mapped == item then same else diff mapped
(** Like {!id_loc} but for a mapping function that takes no location:
    return [same] when [map item] is physically unchanged, else [diff]
    applied to the new value. *)
let id : 'node 'a. ('node -> 'node) -> 'node -> 'a -> ('node -> 'a) -> 'a =
 fun map item same diff ->
  let mapped = map item in
  if mapped == item then same else diff mapped
(** Map the payload of a located node, returning the ORIGINAL
    [(loc, node)] pair (physically) when the payload is unchanged. *)
let map_loc : 'node. ('loc -> 'node -> 'node) -> 'loc * 'node -> 'loc * 'node =
 fun map ((loc, item) as pair) ->
  let mapped = map loc item in
  if mapped == item then pair else (loc, mapped)
(** Map the payload of an optional located node.  The ORIGINAL option is
    returned (physically) when there is no payload or the payload is
    physically unchanged, preserving sharing. *)
let map_loc_opt : 'node. ('loc -> 'node -> 'node) -> ('loc * 'node) option -> ('loc * 'node) option
    =
 fun map opt ->
  match opt with
  | None -> opt
  | Some (loc, item) ->
    let mapped = map loc item in
    if mapped == item then opt else Some (loc, mapped)
(** Map [map] over [lst] left to right, tail-recursively.  Returns the
    ORIGINAL list (physically) when every element maps to a physically
    identical value, so unchanged subtrees stay shared. *)
let map_list map lst =
  let rec loop changed acc = function
    | [] ->
      if changed then
        List.rev acc
      else
        lst
    | item :: rest ->
      let mapped = map item in
      loop (changed || mapped != item) (mapped :: acc) rest
  in
  loop false [] lst
(** Like {!map_list} but [map] may expand an element into zero, one or
    several elements.  The ORIGINAL list is returned (physically) only when
    every element maps to a singleton containing a physically identical
    value; any deletion or expansion forces a rebuild. *)
let map_list_multiple map lst =
  let rec loop changed acc = function
    | [] ->
      if changed then
        List.rev acc
      else
        lst
    | item :: rest ->
      (match map item with
      | [] -> loop true acc rest
      | [mapped] -> loop (changed || mapped != item) (mapped :: acc) rest
      | mapped_items -> loop true (List.rev_append mapped_items acc) rest)
  in
  loop false [] lst
class ['loc] mapper =
object (this)
method program (program : ('loc, 'loc) Ast.Program.t) =
let open Ast.Program in
let (loc, { statements; comments; all_comments }) = program in
let statements' = this#toplevel_statement_list statements in
let comments' = this#syntax_opt comments in
let all_comments' = map_list this#comment all_comments in
if statements == statements' && comments == comments' && all_comments == all_comments' then
program
else
(loc, { statements = statements'; comments = comments'; all_comments = all_comments' })
method statement (stmt : ('loc, 'loc) Ast.Statement.t) =
let open Ast.Statement in
match stmt with
| (loc, Block block) -> id_loc this#block loc block stmt (fun block -> (loc, Block block))
| (loc, Break break) -> id_loc this#break loc break stmt (fun break -> (loc, Break break))
| (loc, ClassDeclaration cls) ->
id_loc this#class_declaration loc cls stmt (fun cls -> (loc, ClassDeclaration cls))
| (loc, Continue cont) -> id_loc this#continue loc cont stmt (fun cont -> (loc, Continue cont))
| (loc, Debugger dbg) -> id_loc this#debugger loc dbg stmt (fun dbg -> (loc, Debugger dbg))
| (loc, DeclareClass stuff) ->
id_loc this#declare_class loc stuff stmt (fun stuff -> (loc, DeclareClass stuff))
| (loc, DeclareExportDeclaration decl) ->
id_loc this#declare_export_declaration loc decl stmt (fun decl ->
(loc, DeclareExportDeclaration decl)
)
| (loc, DeclareFunction stuff) ->
id_loc this#declare_function loc stuff stmt (fun stuff -> (loc, DeclareFunction stuff))
| (loc, DeclareInterface stuff) ->
id_loc this#declare_interface loc stuff stmt (fun stuff -> (loc, DeclareInterface stuff))
| (loc, DeclareModule m) ->
id_loc this#declare_module loc m stmt (fun m -> (loc, DeclareModule m))
| (loc, DeclareTypeAlias stuff) ->
id_loc this#declare_type_alias loc stuff stmt (fun stuff -> (loc, DeclareTypeAlias stuff))
| (loc, DeclareVariable stuff) ->
id_loc this#declare_variable loc stuff stmt (fun stuff -> (loc, DeclareVariable stuff))
| (loc, DeclareModuleExports annot) ->
id_loc this#declare_module_exports loc annot stmt (fun annot ->
(loc, DeclareModuleExports annot)
)
| (loc, DoWhile stuff) ->
id_loc this#do_while loc stuff stmt (fun stuff -> (loc, DoWhile stuff))
| (loc, Empty empty) -> id_loc this#empty loc empty stmt (fun empty -> (loc, Empty empty))
| (loc, EnumDeclaration enum) ->
id_loc this#enum_declaration loc enum stmt (fun enum -> (loc, EnumDeclaration enum))
| (loc, ExportDefaultDeclaration decl) ->
id_loc this#export_default_declaration loc decl stmt (fun decl ->
(loc, ExportDefaultDeclaration decl)
)
| (loc, ExportNamedDeclaration decl) ->
id_loc this#export_named_declaration loc decl stmt (fun decl ->
(loc, ExportNamedDeclaration decl)
)
| (loc, Expression expr) ->
id_loc this#expression_statement loc expr stmt (fun expr -> (loc, Expression expr))
| (loc, For for_stmt) ->
id_loc this#for_statement loc for_stmt stmt (fun for_stmt -> (loc, For for_stmt))
| (loc, ForIn stuff) ->
id_loc this#for_in_statement loc stuff stmt (fun stuff -> (loc, ForIn stuff))
| (loc, ForOf stuff) ->
id_loc this#for_of_statement loc stuff stmt (fun stuff -> (loc, ForOf stuff))
| (loc, FunctionDeclaration func) ->
id_loc this#function_declaration loc func stmt (fun func -> (loc, FunctionDeclaration func))
| (loc, If if_stmt) ->
id_loc this#if_statement loc if_stmt stmt (fun if_stmt -> (loc, If if_stmt))
| (loc, ImportDeclaration decl) ->
id_loc this#import_declaration loc decl stmt (fun decl -> (loc, ImportDeclaration decl))
| (loc, InterfaceDeclaration stuff) ->
id_loc this#interface_declaration loc stuff stmt (fun stuff ->
(loc, InterfaceDeclaration stuff)
)
| (loc, Labeled label) ->
id_loc this#labeled_statement loc label stmt (fun label -> (loc, Labeled label))
| (loc, OpaqueType otype) ->
id_loc this#opaque_type loc otype stmt (fun otype -> (loc, OpaqueType otype))
| (loc, Return ret) -> id_loc this#return loc ret stmt (fun ret -> (loc, Return ret))
| (loc, Switch switch) ->
id_loc this#switch loc switch stmt (fun switch -> (loc, Switch switch))
| (loc, Throw throw) -> id_loc this#throw loc throw stmt (fun throw -> (loc, Throw throw))
| (loc, Try try_stmt) ->
id_loc this#try_catch loc try_stmt stmt (fun try_stmt -> (loc, Try try_stmt))
| (loc, VariableDeclaration decl) ->
id_loc this#variable_declaration loc decl stmt (fun decl -> (loc, VariableDeclaration decl))
| (loc, While stuff) -> id_loc this#while_ loc stuff stmt (fun stuff -> (loc, While stuff))
| (loc, With stuff) -> id_loc this#with_ loc stuff stmt (fun stuff -> (loc, With stuff))
| (loc, TypeAlias stuff) ->
id_loc this#type_alias loc stuff stmt (fun stuff -> (loc, TypeAlias stuff))
| (loc, DeclareOpaqueType otype) ->
id_loc this#opaque_type loc otype stmt (fun otype -> (loc, OpaqueType otype))
method comment (c : 'loc Ast.Comment.t) = c
method syntax_opt
: 'internal. ('loc, 'internal) Ast.Syntax.t option -> ('loc, 'internal) Ast.Syntax.t option
=
map_opt this#syntax
method syntax : 'internal. ('loc, 'internal) Ast.Syntax.t -> ('loc, 'internal) Ast.Syntax.t =
fun attached ->
let open Ast.Syntax in
let { leading; trailing; internal } = attached in
let leading' = map_list this#comment leading in
let trailing' = map_list this#comment trailing in
if leading == leading' && trailing == trailing' then
attached
else
{ leading = leading'; trailing = trailing'; internal }
method expression (expr : ('loc, 'loc) Ast.Expression.t) =
let open Ast.Expression in
match expr with
| (loc, Array x) -> id_loc this#array loc x expr (fun x -> (loc, Array x))
| (loc, ArrowFunction x) ->
id_loc this#arrow_function loc x expr (fun x -> (loc, ArrowFunction x))
| (loc, Assignment x) -> id_loc this#assignment loc x expr (fun x -> (loc, Assignment x))
| (loc, Binary x) -> id_loc this#binary loc x expr (fun x -> (loc, Binary x))
| (loc, Call x) -> id_loc this#call loc x expr (fun x -> (loc, Call x))
| (loc, Class x) -> id_loc this#class_expression loc x expr (fun x -> (loc, Class x))
| (loc, Comprehension x) ->
id_loc this#comprehension loc x expr (fun x -> (loc, Comprehension x))
| (loc, Conditional x) -> id_loc this#conditional loc x expr (fun x -> (loc, Conditional x))
| (loc, Function x) -> id_loc this#function_expression loc x expr (fun x -> (loc, Function x))
| (loc, Generator x) -> id_loc this#generator loc x expr (fun x -> (loc, Generator x))
| (loc, Identifier x) -> id this#identifier x expr (fun x -> (loc, Identifier x))
| (loc, Import x) -> id (this#import loc) x expr (fun x -> (loc, Import x))
| (loc, JSXElement x) -> id_loc this#jsx_element loc x expr (fun x -> (loc, JSXElement x))
| (loc, JSXFragment x) -> id_loc this#jsx_fragment loc x expr (fun x -> (loc, JSXFragment x))
| (loc, Literal x) -> id_loc this#literal loc x expr (fun x -> (loc, Literal x))
| (loc, Logical x) -> id_loc this#logical loc x expr (fun x -> (loc, Logical x))
| (loc, Member x) -> id_loc this#member loc x expr (fun x -> (loc, Member x))
| (loc, MetaProperty x) ->
id_loc this#meta_property loc x expr (fun x -> (loc, MetaProperty x))
| (loc, New x) -> id_loc this#new_ loc x expr (fun x -> (loc, New x))
| (loc, Object x) -> id_loc this#object_ loc x expr (fun x -> (loc, Object x))
| (loc, OptionalCall x) -> id (this#optional_call loc) x expr (fun x -> (loc, OptionalCall x))
| (loc, OptionalMember x) ->
id_loc this#optional_member loc x expr (fun x -> (loc, OptionalMember x))
| (loc, Sequence x) -> id_loc this#sequence loc x expr (fun x -> (loc, Sequence x))
| (loc, Super x) -> id_loc this#super_expression loc x expr (fun x -> (loc, Super x))
| (loc, TaggedTemplate x) ->
id_loc this#tagged_template loc x expr (fun x -> (loc, TaggedTemplate x))
| (loc, TemplateLiteral x) ->
id_loc this#template_literal loc x expr (fun x -> (loc, TemplateLiteral x))
| (loc, This x) -> id_loc this#this_expression loc x expr (fun x -> (loc, This x))
| (loc, TypeCast x) -> id_loc this#type_cast loc x expr (fun x -> (loc, TypeCast x))
| (loc, Unary x) -> id_loc this#unary_expression loc x expr (fun x -> (loc, Unary x))
| (loc, Update x) -> id_loc this#update_expression loc x expr (fun x -> (loc, Update x))
| (loc, Yield x) -> id_loc this#yield loc x expr (fun x -> (loc, Yield x))
method array _loc (expr : ('loc, 'loc) Ast.Expression.Array.t) =
let open Ast.Expression in
let { Array.elements; comments } = expr in
let elements' = map_list this#array_element elements in
let comments' = this#syntax_opt comments in
if elements == elements' && comments == comments' then
expr
else
{ Array.elements = elements'; comments = comments' }
method array_element element =
let open Ast.Expression.Array in
match element with
| Expression expr -> id this#expression expr element (fun expr -> Expression expr)
| Spread spread -> id this#spread_element spread element (fun spread -> Spread spread)
| Hole _ -> element
method arrow_function loc (expr : ('loc, 'loc) Ast.Function.t) = this#function_ loc expr
method assignment _loc (expr : ('loc, 'loc) Ast.Expression.Assignment.t) =
let open Ast.Expression.Assignment in
let { operator = _; left; right; comments } = expr in
let left' = this#assignment_pattern left in
let right' = this#expression right in
let comments' = this#syntax_opt comments in
if left == left' && right == right' && comments == comments' then
expr
else
{ expr with left = left'; right = right'; comments = comments' }
method binary _loc (expr : ('loc, 'loc) Ast.Expression.Binary.t) =
let open Ast.Expression.Binary in
let { operator = _; left; right; comments } = expr in
let left' = this#expression left in
let right' = this#expression right in
let comments' = this#syntax_opt comments in
if left == left' && right == right' && comments == comments' then
expr
else
{ expr with left = left'; right = right'; comments = comments' }
method block _loc (stmt : ('loc, 'loc) Ast.Statement.Block.t) =
let open Ast.Statement.Block in
let { body; comments } = stmt in
let body' = this#statement_list body in
let comments' = this#syntax_opt comments in
if body == body' && comments == comments' then
stmt
else
{ body = body'; comments = comments' }
method break _loc (break : 'loc Ast.Statement.Break.t) =
let open Ast.Statement.Break in
let { label; comments } = break in
let label' = map_opt this#label_identifier label in
let comments' = this#syntax_opt comments in
if label == label' && comments == comments' then
break
else
{ label = label'; comments = comments' }
method call _loc (expr : ('loc, 'loc) Ast.Expression.Call.t) =
let open Ast.Expression.Call in
let { callee; targs; arguments; comments } = expr in
let callee' = this#expression callee in
let targs' = map_opt this#call_type_args targs in
let arguments' = this#call_arguments arguments in
let comments' = this#syntax_opt comments in
if callee == callee' && targs == targs' && arguments == arguments' && comments == comments'
then
expr
else
{ callee = callee'; targs = targs'; arguments = arguments'; comments = comments' }
method call_arguments (arg_list : ('loc, 'loc) Ast.Expression.ArgList.t) =
let open Ast.Expression.ArgList in
let (loc, { arguments; comments }) = arg_list in
let arguments' = map_list this#expression_or_spread arguments in
let comments' = this#syntax_opt comments in
if arguments == arguments' && comments == comments' then
arg_list
else
(loc, { arguments = arguments'; comments = comments' })
method optional_call loc (expr : ('loc, 'loc) Ast.Expression.OptionalCall.t) =
let open Ast.Expression.OptionalCall in
let { call; optional = _; filtered_out = _ } = expr in
let call' = this#call loc call in
if call == call' then
expr
else
{ expr with call = call' }
method call_type_args (targs : ('loc, 'loc) Ast.Expression.CallTypeArgs.t) =
let open Ast.Expression.CallTypeArgs in
let (loc, { arguments; comments }) = targs in
let arguments' = map_list this#call_type_arg arguments in
let comments' = this#syntax_opt comments in
if arguments == arguments' && comments == comments' then
targs
else
(loc, { arguments = arguments'; comments = comments' })
method call_type_arg t =
let open Ast.Expression.CallTypeArg in
match t with
| Explicit x ->
let x' = this#type_ x in
if x' == x then
t
else
Explicit x'
| Implicit (loc, { Implicit.comments }) ->
let comments' = this#syntax_opt comments in
if comments == comments' then
t
else
Implicit (loc, { Implicit.comments = comments' })
method catch_body (body : 'loc * ('loc, 'loc) Ast.Statement.Block.t) = map_loc this#block body
method catch_clause _loc (clause : ('loc, 'loc) Ast.Statement.Try.CatchClause.t') =
let open Ast.Statement.Try.CatchClause in
let { param; body; comments } = clause in
let param' = map_opt this#catch_clause_pattern param in
let body' = this#catch_body body in
let comments' = this#syntax_opt comments in
if param == param' && body == body' && comments == comments' then
clause
else
{ param = param'; body = body'; comments = comments' }
method class_declaration loc (cls : ('loc, 'loc) Ast.Class.t) = this#class_ loc cls
method class_expression loc (cls : ('loc, 'loc) Ast.Class.t) = this#class_ loc cls
method class_ _loc (cls : ('loc, 'loc) Ast.Class.t) =
let open Ast.Class in
let { id; body; tparams; extends; implements; class_decorators; comments } = cls in
let id' = map_opt this#class_identifier id in
let tparams' = map_opt this#type_params tparams in
let body' = this#class_body body in
let extends' = map_opt (map_loc this#class_extends) extends in
let implements' = map_opt this#class_implements implements in
let class_decorators' = map_list this#class_decorator class_decorators in
let comments' = this#syntax_opt comments in
if
id == id'
&& body == body'
&& extends == extends'
&& implements == implements'
&& class_decorators == class_decorators'
&& comments == comments'
&& tparams == tparams'
then
cls
else
{
id = id';
body = body';
extends = extends';
implements = implements';
class_decorators = class_decorators';
comments = comments';
tparams = tparams';
}
method class_extends _loc (extends : ('loc, 'loc) Ast.Class.Extends.t') =
let open Ast.Class.Extends in
let { expr; targs; comments } = extends in
let expr' = this#expression expr in
let targs' = map_opt this#type_args targs in
let comments' = this#syntax_opt comments in
if expr == expr' && targs == targs' && comments == comments' then
extends
else
{ expr = expr'; targs = targs'; comments = comments' }
method class_identifier (ident : ('loc, 'loc) Ast.Identifier.t) =
this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Let ident
method class_body (cls_body : ('loc, 'loc) Ast.Class.Body.t) =
let open Ast.Class.Body in
let (loc, { body; comments }) = cls_body in
let body' = map_list this#class_element body in
let comments' = this#syntax_opt comments in
if body == body' && comments == comments' then
cls_body
else
(loc, { body = body'; comments = comments' })
method class_decorator (dec : ('loc, 'loc) Ast.Class.Decorator.t) =
let open Ast.Class.Decorator in
let (loc, { expression; comments }) = dec in
let expression' = this#expression expression in
let comments' = this#syntax_opt comments in
if expression == expression' && comments == comments' then
dec
else
(loc, { expression = expression'; comments = comments' })
method class_element (elem : ('loc, 'loc) Ast.Class.Body.element) =
let open Ast.Class.Body in
match elem with
| Method (loc, meth) -> id_loc this#class_method loc meth elem (fun meth -> Method (loc, meth))
| Property (loc, prop) ->
id_loc this#class_property loc prop elem (fun prop -> Property (loc, prop))
| PrivateField (loc, field) ->
id_loc this#class_private_field loc field elem (fun field -> PrivateField (loc, field))
method class_implements (implements : ('loc, 'loc) Ast.Class.Implements.t) =
let open Ast.Class.Implements in
let (loc, { interfaces; comments }) = implements in
let interfaces' = map_list this#class_implements_interface interfaces in
let comments' = this#syntax_opt comments in
if interfaces == interfaces' && comments == comments' then
implements
else
(loc, { interfaces = interfaces'; comments = comments' })
method class_implements_interface (interface : ('loc, 'loc) Ast.Class.Implements.Interface.t) =
let open Ast.Class.Implements.Interface in
let (loc, { id; targs }) = interface in
let id' = this#type_identifier_reference id in
let targs' = map_opt this#type_args targs in
if id == id' && targs == targs' then
interface
else
(loc, { id = id'; targs = targs' })
method class_method _loc (meth : ('loc, 'loc) Ast.Class.Method.t') =
let open Ast.Class.Method in
let { kind = _; key; value; static = _; decorators; comments } = meth in
let key' = this#object_key key in
let value' = map_loc this#function_expression_or_method value in
let decorators' = map_list this#class_decorator decorators in
let comments' = this#syntax_opt comments in
if key == key' && value == value' && decorators == decorators' && comments == comments' then
meth
else
{ meth with key = key'; value = value'; decorators = decorators'; comments = comments' }
method class_property _loc (prop : ('loc, 'loc) Ast.Class.Property.t') =
let open Ast.Class.Property in
let { key; value; annot; static = _; variance; comments } = prop in
let key' = this#object_key key in
let value' = this#class_property_value value in
let annot' = this#type_annotation_hint annot in
let variance' = this#variance_opt variance in
let comments' = this#syntax_opt comments in
if
key == key'
&& value == value'
&& annot' == annot
&& variance' == variance
&& comments' == comments
then
prop
else
{
prop with
key = key';
value = value';
annot = annot';
variance = variance';
comments = comments';
}
method class_property_value (value : ('loc, 'loc) Ast.Class.Property.value) =
let open Ast.Class.Property in
match value with
| Declared -> value
| Uninitialized -> value
| Initialized x ->
let x' = this#expression x in
if x == x' then
value
else
Initialized x'
method class_private_field _loc (prop : ('loc, 'loc) Ast.Class.PrivateField.t') =
let open Ast.Class.PrivateField in
let { key; value; annot; static = _; variance; comments } = prop in
let key' = this#private_name key in
let value' = this#class_property_value value in
let annot' = this#type_annotation_hint annot in
let variance' = this#variance_opt variance in
let comments' = this#syntax_opt comments in
if
key == key'
&& value == value'
&& annot' == annot
&& variance' == variance
&& comments' == comments
then
prop
else
{
prop with
key = key';
value = value';
annot = annot';
variance = variance';
comments = comments';
}
TODO
method comprehension _loc (expr : ('loc, 'loc) Ast.Expression.Comprehension.t) = expr
method conditional _loc (expr : ('loc, 'loc) Ast.Expression.Conditional.t) =
let open Ast.Expression.Conditional in
let { test; consequent; alternate; comments } = expr in
let test' = this#predicate_expression test in
let consequent' = this#expression consequent in
let alternate' = this#expression alternate in
let comments' = this#syntax_opt comments in
if
test == test'
&& consequent == consequent'
&& alternate == alternate'
&& comments == comments'
then
expr
else
{ test = test'; consequent = consequent'; alternate = alternate'; comments = comments' }
method continue _loc (cont : 'loc Ast.Statement.Continue.t) =
let open Ast.Statement.Continue in
let { label; comments } = cont in
let label' = map_opt this#label_identifier label in
let comments' = this#syntax_opt comments in
if label == label' && comments == comments' then
cont
else
{ label = label'; comments = comments' }
method debugger _loc (dbg : 'loc Ast.Statement.Debugger.t) =
let open Ast.Statement.Debugger in
let { comments } = dbg in
let comments' = this#syntax_opt comments in
if comments == comments' then
dbg
else
{ comments = comments' }
method declare_class _loc (decl : ('loc, 'loc) Ast.Statement.DeclareClass.t) =
let open Ast.Statement.DeclareClass in
let { id = ident; tparams; body; extends; mixins; implements; comments } = decl in
let id' = this#class_identifier ident in
let tparams' = map_opt this#type_params tparams in
let body' = map_loc this#object_type body in
let extends' = map_opt (map_loc this#generic_type) extends in
let mixins' = map_list (map_loc this#generic_type) mixins in
let implements' = map_opt this#class_implements implements in
let comments' = this#syntax_opt comments in
if
id' == ident
&& tparams' == tparams
&& body' == body
&& extends' == extends
&& mixins' == mixins
&& implements' == implements
&& comments' == comments
then
decl
else
{
id = id';
tparams = tparams';
body = body';
extends = extends';
mixins = mixins';
implements = implements';
comments = comments';
}
method declare_export_declaration
_loc (decl : ('loc, 'loc) Ast.Statement.DeclareExportDeclaration.t) =
let open Ast.Statement.DeclareExportDeclaration in
let { default; source; specifiers; declaration; comments } = decl in
let source' = map_loc_opt this#export_source source in
let specifiers' = map_opt this#export_named_specifier specifiers in
let declaration' = map_opt this#declare_export_declaration_decl declaration in
let comments' = this#syntax_opt comments in
if
source == source'
&& specifiers == specifiers'
&& declaration == declaration'
&& comments == comments'
then
decl
else
{
default;
source = source';
specifiers = specifiers';
declaration = declaration';
comments = comments';
}
method declare_export_declaration_decl
(decl : ('loc, 'loc) Ast.Statement.DeclareExportDeclaration.declaration) =
let open Ast.Statement.DeclareExportDeclaration in
match decl with
| Variable (loc, dv) ->
let dv' = this#declare_variable loc dv in
if dv' == dv then
decl
else
Variable (loc, dv')
| Function (loc, df) ->
let df' = this#declare_function loc df in
if df' == df then
decl
else
Function (loc, df')
| Class (loc, dc) ->
let dc' = this#declare_class loc dc in
if dc' == dc then
decl
else
Class (loc, dc')
| DefaultType t ->
let t' = this#type_ t in
if t' == t then
decl
else
DefaultType t'
| NamedType (loc, ta) ->
let ta' = this#type_alias loc ta in
if ta' == ta then
decl
else
NamedType (loc, ta')
| NamedOpaqueType (loc, ot) ->
let ot' = this#opaque_type loc ot in
if ot' == ot then
decl
else
NamedOpaqueType (loc, ot')
| Interface (loc, i) ->
let i' = this#interface loc i in
if i' == i then
decl
else
Interface (loc, i')
method declare_function _loc (decl : ('loc, 'loc) Ast.Statement.DeclareFunction.t) =
let open Ast.Statement.DeclareFunction in
let { id = ident; annot; predicate; comments } = decl in
let id' = this#function_identifier ident in
let annot' = this#type_annotation annot in
let predicate' = map_opt this#predicate predicate in
let comments' = this#syntax_opt comments in
if id' == ident && annot' == annot && predicate' == predicate && comments' == comments then
decl
else
{ id = id'; annot = annot'; predicate = predicate'; comments = comments' }
method declare_interface loc (decl : ('loc, 'loc) Ast.Statement.Interface.t) =
this#interface loc decl
method declare_module _loc (m : ('loc, 'loc) Ast.Statement.DeclareModule.t) =
let open Ast.Statement.DeclareModule in
let { id; body; kind; comments } = m in
let body' = map_loc this#block body in
let comments' = this#syntax_opt comments in
if body' == body && comments == comments' then
m
else
{ id; body = body'; kind; comments = comments' }
method declare_module_exports _loc (exports : ('loc, 'loc) Ast.Statement.DeclareModuleExports.t)
=
let open Ast.Statement.DeclareModuleExports in
let { annot; comments } = exports in
let annot' = this#type_annotation annot in
let comments' = this#syntax_opt comments in
if annot == annot' && comments == comments' then
exports
else
{ annot = annot'; comments = comments' }
method declare_type_alias loc (decl : ('loc, 'loc) Ast.Statement.TypeAlias.t) =
this#type_alias loc decl
method declare_variable _loc (decl : ('loc, 'loc) Ast.Statement.DeclareVariable.t) =
let open Ast.Statement.DeclareVariable in
let { id = ident; annot; comments } = decl in
let id' = this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Var ident in
let annot' = this#type_annotation annot in
let comments' = this#syntax_opt comments in
if id' == ident && annot' == annot && comments' == comments then
decl
else
{ id = id'; annot = annot'; comments = comments' }
method do_while _loc (stuff : ('loc, 'loc) Ast.Statement.DoWhile.t) =
let open Ast.Statement.DoWhile in
let { body; test; comments } = stuff in
let body' = this#statement body in
let test' = this#predicate_expression test in
let comments' = this#syntax_opt comments in
if body == body' && test == test' && comments == comments' then
stuff
else
{ body = body'; test = test'; comments = comments' }
method empty _loc empty =
let open Ast.Statement.Empty in
let { comments } = empty in
let comments' = this#syntax_opt comments in
if comments == comments' then
empty
else
{ comments = comments' }
(* Maps an `enum` declaration: the binding identifier (a const-kind binding)
   and the enum body. Returns the original node when nothing changed. *)
method enum_declaration _loc (enum : ('loc, 'loc) Ast.Statement.EnumDeclaration.t) =
  let open Ast.Statement.EnumDeclaration in
  let { id = ident; body; comments } = enum in
  let id' = this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Const ident in
  let body' = this#enum_body body in
  let comments' = this#syntax_opt comments in
  if ident == id' && body == body' && comments == comments' then
    enum
  else
    { id = id'; body = body'; comments = comments' }

(* Dispatches on the enum body kind (boolean / number / string / symbol). *)
method enum_body (body : 'loc Ast.Statement.EnumDeclaration.body) =
  let open Ast.Statement.EnumDeclaration in
  match body with
  | (loc, BooleanBody boolean_body) ->
    id this#enum_boolean_body boolean_body body (fun body -> (loc, BooleanBody body))
  | (loc, NumberBody number_body) ->
    id this#enum_number_body number_body body (fun body -> (loc, NumberBody body))
  | (loc, StringBody string_body) ->
    id this#enum_string_body string_body body (fun body -> (loc, StringBody body))
  | (loc, SymbolBody symbol_body) ->
    id this#enum_symbol_body symbol_body body (fun body -> (loc, SymbolBody body))

(* Maps the members of a boolean enum body; `explicit_type` and
   `has_unknown_members` are flags with no mappable children. *)
method enum_boolean_body (body : 'loc Ast.Statement.EnumDeclaration.BooleanBody.t) =
  let open Ast.Statement.EnumDeclaration.BooleanBody in
  let { members; explicit_type = _; has_unknown_members = _; comments } = body in
  let members' = map_list this#enum_boolean_member members in
  let comments' = this#syntax_opt comments in
  if members == members' && comments == comments' then
    body
  else
    { body with members = members'; comments = comments' }

(* Maps the members of a number enum body. *)
method enum_number_body (body : 'loc Ast.Statement.EnumDeclaration.NumberBody.t) =
  let open Ast.Statement.EnumDeclaration.NumberBody in
  let { members; explicit_type = _; has_unknown_members = _; comments } = body in
  let members' = map_list this#enum_number_member members in
  let comments' = this#syntax_opt comments in
  if members == members' && comments == comments' then
    body
  else
    { body with members = members'; comments = comments' }

(* Maps the members of a string enum body; members are either all
   defaulted (`A,`) or all initialized (`A = "a",`). *)
method enum_string_body (body : 'loc Ast.Statement.EnumDeclaration.StringBody.t) =
  let open Ast.Statement.EnumDeclaration.StringBody in
  let { members; explicit_type = _; has_unknown_members = _; comments } = body in
  let members' =
    match members with
    | Defaulted m -> id (map_list this#enum_defaulted_member) m members (fun m -> Defaulted m)
    | Initialized m -> id (map_list this#enum_string_member) m members (fun m -> Initialized m)
  in
  let comments' = this#syntax_opt comments in
  if members == members' && comments == comments' then
    body
  else
    { body with members = members'; comments = comments' }

(* Maps the members of a symbol enum body; symbol members are always
   defaulted (no initializer). *)
method enum_symbol_body (body : 'loc Ast.Statement.EnumDeclaration.SymbolBody.t) =
  let open Ast.Statement.EnumDeclaration.SymbolBody in
  let { members; has_unknown_members = _; comments } = body in
  let members' = map_list this#enum_defaulted_member members in
  let comments' = this#syntax_opt comments in
  if members == members' && comments == comments' then
    body
  else
    { body with members = members'; comments = comments' }

(* Maps an enum member that has no initializer. *)
method enum_defaulted_member (member : 'loc Ast.Statement.EnumDeclaration.DefaultedMember.t) =
  let open Ast.Statement.EnumDeclaration.DefaultedMember in
  let (loc, { id = ident }) = member in
  let id' = this#enum_member_identifier ident in
  if ident == id' then
    member
  else
    (loc, { id = id' })

(* Maps an initialized boolean enum member. Only the identifier is visited;
   the literal initializer has no mappable children here. *)
method enum_boolean_member
    (member :
      ('loc Ast.BooleanLiteral.t, 'loc) Ast.Statement.EnumDeclaration.InitializedMember.t
      ) =
  let open Ast.Statement.EnumDeclaration.InitializedMember in
  let (loc, { id = ident; init }) = member in
  let id' = this#enum_member_identifier ident in
  if ident == id' then
    member
  else
    (loc, { id = id'; init })

(* Maps an initialized number enum member (identifier only, as above). *)
method enum_number_member
    (member : ('loc Ast.NumberLiteral.t, 'loc) Ast.Statement.EnumDeclaration.InitializedMember.t)
    =
  let open Ast.Statement.EnumDeclaration.InitializedMember in
  let (loc, { id = ident; init }) = member in
  let id' = this#enum_member_identifier ident in
  if ident == id' then
    member
  else
    (loc, { id = id'; init })

(* Maps an initialized string enum member (identifier only, as above). *)
method enum_string_member
    (member : ('loc Ast.StringLiteral.t, 'loc) Ast.Statement.EnumDeclaration.InitializedMember.t)
    =
  let open Ast.Statement.EnumDeclaration.InitializedMember in
  let (loc, { id = ident; init }) = member in
  let id' = this#enum_member_identifier ident in
  if ident == id' then
    member
  else
    (loc, { id = id'; init })

(* Hook for enum member names; defaults to the generic identifier mapper. *)
method enum_member_identifier (id : ('loc, 'loc) Ast.Identifier.t) = this#identifier id
(* Maps `export default ...`: the exported declaration/expression and any
   attached comments. The `default` location is carried through untouched. *)
method export_default_declaration
    _loc (decl : ('loc, 'loc) Ast.Statement.ExportDefaultDeclaration.t) =
  let open Ast.Statement.ExportDefaultDeclaration in
  let { default; declaration; comments } = decl in
  let declaration' = this#export_default_declaration_decl declaration in
  let comments' = this#syntax_opt comments in
  if declaration' == declaration && comments' == comments then
    decl
  else
    { default; declaration = declaration'; comments = comments' }

(* Dispatches on whether the default export is a declaration or an expression. *)
method export_default_declaration_decl
    (decl : ('loc, 'loc) Ast.Statement.ExportDefaultDeclaration.declaration) =
  let open Ast.Statement.ExportDefaultDeclaration in
  match decl with
  | Declaration stmt -> id this#statement stmt decl (fun stmt -> Declaration stmt)
  | Expression expr -> id this#expression expr decl (fun expr -> Expression expr)

(* Maps `export { ... }` / `export <decl>`: the optional source, specifiers,
   inline declaration, and comments. `export_kind` (value vs. type) is kept. *)
method export_named_declaration _loc (decl : ('loc, 'loc) Ast.Statement.ExportNamedDeclaration.t)
    =
  let open Ast.Statement.ExportNamedDeclaration in
  let { export_kind; source; specifiers; declaration; comments } = decl in
  let source' = map_loc_opt this#export_source source in
  let specifiers' = map_opt this#export_named_specifier specifiers in
  let declaration' = map_opt this#statement declaration in
  let comments' = this#syntax_opt comments in
  if
    source == source'
    && specifiers == specifiers'
    && declaration == declaration'
    && comments == comments'
  then
    decl
  else
    {
      export_kind;
      source = source';
      specifiers = specifiers';
      declaration = declaration';
      comments = comments';
    }
(* Maps one `local as exported` specifier in an export clause; `exported`
   is absent when the name is not renamed. *)
method export_named_declaration_specifier
    (spec : 'loc Ast.Statement.ExportNamedDeclaration.ExportSpecifier.t) =
  let open Ast.Statement.ExportNamedDeclaration.ExportSpecifier in
  let (loc, { local; exported }) = spec in
  let local' = this#identifier local in
  let exported' = map_opt this#identifier exported in
  if local == local' && exported == exported' then
    spec
  else
    (loc, { local = local'; exported = exported' })
(* Maps an `export * [as name]` specifier; the optional identifier is the
   `as name` part. The original node is reused when nothing changed. *)
method export_batch_specifier
    (spec : 'loc Ast.Statement.ExportNamedDeclaration.ExportBatchSpecifier.t) =
  let (loc, name_opt) = spec in
  let mapped = map_opt this#identifier name_opt in
  if mapped == name_opt then
    spec
  else
    (loc, mapped)
(* Dispatches on the export clause shape: a list of named specifiers or a
   single batch (`*`) specifier. *)
method export_named_specifier (spec : 'loc Ast.Statement.ExportNamedDeclaration.specifier) =
  let open Ast.Statement.ExportNamedDeclaration in
  match spec with
  | ExportSpecifiers spec_list ->
    let spec_list' = map_list this#export_named_declaration_specifier spec_list in
    if spec_list == spec_list' then
      spec
    else
      ExportSpecifiers spec_list'
  | ExportBatchSpecifier batch ->
    let batch' = this#export_batch_specifier batch in
    if batch == batch' then
      spec
    else
      ExportBatchSpecifier batch'

(* Maps the string-literal module source of an `export ... from "..."`;
   only attached comments can change. *)
method export_source _loc (source : 'loc Ast.StringLiteral.t) =
  let open Ast.StringLiteral in
  let { value; raw; comments } = source in
  let comments' = this#syntax_opt comments in
  if comments == comments' then
    source
  else
    { value; raw; comments = comments' }
(* Maps an expression statement; the `directive` flag (e.g. "use strict")
   is carried through untouched. *)
method expression_statement _loc (stmt : ('loc, 'loc) Ast.Statement.Expression.t) =
  let open Ast.Statement.Expression in
  let { expression = expr; directive; comments } = stmt in
  let expr' = this#expression expr in
  let comments' = this#syntax_opt comments in
  if expr == expr' && comments == comments' then
    stmt
  else
    { expression = expr'; directive; comments = comments' }

(* Dispatches on a call/array element: a plain expression or a `...spread`. *)
method expression_or_spread expr_or_spread =
  let open Ast.Expression in
  match expr_or_spread with
  | Expression expr -> id this#expression expr expr_or_spread (fun expr -> Expression expr)
  | Spread spread -> id this#spread_element spread expr_or_spread (fun spread -> Spread spread)
(* Maps a `for (lhs in rhs)` statement; `each` (the legacy `for each`) is
   carried through untouched. *)
method for_in_statement _loc (stmt : ('loc, 'loc) Ast.Statement.ForIn.t) =
  let open Ast.Statement.ForIn in
  let { left; right; body; each; comments } = stmt in
  let left' = this#for_in_statement_lhs left in
  let right' = this#expression right in
  let body' = this#statement body in
  let comments' = this#syntax_opt comments in
  if left == left' && right == right' && body == body' && comments == comments' then
    stmt
  else
    { left = left'; right = right'; body = body'; each; comments = comments' }

(* Dispatches on the for-in left-hand side: a declaration or a pattern. *)
method for_in_statement_lhs (left : ('loc, 'loc) Ast.Statement.ForIn.left) =
  let open Ast.Statement.ForIn in
  match left with
  | LeftDeclaration decl ->
    id this#for_in_left_declaration decl left (fun decl -> LeftDeclaration decl)
  | LeftPattern patt ->
    id this#for_in_assignment_pattern patt left (fun patt -> LeftPattern patt)

(* Maps the variable declaration in a for-in left-hand side. *)
method for_in_left_declaration left =
  let (loc, decl) = left in
  id_loc this#variable_declaration loc decl left (fun decl -> (loc, decl))

(* Maps a `for (lhs of rhs)` statement; the `await` flag is carried through. *)
method for_of_statement _loc (stuff : ('loc, 'loc) Ast.Statement.ForOf.t) =
  let open Ast.Statement.ForOf in
  let { left; right; body; await; comments } = stuff in
  let left' = this#for_of_statement_lhs left in
  let right' = this#expression right in
  let body' = this#statement body in
  let comments' = this#syntax_opt comments in
  if left == left' && right == right' && body == body' && comments == comments' then
    stuff
  else
    { left = left'; right = right'; body = body'; await; comments = comments' }

(* Dispatches on the for-of left-hand side: a declaration or a pattern. *)
method for_of_statement_lhs (left : ('loc, 'loc) Ast.Statement.ForOf.left) =
  let open Ast.Statement.ForOf in
  match left with
  | LeftDeclaration decl ->
    id this#for_of_left_declaration decl left (fun decl -> LeftDeclaration decl)
  | LeftPattern patt ->
    id this#for_of_assignment_pattern patt left (fun patt -> LeftPattern patt)

(* Maps the variable declaration in a for-of left-hand side. *)
method for_of_left_declaration left =
  let (loc, decl) = left in
  id_loc this#variable_declaration loc decl left (fun decl -> (loc, decl))

(* Maps a classic C-style `for (init; test; update)` statement. *)
method for_statement _loc (stmt : ('loc, 'loc) Ast.Statement.For.t) =
  let open Ast.Statement.For in
  let { init; test; update; body; comments } = stmt in
  let init' = map_opt this#for_statement_init init in
  let test' = map_opt this#predicate_expression test in
  let update' = map_opt this#expression update in
  let body' = this#statement body in
  let comments' = this#syntax_opt comments in
  if
    init == init'
    && test == test'
    && update == update'
    && body == body'
    && comments == comments'
  then
    stmt
  else
    { init = init'; test = test'; update = update'; body = body'; comments = comments' }

(* Dispatches on the for-loop initializer: a declaration or an expression. *)
method for_statement_init (init : ('loc, 'loc) Ast.Statement.For.init) =
  let open Ast.Statement.For in
  match init with
  | InitDeclaration decl ->
    id this#for_init_declaration decl init (fun decl -> InitDeclaration decl)
  | InitExpression expr -> id this#expression expr init (fun expr -> InitExpression expr)

(* Maps the variable declaration in a for-loop initializer. *)
method for_init_declaration init =
  let (loc, decl) = init in
  id_loc this#variable_declaration loc decl init (fun decl -> (loc, decl))
(* Maps a parameter inside a function *type* annotation (`(name?: T)`). *)
method function_param_type (fpt : ('loc, 'loc) Ast.Type.Function.Param.t) =
  let open Ast.Type.Function.Param in
  let (loc, { annot; name; optional }) = fpt in
  let annot' = this#type_ annot in
  let name' = map_opt this#identifier name in
  if annot' == annot && name' == name then
    fpt
  else
    (loc, { annot = annot'; name = name'; optional })

(* Maps the `...rest` parameter of a function type. *)
method function_rest_param_type (frpt : ('loc, 'loc) Ast.Type.Function.RestParam.t) =
  let open Ast.Type.Function.RestParam in
  let (loc, { argument; comments }) = frpt in
  let argument' = this#function_param_type argument in
  let comments' = this#syntax_opt comments in
  if argument' == argument && comments' == comments then
    frpt
  else
    (loc, { argument = argument'; comments = comments' })

(* Maps the `this: T` pseudo-parameter of a function type. *)
method function_this_param_type (this_param : ('loc, 'loc) Ast.Type.Function.ThisParam.t) =
  let open Ast.Type.Function.ThisParam in
  let (loc, { annot; comments }) = this_param in
  let annot' = this#type_annotation annot in
  let comments' = this#syntax_opt comments in
  if annot' == annot && comments' == comments then
    this_param
  else
    (loc, { annot = annot'; comments = comments' })

(* Maps a function type: tparams first, then this/params/rest, then the
   return type, mirroring source order. Both the function's and the param
   list's comments are visited. *)
method function_type _loc (ft : ('loc, 'loc) Ast.Type.Function.t) =
  let open Ast.Type.Function in
  let {
    params = (params_loc, { Params.this_; params = ps; rest = rpo; comments = params_comments });
    return;
    tparams;
    comments = func_comments;
  } =
    ft
  in
  let tparams' = map_opt this#type_params tparams in
  let this_' = map_opt this#function_this_param_type this_ in
  let ps' = map_list this#function_param_type ps in
  let rpo' = map_opt this#function_rest_param_type rpo in
  let return' = this#type_ return in
  let func_comments' = this#syntax_opt func_comments in
  let params_comments' = this#syntax_opt params_comments in
  if
    ps' == ps
    && rpo' == rpo
    && return' == return
    && tparams' == tparams
    && func_comments' == func_comments
    && params_comments' == params_comments
    && this_' == this_
  then
    ft
  else
    {
      params =
        ( params_loc,
          { Params.this_ = this_'; params = ps'; rest = rpo'; comments = params_comments' }
        );
      return = return';
      tparams = tparams';
      comments = func_comments';
    }
(* Hook for statement labels (`label:`); defaults to the identifier mapper. *)
method label_identifier (ident : ('loc, 'loc) Ast.Identifier.t) = this#identifier ident

(* Dispatches on an object-type property value: plain type, getter or setter. *)
method object_property_value_type (opvt : ('loc, 'loc) Ast.Type.Object.Property.value) =
  let open Ast.Type.Object.Property in
  match opvt with
  | Init t -> id this#type_ t opvt (fun t -> Init t)
  | Get t -> id this#object_type_property_getter t opvt (fun t -> Get t)
  | Set t -> id this#object_type_property_setter t opvt (fun t -> Set t)

(* Maps a getter's function type within an object type. *)
method object_type_property_getter getter =
  let (loc, ft) = getter in
  id_loc this#function_type loc ft getter (fun ft -> (loc, ft))

(* Maps a setter's function type within an object type. *)
method object_type_property_setter setter =
  let (loc, ft) = setter in
  id_loc this#function_type loc ft setter (fun ft -> (loc, ft))
(* Maps a named property of an object type: key, value, variance, comments.
   The boolean flags (optional/static/proto/_method) are carried through. *)
method object_property_type (opt : ('loc, 'loc) Ast.Type.Object.Property.t) =
  let open Ast.Type.Object.Property in
  let (loc, { key; value; optional; static; proto; _method; variance; comments }) = opt in
  let key' = this#object_key key in
  let value' = this#object_property_value_type value in
  let variance' = this#variance_opt variance in
  let comments' = this#syntax_opt comments in
  if key' == key && value' == value && variance' == variance && comments' == comments then
    opt
  else
    ( loc,
      {
        key = key';
        value = value';
        optional;
        static;
        proto;
        _method;
        variance = variance';
        comments = comments';
      }
    )

(* Maps a `...T` spread property of an object type. *)
method object_spread_property_type (opt : ('loc, 'loc) Ast.Type.Object.SpreadProperty.t) =
  let open Ast.Type.Object.SpreadProperty in
  let (loc, { argument; comments }) = opt in
  let argument' = this#type_ argument in
  let comments' = this#syntax_opt comments in
  if argument' == argument && comments == comments' then
    opt
  else
    (loc, { argument = argument'; comments = comments' })

(* Maps an indexer property (`[key: K]: V`) of an object type; the optional
   indexer name `id` is carried through unvisited. *)
method object_indexer_property_type (opt : ('loc, 'loc) Ast.Type.Object.Indexer.t) =
  let open Ast.Type.Object.Indexer in
  let (loc, { id; key; value; static; variance; comments }) = opt in
  let key' = this#type_ key in
  let value' = this#type_ value in
  let variance' = this#variance_opt variance in
  let comments' = this#syntax_opt comments in
  if key' == key && value' == value && variance' == variance && comments' == comments then
    opt
  else
    (loc, { id; key = key'; value = value'; static; variance = variance'; comments = comments' })

(* Maps an internal-slot property (`[[name]]: T`) of an object type. *)
method object_internal_slot_property_type (slot : ('loc, 'loc) Ast.Type.Object.InternalSlot.t) =
  let open Ast.Type.Object.InternalSlot in
  let (loc, { id; value; optional; static; _method; comments }) = slot in
  let id' = this#identifier id in
  let value' = this#type_ value in
  let comments' = this#syntax_opt comments in
  if id == id' && value == value' && comments == comments' then
    slot
  else
    (loc, { id = id'; value = value'; optional; static; _method; comments = comments' })

(* Maps a call property (`(): T`) of an object type. *)
method object_call_property_type (call : ('loc, 'loc) Ast.Type.Object.CallProperty.t) =
  let open Ast.Type.Object.CallProperty in
  let (loc, { value = (value_loc, value); static; comments }) = call in
  let value' = this#function_type value_loc value in
  let comments' = this#syntax_opt comments in
  if value == value' && comments == comments' then
    call
  else
    (loc, { value = (value_loc, value'); static; comments = comments' })
(* Maps an object type literal, dispatching each property to its specific
   mapper. `exact`/`inexact` flags are carried through untouched. *)
method object_type _loc (ot : ('loc, 'loc) Ast.Type.Object.t) =
  let open Ast.Type.Object in
  let { properties; exact; inexact; comments } = ot in
  let properties' =
    map_list
      (fun p ->
        match p with
        | Property p' -> id this#object_property_type p' p (fun p' -> Property p')
        | SpreadProperty p' ->
          id this#object_spread_property_type p' p (fun p' -> SpreadProperty p')
        | Indexer p' -> id this#object_indexer_property_type p' p (fun p' -> Indexer p')
        | InternalSlot p' ->
          id this#object_internal_slot_property_type p' p (fun p' -> InternalSlot p')
        | CallProperty p' -> id this#object_call_property_type p' p (fun p' -> CallProperty p'))
      properties
  in
  let comments' = this#syntax_opt comments in
  if properties' == properties && comments == comments' then
    ot
  else
    { properties = properties'; exact; inexact; comments = comments' }

(* Maps an `interface { ... }` type: its extends clause and its body. *)
method interface_type _loc (i : ('loc, 'loc) Ast.Type.Interface.t) =
  let open Ast.Type.Interface in
  let { extends; body; comments } = i in
  let extends' = map_list (map_loc this#generic_type) extends in
  let body' = map_loc this#object_type body in
  let comments' = this#syntax_opt comments in
  if extends' == extends && body' == body && comments == comments' then
    i
  else
    { extends = extends'; body = body'; comments = comments' }
(* Dispatches on a generic type name: a bare identifier or a qualified
   path (`A.B.C`). *)
method generic_identifier_type (git : ('loc, 'loc) Ast.Type.Generic.Identifier.t) =
  let open Ast.Type.Generic.Identifier in
  match git with
  | Unqualified i -> id this#type_identifier_reference i git (fun i -> Unqualified i)
  | Qualified i -> id this#generic_qualified_identifier_type i git (fun i -> Qualified i)

(* Maps one qualification step of a qualified type name: the prefix
   (recursively) and the member identifier. *)
method generic_qualified_identifier_type qual =
  let open Ast.Type.Generic.Identifier in
  let (loc, { qualification; id }) = qual in
  let qualification' = this#generic_identifier_type qualification in
  let id' = this#member_type_identifier id in
  if qualification' == qualification && id' == id then
    qual
  else
    (loc, { qualification = qualification'; id = id' })

(* Hook for the member part of a qualified type name. *)
method member_type_identifier id = this#identifier id
(* Maps a variance sigil (`+`/`-`); only attached comments can change,
   and the original node is reused when they do not. *)
method variance (variance : 'loc Ast.Variance.t) =
  let (loc, { Ast.Variance.kind; comments }) = variance in
  match this#syntax_opt comments with
  | mapped when mapped == comments -> variance
  | mapped -> (loc, { Ast.Variance.kind; comments = mapped })
(* Maps an optional variance sigil. *)
method variance_opt (opt : 'loc Ast.Variance.t option) = map_opt this#variance opt

(* Maps an applied type-argument list `<T1, T2, ...>`. *)
method type_args (targs : ('loc, 'loc) Ast.Type.TypeArgs.t) =
  let open Ast.Type.TypeArgs in
  let (loc, { arguments; comments }) = targs in
  let arguments' = map_list this#type_ arguments in
  let comments' = this#syntax_opt comments in
  if arguments == arguments' && comments == comments' then
    targs
  else
    (loc, { arguments = arguments'; comments = comments' })

(* Maps a type-parameter declaration list `<T, U: B = D>`. *)
method type_params (tparams : ('loc, 'loc) Ast.Type.TypeParams.t) =
  let open Ast.Type.TypeParams in
  let (loc, { params = tps; comments }) = tparams in
  let tps' = map_list this#type_param tps in
  let comments' = this#syntax_opt comments in
  if tps' == tps && comments' == comments then
    tparams
  else
    (loc, { params = tps'; comments = comments' })

(* Maps a single type parameter: its bound, variance and default are
   visited before the binding name itself. *)
method type_param (tparam : ('loc, 'loc) Ast.Type.TypeParam.t) =
  let open Ast.Type.TypeParam in
  let (loc, { name; bound; variance; default }) = tparam in
  let bound' = this#type_annotation_hint bound in
  let variance' = this#variance_opt variance in
  let default' = map_opt this#type_ default in
  let name' = this#binding_type_identifier name in
  if name' == name && bound' == bound && variance' == variance && default' == default then
    tparam
  else
    (loc, { name = name'; bound = bound'; variance = variance'; default = default' })
(* Maps a generic type reference `Name<Args>`. *)
method generic_type _loc (gt : ('loc, 'loc) Ast.Type.Generic.t) =
  let open Ast.Type.Generic in
  let { id; targs; comments } = gt in
  let id' = this#generic_identifier_type id in
  let targs' = map_opt this#type_args targs in
  let comments' = this#syntax_opt comments in
  if id' == id && targs' == targs && comments' == comments then
    gt
  else
    { id = id'; targs = targs'; comments = comments' }

(* Maps an indexed-access type `Obj[Index]`. *)
method indexed_access _loc (ia : ('loc, 'loc) Ast.Type.IndexedAccess.t) =
  let open Ast.Type.IndexedAccess in
  let { _object; index; comments } = ia in
  let _object' = this#type_ _object in
  let index' = this#type_ index in
  let comments' = this#syntax_opt comments in
  if _object' == _object && index' == index && comments' == comments then
    ia
  else
    { _object = _object'; index = index'; comments = comments' }

(* Maps an optional indexed-access type `Obj?.[Index]`; delegates the
   inner access to [indexed_access] and keeps the `optional` flag. *)
method optional_indexed_access loc (ia : ('loc, 'loc) Ast.Type.OptionalIndexedAccess.t) =
  let open Ast.Type.OptionalIndexedAccess in
  let { indexed_access; optional } = ia in
  let indexed_access' = this#indexed_access loc indexed_access in
  if indexed_access' == indexed_access then
    ia
  else
    { indexed_access = indexed_access'; optional }
(* Maps a string singleton type (`"foo"`); only comments can change. *)
method string_literal_type _loc (lit : 'loc Ast.StringLiteral.t) =
  let open Ast.StringLiteral in
  let { value; raw; comments } = lit in
  let comments' = this#syntax_opt comments in
  if comments == comments' then
    lit
  else
    { value; raw; comments = comments' }

(* Maps a number singleton type (`42`); only comments can change. *)
method number_literal_type _loc (lit : 'loc Ast.NumberLiteral.t) =
  let open Ast.NumberLiteral in
  let { value; raw; comments } = lit in
  let comments' = this#syntax_opt comments in
  if comments == comments' then
    lit
  else
    { value; raw; comments = comments' }

(* Maps a bigint singleton type (`42n`); only comments can change. *)
method bigint_literal_type _loc (lit : 'loc Ast.BigIntLiteral.t) =
  let open Ast.BigIntLiteral in
  let { value; raw; comments } = lit in
  let comments' = this#syntax_opt comments in
  if comments == comments' then
    lit
  else
    { value; raw; comments = comments' }

(* Maps a boolean singleton type (`true`/`false`); only comments can change. *)
method boolean_literal_type _loc (lit : 'loc Ast.BooleanLiteral.t) =
  let open Ast.BooleanLiteral in
  let { value; comments } = lit in
  let comments' = this#syntax_opt comments in
  if comments == comments' then
    lit
  else
    { value; comments = comments' }
(* Maps a maybe type `?T`: the wrapped type, then the attached comments.
   The original node is reused when neither child changed. *)
method nullable_type (t : ('loc, 'loc) Ast.Type.Nullable.t) =
  let open Ast.Type.Nullable in
  let { argument; comments } = t in
  let arg_mapped = this#type_ argument in
  let comments_mapped = this#syntax_opt comments in
  if arg_mapped == argument && comments_mapped == comments then
    t
  else
    { argument = arg_mapped; comments = comments_mapped }
(* Maps a `typeof expr` type: the target expression and the comments. *)
method typeof_type (t : ('loc, 'loc) Ast.Type.Typeof.t) =
  let open Ast.Type.Typeof in
  let { argument; comments } = t in
  let argument' = this#typeof_expression argument in
  let comments' = this#syntax_opt comments in
  if argument == argument' && comments == comments' then
    t
  else
    { argument = argument'; comments = comments' }

(* Dispatches on the typeof target: bare identifier or qualified path. *)
method typeof_expression (git : ('loc, 'loc) Ast.Type.Typeof.Target.t) =
  let open Ast.Type.Typeof.Target in
  match git with
  | Unqualified i -> id this#typeof_identifier i git (fun i -> Unqualified i)
  | Qualified i -> id this#typeof_qualified_identifier i git (fun i -> Qualified i)

(* Hook for the root identifier of a typeof target. *)
method typeof_identifier id = this#identifier id

(* Hook for member identifiers inside a qualified typeof target. *)
method typeof_member_identifier id = this#identifier id

(* Maps one qualification step of a qualified typeof target. *)
method typeof_qualified_identifier qual =
  let open Ast.Type.Typeof.Target in
  let (loc, { qualification; id }) = qual in
  let qualification' = this#typeof_expression qualification in
  let id' = this#typeof_member_identifier id in
  if qualification' == qualification && id' == id then
    qual
  else
    (loc, { qualification = qualification'; id = id' })
(* Maps a tuple type `[T1, T2, ...]`. *)
method tuple_type (t : ('loc, 'loc) Ast.Type.Tuple.t) =
  let open Ast.Type.Tuple in
  let { types; comments } = t in
  let types' = map_list this#type_ types in
  let comments' = this#syntax_opt comments in
  if types == types' && comments == comments' then
    t
  else
    { types = types'; comments = comments' }

(* Maps an array shorthand type `T[]`. *)
method array_type (t : ('loc, 'loc) Ast.Type.Array.t) =
  let open Ast.Type.Array in
  let { argument; comments } = t in
  let argument' = this#type_ argument in
  let comments' = this#syntax_opt comments in
  if argument == argument' && comments == comments' then
    t
  else
    { argument = argument'; comments = comments' }
(* Maps a union type `T0 | T1 | ...`; the AST stores the first two members
   separately to guarantee at least two arms. *)
method union_type _loc (t : ('loc, 'loc) Ast.Type.Union.t) =
  let open Ast.Type.Union in
  let { types = (t0, t1, ts); comments } = t in
  let t0' = this#type_ t0 in
  let t1' = this#type_ t1 in
  let ts' = map_list this#type_ ts in
  let comments' = this#syntax_opt comments in
  if t0' == t0 && t1' == t1 && ts' == ts && comments' == comments then
    t
  else
    { types = (t0', t1', ts'); comments = comments' }

(* Maps an intersection type `T0 & T1 & ...` (same two-plus-rest layout). *)
method intersection_type _loc (t : ('loc, 'loc) Ast.Type.Intersection.t) =
  let open Ast.Type.Intersection in
  let { types = (t0, t1, ts); comments } = t in
  let t0' = this#type_ t0 in
  let t1' = this#type_ t1 in
  let ts' = map_list this#type_ ts in
  let comments' = this#syntax_opt comments in
  if t0' == t0 && t1' == t1 && ts' == ts && comments' == comments then
    t
  else
    { types = (t0', t1', ts'); comments = comments' }
(* Central dispatch for every type node. Primitive/keyword types carry only
   comments; each structured type is forwarded to its dedicated mapper.
   The match is exhaustive so adding a constructor is a compile error. *)
method type_ (t : ('loc, 'loc) Ast.Type.t) =
  let open Ast.Type in
  match t with
  | (loc, Any comments) -> id this#syntax_opt comments t (fun comments -> (loc, Any comments))
  | (loc, Mixed comments) ->
    id this#syntax_opt comments t (fun comments -> (loc, Mixed comments))
  | (loc, Empty comments) ->
    id this#syntax_opt comments t (fun comments -> (loc, Empty comments))
  | (loc, Void comments) -> id this#syntax_opt comments t (fun comments -> (loc, Void comments))
  | (loc, Null comments) -> id this#syntax_opt comments t (fun comments -> (loc, Null comments))
  | (loc, Symbol comments) ->
    id this#syntax_opt comments t (fun comments -> (loc, Symbol comments))
  | (loc, Number comments) ->
    id this#syntax_opt comments t (fun comments -> (loc, Number comments))
  | (loc, BigInt comments) ->
    id this#syntax_opt comments t (fun comments -> (loc, BigInt comments))
  | (loc, String comments) ->
    id this#syntax_opt comments t (fun comments -> (loc, String comments))
  | (loc, Boolean comments) ->
    id this#syntax_opt comments t (fun comments -> (loc, Boolean comments))
  | (loc, Exists comments) ->
    id this#syntax_opt comments t (fun comments -> (loc, Exists comments))
  | (loc, Nullable t') -> id this#nullable_type t' t (fun t' -> (loc, Nullable t'))
  | (loc, Array t') -> id this#array_type t' t (fun t' -> (loc, Array t'))
  | (loc, Typeof t') -> id this#typeof_type t' t (fun t' -> (loc, Typeof t'))
  | (loc, Function ft) -> id_loc this#function_type loc ft t (fun ft -> (loc, Function ft))
  | (loc, Object ot) -> id_loc this#object_type loc ot t (fun ot -> (loc, Object ot))
  | (loc, Interface i) -> id_loc this#interface_type loc i t (fun i -> (loc, Interface i))
  | (loc, Generic gt) -> id_loc this#generic_type loc gt t (fun gt -> (loc, Generic gt))
  | (loc, IndexedAccess ia) ->
    id_loc this#indexed_access loc ia t (fun ia -> (loc, IndexedAccess ia))
  | (loc, OptionalIndexedAccess ia) ->
    id_loc this#optional_indexed_access loc ia t (fun ia -> (loc, OptionalIndexedAccess ia))
  | (loc, StringLiteral lit) ->
    id_loc this#string_literal_type loc lit t (fun lit -> (loc, StringLiteral lit))
  | (loc, NumberLiteral lit) ->
    id_loc this#number_literal_type loc lit t (fun lit -> (loc, NumberLiteral lit))
  | (loc, BigIntLiteral lit) ->
    id_loc this#bigint_literal_type loc lit t (fun lit -> (loc, BigIntLiteral lit))
  | (loc, BooleanLiteral lit) ->
    id_loc this#boolean_literal_type loc lit t (fun lit -> (loc, BooleanLiteral lit))
  | (loc, Union t') -> id_loc this#union_type loc t' t (fun t' -> (loc, Union t'))
  | (loc, Intersection t') ->
    id_loc this#intersection_type loc t' t (fun t' -> (loc, Intersection t'))
  | (loc, Tuple t') -> id this#tuple_type t' t (fun t' -> (loc, Tuple t'))
(* Maps the type inside a `: T` annotation, reusing the original node
   when the inner type is physically unchanged. *)
method type_annotation (annot : ('loc, 'loc) Ast.Type.annotation) =
  let (loc, ty) = annot in
  let ty' = this#type_ ty in
  if ty' == ty then
    annot
  else
    (loc, ty')
(* Maps an annotation that may be missing (inferred). A `Missing` hint has
   no children; an `Available` annotation is visited and the original value
   reused when unchanged. *)
method type_annotation_hint (return : ('M, 'T) Ast.Type.annotation_or_hint) =
  let open Ast.Type in
  match return with
  | Missing _ -> return
  | Available annot ->
    (match this#type_annotation annot with
    | annot' when annot' == annot -> return
    | annot' -> Available annot')
(* Entry points for the three syntactic forms of functions; all funnel into
   [function_]. *)
method function_declaration loc (stmt : ('loc, 'loc) Ast.Function.t) = this#function_ loc stmt

method function_expression loc (stmt : ('loc, 'loc) Ast.Function.t) =
  this#function_expression_or_method loc stmt

(** previously, we conflated [function_expression] and [class_method]. callers should be
    updated to override those individually. *)
method function_expression_or_method loc (stmt : ('loc, 'loc) Ast.Function.t) =
  this#function_ loc stmt
  [@@alert deprecated "Use either function_expression or class_method"]

(* Internal helper for function declarations, function expressions and arrow functions *)
(* Shared mapper for all function forms: visits the (optional) name,
   tparams, params, return annotation, body, predicate and comments, in
   that order. `async`/`generator`/`sig_loc` are carried through. *)
method function_ _loc (expr : ('loc, 'loc) Ast.Function.t) =
  let open Ast.Function in
  let {
    id = ident;
    params;
    body;
    async;
    generator;
    predicate;
    return;
    tparams;
    sig_loc;
    comments;
  } =
    expr
  in
  let ident' = map_opt this#function_identifier ident in
  let tparams' = map_opt this#type_params tparams in
  let params' = this#function_params params in
  let return' = this#type_annotation_hint return in
  let body' = this#function_body_any body in
  let predicate' = map_opt this#predicate predicate in
  let comments' = this#syntax_opt comments in
  if
    ident == ident'
    && params == params'
    && body == body'
    && predicate == predicate'
    && return == return'
    && tparams == tparams'
    && comments == comments'
  then
    expr
  else
    {
      id = ident';
      params = params';
      return = return';
      body = body';
      async;
      generator;
      predicate = predicate';
      tparams = tparams';
      sig_loc;
      comments = comments';
    }
(* Maps a function's value-level parameter list: positional params, the
   `...rest` param, the `this` pseudo-param, and the list's comments. *)
method function_params (params : ('loc, 'loc) Ast.Function.Params.t) =
  let open Ast.Function in
  let (loc, { Params.params = params_list; rest; comments; this_ }) = params in
  let params_list' = map_list this#function_param params_list in
  let rest' = map_opt this#function_rest_param rest in
  let this_' = map_opt this#function_this_param this_ in
  let comments' = this#syntax_opt comments in
  if params_list == params_list' && rest == rest' && comments == comments' && this_ == this_'
  then
    params
  else
    (loc, { Params.params = params_list'; rest = rest'; comments = comments'; this_ = this_' })

(* Maps the `this: T` pseudo-parameter of a function value. *)
method function_this_param (this_param : ('loc, 'loc) Ast.Function.ThisParam.t) =
  let open Ast.Function.ThisParam in
  let (loc, { annot; comments }) = this_param in
  let annot' = this#type_annotation annot in
  let comments' = this#syntax_opt comments in
  if annot' == annot && comments' == comments then
    this_param
  else
    (loc, { annot = annot'; comments = comments' })
(* Maps one function parameter: its binding pattern and optional default. *)
method function_param (param : ('loc, 'loc) Ast.Function.Param.t) =
  let open Ast.Function.Param in
  let (loc, { argument; default }) = param in
  let argument' = this#function_param_pattern argument in
  let default' = map_opt this#expression default in
  if argument == argument' && default == default' then
    param
  else
    (loc, { argument = argument'; default = default' })

(* Dispatches on the body form: a block or a bare expression (arrows). *)
method function_body_any (body : ('loc, 'loc) Ast.Function.body) =
  match body with
  | Ast.Function.BodyBlock block ->
    id this#function_body block body (fun block -> Ast.Function.BodyBlock block)
  | Ast.Function.BodyExpression expr ->
    id this#expression expr body (fun expr -> Ast.Function.BodyExpression expr)

(* Maps a block-shaped function body. *)
method function_body (body : 'loc * ('loc, 'loc) Ast.Statement.Block.t) =
  let (loc, block) = body in
  id_loc this#block loc block body (fun block -> (loc, block))
(* A function's name is a var-kind binding (hoisted like `var`). *)
method function_identifier (ident : ('loc, 'loc) Ast.Identifier.t) =
  this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Var ident

(* TODO *)
method generator _loc (expr : ('loc, 'loc) Ast.Expression.Generator.t) = expr
(* Maps a bare identifier; only attached comments can change. *)
method identifier (id : ('loc, 'loc) Ast.Identifier.t) =
  let open Ast.Identifier in
  let (loc, { name; comments }) = id in
  let comments' = this#syntax_opt comments in
  if comments == comments' then
    id
  else
    (loc, { name; comments = comments' })

(* Hooks for identifiers in type position; each layer defaults to the one
   below so subclasses can intercept at the right granularity. *)
method type_identifier (id : ('loc, 'loc) Ast.Identifier.t) = this#identifier id

method type_identifier_reference (id : ('loc, 'loc) Ast.Identifier.t) = this#type_identifier id

method binding_type_identifier (id : ('loc, 'loc) Ast.Identifier.t) = this#type_identifier id
(* Maps an interface declaration: binding name, tparams, extends clause
   and object-type body. *)
method interface _loc (interface : ('loc, 'loc) Ast.Statement.Interface.t) =
  let open Ast.Statement.Interface in
  let { id = ident; tparams; extends; body; comments } = interface in
  let id' = this#binding_type_identifier ident in
  let tparams' = map_opt this#type_params tparams in
  let extends' = map_list (map_loc this#generic_type) extends in
  let body' = map_loc this#object_type body in
  let comments' = this#syntax_opt comments in
  if
    id' == ident
    && tparams' == tparams
    && extends' == extends
    && body' == body
    && comments' == comments
  then
    interface
  else
    { id = id'; tparams = tparams'; extends = extends'; body = body'; comments = comments' }

(* Statement-level interface declarations share the interface mapper. *)
method interface_declaration loc (decl : ('loc, 'loc) Ast.Statement.Interface.t) =
  this#interface loc decl

(* Maps a `#name` private name; only attached comments can change. *)
method private_name (id : 'loc Ast.PrivateName.t) =
  let open Ast.PrivateName in
  let (loc, { name; comments }) = id in
  let comments' = this#syntax_opt comments in
  if comments == comments' then
    id
  else
    (loc, { name; comments = comments' })
(* Maps a computed property key `[expr]`. *)
method computed_key (key : ('loc, 'loc) Ast.ComputedKey.t) =
  let open Ast.ComputedKey in
  let (loc, { expression; comments }) = key in
  let expression' = this#expression expression in
  let comments' = this#syntax_opt comments in
  if expression == expression' && comments == comments' then
    key
  else
    (loc, { expression = expression'; comments = comments' })

(* Maps a dynamic `import(argument)` expression. *)
method import _loc (expr : ('loc, 'loc) Ast.Expression.Import.t) =
  let open Ast.Expression.Import in
  let { argument; comments } = expr in
  let argument' = this#expression argument in
  let comments' = this#syntax_opt comments in
  if argument == argument' && comments == comments' then
    expr
  else
    { argument = argument'; comments = comments' }
(* Maps the consequent branch of an `if`; `has_else` is provided for
   subclasses (e.g. scope builders) and deliberately unused here. *)
method if_consequent_statement ~has_else (stmt : ('loc, 'loc) Ast.Statement.t) =
  ignore has_else;
  this#statement stmt

(* Maps the `else` branch of an `if`. *)
method if_alternate_statement _loc (altern : ('loc, 'loc) Ast.Statement.If.Alternate.t') =
  let open Ast.Statement.If.Alternate in
  let { body; comments } = altern in
  let body' = this#statement body in
  let comments' = this#syntax_opt comments in
  if body == body' && comments == comments' then
    altern
  else
    { body = body'; comments = comments' }
method if_statement _loc (stmt : ('loc, 'loc) Ast.Statement.If.t) =
let open Ast.Statement.If in
let { test; consequent; alternate; comments } = stmt in
let test' = this#predicate_expression test in
let consequent' = this#if_consequent_statement ~has_else:(alternate <> None) consequent in
let alternate' = map_opt (map_loc this#if_alternate_statement) alternate in
let comments' = this#syntax_opt comments in
if
test == test'
&& consequent == consequent'
&& alternate == alternate'
&& comments == comments'
then
stmt
else
{ test = test'; consequent = consequent'; alternate = alternate'; comments = comments' }
method import_declaration _loc (decl : ('loc, 'loc) Ast.Statement.ImportDeclaration.t) =
let open Ast.Statement.ImportDeclaration in
let { import_kind; source; specifiers; default; comments } = decl in
let source' = map_loc this#import_source source in
let specifiers' = map_opt (this#import_specifier ~import_kind) specifiers in
let default' = map_opt (this#import_default_specifier ~import_kind) default in
let comments' = this#syntax_opt comments in
if
source == source'
&& specifiers == specifiers'
&& default == default'
&& comments == comments'
then
decl
else
{
import_kind;
source = source';
specifiers = specifiers';
default = default';
comments = comments';
}
method import_source _loc (source : 'loc Ast.StringLiteral.t) =
let open Ast.StringLiteral in
let { value; raw; comments } = source in
let comments' = this#syntax_opt comments in
if comments == comments' then
source
else
{ value; raw; comments = comments' }
method import_specifier
~import_kind (specifier : ('loc, 'loc) Ast.Statement.ImportDeclaration.specifier) =
let open Ast.Statement.ImportDeclaration in
match specifier with
| ImportNamedSpecifiers named_specifiers ->
let named_specifiers' =
map_list (this#import_named_specifier ~import_kind) named_specifiers
in
if named_specifiers == named_specifiers' then
specifier
else
ImportNamedSpecifiers named_specifiers'
| ImportNamespaceSpecifier (loc, ident) ->
id_loc (this#import_namespace_specifier ~import_kind) loc ident specifier (fun ident ->
ImportNamespaceSpecifier (loc, ident)
)
method remote_identifier id = this#identifier id
  (** Visits one named import specifier, e.g. [a], [a as b], [type a], or
      [typeof a] inside [import {...} from '...'].

      Which identifier introduces the local binding depends on renaming: with
      no [as]-clause, [local] is [None] and the [remote] identifier itself is
      the binding; with an [as]-clause, [local] is the binding and [remote] is
      only a reference into the imported module. *)
  method import_named_specifier
      ~(import_kind : Ast.Statement.ImportDeclaration.import_kind)
      (specifier : ('loc, 'loc) Ast.Statement.ImportDeclaration.named_specifier) =
    let open Ast.Statement.ImportDeclaration in
    let { kind; local; remote } = specifier in
    (* The specifier-level [kind] combines with the declaration-level
       [import_kind]. [is_type_remote]: the remote name is resolved in type
       space. [is_type_local]: the local binding is a type binding. For
       [typeof], the local is a type binding but the remote refers to a
       value, hence the (false, true) pair. *)
    let (is_type_remote, is_type_local) =
      match (import_kind, kind) with
      | (ImportType, _)
      | (_, Some ImportType) ->
        (true, true)
      | (ImportTypeof, _)
      | (_, Some ImportTypeof) ->
        (false, true)
      | _ -> (false, false)
    in
    let remote' =
      match local with
      (* No [as]-clause: [remote] doubles as the local binding, so visit it
         with a binding visitor rather than as a mere reference. *)
      | None ->
        if is_type_remote then
          this#binding_type_identifier remote
        else
          this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Let remote
      | Some _ -> this#remote_identifier remote
    in
    let local' =
      match local with
      | None -> None
      | Some ident ->
        let local_visitor =
          if is_type_local then
            this#binding_type_identifier
          else
            this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Let
        in
        id local_visitor ident local (fun ident -> Some ident)
    in
    (* Physical equality: return the original node untouched when no
       sub-node changed, preserving sharing. *)
    if local == local' && remote == remote' then
      specifier
    else
      { kind; local = local'; remote = remote' }
method import_default_specifier ~import_kind (id : ('loc, 'loc) Ast.Identifier.t) =
let open Ast.Statement.ImportDeclaration in
let local_visitor =
match import_kind with
| ImportType
| ImportTypeof ->
this#binding_type_identifier
| _ -> this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Let
in
local_visitor id
method import_namespace_specifier ~import_kind _loc (id : ('loc, 'loc) Ast.Identifier.t) =
let open Ast.Statement.ImportDeclaration in
let local_visitor =
match import_kind with
| ImportType
| ImportTypeof ->
this#binding_type_identifier
| _ -> this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Let
in
local_visitor id
method jsx_element _loc (expr : ('loc, 'loc) Ast.JSX.element) =
let open Ast.JSX in
let { opening_element; closing_element; children; comments } = expr in
let opening_element' = this#jsx_opening_element opening_element in
let closing_element' = map_opt this#jsx_closing_element closing_element in
let children' = this#jsx_children children in
let comments' = this#syntax_opt comments in
if
opening_element == opening_element'
&& closing_element == closing_element'
&& children == children'
&& comments == comments'
then
expr
else
{
opening_element = opening_element';
closing_element = closing_element';
children = children';
comments = comments';
}
method jsx_fragment _loc (expr : ('loc, 'loc) Ast.JSX.fragment) =
let open Ast.JSX in
let { frag_children; frag_comments; _ } = expr in
let children' = this#jsx_children frag_children in
let frag_comments' = this#syntax_opt frag_comments in
if frag_children == children' && frag_comments == frag_comments' then
expr
else
{ expr with frag_children = children'; frag_comments = frag_comments' }
method jsx_opening_element (elem : ('loc, 'loc) Ast.JSX.Opening.t) =
let open Ast.JSX.Opening in
let (loc, { name; self_closing; attributes }) = elem in
let name' = this#jsx_element_name name in
let attributes' = map_list this#jsx_opening_attribute attributes in
if name == name' && attributes == attributes' then
elem
else
(loc, { name = name'; self_closing; attributes = attributes' })
method jsx_closing_element (elem : ('loc, 'loc) Ast.JSX.Closing.t) =
let open Ast.JSX.Closing in
let (loc, { name }) = elem in
let name' = this#jsx_element_name name in
if name == name' then
elem
else
(loc, { name = name' })
method jsx_opening_attribute (jsx_attr : ('loc, 'loc) Ast.JSX.Opening.attribute) =
let open Ast.JSX.Opening in
match jsx_attr with
| Attribute attr -> id this#jsx_attribute attr jsx_attr (fun attr -> Attribute attr)
| SpreadAttribute (loc, attr) ->
id_loc this#jsx_spread_attribute loc attr jsx_attr (fun attr -> SpreadAttribute (loc, attr))
method jsx_spread_attribute _loc (attr : ('loc, 'loc) Ast.JSX.SpreadAttribute.t') =
let open Ast.JSX.SpreadAttribute in
let { argument; comments } = attr in
let argument' = this#expression argument in
let comments' = this#syntax_opt comments in
if argument == argument' && comments == comments' then
attr
else
{ argument = argument'; comments = comments' }
method jsx_attribute (attr : ('loc, 'loc) Ast.JSX.Attribute.t) =
let open Ast.JSX.Attribute in
let (loc, { name; value }) = attr in
let name' = this#jsx_attribute_name name in
let value' = map_opt this#jsx_attribute_value value in
if name == name' && value == value' then
attr
else
(loc, { name = name'; value = value' })
method jsx_attribute_name (name : ('loc, 'loc) Ast.JSX.Attribute.name) =
let open Ast.JSX.Attribute in
match name with
| Identifier ident ->
id this#jsx_attribute_name_identifier ident name (fun ident -> Identifier ident)
| NamespacedName ns ->
id this#jsx_attribute_name_namespaced ns name (fun ns -> NamespacedName ns)
method jsx_attribute_name_identifier ident = this#jsx_identifier ident
method jsx_attribute_name_namespaced ns = this#jsx_namespaced_name ns
method jsx_attribute_value (value : ('loc, 'loc) Ast.JSX.Attribute.value) =
let open Ast.JSX.Attribute in
match value with
| Literal (loc, lit) ->
id_loc this#jsx_attribute_value_literal loc lit value (fun lit -> Literal (loc, lit))
| ExpressionContainer (loc, expr) ->
id_loc this#jsx_attribute_value_expression loc expr value (fun expr ->
ExpressionContainer (loc, expr)
)
method jsx_attribute_value_expression loc (jsx_expr : ('loc, 'loc) Ast.JSX.ExpressionContainer.t)
=
this#jsx_expression loc jsx_expr
method jsx_attribute_value_literal loc (lit : 'loc Ast.Literal.t) = this#literal loc lit
method jsx_children ((loc, children) as orig : 'loc * ('loc, 'loc) Ast.JSX.child list) =
let children' = map_list this#jsx_child children in
if children == children' then
orig
else
(loc, children')
method jsx_child (child : ('loc, 'loc) Ast.JSX.child) =
let open Ast.JSX in
match child with
| (loc, Element elem) ->
id_loc this#jsx_element loc elem child (fun elem -> (loc, Element elem))
| (loc, Fragment frag) ->
id_loc this#jsx_fragment loc frag child (fun frag -> (loc, Fragment frag))
| (loc, ExpressionContainer expr) ->
id_loc this#jsx_expression loc expr child (fun expr -> (loc, ExpressionContainer expr))
| (loc, SpreadChild spread) ->
id this#jsx_spread_child spread child (fun spread -> (loc, SpreadChild spread))
| (_loc, Text _) -> child
method jsx_expression _loc (jsx_expr : ('loc, 'loc) Ast.JSX.ExpressionContainer.t) =
let open Ast.JSX.ExpressionContainer in
let { expression; comments } = jsx_expr in
let comments' = this#syntax_opt comments in
match expression with
| Expression expr ->
let expr' = this#expression expr in
if expr == expr' && comments == comments' then
jsx_expr
else
{ expression = Expression expr'; comments = comments' }
| EmptyExpression ->
if comments == comments' then
jsx_expr
else
{ expression = EmptyExpression; comments = comments' }
method jsx_spread_child (jsx_spread_child : ('loc, 'loc) Ast.JSX.SpreadChild.t) =
let open Ast.JSX.SpreadChild in
let { expression; comments } = jsx_spread_child in
let expression' = this#expression expression in
let comments' = this#syntax_opt comments in
if expression == expression' && comments == comments' then
jsx_spread_child
else
{ expression = expression'; comments = comments' }
method jsx_element_name (name : ('loc, 'loc) Ast.JSX.name) =
let open Ast.JSX in
match name with
| Identifier ident ->
id this#jsx_element_name_identifier ident name (fun ident -> Identifier ident)
| NamespacedName ns ->
id this#jsx_element_name_namespaced ns name (fun ns -> NamespacedName ns)
| MemberExpression expr ->
id this#jsx_element_name_member_expression expr name (fun expr -> MemberExpression expr)
method jsx_element_name_identifier ident = this#jsx_identifier ident
method jsx_element_name_namespaced ns = this#jsx_namespaced_name ns
method jsx_element_name_member_expression expr = this#jsx_member_expression expr
method jsx_namespaced_name (namespaced_name : ('loc, 'loc) Ast.JSX.NamespacedName.t) =
let open Ast.JSX in
NamespacedName.(
let (loc, { namespace; name }) = namespaced_name in
let namespace' = this#jsx_identifier namespace in
let name' = this#jsx_identifier name in
if namespace == namespace' && name == name' then
namespaced_name
else
(loc, { namespace = namespace'; name = name' })
)
method jsx_member_expression (member_exp : ('loc, 'loc) Ast.JSX.MemberExpression.t) =
let open Ast.JSX in
let (loc, { MemberExpression._object; MemberExpression.property }) = member_exp in
let _object' = this#jsx_member_expression_object _object in
let property' = this#jsx_identifier property in
if _object == _object' && property == property' then
member_exp
else
(loc, MemberExpression.{ _object = _object'; property = property' })
method jsx_member_expression_object (_object : ('loc, 'loc) Ast.JSX.MemberExpression._object) =
let open Ast.JSX.MemberExpression in
match _object with
| Identifier ident ->
id this#jsx_member_expression_identifier ident _object (fun ident -> Identifier ident)
| MemberExpression nested_exp ->
id this#jsx_member_expression nested_exp _object (fun exp -> MemberExpression exp)
method jsx_member_expression_identifier ident = this#jsx_element_name_identifier ident
method jsx_identifier (id : ('loc, 'loc) Ast.JSX.Identifier.t) =
let open Ast.JSX.Identifier in
let (loc, { name; comments }) = id in
let comments' = this#syntax_opt comments in
if comments == comments' then
id
else
(loc, { name; comments = comments' })
method labeled_statement _loc (stmt : ('loc, 'loc) Ast.Statement.Labeled.t) =
let open Ast.Statement.Labeled in
let { label; body; comments } = stmt in
let label' = this#label_identifier label in
let body' = this#statement body in
let comments' = this#syntax_opt comments in
if label == label' && body == body' && comments == comments' then
stmt
else
{ label = label'; body = body'; comments = comments' }
method literal _loc (expr : 'loc Ast.Literal.t) =
let open Ast.Literal in
let { value; raw; comments } = expr in
let comments' = this#syntax_opt comments in
if comments == comments' then
expr
else
{ value; raw; comments = comments' }
method logical _loc (expr : ('loc, 'loc) Ast.Expression.Logical.t) =
let open Ast.Expression.Logical in
let { operator = _; left; right; comments } = expr in
let left' = this#expression left in
let right' = this#expression right in
let comments' = this#syntax_opt comments in
if left == left' && right == right' && comments == comments' then
expr
else
{ expr with left = left'; right = right'; comments = comments' }
method member _loc (expr : ('loc, 'loc) Ast.Expression.Member.t) =
let open Ast.Expression.Member in
let { _object; property; comments } = expr in
let _object' = this#expression _object in
let property' = this#member_property property in
let comments' = this#syntax_opt comments in
if _object == _object' && property == property' && comments == comments' then
expr
else
{ _object = _object'; property = property'; comments = comments' }
method optional_member loc (expr : ('loc, 'loc) Ast.Expression.OptionalMember.t) =
let open Ast.Expression.OptionalMember in
let { member; optional = _; filtered_out = _ } = expr in
let member' = this#member loc member in
if member == member' then
expr
else
{ expr with member = member' }
method member_property (expr : ('loc, 'loc) Ast.Expression.Member.property) =
let open Ast.Expression.Member in
match expr with
| PropertyIdentifier ident ->
id this#member_property_identifier ident expr (fun ident -> PropertyIdentifier ident)
| PropertyPrivateName ident ->
id this#member_private_name ident expr (fun ident -> PropertyPrivateName ident)
| PropertyExpression e ->
id this#member_property_expression e expr (fun e -> PropertyExpression e)
method member_property_identifier (ident : ('loc, 'loc) Ast.Identifier.t) =
this#identifier ident
method member_private_name (name : 'loc Ast.PrivateName.t) = this#private_name name
method member_property_expression (expr : ('loc, 'loc) Ast.Expression.t) = this#expression expr
method meta_property _loc (expr : 'loc Ast.Expression.MetaProperty.t) =
let open Ast.Expression.MetaProperty in
let { meta; property; comments } = expr in
let meta' = this#identifier meta in
let property' = this#identifier property in
let comments' = this#syntax_opt comments in
if meta == meta' && property == property' && comments == comments' then
expr
else
{ meta = meta'; property = property'; comments = comments' }
method new_ _loc (expr : ('loc, 'loc) Ast.Expression.New.t) =
let open Ast.Expression.New in
let { callee; targs; arguments; comments } = expr in
let callee' = this#expression callee in
let targs' = map_opt this#call_type_args targs in
let arguments' = map_opt this#call_arguments arguments in
let comments' = this#syntax_opt comments in
if callee == callee' && targs == targs' && arguments == arguments' && comments == comments'
then
expr
else
{ callee = callee'; targs = targs'; arguments = arguments'; comments = comments' }
method object_ _loc (expr : ('loc, 'loc) Ast.Expression.Object.t) =
let open Ast.Expression.Object in
let { properties; comments } = expr in
let properties' =
map_list
(fun prop ->
match prop with
| Property p ->
let p' = this#object_property p in
if p == p' then
prop
else
Property p'
| SpreadProperty s ->
let s' = this#spread_property s in
if s == s' then
prop
else
SpreadProperty s')
properties
in
let comments' = this#syntax_opt comments in
if properties == properties' && comments == comments' then
expr
else
{ properties = properties'; comments = comments' }
  (** Visits one property of an object literal: an [Init] (key: value) pair,
      a [Method], or a [Get]/[Set] accessor. Keys and values/functions are
      visited separately; the original node is returned (physical identity
      preserved) when nothing changed. *)
  method object_property (prop : ('loc, 'loc) Ast.Expression.Object.Property.t) =
    let open Ast.Expression.Object.Property in
    match prop with
    | (loc, Init { key; value; shorthand }) ->
      let key' = this#object_key key in
      let value' = this#expression value in
      let shorthand' =
        (* Try to figure out if shorthand should still be true--if
           key and value change differently, it should become false *)
        shorthand
        &&
        match (key', value') with
        (* Shorthand ([{ x }]) only remains printable when the mapped key
           and mapped value are still the same identifier name. *)
        | ( Identifier (_, { Ast.Identifier.name = key_name; _ }),
            (_, Ast.Expression.Identifier (_, { Ast.Identifier.name = value_name; _ }))
          ) ->
          String.equal key_name value_name
        (* Not an identifier/identifier pair anymore: keep shorthand only if
           neither side was actually changed by the visitor. *)
        | _ -> key == key' && value == value'
      in
      if key == key' && value == value' && shorthand == shorthand' then
        prop
      else
        (loc, Init { key = key'; value = value'; shorthand = shorthand' })
    | (loc, Method { key; value = fn }) ->
      let key' = this#object_key key in
      let fn' = map_loc this#function_expression_or_method fn in
      if key == key' && fn == fn' then
        prop
      else
        (loc, Method { key = key'; value = fn' })
    | (loc, Get { key; value = fn; comments }) ->
      let key' = this#object_key key in
      let fn' = map_loc this#function_expression_or_method fn in
      let comments' = this#syntax_opt comments in
      if key == key' && fn == fn' && comments == comments' then
        prop
      else
        (loc, Get { key = key'; value = fn'; comments = comments' })
    | (loc, Set { key; value = fn; comments }) ->
      let key' = this#object_key key in
      let fn' = map_loc this#function_expression_or_method fn in
      let comments' = this#syntax_opt comments in
      if key == key' && fn == fn' && comments == comments' then
        prop
      else
        (loc, Set { key = key'; value = fn'; comments = comments' })
method object_key (key : ('loc, 'loc) Ast.Expression.Object.Property.key) =
let open Ast.Expression.Object.Property in
match key with
| Literal literal -> id this#object_key_literal literal key (fun lit -> Literal lit)
| Identifier ident -> id this#object_key_identifier ident key (fun ident -> Identifier ident)
| PrivateName ident -> id this#private_name ident key (fun ident -> PrivateName ident)
| Computed computed -> id this#object_key_computed computed key (fun expr -> Computed expr)
method object_key_literal (literal : 'loc * 'loc Ast.Literal.t) =
let (loc, lit) = literal in
id_loc this#literal loc lit literal (fun lit -> (loc, lit))
method object_key_identifier (ident : ('loc, 'loc) Ast.Identifier.t) = this#identifier ident
method object_key_computed (key : ('loc, 'loc) Ast.ComputedKey.t) = this#computed_key key
method opaque_type _loc (otype : ('loc, 'loc) Ast.Statement.OpaqueType.t) =
let open Ast.Statement.OpaqueType in
let { id; tparams; impltype; supertype; comments } = otype in
let id' = this#binding_type_identifier id in
let tparams' = map_opt this#type_params tparams in
let impltype' = map_opt this#type_ impltype in
let supertype' = map_opt this#type_ supertype in
let comments' = this#syntax_opt comments in
if
id == id'
&& impltype == impltype'
&& tparams == tparams'
&& impltype == impltype'
&& supertype == supertype'
&& comments == comments'
then
otype
else
{
id = id';
tparams = tparams';
impltype = impltype';
supertype = supertype';
comments = comments';
}
method function_param_pattern (expr : ('loc, 'loc) Ast.Pattern.t) =
this#binding_pattern ~kind:Ast.Statement.VariableDeclaration.Let expr
method variable_declarator_pattern ~kind (expr : ('loc, 'loc) Ast.Pattern.t) =
this#binding_pattern ~kind expr
method catch_clause_pattern (expr : ('loc, 'loc) Ast.Pattern.t) =
this#binding_pattern ~kind:Ast.Statement.VariableDeclaration.Let expr
method for_in_assignment_pattern (expr : ('loc, 'loc) Ast.Pattern.t) =
this#assignment_pattern expr
method for_of_assignment_pattern (expr : ('loc, 'loc) Ast.Pattern.t) =
this#assignment_pattern expr
method binding_pattern
?(kind = Ast.Statement.VariableDeclaration.Var) (expr : ('loc, 'loc) Ast.Pattern.t) =
this#pattern ~kind expr
method assignment_pattern (expr : ('loc, 'loc) Ast.Pattern.t) = this#pattern expr
  (* NOTE: Patterns are highly overloaded. A pattern can be a binding pattern,
     which has a kind (Var/Let/Const, with Var being the default for all pre-ES5
     bindings), or an assignment pattern, which has no kind. Subterms that are
     patterns inherit the kind (or lack thereof). *)
  (** Visits a pattern node. [?kind] is [Some _] for binding patterns
      (Var/Let/Const) and [None] for assignment patterns; it is threaded
      down to every sub-pattern so that nested identifiers are visited with
      the right binding kind (see the NOTE above this method). Returns the
      original node when no sub-node changed. *)
  method pattern ?kind (expr : ('loc, 'loc) Ast.Pattern.t) =
    let open Ast.Pattern in
    let (loc, patt) = expr in
    let patt' =
      match patt with
      | Object { Object.properties; annot; comments } ->
        let properties' = map_list (this#pattern_object_p ?kind) properties in
        let annot' = this#type_annotation_hint annot in
        let comments' = this#syntax_opt comments in
        if properties' == properties && annot' == annot && comments' == comments then
          patt
        else
          Object { Object.properties = properties'; annot = annot'; comments = comments' }
      | Array { Array.elements; annot; comments } ->
        let elements' = map_list (this#pattern_array_e ?kind) elements in
        let annot' = this#type_annotation_hint annot in
        let comments' = this#syntax_opt comments in
        if comments == comments' && elements' == elements && annot' == annot then
          patt
        else
          Array { Array.elements = elements'; annot = annot'; comments = comments' }
      | Identifier { Identifier.name; annot; optional } ->
        let name' = this#pattern_identifier ?kind name in
        let annot' = this#type_annotation_hint annot in
        if name == name' && annot == annot' then
          patt
        else
          Identifier { Identifier.name = name'; annot = annot'; optional }
      (* An arbitrary expression in pattern position (legal only in
         assignment patterns, e.g. [obj.x = 1] targets). *)
      | Expression e -> id this#pattern_expression e patt (fun e -> Expression e)
    in
    if patt == patt' then
      expr
    else
      (loc, patt')
method pattern_identifier ?kind (ident : ('loc, 'loc) Ast.Identifier.t) =
ignore kind;
this#identifier ident
method pattern_literal ?kind loc (expr : 'loc Ast.Literal.t) =
ignore kind;
this#literal loc expr
method pattern_object_p ?kind (p : ('loc, 'loc) Ast.Pattern.Object.property) =
let open Ast.Pattern.Object in
match p with
| Property prop -> id (this#pattern_object_property ?kind) prop p (fun prop -> Property prop)
| RestElement prop ->
id (this#pattern_object_rest_property ?kind) prop p (fun prop -> RestElement prop)
method pattern_object_property ?kind (prop : ('loc, 'loc) Ast.Pattern.Object.Property.t) =
let open Ast.Pattern.Object.Property in
let (loc, { key; pattern; default; shorthand }) = prop in
let key' = this#pattern_object_property_key ?kind key in
let pattern' = this#pattern_object_property_pattern ?kind pattern in
let default' = map_opt this#expression default in
let shorthand' =
(* Try to figure out if shorthand should still be true--if
key and value change differently, it should become false *)
shorthand
&&
match (key', pattern') with
| ( Identifier (_, { Ast.Identifier.name = key_name; _ }),
( _,
Ast.Pattern.Identifier
{ Ast.Pattern.Identifier.name = (_, { Ast.Identifier.name = value_name; _ }); _ }
)
) ->
String.equal key_name value_name
| _ -> key == key' && pattern == pattern'
in
if key' == key && pattern' == pattern && default' == default && shorthand == shorthand' then
prop
else
(loc, { key = key'; pattern = pattern'; default = default'; shorthand = shorthand' })
method pattern_object_property_key ?kind (key : ('loc, 'loc) Ast.Pattern.Object.Property.key) =
let open Ast.Pattern.Object.Property in
match key with
| Literal lit ->
id (this#pattern_object_property_literal_key ?kind) lit key (fun lit' -> Literal lit')
| Identifier identifier ->
id (this#pattern_object_property_identifier_key ?kind) identifier key (fun id' ->
Identifier id'
)
| Computed expr ->
id (this#pattern_object_property_computed_key ?kind) expr key (fun expr' -> Computed expr')
method pattern_object_property_literal_key ?kind (literal : 'loc * 'loc Ast.Literal.t) =
let (loc, key) = literal in
id_loc (this#pattern_literal ?kind) loc key literal (fun key' -> (loc, key'))
method pattern_object_property_identifier_key ?kind (key : ('loc, 'loc) Ast.Identifier.t) =
this#pattern_identifier ?kind key
method pattern_object_property_computed_key ?kind (key : ('loc, 'loc) Ast.ComputedKey.t) =
ignore kind;
this#computed_key key
method pattern_object_rest_property ?kind (prop : ('loc, 'loc) Ast.Pattern.RestElement.t) =
let open Ast.Pattern.RestElement in
let (loc, { argument; comments }) = prop in
let argument' = this#pattern_object_rest_property_pattern ?kind argument in
let comments' = this#syntax_opt comments in
if argument' == argument && comments == comments' then
prop
else
(loc, { argument = argument'; comments = comments' })
method pattern_object_property_pattern ?kind (expr : ('loc, 'loc) Ast.Pattern.t) =
this#pattern ?kind expr
method pattern_object_rest_property_pattern ?kind (expr : ('loc, 'loc) Ast.Pattern.t) =
this#pattern ?kind expr
method pattern_array_e ?kind (e : ('loc, 'loc) Ast.Pattern.Array.element) =
let open Ast.Pattern.Array in
match e with
| Hole _ -> e
| Element elem -> id (this#pattern_array_element ?kind) elem e (fun elem -> Element elem)
| RestElement elem ->
id (this#pattern_array_rest_element ?kind) elem e (fun elem -> RestElement elem)
method pattern_array_element ?kind (elem : ('loc, 'loc) Ast.Pattern.Array.Element.t) =
let open Ast.Pattern.Array.Element in
let (loc, { argument; default }) = elem in
let argument' = this#pattern_array_element_pattern ?kind argument in
let default' = map_opt this#expression default in
if argument == argument' && default == default' then
elem
else
(loc, { argument = argument'; default = default' })
method pattern_array_element_pattern ?kind (patt : ('loc, 'loc) Ast.Pattern.t) =
this#pattern ?kind patt
method pattern_array_rest_element ?kind (elem : ('loc, 'loc) Ast.Pattern.RestElement.t) =
let open Ast.Pattern.RestElement in
let (loc, { argument; comments }) = elem in
let argument' = this#pattern_array_rest_element_pattern ?kind argument in
let comments' = this#syntax_opt comments in
if argument' == argument && comments == comments' then
elem
else
(loc, { argument = argument'; comments = comments' })
method pattern_array_rest_element_pattern ?kind (expr : ('loc, 'loc) Ast.Pattern.t) =
this#pattern ?kind expr
method pattern_expression (expr : ('loc, 'loc) Ast.Expression.t) = this#expression expr
method predicate (pred : ('loc, 'loc) Ast.Type.Predicate.t) =
let open Ast.Type.Predicate in
let (loc, { kind; comments }) = pred in
let kind' =
match kind with
| Inferred -> kind
| Declared expr -> id this#expression expr kind (fun expr' -> Declared expr')
in
let comments' = this#syntax_opt comments in
if kind == kind' && comments == comments' then
pred
else
(loc, { kind = kind'; comments = comments' })
method predicate_expression (expr : ('loc, 'loc) Ast.Expression.t) = this#expression expr
method function_rest_param (expr : ('loc, 'loc) Ast.Function.RestParam.t) =
let open Ast.Function.RestParam in
let (loc, { argument; comments }) = expr in
let argument' = this#function_param_pattern argument in
let comments' = this#syntax_opt comments in
if argument == argument' && comments == comments' then
expr
else
(loc, { argument = argument'; comments = comments' })
method return _loc (stmt : ('loc, 'loc) Ast.Statement.Return.t) =
let open Ast.Statement.Return in
let { argument; comments; return_out } = stmt in
let argument' = map_opt this#expression argument in
let comments' = this#syntax_opt comments in
if argument == argument' && comments == comments' then
stmt
else
{ argument = argument'; comments = comments'; return_out }
method sequence _loc (expr : ('loc, 'loc) Ast.Expression.Sequence.t) =
let open Ast.Expression.Sequence in
let { expressions; comments } = expr in
let expressions' = map_list this#expression expressions in
let comments' = this#syntax_opt comments in
if expressions == expressions' && comments == comments' then
expr
else
{ expressions = expressions'; comments = comments' }
method toplevel_statement_list (stmts : ('loc, 'loc) Ast.Statement.t list) =
this#statement_list stmts
method statement_list (stmts : ('loc, 'loc) Ast.Statement.t list) =
map_list_multiple this#statement_fork_point stmts
method statement_fork_point (stmt : ('loc, 'loc) Ast.Statement.t) = [this#statement stmt]
method spread_element (expr : ('loc, 'loc) Ast.Expression.SpreadElement.t) =
let open Ast.Expression.SpreadElement in
let (loc, { argument; comments }) = expr in
let argument' = this#expression argument in
let comments' = this#syntax_opt comments in
if argument == argument' && comments == comments' then
expr
else
(loc, { argument = argument'; comments = comments' })
method spread_property (expr : ('loc, 'loc) Ast.Expression.Object.SpreadProperty.t) =
let open Ast.Expression.Object.SpreadProperty in
let (loc, { argument; comments }) = expr in
let argument' = this#expression argument in
let comments' = this#syntax_opt comments in
if argument == argument' && comments == comments' then
expr
else
(loc, { argument = argument'; comments = comments' })
method super_expression _loc (expr : 'loc Ast.Expression.Super.t) =
let open Ast.Expression.Super in
let { comments } = expr in
let comments' = this#syntax_opt comments in
if comments == comments' then
expr
else
{ comments = comments' }
method switch _loc (switch : ('loc, 'loc) Ast.Statement.Switch.t) =
let open Ast.Statement.Switch in
let { discriminant; cases; comments; exhaustive_out } = switch in
let discriminant' = this#expression discriminant in
let cases' = map_list this#switch_case cases in
let comments' = this#syntax_opt comments in
if discriminant == discriminant' && cases == cases' && comments == comments' then
switch
else
{ discriminant = discriminant'; cases = cases'; comments = comments'; exhaustive_out }
method switch_case (case : ('loc, 'loc) Ast.Statement.Switch.Case.t) =
let open Ast.Statement.Switch.Case in
let (loc, { test; consequent; comments }) = case in
let test' = map_opt this#expression test in
let consequent' = this#statement_list consequent in
let comments' = this#syntax_opt comments in
if test == test' && consequent == consequent' && comments == comments' then
case
else
(loc, { test = test'; consequent = consequent'; comments = comments' })
method tagged_template _loc (expr : ('loc, 'loc) Ast.Expression.TaggedTemplate.t) =
let open Ast.Expression.TaggedTemplate in
let { tag; quasi; comments } = expr in
let tag' = this#expression tag in
let quasi' = map_loc this#template_literal quasi in
let comments' = this#syntax_opt comments in
if tag == tag' && quasi == quasi' && comments == comments' then
expr
else
{ tag = tag'; quasi = quasi'; comments = comments' }
method template_literal _loc (expr : ('loc, 'loc) Ast.Expression.TemplateLiteral.t) =
let open Ast.Expression.TemplateLiteral in
let { quasis; expressions; comments } = expr in
let quasis' = map_list this#template_literal_element quasis in
let expressions' = map_list this#expression expressions in
let comments' = this#syntax_opt comments in
if quasis == quasis' && expressions == expressions' && comments == comments' then
expr
else
{ quasis = quasis'; expressions = expressions'; comments = comments' }
  (* TODO *)
method template_literal_element (elem : 'loc Ast.Expression.TemplateLiteral.Element.t) = elem
method this_expression _loc (expr : 'loc Ast.Expression.This.t) =
let open Ast.Expression.This in
let { comments } = expr in
let comments' = this#syntax_opt comments in
if comments == comments' then
expr
else
{ comments = comments' }
method throw _loc (stmt : ('loc, 'loc) Ast.Statement.Throw.t) =
let open Ast.Statement.Throw in
let { argument; comments } = stmt in
let argument' = this#expression argument in
let comments' = this#syntax_opt comments in
if argument == argument' && comments == comments' then
stmt
else
{ argument = argument'; comments = comments' }
(* Maps a try/catch/finally: block, then optional handler, then optional
   finalizer, then comments. Each optional child is rebuilt only when its
   payload changed, so untouched subtrees keep physical identity. *)
method try_catch _loc (stmt : ('loc, 'loc) Ast.Statement.Try.t) =
let open Ast.Statement.Try in
let { block; handler; finalizer; comments } = stmt in
let block' = map_loc this#block block in
(* Reuse the whole [Some (loc, clause)] option when the clause is unchanged. *)
let handler' =
match handler with
| Some (loc, clause) ->
id_loc this#catch_clause loc clause handler (fun clause -> Some (loc, clause))
| None -> handler
in
(* Same option-reuse scheme for the finally block. *)
let finalizer' =
match finalizer with
| Some (finalizer_loc, block) ->
id_loc this#block finalizer_loc block finalizer (fun block -> Some (finalizer_loc, block))
| None -> finalizer
in
let comments' = this#syntax_opt comments in
if block == block' && handler == handler' && finalizer == finalizer' && comments == comments'
then
stmt
else
{ block = block'; handler = handler'; finalizer = finalizer'; comments = comments' }
(* Maps a type cast [(expr: T)]: expression, then annotation, then comments. *)
method type_cast _loc (expr : ('loc, 'loc) Ast.Expression.TypeCast.t) =
let open Ast.Expression.TypeCast in
let { expression; annot; comments } = expr in
let expression' = this#expression expression in
let annot' = this#type_annotation annot in
let comments' = this#syntax_opt comments in
if expression' == expression && annot' == annot && comments' == comments then
expr
else
{ expression = expression'; annot = annot'; comments = comments' }
(* Maps a unary expression's operand and comments; the operator is opaque
   data and is carried through via the record-with syntax. *)
method unary_expression _loc (expr : ('loc, 'loc) Flow_ast.Expression.Unary.t) =
let open Flow_ast.Expression.Unary in
let { argument; operator = _; comments } = expr in
let argument' = this#expression argument in
let comments' = this#syntax_opt comments in
if argument == argument' && comments == comments' then
expr
else
{ expr with argument = argument'; comments = comments' }
(* Maps an update expression (++/--); operator and prefix flag pass through. *)
method update_expression _loc (expr : ('loc, 'loc) Ast.Expression.Update.t) =
let open Ast.Expression.Update in
let { argument; operator = _; prefix = _; comments } = expr in
let argument' = this#expression argument in
let comments' = this#syntax_opt comments in
if argument == argument' && comments == comments' then
expr
else
{ expr with argument = argument'; comments = comments' }
(* Maps each declarator of a var/let/const declaration, threading the
   declaration [kind] so declarators can bind identifiers appropriately. *)
method variable_declaration _loc (decl : ('loc, 'loc) Ast.Statement.VariableDeclaration.t) =
let open Ast.Statement.VariableDeclaration in
let { declarations; kind; comments } = decl in
let decls' = map_list (this#variable_declarator ~kind) declarations in
let comments' = this#syntax_opt comments in
if declarations == decls' && comments == comments' then
decl
else
{ declarations = decls'; kind; comments = comments' }
(* Maps one declarator: its binding pattern (with the declaration [kind]) and
   its optional initializer expression. *)
method variable_declarator
~kind (decl : ('loc, 'loc) Ast.Statement.VariableDeclaration.Declarator.t) =
let open Ast.Statement.VariableDeclaration.Declarator in
let (loc, { id; init }) = decl in
let id' = this#variable_declarator_pattern ~kind id in
let init' = map_opt this#expression init in
if id == id' && init == init' then
decl
else
(loc, { id = id'; init = init' })
(** Maps a [while] statement: test predicate, then body, then attached
    comments. Returns the original node when nothing changed. *)
method while_ _loc (stuff : ('loc, 'loc) Ast.Statement.While.t) =
  let open Ast.Statement.While in
  let { test; body; comments } = stuff in
  let new_test = this#predicate_expression test in
  let new_body = this#statement body in
  let new_comments = this#syntax_opt comments in
  if new_test == test && new_body == body && new_comments == comments then
    stuff
  else
    { test = new_test; body = new_body; comments = new_comments }
(* Maps a [with] statement: scrutinee object, then body, then comments. *)
method with_ _loc (stuff : ('loc, 'loc) Ast.Statement.With.t) =
let open Ast.Statement.With in
let { _object; body; comments } = stuff in
let _object' = this#expression _object in
let body' = this#statement body in
let comments' = this#syntax_opt comments in
if _object == _object' && body == body' && comments == comments' then
stuff
else
{ _object = _object'; body = body'; comments = comments' }
(* Maps a type alias: bound identifier, type parameters, right-hand type, and
   comments. Also reused by declare_type_alias. *)
method type_alias _loc (stuff : ('loc, 'loc) Ast.Statement.TypeAlias.t) =
let open Ast.Statement.TypeAlias in
let { id; tparams; right; comments } = stuff in
let id' = this#binding_type_identifier id in
let tparams' = map_opt this#type_params tparams in
let right' = this#type_ right in
let comments' = this#syntax_opt comments in
if id == id' && right == right' && tparams == tparams' && comments == comments' then
stuff
else
{ id = id'; tparams = tparams'; right = right'; comments = comments' }
(* Maps a [yield] expression's optional argument and comments; the [delegate]
   flag and [result_out] are carried through unchanged. *)
method yield _loc (expr : ('loc, 'loc) Ast.Expression.Yield.t) =
let open Ast.Expression.Yield in
let { argument; delegate; comments; result_out } = expr in
let argument' = map_opt this#expression argument in
let comments' = this#syntax_opt comments in
if comments == comments' && argument == argument' then
expr
else
{ argument = argument'; delegate; comments = comments'; result_out }
end
(* Applies each mapper's [program] pass to [ast], left to right, feeding each
   mapper's output to the next one. *)
let fold_program (mappers : 'a mapper list) ast =
List.fold_left (fun ast (m : 'a mapper) -> m#program ast) ast mappers
| null | https://raw.githubusercontent.com/rescript-lang/rescript-compiler/0f3c02b13cb8a9c5e2586541622f4a0f5f561216/jscomp/js_parser/flow_ast_mapper.ml | ocaml | * previously, we conflated [function_expression] and [class_method]. callers should be
(* Stray comment fragments recovered from extraction:
   - previously, we conflated [function_expression] and [class_method]; callers
     should be updated to override those individually.
   - Try to figure out if shorthand should still be true -- if key and value
     change differently, it should become false. *)
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
module Ast = Flow_ast
(* [map_opt f o] maps [f] over [o], returning [o] itself (physical identity)
   when the payload is unchanged, so untouched subtrees stay shared. *)
let map_opt : 'node. ('node -> 'node) -> 'node option -> 'node option =
 fun map opt ->
  match opt with
  | None -> opt
  | Some item ->
    let mapped = map item in
    if mapped == item then
      opt
    else
      Some mapped
(* [id_loc map loc item same diff] applies [map loc] to [item]; yields [same]
   when the result is physically unchanged, otherwise [diff] of the new value. *)
let id_loc : 'node 'a. ('loc -> 'node -> 'node) -> 'loc -> 'node -> 'a -> ('node -> 'a) -> 'a =
 fun map loc item same diff ->
  let mapped = map loc item in
  if mapped == item then
    same
  else
    diff mapped
(* [id map item same diff] applies [map] to [item]; returns [same] when the
   result is physically identical, otherwise [diff] applied to the new value. *)
let id : 'node 'a. ('node -> 'node) -> 'node -> 'a -> ('node -> 'a) -> 'a =
 fun map item same diff ->
  let mapped = map item in
  if mapped == item then
    same
  else
    diff mapped
(* [map_loc f (loc, x)] maps [f loc x]; returns the original pair itself when
   the payload is physically unchanged. *)
let map_loc : 'node. ('loc -> 'node -> 'node) -> 'loc * 'node -> 'loc * 'node =
 fun map same ->
  let (loc, item) = same in
  let mapped = map loc item in
  if mapped == item then
    same
  else
    (loc, mapped)
(* [map_loc_opt f o] maps [f] over an optional located node; both the option
   and the inner pair are reused (physical identity) when nothing changed. *)
let map_loc_opt : 'node. ('loc -> 'node -> 'node) -> ('loc * 'node) option -> ('loc * 'node) option
=
fun map same ->
map_opt
(fun same ->
let (loc, item) = same in
id_loc map loc item same (fun diff -> (loc, diff)))
same
(* [map_list f lst] maps [f] left-to-right over [lst]; returns [lst] itself
   (physical identity) when no element changed, preserving sharing. *)
let map_list map lst =
  let rec go acc changed = function
    | [] ->
      if changed then
        List.rev acc
      else
        lst
    | x :: rest ->
      let x' = map x in
      go (x' :: acc) (changed || x' != x) rest
  in
  go [] false lst
(* [map_list_multiple f lst] maps [f] left-to-right where each element may
   expand to zero, one, or several replacements. Returns [lst] itself only
   when every element mapped to exactly one physically-identical value. *)
let map_list_multiple map lst =
let (rev_lst, changed) =
List.fold_left
(fun (lst', changed) item ->
match map item with
| [] -> (lst', true)
| [item'] -> (item' :: lst', changed || item != item')
| items' -> (List.rev_append items' lst', true))
([], false)
lst
in
if changed then
List.rev rev_lst
else
lst
class ['loc] mapper =
object (this)
(* Entry point: maps a whole program's statement list and both comment lists,
   reusing the original node when nothing changed. *)
method program (program : ('loc, 'loc) Ast.Program.t) =
let open Ast.Program in
let (loc, { statements; comments; all_comments }) = program in
let statements' = this#toplevel_statement_list statements in
let comments' = this#syntax_opt comments in
let all_comments' = map_list this#comment all_comments in
if statements == statements' && comments == comments' && all_comments == all_comments' then
program
else
(loc, { statements = statements'; comments = comments'; all_comments = all_comments' })
(** Dispatches on the statement variant, delegating to the per-construct
    mapper method and rebuilding the node only when the mapped payload
    differs (physical equality preserves sharing for untouched subtrees). *)
method statement (stmt : ('loc, 'loc) Ast.Statement.t) =
  let open Ast.Statement in
  match stmt with
  | (loc, Block block) -> id_loc this#block loc block stmt (fun block -> (loc, Block block))
  | (loc, Break break) -> id_loc this#break loc break stmt (fun break -> (loc, Break break))
  | (loc, ClassDeclaration cls) ->
    id_loc this#class_declaration loc cls stmt (fun cls -> (loc, ClassDeclaration cls))
  | (loc, Continue cont) -> id_loc this#continue loc cont stmt (fun cont -> (loc, Continue cont))
  | (loc, Debugger dbg) -> id_loc this#debugger loc dbg stmt (fun dbg -> (loc, Debugger dbg))
  | (loc, DeclareClass stuff) ->
    id_loc this#declare_class loc stuff stmt (fun stuff -> (loc, DeclareClass stuff))
  | (loc, DeclareExportDeclaration decl) ->
    id_loc this#declare_export_declaration loc decl stmt (fun decl ->
        (loc, DeclareExportDeclaration decl)
    )
  | (loc, DeclareFunction stuff) ->
    id_loc this#declare_function loc stuff stmt (fun stuff -> (loc, DeclareFunction stuff))
  | (loc, DeclareInterface stuff) ->
    id_loc this#declare_interface loc stuff stmt (fun stuff -> (loc, DeclareInterface stuff))
  | (loc, DeclareModule m) ->
    id_loc this#declare_module loc m stmt (fun m -> (loc, DeclareModule m))
  | (loc, DeclareTypeAlias stuff) ->
    id_loc this#declare_type_alias loc stuff stmt (fun stuff -> (loc, DeclareTypeAlias stuff))
  | (loc, DeclareVariable stuff) ->
    id_loc this#declare_variable loc stuff stmt (fun stuff -> (loc, DeclareVariable stuff))
  | (loc, DeclareModuleExports annot) ->
    id_loc this#declare_module_exports loc annot stmt (fun annot ->
        (loc, DeclareModuleExports annot)
    )
  | (loc, DoWhile stuff) ->
    id_loc this#do_while loc stuff stmt (fun stuff -> (loc, DoWhile stuff))
  | (loc, Empty empty) -> id_loc this#empty loc empty stmt (fun empty -> (loc, Empty empty))
  | (loc, EnumDeclaration enum) ->
    id_loc this#enum_declaration loc enum stmt (fun enum -> (loc, EnumDeclaration enum))
  | (loc, ExportDefaultDeclaration decl) ->
    id_loc this#export_default_declaration loc decl stmt (fun decl ->
        (loc, ExportDefaultDeclaration decl)
    )
  | (loc, ExportNamedDeclaration decl) ->
    id_loc this#export_named_declaration loc decl stmt (fun decl ->
        (loc, ExportNamedDeclaration decl)
    )
  | (loc, Expression expr) ->
    id_loc this#expression_statement loc expr stmt (fun expr -> (loc, Expression expr))
  | (loc, For for_stmt) ->
    id_loc this#for_statement loc for_stmt stmt (fun for_stmt -> (loc, For for_stmt))
  | (loc, ForIn stuff) ->
    id_loc this#for_in_statement loc stuff stmt (fun stuff -> (loc, ForIn stuff))
  | (loc, ForOf stuff) ->
    id_loc this#for_of_statement loc stuff stmt (fun stuff -> (loc, ForOf stuff))
  | (loc, FunctionDeclaration func) ->
    id_loc this#function_declaration loc func stmt (fun func -> (loc, FunctionDeclaration func))
  | (loc, If if_stmt) ->
    id_loc this#if_statement loc if_stmt stmt (fun if_stmt -> (loc, If if_stmt))
  | (loc, ImportDeclaration decl) ->
    id_loc this#import_declaration loc decl stmt (fun decl -> (loc, ImportDeclaration decl))
  | (loc, InterfaceDeclaration stuff) ->
    id_loc this#interface_declaration loc stuff stmt (fun stuff ->
        (loc, InterfaceDeclaration stuff)
    )
  | (loc, Labeled label) ->
    id_loc this#labeled_statement loc label stmt (fun label -> (loc, Labeled label))
  | (loc, OpaqueType otype) ->
    id_loc this#opaque_type loc otype stmt (fun otype -> (loc, OpaqueType otype))
  | (loc, Return ret) -> id_loc this#return loc ret stmt (fun ret -> (loc, Return ret))
  | (loc, Switch switch) ->
    id_loc this#switch loc switch stmt (fun switch -> (loc, Switch switch))
  | (loc, Throw throw) -> id_loc this#throw loc throw stmt (fun throw -> (loc, Throw throw))
  | (loc, Try try_stmt) ->
    id_loc this#try_catch loc try_stmt stmt (fun try_stmt -> (loc, Try try_stmt))
  | (loc, VariableDeclaration decl) ->
    id_loc this#variable_declaration loc decl stmt (fun decl -> (loc, VariableDeclaration decl))
  | (loc, While stuff) -> id_loc this#while_ loc stuff stmt (fun stuff -> (loc, While stuff))
  | (loc, With stuff) -> id_loc this#with_ loc stuff stmt (fun stuff -> (loc, With stuff))
  | (loc, TypeAlias stuff) ->
    id_loc this#type_alias loc stuff stmt (fun stuff -> (loc, TypeAlias stuff))
  | (loc, DeclareOpaqueType otype) ->
    (* BUG FIX: this case previously rebuilt the node as [OpaqueType], silently
       turning a declare statement into a regular opaque-type statement whenever
       the mapped payload changed. Rebuild with the original constructor. *)
    id_loc this#opaque_type loc otype stmt (fun otype -> (loc, DeclareOpaqueType otype))
(* A comment node has no mappable children; identity by default. *)
method comment (c : 'loc Ast.Comment.t) = c
(* Optional attached-comments wrapper around [syntax], reusing the option
   when nothing changed. *)
method syntax_opt
: 'internal. ('loc, 'internal) Ast.Syntax.t option -> ('loc, 'internal) Ast.Syntax.t option
=
map_opt this#syntax
(* Maps the leading/trailing comment lists of an attached-comments record;
   the [internal] payload is carried through untouched. *)
method syntax : 'internal. ('loc, 'internal) Ast.Syntax.t -> ('loc, 'internal) Ast.Syntax.t =
fun attached ->
let open Ast.Syntax in
let { leading; trailing; internal } = attached in
let leading' = map_list this#comment leading in
let trailing' = map_list this#comment trailing in
if leading == leading' && trailing == trailing' then
attached
else
{ leading = leading'; trailing = trailing'; internal }
(* Dispatches on the expression variant, delegating to the per-construct
   mapper method; each case rebuilds the node only when the payload changed.
   Note: Identifier uses plain [id] (no loc threaded), and Import /
   OptionalCall partially apply the method with [loc] before [id]. *)
method expression (expr : ('loc, 'loc) Ast.Expression.t) =
let open Ast.Expression in
match expr with
| (loc, Array x) -> id_loc this#array loc x expr (fun x -> (loc, Array x))
| (loc, ArrowFunction x) ->
id_loc this#arrow_function loc x expr (fun x -> (loc, ArrowFunction x))
| (loc, Assignment x) -> id_loc this#assignment loc x expr (fun x -> (loc, Assignment x))
| (loc, Binary x) -> id_loc this#binary loc x expr (fun x -> (loc, Binary x))
| (loc, Call x) -> id_loc this#call loc x expr (fun x -> (loc, Call x))
| (loc, Class x) -> id_loc this#class_expression loc x expr (fun x -> (loc, Class x))
| (loc, Comprehension x) ->
id_loc this#comprehension loc x expr (fun x -> (loc, Comprehension x))
| (loc, Conditional x) -> id_loc this#conditional loc x expr (fun x -> (loc, Conditional x))
| (loc, Function x) -> id_loc this#function_expression loc x expr (fun x -> (loc, Function x))
| (loc, Generator x) -> id_loc this#generator loc x expr (fun x -> (loc, Generator x))
| (loc, Identifier x) -> id this#identifier x expr (fun x -> (loc, Identifier x))
| (loc, Import x) -> id (this#import loc) x expr (fun x -> (loc, Import x))
| (loc, JSXElement x) -> id_loc this#jsx_element loc x expr (fun x -> (loc, JSXElement x))
| (loc, JSXFragment x) -> id_loc this#jsx_fragment loc x expr (fun x -> (loc, JSXFragment x))
| (loc, Literal x) -> id_loc this#literal loc x expr (fun x -> (loc, Literal x))
| (loc, Logical x) -> id_loc this#logical loc x expr (fun x -> (loc, Logical x))
| (loc, Member x) -> id_loc this#member loc x expr (fun x -> (loc, Member x))
| (loc, MetaProperty x) ->
id_loc this#meta_property loc x expr (fun x -> (loc, MetaProperty x))
| (loc, New x) -> id_loc this#new_ loc x expr (fun x -> (loc, New x))
| (loc, Object x) -> id_loc this#object_ loc x expr (fun x -> (loc, Object x))
| (loc, OptionalCall x) -> id (this#optional_call loc) x expr (fun x -> (loc, OptionalCall x))
| (loc, OptionalMember x) ->
id_loc this#optional_member loc x expr (fun x -> (loc, OptionalMember x))
| (loc, Sequence x) -> id_loc this#sequence loc x expr (fun x -> (loc, Sequence x))
| (loc, Super x) -> id_loc this#super_expression loc x expr (fun x -> (loc, Super x))
| (loc, TaggedTemplate x) ->
id_loc this#tagged_template loc x expr (fun x -> (loc, TaggedTemplate x))
| (loc, TemplateLiteral x) ->
id_loc this#template_literal loc x expr (fun x -> (loc, TemplateLiteral x))
| (loc, This x) -> id_loc this#this_expression loc x expr (fun x -> (loc, This x))
| (loc, TypeCast x) -> id_loc this#type_cast loc x expr (fun x -> (loc, TypeCast x))
| (loc, Unary x) -> id_loc this#unary_expression loc x expr (fun x -> (loc, Unary x))
| (loc, Update x) -> id_loc this#update_expression loc x expr (fun x -> (loc, Update x))
| (loc, Yield x) -> id_loc this#yield loc x expr (fun x -> (loc, Yield x))
method array _loc (expr : ('loc, 'loc) Ast.Expression.Array.t) =
let open Ast.Expression in
let { Array.elements; comments } = expr in
let elements' = map_list this#array_element elements in
let comments' = this#syntax_opt comments in
if elements == elements' && comments == comments' then
expr
else
{ Array.elements = elements'; comments = comments' }
method array_element element =
let open Ast.Expression.Array in
match element with
| Expression expr -> id this#expression expr element (fun expr -> Expression expr)
| Spread spread -> id this#spread_element spread element (fun spread -> Spread spread)
| Hole _ -> element
method arrow_function loc (expr : ('loc, 'loc) Ast.Function.t) = this#function_ loc expr
method assignment _loc (expr : ('loc, 'loc) Ast.Expression.Assignment.t) =
let open Ast.Expression.Assignment in
let { operator = _; left; right; comments } = expr in
let left' = this#assignment_pattern left in
let right' = this#expression right in
let comments' = this#syntax_opt comments in
if left == left' && right == right' && comments == comments' then
expr
else
{ expr with left = left'; right = right'; comments = comments' }
method binary _loc (expr : ('loc, 'loc) Ast.Expression.Binary.t) =
let open Ast.Expression.Binary in
let { operator = _; left; right; comments } = expr in
let left' = this#expression left in
let right' = this#expression right in
let comments' = this#syntax_opt comments in
if left == left' && right == right' && comments == comments' then
expr
else
{ expr with left = left'; right = right'; comments = comments' }
method block _loc (stmt : ('loc, 'loc) Ast.Statement.Block.t) =
let open Ast.Statement.Block in
let { body; comments } = stmt in
let body' = this#statement_list body in
let comments' = this#syntax_opt comments in
if body == body' && comments == comments' then
stmt
else
{ body = body'; comments = comments' }
method break _loc (break : 'loc Ast.Statement.Break.t) =
let open Ast.Statement.Break in
let { label; comments } = break in
let label' = map_opt this#label_identifier label in
let comments' = this#syntax_opt comments in
if label == label' && comments == comments' then
break
else
{ label = label'; comments = comments' }
method call _loc (expr : ('loc, 'loc) Ast.Expression.Call.t) =
let open Ast.Expression.Call in
let { callee; targs; arguments; comments } = expr in
let callee' = this#expression callee in
let targs' = map_opt this#call_type_args targs in
let arguments' = this#call_arguments arguments in
let comments' = this#syntax_opt comments in
if callee == callee' && targs == targs' && arguments == arguments' && comments == comments'
then
expr
else
{ callee = callee'; targs = targs'; arguments = arguments'; comments = comments' }
method call_arguments (arg_list : ('loc, 'loc) Ast.Expression.ArgList.t) =
let open Ast.Expression.ArgList in
let (loc, { arguments; comments }) = arg_list in
let arguments' = map_list this#expression_or_spread arguments in
let comments' = this#syntax_opt comments in
if arguments == arguments' && comments == comments' then
arg_list
else
(loc, { arguments = arguments'; comments = comments' })
method optional_call loc (expr : ('loc, 'loc) Ast.Expression.OptionalCall.t) =
let open Ast.Expression.OptionalCall in
let { call; optional = _; filtered_out = _ } = expr in
let call' = this#call loc call in
if call == call' then
expr
else
{ expr with call = call' }
method call_type_args (targs : ('loc, 'loc) Ast.Expression.CallTypeArgs.t) =
let open Ast.Expression.CallTypeArgs in
let (loc, { arguments; comments }) = targs in
let arguments' = map_list this#call_type_arg arguments in
let comments' = this#syntax_opt comments in
if arguments == arguments' && comments == comments' then
targs
else
(loc, { arguments = arguments'; comments = comments' })
method call_type_arg t =
let open Ast.Expression.CallTypeArg in
match t with
| Explicit x ->
let x' = this#type_ x in
if x' == x then
t
else
Explicit x'
| Implicit (loc, { Implicit.comments }) ->
let comments' = this#syntax_opt comments in
if comments == comments' then
t
else
Implicit (loc, { Implicit.comments = comments' })
method catch_body (body : 'loc * ('loc, 'loc) Ast.Statement.Block.t) = map_loc this#block body
method catch_clause _loc (clause : ('loc, 'loc) Ast.Statement.Try.CatchClause.t') =
let open Ast.Statement.Try.CatchClause in
let { param; body; comments } = clause in
let param' = map_opt this#catch_clause_pattern param in
let body' = this#catch_body body in
let comments' = this#syntax_opt comments in
if param == param' && body == body' && comments == comments' then
clause
else
{ param = param'; body = body'; comments = comments' }
method class_declaration loc (cls : ('loc, 'loc) Ast.Class.t) = this#class_ loc cls
method class_expression loc (cls : ('loc, 'loc) Ast.Class.t) = this#class_ loc cls
method class_ _loc (cls : ('loc, 'loc) Ast.Class.t) =
let open Ast.Class in
let { id; body; tparams; extends; implements; class_decorators; comments } = cls in
let id' = map_opt this#class_identifier id in
let tparams' = map_opt this#type_params tparams in
let body' = this#class_body body in
let extends' = map_opt (map_loc this#class_extends) extends in
let implements' = map_opt this#class_implements implements in
let class_decorators' = map_list this#class_decorator class_decorators in
let comments' = this#syntax_opt comments in
if
id == id'
&& body == body'
&& extends == extends'
&& implements == implements'
&& class_decorators == class_decorators'
&& comments == comments'
&& tparams == tparams'
then
cls
else
{
id = id';
body = body';
extends = extends';
implements = implements';
class_decorators = class_decorators';
comments = comments';
tparams = tparams';
}
method class_extends _loc (extends : ('loc, 'loc) Ast.Class.Extends.t') =
let open Ast.Class.Extends in
let { expr; targs; comments } = extends in
let expr' = this#expression expr in
let targs' = map_opt this#type_args targs in
let comments' = this#syntax_opt comments in
if expr == expr' && targs == targs' && comments == comments' then
extends
else
{ expr = expr'; targs = targs'; comments = comments' }
method class_identifier (ident : ('loc, 'loc) Ast.Identifier.t) =
this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Let ident
method class_body (cls_body : ('loc, 'loc) Ast.Class.Body.t) =
let open Ast.Class.Body in
let (loc, { body; comments }) = cls_body in
let body' = map_list this#class_element body in
let comments' = this#syntax_opt comments in
if body == body' && comments == comments' then
cls_body
else
(loc, { body = body'; comments = comments' })
method class_decorator (dec : ('loc, 'loc) Ast.Class.Decorator.t) =
let open Ast.Class.Decorator in
let (loc, { expression; comments }) = dec in
let expression' = this#expression expression in
let comments' = this#syntax_opt comments in
if expression == expression' && comments == comments' then
dec
else
(loc, { expression = expression'; comments = comments' })
method class_element (elem : ('loc, 'loc) Ast.Class.Body.element) =
let open Ast.Class.Body in
match elem with
| Method (loc, meth) -> id_loc this#class_method loc meth elem (fun meth -> Method (loc, meth))
| Property (loc, prop) ->
id_loc this#class_property loc prop elem (fun prop -> Property (loc, prop))
| PrivateField (loc, field) ->
id_loc this#class_private_field loc field elem (fun field -> PrivateField (loc, field))
method class_implements (implements : ('loc, 'loc) Ast.Class.Implements.t) =
let open Ast.Class.Implements in
let (loc, { interfaces; comments }) = implements in
let interfaces' = map_list this#class_implements_interface interfaces in
let comments' = this#syntax_opt comments in
if interfaces == interfaces' && comments == comments' then
implements
else
(loc, { interfaces = interfaces'; comments = comments' })
method class_implements_interface (interface : ('loc, 'loc) Ast.Class.Implements.Interface.t) =
let open Ast.Class.Implements.Interface in
let (loc, { id; targs }) = interface in
let id' = this#type_identifier_reference id in
let targs' = map_opt this#type_args targs in
if id == id' && targs == targs' then
interface
else
(loc, { id = id'; targs = targs' })
method class_method _loc (meth : ('loc, 'loc) Ast.Class.Method.t') =
let open Ast.Class.Method in
let { kind = _; key; value; static = _; decorators; comments } = meth in
let key' = this#object_key key in
let value' = map_loc this#function_expression_or_method value in
let decorators' = map_list this#class_decorator decorators in
let comments' = this#syntax_opt comments in
if key == key' && value == value' && decorators == decorators' && comments == comments' then
meth
else
{ meth with key = key'; value = value'; decorators = decorators'; comments = comments' }
method class_property _loc (prop : ('loc, 'loc) Ast.Class.Property.t') =
let open Ast.Class.Property in
let { key; value; annot; static = _; variance; comments } = prop in
let key' = this#object_key key in
let value' = this#class_property_value value in
let annot' = this#type_annotation_hint annot in
let variance' = this#variance_opt variance in
let comments' = this#syntax_opt comments in
if
key == key'
&& value == value'
&& annot' == annot
&& variance' == variance
&& comments' == comments
then
prop
else
{
prop with
key = key';
value = value';
annot = annot';
variance = variance';
comments = comments';
}
method class_property_value (value : ('loc, 'loc) Ast.Class.Property.value) =
let open Ast.Class.Property in
match value with
| Declared -> value
| Uninitialized -> value
| Initialized x ->
let x' = this#expression x in
if x == x' then
value
else
Initialized x'
method class_private_field _loc (prop : ('loc, 'loc) Ast.Class.PrivateField.t') =
let open Ast.Class.PrivateField in
let { key; value; annot; static = _; variance; comments } = prop in
let key' = this#private_name key in
let value' = this#class_property_value value in
let annot' = this#type_annotation_hint annot in
let variance' = this#variance_opt variance in
let comments' = this#syntax_opt comments in
if
key == key'
&& value == value'
&& annot' == annot
&& variance' == variance
&& comments' == comments
then
prop
else
{
prop with
key = key';
value = value';
annot = annot';
variance = variance';
comments = comments';
}
(* TODO *)
method comprehension _loc (expr : ('loc, 'loc) Ast.Expression.Comprehension.t) = expr
method conditional _loc (expr : ('loc, 'loc) Ast.Expression.Conditional.t) =
let open Ast.Expression.Conditional in
let { test; consequent; alternate; comments } = expr in
let test' = this#predicate_expression test in
let consequent' = this#expression consequent in
let alternate' = this#expression alternate in
let comments' = this#syntax_opt comments in
if
test == test'
&& consequent == consequent'
&& alternate == alternate'
&& comments == comments'
then
expr
else
{ test = test'; consequent = consequent'; alternate = alternate'; comments = comments' }
method continue _loc (cont : 'loc Ast.Statement.Continue.t) =
let open Ast.Statement.Continue in
let { label; comments } = cont in
let label' = map_opt this#label_identifier label in
let comments' = this#syntax_opt comments in
if label == label' && comments == comments' then
cont
else
{ label = label'; comments = comments' }
method debugger _loc (dbg : 'loc Ast.Statement.Debugger.t) =
let open Ast.Statement.Debugger in
let { comments } = dbg in
let comments' = this#syntax_opt comments in
if comments == comments' then
dbg
else
{ comments = comments' }
method declare_class _loc (decl : ('loc, 'loc) Ast.Statement.DeclareClass.t) =
let open Ast.Statement.DeclareClass in
let { id = ident; tparams; body; extends; mixins; implements; comments } = decl in
let id' = this#class_identifier ident in
let tparams' = map_opt this#type_params tparams in
let body' = map_loc this#object_type body in
let extends' = map_opt (map_loc this#generic_type) extends in
let mixins' = map_list (map_loc this#generic_type) mixins in
let implements' = map_opt this#class_implements implements in
let comments' = this#syntax_opt comments in
if
id' == ident
&& tparams' == tparams
&& body' == body
&& extends' == extends
&& mixins' == mixins
&& implements' == implements
&& comments' == comments
then
decl
else
{
id = id';
tparams = tparams';
body = body';
extends = extends';
mixins = mixins';
implements = implements';
comments = comments';
}
method declare_export_declaration
_loc (decl : ('loc, 'loc) Ast.Statement.DeclareExportDeclaration.t) =
let open Ast.Statement.DeclareExportDeclaration in
let { default; source; specifiers; declaration; comments } = decl in
let source' = map_loc_opt this#export_source source in
let specifiers' = map_opt this#export_named_specifier specifiers in
let declaration' = map_opt this#declare_export_declaration_decl declaration in
let comments' = this#syntax_opt comments in
if
source == source'
&& specifiers == specifiers'
&& declaration == declaration'
&& comments == comments'
then
decl
else
{
default;
source = source';
specifiers = specifiers';
declaration = declaration';
comments = comments';
}
method declare_export_declaration_decl
(decl : ('loc, 'loc) Ast.Statement.DeclareExportDeclaration.declaration) =
let open Ast.Statement.DeclareExportDeclaration in
match decl with
| Variable (loc, dv) ->
let dv' = this#declare_variable loc dv in
if dv' == dv then
decl
else
Variable (loc, dv')
| Function (loc, df) ->
let df' = this#declare_function loc df in
if df' == df then
decl
else
Function (loc, df')
| Class (loc, dc) ->
let dc' = this#declare_class loc dc in
if dc' == dc then
decl
else
Class (loc, dc')
| DefaultType t ->
let t' = this#type_ t in
if t' == t then
decl
else
DefaultType t'
| NamedType (loc, ta) ->
let ta' = this#type_alias loc ta in
if ta' == ta then
decl
else
NamedType (loc, ta')
| NamedOpaqueType (loc, ot) ->
let ot' = this#opaque_type loc ot in
if ot' == ot then
decl
else
NamedOpaqueType (loc, ot')
| Interface (loc, i) ->
let i' = this#interface loc i in
if i' == i then
decl
else
Interface (loc, i')
method declare_function _loc (decl : ('loc, 'loc) Ast.Statement.DeclareFunction.t) =
let open Ast.Statement.DeclareFunction in
let { id = ident; annot; predicate; comments } = decl in
let id' = this#function_identifier ident in
let annot' = this#type_annotation annot in
let predicate' = map_opt this#predicate predicate in
let comments' = this#syntax_opt comments in
if id' == ident && annot' == annot && predicate' == predicate && comments' == comments then
decl
else
{ id = id'; annot = annot'; predicate = predicate'; comments = comments' }
method declare_interface loc (decl : ('loc, 'loc) Ast.Statement.Interface.t) =
this#interface loc decl
method declare_module _loc (m : ('loc, 'loc) Ast.Statement.DeclareModule.t) =
let open Ast.Statement.DeclareModule in
let { id; body; kind; comments } = m in
let body' = map_loc this#block body in
let comments' = this#syntax_opt comments in
if body' == body && comments == comments' then
m
else
{ id; body = body'; kind; comments = comments' }
method declare_module_exports _loc (exports : ('loc, 'loc) Ast.Statement.DeclareModuleExports.t)
=
let open Ast.Statement.DeclareModuleExports in
let { annot; comments } = exports in
let annot' = this#type_annotation annot in
let comments' = this#syntax_opt comments in
if annot == annot' && comments == comments' then
exports
else
{ annot = annot'; comments = comments' }
method declare_type_alias loc (decl : ('loc, 'loc) Ast.Statement.TypeAlias.t) =
this#type_alias loc decl
method declare_variable _loc (decl : ('loc, 'loc) Ast.Statement.DeclareVariable.t) =
let open Ast.Statement.DeclareVariable in
let { id = ident; annot; comments } = decl in
let id' = this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Var ident in
let annot' = this#type_annotation annot in
let comments' = this#syntax_opt comments in
if id' == ident && annot' == annot && comments' == comments then
decl
else
{ id = id'; annot = annot'; comments = comments' }
method do_while _loc (stuff : ('loc, 'loc) Ast.Statement.DoWhile.t) =
let open Ast.Statement.DoWhile in
let { body; test; comments } = stuff in
let body' = this#statement body in
let test' = this#predicate_expression test in
let comments' = this#syntax_opt comments in
if body == body' && test == test' && comments == comments' then
stuff
else
{ body = body'; test = test'; comments = comments' }
method empty _loc empty =
let open Ast.Statement.Empty in
let { comments } = empty in
let comments' = this#syntax_opt comments in
if comments == comments' then
empty
else
{ comments = comments' }
(* Visits an enum declaration. The enum name is treated as a const binding
   (hence ~kind:Const). Rebuilds only when a child node changed. *)
method enum_declaration _loc (enum : ('loc, 'loc) Ast.Statement.EnumDeclaration.t) =
  let open Ast.Statement.EnumDeclaration in
  let { id = ident; body; comments } = enum in
  let id' = this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Const ident in
  let body' = this#enum_body body in
  let comments' = this#syntax_opt comments in
  if ident == id' && body == body' && comments == comments' then
    enum
  else
    { id = id'; body = body'; comments = comments' }
(* Dispatches on the enum body kind; [id] preserves the original node
   when the specific body visitor returns it unchanged. *)
method enum_body (body : 'loc Ast.Statement.EnumDeclaration.body) =
  let open Ast.Statement.EnumDeclaration in
  match body with
  | (loc, BooleanBody boolean_body) ->
    id this#enum_boolean_body boolean_body body (fun body -> (loc, BooleanBody body))
  | (loc, NumberBody number_body) ->
    id this#enum_number_body number_body body (fun body -> (loc, NumberBody body))
  | (loc, StringBody string_body) ->
    id this#enum_string_body string_body body (fun body -> (loc, StringBody body))
  | (loc, SymbolBody symbol_body) ->
    id this#enum_symbol_body symbol_body body (fun body -> (loc, SymbolBody body))
(* Visits the members of a boolean enum body; explicit_type and
   has_unknown_members are carried over untouched (note the [body with]). *)
method enum_boolean_body (body : 'loc Ast.Statement.EnumDeclaration.BooleanBody.t) =
  let open Ast.Statement.EnumDeclaration.BooleanBody in
  let { members; explicit_type = _; has_unknown_members = _; comments } = body in
  let members' = map_list this#enum_boolean_member members in
  let comments' = this#syntax_opt comments in
  if members == members' && comments == comments' then
    body
  else
    { body with members = members'; comments = comments' }
(* Same as enum_boolean_body, for number enums. *)
method enum_number_body (body : 'loc Ast.Statement.EnumDeclaration.NumberBody.t) =
  let open Ast.Statement.EnumDeclaration.NumberBody in
  let { members; explicit_type = _; has_unknown_members = _; comments } = body in
  let members' = map_list this#enum_number_member members in
  let comments' = this#syntax_opt comments in
  if members == members' && comments == comments' then
    body
  else
    { body with members = members'; comments = comments' }
(* String enums distinguish defaulted members (no initializer) from
   initialized ones; each variant gets its own member visitor. *)
method enum_string_body (body : 'loc Ast.Statement.EnumDeclaration.StringBody.t) =
  let open Ast.Statement.EnumDeclaration.StringBody in
  let { members; explicit_type = _; has_unknown_members = _; comments } = body in
  let members' =
    match members with
    | Defaulted m -> id (map_list this#enum_defaulted_member) m members (fun m -> Defaulted m)
    | Initialized m -> id (map_list this#enum_string_member) m members (fun m -> Initialized m)
  in
  let comments' = this#syntax_opt comments in
  if members == members' && comments == comments' then
    body
  else
    { body with members = members'; comments = comments' }
(* Symbol enum members are always defaulted (no initializers). *)
method enum_symbol_body (body : 'loc Ast.Statement.EnumDeclaration.SymbolBody.t) =
  let open Ast.Statement.EnumDeclaration.SymbolBody in
  let { members; has_unknown_members = _; comments } = body in
  let members' = map_list this#enum_defaulted_member members in
  let comments' = this#syntax_opt comments in
  if members == members' && comments == comments' then
    body
  else
    { body with members = members'; comments = comments' }
(* An enum member without an initializer: only the identifier is visited. *)
method enum_defaulted_member (member : 'loc Ast.Statement.EnumDeclaration.DefaultedMember.t) =
  let open Ast.Statement.EnumDeclaration.DefaultedMember in
  let (loc, { id = ident }) = member in
  let id' = this#enum_member_identifier ident in
  if ident == id' then
    member
  else
    (loc, { id = id' })
(* Initialized enum members: only the identifier is visited; the
   initializer is a literal and is deliberately left untouched here. *)
method enum_boolean_member
    (member :
      ('loc Ast.BooleanLiteral.t, 'loc) Ast.Statement.EnumDeclaration.InitializedMember.t
    ) =
  let open Ast.Statement.EnumDeclaration.InitializedMember in
  let (loc, { id = ident; init }) = member in
  let id' = this#enum_member_identifier ident in
  if ident == id' then
    member
  else
    (loc, { id = id'; init })
(* Same as enum_boolean_member, for number-literal initializers. *)
method enum_number_member
    (member : ('loc Ast.NumberLiteral.t, 'loc) Ast.Statement.EnumDeclaration.InitializedMember.t)
    =
  let open Ast.Statement.EnumDeclaration.InitializedMember in
  let (loc, { id = ident; init }) = member in
  let id' = this#enum_member_identifier ident in
  if ident == id' then
    member
  else
    (loc, { id = id'; init })
(* Same as enum_boolean_member, for string-literal initializers. *)
method enum_string_member
    (member : ('loc Ast.StringLiteral.t, 'loc) Ast.Statement.EnumDeclaration.InitializedMember.t)
    =
  let open Ast.Statement.EnumDeclaration.InitializedMember in
  let (loc, { id = ident; init }) = member in
  let id' = this#enum_member_identifier ident in
  if ident == id' then
    member
  else
    (loc, { id = id'; init })
(* Override point for enum member names; defaults to the plain identifier visitor. *)
method enum_member_identifier (id : ('loc, 'loc) Ast.Identifier.t) = this#identifier id
(* Visits [export default ...]; the [default] keyword location passes through. *)
method export_default_declaration
    _loc (decl : ('loc, 'loc) Ast.Statement.ExportDefaultDeclaration.t) =
  let open Ast.Statement.ExportDefaultDeclaration in
  let { default; declaration; comments } = decl in
  let declaration' = this#export_default_declaration_decl declaration in
  let comments' = this#syntax_opt comments in
  if declaration' == declaration && comments' == comments then
    decl
  else
    { default; declaration = declaration'; comments = comments' }
(* The default export is either a statement (e.g. a function/class
   declaration) or a bare expression. *)
method export_default_declaration_decl
    (decl : ('loc, 'loc) Ast.Statement.ExportDefaultDeclaration.declaration) =
  let open Ast.Statement.ExportDefaultDeclaration in
  match decl with
  | Declaration stmt -> id this#statement stmt decl (fun stmt -> Declaration stmt)
  | Expression expr -> id this#expression expr decl (fun expr -> Expression expr)
(* Visits a named export: its optional re-export source, specifier list,
   and inline declaration. [export_kind] (value vs. type) passes through. *)
method export_named_declaration _loc (decl : ('loc, 'loc) Ast.Statement.ExportNamedDeclaration.t)
    =
  let open Ast.Statement.ExportNamedDeclaration in
  let { export_kind; source; specifiers; declaration; comments } = decl in
  let source' = map_loc_opt this#export_source source in
  let specifiers' = map_opt this#export_named_specifier specifiers in
  let declaration' = map_opt this#statement declaration in
  let comments' = this#syntax_opt comments in
  if
    source == source'
    && specifiers == specifiers'
    && declaration == declaration'
    && comments == comments'
  then
    decl
  else
    {
      export_kind;
      source = source';
      specifiers = specifiers';
      declaration = declaration';
      comments = comments';
    }
(* One [local as exported] pair inside an export specifier list. *)
method export_named_declaration_specifier
    (spec : 'loc Ast.Statement.ExportNamedDeclaration.ExportSpecifier.t) =
  let open Ast.Statement.ExportNamedDeclaration.ExportSpecifier in
  let (loc, { local; exported }) = spec in
  let local' = this#identifier local in
  let exported' = map_opt this#identifier exported in
  if local == local' && exported == exported' then
    spec
  else
    (loc, { local = local'; exported = exported' })
(* [export * as name from ...]: the identifier is absent for a bare [export *]. *)
method export_batch_specifier
    (spec : 'loc Ast.Statement.ExportNamedDeclaration.ExportBatchSpecifier.t) =
  let (loc, id_opt) = spec in
  let id_opt' = map_opt this#identifier id_opt in
  if id_opt == id_opt' then
    spec
  else
    (loc, id_opt')
(* Dispatches between a braced specifier list and a batch [*] specifier. *)
method export_named_specifier (spec : 'loc Ast.Statement.ExportNamedDeclaration.specifier) =
  let open Ast.Statement.ExportNamedDeclaration in
  match spec with
  | ExportSpecifiers spec_list ->
    let spec_list' = map_list this#export_named_declaration_specifier spec_list in
    if spec_list == spec_list' then
      spec
    else
      ExportSpecifiers spec_list'
  | ExportBatchSpecifier batch ->
    let batch' = this#export_batch_specifier batch in
    if batch == batch' then
      spec
    else
      ExportBatchSpecifier batch'
(* The module-source string of a re-export; only comments are mutable here. *)
method export_source _loc (source : 'loc Ast.StringLiteral.t) =
  let open Ast.StringLiteral in
  let { value; raw; comments } = source in
  let comments' = this#syntax_opt comments in
  if comments == comments' then
    source
  else
    { value; raw; comments = comments' }
(* Visits an expression statement; [directive] (e.g. "use strict") passes through. *)
method expression_statement _loc (stmt : ('loc, 'loc) Ast.Statement.Expression.t) =
  let open Ast.Statement.Expression in
  let { expression = expr; directive; comments } = stmt in
  let expr' = this#expression expr in
  let comments' = this#syntax_opt comments in
  if expr == expr' && comments == comments' then
    stmt
  else
    { expression = expr'; directive; comments = comments' }
(* A call/array argument position: either a plain expression or a [...spread]. *)
method expression_or_spread expr_or_spread =
  let open Ast.Expression in
  match expr_or_spread with
  | Expression expr -> id this#expression expr expr_or_spread (fun expr -> Expression expr)
  | Spread spread -> id this#spread_element spread expr_or_spread (fun spread -> Spread spread)
(* Visits [for (lhs in rhs) body]; the legacy [each] flag passes through. *)
method for_in_statement _loc (stmt : ('loc, 'loc) Ast.Statement.ForIn.t) =
  let open Ast.Statement.ForIn in
  let { left; right; body; each; comments } = stmt in
  let left' = this#for_in_statement_lhs left in
  let right' = this#expression right in
  let body' = this#statement body in
  let comments' = this#syntax_opt comments in
  if left == left' && right == right' && body == body' && comments == comments' then
    stmt
  else
    { left = left'; right = right'; body = body'; each; comments = comments' }
(* The for-in LHS is either a fresh declaration or an assignment target pattern. *)
method for_in_statement_lhs (left : ('loc, 'loc) Ast.Statement.ForIn.left) =
  let open Ast.Statement.ForIn in
  match left with
  | LeftDeclaration decl ->
    id this#for_in_left_declaration decl left (fun decl -> LeftDeclaration decl)
  | LeftPattern patt ->
    id this#for_in_assignment_pattern patt left (fun patt -> LeftPattern patt)
method for_in_left_declaration left =
  let (loc, decl) = left in
  id_loc this#variable_declaration loc decl left (fun decl -> (loc, decl))
(* Visits [for await? (lhs of rhs) body]; the [await] flag passes through. *)
method for_of_statement _loc (stuff : ('loc, 'loc) Ast.Statement.ForOf.t) =
  let open Ast.Statement.ForOf in
  let { left; right; body; await; comments } = stuff in
  let left' = this#for_of_statement_lhs left in
  let right' = this#expression right in
  let body' = this#statement body in
  let comments' = this#syntax_opt comments in
  if left == left' && right == right' && body == body' && comments == comments' then
    stuff
  else
    { left = left'; right = right'; body = body'; await; comments = comments' }
(* Same LHS split as for-in: declaration vs. assignment pattern. *)
method for_of_statement_lhs (left : ('loc, 'loc) Ast.Statement.ForOf.left) =
  let open Ast.Statement.ForOf in
  match left with
  | LeftDeclaration decl ->
    id this#for_of_left_declaration decl left (fun decl -> LeftDeclaration decl)
  | LeftPattern patt ->
    id this#for_of_assignment_pattern patt left (fun patt -> LeftPattern patt)
method for_of_left_declaration left =
  let (loc, decl) = left in
  id_loc this#variable_declaration loc decl left (fun decl -> (loc, decl))
(* Visits a classic C-style for loop; init/test/update are all optional. *)
method for_statement _loc (stmt : ('loc, 'loc) Ast.Statement.For.t) =
  let open Ast.Statement.For in
  let { init; test; update; body; comments } = stmt in
  let init' = map_opt this#for_statement_init init in
  let test' = map_opt this#predicate_expression test in
  let update' = map_opt this#expression update in
  let body' = this#statement body in
  let comments' = this#syntax_opt comments in
  if
    init == init'
    && test == test'
    && update == update'
    && body == body'
    && comments == comments'
  then
    stmt
  else
    { init = init'; test = test'; update = update'; body = body'; comments = comments' }
(* The init clause is either a variable declaration or a bare expression. *)
method for_statement_init (init : ('loc, 'loc) Ast.Statement.For.init) =
  let open Ast.Statement.For in
  match init with
  | InitDeclaration decl ->
    id this#for_init_declaration decl init (fun decl -> InitDeclaration decl)
  | InitExpression expr -> id this#expression expr init (fun expr -> InitExpression expr)
method for_init_declaration init =
  let (loc, decl) = init in
  id_loc this#variable_declaration loc decl init (fun decl -> (loc, decl))
(* One parameter inside a function *type* annotation; [optional] passes through. *)
method function_param_type (fpt : ('loc, 'loc) Ast.Type.Function.Param.t) =
  let open Ast.Type.Function.Param in
  let (loc, { annot; name; optional }) = fpt in
  let annot' = this#type_ annot in
  let name' = map_opt this#identifier name in
  if annot' == annot && name' == name then
    fpt
  else
    (loc, { annot = annot'; name = name'; optional })
(* The [...rest] parameter of a function type; wraps a regular param. *)
method function_rest_param_type (frpt : ('loc, 'loc) Ast.Type.Function.RestParam.t) =
  let open Ast.Type.Function.RestParam in
  let (loc, { argument; comments }) = frpt in
  let argument' = this#function_param_type argument in
  let comments' = this#syntax_opt comments in
  if argument' == argument && comments' == comments then
    frpt
  else
    (loc, { argument = argument'; comments = comments' })
(* The explicit [this] parameter annotation of a function type. *)
method function_this_param_type (this_param : ('loc, 'loc) Ast.Type.Function.ThisParam.t) =
  let open Ast.Type.Function.ThisParam in
  let (loc, { annot; comments }) = this_param in
  let annot' = this#type_annotation annot in
  let comments' = this#syntax_opt comments in
  if annot' == annot && comments' == comments then
    this_param
  else
    (loc, { annot = annot'; comments = comments' })
(* Visits a full function type: type params, this-param, params, rest and
   return. Both the outer comments and the params-list comments are visited;
   the original node is returned when every one of these is unchanged. *)
method function_type _loc (ft : ('loc, 'loc) Ast.Type.Function.t) =
  let open Ast.Type.Function in
  let {
    params = (params_loc, { Params.this_; params = ps; rest = rpo; comments = params_comments });
    return;
    tparams;
    comments = func_comments;
  } =
    ft
  in
  let tparams' = map_opt this#type_params tparams in
  let this_' = map_opt this#function_this_param_type this_ in
  let ps' = map_list this#function_param_type ps in
  let rpo' = map_opt this#function_rest_param_type rpo in
  let return' = this#type_ return in
  let func_comments' = this#syntax_opt func_comments in
  let params_comments' = this#syntax_opt params_comments in
  if
    ps' == ps
    && rpo' == rpo
    && return' == return
    && tparams' == tparams
    && func_comments' == func_comments
    && params_comments' == params_comments
    && this_' == this_
  then
    ft
  else
    {
      params =
        ( params_loc,
          { Params.this_ = this_'; params = ps'; rest = rpo'; comments = params_comments' }
        );
      return = return';
      tparams = tparams';
      comments = func_comments';
    }
(* Override point for labels (break/continue targets); defaults to #identifier. *)
method label_identifier (ident : ('loc, 'loc) Ast.Identifier.t) = this#identifier ident
(* An object-type property's value: plain type, getter, or setter. *)
method object_property_value_type (opvt : ('loc, 'loc) Ast.Type.Object.Property.value) =
  let open Ast.Type.Object.Property in
  match opvt with
  | Init t -> id this#type_ t opvt (fun t -> Init t)
  | Get t -> id this#object_type_property_getter t opvt (fun t -> Get t)
  | Set t -> id this#object_type_property_setter t opvt (fun t -> Set t)
(* Getters and setters in object types are located function types. *)
method object_type_property_getter getter =
  let (loc, ft) = getter in
  id_loc this#function_type loc ft getter (fun ft -> (loc, ft))
method object_type_property_setter setter =
  let (loc, ft) = setter in
  id_loc this#function_type loc ft setter (fun ft -> (loc, ft))
(* A named property of an object type. The boolean flags (optional/static/
   proto/_method) pass through unchanged. *)
method object_property_type (opt : ('loc, 'loc) Ast.Type.Object.Property.t) =
  let open Ast.Type.Object.Property in
  let (loc, { key; value; optional; static; proto; _method; variance; comments }) = opt in
  let key' = this#object_key key in
  let value' = this#object_property_value_type value in
  let variance' = this#variance_opt variance in
  let comments' = this#syntax_opt comments in
  if key' == key && value' == value && variance' == variance && comments' == comments then
    opt
  else
    ( loc,
      {
        key = key';
        value = value';
        optional;
        static;
        proto;
        _method;
        variance = variance';
        comments = comments';
      }
    )
(* A [...T] spread inside an object type. *)
method object_spread_property_type (opt : ('loc, 'loc) Ast.Type.Object.SpreadProperty.t) =
  let open Ast.Type.Object.SpreadProperty in
  let (loc, { argument; comments }) = opt in
  let argument' = this#type_ argument in
  let comments' = this#syntax_opt comments in
  if argument' == argument && comments == comments' then
    opt
  else
    (loc, { argument = argument'; comments = comments' })
(* An indexer [[key: K]: V]; note the record's [id] field (the indexer
   name) is not visited here and passes through as-is. *)
method object_indexer_property_type (opt : ('loc, 'loc) Ast.Type.Object.Indexer.t) =
  let open Ast.Type.Object.Indexer in
  let (loc, { id; key; value; static; variance; comments }) = opt in
  let key' = this#type_ key in
  let value' = this#type_ value in
  let variance' = this#variance_opt variance in
  let comments' = this#syntax_opt comments in
  if key' == key && value' == value && variance' == variance && comments' == comments then
    opt
  else
    (loc, { id; key = key'; value = value'; static; variance = variance'; comments = comments' })
(* An internal slot declaration [[[name]]: T] in an object type. *)
method object_internal_slot_property_type (slot : ('loc, 'loc) Ast.Type.Object.InternalSlot.t) =
  let open Ast.Type.Object.InternalSlot in
  let (loc, { id; value; optional; static; _method; comments }) = slot in
  let id' = this#identifier id in
  let value' = this#type_ value in
  let comments' = this#syntax_opt comments in
  if id == id' && value == value' && comments == comments' then
    slot
  else
    (loc, { id = id'; value = value'; optional; static; _method; comments = comments' })
(* A callable signature [(args): R] of an object type. *)
method object_call_property_type (call : ('loc, 'loc) Ast.Type.Object.CallProperty.t) =
  let open Ast.Type.Object.CallProperty in
  let (loc, { value = (value_loc, value); static; comments }) = call in
  let value' = this#function_type value_loc value in
  let comments' = this#syntax_opt comments in
  if value == value' && comments == comments' then
    call
  else
    (loc, { value = (value_loc, value'); static; comments = comments' })
(* Visits an object type, dispatching each property to its specific
   visitor; exactness flags pass through. *)
method object_type _loc (ot : ('loc, 'loc) Ast.Type.Object.t) =
  let open Ast.Type.Object in
  let { properties; exact; inexact; comments } = ot in
  let properties' =
    map_list
      (fun p ->
        match p with
        | Property p' -> id this#object_property_type p' p (fun p' -> Property p')
        | SpreadProperty p' ->
          id this#object_spread_property_type p' p (fun p' -> SpreadProperty p')
        | Indexer p' -> id this#object_indexer_property_type p' p (fun p' -> Indexer p')
        | InternalSlot p' ->
          id this#object_internal_slot_property_type p' p (fun p' -> InternalSlot p')
        | CallProperty p' -> id this#object_call_property_type p' p (fun p' -> CallProperty p'))
      properties
  in
  let comments' = this#syntax_opt comments in
  if properties' == properties && comments == comments' then
    ot
  else
    { properties = properties'; exact; inexact; comments = comments' }
(* An inline [interface { ... }] type: extends clauses plus an object body. *)
method interface_type _loc (i : ('loc, 'loc) Ast.Type.Interface.t) =
  let open Ast.Type.Interface in
  let { extends; body; comments } = i in
  let extends' = map_list (map_loc this#generic_type) extends in
  let body' = map_loc this#object_type body in
  let comments' = this#syntax_opt comments in
  if extends' == extends && body' == body && comments == comments' then
    i
  else
    { extends = extends'; body = body'; comments = comments' }
(* A (possibly dotted) type reference such as [Foo] or [A.B.Foo]. *)
method generic_identifier_type (git : ('loc, 'loc) Ast.Type.Generic.Identifier.t) =
  let open Ast.Type.Generic.Identifier in
  match git with
  | Unqualified i -> id this#type_identifier_reference i git (fun i -> Unqualified i)
  | Qualified i -> id this#generic_qualified_identifier_type i git (fun i -> Qualified i)
(* One [qualification.id] step; recurses through the qualification chain. *)
method generic_qualified_identifier_type qual =
  let open Ast.Type.Generic.Identifier in
  let (loc, { qualification; id }) = qual in
  let qualification' = this#generic_identifier_type qualification in
  let id' = this#member_type_identifier id in
  if qualification' == qualification && id' == id then
    qual
  else
    (loc, { qualification = qualification'; id = id' })
(* Override point for the member part of a qualified type name. *)
method member_type_identifier id = this#identifier id
(* A variance sigil (+/-); [kind] passes through, only comments are visited. *)
method variance (variance : 'loc Ast.Variance.t) =
  let (loc, { Ast.Variance.kind; comments }) = variance in
  let comments' = this#syntax_opt comments in
  if comments == comments' then
    variance
  else
    (loc, { Ast.Variance.kind; comments = comments' })
method variance_opt (opt : 'loc Ast.Variance.t option) = map_opt this#variance opt
(* The [<T1, T2>] argument list of a type application. *)
method type_args (targs : ('loc, 'loc) Ast.Type.TypeArgs.t) =
  let open Ast.Type.TypeArgs in
  let (loc, { arguments; comments }) = targs in
  let arguments' = map_list this#type_ arguments in
  let comments' = this#syntax_opt comments in
  if arguments == arguments' && comments == comments' then
    targs
  else
    (loc, { arguments = arguments'; comments = comments' })
(* The [<T, ...>] parameter list of a declaration. *)
method type_params (tparams : ('loc, 'loc) Ast.Type.TypeParams.t) =
  let open Ast.Type.TypeParams in
  let (loc, { params = tps; comments }) = tparams in
  let tps' = map_list this#type_param tps in
  let comments' = this#syntax_opt comments in
  if tps' == tps && comments' == comments then
    tparams
  else
    (loc, { params = tps'; comments = comments' })
(* One type parameter: its bound, variance, default, and binding name. *)
method type_param (tparam : ('loc, 'loc) Ast.Type.TypeParam.t) =
  let open Ast.Type.TypeParam in
  let (loc, { name; bound; variance; default }) = tparam in
  let bound' = this#type_annotation_hint bound in
  let variance' = this#variance_opt variance in
  let default' = map_opt this#type_ default in
  let name' = this#binding_type_identifier name in
  if name' == name && bound' == bound && variance' == variance && default' == default then
    tparam
  else
    (loc, { name = name'; bound = bound'; variance = variance'; default = default' })
(* A type application [Name<targs?>]. *)
method generic_type _loc (gt : ('loc, 'loc) Ast.Type.Generic.t) =
  let open Ast.Type.Generic in
  let { id; targs; comments } = gt in
  let id' = this#generic_identifier_type id in
  let targs' = map_opt this#type_args targs in
  let comments' = this#syntax_opt comments in
  if id' == id && targs' == targs && comments' == comments then
    gt
  else
    { id = id'; targs = targs'; comments = comments' }
(* An indexed access type [Obj[Index]]. *)
method indexed_access _loc (ia : ('loc, 'loc) Ast.Type.IndexedAccess.t) =
  let open Ast.Type.IndexedAccess in
  let { _object; index; comments } = ia in
  let _object' = this#type_ _object in
  let index' = this#type_ index in
  let comments' = this#syntax_opt comments in
  if _object' == _object && index' == index && comments' == comments then
    ia
  else
    { _object = _object'; index = index'; comments = comments' }
(* An optional indexed access [Obj?.[Index]]; delegates to #indexed_access. *)
method optional_indexed_access loc (ia : ('loc, 'loc) Ast.Type.OptionalIndexedAccess.t) =
  let open Ast.Type.OptionalIndexedAccess in
  let { indexed_access; optional } = ia in
  let indexed_access' = this#indexed_access loc indexed_access in
  if indexed_access' == indexed_access then
    ia
  else
    { indexed_access = indexed_access'; optional }
(* Singleton literal types: only their attached comments can change. *)
method string_literal_type _loc (lit : 'loc Ast.StringLiteral.t) =
  let open Ast.StringLiteral in
  let { value; raw; comments } = lit in
  let comments' = this#syntax_opt comments in
  if comments == comments' then
    lit
  else
    { value; raw; comments = comments' }
method number_literal_type _loc (lit : 'loc Ast.NumberLiteral.t) =
  let open Ast.NumberLiteral in
  let { value; raw; comments } = lit in
  let comments' = this#syntax_opt comments in
  if comments == comments' then
    lit
  else
    { value; raw; comments = comments' }
method bigint_literal_type _loc (lit : 'loc Ast.BigIntLiteral.t) =
  let open Ast.BigIntLiteral in
  let { value; raw; comments } = lit in
  let comments' = this#syntax_opt comments in
  if comments == comments' then
    lit
  else
    { value; raw; comments = comments' }
method boolean_literal_type _loc (lit : 'loc Ast.BooleanLiteral.t) =
  let open Ast.BooleanLiteral in
  let { value; comments } = lit in
  let comments' = this#syntax_opt comments in
  if comments == comments' then
    lit
  else
    { value; comments = comments' }
(* A nullable type [?T]. *)
method nullable_type (t : ('loc, 'loc) Ast.Type.Nullable.t) =
  let open Ast.Type.Nullable in
  let { argument; comments } = t in
  let argument' = this#type_ argument in
  let comments' = this#syntax_opt comments in
  if argument == argument' && comments == comments' then
    t
  else
    { argument = argument'; comments = comments' }
(* A [typeof expr] type; the target is a restricted identifier expression. *)
method typeof_type (t : ('loc, 'loc) Ast.Type.Typeof.t) =
  let open Ast.Type.Typeof in
  let { argument; comments } = t in
  let argument' = this#typeof_expression argument in
  let comments' = this#syntax_opt comments in
  if argument == argument' && comments == comments' then
    t
  else
    { argument = argument'; comments = comments' }
(* The typeof target: a plain or dotted identifier. *)
method typeof_expression (git : ('loc, 'loc) Ast.Type.Typeof.Target.t) =
  let open Ast.Type.Typeof.Target in
  match git with
  | Unqualified i -> id this#typeof_identifier i git (fun i -> Unqualified i)
  | Qualified i -> id this#typeof_qualified_identifier i git (fun i -> Qualified i)
(* Override points for the root and member parts of a typeof target. *)
method typeof_identifier id = this#identifier id
method typeof_member_identifier id = this#identifier id
method typeof_qualified_identifier qual =
  let open Ast.Type.Typeof.Target in
  let (loc, { qualification; id }) = qual in
  let qualification' = this#typeof_expression qualification in
  let id' = this#typeof_member_identifier id in
  if qualification' == qualification && id' == id then
    qual
  else
    (loc, { qualification = qualification'; id = id' })
(* A tuple type [[T1, T2, ...]]. *)
method tuple_type (t : ('loc, 'loc) Ast.Type.Tuple.t) =
  let open Ast.Type.Tuple in
  let { types; comments } = t in
  let types' = map_list this#type_ types in
  let comments' = this#syntax_opt comments in
  if types == types' && comments == comments' then
    t
  else
    { types = types'; comments = comments' }
(* An array shorthand type [T[]]. *)
method array_type (t : ('loc, 'loc) Ast.Type.Array.t) =
  let open Ast.Type.Array in
  let { argument; comments } = t in
  let argument' = this#type_ argument in
  let comments' = this#syntax_opt comments in
  if argument == argument' && comments == comments' then
    t
  else
    { argument = argument'; comments = comments' }
(* Union/intersection types store their members as a guaranteed-two-plus
   triple (t0, t1, rest); all three parts are visited. *)
method union_type _loc (t : ('loc, 'loc) Ast.Type.Union.t) =
  let open Ast.Type.Union in
  let { types = (t0, t1, ts); comments } = t in
  let t0' = this#type_ t0 in
  let t1' = this#type_ t1 in
  let ts' = map_list this#type_ ts in
  let comments' = this#syntax_opt comments in
  if t0' == t0 && t1' == t1 && ts' == ts && comments' == comments then
    t
  else
    { types = (t0', t1', ts'); comments = comments' }
method intersection_type _loc (t : ('loc, 'loc) Ast.Type.Intersection.t) =
  let open Ast.Type.Intersection in
  let { types = (t0, t1, ts); comments } = t in
  let t0' = this#type_ t0 in
  let t1' = this#type_ t1 in
  let ts' = map_list this#type_ ts in
  let comments' = this#syntax_opt comments in
  if t0' == t0 && t1' == t1 && ts' == ts && comments' == comments then
    t
  else
    { types = (t0', t1', ts'); comments = comments' }
(* Central dispatcher over every type constructor. Primitive types carry
   only comments; compound types are forwarded to their dedicated visitor
   ([id_loc] for visitors that take the location, [id] otherwise). *)
method type_ (t : ('loc, 'loc) Ast.Type.t) =
  let open Ast.Type in
  match t with
  | (loc, Any comments) -> id this#syntax_opt comments t (fun comments -> (loc, Any comments))
  | (loc, Mixed comments) ->
    id this#syntax_opt comments t (fun comments -> (loc, Mixed comments))
  | (loc, Empty comments) ->
    id this#syntax_opt comments t (fun comments -> (loc, Empty comments))
  | (loc, Void comments) -> id this#syntax_opt comments t (fun comments -> (loc, Void comments))
  | (loc, Null comments) -> id this#syntax_opt comments t (fun comments -> (loc, Null comments))
  | (loc, Symbol comments) ->
    id this#syntax_opt comments t (fun comments -> (loc, Symbol comments))
  | (loc, Number comments) ->
    id this#syntax_opt comments t (fun comments -> (loc, Number comments))
  | (loc, BigInt comments) ->
    id this#syntax_opt comments t (fun comments -> (loc, BigInt comments))
  | (loc, String comments) ->
    id this#syntax_opt comments t (fun comments -> (loc, String comments))
  | (loc, Boolean comments) ->
    id this#syntax_opt comments t (fun comments -> (loc, Boolean comments))
  | (loc, Exists comments) ->
    id this#syntax_opt comments t (fun comments -> (loc, Exists comments))
  | (loc, Nullable t') -> id this#nullable_type t' t (fun t' -> (loc, Nullable t'))
  | (loc, Array t') -> id this#array_type t' t (fun t' -> (loc, Array t'))
  | (loc, Typeof t') -> id this#typeof_type t' t (fun t' -> (loc, Typeof t'))
  | (loc, Function ft) -> id_loc this#function_type loc ft t (fun ft -> (loc, Function ft))
  | (loc, Object ot) -> id_loc this#object_type loc ot t (fun ot -> (loc, Object ot))
  | (loc, Interface i) -> id_loc this#interface_type loc i t (fun i -> (loc, Interface i))
  | (loc, Generic gt) -> id_loc this#generic_type loc gt t (fun gt -> (loc, Generic gt))
  | (loc, IndexedAccess ia) ->
    id_loc this#indexed_access loc ia t (fun ia -> (loc, IndexedAccess ia))
  | (loc, OptionalIndexedAccess ia) ->
    id_loc this#optional_indexed_access loc ia t (fun ia -> (loc, OptionalIndexedAccess ia))
  | (loc, StringLiteral lit) ->
    id_loc this#string_literal_type loc lit t (fun lit -> (loc, StringLiteral lit))
  | (loc, NumberLiteral lit) ->
    id_loc this#number_literal_type loc lit t (fun lit -> (loc, NumberLiteral lit))
  | (loc, BigIntLiteral lit) ->
    id_loc this#bigint_literal_type loc lit t (fun lit -> (loc, BigIntLiteral lit))
  | (loc, BooleanLiteral lit) ->
    id_loc this#boolean_literal_type loc lit t (fun lit -> (loc, BooleanLiteral lit))
  | (loc, Union t') -> id_loc this#union_type loc t' t (fun t' -> (loc, Union t'))
  | (loc, Intersection t') ->
    id_loc this#intersection_type loc t' t (fun t' -> (loc, Intersection t'))
  | (loc, Tuple t') -> id this#tuple_type t' t (fun t' -> (loc, Tuple t'))
(* A [: T] annotation; visits the inner type and preserves the location. *)
method type_annotation (annot : ('loc, 'loc) Ast.Type.annotation) =
  let (loc, a) = annot in
  id this#type_ a annot (fun a -> (loc, a))
(* An annotation that may be absent; a Missing hint is returned untouched. *)
method type_annotation_hint (return : ('M, 'T) Ast.Type.annotation_or_hint) =
  let open Ast.Type in
  match return with
  | Available annot ->
    let annot' = this#type_annotation annot in
    if annot' == annot then
      return
    else
      Available annot'
  | Missing _loc -> return
(* Thin wrappers so subclasses can distinguish declaration vs. expression
   positions while sharing the common #function_ implementation. *)
method function_declaration loc (stmt : ('loc, 'loc) Ast.Function.t) = this#function_ loc stmt
method function_expression loc (stmt : ('loc, 'loc) Ast.Function.t) =
  this#function_expression_or_method loc stmt
method function_expression_or_method loc (stmt : ('loc, 'loc) Ast.Function.t) =
  this#function_ loc stmt
  [@@alert deprecated "Use either function_expression or class_method"]
(* Internal helper for function declarations, function expressions and arrow functions *)
(* Shared visitor for every function-like node: visits the optional name,
   type params, params, return annotation, body, predicate and comments.
   The async/generator flags and sig_loc pass through unchanged. *)
method function_ _loc (expr : ('loc, 'loc) Ast.Function.t) =
  let open Ast.Function in
  let {
    id = ident;
    params;
    body;
    async;
    generator;
    predicate;
    return;
    tparams;
    sig_loc;
    comments;
  } =
    expr
  in
  let ident' = map_opt this#function_identifier ident in
  let tparams' = map_opt this#type_params tparams in
  let params' = this#function_params params in
  let return' = this#type_annotation_hint return in
  let body' = this#function_body_any body in
  let predicate' = map_opt this#predicate predicate in
  let comments' = this#syntax_opt comments in
  if
    ident == ident'
    && params == params'
    && body == body'
    && predicate == predicate'
    && return == return'
    && tparams == tparams'
    && comments == comments'
  then
    expr
  else
    {
      id = ident';
      params = params';
      return = return';
      body = body';
      async;
      generator;
      predicate = predicate';
      tparams = tparams';
      sig_loc;
      comments = comments';
    }
(* Visits a function's parameter list: the ordinary params, the optional
   rest param, the optional explicit [this] param and list comments. *)
method function_params (params : ('loc, 'loc) Ast.Function.Params.t) =
  let open Ast.Function in
  let (loc, { Params.params = params_list; rest; comments; this_ }) = params in
  let params_list' = map_list this#function_param params_list in
  let rest' = map_opt this#function_rest_param rest in
  let this_' = map_opt this#function_this_param this_ in
  let comments' = this#syntax_opt comments in
  if params_list == params_list' && rest == rest' && comments == comments' && this_ == this_'
  then
    params
  else
    (loc, { Params.params = params_list'; rest = rest'; comments = comments'; this_ = this_' })
(* The explicit [this] parameter of a function (annotation only). *)
method function_this_param (this_param : ('loc, 'loc) Ast.Function.ThisParam.t) =
  let open Ast.Function.ThisParam in
  let (loc, { annot; comments }) = this_param in
  let annot' = this#type_annotation annot in
  let comments' = this#syntax_opt comments in
  if annot' == annot && comments' == comments then
    this_param
  else
    (loc, { annot = annot'; comments = comments' })
(* One parameter: its binding pattern and optional default expression. *)
method function_param (param : ('loc, 'loc) Ast.Function.Param.t) =
  let open Ast.Function.Param in
  let (loc, { argument; default }) = param in
  let argument' = this#function_param_pattern argument in
  let default' = map_opt this#expression default in
  if argument == argument' && default == default' then
    param
  else
    (loc, { argument = argument'; default = default' })
(* A function body is either a block or (for arrows) a bare expression. *)
method function_body_any (body : ('loc, 'loc) Ast.Function.body) =
  match body with
  | Ast.Function.BodyBlock block ->
    id this#function_body block body (fun block -> Ast.Function.BodyBlock block)
  | Ast.Function.BodyExpression expr ->
    id this#expression expr body (fun expr -> Ast.Function.BodyExpression expr)
method function_body (body : 'loc * ('loc, 'loc) Ast.Statement.Block.t) =
  let (loc, block) = body in
  id_loc this#block loc block body (fun block -> (loc, block))
(* Function names hoist like [var] bindings, hence ~kind:Var. *)
method function_identifier (ident : ('loc, 'loc) Ast.Identifier.t) =
  this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Var ident
(* TODO *)
(* Generator expressions are not traversed yet; returned unchanged. *)
method generator _loc (expr : ('loc, 'loc) Ast.Expression.Generator.t) = expr
(* Base identifier visitor: only the attached comments can change. *)
method identifier (id : ('loc, 'loc) Ast.Identifier.t) =
  let open Ast.Identifier in
  let (loc, { name; comments }) = id in
  let comments' = this#syntax_opt comments in
  if comments == comments' then
    id
  else
    (loc, { name; comments = comments' })
(* Override points distinguishing type-level identifier roles; all default
   to the plain identifier visitor. *)
method type_identifier (id : ('loc, 'loc) Ast.Identifier.t) = this#identifier id
method type_identifier_reference (id : ('loc, 'loc) Ast.Identifier.t) = this#type_identifier id
method binding_type_identifier (id : ('loc, 'loc) Ast.Identifier.t) = this#type_identifier id
(* Visits an [interface Name<...> extends ... { ... }] declaration. *)
method interface _loc (interface : ('loc, 'loc) Ast.Statement.Interface.t) =
  let open Ast.Statement.Interface in
  let { id = ident; tparams; extends; body; comments } = interface in
  let id' = this#binding_type_identifier ident in
  let tparams' = map_opt this#type_params tparams in
  let extends' = map_list (map_loc this#generic_type) extends in
  let body' = map_loc this#object_type body in
  let comments' = this#syntax_opt comments in
  if
    id' == ident
    && tparams' == tparams
    && extends' == extends
    && body' == body
    && comments' == comments
  then
    interface
  else
    { id = id'; tparams = tparams'; extends = extends'; body = body'; comments = comments' }
method interface_declaration loc (decl : ('loc, 'loc) Ast.Statement.Interface.t) =
  this#interface loc decl
(* A class private name [#foo]; only comments are visited. *)
method private_name (id : 'loc Ast.PrivateName.t) =
  let open Ast.PrivateName in
  let (loc, { name; comments }) = id in
  let comments' = this#syntax_opt comments in
  if comments == comments' then
    id
  else
    (loc, { name; comments = comments' })
(* A computed property key [[expr]]. *)
method computed_key (key : ('loc, 'loc) Ast.ComputedKey.t) =
  let open Ast.ComputedKey in
  let (loc, { expression; comments }) = key in
  let expression' = this#expression expression in
  let comments' = this#syntax_opt comments in
  if expression == expression' && comments == comments' then
    key
  else
    (loc, { expression = expression'; comments = comments' })
(* A dynamic [import(arg)] expression. *)
method import _loc (expr : ('loc, 'loc) Ast.Expression.Import.t) =
  let open Ast.Expression.Import in
  let { argument; comments } = expr in
  let argument' = this#expression argument in
  let comments' = this#syntax_opt comments in
  if argument == argument' && comments == comments' then
    expr
  else
    { argument = argument'; comments = comments' }
(* Hook for the then-branch; [has_else] is provided for overrides and is
   intentionally ignored by the default implementation. *)
method if_consequent_statement ~has_else (stmt : ('loc, 'loc) Ast.Statement.t) =
  ignore has_else;
  this#statement stmt
(* The else branch of an if statement. *)
method if_alternate_statement _loc (altern : ('loc, 'loc) Ast.Statement.If.Alternate.t') =
  let open Ast.Statement.If.Alternate in
  let { body; comments } = altern in
  let body' = this#statement body in
  let comments' = this#syntax_opt comments in
  if body == body' && comments == comments' then
    altern
  else
    { body = body'; comments = comments' }
(* Visits [if (test) consequent else? alternate]. *)
method if_statement _loc (stmt : ('loc, 'loc) Ast.Statement.If.t) =
  let open Ast.Statement.If in
  let { test; consequent; alternate; comments } = stmt in
  let test' = this#predicate_expression test in
  let consequent' = this#if_consequent_statement ~has_else:(alternate <> None) consequent in
  let alternate' = map_opt (map_loc this#if_alternate_statement) alternate in
  let comments' = this#syntax_opt comments in
  if
    test == test'
    && consequent == consequent'
    && alternate == alternate'
    && comments == comments'
  then
    stmt
  else
    { test = test'; consequent = consequent'; alternate = alternate'; comments = comments' }
(* Visits an [import ... from 'source'] declaration: source string, named /
   namespace specifiers, and the optional default binding. *)
method import_declaration _loc (decl : ('loc, 'loc) Ast.Statement.ImportDeclaration.t) =
let open Ast.Statement.ImportDeclaration in
let { import_kind; source; specifiers; default; comments } = decl in
let source' = map_loc this#import_source source in
let specifiers' = map_opt (this#import_specifier ~import_kind) specifiers in
let default' = map_opt (this#import_default_specifier ~import_kind) default in
let comments' = this#syntax_opt comments in
if
source == source'
&& specifiers == specifiers'
&& default == default'
&& comments == comments'
then
decl
else
{
import_kind;
source = source';
specifiers = specifiers';
default = default';
comments = comments';
}
(* Visits the module-source string literal of an import. *)
method import_source _loc (source : 'loc Ast.StringLiteral.t) =
let open Ast.StringLiteral in
let { value; raw; comments } = source in
let comments' = this#syntax_opt comments in
if comments == comments' then
source
else
{ value; raw; comments = comments' }
(* Dispatches on the two specifier forms: [{a, b}] vs [* as ns]. *)
method import_specifier
~import_kind (specifier : ('loc, 'loc) Ast.Statement.ImportDeclaration.specifier) =
let open Ast.Statement.ImportDeclaration in
match specifier with
| ImportNamedSpecifiers named_specifiers ->
let named_specifiers' =
map_list (this#import_named_specifier ~import_kind) named_specifiers
in
if named_specifiers == named_specifiers' then
specifier
else
ImportNamedSpecifiers named_specifiers'
| ImportNamespaceSpecifier (loc, ident) ->
id_loc (this#import_namespace_specifier ~import_kind) loc ident specifier (fun ident ->
ImportNamespaceSpecifier (loc, ident)
)
(* The remote (exported) name of a named import; not a binding here. *)
method remote_identifier id = this#identifier id
(* Visits [{remote as local}]. The effective kind combines the declaration's
   [import_kind] with the per-specifier [kind]: [type] makes both remote and
   local type identifiers, [typeof] makes only the local one a type binding.
   With no local alias, the remote name IS the binding. *)
method import_named_specifier
~(import_kind : Ast.Statement.ImportDeclaration.import_kind)
(specifier : ('loc, 'loc) Ast.Statement.ImportDeclaration.named_specifier) =
let open Ast.Statement.ImportDeclaration in
let { kind; local; remote } = specifier in
let (is_type_remote, is_type_local) =
match (import_kind, kind) with
| (ImportType, _)
| (_, Some ImportType) ->
(true, true)
| (ImportTypeof, _)
| (_, Some ImportTypeof) ->
(false, true)
| _ -> (false, false)
in
let remote' =
match local with
| None ->
if is_type_remote then
this#binding_type_identifier remote
else
this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Let remote
| Some _ -> this#remote_identifier remote
in
let local' =
match local with
| None -> None
| Some ident ->
let local_visitor =
if is_type_local then
this#binding_type_identifier
else
this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Let
in
id local_visitor ident local (fun ident -> Some ident)
in
if local == local' && remote == remote' then
specifier
else
{ kind; local = local'; remote = remote' }
(* Visits the default-import binding; type/typeof imports bind a type name,
   otherwise a [let]-kinded value binding. *)
method import_default_specifier ~import_kind (id : ('loc, 'loc) Ast.Identifier.t) =
let open Ast.Statement.ImportDeclaration in
let local_visitor =
match import_kind with
| ImportType
| ImportTypeof ->
this#binding_type_identifier
| _ -> this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Let
in
local_visitor id
(* Visits the [* as ns] binding; same kind rule as the default specifier. *)
method import_namespace_specifier ~import_kind _loc (id : ('loc, 'loc) Ast.Identifier.t) =
let open Ast.Statement.ImportDeclaration in
let local_visitor =
match import_kind with
| ImportType
| ImportTypeof ->
this#binding_type_identifier
| _ -> this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Let
in
local_visitor id
(* ---- JSX visitors. Each rebuilds its node only when a child changed
   (physical equality), preserving sharing of unchanged subtrees. ---- *)

(* Visits a JSX element: opening tag, optional closing tag, children. *)
method jsx_element _loc (expr : ('loc, 'loc) Ast.JSX.element) =
let open Ast.JSX in
let { opening_element; closing_element; children; comments } = expr in
let opening_element' = this#jsx_opening_element opening_element in
let closing_element' = map_opt this#jsx_closing_element closing_element in
let children' = this#jsx_children children in
let comments' = this#syntax_opt comments in
if
opening_element == opening_element'
&& closing_element == closing_element'
&& children == children'
&& comments == comments'
then
expr
else
{
opening_element = opening_element';
closing_element = closing_element';
children = children';
comments = comments';
}
(* Visits a JSX fragment [<>...</>]; only children and comments are mapped,
   the other fields are carried over via record update. *)
method jsx_fragment _loc (expr : ('loc, 'loc) Ast.JSX.fragment) =
let open Ast.JSX in
let { frag_children; frag_comments; _ } = expr in
let children' = this#jsx_children frag_children in
let frag_comments' = this#syntax_opt frag_comments in
if frag_children == children' && frag_comments == frag_comments' then
expr
else
{ expr with frag_children = children'; frag_comments = frag_comments' }
(* Visits an opening tag: element name and attribute list. *)
method jsx_opening_element (elem : ('loc, 'loc) Ast.JSX.Opening.t) =
let open Ast.JSX.Opening in
let (loc, { name; self_closing; attributes }) = elem in
let name' = this#jsx_element_name name in
let attributes' = map_list this#jsx_opening_attribute attributes in
if name == name' && attributes == attributes' then
elem
else
(loc, { name = name'; self_closing; attributes = attributes' })
(* Visits a closing tag; only the name is mapped. *)
method jsx_closing_element (elem : ('loc, 'loc) Ast.JSX.Closing.t) =
let open Ast.JSX.Closing in
let (loc, { name }) = elem in
let name' = this#jsx_element_name name in
if name == name' then
elem
else
(loc, { name = name' })
(* Dispatches an opening-tag attribute: [attr=...] vs [{...spread}]. *)
method jsx_opening_attribute (jsx_attr : ('loc, 'loc) Ast.JSX.Opening.attribute) =
let open Ast.JSX.Opening in
match jsx_attr with
| Attribute attr -> id this#jsx_attribute attr jsx_attr (fun attr -> Attribute attr)
| SpreadAttribute (loc, attr) ->
id_loc this#jsx_spread_attribute loc attr jsx_attr (fun attr -> SpreadAttribute (loc, attr))
(* Visits a spread attribute's argument expression. *)
method jsx_spread_attribute _loc (attr : ('loc, 'loc) Ast.JSX.SpreadAttribute.t') =
let open Ast.JSX.SpreadAttribute in
let { argument; comments } = attr in
let argument' = this#expression argument in
let comments' = this#syntax_opt comments in
if argument == argument' && comments == comments' then
attr
else
{ argument = argument'; comments = comments' }
(* Visits a named attribute: its name and optional value. *)
method jsx_attribute (attr : ('loc, 'loc) Ast.JSX.Attribute.t) =
let open Ast.JSX.Attribute in
let (loc, { name; value }) = attr in
let name' = this#jsx_attribute_name name in
let value' = map_opt this#jsx_attribute_value value in
if name == name' && value == value' then
attr
else
(loc, { name = name'; value = value' })
(* Dispatches an attribute name: plain identifier vs [ns:name]. *)
method jsx_attribute_name (name : ('loc, 'loc) Ast.JSX.Attribute.name) =
let open Ast.JSX.Attribute in
match name with
| Identifier ident ->
id this#jsx_attribute_name_identifier ident name (fun ident -> Identifier ident)
| NamespacedName ns ->
id this#jsx_attribute_name_namespaced ns name (fun ns -> NamespacedName ns)
method jsx_attribute_name_identifier ident = this#jsx_identifier ident
method jsx_attribute_name_namespaced ns = this#jsx_namespaced_name ns
(* Dispatches an attribute value: string literal vs [{expr}] container. *)
method jsx_attribute_value (value : ('loc, 'loc) Ast.JSX.Attribute.value) =
let open Ast.JSX.Attribute in
match value with
| Literal (loc, lit) ->
id_loc this#jsx_attribute_value_literal loc lit value (fun lit -> Literal (loc, lit))
| ExpressionContainer (loc, expr) ->
id_loc this#jsx_attribute_value_expression loc expr value (fun expr ->
ExpressionContainer (loc, expr)
)
method jsx_attribute_value_expression loc (jsx_expr : ('loc, 'loc) Ast.JSX.ExpressionContainer.t)
=
this#jsx_expression loc jsx_expr
method jsx_attribute_value_literal loc (lit : 'loc Ast.Literal.t) = this#literal loc lit
(* Visits a children list; the location component is kept as-is. *)
method jsx_children ((loc, children) as orig : 'loc * ('loc, 'loc) Ast.JSX.child list) =
let children' = map_list this#jsx_child children in
if children == children' then
orig
else
(loc, children')
(* Dispatches a single child; raw text children are returned unchanged. *)
method jsx_child (child : ('loc, 'loc) Ast.JSX.child) =
let open Ast.JSX in
match child with
| (loc, Element elem) ->
id_loc this#jsx_element loc elem child (fun elem -> (loc, Element elem))
| (loc, Fragment frag) ->
id_loc this#jsx_fragment loc frag child (fun frag -> (loc, Fragment frag))
| (loc, ExpressionContainer expr) ->
id_loc this#jsx_expression loc expr child (fun expr -> (loc, ExpressionContainer expr))
| (loc, SpreadChild spread) ->
id this#jsx_spread_child spread child (fun spread -> (loc, SpreadChild spread))
| (_loc, Text _) -> child
(* Visits an [{expr}] container; the container may also be empty [{}]. *)
method jsx_expression _loc (jsx_expr : ('loc, 'loc) Ast.JSX.ExpressionContainer.t) =
let open Ast.JSX.ExpressionContainer in
let { expression; comments } = jsx_expr in
let comments' = this#syntax_opt comments in
match expression with
| Expression expr ->
let expr' = this#expression expr in
if expr == expr' && comments == comments' then
jsx_expr
else
{ expression = Expression expr'; comments = comments' }
| EmptyExpression ->
if comments == comments' then
jsx_expr
else
{ expression = EmptyExpression; comments = comments' }
(* Visits a [{...expr}] child. *)
method jsx_spread_child (jsx_spread_child : ('loc, 'loc) Ast.JSX.SpreadChild.t) =
let open Ast.JSX.SpreadChild in
let { expression; comments } = jsx_spread_child in
let expression' = this#expression expression in
let comments' = this#syntax_opt comments in
if expression == expression' && comments == comments' then
jsx_spread_child
else
{ expression = expression'; comments = comments' }
(* Dispatches an element name: identifier, [ns:name], or member chain. *)
method jsx_element_name (name : ('loc, 'loc) Ast.JSX.name) =
let open Ast.JSX in
match name with
| Identifier ident ->
id this#jsx_element_name_identifier ident name (fun ident -> Identifier ident)
| NamespacedName ns ->
id this#jsx_element_name_namespaced ns name (fun ns -> NamespacedName ns)
| MemberExpression expr ->
id this#jsx_element_name_member_expression expr name (fun expr -> MemberExpression expr)
method jsx_element_name_identifier ident = this#jsx_identifier ident
method jsx_element_name_namespaced ns = this#jsx_namespaced_name ns
method jsx_element_name_member_expression expr = this#jsx_member_expression expr
(* Visits an [ns:name] pair. *)
method jsx_namespaced_name (namespaced_name : ('loc, 'loc) Ast.JSX.NamespacedName.t) =
let open Ast.JSX in
NamespacedName.(
let (loc, { namespace; name }) = namespaced_name in
let namespace' = this#jsx_identifier namespace in
let name' = this#jsx_identifier name in
if namespace == namespace' && name == name' then
namespaced_name
else
(loc, { namespace = namespace'; name = name' })
)
(* Visits an [A.B.C] element name. *)
method jsx_member_expression (member_exp : ('loc, 'loc) Ast.JSX.MemberExpression.t) =
let open Ast.JSX in
let (loc, { MemberExpression._object; MemberExpression.property }) = member_exp in
let _object' = this#jsx_member_expression_object _object in
let property' = this#jsx_identifier property in
if _object == _object' && property == property' then
member_exp
else
(loc, MemberExpression.{ _object = _object'; property = property' })
(* The object side of a JSX member name: identifier or a nested member. *)
method jsx_member_expression_object (_object : ('loc, 'loc) Ast.JSX.MemberExpression._object) =
let open Ast.JSX.MemberExpression in
match _object with
| Identifier ident ->
id this#jsx_member_expression_identifier ident _object (fun ident -> Identifier ident)
| MemberExpression nested_exp ->
id this#jsx_member_expression nested_exp _object (fun exp -> MemberExpression exp)
method jsx_member_expression_identifier ident = this#jsx_element_name_identifier ident
(* Visits a JSX identifier; only its comments can change. *)
method jsx_identifier (id : ('loc, 'loc) Ast.JSX.Identifier.t) =
let open Ast.JSX.Identifier in
let (loc, { name; comments }) = id in
let comments' = this#syntax_opt comments in
if comments == comments' then
id
else
(loc, { name; comments = comments' })
(* Visits a [label: body] statement. *)
method labeled_statement _loc (stmt : ('loc, 'loc) Ast.Statement.Labeled.t) =
let open Ast.Statement.Labeled in
let { label; body; comments } = stmt in
let label' = this#label_identifier label in
let body' = this#statement body in
let comments' = this#syntax_opt comments in
if label == label' && body == body' && comments == comments' then
stmt
else
{ label = label'; body = body'; comments = comments' }
(* Visits a literal; value and raw text are untouched, only comments map. *)
method literal _loc (expr : 'loc Ast.Literal.t) =
let open Ast.Literal in
let { value; raw; comments } = expr in
let comments' = this#syntax_opt comments in
if comments == comments' then
expr
else
{ value; raw; comments = comments' }
(* Visits a logical expression [a && b] / [a || b] / [a ?? b]; the operator
   itself is never changed. *)
method logical _loc (expr : ('loc, 'loc) Ast.Expression.Logical.t) =
let open Ast.Expression.Logical in
let { operator = _; left; right; comments } = expr in
let left' = this#expression left in
let right' = this#expression right in
let comments' = this#syntax_opt comments in
if left == left' && right == right' && comments == comments' then
expr
else
{ expr with left = left'; right = right'; comments = comments' }
(* Visits a member access [obj.prop] / [obj[prop]]. *)
method member _loc (expr : ('loc, 'loc) Ast.Expression.Member.t) =
let open Ast.Expression.Member in
let { _object; property; comments } = expr in
let _object' = this#expression _object in
let property' = this#member_property property in
let comments' = this#syntax_opt comments in
if _object == _object' && property == property' && comments == comments' then
expr
else
{ _object = _object'; property = property'; comments = comments' }
(* Visits an optional member access [obj?.prop]; delegates to [member]. *)
method optional_member loc (expr : ('loc, 'loc) Ast.Expression.OptionalMember.t) =
let open Ast.Expression.OptionalMember in
let { member; optional = _; filtered_out = _ } = expr in
let member' = this#member loc member in
if member == member' then
expr
else
{ expr with member = member' }
(* Dispatches a member property: identifier, #private name, or computed. *)
method member_property (expr : ('loc, 'loc) Ast.Expression.Member.property) =
let open Ast.Expression.Member in
match expr with
| PropertyIdentifier ident ->
id this#member_property_identifier ident expr (fun ident -> PropertyIdentifier ident)
| PropertyPrivateName ident ->
id this#member_private_name ident expr (fun ident -> PropertyPrivateName ident)
| PropertyExpression e ->
id this#member_property_expression e expr (fun e -> PropertyExpression e)
method member_property_identifier (ident : ('loc, 'loc) Ast.Identifier.t) =
this#identifier ident
method member_private_name (name : 'loc Ast.PrivateName.t) = this#private_name name
method member_property_expression (expr : ('loc, 'loc) Ast.Expression.t) = this#expression expr
(* Visits a meta-property such as [new.target] / [import.meta]. *)
method meta_property _loc (expr : 'loc Ast.Expression.MetaProperty.t) =
let open Ast.Expression.MetaProperty in
let { meta; property; comments } = expr in
let meta' = this#identifier meta in
let property' = this#identifier property in
let comments' = this#syntax_opt comments in
if meta == meta' && property == property' && comments == comments' then
expr
else
{ meta = meta'; property = property'; comments = comments' }
(* Visits [new Callee<targs>(args)]. *)
method new_ _loc (expr : ('loc, 'loc) Ast.Expression.New.t) =
let open Ast.Expression.New in
let { callee; targs; arguments; comments } = expr in
let callee' = this#expression callee in
let targs' = map_opt this#call_type_args targs in
let arguments' = map_opt this#call_arguments arguments in
let comments' = this#syntax_opt comments in
if callee == callee' && targs == targs' && arguments == arguments' && comments == comments'
then
expr
else
{ callee = callee'; targs = targs'; arguments = arguments'; comments = comments' }
(* Visits an object literal; each property is either a regular property or
   a spread, and the wrapping constructor is reused when unchanged. *)
method object_ _loc (expr : ('loc, 'loc) Ast.Expression.Object.t) =
let open Ast.Expression.Object in
let { properties; comments } = expr in
let properties' =
map_list
(fun prop ->
match prop with
| Property p ->
let p' = this#object_property p in
if p == p' then
prop
else
Property p'
| SpreadProperty s ->
let s' = this#spread_property s in
if s == s' then
prop
else
SpreadProperty s')
properties
in
let comments' = this#syntax_opt comments in
if properties == properties' && comments == comments' then
expr
else
{ properties = properties'; comments = comments' }
(* Visits one object property: init, method, getter, or setter. For init
   properties, [shorthand] stays true only while the key and value names still
   match (so a rename of either side forces the longhand form). *)
method object_property (prop : ('loc, 'loc) Ast.Expression.Object.Property.t) =
let open Ast.Expression.Object.Property in
match prop with
| (loc, Init { key; value; shorthand }) ->
let key' = this#object_key key in
let value' = this#expression value in
let shorthand' =
shorthand
&&
match (key', value') with
| ( Identifier (_, { Ast.Identifier.name = key_name; _ }),
(_, Ast.Expression.Identifier (_, { Ast.Identifier.name = value_name; _ }))
) ->
String.equal key_name value_name
| _ -> key == key' && value == value'
in
if key == key' && value == value' && shorthand == shorthand' then
prop
else
(loc, Init { key = key'; value = value'; shorthand = shorthand' })
| (loc, Method { key; value = fn }) ->
let key' = this#object_key key in
let fn' = map_loc this#function_expression_or_method fn in
if key == key' && fn == fn' then
prop
else
(loc, Method { key = key'; value = fn' })
| (loc, Get { key; value = fn; comments }) ->
let key' = this#object_key key in
let fn' = map_loc this#function_expression_or_method fn in
let comments' = this#syntax_opt comments in
if key == key' && fn == fn' && comments == comments' then
prop
else
(loc, Get { key = key'; value = fn'; comments = comments' })
| (loc, Set { key; value = fn; comments }) ->
let key' = this#object_key key in
let fn' = map_loc this#function_expression_or_method fn in
let comments' = this#syntax_opt comments in
if key == key' && fn == fn' && comments == comments' then
prop
else
(loc, Set { key = key'; value = fn'; comments = comments' })
(* Dispatches a property key: literal, identifier, #private, or computed. *)
method object_key (key : ('loc, 'loc) Ast.Expression.Object.Property.key) =
let open Ast.Expression.Object.Property in
match key with
| Literal literal -> id this#object_key_literal literal key (fun lit -> Literal lit)
| Identifier ident -> id this#object_key_identifier ident key (fun ident -> Identifier ident)
| PrivateName ident -> id this#private_name ident key (fun ident -> PrivateName ident)
| Computed computed -> id this#object_key_computed computed key (fun expr -> Computed expr)
method object_key_literal (literal : 'loc * 'loc Ast.Literal.t) =
let (loc, lit) = literal in
id_loc this#literal loc lit literal (fun lit -> (loc, lit))
method object_key_identifier (ident : ('loc, 'loc) Ast.Identifier.t) = this#identifier ident
method object_key_computed (key : ('loc, 'loc) Ast.ComputedKey.t) = this#computed_key key
(* Visits an opaque type alias [opaque type T<...> = impl; super].
   Rebuilds the node only when some child changed (physical equality), so
   unchanged subtrees keep their identity.
   Fix: the unchanged-check previously compared [impltype == impltype'] twice
   (once before [tparams] and once after); the redundant comparison is removed.
   Behavior is unchanged — the duplicate was a no-op. *)
method opaque_type _loc (otype : ('loc, 'loc) Ast.Statement.OpaqueType.t) =
let open Ast.Statement.OpaqueType in
let { id; tparams; impltype; supertype; comments } = otype in
let id' = this#binding_type_identifier id in
let tparams' = map_opt this#type_params tparams in
let impltype' = map_opt this#type_ impltype in
let supertype' = map_opt this#type_ supertype in
let comments' = this#syntax_opt comments in
if
id == id'
&& tparams == tparams'
&& impltype == impltype'
&& supertype == supertype'
&& comments == comments'
then
otype
else
{
id = id';
tparams = tparams';
impltype = impltype';
supertype = supertype';
comments = comments';
}
(* ---- Pattern entry points. Function params and catch clauses bind with
   [Let]; variable declarators carry their declaration's kind; for-in/for-of
   left-hand sides and plain assignments are kindless assignment patterns. ---- *)
method function_param_pattern (expr : ('loc, 'loc) Ast.Pattern.t) =
this#binding_pattern ~kind:Ast.Statement.VariableDeclaration.Let expr
method variable_declarator_pattern ~kind (expr : ('loc, 'loc) Ast.Pattern.t) =
this#binding_pattern ~kind expr
method catch_clause_pattern (expr : ('loc, 'loc) Ast.Pattern.t) =
this#binding_pattern ~kind:Ast.Statement.VariableDeclaration.Let expr
method for_in_assignment_pattern (expr : ('loc, 'loc) Ast.Pattern.t) =
this#assignment_pattern expr
method for_of_assignment_pattern (expr : ('loc, 'loc) Ast.Pattern.t) =
this#assignment_pattern expr
method binding_pattern
?(kind = Ast.Statement.VariableDeclaration.Var) (expr : ('loc, 'loc) Ast.Pattern.t) =
this#pattern ~kind expr
method assignment_pattern (expr : ('loc, 'loc) Ast.Pattern.t) = this#pattern expr
(* NOTE: Patterns are highly overloaded. A pattern can be a binding pattern,
   which has a kind (Var/Let/Const, with Var being the default for all pre-ES5
   bindings), or an assignment pattern, which has no kind. Subterms that are
   patterns inherit the kind (or lack thereof). *)
(* Visits a pattern (object / array / identifier / expression), threading the
   optional binding [kind] down to every sub-pattern. *)
method pattern ?kind (expr : ('loc, 'loc) Ast.Pattern.t) =
let open Ast.Pattern in
let (loc, patt) = expr in
let patt' =
match patt with
| Object { Object.properties; annot; comments } ->
let properties' = map_list (this#pattern_object_p ?kind) properties in
let annot' = this#type_annotation_hint annot in
let comments' = this#syntax_opt comments in
if properties' == properties && annot' == annot && comments' == comments then
patt
else
Object { Object.properties = properties'; annot = annot'; comments = comments' }
| Array { Array.elements; annot; comments } ->
let elements' = map_list (this#pattern_array_e ?kind) elements in
let annot' = this#type_annotation_hint annot in
let comments' = this#syntax_opt comments in
if comments == comments' && elements' == elements && annot' == annot then
patt
else
Array { Array.elements = elements'; annot = annot'; comments = comments' }
| Identifier { Identifier.name; annot; optional } ->
let name' = this#pattern_identifier ?kind name in
let annot' = this#type_annotation_hint annot in
if name == name' && annot == annot' then
patt
else
Identifier { Identifier.name = name'; annot = annot'; optional }
| Expression e -> id this#pattern_expression e patt (fun e -> Expression e)
in
if patt == patt' then
expr
else
(loc, patt')
(* A bound identifier; the binding kind is accepted for subclasses and
   deliberately ignored here. *)
method pattern_identifier ?kind (ident : ('loc, 'loc) Ast.Identifier.t) =
ignore kind;
this#identifier ident
(* A literal used as a pattern key; kind is ignored here as well. *)
method pattern_literal ?kind loc (expr : 'loc Ast.Literal.t) =
ignore kind;
this#literal loc expr
(* Dispatches an object-pattern member: [{key: p}] vs [{...rest}]. *)
method pattern_object_p ?kind (p : ('loc, 'loc) Ast.Pattern.Object.property) =
let open Ast.Pattern.Object in
match p with
| Property prop -> id (this#pattern_object_property ?kind) prop p (fun prop -> Property prop)
| RestElement prop ->
id (this#pattern_object_rest_property ?kind) prop p (fun prop -> RestElement prop)
(* Visits [{key: pattern = default}]. [shorthand] stays true only while the
   key and the bound identifier still share the same name. *)
method pattern_object_property ?kind (prop : ('loc, 'loc) Ast.Pattern.Object.Property.t) =
let open Ast.Pattern.Object.Property in
let (loc, { key; pattern; default; shorthand }) = prop in
let key' = this#pattern_object_property_key ?kind key in
let pattern' = this#pattern_object_property_pattern ?kind pattern in
let default' = map_opt this#expression default in
let shorthand' =
shorthand
&&
match (key', pattern') with
| ( Identifier (_, { Ast.Identifier.name = key_name; _ }),
( _,
Ast.Pattern.Identifier
{ Ast.Pattern.Identifier.name = (_, { Ast.Identifier.name = value_name; _ }); _ }
)
) ->
String.equal key_name value_name
| _ -> key == key' && pattern == pattern'
in
if key' == key && pattern' == pattern && default' == default && shorthand == shorthand' then
prop
else
(loc, { key = key'; pattern = pattern'; default = default'; shorthand = shorthand' })
(* Dispatches an object-pattern key: literal, identifier, or computed. *)
method pattern_object_property_key ?kind (key : ('loc, 'loc) Ast.Pattern.Object.Property.key) =
let open Ast.Pattern.Object.Property in
match key with
| Literal lit ->
id (this#pattern_object_property_literal_key ?kind) lit key (fun lit' -> Literal lit')
| Identifier identifier ->
id (this#pattern_object_property_identifier_key ?kind) identifier key (fun id' ->
Identifier id'
)
| Computed expr ->
id (this#pattern_object_property_computed_key ?kind) expr key (fun expr' -> Computed expr')
method pattern_object_property_literal_key ?kind (literal : 'loc * 'loc Ast.Literal.t) =
let (loc, key) = literal in
id_loc (this#pattern_literal ?kind) loc key literal (fun key' -> (loc, key'))
method pattern_object_property_identifier_key ?kind (key : ('loc, 'loc) Ast.Identifier.t) =
this#pattern_identifier ?kind key
method pattern_object_property_computed_key ?kind (key : ('loc, 'loc) Ast.ComputedKey.t) =
ignore kind;
this#computed_key key
(* Visits [{...argument}] inside an object pattern. *)
method pattern_object_rest_property ?kind (prop : ('loc, 'loc) Ast.Pattern.RestElement.t) =
let open Ast.Pattern.RestElement in
let (loc, { argument; comments }) = prop in
let argument' = this#pattern_object_rest_property_pattern ?kind argument in
let comments' = this#syntax_opt comments in
if argument' == argument && comments == comments' then
prop
else
(loc, { argument = argument'; comments = comments' })
method pattern_object_property_pattern ?kind (expr : ('loc, 'loc) Ast.Pattern.t) =
this#pattern ?kind expr
method pattern_object_rest_property_pattern ?kind (expr : ('loc, 'loc) Ast.Pattern.t) =
this#pattern ?kind expr
(* Dispatches an array-pattern slot: holes pass through untouched. *)
method pattern_array_e ?kind (e : ('loc, 'loc) Ast.Pattern.Array.element) =
let open Ast.Pattern.Array in
match e with
| Hole _ -> e
| Element elem -> id (this#pattern_array_element ?kind) elem e (fun elem -> Element elem)
| RestElement elem ->
id (this#pattern_array_rest_element ?kind) elem e (fun elem -> RestElement elem)
(* Visits [pattern = default] inside an array pattern. *)
method pattern_array_element ?kind (elem : ('loc, 'loc) Ast.Pattern.Array.Element.t) =
let open Ast.Pattern.Array.Element in
let (loc, { argument; default }) = elem in
let argument' = this#pattern_array_element_pattern ?kind argument in
let default' = map_opt this#expression default in
if argument == argument' && default == default' then
elem
else
(loc, { argument = argument'; default = default' })
method pattern_array_element_pattern ?kind (patt : ('loc, 'loc) Ast.Pattern.t) =
this#pattern ?kind patt
(* Visits [...argument] inside an array pattern. *)
method pattern_array_rest_element ?kind (elem : ('loc, 'loc) Ast.Pattern.RestElement.t) =
let open Ast.Pattern.RestElement in
let (loc, { argument; comments }) = elem in
let argument' = this#pattern_array_rest_element_pattern ?kind argument in
let comments' = this#syntax_opt comments in
if argument' == argument && comments == comments' then
elem
else
(loc, { argument = argument'; comments = comments' })
method pattern_array_rest_element_pattern ?kind (expr : ('loc, 'loc) Ast.Pattern.t) =
this#pattern ?kind expr
method pattern_expression (expr : ('loc, 'loc) Ast.Expression.t) = this#expression expr
(* Visits a Flow predicate annotation: inferred ([%checks]) or declared
   ([%checks(expr)]). *)
method predicate (pred : ('loc, 'loc) Ast.Type.Predicate.t) =
let open Ast.Type.Predicate in
let (loc, { kind; comments }) = pred in
let kind' =
match kind with
| Inferred -> kind
| Declared expr -> id this#expression expr kind (fun expr' -> Declared expr')
in
let comments' = this#syntax_opt comments in
if kind == kind' && comments == comments' then
pred
else
(loc, { kind = kind'; comments = comments' })
method predicate_expression (expr : ('loc, 'loc) Ast.Expression.t) = this#expression expr
(* Visits a function's [...rest] parameter. *)
method function_rest_param (expr : ('loc, 'loc) Ast.Function.RestParam.t) =
let open Ast.Function.RestParam in
let (loc, { argument; comments }) = expr in
let argument' = this#function_param_pattern argument in
let comments' = this#syntax_opt comments in
if argument == argument' && comments == comments' then
expr
else
(loc, { argument = argument'; comments = comments' })
(* Visits a [return] statement; [return_out] is carried through unchanged. *)
method return _loc (stmt : ('loc, 'loc) Ast.Statement.Return.t) =
let open Ast.Statement.Return in
let { argument; comments; return_out } = stmt in
let argument' = map_opt this#expression argument in
let comments' = this#syntax_opt comments in
if argument == argument' && comments == comments' then
stmt
else
{ argument = argument'; comments = comments'; return_out }
(* Visits a comma-sequence expression [a, b, c]. *)
method sequence _loc (expr : ('loc, 'loc) Ast.Expression.Sequence.t) =
let open Ast.Expression.Sequence in
let { expressions; comments } = expr in
let expressions' = map_list this#expression expressions in
let comments' = this#syntax_opt comments in
if expressions == expressions' && comments == comments' then
expr
else
{ expressions = expressions'; comments = comments' }
method toplevel_statement_list (stmts : ('loc, 'loc) Ast.Statement.t list) =
this#statement_list stmts
(* Statements map through [statement_fork_point], which lets a subclass
   replace one statement with zero or several. *)
method statement_list (stmts : ('loc, 'loc) Ast.Statement.t list) =
map_list_multiple this#statement_fork_point stmts
method statement_fork_point (stmt : ('loc, 'loc) Ast.Statement.t) = [this#statement stmt]
(* Visits [...argument] in call arguments / array literals. *)
method spread_element (expr : ('loc, 'loc) Ast.Expression.SpreadElement.t) =
let open Ast.Expression.SpreadElement in
let (loc, { argument; comments }) = expr in
let argument' = this#expression argument in
let comments' = this#syntax_opt comments in
if argument == argument' && comments == comments' then
expr
else
(loc, { argument = argument'; comments = comments' })
(* Visits [...argument] in an object literal. *)
method spread_property (expr : ('loc, 'loc) Ast.Expression.Object.SpreadProperty.t) =
let open Ast.Expression.Object.SpreadProperty in
let (loc, { argument; comments }) = expr in
let argument' = this#expression argument in
let comments' = this#syntax_opt comments in
if argument == argument' && comments == comments' then
expr
else
(loc, { argument = argument'; comments = comments' })
(* Visits a [super] expression; only its attached comments can change.
   The original node is reused whenever the comments come back physically
   identical, preserving sharing. *)
method super_expression _loc (expr : 'loc Ast.Expression.Super.t) =
let open Ast.Expression.Super in
let { comments } = expr in
(match this#syntax_opt comments with
| mapped when mapped == comments -> expr
| mapped -> { comments = mapped })
(* Visits a [switch]; [exhaustive_out] is carried through unchanged. *)
method switch _loc (switch : ('loc, 'loc) Ast.Statement.Switch.t) =
let open Ast.Statement.Switch in
let { discriminant; cases; comments; exhaustive_out } = switch in
let discriminant' = this#expression discriminant in
let cases' = map_list this#switch_case cases in
let comments' = this#syntax_opt comments in
if discriminant == discriminant' && cases == cases' && comments == comments' then
switch
else
{ discriminant = discriminant'; cases = cases'; comments = comments'; exhaustive_out }
(* Visits one [case test:] / [default:] arm; [test] is [None] for default. *)
method switch_case (case : ('loc, 'loc) Ast.Statement.Switch.Case.t) =
let open Ast.Statement.Switch.Case in
let (loc, { test; consequent; comments }) = case in
let test' = map_opt this#expression test in
let consequent' = this#statement_list consequent in
let comments' = this#syntax_opt comments in
if test == test' && consequent == consequent' && comments == comments' then
case
else
(loc, { test = test'; consequent = consequent'; comments = comments' })
(* Visits a tagged template [tag`...`]. *)
method tagged_template _loc (expr : ('loc, 'loc) Ast.Expression.TaggedTemplate.t) =
let open Ast.Expression.TaggedTemplate in
let { tag; quasi; comments } = expr in
let tag' = this#expression tag in
let quasi' = map_loc this#template_literal quasi in
let comments' = this#syntax_opt comments in
if tag == tag' && quasi == quasi' && comments == comments' then
expr
else
{ tag = tag'; quasi = quasi'; comments = comments' }
(* Visits a template literal: raw quasis plus interpolated expressions. *)
method template_literal _loc (expr : ('loc, 'loc) Ast.Expression.TemplateLiteral.t) =
let open Ast.Expression.TemplateLiteral in
let { quasis; expressions; comments } = expr in
let quasis' = map_list this#template_literal_element quasis in
let expressions' = map_list this#expression expressions in
let comments' = this#syntax_opt comments in
if quasis == quasis' && expressions == expressions' && comments == comments' then
expr
else
{ quasis = quasis'; expressions = expressions'; comments = comments' }
(* TODO *)
(* Template quasi elements are currently returned unchanged (identity). *)
method template_literal_element (elem : 'loc Ast.Expression.TemplateLiteral.Element.t) = elem
(* Visits a [this] expression; only its comments can change. *)
method this_expression _loc (expr : 'loc Ast.Expression.This.t) =
let open Ast.Expression.This in
let { comments } = expr in
let comments' = this#syntax_opt comments in
if comments == comments' then
expr
else
{ comments = comments' }
(* Visits a [throw argument] statement. The original node is reused when
   neither the argument nor the comments changed (physical equality). *)
method throw _loc (stmt : ('loc, 'loc) Ast.Statement.Throw.t) =
let open Ast.Statement.Throw in
let { argument = arg; comments = cmts } = stmt in
let arg' = this#expression arg in
let cmts' = this#syntax_opt cmts in
let unchanged = arg' == arg && cmts' == cmts in
if unchanged then
stmt
else
{ argument = arg'; comments = cmts' }
(* Visits [try block catch (clause) finally block]; handler and finalizer are
   both optional. *)
method try_catch _loc (stmt : ('loc, 'loc) Ast.Statement.Try.t) =
let open Ast.Statement.Try in
let { block; handler; finalizer; comments } = stmt in
let block' = map_loc this#block block in
let handler' =
match handler with
| Some (loc, clause) ->
id_loc this#catch_clause loc clause handler (fun clause -> Some (loc, clause))
| None -> handler
in
let finalizer' =
match finalizer with
| Some (finalizer_loc, block) ->
id_loc this#block finalizer_loc block finalizer (fun block -> Some (finalizer_loc, block))
| None -> finalizer
in
let comments' = this#syntax_opt comments in
if block == block' && handler == handler' && finalizer == finalizer' && comments == comments'
then
stmt
else
{ block = block'; handler = handler'; finalizer = finalizer'; comments = comments' }
(* Visits a type cast [(expression: annot)]. *)
method type_cast _loc (expr : ('loc, 'loc) Ast.Expression.TypeCast.t) =
let open Ast.Expression.TypeCast in
let { expression; annot; comments } = expr in
let expression' = this#expression expression in
let annot' = this#type_annotation annot in
let comments' = this#syntax_opt comments in
if expression' == expression && annot' == annot && comments' == comments then
expr
else
{ expression = expression'; annot = annot'; comments = comments' }
(* Visits a unary expression; the operator is never changed. *)
method unary_expression _loc (expr : ('loc, 'loc) Flow_ast.Expression.Unary.t) =
let open Flow_ast.Expression.Unary in
let { argument; operator = _; comments } = expr in
let argument' = this#expression argument in
let comments' = this#syntax_opt comments in
if argument == argument' && comments == comments' then
expr
else
{ expr with argument = argument'; comments = comments' }
(* Visits [++x] / [x--] etc.; operator and prefix flag are never changed. *)
method update_expression _loc (expr : ('loc, 'loc) Ast.Expression.Update.t) =
let open Ast.Expression.Update in
let { argument; operator = _; prefix = _; comments } = expr in
let argument' = this#expression argument in
let comments' = this#syntax_opt comments in
if argument == argument' && comments == comments' then
expr
else
{ expr with argument = argument'; comments = comments' }
(* Visits a [var]/[let]/[const] declaration, threading the kind to each
   declarator so binding patterns see it. *)
method variable_declaration _loc (decl : ('loc, 'loc) Ast.Statement.VariableDeclaration.t) =
let open Ast.Statement.VariableDeclaration in
let { declarations; kind; comments } = decl in
let decls' = map_list (this#variable_declarator ~kind) declarations in
let comments' = this#syntax_opt comments in
if declarations == decls' && comments == comments' then
decl
else
{ declarations = decls'; kind; comments = comments' }
(* Visits one [id = init] declarator. *)
method variable_declarator
~kind (decl : ('loc, 'loc) Ast.Statement.VariableDeclaration.Declarator.t) =
let open Ast.Statement.VariableDeclaration.Declarator in
let (loc, { id; init }) = decl in
let id' = this#variable_declarator_pattern ~kind id in
let init' = map_opt this#expression init in
if id == id' && init == init' then
decl
else
(loc, { id = id'; init = init' })
method while_ _loc (stuff : ('loc, 'loc) Ast.Statement.While.t) =
let open Ast.Statement.While in
let { test; body; comments } = stuff in
let test' = this#predicate_expression test in
let body' = this#statement body in
let comments' = this#syntax_opt comments in
if test == test' && body == body' && comments == comments' then
stuff
else
{ test = test'; body = body'; comments = comments' }
method with_ _loc (stuff : ('loc, 'loc) Ast.Statement.With.t) =
let open Ast.Statement.With in
let { _object; body; comments } = stuff in
let _object' = this#expression _object in
let body' = this#statement body in
let comments' = this#syntax_opt comments in
if _object == _object' && body == body' && comments == comments' then
stuff
else
{ _object = _object'; body = body'; comments = comments' }
method type_alias _loc (stuff : ('loc, 'loc) Ast.Statement.TypeAlias.t) =
let open Ast.Statement.TypeAlias in
let { id; tparams; right; comments } = stuff in
let id' = this#binding_type_identifier id in
let tparams' = map_opt this#type_params tparams in
let right' = this#type_ right in
let comments' = this#syntax_opt comments in
if id == id' && right == right' && tparams == tparams' && comments == comments' then
stuff
else
{ id = id'; tparams = tparams'; right = right'; comments = comments' }
method yield _loc (expr : ('loc, 'loc) Ast.Expression.Yield.t) =
let open Ast.Expression.Yield in
let { argument; delegate; comments; result_out } = expr in
let argument' = map_opt this#expression argument in
let comments' = this#syntax_opt comments in
if comments == comments' && argument == argument' then
expr
else
{ argument = argument'; delegate; comments = comments'; result_out }
end
let fold_program (mappers : 'a mapper list) ast =
List.fold_left (fun ast (m : 'a mapper) -> m#program ast) ast mappers
|
f5a11a65c27e1631e089e9c3d94ac3cce3ae16b44cbe609890b3674ac8a2341a | DerekCuevas/interview-cake-clj | core_test.clj | (ns word-cloud.core-test
(:require [clojure.test :refer :all]
[word-cloud.core :refer :all]))
(def sentence-a "After beating the eggs, Dana read the next step:")
(def sentence-b "Add milk and eggs, then add flour and sugar.")
(def sentence-c "Hi how are you?")
(def sentence-d "What when where why when why what? Where when. I don't know.")
(def sentence-e "Hello bye hi hi hello hi bye hi bye.")
(def map-a {"after" 1 "beating" 1 "the" 2 "eggs" 1 "dana" 1 "read" 1 "next" 1 "step" 1})
(def map-b {"add" 2 "milk" 1 "and" 2 "eggs" 1 "then" 1 "flour" 1 "sugar" 1})
(def map-c {"hi" 1 "how" 1 "are" 1 "you" 1})
(def map-d {"what" 2 "when" 3 "where" 2 "why" 2 "i" 1 "don't" 1 "know" 1})
(def map-e {"hello" 2 "bye" 3 "hi" 4})
(deftest word-cloud-test
(testing "edge cases"
(is (= (word-cloud "hi") {"hi" 1}))
(is (= (word-cloud "hi HI Hi hI hi. hi? hi!") {"hi" 7})))
(testing "returns frequencies of words in sentence, ignores case and punctuation."
(is (= (word-cloud sentence-a) map-a))
(is (= (word-cloud sentence-b) map-b))
(is (= (word-cloud sentence-c) map-c))
(is (= (word-cloud sentence-d) map-d))
(is (= (word-cloud sentence-e) map-e))))
| null | https://raw.githubusercontent.com/DerekCuevas/interview-cake-clj/f17d3239bb30bcc17ced473f055a9859f9d1fb8d/word-cloud/test/word_cloud/core_test.clj | clojure | (ns word-cloud.core-test
(:require [clojure.test :refer :all]
[word-cloud.core :refer :all]))
(def sentence-a "After beating the eggs, Dana read the next step:")
(def sentence-b "Add milk and eggs, then add flour and sugar.")
(def sentence-c "Hi how are you?")
(def sentence-d "What when where why when why what? Where when. I don't know.")
(def sentence-e "Hello bye hi hi hello hi bye hi bye.")
(def map-a {"after" 1 "beating" 1 "the" 2 "eggs" 1 "dana" 1 "read" 1 "next" 1 "step" 1})
(def map-b {"add" 2 "milk" 1 "and" 2 "eggs" 1 "then" 1 "flour" 1 "sugar" 1})
(def map-c {"hi" 1 "how" 1 "are" 1 "you" 1})
(def map-d {"what" 2 "when" 3 "where" 2 "why" 2 "i" 1 "don't" 1 "know" 1})
(def map-e {"hello" 2 "bye" 3 "hi" 4})
(deftest word-cloud-test
(testing "edge cases"
(is (= (word-cloud "hi") {"hi" 1}))
(is (= (word-cloud "hi HI Hi hI hi. hi? hi!") {"hi" 7})))
(testing "returns frequencies of words in sentence, ignores case and punctuation."
(is (= (word-cloud sentence-a) map-a))
(is (= (word-cloud sentence-b) map-b))
(is (= (word-cloud sentence-c) map-c))
(is (= (word-cloud sentence-d) map-d))
(is (= (word-cloud sentence-e) map-e))))
| |
e5c35e2448bfc775df44692c20478a5ee7077b7007c238fc18acfdd98d7467b4 | johnyob/dromedary | test_pr6690.ml | open! Import
open Util
let%expect_test "" =
let str =
{|
type 'a visit_action;;
type insert;;
type 'a local_visit_action;;
type ('a, 'result, 'visit_action) context =
| Local of 'insert. unit constraint 'result = 'a * 'insert and 'visit_action = 'a local_visit_action
| Global constraint 'a = 'result and 'visit_action = 'a visit_action
;;
external raise : 'a. exn -> 'a = "%raise";;
exception Exit;;
let (type 'visit_action) vexpr =
exists (type 'a 'result 'c) ->
fun (ctx : ('a, 'result, 'visit_action) context) ->
(match ctx with
( Local () -> fun _ -> raise Exit
| Global -> fun _ -> raise Exit
)
: 'c -> 'visit_action)
;;
|}
in
print_infer_result str;
[%expect {|
"Non rigid equations" |}]
let%expect_test "" =
let str =
{|
type 'a visit_action;;
type insert;;
type 'a local_visit_action;;
type ('a, 'result, 'visit_action) context =
| Local of 'insert. unit constraint 'result = 'a * 'insert and 'visit_action = 'a local_visit_action
| Global constraint 'a = 'result and 'visit_action = 'a visit_action
;;
external raise : 'a. exn -> 'a = "%raise";;
exception Exit;;
let (type 'result 'visit_action) vexpr =
fun (ctx : (unit, 'result, 'visit_action) context) ->
(match ctx with
( Local () -> fun _ -> raise Exit
| Global -> fun _ -> raise Exit
)
: unit -> 'visit_action)
;;
|}
in
print_infer_result str;
[%expect {|
Structure:
└──Structure:
└──Structure item: Type
└──Type declaration:
└──Type name: visit_action
└──Type declaration kind: Abstract
└──Structure item: Type
└──Type declaration:
└──Type name: insert
└──Type declaration kind: Abstract
└──Structure item: Type
└──Type declaration:
└──Type name: local_visit_action
└──Type declaration kind: Abstract
└──Structure item: Type
└──Type declaration:
└──Type name: context
└──Type declaration kind: Variant
└──Constructor declaration:
└──Constructor name: Local
└──Constructor alphas: 37 38 39
└──Constructor type:
└──Type expr: Constructor: context
└──Type expr: Variable: 37
└──Type expr: Variable: 38
└──Type expr: Variable: 39
└──Constructor argument:
└──Constructor betas: 40
└──Type expr: Constructor: unit
└──Constraint:
└──Type expr: Variable: 38
└──Type expr: Tuple
└──Type expr: Variable: 37
└──Type expr: Variable: 40
└──Constraint:
└──Type expr: Variable: 39
└──Type expr: Constructor: local_visit_action
└──Type expr: Variable: 37
└──Constructor declaration:
└──Constructor name: Global
└──Constructor alphas: 37 38 39
└──Constructor type:
└──Type expr: Constructor: context
└──Type expr: Variable: 37
└──Type expr: Variable: 38
└──Type expr: Variable: 39
└──Constraint:
└──Type expr: Variable: 37
└──Type expr: Variable: 38
└──Constraint:
└──Type expr: Variable: 39
└──Type expr: Constructor: visit_action
└──Type expr: Variable: 37
└──Structure item: Primitive
└──Value description:
└──Name: raise
└──Scheme:
└──Variables: 0
└──Type expr: Arrow
└──Type expr: Constructor: exn
└──Type expr: Variable: 0
└──Primitive name: %raise
└──Structure item: Exception
└──Type exception:
└──Extension constructor:
└──Extension name: exn
└──Extension parameters:
└──Extension constructor kind: Declaration
└──Constructor declaration:
└──Constructor name: Exit
└──Constructor alphas:
└──Constructor type:
└──Type expr: Constructor: exn
└──Structure item: Let
└──Value bindings:
└──Value binding:
└──Pattern:
└──Type expr: Arrow
└──Type expr: Constructor: context
└──Type expr: Constructor: unit
└──Type expr: Variable: 18
└──Type expr: Variable: 19
└──Type expr: Arrow
└──Type expr: Constructor: unit
└──Type expr: Variable: 19
└──Desc: Variable: vexpr
└──Abstraction:
└──Variables: 18,19
└──Expression:
└──Type expr: Arrow
└──Type expr: Constructor: context
└──Type expr: Constructor: unit
└──Type expr: Variable: 18
└──Type expr: Variable: 19
└──Type expr: Arrow
└──Type expr: Constructor: unit
└──Type expr: Variable: 19
└──Desc: Function
└──Pattern:
└──Type expr: Constructor: context
└──Type expr: Constructor: unit
└──Type expr: Variable: 18
└──Type expr: Variable: 19
└──Desc: Variable: ctx
└──Expression:
└──Type expr: Arrow
└──Type expr: Constructor: unit
└──Type expr: Variable: 19
└──Desc: Match
└──Expression:
└──Type expr: Constructor: context
└──Type expr: Constructor: unit
└──Type expr: Variable: 18
└──Type expr: Variable: 19
└──Desc: Variable
└──Variable: ctx
└──Type expr: Constructor: context
└──Type expr: Constructor: unit
└──Type expr: Variable: 18
└──Type expr: Variable: 19
└──Cases:
└──Case:
└──Pattern:
└──Type expr: Constructor: context
└──Type expr: Constructor: unit
└──Type expr: Variable: 18
└──Type expr: Variable: 19
└──Desc: Construct
└──Constructor description:
└──Name: Local
└──Constructor argument type:
└──Type expr: Constructor: unit
└──Constructor type:
└──Type expr: Constructor: context
└──Type expr: Constructor: unit
└──Type expr: Variable: 18
└──Type expr: Variable: 19
└──Pattern:
└──Type expr: Constructor: unit
└──Desc: Constant: ()
└──Expression:
└──Type expr: Arrow
└──Type expr: Constructor: unit
└──Type expr: Variable: 19
└──Desc: Function
└──Pattern:
└──Type expr: Constructor: unit
└──Desc: Any
└──Expression:
└──Type expr: Variable: 19
└──Desc: Application
└──Expression:
└──Type expr: Arrow
└──Type expr: Constructor: exn
└──Type expr: Variable: 19
└──Desc: Variable
└──Variable: raise
└──Type expr: Variable: 19
└──Expression:
└──Type expr: Constructor: exn
└──Desc: Construct
└──Constructor description:
└──Name: Exit
└──Constructor type:
└──Type expr: Constructor: exn
└──Case:
└──Pattern:
└──Type expr: Constructor: context
└──Type expr: Constructor: unit
└──Type expr: Variable: 18
└──Type expr: Variable: 19
└──Desc: Construct
└──Constructor description:
└──Name: Global
└──Constructor type:
└──Type expr: Constructor: context
└──Type expr: Constructor: unit
└──Type expr: Variable: 18
└──Type expr: Variable: 19
└──Expression:
└──Type expr: Arrow
└──Type expr: Constructor: unit
└──Type expr: Variable: 19
└──Desc: Function
└──Pattern:
└──Type expr: Constructor: unit
└──Desc: Any
└──Expression:
└──Type expr: Variable: 19
└──Desc: Application
└──Expression:
└──Type expr: Arrow
└──Type expr: Constructor: exn
└──Type expr: Variable: 19
└──Desc: Variable
└──Variable: raise
└──Type expr: Variable: 19
└──Expression:
└──Type expr: Constructor: exn
└──Desc: Construct
└──Constructor description:
└──Name: Exit
└──Constructor type:
└──Type expr: Constructor: exn |}] | null | https://raw.githubusercontent.com/johnyob/dromedary/a9359321492ff5c38c143385513e673d8d1f05a4/test/typing/gadts/test_pr6690.ml | ocaml | open! Import
open Util
let%expect_test "" =
let str =
{|
type 'a visit_action;;
type insert;;
type 'a local_visit_action;;
type ('a, 'result, 'visit_action) context =
| Local of 'insert. unit constraint 'result = 'a * 'insert and 'visit_action = 'a local_visit_action
| Global constraint 'a = 'result and 'visit_action = 'a visit_action
;;
external raise : 'a. exn -> 'a = "%raise";;
exception Exit;;
let (type 'visit_action) vexpr =
exists (type 'a 'result 'c) ->
fun (ctx : ('a, 'result, 'visit_action) context) ->
(match ctx with
( Local () -> fun _ -> raise Exit
| Global -> fun _ -> raise Exit
)
: 'c -> 'visit_action)
;;
|}
in
print_infer_result str;
[%expect {|
"Non rigid equations" |}]
let%expect_test "" =
let str =
{|
type 'a visit_action;;
type insert;;
type 'a local_visit_action;;
type ('a, 'result, 'visit_action) context =
| Local of 'insert. unit constraint 'result = 'a * 'insert and 'visit_action = 'a local_visit_action
| Global constraint 'a = 'result and 'visit_action = 'a visit_action
;;
external raise : 'a. exn -> 'a = "%raise";;
exception Exit;;
let (type 'result 'visit_action) vexpr =
fun (ctx : (unit, 'result, 'visit_action) context) ->
(match ctx with
( Local () -> fun _ -> raise Exit
| Global -> fun _ -> raise Exit
)
: unit -> 'visit_action)
;;
|}
in
print_infer_result str;
[%expect {|
Structure:
└──Structure:
└──Structure item: Type
└──Type declaration:
└──Type name: visit_action
└──Type declaration kind: Abstract
└──Structure item: Type
└──Type declaration:
└──Type name: insert
└──Type declaration kind: Abstract
└──Structure item: Type
└──Type declaration:
└──Type name: local_visit_action
└──Type declaration kind: Abstract
└──Structure item: Type
└──Type declaration:
└──Type name: context
└──Type declaration kind: Variant
└──Constructor declaration:
└──Constructor name: Local
└──Constructor alphas: 37 38 39
└──Constructor type:
└──Type expr: Constructor: context
└──Type expr: Variable: 37
└──Type expr: Variable: 38
└──Type expr: Variable: 39
└──Constructor argument:
└──Constructor betas: 40
└──Type expr: Constructor: unit
└──Constraint:
└──Type expr: Variable: 38
└──Type expr: Tuple
└──Type expr: Variable: 37
└──Type expr: Variable: 40
└──Constraint:
└──Type expr: Variable: 39
└──Type expr: Constructor: local_visit_action
└──Type expr: Variable: 37
└──Constructor declaration:
└──Constructor name: Global
└──Constructor alphas: 37 38 39
└──Constructor type:
└──Type expr: Constructor: context
└──Type expr: Variable: 37
└──Type expr: Variable: 38
└──Type expr: Variable: 39
└──Constraint:
└──Type expr: Variable: 37
└──Type expr: Variable: 38
└──Constraint:
└──Type expr: Variable: 39
└──Type expr: Constructor: visit_action
└──Type expr: Variable: 37
└──Structure item: Primitive
└──Value description:
└──Name: raise
└──Scheme:
└──Variables: 0
└──Type expr: Arrow
└──Type expr: Constructor: exn
└──Type expr: Variable: 0
└──Primitive name: %raise
└──Structure item: Exception
└──Type exception:
└──Extension constructor:
└──Extension name: exn
└──Extension parameters:
└──Extension constructor kind: Declaration
└──Constructor declaration:
└──Constructor name: Exit
└──Constructor alphas:
└──Constructor type:
└──Type expr: Constructor: exn
└──Structure item: Let
└──Value bindings:
└──Value binding:
└──Pattern:
└──Type expr: Arrow
└──Type expr: Constructor: context
└──Type expr: Constructor: unit
└──Type expr: Variable: 18
└──Type expr: Variable: 19
└──Type expr: Arrow
└──Type expr: Constructor: unit
└──Type expr: Variable: 19
└──Desc: Variable: vexpr
└──Abstraction:
└──Variables: 18,19
└──Expression:
└──Type expr: Arrow
└──Type expr: Constructor: context
└──Type expr: Constructor: unit
└──Type expr: Variable: 18
└──Type expr: Variable: 19
└──Type expr: Arrow
└──Type expr: Constructor: unit
└──Type expr: Variable: 19
└──Desc: Function
└──Pattern:
└──Type expr: Constructor: context
└──Type expr: Constructor: unit
└──Type expr: Variable: 18
└──Type expr: Variable: 19
└──Desc: Variable: ctx
└──Expression:
└──Type expr: Arrow
└──Type expr: Constructor: unit
└──Type expr: Variable: 19
└──Desc: Match
└──Expression:
└──Type expr: Constructor: context
└──Type expr: Constructor: unit
└──Type expr: Variable: 18
└──Type expr: Variable: 19
└──Desc: Variable
└──Variable: ctx
└──Type expr: Constructor: context
└──Type expr: Constructor: unit
└──Type expr: Variable: 18
└──Type expr: Variable: 19
└──Cases:
└──Case:
└──Pattern:
└──Type expr: Constructor: context
└──Type expr: Constructor: unit
└──Type expr: Variable: 18
└──Type expr: Variable: 19
└──Desc: Construct
└──Constructor description:
└──Name: Local
└──Constructor argument type:
└──Type expr: Constructor: unit
└──Constructor type:
└──Type expr: Constructor: context
└──Type expr: Constructor: unit
└──Type expr: Variable: 18
└──Type expr: Variable: 19
└──Pattern:
└──Type expr: Constructor: unit
└──Desc: Constant: ()
└──Expression:
└──Type expr: Arrow
└──Type expr: Constructor: unit
└──Type expr: Variable: 19
└──Desc: Function
└──Pattern:
└──Type expr: Constructor: unit
└──Desc: Any
└──Expression:
└──Type expr: Variable: 19
└──Desc: Application
└──Expression:
└──Type expr: Arrow
└──Type expr: Constructor: exn
└──Type expr: Variable: 19
└──Desc: Variable
└──Variable: raise
└──Type expr: Variable: 19
└──Expression:
└──Type expr: Constructor: exn
└──Desc: Construct
└──Constructor description:
└──Name: Exit
└──Constructor type:
└──Type expr: Constructor: exn
└──Case:
└──Pattern:
└──Type expr: Constructor: context
└──Type expr: Constructor: unit
└──Type expr: Variable: 18
└──Type expr: Variable: 19
└──Desc: Construct
└──Constructor description:
└──Name: Global
└──Constructor type:
└──Type expr: Constructor: context
└──Type expr: Constructor: unit
└──Type expr: Variable: 18
└──Type expr: Variable: 19
└──Expression:
└──Type expr: Arrow
└──Type expr: Constructor: unit
└──Type expr: Variable: 19
└──Desc: Function
└──Pattern:
└──Type expr: Constructor: unit
└──Desc: Any
└──Expression:
└──Type expr: Variable: 19
└──Desc: Application
└──Expression:
└──Type expr: Arrow
└──Type expr: Constructor: exn
└──Type expr: Variable: 19
└──Desc: Variable
└──Variable: raise
└──Type expr: Variable: 19
└──Expression:
└──Type expr: Constructor: exn
└──Desc: Construct
└──Constructor description:
└──Name: Exit
└──Constructor type:
└──Type expr: Constructor: exn |}] | |
58329a487fb412189a4058a9abad12928f71d27af5ab70f4c72418fe48668089 | simmone/racket-simple-xlsx | lib.rkt | #lang racket
(provide (contract-out
[squeeze-range-hash (-> (hash/c natural? number?) (listof (list/c natural? natural? number?)))]
[rgb? (-> string? boolean?)]
))
(define (rgb? color_string)
(if (or
(regexp-match #px"^([0-9]|[a-zA-Z]){6}$" color_string)
(regexp-match #px"^([0-9]|[a-zA-Z]){8}$" color_string))
#t
#f))
(define (squeeze-range-hash range_hash)
(let loop ([range_list (sort #:key car (hash->list range_hash) <)]
[loop_start_index -1]
[loop_end_index -1]
[loop_val -1]
[result_list '()])
(if (not (null? range_list))
(let* ([range (car range_list)]
[index (car range)]
[val (cdr range)])
(if (and (= index (add1 loop_end_index)) (= val loop_val))
(loop
(cdr range_list)
loop_start_index
(add1 loop_end_index)
val
result_list)
(if (= loop_start_index -1)
(loop (cdr range_list) index index val result_list)
(loop (cdr range_list) index index val (cons (list loop_start_index loop_end_index loop_val) result_list)))))
(if (= loop_start_index -1)
'()
(reverse
(cons (list loop_start_index loop_end_index loop_val) result_list))))))
| null | https://raw.githubusercontent.com/simmone/racket-simple-xlsx/e0ac3190b6700b0ee1dd80ed91a8f4318533d012/simple-xlsx/style/lib.rkt | racket | #lang racket
(provide (contract-out
[squeeze-range-hash (-> (hash/c natural? number?) (listof (list/c natural? natural? number?)))]
[rgb? (-> string? boolean?)]
))
(define (rgb? color_string)
(if (or
(regexp-match #px"^([0-9]|[a-zA-Z]){6}$" color_string)
(regexp-match #px"^([0-9]|[a-zA-Z]){8}$" color_string))
#t
#f))
(define (squeeze-range-hash range_hash)
(let loop ([range_list (sort #:key car (hash->list range_hash) <)]
[loop_start_index -1]
[loop_end_index -1]
[loop_val -1]
[result_list '()])
(if (not (null? range_list))
(let* ([range (car range_list)]
[index (car range)]
[val (cdr range)])
(if (and (= index (add1 loop_end_index)) (= val loop_val))
(loop
(cdr range_list)
loop_start_index
(add1 loop_end_index)
val
result_list)
(if (= loop_start_index -1)
(loop (cdr range_list) index index val result_list)
(loop (cdr range_list) index index val (cons (list loop_start_index loop_end_index loop_val) result_list)))))
(if (= loop_start_index -1)
'()
(reverse
(cons (list loop_start_index loop_end_index loop_val) result_list))))))
| |
b3137670ccfee1b396394f70a5103476f46e6f5b30a69432dfb103de5c2c13ee | mbutterick/quad | test-hello.rkt | #lang quadwriter/markdown
Hello world | null | https://raw.githubusercontent.com/mbutterick/quad/395447f35c2fb9fc7b6199ed185850906d80811d/qtest/test-hello.rkt | racket | #lang quadwriter/markdown
Hello world | |
c1e3e57c5a05758d87bbbb69a6e5d16b667e7bb382f09dff454dc21deead7370 | tommay/pokemon-go | Cost.hs | module Cost (
Cost,
new,
dust,
candy,
xlCandy,
needsXlCandy,
) where
import qualified Data.Ord as Ord
data Cost = Cost {
dust :: Int,
candy :: Int,
xlCandy :: Int
} deriving (Show)
instance Semigroup Cost where
Cost dust candy xlCandy <> Cost dust' candy' xlCandy' =
Cost (dust + dust') (candy + candy') (xlCandy + xlCandy')
instance Monoid Cost where
mempty = Cost 0 0 0
new = Cost
needsXlCandy :: Cost -> Bool
needsXlCandy = (> 0) . xlCandy
| null | https://raw.githubusercontent.com/tommay/pokemon-go/d2e35858b0a3cd25ddd14af674c7216560475914/src/Cost.hs | haskell | module Cost (
Cost,
new,
dust,
candy,
xlCandy,
needsXlCandy,
) where
import qualified Data.Ord as Ord
data Cost = Cost {
dust :: Int,
candy :: Int,
xlCandy :: Int
} deriving (Show)
instance Semigroup Cost where
Cost dust candy xlCandy <> Cost dust' candy' xlCandy' =
Cost (dust + dust') (candy + candy') (xlCandy + xlCandy')
instance Monoid Cost where
mempty = Cost 0 0 0
new = Cost
needsXlCandy :: Cost -> Bool
needsXlCandy = (> 0) . xlCandy
| |
6c937ee218b8555a8627cdf4143c9067c3b42aef068a303cfd533a000ceec26d | wies/grasshopper | reduction.ml | * { 5 Reduction from GRASS to SMT }
open Util
open Grass
open GrassUtil
open Axioms
open SimplifyGrass
(** Eliminate all implicit and explicit existential quantifiers using skolemization.
** Assumes that [f] is typed and in negation normal form. *)
let elim_exists =
let e = fresh_ident "?e" in
let rec elim_neq seen_adts bvs = function
| BoolOp (Not, [Atom (App (Eq, [t1; t2], _), a)]) as f when bvs = [] ->
(match sort_of t1 with
| Set srt ->
let ve = mk_var srt e in
mk_exists [(e, srt)] (smk_or [smk_and [smk_elem ~ann:a ve t1; mk_not (smk_elem ~ann:a ve t2)];
smk_and [smk_elem ~ann:a ve t2; mk_not (smk_elem ~ann:a ve t1)]])
| Map (dsrts, rsrt) ->
let dom_vs = List.map (fun dsrts -> List.map (fun srt -> fresh_ident "?i", srt) dsrts) [dsrts] in
let dom_vts = List.map (fun vs -> List.map (fun (v, srt) -> mk_var srt v) vs) dom_vs in
let mk_reads t = List.fold_left (fun t_read vts -> mk_read t_read vts) t dom_vts in
let t1_read = mk_reads t1 in
let t2_read = mk_reads t2 in
let vs = List.flatten dom_vs in
mk_and [f; elim_neq seen_adts bvs (mk_exists vs (annotate (mk_neq t1_read t2_read) a))]
| Adt (id, adts) when not @@ IdSet.mem id seen_adts ->
let cstrs = List.assoc id adts in
let expand new_vs = function
| App (Constructor cid, ts, _) -> new_vs, [(cid, mk_true, ts)]
| t ->
match cstrs with
| [cid, dstrs] ->
let ts =
List.map (fun (id, srt) -> mk_destr srt id t) dstrs
in
(new_vs, [cid, mk_true, ts])
| _ ->
List.fold_left
(fun (new_vs, cases) (cid, dstrs) ->
let vs = List.map (fun (id, srt) -> fresh_ident "?x", unfold_adts adts srt) dstrs in
let vts = List.map (fun (v, srt) -> mk_var srt v) vs in
vs @ new_vs, (cid, mk_eq t (mk_constr (Adt (id, adts)) cid vts), vts) :: cases
) (new_vs, []) cstrs
in
let new_vs1, t1_cases = expand [] t1 in
let new_vs2, t2_cases = expand new_vs1 t2 in
let cases = List.fold_left
(fun cases (cid1, def_t1, args1) ->
List.fold_left
(fun cases (cid2, def_t2, args2) ->
if cid1 = cid2 then
let seen_adts1 = IdSet.add id seen_adts in
let sub_cases =
List.map2 (fun arg1 arg2 -> elim_neq seen_adts1 bvs (mk_neq arg1 arg2)) args1 args2
in
mk_and [def_t1; def_t2; mk_or sub_cases] :: cases
else mk_and [def_t1; def_t2] :: cases
) cases t2_cases
) [] t1_cases
in
mk_exists ~ann:a new_vs2 (mk_or cases)
| _ -> f)
| BoolOp (Not, [Atom (App (Disjoint, [s1; s2], _), a)]) when bvs = [] ->
let srt = element_sort_of_set s1 in
elim_neq seen_adts bvs (mk_not (Atom (App (Eq, [mk_inter [s1; s2]; mk_empty (Set srt)], Bool), a)))
| BoolOp (Not, [Atom (App (SubsetEq, [s1; s2], _), a)]) when bvs = [] ->
let srt = element_sort_of_set s1 in
let ve = mk_var srt e in
mk_exists [(e, srt)] (annotate (smk_and [smk_elem ve s1; mk_not (smk_elem ve s2)]) a)
| BoolOp (op, fs) ->
smk_op op (List.map (elim_neq IdSet.empty bvs) fs)
| Binder (Exists, vs, f, a) ->
mk_exists ~ann:a vs (elim_neq seen_adts bvs f)
| Binder (Forall, vs, f, a) ->
mk_forall ~ann:a vs (elim_neq seen_adts (bvs @ vs) f)
| f -> f
in
List.map (fun f ->
let f1 = elim_neq IdSet.empty [] f in
let f2 = propagate_exists_up f1 in
let f3 = skolemize f2 in
f3)
(** Hoist all universally quantified subformulas to top level.
** Assumes that formulas [fs] are in negation normal form. *)
let factorize_axioms fs =
let rec extract f axioms =
match f with
| Binder (b, [], g, a) ->
let g1, axioms = extract g axioms in
Binder (b, [], g1, a), axioms
| Binder (Forall, (_ :: _ as vs), f1, a) ->
let p = mk_atom (FreeSym (fresh_ident "Axiom")) [] in
let names, other_annots = List.partition (function Name _ -> true | _ -> false) a in
let fact_axiom = annotate (mk_implies p (Binder (Forall, vs, f1, other_annots))) names in
p, fact_axiom :: axioms
| BoolOp (op, fs) ->
let fs1, axioms =
List.fold_right
(fun f (fs1, axioms) ->
let f1, axioms1 = extract f axioms in
f1 :: fs1, axioms1)
fs ([], axioms)
in
BoolOp (op, fs1), axioms
| f -> f, axioms
in
let process (fs1, axioms) f =
match f with
| Binder (Forall, _ :: _, _, _) -> f :: fs1, axioms
| _ ->
let f1, axioms1 = extract f axioms in
f1 :: fs1, axioms1
in
let fs1, axioms = List.fold_left process ([], []) fs in
axioms @ fs1
(** Add axioms for frame predicates. *)
let field_partitions fs gts =
let fld_partition, fld_map, fields =
let max, fld_map, fields =
TermSet.fold (fun t (n, fld_map, fields) -> match t with
| App (_, _, Map (Loc _ :: _, _)) as fld ->
n+1, TermMap.add fld n fld_map, TermSet.add fld fields
| _ -> n, fld_map, fields)
gts (0, TermMap.empty, TermSet.empty)
in
let rec collect_eq partition = function
| BoolOp (Not, f) -> partition
| BoolOp (op, fs) -> List.fold_left collect_eq partition fs
| Atom (App (Eq, [App (_, _, Map (Loc _ :: _, _)) as fld1; fld2], _), _) ->
Puf.union partition (TermMap.find fld1 fld_map) (TermMap.find fld2 fld_map)
| Binder (_, _, f, _) -> collect_eq partition f
| f -> partition
in
let fld_partition0 = List.fold_left collect_eq (Puf.create max) fs in
let fld_partition =
TermSet.fold (fun t partition ->
match t with
| App (Write, fld1 :: _, _) as fld2
| App (Frame, [_; _; fld1; fld2], _) ->
Puf.union partition (TermMap.find fld1 fld_map) (TermMap.find fld2 fld_map)
| _ -> partition)
gts fld_partition0
in
fld_partition, fld_map, fields
in
let partition_of fld =
let p =
try Puf.find fld_partition (TermMap.find fld fld_map)
with Not_found -> failwith ("did not find field " ^ (string_of_term fld))
in
let res = TermSet.filter (fun fld1 -> Puf.find fld_partition (TermMap.find fld1 fld_map) = p) fields in
res
in
partition_of
(** Add axioms for frame predicates. *)
let add_frame_axioms fs =
let gs = ground_terms ~include_atoms:true (mk_and fs) in
let frame_sorts =
TermSet.fold
(fun t frame_sorts ->
match t with
| App (Frame, [_; _; f; _], _) ->
SortSet.add (sort_of f) frame_sorts
| _ -> frame_sorts
)
gs SortSet.empty
in
SortSet.fold
(fun srt fs ->
match srt with
| Map ((Loc ssrt :: dsrts), rsrt) ->
Axioms.frame_axioms ssrt dsrts rsrt @ fs
| _ -> fs)
frame_sorts fs
let rec valid = function
| BoolOp (op, fs) ->
List.for_all valid fs
| Binder (Forall, [], f, ann) ->
let has_gen = List.for_all (function TermGenerator _ -> false | _ -> true) ann in
if not has_gen then print_form stdout f;
valid f && has_gen
| Binder (_, _, f, ann) ->
valid f
| Atom (_, ann) ->
true
(** Simplifies set constraints and adds axioms for set operations.
** Assumes that f is typed and in negation normal form. *)
let add_set_axioms fs =
let _split ts = List.fold_left (fun (ts1, ts2) t -> (ts2, t :: ts1)) ([], []) ts in
let elem_srts =
let set_srts =
List.fold_left
(fun acc f -> SortSet.union (sorts f) acc)
SortSet.empty fs
in
SortSet.fold (fun set_srt acc ->
match set_srt with
| Set srt -> srt :: acc
| _ -> acc) set_srts []
in
rev_concat [fs; Axioms.set_axioms elem_srts]
(** Compute the set of struct sorts of the domain sort from the set of field terms [flds]. *)
let struct_sorts_of_fields flds =
TermSet.fold
(fun fld structs ->
match fld with
| App (_, _, Map (Loc srt :: _, _)) -> SortSet.add srt structs
| _ -> structs)
flds SortSet.empty
let array_sorts fs =
let rec ars srts = function
| App (_, ts, srt) ->
let srts1 = match srt with
| Loc (Array srt) | Loc (ArrayCell srt) -> SortSet.add srt srts
| _ -> srts
in
List.fold_left ars srts1 ts
| _ -> srts
in
List.fold_left (fold_terms ars) SortSet.empty fs
(** Adds theory axioms for the entry point function to formulas [fs].
** Assumes that all frame predicates have been reduced in formulas [fs]. *)
let add_ep_axioms fs =
let gts = generated_ground_terms fs in
let rec get_struct_sorts acc = function
| App (_, ts, srt) ->
let acc = List.fold_left get_struct_sorts acc ts in
(match srt with
| Map([Loc srt1], Loc srt2) when srt1 = srt2 -> SortSet.add srt1 acc
| _ -> acc)
| Var (_, Map([Loc srt1], srt2)) when srt1 = srt2 -> SortSet.add srt1 acc
| Var _ -> acc
in
let struct_sorts = fold_terms get_struct_sorts SortSet.empty (mk_and fs) in
let axioms = SortSet.fold (fun srt axioms -> Axioms.ep_axioms srt @ axioms) struct_sorts [] in
axioms @ fs
(** [get_read_propagators gts] computes read-term generators for the ground
    terms [gts].  Each propagator is a pair [(matches, gen_terms)]: when the
    patterns in [matches] are matched during E-matching, the terms in
    [gen_terms] are generated.  The propagators push read terms across
    equalities, writes, and frame predicates, specialized per sort: ADTs
    (destructor reads), array cells, arrays, and general maps.
    NOTE(review): several lines below are comments whose [(* *)] markers were
    lost during extraction (they read like mangled prose, e.g. "a = = b ...");
    they are kept verbatim and flagged where they occur. *)
let get_read_propagators gts =
  (* Sorts of all field-like terms in [gts] for which propagators are needed. *)
  let field_sorts = TermSet.fold (fun t srts ->
    match sort_of t with
    | (Loc ArrayCell _ | Map (_ :: _, _)) as srt -> SortSet.add srt srts
    | Adt (_, adts) ->
        List.fold_left
          (fun srts (id, _) -> SortSet.add (Adt (id, adts)) srts)
          srts adts
    | _ -> srts)
      gts SortSet.empty
  in
  let add_propagators = function
  (* ADT sort: propagate destructor applications across equalities and
     generate destructor reads for every constructor application. *)
  | Adt (id, adts) -> fun propagators ->
      let s = fresh_ident "?s" in
      let t = fresh_ident "?t" in
      let cstrs = List.assoc id adts in
      let adt_srt = Adt (id, adts) in
      let s = mk_var adt_srt s in
      let t = mk_var adt_srt t in
      let destrs = flat_map (fun (_, destrs) -> destrs) cstrs in
      let propagators =
        List.fold_left
          (fun propagators (destr, srt) ->
            let srt = unfold_adts adts srt in
            ([Match (mk_eq_term s t, []);
              (* s == t, s.destr -> t.destr *)
              Match (mk_destr srt destr s, [])],
             [mk_destr srt destr t]) ::
            ([Match (mk_eq_term s t, []);
              (* s == t, t.destr -> s.destr *)
              Match (mk_destr srt destr t, [])],
             [mk_destr srt destr s]) :: propagators
          )
          propagators destrs
      in
      (* For every constructor term, generate the reads of all its destructors. *)
      List.fold_left (fun propagators (cid, destrs) ->
        let args =
          List.map (fun (destr, srt) ->
            let srt = unfold_adts adts srt in
            mk_var srt (fresh_ident "?v"))
            destrs
        in
        let t = mk_constr adt_srt cid args in
        let gen_terms =
          List.map (fun (destr, srt) -> mk_destr (unfold_adts adts srt) destr t) destrs
        in
        ([Match (t, [])], gen_terms) :: propagators)
        propagators cstrs
  (* Array cell sort: propagate cell-field reads across array equalities. *)
  | Loc (ArrayCell srt) -> fun propagators ->
      let f = fresh_ident "?f", field_sort (ArrayCell srt) srt in
      let fld = mk_var (snd f) (fst f) in
      let a = Axioms.loc1 (Array srt) in
      let b = Axioms.loc2 (Array srt) in
      let i = fresh_ident "?i" in
      let idx = mk_var Int i in
      (* a == b, a.cells[i].f -> b.cells[i].f *)
      ([Match (mk_eq_term a b, []);
        Match (mk_read fld [mk_read (mk_array_cells a) [idx]], [])],
       [mk_read fld [mk_read (mk_array_cells b) [idx]]]) ::
      ([Match (mk_eq_term a b, []);
        (* a == b, b.cells[i].f -> a.cells[i].f *)
        Match (mk_read fld [mk_read (mk_array_cells b) [idx]], [])],
       [mk_read fld [mk_read (mk_array_cells a) [idx]]]) :: propagators
  (* Array sort: propagate indexed reads across array equalities. *)
  | Loc (Array srt) -> fun propagators ->
      let f = fresh_ident "?f", Map ([Loc (Array srt); Int], srt) in
      let fld = mk_var (snd f) (fst f) in
      let a = Axioms.loc1 (Array srt) in
      let b = Axioms.loc2 (Array srt) in
      let i = fresh_ident "?i" in
      let idx = mk_var Int i in
      (* NOTE(review): the next line is a stripped comment, kept verbatim. *)
      a = = b , a.f[i ] - > b.f[i ]
      ([Match (mk_eq_term a b, []);
        Match (mk_read fld [a; idx], [])],
       [mk_read fld [b; idx]]) ::
      (* a == b, b.cells[i].f -> a.cells[i].f *)
      ([Match (mk_eq_term a b, []);
        Match (mk_read fld [mk_read (mk_array_cells b) [idx]], [])],
       [mk_read fld [mk_read (mk_array_cells a) [idx]]]) :: propagators
  (* General map sort: propagate reads across writes, equalities, and frames,
     with extra cases when the range sort is itself an ADT or a map. *)
  | Map (dsrts, srt) as mapsrt -> fun propagators ->
      let f = fresh_ident "?f", mapsrt in
      let fvar = mk_var (snd f) (fst f) in
      let g = fresh_ident "?g", mapsrt in
      let gvar = mk_var (snd g) (fst g) in
      let d = fresh_ident "?d" in
      let dvar = mk_var srt d in
      let xvars = List.map (fun srt -> mk_var srt (fresh_ident "?i")) dsrts in
      let yvars = List.map (fun srt -> mk_var srt (fresh_ident "?j")) dsrts in
      let propagators =
        match srt with
        | Adt (id, adts) ->
            let cstrs = List.assoc id adts in
            let destrs = flat_map (fun (_, destrs) -> destrs) cstrs in
            let propagators =
              List.fold_left
                (fun propagators -> function
                  | (destr, (Map ([dsrt2], _) as srt)) ->
                      let srt = unfold_adts adts srt in
                      let zvar = mk_var (unfold_adts adts dsrt2) (fresh_ident "?z") in
                      (* f[x := d], f[y].destr[z] -> f[x := d][y].destr[z] *)
                      ([Match (mk_write fvar xvars dvar, []);
                        Match (mk_read (mk_destr srt destr (mk_read fvar yvars)) [zvar], [])],
                       [mk_read (mk_destr srt destr (mk_read (mk_write fvar xvars dvar) yvars)) [zvar]]) ::
                      (* f[x := d].destr[z] -> f[y].destr[z] *)
                      ([Match (mk_read (mk_destr srt destr (mk_read (mk_write fvar xvars dvar) yvars)) [zvar], [])],
                       [mk_read (mk_destr srt destr (mk_read fvar yvars)) [zvar]])
                      :: propagators
                  | _ -> propagators
                )
                propagators destrs
            in
            propagators
        | Map (dsrts2, Adt (id, adts)) ->
            let cstrs = List.assoc id adts in
            let destrs = flat_map (fun (_, destrs) -> destrs) cstrs in
            let uvars = List.map (fun srt -> mk_var srt (fresh_ident "?u")) dsrts2 in
            let propagators =
              List.fold_left
                (fun propagators -> function
                  | (destr, (Map ([dsrt2], _) as srt)) ->
                      let srt = unfold_adts adts srt in
                      let zvar = mk_var (unfold_adts adts dsrt2) (fresh_ident "?z") in
                      (* f[x := d], f[y][u].destr[z] -> f[x := d][y][u].destr[z] *)
                      ([Match (mk_write fvar xvars dvar, []);
                        Match (mk_read (mk_destr srt destr (mk_read (mk_read fvar yvars) uvars)) [zvar], [])],
                       [mk_read (mk_destr srt destr (mk_read (mk_read (mk_write fvar xvars dvar) yvars) uvars)) [zvar]]) ::
                      (* f[x := d][u].destr[z], f[y] -> f[y][u].destr[z] *)
                      ([Match (mk_read fvar yvars, []);
                        Match (mk_read (mk_destr srt destr (mk_read (mk_read (mk_write fvar xvars dvar) yvars) uvars)) [zvar], [])],
                       [mk_read (mk_destr srt destr (mk_read (mk_read fvar yvars) uvars)) [zvar]])
                      :: propagators
                  | _ -> propagators
                )
                propagators destrs
            in
            propagators
        | Map (dsrts2, Map(dsrts3, rsrt)) ->
            let uvars = List.map (fun srt -> mk_var srt (fresh_ident "?u")) dsrts2 in
            let zvars = List.map (fun srt -> mk_var srt (fresh_ident "?z")) dsrts3 in
            (* f[x := d], f[y][u] -> f[x := d][y][u] *)
            ([Match (mk_write fvar xvars dvar, []);
              Match (mk_read (mk_read (mk_read fvar yvars) uvars) zvars, [])],
             [mk_read (mk_read (mk_read (mk_write fvar xvars dvar) yvars) uvars) zvars]) ::
            (* f[x := d][u].destr[z], f[y] -> f[y][u].destr[z] *)
            ([Match (mk_read fvar yvars, []);
              Match (mk_read (mk_read (mk_read (mk_write fvar xvars dvar) yvars) uvars) zvars, [])],
             [mk_read (mk_read (mk_read fvar yvars) uvars) zvars]) :: propagators
        | Map (dsrts2, rsrt) ->
            let uvars = List.map (fun srt -> mk_var srt (fresh_ident "?u")) dsrts2 in
            (* f[x := d], f[y][u] -> f[x := d][y][u] *)
            ([Match (mk_write fvar xvars dvar, []);
              Match (mk_read (mk_read fvar yvars) uvars, [])],
             [mk_read (mk_read (mk_write fvar xvars dvar) yvars) uvars]) ::
            (* f[x := d][u].destr[z], f[y] -> f[y][u].destr[z] *)
            ([Match (mk_read fvar yvars, []);
              Match (mk_read (mk_read (mk_write fvar xvars dvar) yvars) uvars, [])],
             [mk_read (mk_read fvar yvars) uvars]) :: propagators
        | _ -> propagators
      in
      (* Struct sort of the first domain sort, if it is a location sort. *)
      let ssrt_opt = match dsrts with
      | Loc ssrt :: _ -> Some ssrt
      | _ -> None
      in
      let match_ivar1 =
        ssrt_opt |>
        Opt.map (fun _ -> Match (List.hd xvars, [FilterNotNull])) |>
        Opt.to_list
      in
      (* Propagation across frame predicates Frame(x, a, f, g).
         NOTE(review): the two mangled lines below are stripped comments;
         the opening bracket of the pair list appears to have been lost with
         the first of them. Kept verbatim. *)
      let gen_frame wrap =
        ssrt_opt |>
        Opt.map (fun ssrt ->
          let set1 = Axioms.set1 ssrt in
          let set2 = Axioms.set2 ssrt in
          Frame ( x , a , f , ) , y.g - > y.f
          ([Match (mk_frame_term set1 set2 fvar gvar, []);
            Match (wrap (mk_read gvar (xvars)), [])] @
            match_ivar1,
           [wrap (mk_read fvar (xvars))]);
          Frame ( x , a , f , ) , y.f - > y.g
          ([Match (mk_frame_term set1 set2 fvar gvar, []);
            Match (wrap (mk_read fvar (xvars)), [])],
           [wrap (mk_read gvar (xvars))])
          ]) |>
        Opt.get_or_else []
      in
      (* Generators parameterized by a wrapper [wrap] applied to each read.
         NOTE(review): two stripped comment lines ("f = = g , ...") below,
         kept verbatim. *)
      let mk_generators wrap =
        ([Match (mk_eq_term fvar gvar, []);
          Match (wrap (mk_read fvar (xvars)), []);
        ],
         [wrap (mk_read gvar (xvars))]) ::
        f = = g , x.g
        ([Match (mk_eq_term fvar gvar, []);
          Match (wrap (mk_read fvar (xvars)), [])
        ],
         [wrap (mk_read gvar (xvars))]) ::
        f = = g , x.g - > x.f
        ([Match (mk_eq_term fvar gvar, []);
          Match (wrap (mk_read gvar (xvars)), [])] @ match_ivar1,
         [wrap (mk_read fvar (xvars))]) ::
        (* f [x := d], y.(f [x := d]) -> y.f *)
        ([Match (mk_write fvar xvars dvar, []);
          Match (wrap (mk_read (mk_write fvar xvars dvar) yvars), []);
          (*Match (loc1, [FilterNotNull]);*)
          (*Match (loc2, [FilterNotNull])*)],
         [wrap (mk_read fvar yvars)]) ::
        (* f [x := d], y.f -> y.(f [x := d]) *)
        ([Match (mk_write fvar xvars dvar, []);
          Match (wrap (mk_read fvar (yvars)), []);
          (*Match (loc1, [FilterNotNull]);*)
          (*Match (loc2, [FilterNotNull])*)],
         [wrap (mk_read (mk_write fvar (xvars) dvar) (yvars))]) ::
        gen_frame wrap
      in
      (* NOTE(review): the next line appears to be the mangled remainder of
         the case's result expression (part comment, part code); the leading
         operand of [@] seems to have been lost in extraction. Kept verbatim. *)
      @ mk_generators ( fun t - > mk_known t )
  | _ -> fun propagators -> propagators
  in
  SortSet.fold add_propagators field_sorts []
(** Adds null axioms (forced-instantiated via E-matching) and read-over-write
    axioms to the formulas [fs], and attaches the read propagators computed by
    {!get_read_propagators} as [TermGenerator] annotations on the first
    formula. *)
let add_read_write_axioms fs =
  let gts = ground_terms ~include_atoms:true (mk_and fs) in
  (* NOTE(review): the match pattern of the first case below was lost in
     extraction — only the guard body [id1 = id2] remains. Kept verbatim. *)
  let has_loc_field_sort = function
    id1 = id2
    | _ -> false
  in
  let basic_pt_flds = TermSet.filter (has_loc_field_sort &&& is_free_const) gts in
  let basic_structs = struct_sorts_of_fields basic_pt_flds in
  (* instantiate null axioms *)
  let axioms = SortSet.fold (fun srt axioms -> Axioms.null_axioms srt @ axioms) basic_structs [] in
  let null_ax, generators = open_axioms ~force:true isFld axioms in
  (* Restrict the single-match generator to basic point fields only.
     NOTE(review): the first pattern looks garbled — a singleton list of a
     pair [([Match ...], t)] would be expected here. Kept verbatim. *)
  let generators = match generators with
    | [[Match (v, _)], t] -> [[Match (v, [FilterGeneric (fun sm t -> TermSet.mem (subst_term sm v) basic_pt_flds)])], t]
    | gs -> gs
  in
  let ccgraph = CongruenceClosure.congruence_classes gts fs in
  (* Run the term generators once to enrich the congruence-closure graph. *)
  let _ =
    let tgcode = EMatching.add_term_generators_to_ematch_code EMatching.empty generators in
    EMatching.generate_terms_from_code tgcode ccgraph
  in
  (* CAUTION: not forcing the instantiation here would yield an inconsistency with the read/write axioms *)
  let null_ax1 = EMatching.instantiate_axioms ~force:true null_ax ccgraph in
  let fs1 = null_ax1 @ fs in
  let gts = TermSet.union (ground_terms ~include_atoms:true (mk_and null_ax1)) (CongruenceClosure.get_terms ccgraph) in
  (* propagate read terms *)
  (* generate instances of all read over write axioms *)
  let read_write_ax =
    let generators_and_axioms =
      TermSet.fold (fun t acc ->
        match t with
        | App (Write, fld :: _, _) ->
            Axioms.read_write_axioms fld @ acc
        | _ -> acc) gts []
    in
    generators_and_axioms
  in
  let read_propagators =
    List.map (fun (ms, t) -> TermGenerator (ms, t)) (get_read_propagators gts)
  in
  (* Attach all read propagators to the first formula, if any. *)
  let fs1 =
    match fs1 with
    | f :: fs1 ->
        annotate f read_propagators :: fs1
    | [] -> []
  in
  rev_concat [read_write_ax; fs1]
(** Adds instantiated theory axioms for graph reachability to the formulas
    [fs].  Reach axioms are produced for every struct sort [s] whose field
    sort [Map ([Loc s], Loc s)] occurs in [fs].  Assumes that [fs] is typed. *)
let add_reach_axioms fs =
  (* Struct sorts of all self-mapped location fields occurring in [fs]. *)
  let add_if_loc_field srt acc =
    match srt with
    | Map ([Loc dom], Loc rng) when dom = rng -> SortSet.add dom acc
    | _ -> acc
  in
  let struct_sorts =
    SortSet.fold add_if_loc_field (sorts (mk_and fs)) SortSet.empty
  in
  (* Congruence classes of the terms in [fs]; used to instantiate the
     reach axioms. *)
  let classes =
    CongruenceClosure.get_classes
      (CongruenceClosure.congruence_classes TermSet.empty fs)
  in
  let mk_axioms srt acc =
    Axioms.reach_axioms classes srt @ Axioms.reach_write_axioms srt @ acc
  in
  rev_concat [SortSet.fold mk_axioms struct_sorts []; fs]
(** Adds the array theory axioms for every array element sort occurring in
    the formulas [fs]. *)
let add_array_axioms fs =
  let axioms_for srt acc = Axioms.array_axioms srt @ acc in
  SortSet.fold axioms_for (array_sorts fs) [] @ fs
(** Encode label annotations as propositional guards: every label
    [Label (pol, t)] on a formula contributes the guard atom [t] (negated if
    [pol] is [false]), which is conjoined with the labeled formula. *)
let encode_labels fs =
  (* The guard formula contributed by a single annotation, if any. *)
  let guard_of = function
    | Label (true, t) -> Some (Atom (t, []))
    | Label (false, t) -> Some (mk_not (Atom (t, [])))
    | _ -> None
  in
  (* Conjoin [f] with the guards of all labels among [annots]. *)
  let attach_guards annots f = mk_and (f :: Util.partial_map guard_of annots) in
  let rec encode f =
    match f with
    | Binder (b, vs, g, annots) ->
        attach_guards annots (Binder (b, vs, encode g, annots))
    | BoolOp (Not, [Atom (_, annots)]) -> attach_guards annots f
    | Atom (_, annots) -> attach_guards annots f
    | BoolOp (op, gs) -> BoolOp (op, List.map encode gs)
  in
  List.rev_map encode fs
(** Reduces the given formula to the target theory fragment, as specified by
    the configuration.  The reduction steps are order-sensitive: quantifier
    elimination must come first, and no later step may introduce existential
    quantifiers. *)
let reduce f =
  (* split f into conjuncts and eliminate all existential quantifiers *)
  let f1 = nnf f in
  let fs = elim_exists [f1] in
  let fs = split_ands fs in
  (* *)
  (* no reduction step should introduce implicit or explicit existential quantifiers after this point *)
  (* NOTE(review): the next line is a comment whose markers were lost in
     extraction. Kept verbatim. *)
  some formula rewriting that helps the SMT solver
  let fs = massage_field_reads fs in
  let fs = simplify_sets fs in
  (* Theory axiom additions; each step may consume terms added by the
     previous one. *)
  let fs = add_ep_axioms fs in
  let fs = add_frame_axioms fs in
  let fs = factorize_axioms (split_ands fs) in
  let fs = add_set_axioms fs in
  let fs = add_read_write_axioms fs in
  let fs = add_reach_axioms fs in
  let fs = add_array_axioms fs in
  let fs = if !Config.named_assertions then fs else List.map strip_names fs in
  let fs = fs |> split_ands in
  (* Debug output of the verification condition before instantiation. *)
  let _ =
    if Debug.is_debug 1 then begin
      print_endline "VC before instantiation:";
      print_form stdout (mk_and fs);
      print_newline ()
    end
  in
  let fs = encode_labels fs in
  fs
| null | https://raw.githubusercontent.com/wies/grasshopper/108473b0a678f0d93fffec6da2ad6bcdce5bddb9/src/prover/reduction.ml | ocaml | * Eliminate all implicit and explicit existential quantifiers using skolemization.
** Assumes that [f] is typed and in negation normal form.
* Hoist all universally quantified subformulas to top level.
** Assumes that formulas [fs] are in negation normal form.
* Add axioms for frame predicates.
* Add axioms for frame predicates.
* Simplifies set constraints and adds axioms for set operations.
** Assumes that f is typed and in negation normal form.
* Compute the set of struct sorts of the domain sort from the set of field terms [flds].
* Adds theory axioms for the entry point function to formulas [fs].
** Assumes that all frame predicates have been reduced in formulas [fs].
s == t, s.destr -> t.destr
s == t, t.destr -> s.destr
a == b, a.cells[i].f -> b.cells[i].f
a == b, b.cells[i].f -> a.cells[i].f
a == b, b.cells[i].f -> a.cells[i].f
f[x := d], f[y].destr[z] -> f[x := d][y].destr[z]
f[x := d].destr[z] -> f[y].destr[z]
f[x := d], f[y][u].destr[z] -> f[x := d][y][u].destr[z]
f[x := d][u].destr[z], f[y] -> f[y][u].destr[z]
f[x := d], f[y][u] -> f[x := d][y][u]
f[x := d][u].destr[z], f[y] -> f[y][u].destr[z]
f[x := d], f[y][u] -> f[x := d][y][u]
f[x := d][u].destr[z], f[y] -> f[y][u].destr[z]
f [x := d], y.(f [x := d]) -> y.f
Match (loc1, [FilterNotNull]);
Match (loc2, [FilterNotNull])
f [x := d], y.f -> y.(f [x := d])
Match (loc1, [FilterNotNull]);
Match (loc2, [FilterNotNull])
instantiate null axioms
CAUTION: not forcing the instantiation here would yield an inconsistency with the read/write axioms
propagate read terms
generate instances of all read over write axioms
* Adds instantiated theory axioms for graph reachability to formula f.
** Assumes that f is typed.
* Encode label annotations as propositional guards
* Reduces the given formula to the target theory fragment, as specified b the configuration.
split f into conjuncts and eliminate all existential quantifiers
no reduction step should introduce implicit or explicit existential quantifiers after this point | * { 5 Reduction from GRASS to SMT }
open Util
open Grass
open GrassUtil
open Axioms
open SimplifyGrass
(** Eliminate all implicit and explicit existential quantifiers using
    skolemization.  Disequalities on sets, maps, and ADTs are first rewritten
    into explicitly quantified form ([elim_neq]), existentials are propagated
    upward, and the result is skolemized.  Assumes that the input formulas
    are typed and in negation normal form. *)
let elim_exists =
  let e = fresh_ident "?e" in
  (* Rewrite disequalities between sets/maps/ADTs into existentially
     quantified witnesses; [seen_adts] prevents infinite unfolding of
     recursive ADTs, [bvs] tracks enclosing universally bound variables. *)
  let rec elim_neq seen_adts bvs = function
    | BoolOp (Not, [Atom (App (Eq, [t1; t2], _), a)]) as f when bvs = [] ->
        (match sort_of t1 with
        | Set srt ->
            (* s1 <> s2 iff some element is in exactly one of the two sets. *)
            let ve = mk_var srt e in
            mk_exists [(e, srt)] (smk_or [smk_and [smk_elem ~ann:a ve t1; mk_not (smk_elem ~ann:a ve t2)];
                                          smk_and [smk_elem ~ann:a ve t2; mk_not (smk_elem ~ann:a ve t1)]])
        | Map (dsrts, rsrt) ->
            (* m1 <> m2 iff they differ at some index tuple. *)
            let dom_vs = List.map (fun dsrts -> List.map (fun srt -> fresh_ident "?i", srt) dsrts) [dsrts] in
            let dom_vts = List.map (fun vs -> List.map (fun (v, srt) -> mk_var srt v) vs) dom_vs in
            let mk_reads t = List.fold_left (fun t_read vts -> mk_read t_read vts) t dom_vts in
            let t1_read = mk_reads t1 in
            let t2_read = mk_reads t2 in
            let vs = List.flatten dom_vs in
            mk_and [f; elim_neq seen_adts bvs (mk_exists vs (annotate (mk_neq t1_read t2_read) a))]
        | Adt (id, adts) when not @@ IdSet.mem id seen_adts ->
            (* ADT disequality: case split on the constructors of both sides;
               two values differ iff their constructors differ or some pair of
               corresponding arguments differs. *)
            let cstrs = List.assoc id adts in
            let expand new_vs = function
              | App (Constructor cid, ts, _) -> new_vs, [(cid, mk_true, ts)]
              | t ->
                  match cstrs with
                  | [cid, dstrs] ->
                      let ts =
                        List.map (fun (id, srt) -> mk_destr srt id t) dstrs
                      in
                      (new_vs, [cid, mk_true, ts])
                  | _ ->
                      List.fold_left
                        (fun (new_vs, cases) (cid, dstrs) ->
                          let vs = List.map (fun (id, srt) -> fresh_ident "?x", unfold_adts adts srt) dstrs in
                          let vts = List.map (fun (v, srt) -> mk_var srt v) vs in
                          vs @ new_vs, (cid, mk_eq t (mk_constr (Adt (id, adts)) cid vts), vts) :: cases
                        ) (new_vs, []) cstrs
            in
            let new_vs1, t1_cases = expand [] t1 in
            let new_vs2, t2_cases = expand new_vs1 t2 in
            let cases = List.fold_left
                (fun cases (cid1, def_t1, args1) ->
                  List.fold_left
                    (fun cases (cid2, def_t2, args2) ->
                      if cid1 = cid2 then
                        let seen_adts1 = IdSet.add id seen_adts in
                        let sub_cases =
                          List.map2 (fun arg1 arg2 -> elim_neq seen_adts1 bvs (mk_neq arg1 arg2)) args1 args2
                        in
                        mk_and [def_t1; def_t2; mk_or sub_cases] :: cases
                      else mk_and [def_t1; def_t2] :: cases
                    ) cases t2_cases
                ) [] t1_cases
            in
            mk_exists ~ann:a new_vs2 (mk_or cases)
        | _ -> f)
    | BoolOp (Not, [Atom (App (Disjoint, [s1; s2], _), a)]) when bvs = [] ->
        (* not Disjoint(s1, s2) is rewritten as s1 inter s2 <> {}. *)
        let srt = element_sort_of_set s1 in
        elim_neq seen_adts bvs (mk_not (Atom (App (Eq, [mk_inter [s1; s2]; mk_empty (Set srt)], Bool), a)))
    | BoolOp (Not, [Atom (App (SubsetEq, [s1; s2], _), a)]) when bvs = [] ->
        (* not (s1 subsetof s2): some element is in s1 but not in s2. *)
        let srt = element_sort_of_set s1 in
        let ve = mk_var srt e in
        mk_exists [(e, srt)] (annotate (smk_and [smk_elem ve s1; mk_not (smk_elem ve s2)]) a)
    | BoolOp (op, fs) ->
        smk_op op (List.map (elim_neq IdSet.empty bvs) fs)
    | Binder (Exists, vs, f, a) ->
        mk_exists ~ann:a vs (elim_neq seen_adts bvs f)
    | Binder (Forall, vs, f, a) ->
        mk_forall ~ann:a vs (elim_neq seen_adts (bvs @ vs) f)
    | f -> f
  in
  List.map (fun f ->
    let f1 = elim_neq IdSet.empty [] f in
    let f2 = propagate_exists_up f1 in
    let f3 = skolemize f2 in
    f3)
(** Hoist all universally quantified subformulas in [fs] to top level.  Each
    non-top-level universal subformula is replaced by a fresh propositional
    symbol [p], and the axiom [p => forall ...] is emitted alongside.
    [Name] annotations stay on the emitted axiom.  Assumes that the formulas
    [fs] are in negation normal form. *)
let factorize_axioms fs =
  (* Replace quantified subformulas of [f] by fresh atoms, accumulating the
     corresponding implication axioms in [axioms]. *)
  let rec extract f axioms =
    match f with
    | Binder (b, [], g, a) ->
        let g1, axioms = extract g axioms in
        Binder (b, [], g1, a), axioms
    | Binder (Forall, (_ :: _ as vs), f1, a) ->
        let p = mk_atom (FreeSym (fresh_ident "Axiom")) [] in
        let names, other_annots = List.partition (function Name _ -> true | _ -> false) a in
        let fact_axiom = annotate (mk_implies p (Binder (Forall, vs, f1, other_annots))) names in
        p, fact_axiom :: axioms
    | BoolOp (op, fs) ->
        let fs1, axioms =
          List.fold_right
            (fun f (fs1, axioms) ->
              let f1, axioms1 = extract f axioms in
              f1 :: fs1, axioms1)
            fs ([], axioms)
        in
        BoolOp (op, fs1), axioms
    | f -> f, axioms
  in
  (* Top-level universals are already axioms; only factorize inside others. *)
  let process (fs1, axioms) f =
    match f with
    | Binder (Forall, _ :: _, _, _) -> f :: fs1, axioms
    | _ ->
        let f1, axioms1 = extract f axioms in
        f1 :: fs1, axioms1
  in
  let fs1, axioms = List.fold_left process ([], []) fs in
  axioms @ fs1
(** [field_partitions fs gts] partitions the field-sorted terms of [gts] into
    equivalence classes.  Two fields end up in the same class if they are
    syntactically equated (positively) in [fs], or related by a [Write] or
    [Frame] term in [gts].  Returns a function mapping a field term to the set
    of all fields in its class.
    @raise Failure if the argument field is not among the fields of [gts]. *)
let field_partitions fs gts =
  let fld_partition, fld_map, fields =
    (* Number the field terms of [gts] for use as union-find indices. *)
    let max, fld_map, fields =
      TermSet.fold (fun t (n, fld_map, fields) -> match t with
      | App (_, _, Map (Loc _ :: _, _)) as fld ->
          n+1, TermMap.add fld n fld_map, TermSet.add fld fields
      | _ -> n, fld_map, fields)
        gts (0, TermMap.empty, TermSet.empty)
    in
    (* Merge fields that appear in positive equalities of [fs]; negated
       subformulas are skipped. *)
    let rec collect_eq partition = function
      | BoolOp (Not, f) -> partition
      | BoolOp (op, fs) -> List.fold_left collect_eq partition fs
      | Atom (App (Eq, [App (_, _, Map (Loc _ :: _, _)) as fld1; fld2], _), _) ->
          Puf.union partition (TermMap.find fld1 fld_map) (TermMap.find fld2 fld_map)
      | Binder (_, _, f, _) -> collect_eq partition f
      | f -> partition
    in
    let fld_partition0 = List.fold_left collect_eq (Puf.create max) fs in
    (* Additionally merge the source/target fields of writes and frames. *)
    let fld_partition =
      TermSet.fold (fun t partition ->
        match t with
        | App (Write, fld1 :: _, _) as fld2
        | App (Frame, [_; _; fld1; fld2], _) ->
            Puf.union partition (TermMap.find fld1 fld_map) (TermMap.find fld2 fld_map)
        | _ -> partition)
        gts fld_partition0
    in
    fld_partition, fld_map, fields
  in
  (* Look up the class of [fld] and return all fields in the same class. *)
  let partition_of fld =
    let p =
      try Puf.find fld_partition (TermMap.find fld fld_map)
      with Not_found -> failwith ("did not find field " ^ (string_of_term fld))
    in
    let res = TermSet.filter (fun fld1 -> Puf.find fld_partition (TermMap.find fld1 fld_map) = p) fields in
    res
  in
  partition_of
(** Adds the frame axioms for every field sort that occurs as the third
    argument of a [Frame] term in the formulas [fs]. *)
let add_frame_axioms fs =
  let gs = ground_terms ~include_atoms:true (mk_and fs) in
  (* Sorts of the field arguments of all Frame terms in [fs]. *)
  let add_frame_sort t acc =
    match t with
    | App (Frame, [_; _; f; _], _) -> SortSet.add (sort_of f) acc
    | _ -> acc
  in
  let frame_sorts = TermSet.fold add_frame_sort gs SortSet.empty in
  (* Instantiate the frame axioms for each collected field sort. *)
  let add_axioms srt acc =
    match srt with
    | Map (Loc ssrt :: dsrts, rsrt) -> Axioms.frame_axioms ssrt dsrts rsrt @ acc
    | _ -> acc
  in
  SortSet.fold add_axioms frame_sorts fs
(* Sanity check on a formula: returns [true] iff no dummy-quantified
   subformula [Binder (Forall, [], ...)] carries a [TermGenerator]
   annotation; offending subformulas are printed to stdout.
   NOTE(review): [has_gen] is [true] when NO [TermGenerator] is present —
   the name reads inverted relative to its meaning; confirm intent. *)
let rec valid = function
  | BoolOp (op, fs) ->
      List.for_all valid fs
  | Binder (Forall, [], f, ann) ->
      let has_gen = List.for_all (function TermGenerator _ -> false | _ -> true) ann in
      (* Print the subformula whose annotations violate the check. *)
      if not has_gen then print_form stdout f;
      valid f && has_gen
  | Binder (_, _, f, ann) ->
      valid f
  | Atom (_, ann) ->
      true
(** Adds the set theory axioms for every set element sort occurring in the
    formulas [fs]. *)
let add_set_axioms fs =
  (* NOTE(review): [_split] is unused below — possibly leftover code. *)
  let _split ts = List.fold_left (fun (ts1, ts2) t -> (ts2, t :: ts1)) ([], []) ts in
  (* Element sorts of all set sorts occurring in [fs]. *)
  let elem_srts =
    let set_srts =
      List.fold_left
        (fun acc f -> SortSet.union (sorts f) acc)
        SortSet.empty fs
    in
    SortSet.fold (fun set_srt acc ->
      match set_srt with
      | Set srt -> srt :: acc
      | _ -> acc) set_srts []
  in
  rev_concat [fs; Axioms.set_axioms elem_srts]
(** Compute the set of struct sorts of the domain sort from the set of field
    terms [flds]. *)
let struct_sorts_of_fields flds =
  let add_domain_sort fld acc =
    match fld with
    | App (_, _, Map (Loc srt :: _, _)) -> SortSet.add srt acc
    | _ -> acc
  in
  TermSet.fold add_domain_sort flds SortSet.empty
(** Collects the element sorts of all array and array-cell location sorts
    occurring in the terms of the formulas [fs]. *)
let array_sorts fs =
  let rec collect acc t =
    match t with
    | App (_, args, srt) ->
        let acc =
          match srt with
          | Loc (Array esrt) | Loc (ArrayCell esrt) -> SortSet.add esrt acc
          | _ -> acc
        in
        List.fold_left collect acc args
    | _ -> acc
  in
  List.fold_left (fold_terms collect) SortSet.empty fs
(** Adds theory axioms for the entry point function to formulas [fs].
    Assumes that all frame predicates have been reduced in formulas [fs]. *)
let add_ep_axioms fs =
  (* NOTE(review): [gts] is unused below — confirm whether this call is
     dead code. *)
  let gts = generated_ground_terms fs in
  (* Collect the struct sorts of all self-mapped location fields in [fs]. *)
  let rec get_struct_sorts acc = function
    | App (_, ts, srt) ->
        let acc = List.fold_left get_struct_sorts acc ts in
        (match srt with
        | Map([Loc srt1], Loc srt2) when srt1 = srt2 -> SortSet.add srt1 acc
        | _ -> acc)
    (* NOTE(review): this guard compares the domain struct sort to the bare
       range sort, unlike the [App] case — confirm intended. *)
    | Var (_, Map([Loc srt1], srt2)) when srt1 = srt2 -> SortSet.add srt1 acc
    | Var _ -> acc
  in
  let struct_sorts = fold_terms get_struct_sorts SortSet.empty (mk_and fs) in
  let axioms = SortSet.fold (fun srt axioms -> Axioms.ep_axioms srt @ axioms) struct_sorts [] in
  axioms @ fs
(** [get_read_propagators gts] computes read-term generators for the ground
    terms [gts]: pairs [(matches, gen_terms)] that propagate read terms
    across equalities, writes, and frame predicates, specialized per sort
    (ADTs, array cells, arrays, and general maps).
    NOTE(review): this copy of the function had its comments stripped during
    extraction; some stripped comment lines left mangled residue (e.g.
    "a = = b ..."), and two stripped lines took a trailing "],'" with them,
    leaving the [mk_generators] write cases syntactically broken. All such
    lines are kept verbatim and flagged below. *)
let get_read_propagators gts =
  (* Sorts of all field-like terms in [gts] for which propagators are needed. *)
  let field_sorts = TermSet.fold (fun t srts ->
    match sort_of t with
    | (Loc ArrayCell _ | Map (_ :: _, _)) as srt -> SortSet.add srt srts
    | Adt (_, adts) ->
        List.fold_left
          (fun srts (id, _) -> SortSet.add (Adt (id, adts)) srts)
          srts adts
    | _ -> srts)
      gts SortSet.empty
  in
  let add_propagators = function
  (* ADT sort: propagate destructor applications across equalities and
     generate destructor reads for constructor applications. *)
  | Adt (id, adts) -> fun propagators ->
      let s = fresh_ident "?s" in
      let t = fresh_ident "?t" in
      let cstrs = List.assoc id adts in
      let adt_srt = Adt (id, adts) in
      let s = mk_var adt_srt s in
      let t = mk_var adt_srt t in
      let destrs = flat_map (fun (_, destrs) -> destrs) cstrs in
      let propagators =
        List.fold_left
          (fun propagators (destr, srt) ->
            let srt = unfold_adts adts srt in
            (* s == t, s.destr -> t.destr *)
            ([Match (mk_eq_term s t, []);
              Match (mk_destr srt destr s, [])],
             [mk_destr srt destr t]) ::
            (* s == t, t.destr -> s.destr *)
            ([Match (mk_eq_term s t, []);
              Match (mk_destr srt destr t, [])],
             [mk_destr srt destr s]) :: propagators
          )
          propagators destrs
      in
      List.fold_left (fun propagators (cid, destrs) ->
        let args =
          List.map (fun (destr, srt) ->
            let srt = unfold_adts adts srt in
            mk_var srt (fresh_ident "?v"))
            destrs
        in
        let t = mk_constr adt_srt cid args in
        let gen_terms =
          List.map (fun (destr, srt) -> mk_destr (unfold_adts adts srt) destr t) destrs
        in
        ([Match (t, [])], gen_terms) :: propagators)
        propagators cstrs
  (* Array cell sort: propagate cell-field reads across array equalities. *)
  | Loc (ArrayCell srt) -> fun propagators ->
      let f = fresh_ident "?f", field_sort (ArrayCell srt) srt in
      let fld = mk_var (snd f) (fst f) in
      let a = Axioms.loc1 (Array srt) in
      let b = Axioms.loc2 (Array srt) in
      let i = fresh_ident "?i" in
      let idx = mk_var Int i in
      (* a == b, a.cells[i].f -> b.cells[i].f *)
      ([Match (mk_eq_term a b, []);
        Match (mk_read fld [mk_read (mk_array_cells a) [idx]], [])],
       [mk_read fld [mk_read (mk_array_cells b) [idx]]]) ::
      (* a == b, b.cells[i].f -> a.cells[i].f *)
      ([Match (mk_eq_term a b, []);
        Match (mk_read fld [mk_read (mk_array_cells b) [idx]], [])],
       [mk_read fld [mk_read (mk_array_cells a) [idx]]]) :: propagators
  (* Array sort: propagate indexed reads across array equalities. *)
  | Loc (Array srt) -> fun propagators ->
      let f = fresh_ident "?f", Map ([Loc (Array srt); Int], srt) in
      let fld = mk_var (snd f) (fst f) in
      let a = Axioms.loc1 (Array srt) in
      let b = Axioms.loc2 (Array srt) in
      let i = fresh_ident "?i" in
      let idx = mk_var Int i in
      (* NOTE(review): stripped comment line, kept verbatim. *)
      a = = b , a.f[i ] - > b.f[i ]
      ([Match (mk_eq_term a b, []);
        Match (mk_read fld [a; idx], [])],
       [mk_read fld [b; idx]]) ::
      (* a == b, b.cells[i].f -> a.cells[i].f *)
      ([Match (mk_eq_term a b, []);
        Match (mk_read fld [mk_read (mk_array_cells b) [idx]], [])],
       [mk_read fld [mk_read (mk_array_cells a) [idx]]]) :: propagators
  (* General map sort: propagate reads across writes, equalities, and frames,
     with extra cases when the range sort is itself an ADT or a map. *)
  | Map (dsrts, srt) as mapsrt -> fun propagators ->
      let f = fresh_ident "?f", mapsrt in
      let fvar = mk_var (snd f) (fst f) in
      let g = fresh_ident "?g", mapsrt in
      let gvar = mk_var (snd g) (fst g) in
      let d = fresh_ident "?d" in
      let dvar = mk_var srt d in
      let xvars = List.map (fun srt -> mk_var srt (fresh_ident "?i")) dsrts in
      let yvars = List.map (fun srt -> mk_var srt (fresh_ident "?j")) dsrts in
      let propagators =
        match srt with
        | Adt (id, adts) ->
            let cstrs = List.assoc id adts in
            let destrs = flat_map (fun (_, destrs) -> destrs) cstrs in
            let propagators =
              List.fold_left
                (fun propagators -> function
                  | (destr, (Map ([dsrt2], _) as srt)) ->
                      let srt = unfold_adts adts srt in
                      let zvar = mk_var (unfold_adts adts dsrt2) (fresh_ident "?z") in
                      (* f[x := d], f[y].destr[z] -> f[x := d][y].destr[z] *)
                      ([Match (mk_write fvar xvars dvar, []);
                        Match (mk_read (mk_destr srt destr (mk_read fvar yvars)) [zvar], [])],
                       [mk_read (mk_destr srt destr (mk_read (mk_write fvar xvars dvar) yvars)) [zvar]]) ::
                      (* f[x := d].destr[z] -> f[y].destr[z] *)
                      ([Match (mk_read (mk_destr srt destr (mk_read (mk_write fvar xvars dvar) yvars)) [zvar], [])],
                       [mk_read (mk_destr srt destr (mk_read fvar yvars)) [zvar]])
                      :: propagators
                  | _ -> propagators
                )
                propagators destrs
            in
            propagators
        | Map (dsrts2, Adt (id, adts)) ->
            let cstrs = List.assoc id adts in
            let destrs = flat_map (fun (_, destrs) -> destrs) cstrs in
            let uvars = List.map (fun srt -> mk_var srt (fresh_ident "?u")) dsrts2 in
            let propagators =
              List.fold_left
                (fun propagators -> function
                  | (destr, (Map ([dsrt2], _) as srt)) ->
                      let srt = unfold_adts adts srt in
                      let zvar = mk_var (unfold_adts adts dsrt2) (fresh_ident "?z") in
                      (* f[x := d], f[y][u].destr[z] -> f[x := d][y][u].destr[z] *)
                      ([Match (mk_write fvar xvars dvar, []);
                        Match (mk_read (mk_destr srt destr (mk_read (mk_read fvar yvars) uvars)) [zvar], [])],
                       [mk_read (mk_destr srt destr (mk_read (mk_read (mk_write fvar xvars dvar) yvars) uvars)) [zvar]]) ::
                      (* f[x := d][u].destr[z], f[y] -> f[y][u].destr[z] *)
                      ([Match (mk_read fvar yvars, []);
                        Match (mk_read (mk_destr srt destr (mk_read (mk_read (mk_write fvar xvars dvar) yvars) uvars)) [zvar], [])],
                       [mk_read (mk_destr srt destr (mk_read (mk_read fvar yvars) uvars)) [zvar]])
                      :: propagators
                  | _ -> propagators
                )
                propagators destrs
            in
            propagators
        | Map (dsrts2, Map(dsrts3, rsrt)) ->
            let uvars = List.map (fun srt -> mk_var srt (fresh_ident "?u")) dsrts2 in
            let zvars = List.map (fun srt -> mk_var srt (fresh_ident "?z")) dsrts3 in
            (* f[x := d], f[y][u] -> f[x := d][y][u] *)
            ([Match (mk_write fvar xvars dvar, []);
              Match (mk_read (mk_read (mk_read fvar yvars) uvars) zvars, [])],
             [mk_read (mk_read (mk_read (mk_write fvar xvars dvar) yvars) uvars) zvars]) ::
            (* f[x := d][u].destr[z], f[y] -> f[y][u].destr[z] *)
            ([Match (mk_read fvar yvars, []);
              Match (mk_read (mk_read (mk_read (mk_write fvar xvars dvar) yvars) uvars) zvars, [])],
             [mk_read (mk_read (mk_read fvar yvars) uvars) zvars]) :: propagators
        | Map (dsrts2, rsrt) ->
            let uvars = List.map (fun srt -> mk_var srt (fresh_ident "?u")) dsrts2 in
            (* f[x := d], f[y][u] -> f[x := d][y][u] *)
            ([Match (mk_write fvar xvars dvar, []);
              Match (mk_read (mk_read fvar yvars) uvars, [])],
             [mk_read (mk_read (mk_write fvar xvars dvar) yvars) uvars]) ::
            (* f[x := d][u].destr[z], f[y] -> f[y][u].destr[z] *)
            ([Match (mk_read fvar yvars, []);
              Match (mk_read (mk_read (mk_write fvar xvars dvar) yvars) uvars, [])],
             [mk_read (mk_read fvar yvars) uvars]) :: propagators
        | _ -> propagators
      in
      (* Struct sort of the first domain sort, if it is a location sort. *)
      let ssrt_opt = match dsrts with
      | Loc ssrt :: _ -> Some ssrt
      | _ -> None
      in
      let match_ivar1 =
        ssrt_opt |>
        Opt.map (fun _ -> Match (List.hd xvars, [FilterNotNull])) |>
        Opt.to_list
      in
      (* Propagation across frame predicates Frame(x, a, f, g).
         NOTE(review): the two mangled lines below are stripped comments; the
         opening bracket of the pair list appears to have been lost with the
         first of them. Kept verbatim. *)
      let gen_frame wrap =
        ssrt_opt |>
        Opt.map (fun ssrt ->
          let set1 = Axioms.set1 ssrt in
          let set2 = Axioms.set2 ssrt in
          Frame ( x , a , f , ) , y.g - > y.f
          ([Match (mk_frame_term set1 set2 fvar gvar, []);
            Match (wrap (mk_read gvar (xvars)), [])] @
            match_ivar1,
           [wrap (mk_read fvar (xvars))]);
          Frame ( x , a , f , ) , y.f - > y.g
          ([Match (mk_frame_term set1 set2 fvar gvar, []);
            Match (wrap (mk_read fvar (xvars)), [])],
           [wrap (mk_read gvar (xvars))])
          ]) |>
        Opt.get_or_else []
      in
      (* Generators parameterized by a wrapper [wrap] applied to each read.
         NOTE(review): two stripped comment lines ("f = = g , ...") remain
         below; additionally the lines that closed the two write cases with
         "],'" were removed together with their comments, leaving those cases
         syntactically broken. All kept verbatim. *)
      let mk_generators wrap =
        ([Match (mk_eq_term fvar gvar, []);
          Match (wrap (mk_read fvar (xvars)), []);
        ],
         [wrap (mk_read gvar (xvars))]) ::
        f = = g , x.g
        ([Match (mk_eq_term fvar gvar, []);
          Match (wrap (mk_read fvar (xvars)), [])
        ],
         [wrap (mk_read gvar (xvars))]) ::
        f = = g , x.g - > x.f
        ([Match (mk_eq_term fvar gvar, []);
          Match (wrap (mk_read gvar (xvars)), [])] @ match_ivar1,
         [wrap (mk_read fvar (xvars))]) ::
        ([Match (mk_write fvar xvars dvar, []);
          Match (wrap (mk_read (mk_write fvar xvars dvar) yvars), []);
         [wrap (mk_read fvar yvars)]) ::
        ([Match (mk_write fvar xvars dvar, []);
          Match (wrap (mk_read fvar (yvars)), []);
         [wrap (mk_read (mk_write fvar (xvars) dvar) (yvars))]) ::
        gen_frame wrap
      in
      (* NOTE(review): mangled remainder of the case's result expression; the
         leading operand of [@] appears to have been lost. Kept verbatim. *)
      @ mk_generators ( fun t - > mk_known t )
  | _ -> fun propagators -> propagators
  in
  SortSet.fold add_propagators field_sorts []
(** Adds null axioms (forced-instantiated via E-matching) and read-over-write
    axioms to the formulas [fs], and attaches the read propagators computed by
    {!get_read_propagators} as [TermGenerator] annotations on the first
    formula. *)
let add_read_write_axioms fs =
  let gts = ground_terms ~include_atoms:true (mk_and fs) in
  (* NOTE(review): the match pattern of the first case below was lost in
     extraction — only the guard body [id1 = id2] remains. Kept verbatim. *)
  let has_loc_field_sort = function
    id1 = id2
  | _ -> false
  in
  let basic_pt_flds = TermSet.filter (has_loc_field_sort &&& is_free_const) gts in
  let basic_structs = struct_sorts_of_fields basic_pt_flds in
  (* instantiate null axioms *)
  let axioms = SortSet.fold (fun srt axioms -> Axioms.null_axioms srt @ axioms) basic_structs [] in
  let null_ax, generators = open_axioms ~force:true isFld axioms in
  (* Restrict the single-match generator to basic point fields only.
     NOTE(review): the first pattern looks garbled — a singleton list of a
     pair [([Match ...], t)] would be expected here. Kept verbatim. *)
  let generators = match generators with
  | [[Match (v, _)], t] -> [[Match (v, [FilterGeneric (fun sm t -> TermSet.mem (subst_term sm v) basic_pt_flds)])], t]
  | gs -> gs
  in
  let ccgraph = CongruenceClosure.congruence_classes gts fs in
  (* Run the term generators once to enrich the congruence-closure graph. *)
  let _ =
    let tgcode = EMatching.add_term_generators_to_ematch_code EMatching.empty generators in
    EMatching.generate_terms_from_code tgcode ccgraph
  in
  (* CAUTION: not forcing the instantiation here would yield an inconsistency
     with the read/write axioms. *)
  let null_ax1 = EMatching.instantiate_axioms ~force:true null_ax ccgraph in
  let fs1 = null_ax1 @ fs in
  let gts = TermSet.union (ground_terms ~include_atoms:true (mk_and null_ax1)) (CongruenceClosure.get_terms ccgraph) in
  (* Generate instances of all read-over-write axioms. *)
  let read_write_ax =
    let generators_and_axioms =
      TermSet.fold (fun t acc ->
        match t with
        | App (Write, fld :: _, _) ->
            Axioms.read_write_axioms fld @ acc
        | _ -> acc) gts []
    in
    generators_and_axioms
  in
  let read_propagators =
    List.map (fun (ms, t) -> TermGenerator (ms, t)) (get_read_propagators gts)
  in
  (* Attach all read propagators to the first formula, if any. *)
  let fs1 =
    match fs1 with
    | f :: fs1 ->
        annotate f read_propagators :: fs1
    | [] -> []
  in
  rev_concat [read_write_ax; fs1]
(** Adds instantiated theory axioms for graph reachability to the formulas
    [fs].  Reach axioms are produced for every struct sort [s] whose field
    sort [Map ([Loc s], Loc s)] occurs in [fs].  Assumes that [fs] is typed. *)
let add_reach_axioms fs =
  (* Struct sorts of all self-mapped location fields occurring in [fs]. *)
  let struct_sorts =
    SortSet.fold
      (fun srt struct_sorts -> match srt with
      | Map([Loc srt1], Loc srt2) when srt1 = srt2 ->
          SortSet.add srt1 struct_sorts
      | _ -> struct_sorts)
      (sorts (mk_and fs)) SortSet.empty
  in
  (* Congruence classes of the terms in [fs]; used to instantiate the axioms. *)
  let classes =
    CongruenceClosure.congruence_classes TermSet.empty fs |>
    CongruenceClosure.get_classes
  in
  let axioms =
    SortSet.fold
      (fun srt axioms -> Axioms.reach_axioms classes srt @ Axioms.reach_write_axioms srt @ axioms)
      struct_sorts []
  in
  rev_concat [axioms; fs]
(** Adds the array theory axioms for every array element sort occurring in
    the formulas [fs]. *)
let add_array_axioms fs =
  let add_for_sort srt acc = Axioms.array_axioms srt @ acc in
  SortSet.fold add_for_sort (array_sorts fs) [] @ fs
(** Encode label annotations as propositional guards: every label
    [Label (pol, t)] on a formula contributes the guard atom [t] (negated if
    [pol] is [false]), which is conjoined with the labeled formula. *)
let encode_labels fs =
  (* Conjoin [f] with the guards of all labels among [annots]. *)
  let mk_label annots f =
    let lbls =
      Util.partial_map
        (function
          | Label (pol, t) ->
              Some (if pol then Atom (t, []) else mk_not (Atom (t, [])))
          | _ -> None)
        annots
    in
    mk_and (f :: lbls)
  in
  let rec el = function
    | Binder (b, vs, f, annots) ->
        let f1 = el f in
        mk_label annots (Binder (b, vs, f1, annots))
    | (BoolOp (Not, [Atom (_, annots)]) as f)
    | (Atom (_, annots) as f) ->
        mk_label annots f
    | BoolOp (op, fs) ->
        BoolOp (op, List.map el fs)
  in List.rev_map el fs
(** Reduces the given formula to the target theory fragment, as specified by
    the configuration.  The reduction steps are order-sensitive: quantifier
    elimination must come first, and no later step may introduce existential
    quantifiers. *)
let reduce f =
  (* Split [f] into conjuncts and eliminate all existential quantifiers. *)
  let f1 = nnf f in
  let fs = elim_exists [f1] in
  let fs = split_ands fs in
  (* NOTE(review): the next line is a comment whose markers were lost in
     extraction. Kept verbatim. *)
  some formula rewriting that helps the SMT solver
  let fs = massage_field_reads fs in
  let fs = simplify_sets fs in
  (* Theory axiom additions; each step may consume terms added by the
     previous one. *)
  let fs = add_ep_axioms fs in
  let fs = add_frame_axioms fs in
  let fs = factorize_axioms (split_ands fs) in
  let fs = add_set_axioms fs in
  let fs = add_read_write_axioms fs in
  let fs = add_reach_axioms fs in
  let fs = add_array_axioms fs in
  let fs = if !Config.named_assertions then fs else List.map strip_names fs in
  let fs = fs |> split_ands in
  (* Debug output of the verification condition before instantiation. *)
  let _ =
    if Debug.is_debug 1 then begin
      print_endline "VC before instantiation:";
      print_form stdout (mk_and fs);
      print_newline ()
    end
  in
  let fs = encode_labels fs in
  fs
|
114b77c09be9c6f8c6c2a6ca9a14bee5849f1d0b1656b860af05c3ed15489a40 | coord-e/mlml | mlmlc.ml | let () =
match Sys.argv with
| [|_; file|] -> Mlml.Compile.f file |> print_endline
| _ -> failwith "Invalid number of arguments"
;;
| null | https://raw.githubusercontent.com/coord-e/mlml/ec34b1fe8766901fab6842b790267f32b77a2861/bin/mlmlc.ml | ocaml | let () =
match Sys.argv with
| [|_; file|] -> Mlml.Compile.f file |> print_endline
| _ -> failwith "Invalid number of arguments"
;;
| |
d67f23dad114afc4f63fa20d0d4651b81bb541ee61601f15e1ab63780517bd16 | saleyn/util | user_default.erl | %%%vim:ts=2:sw=2:et
%%%------------------------------------------------------------------------
File :
%%%------------------------------------------------------------------------
%%% @doc This is an extension of the shell commands
%%% to do all the work! Either place this file in the
path accessible to Erlang ( via ERL_LIBS ) or
%%% add this line to the ~/.erlang file:
` ` code : load_abs(os : getenv("HOME " ) + + " /.erlang / user_default " ) . ''
%%%
@author < >
%%% @version $Revision$
%%% $Date$
%%% @end
%%%------------------------------------------------------------------------
%%% $URL$
%%%------------------------------------------------------------------------
` ` The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
%%% compliance with the License. You should have received a copy of the
%%% Erlang Public License along with this software. If not, it can be
%%% retrieved via the world wide web at /.
%%%
Software distributed under the License is distributed on an " AS IS "
%%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%%% the License for the specific language governing rights and limitations
%%% under the License.
%%%
The Initial Developer of the Original Code is .
Portions created by Ericsson are Copyright 1999 ,
%%% AB. All Rights Reserved.''
%%%------------------------------------------------------------------------
-module(user_default).
-author('').
-export([help/0, saveh/1, debug/0, dbgtc/1, dbgon/1, dbgon/2,
dbgadd/1, dbgadd/2, dbgdel/1, dbgdel/2, dbgoff/0,
p/1, nl/0, tc/2, tc/4]).
-import(io, [format/1, format/2]).
help() ->
shell_default:help(),
format("** user extended commands **~n"),
format("saveh(File) -- save command history to a file\n"),
format("debug() -- start the debugger application\n"),
format("debug(Mods) -- start the debugger application and add the list of modules\n"),
format("dbgtc(File) -- use dbg:trace_client() to read data from File\n"),
format("dbgon(M) -- enable dbg tracer on all funs in module(s) M :: atom()|[atom()]\n"),
format("dbgon(M,Fun) -- enable dbg tracer for module M and function F\n"),
format("dbgon(M,File) -- enable dbg tracer for module M and log to File\n"),
format("dbgadd(M) -- enable call tracer for module(s) M :: atom()|[atom()]\n"),
format("dbgadd(M,F) -- enable call tracer for function M:F\n"),
format("dbgdel(M) -- disable call tracer for module(s) M :: atom()|[atom()]\n"),
format("dbgdel(M,F) -- disable call tracer for function M:F\n"),
format("dbgoff() -- disable dbg tracer (calls dbg:stop/0)\n"),
format("p(Term) -- print term using io:format(\"~s\\n\", [Term])\n", ["~p"]),
format("nl() -- load all changed modules on all known nodes\n"),
format("tc(N,M,F,A) -- evaluate {M,F,A} N times and return {MkSecs/call, Result}\n"),
format("tc(N,F) -- evaluate F N times and return {MkSecs/call, Result}\n"),
true.
%% These are in alphabetic order it would be nice if they were to *stay* so!
debug() ->
debug([]).
debug(Modules) when is_list(Modules) ->
R = debugger:start(),
i:iaa([break]),
[i:ii(M) || M <- Modules],
R.
dbgtc(File) ->
Fun = fun({trace,_,call,{M,F,A}}, _) -> io:format("call: ~w:~w~w~n", [M,F,A]);
({trace,_,return_from,{M,F,A},R}, _) -> io:format("retn: ~w:~w/~w -> ~w~n", [M,F,A,R]);
(A,B) -> io:format("~w: ~w~n", [A,B]) end,
dbg:trace_client(file, File, {Fun, []}).
dbgon(Modules) when is_atom(Modules); is_list(Modules) ->
case dbg:tracer() of
{ok,_} ->
dbg:p(all,call),
dbgadd(Modules);
Else ->
Else
end.
dbgon(Module, Fun) when is_atom(Fun) ->
{ok,_} = dbg:tracer(),
dbg:p(all,call),
dbg:tpl(Module, Fun, [{'_',[],[{return_trace}]}]),
ok;
dbgon(Module, File) when is_list(File) ->
{ok,_} = dbg:tracer(port, dbg:trace_port(file, File)),
dbg:p(all,call),
dbgadd(Module).
dbgadd(Module) when is_atom(Module) ->
dbgadd([Module]);
dbgadd(Modules) when is_list(Modules) ->
[dbg:tpl(M, [{'_',[],[{return_trace}]}]) || M <- Modules],
ok.
dbgadd(Module, Fun) ->
dbg:tpl(Module, Fun, [{'_',[],[{return_trace}]}]),
ok.
dbgdel(Module) when is_atom(Module) ->
dbgdel([Module]);
dbgdel(Modules) when is_list(Modules) ->
[dbg:ctpl(M) || M <- Modules],
ok.
dbgdel(Module, Fun) ->
dbg:ctpl(Module, Fun),
ok.
dbgoff() ->
dbg:stop().
%% @doc Term printer
p(Term) ->
io:format("~p\n", [Term]).
%% @doc Load all changed modules on all visible nodes
nl() ->
[io:format("Network loading ~p -> ~p~n", [M, c:nl(M)]) || M <- c:mm()],
ok.
%% @doc Save command history to file
saveh(File) ->
{ok, Io} = file:open(File, [write, read, delayed_write]),
GetHist = fun() ->
{links, [Shell|_]} = hd(process_info(self(), [links])),
Shell ! {shell_req, self(), get_cmd},
receive {shell_rep, Shell, R} -> R end
end,
Commands = lists:sort([{N,C} || {{command, N}, C} <- GetHist()]),
try
[case Trees of
[] -> ok;
[T] -> io:format(Io, "~s.\n", [erl_prettypr:format(T)]);
[T|Ts] -> io:format(Io, "~s~s.\n", [
erl_prettypr:format(T), [", "++erl_prettypr:format(Tree) || Tree <- Ts]
])
end || {_, Trees} <- Commands],
ok
after
file:close(Io)
end.
Profiling functions inspired by post :
-questions/2007-August/028462.html
tc(N, F) when N > 0 ->
time_it(fun() -> exit(call(N, N, F, erlang:system_time(microsecond))) end).
tc(N, M, F, A) when N > 0 ->
time_it(fun() -> exit(call(N, N, M, F, A, erlang:system_time(microsecond))) end).
time_it(F) ->
Pid = spawn_opt(F, [{min_heap_size, 16384}]),
MRef = erlang:monitor(process, Pid),
receive
{'DOWN', MRef, process, _, Result} -> Result
end.
call(1, X, F, Time1) ->
Res = (catch F()),
return(X, Res, Time1, erlang:system_time(microsecond));
call(N, X, F, Time1) ->
(catch F()),
call(N-1, X, F, Time1).
call(1, X, M, F, A, Time1) ->
Res = (catch apply(M, F, A)),
return(X, Res, Time1, erlang:system_time(microsecond));
call(N, X, M, F, A, Time1) ->
catch apply(M, F, A),
call(N-1, X, M, F, A, Time1).
return(N, Res, Time1, Time2) ->
Int = Time2 - Time1,
{Int / N, Res}.
| null | https://raw.githubusercontent.com/saleyn/util/17d567e8224c910bd7f987ed8432ae26efd4b36f/src/user_default.erl | erlang | vim:ts=2:sw=2:et
------------------------------------------------------------------------
------------------------------------------------------------------------
@doc This is an extension of the shell commands
to do all the work! Either place this file in the
add this line to the ~/.erlang file:
@version $Revision$
$Date$
@end
------------------------------------------------------------------------
$URL$
------------------------------------------------------------------------
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved via the world wide web at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
AB. All Rights Reserved.''
------------------------------------------------------------------------
These are in alphabetic order it would be nice if they were to *stay* so!
@doc Term printer
@doc Load all changed modules on all visible nodes
@doc Save command history to file
| File :
path accessible to Erlang ( via ERL_LIBS ) or
` ` code : load_abs(os : getenv("HOME " ) + + " /.erlang / user_default " ) . ''
@author < >
` ` The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
The Initial Developer of the Original Code is .
Portions created by Ericsson are Copyright 1999 ,
-module(user_default).
-author('').
-export([help/0, saveh/1, debug/0, dbgtc/1, dbgon/1, dbgon/2,
dbgadd/1, dbgadd/2, dbgdel/1, dbgdel/2, dbgoff/0,
p/1, nl/0, tc/2, tc/4]).
-import(io, [format/1, format/2]).
help() ->
shell_default:help(),
format("** user extended commands **~n"),
format("saveh(File) -- save command history to a file\n"),
format("debug() -- start the debugger application\n"),
format("debug(Mods) -- start the debugger application and add the list of modules\n"),
format("dbgtc(File) -- use dbg:trace_client() to read data from File\n"),
format("dbgon(M) -- enable dbg tracer on all funs in module(s) M :: atom()|[atom()]\n"),
format("dbgon(M,Fun) -- enable dbg tracer for module M and function F\n"),
format("dbgon(M,File) -- enable dbg tracer for module M and log to File\n"),
format("dbgadd(M) -- enable call tracer for module(s) M :: atom()|[atom()]\n"),
format("dbgadd(M,F) -- enable call tracer for function M:F\n"),
format("dbgdel(M) -- disable call tracer for module(s) M :: atom()|[atom()]\n"),
format("dbgdel(M,F) -- disable call tracer for function M:F\n"),
format("dbgoff() -- disable dbg tracer (calls dbg:stop/0)\n"),
format("p(Term) -- print term using io:format(\"~s\\n\", [Term])\n", ["~p"]),
format("nl() -- load all changed modules on all known nodes\n"),
format("tc(N,M,F,A) -- evaluate {M,F,A} N times and return {MkSecs/call, Result}\n"),
format("tc(N,F) -- evaluate F N times and return {MkSecs/call, Result}\n"),
true.
debug() ->
debug([]).
debug(Modules) when is_list(Modules) ->
R = debugger:start(),
i:iaa([break]),
[i:ii(M) || M <- Modules],
R.
dbgtc(File) ->
Fun = fun({trace,_,call,{M,F,A}}, _) -> io:format("call: ~w:~w~w~n", [M,F,A]);
({trace,_,return_from,{M,F,A},R}, _) -> io:format("retn: ~w:~w/~w -> ~w~n", [M,F,A,R]);
(A,B) -> io:format("~w: ~w~n", [A,B]) end,
dbg:trace_client(file, File, {Fun, []}).
dbgon(Modules) when is_atom(Modules); is_list(Modules) ->
case dbg:tracer() of
{ok,_} ->
dbg:p(all,call),
dbgadd(Modules);
Else ->
Else
end.
dbgon(Module, Fun) when is_atom(Fun) ->
{ok,_} = dbg:tracer(),
dbg:p(all,call),
dbg:tpl(Module, Fun, [{'_',[],[{return_trace}]}]),
ok;
dbgon(Module, File) when is_list(File) ->
{ok,_} = dbg:tracer(port, dbg:trace_port(file, File)),
dbg:p(all,call),
dbgadd(Module).
dbgadd(Module) when is_atom(Module) ->
dbgadd([Module]);
dbgadd(Modules) when is_list(Modules) ->
[dbg:tpl(M, [{'_',[],[{return_trace}]}]) || M <- Modules],
ok.
dbgadd(Module, Fun) ->
dbg:tpl(Module, Fun, [{'_',[],[{return_trace}]}]),
ok.
dbgdel(Module) when is_atom(Module) ->
dbgdel([Module]);
dbgdel(Modules) when is_list(Modules) ->
[dbg:ctpl(M) || M <- Modules],
ok.
dbgdel(Module, Fun) ->
dbg:ctpl(Module, Fun),
ok.
dbgoff() ->
dbg:stop().
p(Term) ->
io:format("~p\n", [Term]).
nl() ->
[io:format("Network loading ~p -> ~p~n", [M, c:nl(M)]) || M <- c:mm()],
ok.
saveh(File) ->
{ok, Io} = file:open(File, [write, read, delayed_write]),
GetHist = fun() ->
{links, [Shell|_]} = hd(process_info(self(), [links])),
Shell ! {shell_req, self(), get_cmd},
receive {shell_rep, Shell, R} -> R end
end,
Commands = lists:sort([{N,C} || {{command, N}, C} <- GetHist()]),
try
[case Trees of
[] -> ok;
[T] -> io:format(Io, "~s.\n", [erl_prettypr:format(T)]);
[T|Ts] -> io:format(Io, "~s~s.\n", [
erl_prettypr:format(T), [", "++erl_prettypr:format(Tree) || Tree <- Ts]
])
end || {_, Trees} <- Commands],
ok
after
file:close(Io)
end.
Profiling functions inspired by post :
-questions/2007-August/028462.html
tc(N, F) when N > 0 ->
time_it(fun() -> exit(call(N, N, F, erlang:system_time(microsecond))) end).
tc(N, M, F, A) when N > 0 ->
time_it(fun() -> exit(call(N, N, M, F, A, erlang:system_time(microsecond))) end).
time_it(F) ->
Pid = spawn_opt(F, [{min_heap_size, 16384}]),
MRef = erlang:monitor(process, Pid),
receive
{'DOWN', MRef, process, _, Result} -> Result
end.
call(1, X, F, Time1) ->
Res = (catch F()),
return(X, Res, Time1, erlang:system_time(microsecond));
call(N, X, F, Time1) ->
(catch F()),
call(N-1, X, F, Time1).
call(1, X, M, F, A, Time1) ->
Res = (catch apply(M, F, A)),
return(X, Res, Time1, erlang:system_time(microsecond));
call(N, X, M, F, A, Time1) ->
catch apply(M, F, A),
call(N-1, X, M, F, A, Time1).
return(N, Res, Time1, Time2) ->
Int = Time2 - Time1,
{Int / N, Res}.
|
220b04c7466f35d1462910f9fe41c3ca13ddd6f1215180022c5f013285566b3f | WorksHub/client | subs.cljs | (ns wh.jobs.jobsboard.subs
(:require [clojure.string :as str]
[goog.i18n.NumberFormat :as nf]
[re-frame.core :refer [reg-sub]]
[wh.common.data :refer [currency-symbols]]
[wh.common.job :as job]
[wh.common.subs]
[wh.common.user :as user-common]
[wh.components.pagination :as pagination]
[wh.graphql-cache :as gqlc]
[wh.job.db :as job-db]
[wh.jobs.jobsboard.db :as jobsboard]
[wh.jobs.jobsboard.events :as events]
[wh.jobs.jobsboard.search-results :as search-results]
[wh.jobsboard.db :as jobsboard-ssr]
[wh.landing-new.events :as landing-events]
[wh.slug :as slug]
[wh.verticals :as verticals])
(:require-macros [clojure.core.strint :refer [<<]]))
(reg-sub
::search-params
:<- [::results]
(fn [{:keys [jobs-search] :as _results} _]
(:search-params jobs-search)))
(reg-sub
::ssr-jobs
(fn [db _]
(get-in db jobsboard/ssr-jobs-path)))
(reg-sub
::db
(fn [db _] db))
;; used e.g. on preset search pages
(reg-sub
::search-tag
(fn [db _]
(get-in db [:wh.db/page-params :tag])))
(reg-sub
::search-term
:<- [::search-params]
:<- [::search-tag]
(fn [[{:keys [query] :as _search-params} search-tag] _]
(or (not-empty query) search-tag)))
(reg-sub
::search-label
:<- [::search-params]
(fn [{:keys [label] :as _search-params} _]
label))
(reg-sub
::jobsboard
:<- [::results]
(fn [{:keys [jobs-search] :as _results} _]
(search-results/get-jobsboard-db jobs-search)))
(reg-sub
::admin?
(fn [db _]
(user-common/admin? db)))
(reg-sub
::user-email
(fn [db _]
(get-in db [:wh.user.db/sub-db :wh.user.db/email])))
(reg-sub
::search
:<- [::results]
:<- [::admin?]
:<- [::user-email]
(fn [[{:keys [jobs-search] :as _results} admin? user-email] _]
(let [params (search-results/organize-search-params
user-email jobs-search (:search-params jobs-search))]
(search-results/get-search-data admin? params))))
(reg-sub
::filters
(fn [db _]
(get-in db [::jobsboard/sub-db ::jobsboard/search :wh.search/filters])))
(reg-sub
::result-count
:<- [::jobsboard]
(fn [db _]
(get-in db [::jobsboard/number-of-search-results] 0)))
(reg-sub
::current-page
:<- [::jobsboard]
(fn [db _]
(get-in db [::jobsboard/current-page])))
(reg-sub
::total-pages
:<- [::jobsboard]
(fn [db _]
(get-in db [::jobsboard/total-pages])))
(reg-sub
::header-title
:<- [:wh/vertical]
:<- [::search-label]
(fn [[vertical search-label] _]
(or search-label
(:title (verticals/config vertical :jobsboard-header)))))
(reg-sub
::header-subtitle
:<- [:wh/vertical]
(fn [vertical _]
(or (:subtitle (verticals/config vertical :jobsboard-header))
"Browse jobs for software engineers and developers")))
(reg-sub
::header-description
:<- [:wh/vertical]
:<- [::search-term]
(fn [[vertical search-term] _]
(or (get (verticals/config vertical :jobsboard-tag-desc) search-term)
(:description (verticals/config vertical :jobsboard-header)))))
(reg-sub
::header-info
:<- [::header-title]
:<- [::header-subtitle]
:<- [::header-description]
(fn [[title subtitle description] _]
{:title title :subtitle subtitle :description description}))
(reg-sub
::results
(fn [db _]
(gqlc/cache-results events/jobs db [])))
(reg-sub
::jobs-search
:<- [::results]
(fn [results _]
(:jobs-search results)))
(reg-sub
:wh.search/searching?
:<- [::ssr-jobs]
:<- [::db]
(fn [[ssr-jobs db] _]
(if ssr-jobs
false
(gqlc/cache-loading? events/jobs db))))
(reg-sub
::jobs
:<- [::jobs-search]
:<- [:user/liked-jobs]
:<- [:user/applied-jobs]
:<- [::ssr-jobs]
(fn [[jobs-search liked-jobs applied-jobs ssr-jobs] _]
(->> (or (:jobs jobs-search) ssr-jobs)
(job/add-interactions liked-jobs applied-jobs)
(map job/translate-job))))
(reg-sub
::promoted-jobs
:<- [::jobs-search]
:<- [:user/liked-jobs]
:<- [:user/applied-jobs]
(fn [[jobs-search liked-jobs applied-jobs] _]
(->> (:promoted jobs-search)
(job/add-interactions liked-jobs applied-jobs)
(map job/translate-job))))
(defn- checkbox-description
[{:keys [value label cnt display-count?]
:or {display-count? true}}]
(let [cnt (or cnt 0)]
{:value value
:label (str (or label value) (when (and display-count? (pos? cnt)) (str " (" cnt ")")))}))
(reg-sub
:wh.search/available-role-types
(fn [_ _]
(for [role-type job-db/role-types]
{:value role-type
:label role-type})))
(reg-sub
:wh.search/only-mine-desc
:<- [::search]
(fn [search _]
(checkbox-description {:label "Only my jobs"
:cnt (:wh.search/mine-count search)})))
(reg-sub
:wh.search/role-types
(fn [db _]
(get-in db [::jobsboard/sub-db ::jobsboard/search :wh.search/role-types])))
(reg-sub
:wh.search/query
:<- [::search]
(fn [search _]
(:wh.search/query search)))
(reg-sub
::current-query
(fn [db _]
(get-in db [::jobsboard/sub-db ::jobsboard/search :wh.search/query])))
(reg-sub
:wh.search/sponsorship
(fn [db _]
(get-in db [::jobsboard/sub-db ::jobsboard/search :wh.search/sponsorship])))
(reg-sub
:wh.search/remote
(fn [db _]
(get-in db [::jobsboard/sub-db ::jobsboard/search :wh.search/remote])))
(reg-sub
:wh.search/show-competitive?
(fn [db _]
(get-in db [::jobsboard/sub-db ::jobsboard/search :wh.search/competitive])))
(reg-sub
:wh.search/tags
(fn [db _]
(get-in db [::jobsboard/sub-db ::jobsboard/search :wh.search/tags])))
(reg-sub
:wh.search/selected-tags
:<- [:wh.search/tags]
(fn [tags _]
(map #(str (slug/tag-label->slug %) ":tech") tags)))
(reg-sub
:wh.search/available-tags
:<- [::filters]
(fn [filters _]
(->> (:wh.search/available-tags filters)
(map
(fn [{:keys [value _attr _count] :as tag}]
(merge
tag
{:label value
:slug (slug/tag-label->slug value)
:type "tech"})))
(sort-by :count >))))
;; Non-existing tags is a list of tags that appeared in URL
;; but are not available in application DB. We want to display
;; this tags so user may remove these from filters and clear URL
(reg-sub
:wh.search/non-existing-tags
:<- [:wh.search/available-tags]
:<- [:wh.search/tags]
(fn [[available-tags tags] _]
(let [available-tags (set (map :value available-tags))
non-existing-tags (remove available-tags tags)]
(for [value non-existing-tags]
{:label value
:slug (slug/tag-label->slug value)
:type "tech"
:value value
:selected true
:count 0
:tag/non-existing true}))))
(reg-sub
:wh.search/visible-tags
:<- [:wh.search/non-existing-tags]
:<- [:wh.search/available-tags]
(fn [[non-existing-tags available-tags] _]
(concat non-existing-tags available-tags)))
(reg-sub
:wh.search/city-info
:<- [::filters]
(fn [filters _]
(:wh.search/city-info filters)))
(reg-sub
:wh.search/country-names
:<- [:wh.search/city-info]
(fn [city-info _]
(into {} (map (juxt :country-code :country)) city-info)))
(reg-sub
:wh.search/available-locations
:<- [::filters]
:<- [:wh.search/country-names]
(fn [[{:keys [wh.search/available-countries
wh.search/available-cities
wh.search/available-regions]}
country-names]
_]
(->> available-cities
(concat
(map
(fn [{:keys [value] :as region}]
(assoc region :label value))
available-regions))
(concat
(map
(fn [{:keys [value] :as country}]
(assoc country :label (country-names value)))
available-countries))
(map
(fn [{:keys [label value attr count]}]
{:label (or label value)
:value value
:slug (slug/tag-label->slug value)
:attr attr
:type "location"
:count count}))
(sort-by :count >)
(sort-by :value (fn [v _]
(#{"UK" "GB" "US" "Europe"} v))))))
(reg-sub
:wh.search/city
:<- [::current-search]
(fn [search [_ city]]
(contains? (get search :wh.search/cities) city)))
(reg-sub
::selected-locations
:<- [::current-search]
(fn [search _]
(->> (select-keys search [:wh.search/cities :wh.search/countries :wh.search/regions])
(mapcat second)
(map #(str (slug/tag-label->slug %) ":location")))))
(reg-sub
::current-search
(fn [db _]
(get-in db [::jobsboard/sub-db ::jobsboard/search])))
(reg-sub
::result-count-str
:<- [::result-count]
:<- [::search-label]
(fn [[n label] _]
(let [label (if label (str "'" label "'") "your criteria")]
(if (zero? n)
(<< "We found no jobs matching ~{label} 😢")
(<< "We found ~{n} jobs matching ~{label}")))))
(reg-sub
::pagination
:<- [::current-page]
:<- [::total-pages]
(fn [[current-page total-pages] _]
(pagination/generate-pagination current-page total-pages)))
(reg-sub
::view-type
:<- [:wh/query-params]
(fn [query-params]
(keyword (get query-params jobsboard-ssr/view-type-param "cards"))))
(reg-sub
::page-params
:<- [:wh/page-params]
:<- [::search-term]
(fn [[params search-term] _]
NB : While ` : query ` param may have ` nil ` value , it must be passed .
(assoc params :query (when-not (str/blank? search-term) search-term))))
(reg-sub
:wh.search/currency
(fn [db _]
(get-in db [::jobsboard/sub-db ::jobsboard/search :wh.search/currency])))
(reg-sub
:wh.search/currencies
:<- [::filters]
(fn [filters _]
(let [currencies (->> filters
:wh.search/salary-ranges
(map :currency)
distinct)]
(conj currencies "*"))))
(reg-sub
:wh.search/salary-type
(fn [db _]
(get-in db [::jobsboard/sub-db ::jobsboard/search :wh.search/salary-type])))
;; This can return nil when salary ranges are not
;; yet fetched. In this case, the slider is not
;; rendered at all.
(reg-sub
:wh.search/grouped-salary-ranges
:<- [::filters]
(fn [filters _]
(when-let [ranges (:wh.search/salary-ranges filters)]
(group-by (juxt :currency :time-period) ranges))))
(reg-sub
:wh.search/salary-min-max
:<- [:wh.search/grouped-salary-ranges]
:<- [:wh.search/currency]
:<- [:wh.search/salary-type]
(fn [[ranges currency type] _]
(let [type ({:year "Yearly", :day "Daily"}
(or type :year))]
(first (get ranges [currency type])))))
(reg-sub
:wh.search/salary-min
:<- [:wh.search/salary-min-max]
(fn [minmax _]
(or (:min minmax) 0)))
(reg-sub
:wh.search/salary-max
:<- [:wh.search/salary-min-max]
(fn [minmax _]
(or (:max minmax) 100000)))
(reg-sub
:wh.search/current-salary-range
(fn [db _]
(get-in db [::jobsboard/sub-db ::jobsboard/search :wh.search/salary-range])))
(reg-sub
:wh.search/salary-from
(fn [db _]
(get-in db [::jobsboard/sub-db ::jobsboard/search :wh.search/salary-from])))
(reg-sub
:wh.search/salary-range
:<- [:wh.search/current-salary-range]
:<- [:wh.search/salary-min]
:<- [:wh.search/salary-max]
(fn [[current-salary-range min max] _]
(or current-salary-range [min max])))
(reg-sub
:wh.search/salary-range-js
:<- [:wh.search/salary-range]
(fn [range _]
(clj->js range)))
(defn format-number [n]
(let [formatter (goog.i18n.NumberFormat. nf/Format.COMPACT_SHORT)]
(.format formatter n)))
(reg-sub
:wh.search/salary-slider-description
:<- [:wh.search/salary-range]
:<- [:wh.search/salary-from]
:<- [:wh.search/currency]
(fn [[salary-range salary-from currency] _]
(let [[min max] salary-range
symbol (currency-symbols currency)
local-min (or salary-from min)]
(str symbol (format-number local-min) " – " symbol (format-number max)))))
(reg-sub
:wh.search/salary-slider-min-max
:<- [:wh.search/salary-range]
:<- [:wh.search/salary-from]
:<- [:wh.search/currency]
(fn [[salary-range salary-from currency] _]
(let [[min max] salary-range
symbol (currency-symbols currency)
local-min (or salary-from min)]
[(str symbol (format-number local-min)) (str symbol (format-number max))])))
(reg-sub
:wh.search/salary-slider-min
:<- [:wh.search/salary-slider-min-max]
(fn [[min] _] min))
(reg-sub
:wh.search/salary-slider-max
:<- [:wh.search/salary-slider-min-max]
(fn [[_ max] _] max))
(reg-sub
:wh.search/only-mine
:<- [::search]
(fn [search _]
(:wh.search/only-mine search)))
(reg-sub
:wh.search/published
:<- [::search]
(fn [search _]
(:wh.search/published search)))
(defn- published-option
[{:keys [wh.search/published-count]} value label]
(let [cnt (->> published-count
(filter #(= (:value %) (str value)))
first :count)]
(checkbox-description {:value value :label label :cnt cnt})))
(reg-sub
:wh.search/published-options
:<- [::search]
(fn [search _]
[(published-option search true "Published")
(published-option search false "Unpublished")]))
(reg-sub
::salary-pristine?
:<- [:wh.search/currency]
:<- [:wh.search/salary-type]
:<- [:wh.search/salary-min-max]
:<- [:wh.search/show-competitive?]
(fn [[currency salary-type salary-min-max competitive?] _]
(every? not [currency salary-type salary-min-max (not competitive?)])))
(reg-sub
::query-pristine?
:<- [::current-query]
(fn [current-query _]
(empty? current-query)))
(reg-sub
::tags-pristine?
:<- [:wh.search/selected-tags]
(fn [selected-tags _]
(empty? selected-tags)))
(reg-sub
::locations-pristine?
:<- [::selected-locations]
:<- [:wh.search/sponsorship]
:<- [:wh.search/remote]
(fn [[selected-locations sponsorship remote] _]
(and (empty? selected-locations)
(every? not [sponsorship remote]))))
(reg-sub
::role-types-pristine?
:<- [:wh.search/role-types]
(fn [role-types _]
(empty? role-types)))
(reg-sub
::recommended-jobs
(fn [db _]
(some->> (gqlc/cache-results landing-events/recommended-jobs db [:jobs])
(map #(assoc % :company-info (:company %))))))
(reg-sub
::recent-jobs
(fn [db _]
(gqlc/cache-results landing-events/recent-jobs db [:recent-jobs :results])))
(reg-sub
::side-jobs
:<- [:user/has-recommendations?]
:<- [::recommended-jobs]
:<- [::recent-jobs]
(fn [[has-recommendations? recommended-jobs recent-jobs]]
(if has-recommendations? recommended-jobs recent-jobs)))
(reg-sub
::recommended-jobs-loading?
(fn [db _]
(gqlc/cache-loading? landing-events/recommended-jobs db)))
(reg-sub
::recent-jobs-loading?
(fn [db _]
(gqlc/cache-loading? landing-events/recent-jobs db)))
(reg-sub
::side-jobs-loading?
:<- [:user/has-recommendations?]
:<- [::recommended-jobs-loading?]
:<- [::recent-jobs-loading?]
(fn [[has-recommendations? recommended-jobs-loading? recent-jobs-loading?]]
(if has-recommendations? recommended-jobs-loading? recent-jobs-loading?)))
| null | https://raw.githubusercontent.com/WorksHub/client/77e4212a69dad049a9e784143915058acd918982/client/src/wh/jobs/jobsboard/subs.cljs | clojure | used e.g. on preset search pages
Non-existing tags is a list of tags that appeared in URL
but are not available in application DB. We want to display
this tags so user may remove these from filters and clear URL
This can return nil when salary ranges are not
yet fetched. In this case, the slider is not
rendered at all. | (ns wh.jobs.jobsboard.subs
(:require [clojure.string :as str]
[goog.i18n.NumberFormat :as nf]
[re-frame.core :refer [reg-sub]]
[wh.common.data :refer [currency-symbols]]
[wh.common.job :as job]
[wh.common.subs]
[wh.common.user :as user-common]
[wh.components.pagination :as pagination]
[wh.graphql-cache :as gqlc]
[wh.job.db :as job-db]
[wh.jobs.jobsboard.db :as jobsboard]
[wh.jobs.jobsboard.events :as events]
[wh.jobs.jobsboard.search-results :as search-results]
[wh.jobsboard.db :as jobsboard-ssr]
[wh.landing-new.events :as landing-events]
[wh.slug :as slug]
[wh.verticals :as verticals])
(:require-macros [clojure.core.strint :refer [<<]]))
(reg-sub
::search-params
:<- [::results]
(fn [{:keys [jobs-search] :as _results} _]
(:search-params jobs-search)))
(reg-sub
::ssr-jobs
(fn [db _]
(get-in db jobsboard/ssr-jobs-path)))
(reg-sub
::db
(fn [db _] db))
(reg-sub
::search-tag
(fn [db _]
(get-in db [:wh.db/page-params :tag])))
(reg-sub
::search-term
:<- [::search-params]
:<- [::search-tag]
(fn [[{:keys [query] :as _search-params} search-tag] _]
(or (not-empty query) search-tag)))
(reg-sub
::search-label
:<- [::search-params]
(fn [{:keys [label] :as _search-params} _]
label))
(reg-sub
::jobsboard
:<- [::results]
(fn [{:keys [jobs-search] :as _results} _]
(search-results/get-jobsboard-db jobs-search)))
(reg-sub
::admin?
(fn [db _]
(user-common/admin? db)))
(reg-sub
::user-email
(fn [db _]
(get-in db [:wh.user.db/sub-db :wh.user.db/email])))
(reg-sub
::search
:<- [::results]
:<- [::admin?]
:<- [::user-email]
(fn [[{:keys [jobs-search] :as _results} admin? user-email] _]
(let [params (search-results/organize-search-params
user-email jobs-search (:search-params jobs-search))]
(search-results/get-search-data admin? params))))
(reg-sub
::filters
(fn [db _]
(get-in db [::jobsboard/sub-db ::jobsboard/search :wh.search/filters])))
(reg-sub
::result-count
:<- [::jobsboard]
(fn [db _]
(get-in db [::jobsboard/number-of-search-results] 0)))
(reg-sub
::current-page
:<- [::jobsboard]
(fn [db _]
(get-in db [::jobsboard/current-page])))
(reg-sub
::total-pages
:<- [::jobsboard]
(fn [db _]
(get-in db [::jobsboard/total-pages])))
(reg-sub
::header-title
:<- [:wh/vertical]
:<- [::search-label]
(fn [[vertical search-label] _]
(or search-label
(:title (verticals/config vertical :jobsboard-header)))))
(reg-sub
::header-subtitle
:<- [:wh/vertical]
(fn [vertical _]
(or (:subtitle (verticals/config vertical :jobsboard-header))
"Browse jobs for software engineers and developers")))
(reg-sub
::header-description
:<- [:wh/vertical]
:<- [::search-term]
(fn [[vertical search-term] _]
(or (get (verticals/config vertical :jobsboard-tag-desc) search-term)
(:description (verticals/config vertical :jobsboard-header)))))
(reg-sub
::header-info
:<- [::header-title]
:<- [::header-subtitle]
:<- [::header-description]
(fn [[title subtitle description] _]
{:title title :subtitle subtitle :description description}))
(reg-sub
::results
(fn [db _]
(gqlc/cache-results events/jobs db [])))
(reg-sub
::jobs-search
:<- [::results]
(fn [results _]
(:jobs-search results)))
(reg-sub
:wh.search/searching?
:<- [::ssr-jobs]
:<- [::db]
(fn [[ssr-jobs db] _]
(if ssr-jobs
false
(gqlc/cache-loading? events/jobs db))))
(reg-sub
::jobs
:<- [::jobs-search]
:<- [:user/liked-jobs]
:<- [:user/applied-jobs]
:<- [::ssr-jobs]
(fn [[jobs-search liked-jobs applied-jobs ssr-jobs] _]
(->> (or (:jobs jobs-search) ssr-jobs)
(job/add-interactions liked-jobs applied-jobs)
(map job/translate-job))))
(reg-sub
::promoted-jobs
:<- [::jobs-search]
:<- [:user/liked-jobs]
:<- [:user/applied-jobs]
(fn [[jobs-search liked-jobs applied-jobs] _]
(->> (:promoted jobs-search)
(job/add-interactions liked-jobs applied-jobs)
(map job/translate-job))))
(defn- checkbox-description
[{:keys [value label cnt display-count?]
:or {display-count? true}}]
(let [cnt (or cnt 0)]
{:value value
:label (str (or label value) (when (and display-count? (pos? cnt)) (str " (" cnt ")")))}))
(reg-sub
:wh.search/available-role-types
(fn [_ _]
(for [role-type job-db/role-types]
{:value role-type
:label role-type})))
(reg-sub
:wh.search/only-mine-desc
:<- [::search]
(fn [search _]
(checkbox-description {:label "Only my jobs"
:cnt (:wh.search/mine-count search)})))
(reg-sub
:wh.search/role-types
(fn [db _]
(get-in db [::jobsboard/sub-db ::jobsboard/search :wh.search/role-types])))
(reg-sub
:wh.search/query
:<- [::search]
(fn [search _]
(:wh.search/query search)))
(reg-sub
::current-query
(fn [db _]
(get-in db [::jobsboard/sub-db ::jobsboard/search :wh.search/query])))
(reg-sub
:wh.search/sponsorship
(fn [db _]
(get-in db [::jobsboard/sub-db ::jobsboard/search :wh.search/sponsorship])))
(reg-sub
:wh.search/remote
(fn [db _]
(get-in db [::jobsboard/sub-db ::jobsboard/search :wh.search/remote])))
;; --- Jobsboard search: filter flags, tags and locations -------------------

;; Whether the "competitive salary" option is selected in the current search.
(reg-sub
 :wh.search/show-competitive?
 (fn [db _]
   (get-in db [::jobsboard/sub-db ::jobsboard/search :wh.search/competitive])))

;; Raw tag labels currently selected in the search.
(reg-sub
 :wh.search/tags
 (fn [db _]
   (get-in db [::jobsboard/sub-db ::jobsboard/search :wh.search/tags])))

;; Selected tags encoded as "<slug>:tech" strings, as the tag picker expects.
(reg-sub
 :wh.search/selected-tags
 :<- [:wh.search/tags]
 (fn [tags _]
   (map #(str (slug/tag-label->slug %) ":tech") tags)))

;; Tag facets from the server, shaped for the tag component and sorted by
;; descending facet count.
(reg-sub
 :wh.search/available-tags
 :<- [::filters]
 (fn [filters _]
   (->> (:wh.search/available-tags filters)
        (map
         (fn [{:keys [value _attr _count] :as tag}]
           (merge
            tag
            {:label value
             :slug (slug/tag-label->slug value)
             :type "tech"})))
        (sort-by :count >))))

;; Selected tags that are absent from the server facets; rendered as
;; pre-selected zero-count entries flagged :tag/non-existing.
(reg-sub
 :wh.search/non-existing-tags
 :<- [:wh.search/available-tags]
 :<- [:wh.search/tags]
 (fn [[available-tags tags] _]
   (let [available-tags (set (map :value available-tags))
         non-existing-tags (remove available-tags tags)]
     (for [value non-existing-tags]
       {:label value
        :slug (slug/tag-label->slug value)
        :type "tech"
        :value value
        :selected true
        :count 0
        :tag/non-existing true}))))

;; Everything shown in the tag picker: non-existing tags first, then facets.
(reg-sub
 :wh.search/visible-tags
 :<- [:wh.search/non-existing-tags]
 :<- [:wh.search/available-tags]
 (fn [[non-existing-tags available-tags] _]
   (concat non-existing-tags available-tags)))

;; City/country facet data as delivered by the server.
(reg-sub
 :wh.search/city-info
 :<- [::filters]
 (fn [filters _]
   (:wh.search/city-info filters)))

;; Map of country code -> country name, derived from the city info.
(reg-sub
 :wh.search/country-names
 :<- [:wh.search/city-info]
 (fn [city-info _]
   (into {} (map (juxt :country-code :country)) city-info)))

;; All location facets (cities, regions, countries) normalised to one shape,
;; sorted by descending count, with entries in #{"UK" "GB" "US" "Europe"}
;; moved to the front by the final (stable) sort.
(reg-sub
 :wh.search/available-locations
 :<- [::filters]
 :<- [:wh.search/country-names]
 (fn [[{:keys [wh.search/available-countries
               wh.search/available-cities
               wh.search/available-regions]}
       country-names]
      _]
   (->> available-cities
        (concat
         (map
          (fn [{:keys [value] :as region}]
            (assoc region :label value))
          available-regions))
        (concat
         (map
          (fn [{:keys [value] :as country}]
            (assoc country :label (country-names value)))
          available-countries))
        (map
         (fn [{:keys [label value attr count]}]
           {:label (or label value)
            :value value
            :slug (slug/tag-label->slug value)
            :attr attr
            :type "location"
            :count count}))
        (sort-by :count >)
        (sort-by :value (fn [v _]
                          (#{"UK" "GB" "US" "Europe"} v))))))
;; True when `city` is among the cities of the current search.
(reg-sub
 :wh.search/city
 :<- [::current-search]
 (fn [search [_ city]]
   (contains? (get search :wh.search/cities) city)))

;; Selected cities/countries/regions encoded as "<slug>:location" strings.
(reg-sub
 ::selected-locations
 :<- [::current-search]
 (fn [search _]
   (->> (select-keys search [:wh.search/cities :wh.search/countries :wh.search/regions])
        (mapcat second)
        (map #(str (slug/tag-label->slug %) ":location")))))

;; The whole search-criteria map from the jobsboard sub-db.
(reg-sub
 ::current-search
 (fn [db _]
   (get-in db [::jobsboard/sub-db ::jobsboard/search])))

;; Human-readable summary of how many jobs matched the search.
(reg-sub
 ::result-count-str
 :<- [::result-count]
 :<- [::search-label]
 (fn [[n label] _]
   (let [label (if label (str "'" label "'") "your criteria")]
     (if (zero? n)
       (<< "We found no jobs matching ~{label} 😢")
       (<< "We found ~{n} jobs matching ~{label}")))))

;; Pagination model for the results list.
(reg-sub
 ::pagination
 :<- [::current-page]
 :<- [::total-pages]
 (fn [[current-page total-pages] _]
   (pagination/generate-pagination current-page total-pages)))

;; Cards/list view selector from the URL query params ("cards" by default).
;; NOTE(review): the computation fn takes a single argument while re-frame
;; also passes the query vector — presumably intentional (ClojureScript does
;; not enforce arity here); confirm.
(reg-sub
 ::view-type
 :<- [:wh/query-params]
 (fn [query-params]
   (keyword (get query-params jobsboard-ssr/view-type-param "cards"))))
;; Routing params for the current page, with the search term added under
;; :query (nil when the search term is blank).
(reg-sub
 ::page-params
 :<- [:wh/page-params]
 :<- [::search-term]
 (fn [[params search-term] _]
   ;; NB: While `:query` param may have `nil` value, it must be passed.
   (assoc params :query (when-not (str/blank? search-term) search-term))))
;; --- Salary filter subscriptions ------------------------------------------

;; Currency currently selected for the salary filter.
(reg-sub
 :wh.search/currency
 (fn [db _]
   (get-in db [::jobsboard/sub-db ::jobsboard/search :wh.search/currency])))

;; Distinct currencies present in the salary-range facets, plus "*" (any).
(reg-sub
 :wh.search/currencies
 :<- [::filters]
 (fn [filters _]
   (let [currencies (->> filters
                         :wh.search/salary-ranges
                         (map :currency)
                         distinct)]
     (conj currencies "*"))))

;; Selected salary period (e.g. :year or :day).
(reg-sub
 :wh.search/salary-type
 (fn [db _]
   (get-in db [::jobsboard/sub-db ::jobsboard/search :wh.search/salary-type])))

;; Salary-range facets grouped by [currency time-period]; nil when absent.
(reg-sub
 :wh.search/grouped-salary-ranges
 :<- [::filters]
 (fn [filters _]
   (when-let [ranges (:wh.search/salary-ranges filters)]
     (group-by (juxt :currency :time-period) ranges))))

;; The min/max facet for the selected currency and period (yearly default).
(reg-sub
 :wh.search/salary-min-max
 :<- [:wh.search/grouped-salary-ranges]
 :<- [:wh.search/currency]
 :<- [:wh.search/salary-type]
 (fn [[ranges currency type] _]
   (let [type ({:year "Yearly", :day "Daily"}
               (or type :year))]
     (first (get ranges [currency type])))))

;; Lower bound for the salary slider; 0 when no facet data is available.
(reg-sub
 :wh.search/salary-min
 :<- [:wh.search/salary-min-max]
 (fn [minmax _]
   (or (:min minmax) 0)))

;; Upper bound for the salary slider; 100000 when no facet data is available.
(reg-sub
 :wh.search/salary-max
 :<- [:wh.search/salary-min-max]
 (fn [minmax _]
   (or (:max minmax) 100000)))

;; Salary range explicitly chosen by the user (nil when untouched).
(reg-sub
 :wh.search/current-salary-range
 (fn [db _]
   (get-in db [::jobsboard/sub-db ::jobsboard/search :wh.search/salary-range])))

;; Minimum salary carried in the search state (e.g. from URL params).
(reg-sub
 :wh.search/salary-from
 (fn [db _]
   (get-in db [::jobsboard/sub-db ::jobsboard/search :wh.search/salary-from])))

;; Effective [min max] range: the user's choice, or the facet bounds.
(reg-sub
 :wh.search/salary-range
 :<- [:wh.search/current-salary-range]
 :<- [:wh.search/salary-min]
 :<- [:wh.search/salary-max]
 (fn [[current-salary-range min max] _]
   (or current-salary-range [min max])))

;; Same range converted to a JS array for the slider widget.
(reg-sub
 :wh.search/salary-range-js
 :<- [:wh.search/salary-range]
 (fn [range _]
   (clj->js range)))
(defn format-number
  "Formats `n` in compact short form (e.g. 1.2K, 3.4M) using Google
  Closure's i18n NumberFormat in COMPACT_SHORT mode."
  [n]
  (.format (goog.i18n.NumberFormat. nf/Format.COMPACT_SHORT) n))
;; Text such as "£20K – £60K" describing the slider's current range.
(reg-sub
 :wh.search/salary-slider-description
 :<- [:wh.search/salary-range]
 :<- [:wh.search/salary-from]
 :<- [:wh.search/currency]
 (fn [[salary-range salary-from currency] _]
   (let [[min max] salary-range
         symbol (currency-symbols currency)
         local-min (or salary-from min)]
     (str symbol (format-number local-min) " – " symbol (format-number max)))))

;; Pair of formatted slider endpoints, e.g. ["£20K" "£60K"].
(reg-sub
 :wh.search/salary-slider-min-max
 :<- [:wh.search/salary-range]
 :<- [:wh.search/salary-from]
 :<- [:wh.search/currency]
 (fn [[salary-range salary-from currency] _]
   (let [[min max] salary-range
         symbol (currency-symbols currency)
         local-min (or salary-from min)]
     [(str symbol (format-number local-min)) (str symbol (format-number max))])))

;; Formatted lower endpoint of the slider.
(reg-sub
 :wh.search/salary-slider-min
 :<- [:wh.search/salary-slider-min-max]
 (fn [[min] _] min))

;; Formatted upper endpoint of the slider.
(reg-sub
 :wh.search/salary-slider-max
 :<- [:wh.search/salary-slider-min-max]
 (fn [[_ max] _] max))

;; "Only my jobs" filter flag.
(reg-sub
 :wh.search/only-mine
 :<- [::search]
 (fn [search _]
   (:wh.search/only-mine search)))

;; Published/unpublished filter selection.
(reg-sub
 :wh.search/published
 :<- [::search]
 (fn [search _]
   (:wh.search/published search)))
(defn- published-option
  "Builds a checkbox description for the published filter. Looks up the
  facet count whose :value equals (str value)."
  [{:keys [wh.search/published-count]} value label]
  (let [matches? (fn [facet] (= (:value facet) (str value)))
        cnt (:count (first (filter matches? published-count)))]
    (checkbox-description {:value value :label label :cnt cnt})))
;; Checkbox options for the published filter, with facet counts.
(reg-sub
 :wh.search/published-options
 :<- [::search]
 (fn [search _]
   [(published-option search true "Published")
    (published-option search false "Unpublished")]))

;; --- "Pristine" flags: true when a filter group is untouched --------------

;; NOTE(review): competitive? is negated before the every?-not check —
;; presumably it is on by default; confirm against the search defaults.
(reg-sub
 ::salary-pristine?
 :<- [:wh.search/currency]
 :<- [:wh.search/salary-type]
 :<- [:wh.search/salary-min-max]
 :<- [:wh.search/show-competitive?]
 (fn [[currency salary-type salary-min-max competitive?] _]
   (every? not [currency salary-type salary-min-max (not competitive?)])))

(reg-sub
 ::query-pristine?
 :<- [::current-query]
 (fn [current-query _]
   (empty? current-query)))

(reg-sub
 ::tags-pristine?
 :<- [:wh.search/selected-tags]
 (fn [selected-tags _]
   (empty? selected-tags)))

(reg-sub
 ::locations-pristine?
 :<- [::selected-locations]
 :<- [:wh.search/sponsorship]
 :<- [:wh.search/remote]
 (fn [[selected-locations sponsorship remote] _]
   (and (empty? selected-locations)
        (every? not [sponsorship remote]))))

(reg-sub
 ::role-types-pristine?
 :<- [:wh.search/role-types]
 (fn [role-types _]
   (empty? role-types)))

;; --- Side panel: recommended vs recent jobs -------------------------------

;; Recommended jobs from the GraphQL cache; copies :company to :company-info.
(reg-sub
 ::recommended-jobs
 (fn [db _]
   (some->> (gqlc/cache-results landing-events/recommended-jobs db [:jobs])
            (map #(assoc % :company-info (:company %))))))

;; Most recent jobs from the GraphQL cache.
(reg-sub
 ::recent-jobs
 (fn [db _]
   (gqlc/cache-results landing-events/recent-jobs db [:recent-jobs :results])))

;; Jobs for the side panel: recommendations when available, otherwise recent.
(reg-sub
 ::side-jobs
 :<- [:user/has-recommendations?]
 :<- [::recommended-jobs]
 :<- [::recent-jobs]
 (fn [[has-recommendations? recommended-jobs recent-jobs]]
   (if has-recommendations? recommended-jobs recent-jobs)))

(reg-sub
 ::recommended-jobs-loading?
 (fn [db _]
   (gqlc/cache-loading? landing-events/recommended-jobs db)))

(reg-sub
 ::recent-jobs-loading?
 (fn [db _]
   (gqlc/cache-loading? landing-events/recent-jobs db)))

;; Loading flag matching whichever list ::side-jobs is showing.
(reg-sub
 ::side-jobs-loading?
 :<- [:user/has-recommendations?]
 :<- [::recommended-jobs-loading?]
 :<- [::recent-jobs-loading?]
 (fn [[has-recommendations? recommended-jobs-loading? recent-jobs-loading?]]
   (if has-recommendations? recommended-jobs-loading? recent-jobs-loading?)))
|
eac54b78226e66ff5c2911b0d4c6be57ff261e2ea66b8685392571eb488a7c4b | haskell/directory | Directory.hs | -----------------------------------------------------------------------------
-- |
-- Module : System.Directory
-- Copyright   :  (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer :
-- Stability : stable
-- Portability : portable
--
-- System-independent interface to directory manipulation (FilePath API).
--
-----------------------------------------------------------------------------
module System.Directory
(
-- $intro
-- * Actions on directories
createDirectory
, createDirectoryIfMissing
, removeDirectory
, removeDirectoryRecursive
, removePathForcibly
, renameDirectory
, listDirectory
, getDirectoryContents
-- ** Current working directory
, getCurrentDirectory
, setCurrentDirectory
, withCurrentDirectory
-- * Pre-defined directories
, getHomeDirectory
, XdgDirectory(..)
, getXdgDirectory
, XdgDirectoryList(..)
, getXdgDirectoryList
, getAppUserDataDirectory
, getUserDocumentsDirectory
, getTemporaryDirectory
-- * Actions on files
, removeFile
, renameFile
, renamePath
, copyFile
, copyFileWithMetadata
, getFileSize
, canonicalizePath
, makeAbsolute
, makeRelativeToCurrentDirectory
-- * Existence tests
, doesPathExist
, doesFileExist
, doesDirectoryExist
, findExecutable
, findExecutables
, findExecutablesInDirectories
, findFile
, findFiles
, findFileWith
, findFilesWith
, exeExtension
-- * Symbolic links
, createFileLink
, createDirectoryLink
, removeDirectoryLink
, pathIsSymbolicLink
, getSymbolicLinkTarget
-- * Permissions
-- $permissions
, Permissions
, emptyPermissions
, readable
, writable
, executable
, searchable
, setOwnerReadable
, setOwnerWritable
, setOwnerExecutable
, setOwnerSearchable
, getPermissions
, setPermissions
, copyPermissions
-- * Timestamps
, getAccessTime
, getModificationTime
, setAccessTime
, setModificationTime
-- * Deprecated
, isSymbolicLink
) where
import Prelude ()
import System.Directory.Internal
import System.Directory.Internal.Prelude
import Data.Time (UTCTime)
import System.OsPath (decodeFS, encodeFS)
import qualified System.Directory.OsPath as D
{- $intro
A directory contains a series of entries, each of which is a named
reference to a file system object (file, directory etc.). Some
entries may be hidden, inaccessible, or have some administrative
function (e.g. @.@ or @..@ under
< POSIX>), but in
this standard all such entries are considered to form part of the
directory contents. Entries in sub-directories are not, however,
considered to form part of the directory contents.

Each file system object is referenced by a /path/. There is
normally at least one absolute path to each file system object. In
some operating systems, it may also be possible to have paths which
are relative to the current directory.

Unless otherwise documented:

* 'IO' operations in this package may throw any 'IOError'. No other types of
exceptions shall be thrown.

* The list of possible 'IOErrorType's in the API documentation is not
exhaustive. The full list may vary by platform and/or evolve over time.

-}
-----------------------------------------------------------------------------
-- Permissions
{- $permissions

directory offers a limited (and quirky) interface for reading and setting file
and directory permissions; see 'getPermissions' and 'setPermissions' for a
discussion of their limitations. Because permissions are very difficult to
implement portably across different platforms, users who wish to do more
sophisticated things with permissions are advised to use other,
platform-specific libraries instead. For example, if you are only interested
in permissions on POSIX-like platforms,
<-Posix-Files.html unix>
offers much more flexibility.

The 'Permissions' type is used to record whether certain operations are
permissible on a file\/directory. 'getPermissions' and 'setPermissions'
get and set these permissions, respectively. Permissions apply both to
files and directories. For directories, the executable field will be
'False', and for files the searchable field will be 'False'. Note that
directories may be searchable without being readable, if permission has
been given to use them as part of a path, but not to examine the
directory contents.

Note that to change some, but not all permissions, a construct on the following lines must be used.

> makeReadable f = do
>   p <- getPermissions f
>   setPermissions f (p {readable = True})

-}
-- | A 'Permissions' value with all four flags ('readable', 'writable',
-- 'executable', 'searchable') set to 'False'.
emptyPermissions :: Permissions
emptyPermissions = Permissions {
                     readable = False,
                     writable = False,
                     executable = False,
                     searchable = False
                   }
-- | Return the given 'Permissions' with the 'readable' flag set to @b@.
setOwnerReadable :: Bool -> Permissions -> Permissions
setOwnerReadable b p = p { readable = b }

-- | Return the given 'Permissions' with the 'writable' flag set to @b@.
setOwnerWritable :: Bool -> Permissions -> Permissions
setOwnerWritable b p = p { writable = b }

-- | Return the given 'Permissions' with the 'executable' flag set to @b@.
setOwnerExecutable :: Bool -> Permissions -> Permissions
setOwnerExecutable b p = p { executable = b }

-- | Return the given 'Permissions' with the 'searchable' flag set to @b@.
setOwnerSearchable :: Bool -> Permissions -> Permissions
setOwnerSearchable b p = p { searchable = b }
-- | Get the permissions of a file or directory.
--
-- On Windows, the 'writable' permission corresponds to the "read-only"
-- attribute. The 'executable' permission is set if the file extension is of
-- an executable file type. The 'readable' permission is always set.
--
-- On POSIX systems, this returns the result of @access@.
--
-- The operation may fail with:
--
-- * 'isPermissionError' if the user is not permitted to access the
-- permissions, or
--
-- * 'isDoesNotExistError' if the file or directory does not exist.
getPermissions :: FilePath -> IO Permissions
-- Encode to an OsPath, then delegate to the OsPath implementation.
getPermissions path = encodeFS path >>= D.getPermissions
-- | Set the permissions of a file or directory.
--
-- On Windows, this is only capable of changing the 'writable' permission,
-- which corresponds to the "read-only" attribute. Changing the other
-- permissions has no effect.
--
-- On POSIX systems, this sets the /owner/ permissions.
--
-- The operation may fail with:
--
-- * 'isPermissionError' if the user is not permitted to set the permissions,
-- or
--
-- * 'isDoesNotExistError' if the file or directory does not exist.
setPermissions :: FilePath -> Permissions -> IO ()
-- Encode to an OsPath, then delegate to the OsPath implementation.
setPermissions path p = do
  path' <- encodeFS path
  D.setPermissions path' p
-- | Copy the permissions of one file to another. This reproduces the
-- permissions more accurately than using 'getPermissions' followed by
-- 'setPermissions'.
--
-- On Windows, this copies only the read-only attribute.
--
-- On POSIX systems, this is equivalent to @stat@ followed by @chmod@.
copyPermissions :: FilePath -> FilePath -> IO ()
-- Encode both paths, then delegate to the OsPath implementation.
copyPermissions src dst =
  encodeFS src >>= \src' ->
  encodeFS dst >>= \dst' ->
  D.copyPermissions src' dst'
-----------------------------------------------------------------------------
-- Implementation
{-|@'createDirectory' dir@ creates a new directory @dir@ which is
initially empty, or as near to empty as the operating system
allows.

The operation may fail with:

* 'isPermissionError'
The process has insufficient privileges to perform the operation.
@[EROFS, EACCES]@

* 'isAlreadyExistsError'
The operand refers to a directory that already exists.
@ [EEXIST]@

* @HardwareFault@
A physical I\/O error has occurred.
@[EIO]@

* @InvalidArgument@
The operand is not a valid directory name.
@[ENAMETOOLONG, ELOOP]@

* 'isDoesNotExistError'
There is no path to the directory.
@[ENOENT, ENOTDIR]@

* 'System.IO.isFullError'
Insufficient resources (virtual memory, process file descriptors,
physical disk space, etc.) are available to perform the operation.
@[EDQUOT, ENOSPC, ENOMEM, EMLINK]@

* @InappropriateType@
The path refers to an existing non-directory object.
@[EEXIST]@

-}
createDirectory :: FilePath -> IO ()
-- Encode to an OsPath, then delegate to the OsPath implementation.
createDirectory path = encodeFS path >>= D.createDirectory
-- | @'createDirectoryIfMissing' parents dir@ creates a new directory
-- @dir@ if it doesn\'t exist. If the first argument is 'True'
-- the function will also create all parent directories if they are missing.
createDirectoryIfMissing :: Bool      -- ^ Create its parents too?
                         -> FilePath  -- ^ The path to the directory you want to make
                         -> IO ()
-- Encode to an OsPath, then delegate to the OsPath implementation.
createDirectoryIfMissing cp path = encodeFS path >>= D.createDirectoryIfMissing cp
{-|@'removeDirectory' dir@ removes an existing directory /dir/. The
implementation may specify additional constraints which must be
satisfied before a directory can be removed (e.g. the directory has to
be empty, or may not be in use by other processes). It is not legal
for an implementation to partially remove a directory unless the
entire directory is removed. A conformant implementation need not
support directory removal in all situations (e.g. removal of the root
directory).

The operation may fail with:

* @HardwareFault@
A physical I\/O error has occurred.
@[EIO]@

* @InvalidArgument@
The operand is not a valid directory name.
@[ENAMETOOLONG, ELOOP]@

* 'isDoesNotExistError'
The directory does not exist.
@[ENOENT, ENOTDIR]@

* 'isPermissionError'
The process has insufficient privileges to perform the operation.
@[EROFS, EACCES, EPERM]@

* @UnsatisfiedConstraints@
Implementation-dependent constraints are not satisfied.
@[EBUSY, ENOTEMPTY, EEXIST]@

* @UnsupportedOperation@
The implementation does not support removal in this situation.
@[EINVAL]@

* @InappropriateType@
The operand refers to an existing non-directory object.
@[ENOTDIR]@

-}
removeDirectory :: FilePath -> IO ()
-- Encode to an OsPath, then delegate to the OsPath implementation.
removeDirectory path = encodeFS path >>= D.removeDirectory
-- | @'removeDirectoryRecursive' dir@ removes an existing directory /dir/
-- together with its contents and subdirectories. Within this directory,
-- symbolic links are removed without affecting their targets.
--
-- On Windows, the operation fails if /dir/ is a directory symbolic link.
--
-- This operation is reported to be flaky on Windows so retry logic may
-- be advisable. See:
removeDirectoryRecursive :: FilePath -> IO ()
-- Encode to an OsPath, then delegate to the OsPath implementation.
removeDirectoryRecursive path = encodeFS path >>= D.removeDirectoryRecursive
-- | Removes a file or directory at /path/ together with its contents and
-- subdirectories. Symbolic links are removed without affecting their
-- targets. If the path does not exist, nothing happens.
--
-- Unlike other removal functions, this function will also attempt to delete
-- files marked as read-only or otherwise made unremovable due to permissions.
-- As a result, if the removal is incomplete, the permissions or attributes on
-- the remaining files may be altered. If there are hard links in the
-- directory, then permissions on all related hard links may be altered.
--
-- If an entry within the directory vanishes while @removePathForcibly@ is
-- running, it is silently ignored.
--
-- If an exception occurs while removing an entry, @removePathForcibly@ will
-- still try to remove as many entries as it can before failing with an
-- exception. The first exception that it encountered is re-thrown.
--
-- @since 1.2.7.0
removePathForcibly :: FilePath -> IO ()
-- Encode to an OsPath, then delegate to the OsPath implementation.
removePathForcibly path = encodeFS path >>= D.removePathForcibly
{-|'removeFile' /file/ removes the directory entry for an existing file
/file/, where /file/ is not itself a directory. The
implementation may specify additional constraints which must be
satisfied before a file can be removed (e.g. the file may not be in
use by other processes).

The operation may fail with:

* @HardwareFault@
A physical I\/O error has occurred.
@[EIO]@

* @InvalidArgument@
The operand is not a valid file name.
@[ENAMETOOLONG, ELOOP]@

* 'isDoesNotExistError'
The file does not exist.
@[ENOENT, ENOTDIR]@

* 'isPermissionError'
The process has insufficient privileges to perform the operation.
@[EROFS, EACCES, EPERM]@

* @UnsatisfiedConstraints@
Implementation-dependent constraints are not satisfied.
@[EBUSY]@

* @InappropriateType@
The operand refers to an existing directory.
@[EPERM, EINVAL]@

-}
removeFile :: FilePath -> IO ()
-- Encode to an OsPath, then delegate to the OsPath implementation.
removeFile path = encodeFS path >>= D.removeFile
{-|@'renameDirectory' old new@ changes the name of an existing
directory from /old/ to /new/. If the /new/ directory
already exists, it is atomically replaced by the /old/ directory.
If the /new/ directory is neither the /old/ directory nor an
alias of the /old/ directory, it is removed as if by
'removeDirectory'. A conformant implementation need not support
renaming directories in all situations (e.g. renaming to an existing
directory, or across different physical devices), but the constraints
must be documented.

On Win32 platforms, @renameDirectory@ fails if the /new/ directory already
exists.

The operation may fail with:

* @HardwareFault@
A physical I\/O error has occurred.
@[EIO]@

* @InvalidArgument@
Either operand is not a valid directory name.
@[ENAMETOOLONG, ELOOP]@

* 'isDoesNotExistError'
The original directory does not exist, or there is no path to the target.
@[ENOENT, ENOTDIR]@

* 'isPermissionError'
The process has insufficient privileges to perform the operation.
@[EROFS, EACCES, EPERM]@

* 'System.IO.isFullError'
Insufficient resources are available to perform the operation.
@[EDQUOT, ENOSPC, ENOMEM, EMLINK]@

* @UnsatisfiedConstraints@
Implementation-dependent constraints are not satisfied.
@[EBUSY, ENOTEMPTY, EEXIST]@

* @UnsupportedOperation@
The implementation does not support renaming in this situation.
@[EINVAL, EXDEV]@

* @InappropriateType@
Either path refers to an existing non-directory object.
@[ENOTDIR, EISDIR]@

-}
renameDirectory :: FilePath -> FilePath -> IO ()
-- Encode both paths, then delegate to the OsPath implementation.
renameDirectory opath npath =
  encodeFS opath >>= \old ->
  encodeFS npath >>= \new ->
  D.renameDirectory old new
{-|@'renameFile' old new@ changes the name of an existing file system
object from /old/ to /new/. If the /new/ object already exists, it is
replaced by the /old/ object. Neither path may refer to an existing
directory. A conformant implementation need not support renaming files
in all situations (e.g. renaming across different physical devices), but
the constraints must be documented.

On Windows, this calls @MoveFileEx@ with @MOVEFILE_REPLACE_EXISTING@ set,
which is not guaranteed to be atomic
(<>).

On other platforms, this operation is atomic.

The operation may fail with:

* @HardwareFault@
A physical I\/O error has occurred.
@[EIO]@

* @InvalidArgument@
Either operand is not a valid file name.
@[ENAMETOOLONG, ELOOP]@

* 'isDoesNotExistError'
The original file does not exist, or there is no path to the target.
@[ENOENT, ENOTDIR]@

* 'isPermissionError'
The process has insufficient privileges to perform the operation.
@[EROFS, EACCES, EPERM]@

* 'System.IO.isFullError'
Insufficient resources are available to perform the operation.
@[EDQUOT, ENOSPC, ENOMEM, EMLINK]@

* @UnsatisfiedConstraints@
Implementation-dependent constraints are not satisfied.
@[EBUSY]@

* @UnsupportedOperation@
The implementation does not support renaming in this situation.
@[EXDEV]@

* @InappropriateType@
Either path refers to an existing directory.
@[ENOTDIR, EISDIR, EINVAL, EEXIST, ENOTEMPTY]@

-}
renameFile :: FilePath -> FilePath -> IO ()
-- Encode both paths, then delegate to the OsPath implementation.
renameFile opath npath =
  encodeFS opath >>= \old ->
  encodeFS npath >>= \new ->
  D.renameFile old new
-- | Rename a file or directory. If the destination path already exists, it
-- is replaced atomically. The destination path must not point to an existing
-- directory. A conformant implementation need not support renaming files in
-- all situations (e.g. renaming across different physical devices), but the
-- constraints must be documented.
--
-- The operation may fail with:
--
-- * @HardwareFault@
--   A physical I\/O error has occurred.
-- @[EIO]@
--
-- * @InvalidArgument@
-- Either operand is not a valid file name.
--   @[ENAMETOOLONG, ELOOP]@
--
-- * 'isDoesNotExistError'
-- The original file does not exist, or there is no path to the target.
-- @[ENOENT, ENOTDIR]@
--
-- * 'isPermissionError'
-- The process has insufficient privileges to perform the operation.
--   @[EROFS, EACCES, EPERM]@
--
-- * 'System.IO.isFullError'
-- Insufficient resources are available to perform the operation.
--   @[EDQUOT, ENOSPC, ENOMEM, EMLINK]@
--
-- * @UnsatisfiedConstraints@
-- Implementation-dependent constraints are not satisfied.
--   @[EBUSY]@
--
-- * @UnsupportedOperation@
-- The implementation does not support renaming in this situation.
-- @[EXDEV]@
--
-- * @InappropriateType@
--   Either the destination path refers to an existing directory, or one of the
-- parent segments in the destination path is not a directory.
--   @[ENOTDIR, EISDIR, EINVAL, EEXIST, ENOTEMPTY]@
--
@since 1.2.7.0
renamePath :: FilePath -- ^ Old path
           -> FilePath -- ^ New path
           -> IO ()
renamePath opath npath =
  -- Encode both paths to the platform representation, then delegate.
  encodeFS opath >>= \opath' ->
    encodeFS npath >>= \npath' ->
      D.renamePath opath' npath'
-- | Copy a file with its permissions. If the destination file already exists,
-- it is replaced atomically. Neither path may refer to an existing
-- directory. No exceptions are thrown if the permissions could not be
-- copied.
copyFile :: FilePath -- ^ Source filename
         -> FilePath -- ^ Destination filename
         -> IO ()
copyFile fromFPath toFPath =
  -- Encode both paths to the platform representation, then delegate.
  encodeFS fromFPath >>= \fromFPath' ->
    encodeFS toFPath >>= \toFPath' ->
      D.copyFile fromFPath' toFPath'
-- | Copy a file with its associated metadata. If the destination file
-- already exists, it is overwritten. There is no guarantee of atomicity in
-- the replacement of the destination file. Neither path may refer to an
-- existing directory. If the source and/or destination are symbolic links,
-- the copy is performed on the targets of the links.
--
On Windows , it behaves like the Win32 function
-- <-us/library/windows/desktop/aa363851.aspx CopyFile>,
-- which copies various kinds of metadata including file attributes and
-- security resource properties.
--
-- On Unix-like systems, permissions, access time, and modification time are
-- preserved. If possible, the owner and group are also preserved. Note that
-- the very act of copying can change the access time of the source file,
hence the access times of the two files may differ after the operation
-- completes.
--
-- @since 1.2.6.0
copyFileWithMetadata :: FilePath -- ^ Source file
                     -> FilePath -- ^ Destination file
                     -> IO ()
copyFileWithMetadata src dst =
  -- Encode both paths to the platform representation, then delegate.
  encodeFS src >>= \src' ->
    encodeFS dst >>= \dst' ->
      D.copyFileWithMetadata src' dst'
-- | Make a path absolute, normalize the path, and remove as many indirections
-- from it as possible.  Any trailing path separators are discarded via
-- 'dropTrailingPathSeparator'.  Additionally, on Windows the letter case of
-- the path is canonicalized.
--
-- __Note__: This function is a very big hammer.  If you only need an absolute
-- path, 'makeAbsolute' is sufficient for removing dependence on the current
-- working directory.
--
-- Indirections include the two special directories @.@ and @..@, as well as
-- any symbolic links (and junction points on Windows).  The input path need
-- not point to an existing file or directory.  Canonicalization is performed
-- on the longest prefix of the path that points to an existing file or
-- directory.  The remaining portion of the path that does not point to an
-- existing file or directory will still be normalized, but case
-- canonicalization and indirection removal are skipped as they are impossible
-- to do on a nonexistent path.
--
-- Most programs should not worry about the canonicity of a path.  In
-- particular, despite the name, the function does not truly guarantee
-- canonicity of the returned path due to the presence of hard links, mount
-- points, etc.
--
-- If the path points to an existing file or directory, then the output path
-- shall also point to the same file or directory, subject to the condition
-- that the relevant parts of the file system do not change while the function
-- is still running.  In other words, the function is definitively not atomic.
-- The results can be utterly wrong if the portions of the path change while
-- this function is running.
--
-- Since some indirections (symbolic links on all systems, @..@ on non-Windows
-- systems, and junction points on Windows) are dependent on the state of the
-- existing filesystem, the function can only make a conservative attempt by
-- removing such indirections from the longest prefix of the path that still
-- points to an existing file or directory.
--
-- Note that on Windows parent directories @..@ are always fully expanded
-- before the symbolic links, as consistent with the rest of the Windows API
-- (such as @GetFullPathName@).  In contrast, on POSIX systems parent
-- directories @..@ are expanded alongside symbolic links from left to right.
-- To put this more concretely: if @L@ is a symbolic link for @R/P@, then on
-- Windows @L\\..@ refers to @.@, whereas on other operating systems @L/..@
-- refers to @R@.
--
-- Similar to 'System.FilePath.normalise', passing an empty path is equivalent
-- to passing the current directory.
--
-- @canonicalizePath@ can resolve at least 64 indirections in a single path,
-- more than what is supported by most operating systems.  Therefore, it may
-- return the fully resolved path even though the operating system itself
-- would have long given up.
--
-- On Windows XP or earlier systems, junction expansion is not performed due
-- to their lack of @GetFinalPathNameByHandle@.
--
-- /Changes since 1.2.3.0:/ The function has been altered to be more robust
-- and has the same exception behavior as 'makeAbsolute'.
--
-- /Changes since 1.3.0.0:/ The function no longer preserves the trailing path
-- separator.  File symbolic links that appear in the middle of a path are
-- properly dereferenced.  Case canonicalization and symbolic link expansion
-- are now performed on Windows.
--
canonicalizePath :: FilePath -> IO FilePath
canonicalizePath path = do
  path' <- encodeFS path
  canonical <- D.canonicalizePath path'
  decodeFS canonical
-- | Convert a path into an absolute path. If the given path is relative, the
-- current directory is prepended and then the combined result is normalized.
-- If the path is already absolute, the path is simply normalized. The
-- function preserves the presence or absence of the trailing path separator
-- unless the path refers to the root directory @/@.
--
-- If the path is already absolute, the operation never fails. Otherwise, the
-- operation may fail with the same exceptions as 'getCurrentDirectory'.
--
-- @since 1.2.2.0
--
makeAbsolute :: FilePath -> IO FilePath
makeAbsolute path = do
  path' <- encodeFS path
  absPath <- D.makeAbsolute path'
  decodeFS absPath
-- | Construct a path relative to the current directory, similar to
-- 'makeRelative'.
--
-- The operation may fail with the same exceptions as 'getCurrentDirectory'.
makeRelativeToCurrentDirectory :: FilePath -> IO FilePath
makeRelativeToCurrentDirectory path = do
  path' <- encodeFS path
  rel <- D.makeRelativeToCurrentDirectory path'
  decodeFS rel
-- | Given the name or path of an executable file, 'findExecutable' searches
-- for such a file in a list of system-defined locations, which generally
-- includes @PATH@ and possibly more.  The full path to the executable is
-- returned if found.  For example, @(findExecutable \"ghc\")@ would normally
-- give you the path to GHC.
--
-- The path returned by @'findExecutable' name@ corresponds to the program
-- that would be executed by
-- @<http://hackage.haskell.org/package/process/docs/System-Process.html#v:createProcess createProcess>@
-- when passed the same string (as a @RawCommand@, not a @ShellCommand@),
-- provided that @name@ is not a relative path with more than one segment.
--
-- On Windows, 'findExecutable' calls the Win32 function
-- @<https://msdn.microsoft.com/en-us/library/aa365527.aspx SearchPath>@,
-- which may search other places before checking the directories in the @PATH@
-- environment variable.  Where it actually searches depends on registry
-- settings, but notably includes the directory containing the current
-- executable.
--
-- On non-Windows platforms, the behavior is equivalent to 'findFileWith'
-- using the search directories from the @PATH@ environment variable and
-- testing each file for executable permissions.  Details can be found in the
-- documentation of 'findFileWith'.
findExecutable :: String -> IO (Maybe FilePath)
findExecutable binary = do
  binary' <- encodeFS binary
  found <- D.findExecutable binary'
  -- Decode the result (if any) back to a FilePath.
  for found decodeFS
-- | Search for executable files in a list of system-defined locations, which
-- generally includes @PATH@ and possibly more.
--
On Windows , this /only returns the first occurrence/ , if any . Its behavior
-- is therefore equivalent to 'findExecutable'.
--
On non - Windows platforms , the behavior is equivalent to
-- 'findExecutablesInDirectories' using the search directories from the @PATH@
-- environment variable. Details can be found in the documentation of
-- 'findExecutablesInDirectories'.
--
-- @since 1.2.2.0
findExecutables :: String -> IO [FilePath]
findExecutables binary = do
  binary' <- encodeFS binary
  found <- D.findExecutables binary'
  -- Decode every occurrence back to a FilePath.
  for found decodeFS
-- | Given a name or path, 'findExecutable' appends the 'exeExtension' to the
-- query and searches for executable files in the list of given search
-- directories and returns all occurrences.
--
-- The behavior is equivalent to 'findFileWith' using the given search
-- directories and testing each file for executable permissions. Details can
-- be found in the documentation of 'findFileWith'.
--
-- Unlike other similarly named functions, 'findExecutablesInDirectories' does
-- not use @SearchPath@ from the Win32 API. The behavior of this function on
Windows is therefore equivalent to those on non - Windows platforms .
--
@since 1.2.4.0
findExecutablesInDirectories :: [FilePath] -> String -> IO [FilePath]
findExecutablesInDirectories path binary =
  for path encodeFS >>= \path' ->
    encodeFS binary >>= \binary' ->
      D.findExecutablesInDirectories path' binary' >>= (`for` decodeFS)
-- | Search through the given list of directories for the given file.
--
The behavior is equivalent to ' findFileWith ' , returning only the first
-- occurrence. Details can be found in the documentation of 'findFileWith'.
findFile :: [FilePath] -> String -> IO (Maybe FilePath)
-- Accept every candidate: the predicate is constantly True.
findFile = findFileWith (const (pure True))
-- | Search through the given list of directories for the given file and
-- returns all paths where the given file exists.
--
-- The behavior is equivalent to 'findFilesWith'. Details can be found in the
-- documentation of 'findFilesWith'.
--
@since 1.2.1.0
findFiles :: [FilePath] -> String -> IO [FilePath]
-- Accept every candidate: the predicate is constantly True.
findFiles = findFilesWith (const (pure True))
-- | Search through a given list of directories for a file that has the given
name and satisfies the given predicate and return the path of the first
-- occurrence. The directories are checked in a left-to-right order.
--
This is essentially a more performant version of ' findFilesWith ' that
always returns the first result , if any . Details can be found in the
-- documentation of 'findFilesWith'.
--
-- @since 1.2.6.0
findFileWith :: (FilePath -> IO Bool) -> [FilePath] -> String -> IO (Maybe FilePath)
findFileWith f ds name =
  for ds encodeFS >>= \ds' ->
    encodeFS name >>= \name' -> do
      -- The internal predicate works on OsPath, so decode before applying f.
      found <- D.findFileWith (\p -> decodeFS p >>= f) ds' name'
      for found decodeFS
| @findFilesWith predicate dirs name@ searches through the list of
-- directories (@dirs@) for files that have the given @name@ and satisfy the
-- given @predicate@ and returns the paths of those files. The directories
-- are checked in a left-to-right order and the paths are returned in the same
-- order.
--
If the @name@ is a relative path , then for every search directory @dir@ ,
the function checks whether ' < / > ' name@ exists and satisfies the
predicate . If so , ' < / > ' name@ is returned as one of the results . In
-- other words, the returned paths can be either relative or absolute
-- depending on the search directories were used. If there are no search
-- directories, no results are ever returned.
--
If the @name@ is an absolute path , then the function will return a single
-- result if the file exists and satisfies the predicate and no results
-- otherwise. This is irrespective of what search directories were given.
--
@since 1.2.1.0
findFilesWith :: (FilePath -> IO Bool) -> [FilePath] -> String -> IO [FilePath]
findFilesWith f ds name =
  for ds encodeFS >>= \ds' ->
    encodeFS name >>= \name' ->
      -- The internal predicate works on OsPath, so decode before applying f.
      D.findFilesWith (\p -> decodeFS p >>= f) ds' name' >>= (`for` decodeFS)
-- | Filename extension for executable files (including the dot if any)
( usually @\"\"@ on POSIX systems and @\".exe\"@ on Windows or OS\/2 ) .
--
@since 1.2.4.0
exeExtension :: String
-- NOTE(review): 'so' appears to convert the platform-native constant
-- 'D.exeExtension' to a 'String' -- confirm against the definition of 'so'
-- elsewhere in this module.
exeExtension = so D.exeExtension
-- | Similar to 'listDirectory', but always includes the special entries (@.@
and @ .. @ ) . ( This applies to Windows as well . )
--
-- The operation may fail with the same exceptions as 'listDirectory'.
getDirectoryContents :: FilePath -> IO [FilePath]
getDirectoryContents path = do
  path' <- encodeFS path
  entries <- D.getDirectoryContents path'
  for entries decodeFS
-- | @'listDirectory' dir@ returns a list of /all/ entries in /dir/ without
-- the special entries (@.@ and @..@).
--
-- The operation may fail with:
--
-- * @HardwareFault@
-- A physical I\/O error has occurred.
-- @[EIO]@
--
-- * @InvalidArgument@
-- The operand is not a valid directory name.
-- @[ENAMETOOLONG, ELOOP]@
--
-- * 'isDoesNotExistError'
-- The directory does not exist.
-- @[ENOENT, ENOTDIR]@
--
-- * 'isPermissionError'
-- The process has insufficient privileges to perform the operation.
-- @[EACCES]@
--
-- * 'System.IO.isFullError'
-- Insufficient resources are available to perform the operation.
-- @[EMFILE, ENFILE]@
--
-- * @InappropriateType@
-- The path refers to an existing non-directory object.
-- @[ENOTDIR]@
--
-- @since 1.2.5.0
--
listDirectory :: FilePath -> IO [FilePath]
listDirectory path = do
  path' <- encodeFS path
  entries <- D.listDirectory path'
  for entries decodeFS
-- | Obtain the current working directory as an absolute path.
--
-- In a multithreaded program, the current working directory is a global state
-- shared among all threads of the process.  Therefore, when performing
-- filesystem operations from multiple threads, it is highly recommended to
-- use absolute rather than relative paths (see: 'makeAbsolute').
--
-- Note that 'getCurrentDirectory' is not guaranteed to return the same path
-- received by 'setCurrentDirectory'. On POSIX systems, the path returned will
-- always be fully dereferenced (not contain any symbolic links). For more
-- information, refer to the documentation of
-- <https://pubs.opengroup.org/onlinepubs/9699919799/functions/getcwd.html getcwd>.
--
-- The operation may fail with:
--
-- * @HardwareFault@
-- A physical I\/O error has occurred.
-- @[EIO]@
--
-- * 'isDoesNotExistError'
-- There is no path referring to the working directory.
-- @[EPERM, ENOENT, ESTALE...]@
--
-- * 'isPermissionError'
-- The process has insufficient privileges to perform the operation.
-- @[EACCES]@
--
-- * 'System.IO.isFullError'
-- Insufficient resources are available to perform the operation.
--
-- * @UnsupportedOperation@
-- The operating system has no notion of current working directory.
--
getCurrentDirectory :: IO FilePath
getCurrentDirectory = do
  cwd <- D.getCurrentDirectory
  decodeFS cwd
-- | Change the working directory to the given path.
--
-- In a multithreaded program, the current working directory is a global state
-- shared among all threads of the process.  Therefore, when performing
-- filesystem operations from multiple threads, it is highly recommended to
-- use absolute rather than relative paths (see: 'makeAbsolute').
--
-- The operation may fail with:
--
-- * @HardwareFault@
-- A physical I\/O error has occurred.
-- @[EIO]@
--
-- * @InvalidArgument@
-- The operand is not a valid directory name.
-- @[ENAMETOOLONG, ELOOP]@
--
-- * 'isDoesNotExistError'
-- The directory does not exist.
-- @[ENOENT, ENOTDIR]@
--
-- * 'isPermissionError'
-- The process has insufficient privileges to perform the operation.
-- @[EACCES]@
--
-- * @UnsupportedOperation@
-- The operating system has no notion of current working directory, or the
-- working directory cannot be dynamically changed.
--
-- * @InappropriateType@
-- The path refers to an existing non-directory object.
-- @[ENOTDIR]@
--
setCurrentDirectory :: FilePath -> IO ()
setCurrentDirectory path = encodeFS path >>= D.setCurrentDirectory
-- | Run an 'IO' action with the given working directory and restore the
-- original working directory afterwards, even if the given action fails due
-- to an exception.
--
-- The operation may fail with the same exceptions as 'getCurrentDirectory'
-- and 'setCurrentDirectory'.
--
@since 1.2.3.0
--
withCurrentDirectory :: FilePath  -- ^ Directory to execute in
                     -> IO a      -- ^ Action to be executed
                     -> IO a
withCurrentDirectory dir action = do
  dir' <- encodeFS dir
  D.withCurrentDirectory dir' action
-- | Obtain the size of a file in bytes.
--
@since 1.2.7.0
getFileSize :: FilePath -> IO Integer
getFileSize path = encodeFS path >>= D.getFileSize
-- | Test whether the given path points to an existing filesystem object. If
-- the user lacks necessary permissions to search the parent directories, this
-- function may return false even if the file does actually exist.
--
@since 1.2.7.0
doesPathExist :: FilePath -> IO Bool
doesPathExist path = encodeFS path >>= D.doesPathExist
{- |The operation 'doesDirectoryExist' returns 'True' if the argument file
exists and is either a directory or a symbolic link to a directory,
and 'False' otherwise.
-}
doesDirectoryExist :: FilePath -> IO Bool
doesDirectoryExist path = encodeFS path >>= D.doesDirectoryExist
{- |The operation 'doesFileExist' returns 'True'
if the argument file exists and is not a directory, and 'False' otherwise.
-}
doesFileExist :: FilePath -> IO Bool
doesFileExist path = encodeFS path >>= D.doesFileExist
-- | Create a /file/ symbolic link.  The target path can be either absolute or
-- relative and need not refer to an existing file.  The order of arguments
-- follows the POSIX convention.
--
-- To remove an existing file symbolic link, use 'removeFile'.
--
-- Although the distinction between /file/ symbolic links and /directory/
-- symbolic links does not exist on POSIX systems, on Windows this is an
-- intrinsic property of every symbolic link and cannot be changed without
-- recreating the link.  A file symbolic link that actually points to a
-- directory will fail to dereference and vice versa.  Moreover, creating
-- symbolic links on Windows may require privileges unavailable to users
-- outside the Administrators group.  Portable programs that use symbolic
-- links should take both into consideration.
--
-- On Windows, the function is implemented using @CreateSymbolicLink@.  Since
-- 1.3.3.0, the @SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE@ flag is included
-- if supported by the operating system.  On POSIX, the function uses @symlink@
-- and is therefore atomic.
--
-- Windows-specific errors: This operation may fail with 'permissionErrorType'
-- if the user lacks the privileges to create symbolic links.  It may also
-- fail with 'illegalOperationErrorType' if the file system does not support
-- symbolic links.
--
-- @since 1.3.1.0
createFileLink
  :: FilePath -- ^ path to the target file
  -> FilePath -- ^ path of the link to be created
  -> IO ()
createFileLink target link =
  encodeFS target >>= \target' ->
    encodeFS link >>= \link' ->
      D.createFileLink target' link'
-- | Create a /directory/ symbolic link.  The target path can be either
-- absolute or relative and need not refer to an existing directory.  The
-- order of arguments follows the POSIX convention.
--
-- To remove an existing directory symbolic link, use 'removeDirectoryLink'.
--
-- Although the distinction between /file/ symbolic links and /directory/
-- symbolic links does not exist on POSIX systems, on Windows this is an
-- intrinsic property of every symbolic link and cannot be changed without
-- recreating the link.  A file symbolic link that actually points to a
-- directory will fail to dereference and vice versa.  Moreover, creating
-- symbolic links on Windows may require privileges unavailable to users
-- outside the Administrators group.  Portable programs that use symbolic
-- links should take both into consideration.
--
-- On Windows, the function is implemented using @CreateSymbolicLink@ with
-- @SYMBOLIC_LINK_FLAG_DIRECTORY@.  Since 1.3.3.0, the
-- @SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE@ flag is also included if
-- supported by the operating system.  On POSIX, this is an alias for
-- 'createFileLink' and is therefore atomic.
--
-- Windows-specific errors: This operation may fail with 'permissionErrorType'
-- if the user lacks the privileges to create symbolic links.  It may also
-- fail with 'illegalOperationErrorType' if the file system does not support
-- symbolic links.
--
-- @since 1.3.1.0
createDirectoryLink
  :: FilePath -- ^ path to the target directory
  -> FilePath -- ^ path of the link to be created
  -> IO ()
createDirectoryLink target link =
  encodeFS target >>= \target' ->
    encodeFS link >>= \link' ->
      D.createDirectoryLink target' link'
-- | Remove an existing /directory/ symbolic link.
--
On Windows , this is an alias for ' removeDirectory ' . On POSIX systems , this
-- is an alias for 'removeFile'.
--
-- See also: 'removeFile', which can remove an existing /file/ symbolic link.
--
@since 1.3.1.0
removeDirectoryLink :: FilePath -> IO ()
removeDirectoryLink path = encodeFS path >>= D.removeDirectoryLink
| Check whether an existing @path@ is a symbolic link . If @path@ is a
-- regular file or directory, 'False' is returned. If @path@ does not exist
-- or is otherwise inaccessible, an exception is thrown (see below).
--
On Windows , this checks for In addition to
-- symbolic links, the function also returns true on junction points. On
POSIX systems , this checks for @S_IFLNK@.
--
-- The operation may fail with:
--
-- * 'isDoesNotExistError' if the symbolic link does not exist; or
--
-- * 'isPermissionError' if the user is not permitted to read the symbolic
-- link.
--
@since 1.3.0.0
pathIsSymbolicLink :: FilePath -> IO Bool
pathIsSymbolicLink path = encodeFS path >>= D.pathIsSymbolicLink
{-# DEPRECATED isSymbolicLink "Use 'pathIsSymbolicLink' instead" #-}
isSymbolicLink :: FilePath -> IO Bool
isSymbolicLink = pathIsSymbolicLink
-- | Retrieve the target path of either a file or directory symbolic link.
-- The returned path may not be absolute, may not exist, and may not even be a
-- valid path.
--
On Windows systems , this calls with
@FSCTL_GET_REPARSE_POINT@. In addition to symbolic links , the function
-- also works on junction points. On POSIX systems, this calls @readlink@.
--
-- Windows-specific errors: This operation may fail with
-- 'illegalOperationErrorType' if the file system does not support symbolic
-- links.
--
@since 1.3.1.0
getSymbolicLinkTarget :: FilePath -> IO FilePath
getSymbolicLinkTarget path = do
  path' <- encodeFS path
  target <- D.getSymbolicLinkTarget path'
  decodeFS target
-- | Obtain the time at which the file or directory was last accessed.
--
-- The operation may fail with:
--
-- * 'isPermissionError' if the user is not permitted to read
-- the access time; or
--
-- * 'isDoesNotExistError' if the file or directory does not exist.
--
-- Caveat for POSIX systems: This function returns a timestamp with sub-second
resolution only if this package is compiled against @unix-2.6.0.0@ or later
-- and the underlying filesystem supports them.
--
@since 1.2.3.0
--
getAccessTime :: FilePath -> IO UTCTime
getAccessTime path = encodeFS path >>= D.getAccessTime
-- | Obtain the time at which the file or directory was last modified.
--
-- The operation may fail with:
--
-- * 'isPermissionError' if the user is not permitted to read
-- the modification time; or
--
-- * 'isDoesNotExistError' if the file or directory does not exist.
--
-- Caveat for POSIX systems: This function returns a timestamp with sub-second
resolution only if this package is compiled against @unix-2.6.0.0@ or later
-- and the underlying filesystem supports them.
--
getModificationTime :: FilePath -> IO UTCTime
getModificationTime path = encodeFS path >>= D.getModificationTime
-- | Change the time at which the file or directory was last accessed.
--
-- The operation may fail with:
--
-- * 'isPermissionError' if the user is not permitted to alter the
-- access time; or
--
-- * 'isDoesNotExistError' if the file or directory does not exist.
--
-- Some caveats for POSIX systems:
--
-- * Not all systems support @utimensat@, in which case the function can only
-- emulate the behavior by reading the modification time and then setting
-- both the access and modification times together. On systems where
-- @utimensat@ is supported, the access time is set atomically with
-- nanosecond precision.
--
* If compiled against a version of @unix@ prior to @2.7.0.0@ , the function
-- would not be able to set timestamps with sub-second resolution. In this
-- case, there would also be loss of precision in the modification time.
--
@since 1.2.3.0
--
setAccessTime :: FilePath -> UTCTime -> IO ()
setAccessTime path atime = do
  path' <- encodeFS path
  D.setAccessTime path' atime
-- | Change the time at which the file or directory was last modified.
--
-- The operation may fail with:
--
-- * 'isPermissionError' if the user is not permitted to alter the
-- modification time; or
--
-- * 'isDoesNotExistError' if the file or directory does not exist.
--
-- Some caveats for POSIX systems:
--
-- * Not all systems support @utimensat@, in which case the function can only
-- emulate the behavior by reading the access time and then setting both the
-- access and modification times together. On systems where @utimensat@ is
-- supported, the modification time is set atomically with nanosecond
-- precision.
--
* If compiled against a version of @unix@ prior to @2.7.0.0@ , the function
-- would not be able to set timestamps with sub-second resolution. In this
-- case, there would also be loss of precision in the access time.
--
@since 1.2.3.0
--
setModificationTime :: FilePath -> UTCTime -> IO ()
setModificationTime path mtime = do
  path' <- encodeFS path
  D.setModificationTime path' mtime
{- |Returns the current user's home directory.

The directory returned is expected to be writable by the current user,
but note that it isn't generally considered good practice to store
application-specific data here; use 'getXdgDirectory' or
'getAppUserDataDirectory' instead.

On Unix, 'getHomeDirectory' behaves as follows:

* Returns $HOME env variable if set (including to an empty string).
* Otherwise uses home directory returned by `getpwuid_r` using the UID of the current process's user. This basically reads the /etc/passwd file. An empty home directory field is considered valid.

On Windows, the system is queried for a suitable path; a typical path might be @C:\/Users\//\<user\>/@.

The operation may fail with:

* @UnsupportedOperation@
The operating system has no notion of home directory.

* 'isDoesNotExistError'
The home directory for the current user does not exist, or
cannot be found.
-}
getHomeDirectory :: IO FilePath
getHomeDirectory = do
  home <- D.getHomeDirectory
  decodeFS home
-- | Obtain the paths to special directories for storing user-specific
-- application data, configuration, and cache files, conforming to the
-- <-spec/basedir-spec-latest.html XDG Base Directory Specification>.
-- Compared with 'getAppUserDataDirectory', this function provides a more
-- fine-grained hierarchy as well as greater flexibility for the user.
--
On Windows , ' XdgData ' and ' XdgConfig ' usually map to the same directory
-- unless overridden.
--
Refer to the docs of ' XdgDirectory ' for more details .
--
The second argument is usually the name of the application . Since it
-- will be integrated into the path, it must consist of valid path
characters . Note : if the second argument is an absolute path , it will
just return the second argument .
--
-- Note: The directory may not actually exist, in which case you would need
-- to create it with file mode @700@ (i.e. only accessible by the owner).
--
As of 1.3.5.0 , the environment variable is ignored if set to a relative
path , per revised XDG Base Directory Specification . See
-- < #100>.
--
@since 1.2.3.0
getXdgDirectory :: XdgDirectory -- ^ which special directory
                -> FilePath     -- ^ a relative path that is appended
                                -- to the path; if empty, the base
                                -- path is returned
                -> IO FilePath
getXdgDirectory xdgDir suffix = do
  suffix' <- encodeFS suffix
  dir <- D.getXdgDirectory xdgDir suffix'
  decodeFS dir
| Similar to ' getXdgDirectory ' but retrieves the entire list of XDG
-- directories.
--
On Windows , ' XdgDataDirs ' and ' XdgConfigDirs ' usually map to the same list
-- of directories unless overridden.
--
-- Refer to the docs of 'XdgDirectoryList' for more details.
getXdgDirectoryList :: XdgDirectoryList -- ^ which special directory list
                    -> IO [FilePath]
getXdgDirectoryList xdgDirs = do
  dirs <- D.getXdgDirectoryList xdgDirs
  for dirs decodeFS
-- | Obtain the path to a special directory for storing user-specific
-- application data (traditional Unix location). Newer applications may
prefer the the XDG - conformant location provided by ' getXdgDirectory '
-- (<#issuecomment-96521020 migration guide>).
--
-- The argument is usually the name of the application. Since it will be
-- integrated into the path, it must consist of valid path characters.
--
-- * On Unix-like systems, the path is @~\/./\<app\>/@.
* On Windows , the path is @%APPDATA%\//\<app\>/@
-- (e.g. @C:\/Users\//\<user\>/\/AppData\/Roaming\//\<app\>/@)
--
-- Note: the directory may not actually exist, in which case you would need
-- to create it. It is expected that the parent directory exists and is
-- writable.
--
-- The operation may fail with:
--
-- * @UnsupportedOperation@
-- The operating system has no notion of application-specific data
-- directory.
--
-- * 'isDoesNotExistError'
-- The home directory for the current user does not exist, or cannot be
-- found.
--
getAppUserDataDirectory :: FilePath -- ^ a relative path that is appended
                                    -- to the path
                        -> IO FilePath
getAppUserDataDirectory appName = do
  appName' <- encodeFS appName
  dir <- D.getAppUserDataDirectory appName'
  decodeFS dir
{- |Returns the current user's document directory.

The directory returned is expected to be writable by the current user,
but note that it isn't generally considered good practice to store
application-specific data here; use 'getXdgDirectory' or
'getAppUserDataDirectory' instead.

On Unix, 'getUserDocumentsDirectory' returns the value of the @HOME@
environment variable.  On Windows, the system is queried for a
suitable path; a typical path might be @C:\/Users\//\<user\>/\/Documents@.

The operation may fail with:

* @UnsupportedOperation@
The operating system has no notion of document directory.

* 'isDoesNotExistError'
The document directory for the current user does not exist, or
cannot be found.
-}
getUserDocumentsDirectory :: IO FilePath
getUserDocumentsDirectory = do
  docs <- D.getUserDocumentsDirectory
  decodeFS docs
{- |Returns the current directory for temporary files.

On Unix, 'getTemporaryDirectory' returns the value of the @TMPDIR@
environment variable or \"\/tmp\" if the variable isn\'t defined.
On Windows, the function checks for the existence of environment variables in
the following order and uses the first path found:

*
TMP environment variable.

*
TEMP environment variable.

*
USERPROFILE environment variable.

*
The Windows directory

The operation may fail with:

* @UnsupportedOperation@
The operating system has no notion of temporary directory.

The function doesn\'t verify whether the path exists.
-}
getTemporaryDirectory :: IO FilePath
getTemporaryDirectory = do
  tmp <- D.getTemporaryDirectory
  decodeFS tmp
| null | https://raw.githubusercontent.com/haskell/directory/c1895b93a49a9723c949eefef71b0855b4ee5d51/System/Directory.hs | haskell | ---------------------------------------------------------------------------
|
Module : System.Directory
License : BSD-style (see the file libraries/base/LICENSE)
Maintainer :
Stability : stable
Portability : portable
System-independent interface to directory manipulation (FilePath API).
---------------------------------------------------------------------------
$intro
* Actions on directories
** Current working directory
* Pre-defined directories
* Actions on files
* Existence tests
* Symbolic links
* Permissions
$permissions
* Timestamps
* Deprecated
---------------------------------------------------------------------------
Permissions
| Get the permissions of a file or directory.
attribute. The 'executable' permission is set if the file extension is of
an executable file type. The 'readable' permission is always set.
On POSIX systems, this returns the result of @access@.
The operation may fail with:
* 'isPermissionError' if the user is not permitted to access the
permissions, or
* 'isDoesNotExistError' if the file or directory does not exist.
| Set the permissions of a file or directory.
which corresponds to the "read-only" attribute. Changing the other
permissions has no effect.
On POSIX systems, this sets the /owner/ permissions.
The operation may fail with:
* 'isPermissionError' if the user is not permitted to set the permissions,
or
* 'isDoesNotExistError' if the file or directory does not exist.
permissions more accurately than using 'getPermissions' followed by
'setPermissions'.
---------------------------------------------------------------------------
Implementation
| @'createDirectoryIfMissing' parents dir@ creates a new directory
the function will also create all parent directories if they are missing.
^ Create its parents too?
^ The path to the directory you want to make
| @'removeDirectoryRecursive' dir@ removes an existing directory /dir/
together with its contents and subdirectories. Within this directory,
symbolic links are removed without affecting their targets.
be advisable. See:
| Removes a file or directory at /path/ together with its contents and
subdirectories. Symbolic links are removed without affecting their
targets. If the path does not exist, nothing happens.
Unlike other removal functions, this function will also attempt to delete
files marked as read-only or otherwise made unremovable due to permissions.
As a result, if the removal is incomplete, the permissions or attributes on
the remaining files may be altered. If there are hard links in the
directory, then permissions on all related hard links may be altered.
If an entry within the directory vanishes while @removePathForcibly@ is
running, it is silently ignored.
still try to remove as many entries as it can before failing with an
| Rename a file or directory. If the destination path already exists, it
is replaced atomically. The destination path must not point to an existing
directory. A conformant implementation need not support renaming files in
all situations (e.g. renaming across different physical devices), but the
constraints must be documented.
The operation may fail with:
* @HardwareFault@
@[EIO]@
Either operand is not a valid file name.
* 'isDoesNotExistError'
The original file does not exist, or there is no path to the target.
@[ENOENT, ENOTDIR]@
* 'isPermissionError'
The process has insufficient privileges to perform the operation.
* 'System.IO.isFullError'
Insufficient resources are available to perform the operation.
* @UnsatisfiedConstraints@
Implementation-dependent constraints are not satisfied.
* @UnsupportedOperation@
The implementation does not support renaming in this situation.
@[EXDEV]@
* @InappropriateType@
parent segments in the destination path is not a directory.
^ Old path
^ New path
| Copy a file with its permissions. If the destination file already exists,
it is replaced atomically. Neither path may refer to an existing
directory. No exceptions are thrown if the permissions could not be
copied.
^ Source filename
^ Destination filename
| Copy a file with its associated metadata. If the destination file
already exists, it is overwritten. There is no guarantee of atomicity in
the replacement of the destination file. Neither path may refer to an
existing directory. If the source and/or destination are symbolic links,
the copy is performed on the targets of the links.
<-us/library/windows/desktop/aa363851.aspx CopyFile>,
which copies various kinds of metadata including file attributes and
security resource properties.
On Unix-like systems, permissions, access time, and modification time are
preserved. If possible, the owner and group are also preserved. Note that
the very act of copying can change the access time of the source file,
completes.
@since 1.2.6.0
^ Source file
^ Destination file
| Make a path absolute, normalize the path, and remove as many indirections
from it as possible. Any trailing path separators are discarded via
the path is canonicalized.
__Note__: This function is a very big hammer. If you only need an absolute
working directory.
on the longest prefix of the path that points to an existing file or
directory. The remaining portion of the path that does not point to an
existing file or directory will still be normalized, but case
canonicalization and indirection removal are skipped as they are impossible
to do on a nonexistent path.
Most programs should not worry about the canonicity of a path. In
particular, despite the name, the function does not truly guarantee
canonicity of the returned path due to the presence of hard links, mount
points, etc.
If the path points to an existing file or directory, then the output path
shall also point to the same file or directory, subject to the condition
that the relevant parts of the file system do not change while the function
is still running. In other words, the function is definitively not atomic.
The results can be utterly wrong if the portions of the path change while
this function is running.
existing filesystem, the function can only make a conservative attempt by
removing such indirections from the longest prefix of the path that still
points to an existing file or directory.
directories @..@ are expanded alongside symbolic links from left to right.
To put this more concretely: if @L@ is a symbolic link for @R/P@, then on
refers to @R@.
Similar to 'System.FilePath.normalise', passing an empty path is equivalent
to passing the current directory.
more than what is supported by most operating systems. Therefore, it may
return the fully resolved path even though the operating system itself
would have long given up.
and has the same exception behavior as 'makeAbsolute'.
separator. File symbolic links that appear in the middle of a path are
properly dereferenced. Case canonicalization and symbolic link expansion
| Convert a path into an absolute path. If the given path is relative, the
current directory is prepended and then the combined result is normalized.
If the path is already absolute, the path is simply normalized. The
function preserves the presence or absence of the trailing path separator
unless the path refers to the root directory @/@.
If the path is already absolute, the operation never fails. Otherwise, the
operation may fail with the same exceptions as 'getCurrentDirectory'.
@since 1.2.2.0
| Construct a path relative to the current directory, similar to
'makeRelative'.
The operation may fail with the same exceptions as 'getCurrentDirectory'.
| Given the name or path of an executable file, 'findExecutable' searches
for such a file in a list of system-defined locations, which generally
includes @PATH@ and possibly more. The full path to the executable is
returned if found. For example, @(findExecutable \"ghc\")@ would normally
The path returned by @'findExecutable' name@ corresponds to the program
that would be executed by
@<-us/library/aa365527.aspx SearchPath>@,
which may search other places before checking the directories in the @PATH@
environment variable. Where it actually searches depends on registry
settings, but notably includes the directory containing the current
executable.
using the search directories from the @PATH@ environment variable and
testing each file for executable permissions. Details can be found in the
documentation of 'findFileWith'.
| Search for executable files in a list of system-defined locations, which
generally includes @PATH@ and possibly more.
is therefore equivalent to 'findExecutable'.
'findExecutablesInDirectories' using the search directories from the @PATH@
environment variable. Details can be found in the documentation of
'findExecutablesInDirectories'.
@since 1.2.2.0
| Given a name or path, 'findExecutable' appends the 'exeExtension' to the
query and searches for executable files in the list of given search
directories and returns all occurrences.
The behavior is equivalent to 'findFileWith' using the given search
directories and testing each file for executable permissions. Details can
be found in the documentation of 'findFileWith'.
Unlike other similarly named functions, 'findExecutablesInDirectories' does
not use @SearchPath@ from the Win32 API. The behavior of this function on
| Search through the given list of directories for the given file.
occurrence. Details can be found in the documentation of 'findFileWith'.
| Search through the given list of directories for the given file and
returns all paths where the given file exists.
The behavior is equivalent to 'findFilesWith'. Details can be found in the
documentation of 'findFilesWith'.
| Search through a given list of directories for a file that has the given
occurrence. The directories are checked in a left-to-right order.
documentation of 'findFilesWith'.
@since 1.2.6.0
directories (@dirs@) for files that have the given @name@ and satisfy the
given @predicate@ and returns the paths of those files. The directories
are checked in a left-to-right order and the paths are returned in the same
order.
other words, the returned paths can be either relative or absolute
depending on the search directories were used. If there are no search
directories, no results are ever returned.
result if the file exists and satisfies the predicate and no results
otherwise. This is irrespective of what search directories were given.
| Filename extension for executable files (including the dot if any)
| Similar to 'listDirectory', but always includes the special entries (@.@
The operation may fail with the same exceptions as 'listDirectory'.
the special entries (@.@ and @..@).
The operation may fail with:
* @HardwareFault@
@[EIO]@
The operand is not a valid directory name.
* 'isDoesNotExistError'
The directory does not exist.
@[ENOENT, ENOTDIR]@
* 'isPermissionError'
The process has insufficient privileges to perform the operation.
* 'System.IO.isFullError'
Insufficient resources are available to perform the operation.
@[EMFILE, ENFILE]@
* @InappropriateType@
The path refers to an existing non-directory object.
@[ENOTDIR]@
@since 1.2.5.0
| Obtain the current working directory as an absolute path.
In a multithreaded program, the current working directory is a global state
shared among all threads of the process. Therefore, when performing
filesystem operations from multiple threads, it is highly recommended to
Note that 'getCurrentDirectory' is not guaranteed to return the same path
received by 'setCurrentDirectory'. On POSIX systems, the path returned will
always be fully dereferenced (not contain any symbolic links). For more
information, refer to the documentation of
The operation may fail with:
* @HardwareFault@
@[EIO]@
* 'isDoesNotExistError'
There is no path referring to the working directory.
* 'isPermissionError'
The process has insufficient privileges to perform the operation.
@[EACCES]@
* 'System.IO.isFullError'
Insufficient resources are available to perform the operation.
* @UnsupportedOperation@
The operating system has no notion of current working directory.
| Change the working directory to the given path.
In a multithreaded program, the current working directory is a global state
shared among all threads of the process. Therefore, when performing
filesystem operations from multiple threads, it is highly recommended to
The operation may fail with:
* @HardwareFault@
@[EIO]@
The operand is not a valid directory name.
* 'isDoesNotExistError'
The directory does not exist.
@[ENOENT, ENOTDIR]@
* 'isPermissionError'
The process has insufficient privileges to perform the operation.
@[EACCES]@
* @UnsupportedOperation@
The operating system has no notion of current working directory, or the
working directory cannot be dynamically changed.
* @InappropriateType@
The path refers to an existing non-directory object.
@[ENOTDIR]@
| Run an 'IO' action with the given working directory and restore the
original working directory afterwards, even if the given action fails due
to an exception.
The operation may fail with the same exceptions as 'getCurrentDirectory'
and 'setCurrentDirectory'.
^ Directory to execute in
^ Action to be executed
| Obtain the size of a file in bytes.
| Test whether the given path points to an existing filesystem object. If
the user lacks necessary permissions to search the parent directories, this
function may return false even if the file does actually exist.
|The operation 'doesDirectoryExist' returns 'True' if the argument file
exists and is either a directory or a symbolic link to a directory,
and 'False' otherwise.
|The operation 'doesFileExist' returns 'True'
if the argument file exists and is not a directory, and 'False' otherwise.
| Create a /file/ symbolic link. The target path can be either absolute or
relative and need not refer to an existing file. The order of arguments
follows the POSIX convention.
To remove an existing file symbolic link, use 'removeFile'.
Although the distinction between /file/ symbolic links and /directory/
intrinsic property of every symbolic link and cannot be changed without
recreating the link. A file symbolic link that actually points to a
directory will fail to dereference and vice versa. Moreover, creating
outside the Administrators group. Portable programs that use symbolic
links should take both into consideration.
and is therefore atomic.
Windows-specific errors: This operation may fail with 'permissionErrorType'
if the user lacks the privileges to create symbolic links. It may also
fail with 'illegalOperationErrorType' if the file system does not support
symbolic links.
^ path to the target file
^ path of the link to be created
| Create a /directory/ symbolic link. The target path can be either
absolute or relative and need not refer to an existing directory. The
order of arguments follows the POSIX convention.
To remove an existing directory symbolic link, use 'removeDirectoryLink'.
Although the distinction between /file/ symbolic links and /directory/
intrinsic property of every symbolic link and cannot be changed without
recreating the link. A file symbolic link that actually points to a
directory will fail to dereference and vice versa. Moreover, creating
outside the Administrators group. Portable programs that use symbolic
links should take both into consideration.
@SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE@ flag is also included if
'createFileLink' and is therefore atomic.
Windows-specific errors: This operation may fail with 'permissionErrorType'
if the user lacks the privileges to create symbolic links. It may also
fail with 'illegalOperationErrorType' if the file system does not support
symbolic links.
^ path to the target directory
^ path of the link to be created
| Remove an existing /directory/ symbolic link.
is an alias for 'removeFile'.
See also: 'removeFile', which can remove an existing /file/ symbolic link.
regular file or directory, 'False' is returned. If @path@ does not exist
or is otherwise inaccessible, an exception is thrown (see below).
symbolic links, the function also returns true on junction points. On
The operation may fail with:
* 'isDoesNotExistError' if the symbolic link does not exist; or
* 'isPermissionError' if the user is not permitted to read the symbolic
link.
# DEPRECATED isSymbolicLink "Use 'pathIsSymbolicLink' instead" #
| Retrieve the target path of either a file or directory symbolic link.
The returned path may not be absolute, may not exist, and may not even be a
valid path.
also works on junction points. On POSIX systems, this calls @readlink@.
Windows-specific errors: This operation may fail with
'illegalOperationErrorType' if the file system does not support symbolic
links.
| Obtain the time at which the file or directory was last accessed.
The operation may fail with:
* 'isPermissionError' if the user is not permitted to read
the access time; or
* 'isDoesNotExistError' if the file or directory does not exist.
Caveat for POSIX systems: This function returns a timestamp with sub-second
and the underlying filesystem supports them.
| Obtain the time at which the file or directory was last modified.
The operation may fail with:
* 'isPermissionError' if the user is not permitted to read
the modification time; or
* 'isDoesNotExistError' if the file or directory does not exist.
Caveat for POSIX systems: This function returns a timestamp with sub-second
and the underlying filesystem supports them.
| Change the time at which the file or directory was last accessed.
The operation may fail with:
* 'isPermissionError' if the user is not permitted to alter the
access time; or
* 'isDoesNotExistError' if the file or directory does not exist.
Some caveats for POSIX systems:
* Not all systems support @utimensat@, in which case the function can only
emulate the behavior by reading the modification time and then setting
both the access and modification times together. On systems where
@utimensat@ is supported, the access time is set atomically with
nanosecond precision.
would not be able to set timestamps with sub-second resolution. In this
case, there would also be loss of precision in the modification time.
| Change the time at which the file or directory was last modified.
The operation may fail with:
* 'isPermissionError' if the user is not permitted to alter the
modification time; or
* 'isDoesNotExistError' if the file or directory does not exist.
Some caveats for POSIX systems:
* Not all systems support @utimensat@, in which case the function can only
emulate the behavior by reading the access time and then setting both the
access and modification times together. On systems where @utimensat@ is
supported, the modification time is set atomically with nanosecond
precision.
would not be able to set timestamps with sub-second resolution. In this
case, there would also be loss of precision in the access time.
| Obtain the paths to special directories for storing user-specific
application data, configuration, and cache files, conforming to the
<-spec/basedir-spec-latest.html XDG Base Directory Specification>.
Compared with 'getAppUserDataDirectory', this function provides a more
fine-grained hierarchy as well as greater flexibility for the user.
unless overridden.
will be integrated into the path, it must consist of valid path
Note: The directory may not actually exist, in which case you would need
to create it with file mode @700@ (i.e. only accessible by the owner).
< #100>.
^ which special directory
^ a relative path that is appended
to the path; if empty, the base
path is returned
directories.
of directories unless overridden.
Refer to the docs of 'XdgDirectoryList' for more details.
^ which special directory list
| Obtain the path to a special directory for storing user-specific
application data (traditional Unix location). Newer applications may
(<#issuecomment-96521020 migration guide>).
The argument is usually the name of the application. Since it will be
integrated into the path, it must consist of valid path characters.
* On Unix-like systems, the path is @~\/./\<app\>/@.
(e.g. @C:\/Users\//\<user\>/\/AppData\/Roaming\//\<app\>/@)
Note: the directory may not actually exist, in which case you would need
to create it. It is expected that the parent directory exists and is
writable.
The operation may fail with:
* @UnsupportedOperation@
The operating system has no notion of application-specific data
directory.
* 'isDoesNotExistError'
The home directory for the current user does not exist, or cannot be
found.
^ a relative path that is appended
to the path
Copyright: (c) The University of Glasgow 2001
module System.Directory
(
-- * Actions on directories
createDirectory
, createDirectoryIfMissing
, removeDirectory
, removeDirectoryRecursive
, removePathForcibly
, renameDirectory
, listDirectory
, getDirectoryContents
-- ** Current working directory
, getCurrentDirectory
, setCurrentDirectory
, withCurrentDirectory
-- * Pre-defined directories
, getHomeDirectory
, XdgDirectory(..)
, getXdgDirectory
, XdgDirectoryList(..)
, getXdgDirectoryList
, getAppUserDataDirectory
, getUserDocumentsDirectory
, getTemporaryDirectory
-- * Actions on files
, removeFile
, renameFile
, renamePath
, copyFile
, copyFileWithMetadata
, getFileSize
-- * Canonical and absolute paths
, canonicalizePath
, makeAbsolute
, makeRelativeToCurrentDirectory
-- * Existence tests
, doesPathExist
, doesFileExist
, doesDirectoryExist
-- * Searching for executables and files
, findExecutable
, findExecutables
, findExecutablesInDirectories
, findFile
, findFiles
, findFileWith
, findFilesWith
, exeExtension
-- * Symbolic links
, createFileLink
, createDirectoryLink
, removeDirectoryLink
, pathIsSymbolicLink
, getSymbolicLinkTarget
-- * Permissions
, Permissions
, emptyPermissions
, readable
, writable
, executable
, searchable
, setOwnerReadable
, setOwnerWritable
, setOwnerExecutable
, setOwnerSearchable
, getPermissions
, setPermissions
, copyPermissions
-- * Timestamps
, getAccessTime
, getModificationTime
, setAccessTime
, setModificationTime
-- * Deprecated
, isSymbolicLink
) where
import Prelude ()
import System.Directory.Internal
import System.Directory.Internal.Prelude
import Data.Time (UTCTime)
import System.OsPath (decodeFS, encodeFS)
import qualified System.Directory.OsPath as D
$ intro
A directory contains a series of entries , each of which is a named
reference to a file system object ( file , directory etc . ) . Some
entries may be hidden , inaccessible , or have some administrative
function ( e.g. @.@ or @ .. @ under
< POSIX > ) , but in
this standard all such entries are considered to form part of the
directory contents . Entries in sub - directories are not , however ,
considered to form part of the directory contents .
Each file system object is referenced by a /path/. There is
normally at least one absolute path to each file system object . In
some operating systems , it may also be possible to have paths which
are relative to the current directory .
Unless otherwise documented :
* ' IO ' operations in this package may throw any ' IOError ' . No other types of
exceptions shall be thrown .
* The list of possible ' IOErrorType 's in the API documentation is not
exhaustive . The full list may vary by platform and/or evolve over time .
A directory contains a series of entries, each of which is a named
reference to a file system object (file, directory etc.). Some
entries may be hidden, inaccessible, or have some administrative
function (e.g. @.@ or @..@ under
< POSIX>), but in
this standard all such entries are considered to form part of the
directory contents. Entries in sub-directories are not, however,
considered to form part of the directory contents.
Each file system object is referenced by a /path/. There is
normally at least one absolute path to each file system object. In
some operating systems, it may also be possible to have paths which
are relative to the current directory.
Unless otherwise documented:
* 'IO' operations in this package may throw any 'IOError'. No other types of
exceptions shall be thrown.
* The list of possible 'IOErrorType's in the API documentation is not
exhaustive. The full list may vary by platform and/or evolve over time.
-}
$ permissions
directory offers a limited ( and quirky ) interface for reading and setting file
and directory permissions ; see ' getPermissions ' and ' setPermissions ' for a
discussion of their limitations . Because permissions are very difficult to
implement portably across different platforms , users who wish to do more
sophisticated things with permissions are advised to use other ,
platform - specific libraries instead . For example , if you are only interested
in permissions on POSIX - like platforms ,
< -Posix-Files.html unix >
offers much more flexibility .
The ' Permissions ' type is used to record whether certain operations are
permissible on a file\/directory . ' getPermissions ' and ' setPermissions '
get and set these permissions , respectively . Permissions apply both to
files and directories . For directories , the executable field will be
' False ' , and for files the searchable field will be ' False ' . Note that
directories may be searchable without being readable , if permission has
been given to use them as part of a path , but not to examine the
directory contents .
Note that to change some , but not all permissions , a construct on the following lines must be used .
> makeReadable f = do
> p <- getPermissions f
> setPermissions f (p { readable = True })
directory offers a limited (and quirky) interface for reading and setting file
and directory permissions; see 'getPermissions' and 'setPermissions' for a
discussion of their limitations. Because permissions are very difficult to
implement portably across different platforms, users who wish to do more
sophisticated things with permissions are advised to use other,
platform-specific libraries instead. For example, if you are only interested
in permissions on POSIX-like platforms,
<-Posix-Files.html unix>
offers much more flexibility.
The 'Permissions' type is used to record whether certain operations are
permissible on a file\/directory. 'getPermissions' and 'setPermissions'
get and set these permissions, respectively. Permissions apply both to
files and directories. For directories, the executable field will be
'False', and for files the searchable field will be 'False'. Note that
directories may be searchable without being readable, if permission has
been given to use them as part of a path, but not to examine the
directory contents.
Note that to change some, but not all permissions, a construct on the following lines must be used.
> makeReadable f = do
> p <- getPermissions f
> setPermissions f (p {readable = True})
-}
-- | A 'Permissions' value with every flag cleared: not readable,
-- not writable, not executable, and not searchable.
emptyPermissions :: Permissions
emptyPermissions =
  Permissions
    { readable   = False
    , writable   = False
    , executable = False
    , searchable = False
    }
-- | Return a copy of the given 'Permissions' with the 'readable'
-- flag set to the given value.
setOwnerReadable :: Bool -> Permissions -> Permissions
setOwnerReadable flag perms = perms { readable = flag }
-- | Return a copy of the given 'Permissions' with the 'writable'
-- flag set to the given value.
setOwnerWritable :: Bool -> Permissions -> Permissions
setOwnerWritable flag perms = perms { writable = flag }
-- | Return a copy of the given 'Permissions' with the 'executable'
-- flag set to the given value.
setOwnerExecutable :: Bool -> Permissions -> Permissions
setOwnerExecutable flag perms = perms { executable = flag }
-- | Return a copy of the given 'Permissions' with the 'searchable'
-- flag set to the given value.
setOwnerSearchable :: Bool -> Permissions -> Permissions
setOwnerSearchable flag perms = perms { searchable = flag }
On Windows , the ' writable ' permission corresponds to the " read - only "
-- | Get the permissions of a file or directory.  Encodes the
-- 'FilePath' to an 'OsPath' and delegates to the OsPath-based
-- implementation in "System.Directory.OsPath".
getPermissions :: FilePath -> IO Permissions
getPermissions path = D.getPermissions =<< encodeFS path
On Windows , this is only capable of changing the ' writable ' permission ,
-- | Set the permissions of a file or directory.  Encodes the
-- 'FilePath' and delegates to the OsPath-based implementation.
setPermissions :: FilePath -> Permissions -> IO ()
setPermissions path perms = do
  path' <- encodeFS path
  D.setPermissions path' perms
| Copy the permissions of one file to another . This reproduces the
On Windows , this copies only the read - only attribute .
On POSIX systems , this is equivalent to followed by @chmod@.
-- | Copy the permissions from one path to another.  Both paths are
-- encoded to 'OsPath' before delegating to the OsPath-based
-- implementation.
copyPermissions :: FilePath -> FilePath -> IO ()
copyPermissions srcPath dstPath = do
  src <- encodeFS srcPath
  dst <- encodeFS dstPath
  D.copyPermissions src dst
|@'createDirectory ' dir@ creates a new directory @dir@ which is
initially empty , or as near to empty as the operating system
allows .
The operation may fail with :
* ' isPermissionError '
The process has insufficient privileges to perform the operation .
@[EROFS , EACCES]@
* ' isAlreadyExistsError '
The operand refers to a directory that already exists .
@[EEXIST]@
* @HardwareFault@
A physical I\/O error has occurred .
@[EIO]@
* @InvalidArgument@
The operand is not a valid directory name .
@[ENAMETOOLONG , ELOOP]@
* ' isDoesNotExistError '
There is no path to the directory .
@[ENOENT , ENOTDIR]@
* ' System . IO.isFullError '
Insufficient resources ( virtual memory , process file descriptors ,
physical disk space , etc . ) are available to perform the operation .
@[EDQUOT , ENOSPC , ENOMEM , EMLINK]@
* @InappropriateType@
The path refers to an existing non - directory object .
@[EEXIST]@
initially empty, or as near to empty as the operating system
allows.
The operation may fail with:
* 'isPermissionError'
The process has insufficient privileges to perform the operation.
@[EROFS, EACCES]@
* 'isAlreadyExistsError'
The operand refers to a directory that already exists.
@[EEXIST]@
* @HardwareFault@
A physical I\/O error has occurred.
@[EIO]@
* @InvalidArgument@
The operand is not a valid directory name.
@[ENAMETOOLONG, ELOOP]@
* 'isDoesNotExistError'
There is no path to the directory.
@[ENOENT, ENOTDIR]@
* 'System.IO.isFullError'
Insufficient resources (virtual memory, process file descriptors,
physical disk space, etc.) are available to perform the operation.
@[EDQUOT, ENOSPC, ENOMEM, EMLINK]@
* @InappropriateType@
The path refers to an existing non-directory object.
@[EEXIST]@
-}
-- | Create a new directory at the given path.  Encodes the
-- 'FilePath' and delegates to the OsPath-based implementation.
createDirectory :: FilePath -> IO ()
createDirectory path = encodeFS path >>= D.createDirectory
@dir@ if it doesn\'t exist . If the first argument is ' True '
-> IO ()
createDirectoryIfMissing cp = encodeFS >=> D.createDirectoryIfMissing cp
| @'removeDirectory ' dir@ removes an existing directory /dir/. The
implementation may specify additional constraints which must be
satisfied before a directory can be removed ( e.g. the directory has to
be empty , or may not be in use by other processes ) . It is not legal
for an implementation to partially remove a directory unless the
entire directory is removed . A conformant implementation need not
support directory removal in all situations ( e.g. removal of the root
directory ) .
The operation may fail with :
* @HardwareFault@
A physical I\/O error has occurred .
@[EIO]@
* @InvalidArgument@
The operand is not a valid directory name .
@[ENAMETOOLONG , ELOOP]@
* ' isDoesNotExistError '
The directory does not exist .
@[ENOENT , ENOTDIR]@
* ' isPermissionError '
The process has insufficient privileges to perform the operation .
@[EROFS , EACCES , EPERM]@
* @UnsatisfiedConstraints@
Implementation - dependent constraints are not satisfied .
@[EBUSY , ENOTEMPTY , EEXIST]@
* @UnsupportedOperation@
The implementation does not support removal in this situation .
@[EINVAL]@
* @InappropriateType@
The operand refers to an existing non - directory object .
@[ENOTDIR]@
implementation may specify additional constraints which must be
satisfied before a directory can be removed (e.g. the directory has to
be empty, or may not be in use by other processes). It is not legal
for an implementation to partially remove a directory unless the
entire directory is removed. A conformant implementation need not
support directory removal in all situations (e.g. removal of the root
directory).
The operation may fail with:
* @HardwareFault@
A physical I\/O error has occurred.
@[EIO]@
* @InvalidArgument@
The operand is not a valid directory name.
@[ENAMETOOLONG, ELOOP]@
* 'isDoesNotExistError'
The directory does not exist.
@[ENOENT, ENOTDIR]@
* 'isPermissionError'
The process has insufficient privileges to perform the operation.
@[EROFS, EACCES, EPERM]@
* @UnsatisfiedConstraints@
Implementation-dependent constraints are not satisfied.
@[EBUSY, ENOTEMPTY, EEXIST]@
* @UnsupportedOperation@
The implementation does not support removal in this situation.
@[EINVAL]@
* @InappropriateType@
The operand refers to an existing non-directory object.
@[ENOTDIR]@
-}
-- | Remove an existing directory.  Encodes the 'FilePath' and
-- delegates to the OsPath-based implementation.
removeDirectory :: FilePath -> IO ()
removeDirectory path = encodeFS path >>= D.removeDirectory
On Windows , the operation fails if /dir/ is a directory symbolic link .
This operation is reported to be flaky on Windows so retry logic may
-- | Remove a directory together with its contents and
-- subdirectories.  Encodes the 'FilePath' and delegates to the
-- OsPath-based implementation.
removeDirectoryRecursive :: FilePath -> IO ()
removeDirectoryRecursive path = encodeFS path >>= D.removeDirectoryRecursive
If an exception occurs while removing an entry , @removePathForcibly@ will
exception . The first exception that it encountered is re - thrown .
@since 1.2.7.0
-- | Forcibly remove a file or directory together with its contents,
-- also attempting entries made unremovable by permissions.  Encodes
-- the 'FilePath' and delegates to the OsPath-based implementation.
removePathForcibly :: FilePath -> IO ()
removePathForcibly path = encodeFS path >>= D.removePathForcibly
|'removeFile ' /file/ removes the directory entry for an existing file
/file/ , where /file/ is not itself a directory . The
implementation may specify additional constraints which must be
satisfied before a file can be removed ( e.g. the file may not be in
use by other processes ) .
The operation may fail with :
* @HardwareFault@
A physical I\/O error has occurred .
@[EIO]@
* @InvalidArgument@
The operand is not a valid file name .
@[ENAMETOOLONG , ELOOP]@
* ' isDoesNotExistError '
The file does not exist .
@[ENOENT , ENOTDIR]@
* ' isPermissionError '
The process has insufficient privileges to perform the operation .
@[EROFS , EACCES , EPERM]@
* @UnsatisfiedConstraints@
Implementation - dependent constraints are not satisfied .
@[EBUSY]@
* @InappropriateType@
The operand refers to an existing directory .
@[EPERM , EINVAL]@
/file/, where /file/ is not itself a directory. The
implementation may specify additional constraints which must be
satisfied before a file can be removed (e.g. the file may not be in
use by other processes).
The operation may fail with:
* @HardwareFault@
A physical I\/O error has occurred.
@[EIO]@
* @InvalidArgument@
The operand is not a valid file name.
@[ENAMETOOLONG, ELOOP]@
* 'isDoesNotExistError'
The file does not exist.
@[ENOENT, ENOTDIR]@
* 'isPermissionError'
The process has insufficient privileges to perform the operation.
@[EROFS, EACCES, EPERM]@
* @UnsatisfiedConstraints@
Implementation-dependent constraints are not satisfied.
@[EBUSY]@
* @InappropriateType@
The operand refers to an existing directory.
@[EPERM, EINVAL]@
-}
-- | Remove the directory entry for an existing file.  Encodes the
-- 'FilePath' and delegates to the OsPath-based implementation.
removeFile :: FilePath -> IO ()
removeFile path = encodeFS path >>= D.removeFile
|@'renameDirectory ' old new@ changes the name of an existing
directory from /old/ to /new/. If the /new/ directory
already exists , it is atomically replaced by the /old/ directory .
If the /new/ directory is neither the /old/ directory nor an
alias of the /old/ directory , it is removed as if by
' removeDirectory ' . A conformant implementation need not support
renaming directories in all situations ( e.g. renaming to an existing
directory , or across different physical devices ) , but the constraints
must be documented .
On Win32 platforms , @renameDirectory@ fails if the /new/ directory already
exists .
The operation may fail with :
* @HardwareFault@
A physical I\/O error has occurred .
@[EIO]@
* @InvalidArgument@
Either operand is not a valid directory name .
@[ENAMETOOLONG , ELOOP]@
* ' isDoesNotExistError '
The original directory does not exist , or there is no path to the target .
@[ENOENT , ENOTDIR]@
* ' isPermissionError '
The process has insufficient privileges to perform the operation .
@[EROFS , EACCES , EPERM]@
* ' System . IO.isFullError '
Insufficient resources are available to perform the operation .
@[EDQUOT , ENOSPC , ENOMEM , EMLINK]@
* @UnsatisfiedConstraints@
Implementation - dependent constraints are not satisfied .
@[EBUSY , ENOTEMPTY , EEXIST]@
* @UnsupportedOperation@
The implementation does not support renaming in this situation .
@[EINVAL , EXDEV]@
* @InappropriateType@
Either path refers to an existing non - directory object .
@[ENOTDIR , EISDIR]@
directory from /old/ to /new/. If the /new/ directory
already exists, it is atomically replaced by the /old/ directory.
If the /new/ directory is neither the /old/ directory nor an
alias of the /old/ directory, it is removed as if by
'removeDirectory'. A conformant implementation need not support
renaming directories in all situations (e.g. renaming to an existing
directory, or across different physical devices), but the constraints
must be documented.
On Win32 platforms, @renameDirectory@ fails if the /new/ directory already
exists.
The operation may fail with:
* @HardwareFault@
A physical I\/O error has occurred.
@[EIO]@
* @InvalidArgument@
Either operand is not a valid directory name.
@[ENAMETOOLONG, ELOOP]@
* 'isDoesNotExistError'
The original directory does not exist, or there is no path to the target.
@[ENOENT, ENOTDIR]@
* 'isPermissionError'
The process has insufficient privileges to perform the operation.
@[EROFS, EACCES, EPERM]@
* 'System.IO.isFullError'
Insufficient resources are available to perform the operation.
@[EDQUOT, ENOSPC, ENOMEM, EMLINK]@
* @UnsatisfiedConstraints@
Implementation-dependent constraints are not satisfied.
@[EBUSY, ENOTEMPTY, EEXIST]@
* @UnsupportedOperation@
The implementation does not support renaming in this situation.
@[EINVAL, EXDEV]@
* @InappropriateType@
Either path refers to an existing non-directory object.
@[ENOTDIR, EISDIR]@
-}
renameDirectory :: FilePath -> FilePath -> IO ()
renameDirectory opath npath = do
opath' <- encodeFS opath
npath' <- encodeFS npath
D.renameDirectory opath' npath'
|@'renameFile ' old new@ changes the name of an existing file system
object from /old/ to /new/. If the object already exists , it is
replaced by the /old/ object . Neither path may refer to an existing
directory . A conformant implementation need not support renaming files
in all situations ( e.g. renaming across different physical devices ) , but
the constraints must be documented .
On Windows , this calls @MoveFileEx@ with @MOVEFILE_REPLACE_EXISTING@ set ,
which is not guaranteed to be atomic
( < > ) .
On other platforms , this operation is atomic .
The operation may fail with :
* @HardwareFault@
A physical I\/O error has occurred .
@[EIO]@
* @InvalidArgument@
Either operand is not a valid file name .
@[ENAMETOOLONG , ELOOP]@
* ' isDoesNotExistError '
The original file does not exist , or there is no path to the target .
@[ENOENT , ENOTDIR]@
* ' isPermissionError '
The process has insufficient privileges to perform the operation .
@[EROFS , EACCES , EPERM]@
* ' System . IO.isFullError '
Insufficient resources are available to perform the operation .
@[EDQUOT , ENOSPC , ENOMEM , EMLINK]@
* @UnsatisfiedConstraints@
Implementation - dependent constraints are not satisfied .
@[EBUSY]@
* @UnsupportedOperation@
The implementation does not support renaming in this situation .
@[EXDEV]@
* @InappropriateType@
Either path refers to an existing directory .
@[ENOTDIR , EISDIR , EINVAL , EEXIST , ENOTEMPTY]@
object from /old/ to /new/. If the /new/ object already exists, it is
replaced by the /old/ object. Neither path may refer to an existing
directory. A conformant implementation need not support renaming files
in all situations (e.g. renaming across different physical devices), but
the constraints must be documented.
On Windows, this calls @MoveFileEx@ with @MOVEFILE_REPLACE_EXISTING@ set,
which is not guaranteed to be atomic
(<>).
On other platforms, this operation is atomic.
The operation may fail with:
* @HardwareFault@
A physical I\/O error has occurred.
@[EIO]@
* @InvalidArgument@
Either operand is not a valid file name.
@[ENAMETOOLONG, ELOOP]@
* 'isDoesNotExistError'
The original file does not exist, or there is no path to the target.
@[ENOENT, ENOTDIR]@
* 'isPermissionError'
The process has insufficient privileges to perform the operation.
@[EROFS, EACCES, EPERM]@
* 'System.IO.isFullError'
Insufficient resources are available to perform the operation.
@[EDQUOT, ENOSPC, ENOMEM, EMLINK]@
* @UnsatisfiedConstraints@
Implementation-dependent constraints are not satisfied.
@[EBUSY]@
* @UnsupportedOperation@
The implementation does not support renaming in this situation.
@[EXDEV]@
* @InappropriateType@
Either path refers to an existing directory.
@[ENOTDIR, EISDIR, EINVAL, EEXIST, ENOTEMPTY]@
-}
renameFile :: FilePath -> FilePath -> IO ()
renameFile opath npath = do
opath' <- encodeFS opath
npath' <- encodeFS npath
D.renameFile opath' npath'
A physical I\/O error has occurred .
* @InvalidArgument@
@[ENAMETOOLONG , ELOOP]@
@[EROFS , EACCES , EPERM]@
@[EDQUOT , ENOSPC , ENOMEM ,
@[EBUSY]@
Either the destination path refers to an existing directory , or one of the
@[ENOTDIR , EISDIR , EINVAL , EEXIST , ENOTEMPTY]@
@since 1.2.7.0
-> IO ()
renamePath opath npath = do
opath' <- encodeFS opath
npath' <- encodeFS npath
D.renamePath opath' npath'
-> IO ()
copyFile fromFPath toFPath = do
fromFPath' <- encodeFS fromFPath
toFPath' <- encodeFS toFPath
D.copyFile fromFPath' toFPath'
On Windows , it behaves like the Win32 function
hence the access times of the two files may differ after the operation
-> IO ()
copyFileWithMetadata src dst = do
src' <- encodeFS src
dst' <- encodeFS dst
D.copyFileWithMetadata src' dst'
' dropTrailingPathSeparator ' . Additionally , on Windows the letter case of
path , ' ' is sufficient for removing dependence on the current
include the two special directories @.@ and @ .. @ , as well as
any symbolic links ( and junction points on Windows ) . The input path need
not point to an existing file or directory . Canonicalization is performed
Since some indirections ( symbolic links on all systems , @ .. @ on non - Windows
systems , and junction points on Windows ) are dependent on the state of the
Note that on Windows parent directories @ .. @ are always fully expanded
before the symbolic links , as consistent with the rest of the Windows API
( such as @GetFullPathName@ ) . In contrast , on POSIX systems parent
Windows @L\\ .. @ refers to @.@ , whereas on other operating systems @L/ .. @
@canonicalizePath@ can resolve at least 64 indirections in a single path ,
On Windows XP or earlier systems , junction expansion is not performed due
to their lack of @GetFinalPathNameByHandle@.
/Changes since 1.2.3.0:/ The function has been altered to be more robust
/Changes since 1.3.0.0:/ The function no longer preserves the trailing path
are now performed on Windows .
canonicalizePath :: FilePath -> IO FilePath
canonicalizePath = encodeFS >=> D.canonicalizePath >=> decodeFS
makeAbsolute :: FilePath -> IO FilePath
makeAbsolute = encodeFS >=> D.makeAbsolute >=> decodeFS
makeRelativeToCurrentDirectory :: FilePath -> IO FilePath
makeRelativeToCurrentDirectory =
encodeFS >=> D.makeRelativeToCurrentDirectory >=> decodeFS
give you the path to GHC .
@< / package / process / docs / System - Process.html#v : createProcess createProcess>@
when passed the same string ( as a @RawCommand@ , not a @ShellCommand@ ) ,
provided that @name@ is not a relative path with more than one segment .
On Windows , ' findExecutable ' calls the Win32 function
On non - Windows platforms , the behavior is equivalent to ' findFileWith '
findExecutable :: String -> IO (Maybe FilePath)
findExecutable = encodeFS >=> D.findExecutable >=> (`for` decodeFS)
On Windows , this /only returns the first occurrence/ , if any . Its behavior
On non - Windows platforms , the behavior is equivalent to
findExecutables :: String -> IO [FilePath]
findExecutables = encodeFS >=> D.findExecutables >=> (`for` decodeFS)
Windows is therefore equivalent to those on non - Windows platforms .
@since 1.2.4.0
findExecutablesInDirectories :: [FilePath] -> String -> IO [FilePath]
findExecutablesInDirectories path binary = do
path' <- for path encodeFS
binary' <- encodeFS binary
D.findExecutablesInDirectories path' binary'
>>= (`for` decodeFS)
The behavior is equivalent to ' findFileWith ' , returning only the first
findFile :: [FilePath] -> String -> IO (Maybe FilePath)
findFile = findFileWith (\ _ -> pure True)
@since 1.2.1.0
findFiles :: [FilePath] -> String -> IO [FilePath]
findFiles = findFilesWith (\ _ -> pure True)
name and satisfies the given predicate and return the path of the first
This is essentially a more performant version of ' findFilesWith ' that
always returns the first result , if any . Details can be found in the
findFileWith :: (FilePath -> IO Bool) -> [FilePath] -> String -> IO (Maybe FilePath)
findFileWith f ds name = do
ds' <- for ds encodeFS
name' <- encodeFS name
D.findFileWith (decodeFS >=> f) ds' name'
>>= (`for` decodeFS)
| @findFilesWith predicate dirs name@ searches through the list of
If the @name@ is a relative path , then for every search directory @dir@ ,
the function checks whether ' < / > ' name@ exists and satisfies the
predicate . If so , ' < / > ' name@ is returned as one of the results . In
If the @name@ is an absolute path , then the function will return a single
@since 1.2.1.0
findFilesWith :: (FilePath -> IO Bool) -> [FilePath] -> String -> IO [FilePath]
findFilesWith f ds name = do
ds' <- for ds encodeFS
name' <- encodeFS name
res <- D.findFilesWith (decodeFS >=> f) ds' name'
for res decodeFS
( usually @\"\"@ on POSIX systems and @\".exe\"@ on Windows or OS\/2 ) .
@since 1.2.4.0
exeExtension :: String
exeExtension = so D.exeExtension
and @ .. @ ) . ( This applies to Windows as well . )
getDirectoryContents :: FilePath -> IO [FilePath]
getDirectoryContents = encodeFS >=> D.getDirectoryContents >=> (`for` decodeFS)
| @'listDirectory ' dir@ returns a list of entries in /dir/ without
A physical I\/O error has occurred .
* @InvalidArgument@
@[ENAMETOOLONG , ELOOP]@
@[EACCES]@
listDirectory :: FilePath -> IO [FilePath]
listDirectory = encodeFS >=> D.listDirectory >=> (`for` decodeFS)
use absolute rather than relative paths ( see : ' ' ) .
< > .
A physical I\/O error has occurred .
@[EPERM , ENOENT , ESTALE ... ]@
getCurrentDirectory :: IO FilePath
getCurrentDirectory = D.getCurrentDirectory >>= decodeFS
use absolute rather than relative paths ( see : ' ' ) .
A physical I\/O error has occurred .
* @InvalidArgument@
@[ENAMETOOLONG , ELOOP]@
setCurrentDirectory :: FilePath -> IO ()
setCurrentDirectory = encodeFS >=> D.setCurrentDirectory
@since 1.2.3.0
-> IO a
withCurrentDirectory dir action =
encodeFS dir >>= (`D.withCurrentDirectory` action)
@since 1.2.7.0
getFileSize :: FilePath -> IO Integer
getFileSize = encodeFS >=> D.getFileSize
@since 1.2.7.0
doesPathExist :: FilePath -> IO Bool
doesPathExist = encodeFS >=> D.doesPathExist
doesDirectoryExist :: FilePath -> IO Bool
doesDirectoryExist = encodeFS >=> D.doesDirectoryExist
doesFileExist :: FilePath -> IO Bool
doesFileExist = encodeFS >=> D.doesFileExist
symbolic links does not exist on POSIX systems , on Windows this is an
symbolic links on Windows may require privileges unavailable to users
On Windows , the function is implemented using @CreateSymbolicLink@. Since
1.3.3.0 , the @SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE@ flag is included
if supported by the operating system . On POSIX , the function uses @symlink@
@since 1.3.1.0
createFileLink
-> IO ()
createFileLink target link = do
target' <- encodeFS target
link' <- encodeFS link
D.createFileLink target' link'
symbolic links does not exist on POSIX systems , on Windows this is an
symbolic links on Windows may require privileges unavailable to users
On Windows , the function is implemented using @CreateSymbolicLink@ with
@SYMBOLIC_LINK_FLAG_DIRECTORY@. Since 1.3.3.0 , the
supported by the operating system . On POSIX , this is an alias for
@since 1.3.1.0
createDirectoryLink
-> IO ()
createDirectoryLink target link = do
target' <- encodeFS target
link' <- encodeFS link
D.createDirectoryLink target' link'
On Windows , this is an alias for ' removeDirectory ' . On POSIX systems , this
@since 1.3.1.0
removeDirectoryLink :: FilePath -> IO ()
removeDirectoryLink = encodeFS >=> D.removeDirectoryLink
| Check whether an existing @path@ is a symbolic link . If @path@ is a
On Windows , this checks for In addition to
POSIX systems , this checks for @S_IFLNK@.
@since 1.3.0.0
pathIsSymbolicLink :: FilePath -> IO Bool
pathIsSymbolicLink = encodeFS >=> D.pathIsSymbolicLink
isSymbolicLink :: FilePath -> IO Bool
isSymbolicLink = pathIsSymbolicLink
On Windows systems , this calls with
@FSCTL_GET_REPARSE_POINT@. In addition to symbolic links , the function
@since 1.3.1.0
getSymbolicLinkTarget :: FilePath -> IO FilePath
getSymbolicLinkTarget = encodeFS >=> D.getSymbolicLinkTarget >=> decodeFS
resolution only if this package is compiled against @unix-2.6.0.0@ or later
@since 1.2.3.0
getAccessTime :: FilePath -> IO UTCTime
getAccessTime = encodeFS >=> D.getAccessTime
resolution only if this package is compiled against @unix-2.6.0.0@ or later
getModificationTime :: FilePath -> IO UTCTime
getModificationTime = encodeFS >=> D.getModificationTime
* If compiled against a version of @unix@ prior to @2.7.0.0@ , the function
@since 1.2.3.0
setAccessTime :: FilePath -> UTCTime -> IO ()
setAccessTime path atime = encodeFS path >>= (`D.setAccessTime` atime)
* If compiled against a version of @unix@ prior to @2.7.0.0@ , the function
@since 1.2.3.0
setModificationTime :: FilePath -> UTCTime -> IO ()
setModificationTime path mtime =
encodeFS path >>= (`D.setModificationTime` mtime)
| Returns the current user 's home directory .
The directory returned is expected to be writable by the current user ,
but note that it is n't generally considered good practice to store
application - specific data here ; use ' getXdgDirectory ' or
' getAppUserDataDirectory ' instead .
On Unix , ' getHomeDirectory ' behaves as follows :
* Returns $ HOME env variable if set ( including to an empty string ) .
* Otherwise uses home directory returned by ` getpwuid_r ` using the UID of the current proccesses user . This basically reads the /etc / passwd file . An empty home directory field is considered valid .
On Windows , the system is queried for a suitable path ; a typical path might be @C:\/Users\//\<user\>/@.
The operation may fail with :
* @UnsupportedOperation@
The operating system has no notion of home directory .
* ' isDoesNotExistError '
The home directory for the current user does not exist , or
can not be found .
The directory returned is expected to be writable by the current user,
but note that it isn't generally considered good practice to store
application-specific data here; use 'getXdgDirectory' or
'getAppUserDataDirectory' instead.
On Unix, 'getHomeDirectory' behaves as follows:
* Returns $HOME env variable if set (including to an empty string).
* Otherwise uses home directory returned by `getpwuid_r` using the UID of the current proccesses user. This basically reads the /etc/passwd file. An empty home directory field is considered valid.
On Windows, the system is queried for a suitable path; a typical path might be @C:\/Users\//\<user\>/@.
The operation may fail with:
* @UnsupportedOperation@
The operating system has no notion of home directory.
* 'isDoesNotExistError'
The home directory for the current user does not exist, or
cannot be found.
-}
getHomeDirectory :: IO FilePath
getHomeDirectory = D.getHomeDirectory >>= decodeFS
On Windows , ' XdgData ' and ' XdgConfig ' usually map to the same directory
Refer to the docs of ' XdgDirectory ' for more details .
The second argument is usually the name of the application . Since it
characters . Note : if the second argument is an absolute path , it will
just return the second argument .
As of 1.3.5.0 , the environment variable is ignored if set to a relative
path , per revised XDG Base Directory Specification . See
@since 1.2.3.0
-> IO FilePath
getXdgDirectory xdgDir = encodeFS >=> D.getXdgDirectory xdgDir >=> decodeFS
| Similar to ' getXdgDirectory ' but retrieves the entire list of XDG
On Windows , ' XdgDataDirs ' and ' XdgConfigDirs ' usually map to the same list
-> IO [FilePath]
getXdgDirectoryList = D.getXdgDirectoryList >=> (`for` decodeFS)
prefer the the XDG - conformant location provided by ' getXdgDirectory '
* On Windows , the path is @%APPDATA%\//\<app\>/@
-> IO FilePath
getAppUserDataDirectory = encodeFS >=> D.getAppUserDataDirectory >=>decodeFS
| Returns the current user 's document directory .
The directory returned is expected to be writable by the current user ,
but note that it is n't generally considered good practice to store
application - specific data here ; use ' getXdgDirectory ' or
' getAppUserDataDirectory ' instead .
On Unix , ' getUserDocumentsDirectory ' returns the value of the @HOME@
environment variable . On Windows , the system is queried for a
suitable path ; a typical path might be @C:\/Users\//\<user\>/\/Documents@.
The operation may fail with :
* @UnsupportedOperation@
The operating system has no notion of document directory .
* ' isDoesNotExistError '
The document directory for the current user does not exist , or
can not be found .
The directory returned is expected to be writable by the current user,
but note that it isn't generally considered good practice to store
application-specific data here; use 'getXdgDirectory' or
'getAppUserDataDirectory' instead.
On Unix, 'getUserDocumentsDirectory' returns the value of the @HOME@
environment variable. On Windows, the system is queried for a
suitable path; a typical path might be @C:\/Users\//\<user\>/\/Documents@.
The operation may fail with:
* @UnsupportedOperation@
The operating system has no notion of document directory.
* 'isDoesNotExistError'
The document directory for the current user does not exist, or
cannot be found.
-}
getUserDocumentsDirectory :: IO FilePath
getUserDocumentsDirectory = D.getUserDocumentsDirectory >>= decodeFS
| Returns the current directory for temporary files .
On Unix , ' getTemporaryDirectory ' returns the value of the @TMPDIR@
environment variable or \"\/tmp\ " if the variable isn\'t defined .
On Windows , the function checks for the existence of environment variables in
the following order and uses the first path found :
*
TMP environment variable .
*
TEMP environment variable .
*
USERPROFILE environment variable .
*
The Windows directory
The operation may fail with :
* @UnsupportedOperation@
The operating system has no notion of temporary directory .
The function doesn\'t verify whether the path exists .
On Unix, 'getTemporaryDirectory' returns the value of the @TMPDIR@
environment variable or \"\/tmp\" if the variable isn\'t defined.
On Windows, the function checks for the existence of environment variables in
the following order and uses the first path found:
*
TMP environment variable.
*
TEMP environment variable.
*
USERPROFILE environment variable.
*
The Windows directory
The operation may fail with:
* @UnsupportedOperation@
The operating system has no notion of temporary directory.
The function doesn\'t verify whether the path exists.
-}
getTemporaryDirectory :: IO FilePath
getTemporaryDirectory = D.getTemporaryDirectory >>= decodeFS
|
0ca9134edb6d816bfb5015b5a7e5ea2197da2aafd73f2c64471357e6e3631f9a | yogthos/krueger | core.clj | (ns krueger.test.db.core
(:require [krueger.db.core :refer [*db*] :as db]
[luminus-migrations.core :as migrations]
[clojure.test :refer :all]
[clojure.java.jdbc :as jdbc]
[krueger.config :refer [env]]
[mount.core :as mount]))
(use-fixtures
:once
(fn [f]
(mount/start
#'krueger.config/env
#'krueger.db.core/*db*)
(migrations/migrate ["migrate"] (select-keys env [:database-url]))
(f)))
(deftest test-users
(jdbc/with-db-transaction [t-conn *db*]
(jdbc/db-set-rollback-only! t-conn)
(is (= 1 (db/create-user!
t-conn
{:id "1"
:first_name "Sam"
:last_name "Smith"
:email ""
:pass "pass"})))
(is (= {:id "1"
:first_name "Sam"
:last_name "Smith"
:email ""
:pass "pass"
:admin nil
:last_login nil
:is_active nil}
(db/get-user t-conn {:id "1"})))))
| null | https://raw.githubusercontent.com/yogthos/krueger/782e1f8ab358867102b907c5a80e56ee6bc6ff82/test/clj/krueger/test/db/core.clj | clojure | (ns krueger.test.db.core
(:require [krueger.db.core :refer [*db*] :as db]
[luminus-migrations.core :as migrations]
[clojure.test :refer :all]
[clojure.java.jdbc :as jdbc]
[krueger.config :refer [env]]
[mount.core :as mount]))
(use-fixtures
:once
(fn [f]
(mount/start
#'krueger.config/env
#'krueger.db.core/*db*)
(migrations/migrate ["migrate"] (select-keys env [:database-url]))
(f)))
(deftest test-users
(jdbc/with-db-transaction [t-conn *db*]
(jdbc/db-set-rollback-only! t-conn)
(is (= 1 (db/create-user!
t-conn
{:id "1"
:first_name "Sam"
:last_name "Smith"
:email ""
:pass "pass"})))
(is (= {:id "1"
:first_name "Sam"
:last_name "Smith"
:email ""
:pass "pass"
:admin nil
:last_login nil
:is_active nil}
(db/get-user t-conn {:id "1"})))))
| |
4d7d3f2592a6df8e104755be9aa3b61fedd413bd48665eb19f527096e6d32562 | haskell-numerics/hmatrix | VectorShow.hs | # LANGUAGE DataKinds #
module Main
( main
) where
import Numeric.LinearAlgebra.Static
import qualified Numeric.LinearAlgebra as LA
import qualified Numeric.GSL.Minimization as Min
u :: R 4
u = vec4 10 20 30 40
v :: R 5
v = vec2 5 0 & 0 & 3 & 7
b :: L 4 3
b = matrix
[ 2, 0,-1
, 1, 1, 7
, 5, 3, 1
, 2, 8, 0 ] :: L 4 3
w :: R 10
w = vector [1..10] :: R 10
f :: [Double] -> Double
f [x,y] = 10*(x-1)^(2::Int) + 20*(y-2)^(2::Int) + 30
f _ = error "f only defined for exactly 2 elements"
main :: IO ()
main = do
print u
print v
print b
print w
print $ diag u
print (eye + 2 :: Sq 4)
print $ LA.diag (LA.fromList [1,2,3 :: Double])
--
let (s,p) = Min.minimize Min.NMSimplex2 1E-2 30 [1,1] f [5,7]
print s
print p
| null | https://raw.githubusercontent.com/haskell-numerics/hmatrix/2694f776c7b5034d239acb5d984c489417739225/examples/VectorShow.hs | haskell | # LANGUAGE DataKinds #
module Main
( main
) where
import Numeric.LinearAlgebra.Static
import qualified Numeric.LinearAlgebra as LA
import qualified Numeric.GSL.Minimization as Min
u :: R 4
u = vec4 10 20 30 40
v :: R 5
v = vec2 5 0 & 0 & 3 & 7
b :: L 4 3
b = matrix
[ 2, 0,-1
, 1, 1, 7
, 5, 3, 1
, 2, 8, 0 ] :: L 4 3
w :: R 10
w = vector [1..10] :: R 10
f :: [Double] -> Double
f [x,y] = 10*(x-1)^(2::Int) + 20*(y-2)^(2::Int) + 30
f _ = error "f only defined for exactly 2 elements"
main :: IO ()
main = do
print u
print v
print b
print w
print $ diag u
print (eye + 2 :: Sq 4)
print $ LA.diag (LA.fromList [1,2,3 :: Double])
let (s,p) = Min.minimize Min.NMSimplex2 1E-2 30 [1,1] f [5,7]
print s
print p
| |
e3b7bb9ba567b2e26718439a84f276793670eb64118d20405c8c1b773e1b959f | well-typed-lightbulbs/ocaml-esp32 | pipe_eof.ml | (* TEST
* hasunix
include unix
** bytecode
** native
*)
let drain pipe =
let max = 2048 in
let buf = Buffer.create 2048 in
let tmp = Bytes.create max in
while begin
try
let len = Unix.read pipe tmp 0 max in
Buffer.add_subbytes buf tmp 0 len;
len > 0
with Unix.Unix_error (Unix.EPIPE, _, _) when false ->
false
end do () done;
Buffer.contents buf
;;
let run exe args =
let out_in, out_out = Unix.pipe () in
let err_in, err_out = Unix.pipe () in
let args = Array.append [| exe |] args in
let pid = Unix.create_process exe args Unix.stdin out_out err_out in
Unix.close out_out;
Unix.close err_out;
let output = drain out_in in
let error = drain err_in in
Unix.close out_in;
Unix.close err_in;
let _pid, status = Unix.waitpid [ ] pid in
status, output, error
;;
let _ =
ignore (run "cp" [||]);
print_endline "success"
;;
| null | https://raw.githubusercontent.com/well-typed-lightbulbs/ocaml-esp32/c24fcbfbee0e3aa6bb71c9b467c60c6bac326cc7/testsuite/tests/lib-unix/common/pipe_eof.ml | ocaml | TEST
* hasunix
include unix
** bytecode
** native
|
let drain pipe =
let max = 2048 in
let buf = Buffer.create 2048 in
let tmp = Bytes.create max in
while begin
try
let len = Unix.read pipe tmp 0 max in
Buffer.add_subbytes buf tmp 0 len;
len > 0
with Unix.Unix_error (Unix.EPIPE, _, _) when false ->
false
end do () done;
Buffer.contents buf
;;
let run exe args =
let out_in, out_out = Unix.pipe () in
let err_in, err_out = Unix.pipe () in
let args = Array.append [| exe |] args in
let pid = Unix.create_process exe args Unix.stdin out_out err_out in
Unix.close out_out;
Unix.close err_out;
let output = drain out_in in
let error = drain err_in in
Unix.close out_in;
Unix.close err_in;
let _pid, status = Unix.waitpid [ ] pid in
status, output, error
;;
let _ =
ignore (run "cp" [||]);
print_endline "success"
;;
|
40d91d9c2c02171c9f173b4bbc96d725fcd0a3213cf67645397f623da53363d8 | venantius/glow | core.clj | (ns glow.core
(:require [glow.colorschemes :as colorschemes]
[glow.html :as html]
[glow.parse :as parse]
[glow.terminal :as terminal]
[instaparse.core :as insta]))
(defn highlight
"Given a string of valid Clojure source code, parse it and return a
syntax-highlighted string of the same.
By default, highlight uses `glow.colorschemes/terminal-default` to figure out
which ANSI colors to use. If you want to use a different colorscheme, just
pass in a map in a style akin to that as an optional second argument, e.g.:
{:string :blue
:number :green}"
([s]
(terminal/ansi-colorize colorschemes/terminal-default (parse/parse s)))
([s colorscheme]
(terminal/ansi-colorize colorscheme (parse/parse s))))
(defn highlight-html
"Given a string of valid Clojure source code, parse it and return an HTML
document of the same with span classes set. This should be used in tandem
with `generate-css`."
[s]
(html/generate-html (parse/parse s)))
(defn generate-css
"By default, generate-css uses `glow.colorschemes/solarized-dark` to figure
out which colors to use. If you want to use a different colorscheme, just
pass in a map in a style akin to that as an optional second argument, e.g.:
{:string :blue
:number :green}"
([]
(html/generate-css colorschemes/solarized-dark))
([colorscheme]
(html/generate-css colorscheme)))
| null | https://raw.githubusercontent.com/venantius/glow/17698a3621ba2f7d09f7c786764a21b13f90f6c3/src/glow/core.clj | clojure | (ns glow.core
(:require [glow.colorschemes :as colorschemes]
[glow.html :as html]
[glow.parse :as parse]
[glow.terminal :as terminal]
[instaparse.core :as insta]))
(defn highlight
"Given a string of valid Clojure source code, parse it and return a
syntax-highlighted string of the same.
By default, highlight uses `glow.colorschemes/terminal-default` to figure out
which ANSI colors to use. If you want to use a different colorscheme, just
pass in a map in a style akin to that as an optional second argument, e.g.:
{:string :blue
:number :green}"
([s]
(terminal/ansi-colorize colorschemes/terminal-default (parse/parse s)))
([s colorscheme]
(terminal/ansi-colorize colorscheme (parse/parse s))))
(defn highlight-html
"Given a string of valid Clojure source code, parse it and return an HTML
document of the same with span classes set. This should be used in tandem
with `generate-css`."
[s]
(html/generate-html (parse/parse s)))
(defn generate-css
"By default, generate-css uses `glow.colorschemes/solarized-dark` to figure
out which colors to use. If you want to use a different colorscheme, just
pass in a map in a style akin to that as an optional second argument, e.g.:
{:string :blue
:number :green}"
([]
(html/generate-css colorschemes/solarized-dark))
([colorscheme]
(html/generate-css colorscheme)))
| |
b0188e26055ce12d93409a2f8007d8d9cc5db96837bdf48b6912dfe7d5ca2614 | yutopp/rill | attribute.ml |
* Copyright yutopp 2016 - .
*
* Distributed under the Boost Software License , Version 1.0 .
* ( See accompanying file LICENSE_1_0.txt or copy at
* )
* Copyright yutopp 2016 - .
*
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE_1_0.txt or copy at
* )
*)
open Batteries
open Stdint
let analyze_boot_expr node ctx attr : 'ty Ctfe_value.t =
match Ast.kind_of node with
| Ast.IntLit (i, bits, signed, loc) ->
begin
match bits with
| 32 -> if signed then
Ctfe_value.Int32 (Int32.of_int i)
else
Ctfe_value.Uint32 (Uint32.of_int i)
| _ -> failwith ""
end
| Ast.BoolLit (b, loc) ->
begin
Ctfe_value.Bool b
end
| Ast.StringLit (str, loc) ->
begin
failwith "[ICE]: not supported yet"
end
| _ ->
begin
Ast.print node;
failwith "analyze_expr: unsupported node"
end
let find_val_impl opt_attr key f =
match opt_attr with
| Some tbl ->
begin
let opt_value_node = Hashtbl.find_option tbl key in
match opt_value_node with
Some value_node ->
begin
match value_node with
| None -> Some (Ctfe_value.Bool true)
| Some value ->
begin
let ctfe_v = f value in
Some (ctfe_v)
end
end
| None -> None
end
| None -> None
(* it can treat simple nodes *)
let find_boot_val opt_attr key ctx =
let f tnode =
analyze_boot_expr tnode ctx None
in
find_val_impl opt_attr key f
let find_attr_ctfe_val opt_attr key parent_env ctx =
let f node =
let sub_expr_spec = SubExprSpec.empty ( ) in
let ( nnode , naux ) =
analyze_expr node parent_env sub_expr_spec ctx None
in
let {
. Aux.ta_type = ty ;
. ;
} = naux in
let ( v , _ ) = eval_texpr_as_ctfe nnode ty ml parent_env ctx None in
v
in
find_attr_val_impl opt_attr key f
let find_attr_ctfe_val opt_attr key parent_env ctx =
let f node =
let sub_expr_spec = SubExprSpec.empty () in
let (nnode, naux) =
analyze_expr node parent_env sub_expr_spec ctx None
in
let {
TAst.Aux.ta_type = ty;
TAst.Aux.ta_ml = ml;
} = naux in
let (v, _) = eval_texpr_as_ctfe nnode ty ml parent_env ctx None in
v
in
find_attr_val_impl opt_attr key f
*)
let find_val opt_attr key ctx =
TODO : support
find_boot_val opt_attr key ctx
let find_bool_val opt_attr key ctx =
let opt_v = find_val opt_attr key ctx in
match opt_v with
| Some v ->
begin
match v with
| Ctfe_value.Bool b -> b
| _ -> failwith "[ERR] not bool value"
end
| None -> false (* default value *)
let find_int32_val opt_attr key ctx =
let opt_v = find_val opt_attr key ctx in
match opt_v with
| Some v -> begin match v with
| Ctfe_value.Int32 i -> Some i
| _ -> failwith "[ERR] not int32 value"
end
| None -> None
let find_uint32_val opt_attr key ctx =
let opt_v = find_val opt_attr key ctx in
match opt_v with
| Some v -> begin match v with
| Ctfe_value.Uint32 i -> Some i
| _ -> failwith "[ERR] not uint32 value"
end
| None -> None
| null | https://raw.githubusercontent.com/yutopp/rill/375b67c03ab2087d0a2a833bd9e80f3e51e2694f/rillc/_migrating/attribute.ml | ocaml | it can treat simple nodes
default value |
* Copyright yutopp 2016 - .
*
* Distributed under the Boost Software License , Version 1.0 .
* ( See accompanying file LICENSE_1_0.txt or copy at
* )
* Copyright yutopp 2016 - .
*
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE_1_0.txt or copy at
* )
*)
open Batteries
open Stdint
let analyze_boot_expr node ctx attr : 'ty Ctfe_value.t =
match Ast.kind_of node with
| Ast.IntLit (i, bits, signed, loc) ->
begin
match bits with
| 32 -> if signed then
Ctfe_value.Int32 (Int32.of_int i)
else
Ctfe_value.Uint32 (Uint32.of_int i)
| _ -> failwith ""
end
| Ast.BoolLit (b, loc) ->
begin
Ctfe_value.Bool b
end
| Ast.StringLit (str, loc) ->
begin
failwith "[ICE]: not supported yet"
end
| _ ->
begin
Ast.print node;
failwith "analyze_expr: unsupported node"
end
let find_val_impl opt_attr key f =
match opt_attr with
| Some tbl ->
begin
let opt_value_node = Hashtbl.find_option tbl key in
match opt_value_node with
Some value_node ->
begin
match value_node with
| None -> Some (Ctfe_value.Bool true)
| Some value ->
begin
let ctfe_v = f value in
Some (ctfe_v)
end
end
| None -> None
end
| None -> None
let find_boot_val opt_attr key ctx =
let f tnode =
analyze_boot_expr tnode ctx None
in
find_val_impl opt_attr key f
let find_attr_ctfe_val opt_attr key parent_env ctx =
let f node =
let sub_expr_spec = SubExprSpec.empty ( ) in
let ( nnode , naux ) =
analyze_expr node parent_env sub_expr_spec ctx None
in
let {
. Aux.ta_type = ty ;
. ;
} = naux in
let ( v , _ ) = eval_texpr_as_ctfe nnode ty ml parent_env ctx None in
v
in
find_attr_val_impl opt_attr key f
let find_attr_ctfe_val opt_attr key parent_env ctx =
let f node =
let sub_expr_spec = SubExprSpec.empty () in
let (nnode, naux) =
analyze_expr node parent_env sub_expr_spec ctx None
in
let {
TAst.Aux.ta_type = ty;
TAst.Aux.ta_ml = ml;
} = naux in
let (v, _) = eval_texpr_as_ctfe nnode ty ml parent_env ctx None in
v
in
find_attr_val_impl opt_attr key f
*)
let find_val opt_attr key ctx =
TODO : support
find_boot_val opt_attr key ctx
let find_bool_val opt_attr key ctx =
let opt_v = find_val opt_attr key ctx in
match opt_v with
| Some v ->
begin
match v with
| Ctfe_value.Bool b -> b
| _ -> failwith "[ERR] not bool value"
end
let find_int32_val opt_attr key ctx =
let opt_v = find_val opt_attr key ctx in
match opt_v with
| Some v -> begin match v with
| Ctfe_value.Int32 i -> Some i
| _ -> failwith "[ERR] not int32 value"
end
| None -> None
let find_uint32_val opt_attr key ctx =
let opt_v = find_val opt_attr key ctx in
match opt_v with
| Some v -> begin match v with
| Ctfe_value.Uint32 i -> Some i
| _ -> failwith "[ERR] not uint32 value"
end
| None -> None
|
65a9515ebd00f9d90a146cf6a88c12865c39c8fe3523229e58deb40add8908b1 | cpeikert/ALCHEMY | Arithmetic.hs | {-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
# LANGUAGE PartialTypeSignatures #
{-# LANGUAGE TemplateHaskell #-}
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
# LANGUAGE PartialTypeSignatures #
{-# LANGUAGE TemplateHaskell #-}
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
{-# LANGUAGE Strict #-}
# OPTIONS_GHC -fno - warn - partial - type - signatures #
# OPTIONS_GHC -fno - cse #
# OPTIONS_GHC -fno - full - laziness #
module Arithmetic where
import Control.Monad.Random
import Control.Monad.Writer
import Data.Functor ((<$>))
import Data.Maybe
import Crypto.Alchemy
import Crypto.Alchemy.Interpreter.Counter.TensorOps
import Crypto.Lol
import Crypto.Lol.Cyclotomic.Tensor.CPP
import Common
type PT = PNoiseCyc PNZ (Cyc CT) F4 (Zq 7)
-- polymorphic over expr alone
addMul :: _ => expr env (_ -> _ -> PT)
addMul = lam2 $ \x y -> (var x +: var y) *: var y
type M'Map = '[ '(F4, F512) ]
type Zqs = '[ Zq $(mkModulus 268440577)
, Zq $(mkModulus 8392193)
, Zq $(mkModulus 1073750017)
]
main :: IO ()
main = do
{-
-- print and size
putStrLn $ "PT expression: " ++ print addMul
putStrLn $ "PT expression size: " ++ show (size addMul)
-}
-- evaluate on random arguments
pt1 <- getRandom
pt2 <- getRandom
let ptresult = eval addMul (PNC pt1) (PNC pt2)
putStrLn $ " PT evaluation result : " + + show ptresult
putStrLn $ " PT expression params:\n " + + params @(PT2CT M'Map Zqs _ _ _ _ ) addMul
putStrLn $ "PT evaluation result: " ++ show ptresult
putStrLn $ "PT expression params:\n" ++ params @(PT2CT M'Map Zqs _ _ _ _) addMul
-}
evalKeysHints (3.0 :: Double) $ do
compile PT->CT once ; interpret multiple times using dup
-- was: ct
ct1 <- pt2ct @M'Map @Zqs @TrivGad @Int64 addMul
{-
let (ct1,tmp) = dup ct
(ct2,tmp') = dup tmp
(ct3,ct4) = dup tmp'
-}
-- encrypt the arguments
arg1 <- encrypt pt1
arg2 <- encrypt pt2
let result = eval ct1 arg1 arg2
{-
-- print and params/size the compiled expression
putStrLnIO $ "CT expression: " ++ print ct2
putStrLnIO $ "CT expression params:\n" ++ params ct3
putStrLnIO $ "CT expression size: " ++ show (size ct4)
-}
-- evaluate with error rates
ct1 ' < - readerToAccumulator $ writeErrorRates @Int64 ct1
let ( result , errors ) = eval ct1 ' > > = ( $ arg1 ) > > = ( $ arg2 )
putStrLnIO " Error rates : "
liftIO $ mapM _ print errors
liftIO clearTensorRecord
ct1' <- readerToAccumulator $ writeErrorRates @Int64 ct1
let (result, errors) = runWriter $ eval ct1' >>= ($ arg1) >>= ($ arg2)
putStrLnIO "Error rates: "
liftIO $ mapM_ print errors
liftIO clearTensorRecord
-}
-- check the decrypted result
decResult <- fromJust <$> readerToAccumulator (decrypt result)
putStrLnIO $ "Decrypted evaluation result: " ++ show decResult
putStrLnIO $ if decResult == unPNC ptresult then "PASS" else "FAIL"
| null | https://raw.githubusercontent.com/cpeikert/ALCHEMY/adbef64576c6f6885600da66f59c5a4ad91810b7/examples/Arithmetic.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
# LANGUAGE TemplateHaskell #
# LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
# LANGUAGE TemplateHaskell #
# LANGUAGE Strict #
polymorphic over expr alone
-- print and size
putStrLn $ "PT expression: " ++ print addMul
putStrLn $ "PT expression size: " ++ show (size addMul)
evaluate on random arguments
was: ct
let (ct1,tmp) = dup ct
(ct2,tmp') = dup tmp
(ct3,ct4) = dup tmp'
encrypt the arguments
-- print and params/size the compiled expression
putStrLnIO $ "CT expression: " ++ print ct2
putStrLnIO $ "CT expression params:\n" ++ params ct3
putStrLnIO $ "CT expression size: " ++ show (size ct4)
evaluate with error rates
check the decrypted result | # LANGUAGE PartialTypeSignatures #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
# LANGUAGE PartialTypeSignatures #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
# OPTIONS_GHC -fno - warn - partial - type - signatures #
# OPTIONS_GHC -fno - cse #
# OPTIONS_GHC -fno - full - laziness #
module Arithmetic where
import Control.Monad.Random
import Control.Monad.Writer
import Data.Functor ((<$>))
import Data.Maybe
import Crypto.Alchemy
import Crypto.Alchemy.Interpreter.Counter.TensorOps
import Crypto.Lol
import Crypto.Lol.Cyclotomic.Tensor.CPP
import Common
type PT = PNoiseCyc PNZ (Cyc CT) F4 (Zq 7)
addMul :: _ => expr env (_ -> _ -> PT)
addMul = lam2 $ \x y -> (var x +: var y) *: var y
type M'Map = '[ '(F4, F512) ]
type Zqs = '[ Zq $(mkModulus 268440577)
, Zq $(mkModulus 8392193)
, Zq $(mkModulus 1073750017)
]
main :: IO ()
main = do
pt1 <- getRandom
pt2 <- getRandom
let ptresult = eval addMul (PNC pt1) (PNC pt2)
putStrLn $ " PT evaluation result : " + + show ptresult
putStrLn $ " PT expression params:\n " + + params @(PT2CT M'Map Zqs _ _ _ _ ) addMul
putStrLn $ "PT evaluation result: " ++ show ptresult
putStrLn $ "PT expression params:\n" ++ params @(PT2CT M'Map Zqs _ _ _ _) addMul
-}
evalKeysHints (3.0 :: Double) $ do
compile PT->CT once ; interpret multiple times using dup
ct1 <- pt2ct @M'Map @Zqs @TrivGad @Int64 addMul
arg1 <- encrypt pt1
arg2 <- encrypt pt2
let result = eval ct1 arg1 arg2
ct1 ' < - readerToAccumulator $ writeErrorRates @Int64 ct1
let ( result , errors ) = eval ct1 ' > > = ( $ arg1 ) > > = ( $ arg2 )
putStrLnIO " Error rates : "
liftIO $ mapM _ print errors
liftIO clearTensorRecord
ct1' <- readerToAccumulator $ writeErrorRates @Int64 ct1
let (result, errors) = runWriter $ eval ct1' >>= ($ arg1) >>= ($ arg2)
putStrLnIO "Error rates: "
liftIO $ mapM_ print errors
liftIO clearTensorRecord
-}
decResult <- fromJust <$> readerToAccumulator (decrypt result)
putStrLnIO $ "Decrypted evaluation result: " ++ show decResult
putStrLnIO $ if decResult == unPNC ptresult then "PASS" else "FAIL"
|
dcab3b74e5119c6dcd018748c885d6f154681ba47f34d35bb31e88a8fa148ad0 | serokell/ariadne | Setup.hs | {-# OPTIONS_GHC -Wall -Wcompat -Werror #-}
import Distribution.PackageDescription
(BuildInfo(cSources, extraLibs), HookedBuildInfo, emptyBuildInfo)
import Distribution.Simple (defaultMainWithHooks, simpleUserHooks)
import Distribution.Simple.LocalBuildInfo (LocalBuildInfo, withPrograms)
import Distribution.Simple.Program (Program, runDbProgram, simpleProgram)
import Distribution.Simple.Setup
(ConfigFlags, configVerbosity, fromFlagOrDefault)
import Distribution.Simple.UserHooks
(UserHooks(hookedPrograms, postConf, preBuild))
import Distribution.Verbosity (normal)
rccProgram :: Program
rccProgram = simpleProgram "rcc"
main :: IO ()
main = defaultMainWithHooks ariadneHooks
ariadneHooks :: UserHooks
ariadneHooks = simpleUserHooks
{ hookedPrograms = [rccProgram]
, postConf = \args cf pd lbi -> do
runRcc cf lbi
postConf simpleUserHooks args cf pd lbi
, preBuild = \_ _ -> addCxxFiles
}
runRcc :: ConfigFlags -> LocalBuildInfo -> IO ()
runRcc configFlags localBuildInfo = do
let verbosity = fromFlagOrDefault normal $ configVerbosity configFlags
programDb = withPrograms localBuildInfo
runDbProgram verbosity rccProgram programDb $
["resources/ariadne-qt.qrc", "-o", "resources/ariadne-qt.cpp"]
addCxxFiles :: IO HookedBuildInfo
addCxxFiles = do
return (Just emptyBuildInfo
{ cSources = ["resources/ariadne-qt.cpp"]
, extraLibs = ["Qt5Core"]
}, [])
| null | https://raw.githubusercontent.com/serokell/ariadne/5f49ee53b6bbaf332cb6f110c75f7b971acdd452/ui/qt-lib/Setup.hs | haskell | # OPTIONS_GHC -Wall -Wcompat -Werror # |
import Distribution.PackageDescription
(BuildInfo(cSources, extraLibs), HookedBuildInfo, emptyBuildInfo)
import Distribution.Simple (defaultMainWithHooks, simpleUserHooks)
import Distribution.Simple.LocalBuildInfo (LocalBuildInfo, withPrograms)
import Distribution.Simple.Program (Program, runDbProgram, simpleProgram)
import Distribution.Simple.Setup
(ConfigFlags, configVerbosity, fromFlagOrDefault)
import Distribution.Simple.UserHooks
(UserHooks(hookedPrograms, postConf, preBuild))
import Distribution.Verbosity (normal)
rccProgram :: Program
rccProgram = simpleProgram "rcc"
main :: IO ()
main = defaultMainWithHooks ariadneHooks
ariadneHooks :: UserHooks
ariadneHooks = simpleUserHooks
{ hookedPrograms = [rccProgram]
, postConf = \args cf pd lbi -> do
runRcc cf lbi
postConf simpleUserHooks args cf pd lbi
, preBuild = \_ _ -> addCxxFiles
}
runRcc :: ConfigFlags -> LocalBuildInfo -> IO ()
runRcc configFlags localBuildInfo = do
let verbosity = fromFlagOrDefault normal $ configVerbosity configFlags
programDb = withPrograms localBuildInfo
runDbProgram verbosity rccProgram programDb $
["resources/ariadne-qt.qrc", "-o", "resources/ariadne-qt.cpp"]
addCxxFiles :: IO HookedBuildInfo
addCxxFiles = do
return (Just emptyBuildInfo
{ cSources = ["resources/ariadne-qt.cpp"]
, extraLibs = ["Qt5Core"]
}, [])
|
5f6fdc17e85c424d1257889711ba7accd15f269eab3d58e201de1e7eed57d508 | j-cr/speck | core_test.clj | (ns speck.v1.core-test
(:require [speck.v1.core :as speck :refer [|]]
[clojure.test :as test :refer [deftest testing is are]]
[clojure.spec.alpha :as s]
;; [clojure.spec.test.alpha :as s.test]
;; [orchestra.spec.test :as orchestra]
))
;; setup --------------------------------------------------------------------------
(set! *data-readers* (assoc *data-readers* '| #'speck/speck-reader))
(test/use-fixtures
:once (fn [body]
(binding [speck/*auto-define-opts* {:enabled false}]
(body))))
;; tests --------------------------------------------------------------------------
(deftest syntax-spec-tests
(are [in out]
(= (s/conform ::speck/syntax in) out)
'[_ => ret]
'{:clauses [{:argslist [:unnamed []], :=> =>, :ret ret}]}
'[a? b? => ret]
'{:clauses [{:argslist [:unnamed [a? b?]], :=> =>, :ret ret}]}
'[x :- a?, y :- b? => ret]
'{:clauses [{:argslist [:named ([x a?] [y b?])], :=> =>, :ret ret}]}
'[a => b, c => d, :ret r]
'{:clauses [{:argslist [:unnamed [a]], :=> =>, :ret b}
{:argslist [:unnamed [c]], :=> =>, :ret d}],
:opts {:ret r}}
'[a => b |- args-expr |= fn-expr]
'{:clauses [{:argslist [:unnamed [a]], :=> =>, :ret b
:args-expr args-expr
:fn-expr fn-expr}]}
'[a => b |- args1, c => d |- args2, :gen g]
'{:clauses [{:argslist [:unnamed [a]], :=> =>, :ret b, :args-expr args1}
{:argslist [:unnamed [c]], :=> =>, :ret d, :args-expr args2}]
:opts {:gen g}}
))
(deftest main-tests
(testing "unnamed args"
(let [fspec (| [odd? even? => pos?])]
(are [expr res] (= res (s/valid? (:args fspec) expr))
[1 2] true
[2 1] false
[] false )
(are [expr res] (= res (s/valid? (:ret fspec) expr))
1 true
0 false ) ))
(testing "named args"
(let [spec (-> #|[x :- odd?, y :- even? => any?] :speck :args)]
(is (= (s/conform spec [1 2]) {:x 1 :y 2}))
))
(testing "zero arg spec"
(let [spec (-> #|[_ => any?] :speck :args)]
(is (s/valid? spec []))
(is (not (s/valid? spec [:foo])))
))
(testing "optional args and varargs"
(let [s1 (-> #|[(s/? odd?) even? => any?] :speck :args)
s2 (-> #|[odd? (s/* even?) => any?] :speck :args)]
(are [expr res] (= res (s/valid? s1 expr))
[1 2] true
[ 2] true
[2 1] false)
(are [expr res] (= res (s/valid? s2 expr))
[1 2] true
[1 ] true
[1 2 2] true
[2 1] false ) ))
(testing "fspec opts: args, ret and fn"
(let [fspec (| [x :- odd?, y :- even? => pos?
:args #(< (:x %) (:y %))
:ret even?
:fn #(= (-> % :args :x) (-> % :ret))])]
(are [expr res] (= res (s/valid? (:args fspec) expr))
[1 2] true
[3 2] false
[2 1] false )
(are [expr res] (= res (s/valid? (:ret fspec) expr))
2 true
1 false
0 false )
(are [expr res] (= res (s/valid? (:fn fspec) expr))
{:args {:x 42} :ret 42} true
{:args {:x 41} :ret 42} false ) ))
(testing "zero-clause speck (opts only)"
(let [fspec (| [:args (s/tuple odd? even?), :ret even?])]
(are [expr res] (= res (s/valid? (:args fspec) expr))
[1 2] true
[1 ] false
[2 1] false )
(are [expr res] (= res (s/valid? (:ret fspec) expr))
2 true
1 false ) ))
(testing "multi-clause"
(let [fspec (| [pos? => pos?, odd? even? => neg?])]
(are [expr res] (= res (s/valid? (:args fspec) expr))
[1] true
[1 2] true
[2 1] false
[1 2 3] false
)
(are [expr res] (= res (s/conform (:args fspec) expr))
[1] [:arity-1 {:%1 1}]
[1 2] [:arity-2 {:%1 1, :%2 2}]
)
(are [expr res] (= res (s/valid? (:fn fspec) expr))
{:args (first {:arity-1 {:%1 42}}) :ret 42} true
{:args (first {:arity-1 {:%1 42}}) :ret -1} false
{:args (first {:arity-2 {:%1 1 :%2 2}}) :ret -1} true
{:args (first {:arity-2 {:%1 1 :%2 2}}) :ret 42} false
) ))
(testing "multi-clause with fspec opts"
(let [fspec (| [pos? => pos?, odd? even? => neg?
:args #(->> % val vals (every? (fn [x] (> x 10))))
:ret even?
:fn #(->> % :ret odd?)])]
(are [expr res] (= res (s/valid? (:args fspec) expr))
[1] false
[11] true
[1 2] false
[11 12] true )
(are [expr res] (= res (s/valid? (:fn fspec) expr))
{:args (first {:arity-1 {:%1 42}}) :ret 42} false
{:args (first {:arity-1 {:%1 41}}) :ret 41} true
{:args (first {:arity-2 {:%1 1 :%2 2}}) :ret -2} false
{:args (first {:arity-2 {:%1 1 :%2 2}}) :ret -1} true
) ))
(testing "|- and |= (single clause)"
(let [fspec (| [x :- odd?, y :- even? => pos?
|- (< x y)
|= (= x %) ])]
(are [expr res] (= res (s/valid? (:args fspec) expr))
[1 2] true
[3 2] false
[2 4] false )
(are [expr res] (= res (s/valid? (:fn fspec) expr))
{:args {:x 41} :ret 41} true
{:args {:x 41} :ret 42} false
) ))
(testing "|- and |= (multi clause)"
(let [fspec (| [pos? => pos? |- (< %1 10) |= (= %1 %)
odd? even? => neg? |- (< %1 %2) |= (< %1 %2 %)
])]
(are [expr res] (= res (s/valid? (:args fspec) expr))
[1] true
[11] false
[-1] false
[1 2] true
[2 1] false
[3 2] false )
(are [expr res] (= res (s/valid? (:fn fspec) expr))
pos ? and (= % 1 % )
{:args (first {:arity-1 {:%1 42}}) :ret 42} true
{:args (first {:arity-1 {:%1 -1}}) :ret -1} false
{:args (first {:arity-1 {:%1 41}}) :ret 42} false
neg ? and ( < % 1 % 2 % )
{:args (first {:arity-2 {:%1 -3 :%2 -2}}) :ret -1} true
{:args (first {:arity-2 {:%1 -3 :%2 -2}}) :ret 1} false
{:args (first {:arity-2 {:%1 -1 :%2 -2}}) :ret -3} false
) ))
(testing "|- and |= (single, with opts)"
(let [fspec (| [x :- odd? => any?
|- (> x 10) |= (> % 10)
:args #(< (:x %) 20) :fn #(< (:ret %) 20)
])]
(are [expr res] (= res (s/valid? (:args fspec) expr))
[1] false
[11] true
[12] false
[21] false )
(are [expr res] (= res (s/valid? (:fn fspec) expr))
{:ret 1} false
{:ret 11} true
{:ret 21} false )
))
(testing "|- and |= (multi, with opts)"
;; same test cases as in previous, except now args are with s/or tag
(let [fspec (| [_ => any?, x :- odd? => odd?
|- (> x 10)
|= (> % 10)
:args #(< (-> % val :x) 20) ; [:arity-1 {:x ..}]
{ : args [: arity-1 ... ] , : ret .. }
])]
(are [expr res] (= res (s/valid? (:args fspec) expr))
[1] false
[11] true
[12] false
[21] false )
(are [expr res] (= res (->> {:args (first {:arity-1 :any}) :ret expr}
(s/valid? (:fn fspec) ,,,)))
1 false
11 true
12 false
21 false )
))
)
(comment
(|[x1 => r1 |- args1 |= fn1
x2 y2 => r2 |- args2 |= fn2
:args opts-args
:ret opts-ret
:fn opts-fn])
;; expands to:
(s/fspec
:args (s/and
(s/or :arity-1 (s/and (s/cat :%1 x1 ) (fn [{:keys [%1 ]}] args1))
:arity-2 (s/and (s/cat :%1 x2 :%2 y2) (fn [{:keys [%1 %2]}] args2)))
opts-args)
:ret (s/and any? opts-ret)
:fn (s/and
(fn [%]
(case (-> % :args key)
:arity-1 (and
(s/valid? r1 (-> % :ret))
(let [{:keys [%1 %2]} (-> % :args val)
% (-> % :ret)]
fn1))
:arity-2 (and
(s/valid? r2 (-> % :ret))
(let [{:keys [%1 %2]} (-> % :args val) (-> % :args val)
% (-> % :ret)]
fn2))))
opts-fn))
)
;; --------------------------------------------------------------------------------
( if ( test / successful ? ( test / run - tests ) ) : ok : )
| null | https://raw.githubusercontent.com/j-cr/speck/dfa4068fd06a31471223feefd88e82afe4d61ceb/test/speck/v1/core_test.clj | clojure | [clojure.spec.test.alpha :as s.test]
[orchestra.spec.test :as orchestra]
setup --------------------------------------------------------------------------
tests --------------------------------------------------------------------------
same test cases as in previous, except now args are with s/or tag
[:arity-1 {:x ..}]
expands to:
-------------------------------------------------------------------------------- | (ns speck.v1.core-test
(:require [speck.v1.core :as speck :refer [|]]
[clojure.test :as test :refer [deftest testing is are]]
[clojure.spec.alpha :as s]
))
(set! *data-readers* (assoc *data-readers* '| #'speck/speck-reader))
(test/use-fixtures
:once (fn [body]
(binding [speck/*auto-define-opts* {:enabled false}]
(body))))
(deftest syntax-spec-tests
(are [in out]
(= (s/conform ::speck/syntax in) out)
'[_ => ret]
'{:clauses [{:argslist [:unnamed []], :=> =>, :ret ret}]}
'[a? b? => ret]
'{:clauses [{:argslist [:unnamed [a? b?]], :=> =>, :ret ret}]}
'[x :- a?, y :- b? => ret]
'{:clauses [{:argslist [:named ([x a?] [y b?])], :=> =>, :ret ret}]}
'[a => b, c => d, :ret r]
'{:clauses [{:argslist [:unnamed [a]], :=> =>, :ret b}
{:argslist [:unnamed [c]], :=> =>, :ret d}],
:opts {:ret r}}
'[a => b |- args-expr |= fn-expr]
'{:clauses [{:argslist [:unnamed [a]], :=> =>, :ret b
:args-expr args-expr
:fn-expr fn-expr}]}
'[a => b |- args1, c => d |- args2, :gen g]
'{:clauses [{:argslist [:unnamed [a]], :=> =>, :ret b, :args-expr args1}
{:argslist [:unnamed [c]], :=> =>, :ret d, :args-expr args2}]
:opts {:gen g}}
))
(deftest main-tests
(testing "unnamed args"
(let [fspec (| [odd? even? => pos?])]
(are [expr res] (= res (s/valid? (:args fspec) expr))
[1 2] true
[2 1] false
[] false )
(are [expr res] (= res (s/valid? (:ret fspec) expr))
1 true
0 false ) ))
(testing "named args"
(let [spec (-> #|[x :- odd?, y :- even? => any?] :speck :args)]
(is (= (s/conform spec [1 2]) {:x 1 :y 2}))
))
(testing "zero arg spec"
(let [spec (-> #|[_ => any?] :speck :args)]
(is (s/valid? spec []))
(is (not (s/valid? spec [:foo])))
))
(testing "optional args and varargs"
(let [s1 (-> #|[(s/? odd?) even? => any?] :speck :args)
s2 (-> #|[odd? (s/* even?) => any?] :speck :args)]
(are [expr res] (= res (s/valid? s1 expr))
[1 2] true
[ 2] true
[2 1] false)
(are [expr res] (= res (s/valid? s2 expr))
[1 2] true
[1 ] true
[1 2 2] true
[2 1] false ) ))
(testing "fspec opts: args, ret and fn"
(let [fspec (| [x :- odd?, y :- even? => pos?
:args #(< (:x %) (:y %))
:ret even?
:fn #(= (-> % :args :x) (-> % :ret))])]
(are [expr res] (= res (s/valid? (:args fspec) expr))
[1 2] true
[3 2] false
[2 1] false )
(are [expr res] (= res (s/valid? (:ret fspec) expr))
2 true
1 false
0 false )
(are [expr res] (= res (s/valid? (:fn fspec) expr))
{:args {:x 42} :ret 42} true
{:args {:x 41} :ret 42} false ) ))
(testing "zero-clause speck (opts only)"
(let [fspec (| [:args (s/tuple odd? even?), :ret even?])]
(are [expr res] (= res (s/valid? (:args fspec) expr))
[1 2] true
[1 ] false
[2 1] false )
(are [expr res] (= res (s/valid? (:ret fspec) expr))
2 true
1 false ) ))
(testing "multi-clause"
(let [fspec (| [pos? => pos?, odd? even? => neg?])]
(are [expr res] (= res (s/valid? (:args fspec) expr))
[1] true
[1 2] true
[2 1] false
[1 2 3] false
)
(are [expr res] (= res (s/conform (:args fspec) expr))
[1] [:arity-1 {:%1 1}]
[1 2] [:arity-2 {:%1 1, :%2 2}]
)
(are [expr res] (= res (s/valid? (:fn fspec) expr))
{:args (first {:arity-1 {:%1 42}}) :ret 42} true
{:args (first {:arity-1 {:%1 42}}) :ret -1} false
{:args (first {:arity-2 {:%1 1 :%2 2}}) :ret -1} true
{:args (first {:arity-2 {:%1 1 :%2 2}}) :ret 42} false
) ))
(testing "multi-clause with fspec opts"
(let [fspec (| [pos? => pos?, odd? even? => neg?
:args #(->> % val vals (every? (fn [x] (> x 10))))
:ret even?
:fn #(->> % :ret odd?)])]
(are [expr res] (= res (s/valid? (:args fspec) expr))
[1] false
[11] true
[1 2] false
[11 12] true )
(are [expr res] (= res (s/valid? (:fn fspec) expr))
{:args (first {:arity-1 {:%1 42}}) :ret 42} false
{:args (first {:arity-1 {:%1 41}}) :ret 41} true
{:args (first {:arity-2 {:%1 1 :%2 2}}) :ret -2} false
{:args (first {:arity-2 {:%1 1 :%2 2}}) :ret -1} true
) ))
(testing "|- and |= (single clause)"
(let [fspec (| [x :- odd?, y :- even? => pos?
|- (< x y)
|= (= x %) ])]
(are [expr res] (= res (s/valid? (:args fspec) expr))
[1 2] true
[3 2] false
[2 4] false )
(are [expr res] (= res (s/valid? (:fn fspec) expr))
{:args {:x 41} :ret 41} true
{:args {:x 41} :ret 42} false
) ))
(testing "|- and |= (multi clause)"
(let [fspec (| [pos? => pos? |- (< %1 10) |= (= %1 %)
odd? even? => neg? |- (< %1 %2) |= (< %1 %2 %)
])]
(are [expr res] (= res (s/valid? (:args fspec) expr))
[1] true
[11] false
[-1] false
[1 2] true
[2 1] false
[3 2] false )
(are [expr res] (= res (s/valid? (:fn fspec) expr))
pos ? and (= % 1 % )
{:args (first {:arity-1 {:%1 42}}) :ret 42} true
{:args (first {:arity-1 {:%1 -1}}) :ret -1} false
{:args (first {:arity-1 {:%1 41}}) :ret 42} false
neg ? and ( < % 1 % 2 % )
{:args (first {:arity-2 {:%1 -3 :%2 -2}}) :ret -1} true
{:args (first {:arity-2 {:%1 -3 :%2 -2}}) :ret 1} false
{:args (first {:arity-2 {:%1 -1 :%2 -2}}) :ret -3} false
) ))
(testing "|- and |= (single, with opts)"
(let [fspec (| [x :- odd? => any?
|- (> x 10) |= (> % 10)
:args #(< (:x %) 20) :fn #(< (:ret %) 20)
])]
(are [expr res] (= res (s/valid? (:args fspec) expr))
[1] false
[11] true
[12] false
[21] false )
(are [expr res] (= res (s/valid? (:fn fspec) expr))
{:ret 1} false
{:ret 11} true
{:ret 21} false )
))
(testing "|- and |= (multi, with opts)"
(let [fspec (| [_ => any?, x :- odd? => odd?
|- (> x 10)
|= (> % 10)
{ : args [: arity-1 ... ] , : ret .. }
])]
(are [expr res] (= res (s/valid? (:args fspec) expr))
[1] false
[11] true
[12] false
[21] false )
(are [expr res] (= res (->> {:args (first {:arity-1 :any}) :ret expr}
(s/valid? (:fn fspec) ,,,)))
1 false
11 true
12 false
21 false )
))
)
(comment
(|[x1 => r1 |- args1 |= fn1
x2 y2 => r2 |- args2 |= fn2
:args opts-args
:ret opts-ret
:fn opts-fn])
(s/fspec
:args (s/and
(s/or :arity-1 (s/and (s/cat :%1 x1 ) (fn [{:keys [%1 ]}] args1))
:arity-2 (s/and (s/cat :%1 x2 :%2 y2) (fn [{:keys [%1 %2]}] args2)))
opts-args)
:ret (s/and any? opts-ret)
:fn (s/and
(fn [%]
(case (-> % :args key)
:arity-1 (and
(s/valid? r1 (-> % :ret))
(let [{:keys [%1 %2]} (-> % :args val)
% (-> % :ret)]
fn1))
:arity-2 (and
(s/valid? r2 (-> % :ret))
(let [{:keys [%1 %2]} (-> % :args val) (-> % :args val)
% (-> % :ret)]
fn2))))
opts-fn))
)
( if ( test / successful ? ( test / run - tests ) ) : ok : )
|
49fd508891d2727a3c09b8af592a906dea6f9ee51753b52d8900b0c8f97c4e52 | hjcapple/reading-sicp | exercise_3_19.scm | #lang sicp
P179 - [ 练习 3.19 ]
让 x1 每次用 cdr 前进 1,x2 每次用 cddr 前进 2 格。初始时 x2 在 x1 前面 。
; 这样当列表包含环时,x2 就会绕圈,从后面追上 x1。
; 因此当 x1 和 x2 相遇,就表示有环。当 x2 到达尾部,就表示不包含环。
这里不用判断 x1 是否到达尾部,因为 x2 每次前进 2 格,会比 x1 要快,不含环时,x2
(define (contains-cycle? x)
(define (contains-cycle-step? x1 x2)
(cond ((not (pair? x2)) false)
((not (pair? (cdr x2))) false)
((eq? x1 x2) true)
(else (contains-cycle-step? (cdr x1) (cddr x2)))))
(if (not (pair? x))
false
(contains-cycle-step? x (cdr x))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(define (last-pair x)
(if (null? (cdr x))
x
(last-pair (cdr x))))
(define (make-cycle x)
(set-cdr! (last-pair x) x)
x)
(contains-cycle? (list 'a 'b 'c)) ; #f
(contains-cycle? (make-cycle (list 'a))) ; #t
(contains-cycle? (make-cycle (list 'a 'b 'c))) ; #t
(contains-cycle? (make-cycle (list 'a 'b 'c 'd))) ; #t
(contains-cycle? (cons 1 2)) ; f
(contains-cycle? '(1)) ; f
(contains-cycle? '()) ; f
| null | https://raw.githubusercontent.com/hjcapple/reading-sicp/7051d55dde841c06cf9326dc865d33d656702ecc/chapter_3/exercise_3_19.scm | scheme | 这样当列表包含环时,x2 就会绕圈,从后面追上 x1。
因此当 x1 和 x2 相遇,就表示有环。当 x2 到达尾部,就表示不包含环。
#f
#t
#t
#t
f
f
f | #lang sicp
P179 - [ 练习 3.19 ]
让 x1 每次用 cdr 前进 1,x2 每次用 cddr 前进 2 格。初始时 x2 在 x1 前面 。
这里不用判断 x1 是否到达尾部,因为 x2 每次前进 2 格,会比 x1 要快,不含环时,x2
(define (contains-cycle? x)
(define (contains-cycle-step? x1 x2)
(cond ((not (pair? x2)) false)
((not (pair? (cdr x2))) false)
((eq? x1 x2) true)
(else (contains-cycle-step? (cdr x1) (cddr x2)))))
(if (not (pair? x))
false
(contains-cycle-step? x (cdr x))))
(define (last-pair x)
(if (null? (cdr x))
x
(last-pair (cdr x))))
(define (make-cycle x)
(set-cdr! (last-pair x) x)
x)
|
570f3875336add551a2056dab9089b3e06b52f748b5ee1cb3eda7cf27cd42d94 | jwiegley/notes | TwanFree.hs | {-# LANGUAGE RankNTypes #-}
# LANGUAGE DeriveFunctor #
module TwanFree where
import Control.Monad.Free
data TermF r = PutChar Char r
| GetChar (Char -> r)
deriving Functor
data Term m = Term {
putChar :: Char -> m (),
getChar :: m Char
}
data TFree effect a = TFree { runTFree :: forall m. Monad m => effect m -> m a }
to :: Free TermF a -> TFree Term a
to (Pure x) = TFree $ \_ -> return x
to (Free (PutChar x next)) = TFree $ \e -> do
TwanFree.putChar e x
runTFree (to next) e
to (Free (GetChar f)) = TFree $ \e -> do
x <- TwanFree.getChar e
runTFree (to (f x)) e
from :: TFree Term a -> Free TermF a
from (TFree f) = f Term
{ TwanFree.putChar = \x -> Free (PutChar x (Pure ()))
, TwanFree.getChar = Free (GetChar Pure)
}
| null | https://raw.githubusercontent.com/jwiegley/notes/24574b02bfd869845faa1521854f90e4e8bf5e9a/gists/e420888b9551a4207eb3/TwanFree.hs | haskell | # LANGUAGE RankNTypes # | # LANGUAGE DeriveFunctor #
module TwanFree where
import Control.Monad.Free
data TermF r = PutChar Char r
| GetChar (Char -> r)
deriving Functor
data Term m = Term {
putChar :: Char -> m (),
getChar :: m Char
}
data TFree effect a = TFree { runTFree :: forall m. Monad m => effect m -> m a }
to :: Free TermF a -> TFree Term a
to (Pure x) = TFree $ \_ -> return x
to (Free (PutChar x next)) = TFree $ \e -> do
TwanFree.putChar e x
runTFree (to next) e
to (Free (GetChar f)) = TFree $ \e -> do
x <- TwanFree.getChar e
runTFree (to (f x)) e
from :: TFree Term a -> Free TermF a
from (TFree f) = f Term
{ TwanFree.putChar = \x -> Free (PutChar x (Pure ()))
, TwanFree.getChar = Free (GetChar Pure)
}
|
03779d11bccceee3730a6b6a88c30efefa8764ac4e8df34d245eadd5d24e7f46 | Haskell-OpenAPI-Code-Generator/Stripe-Haskell-Library | RefundNextActionDisplayDetails.hs | {-# LANGUAGE MultiWayIf #-}
CHANGE WITH CAUTION : This is a generated code file generated by -OpenAPI-Code-Generator/Haskell-OpenAPI-Client-Code-Generator .
{-# LANGUAGE OverloadedStrings #-}
-- | Contains the types generated from the schema RefundNextActionDisplayDetails
module StripeAPI.Types.RefundNextActionDisplayDetails where
import qualified Control.Monad.Fail
import qualified Data.Aeson
import qualified Data.Aeson as Data.Aeson.Encoding.Internal
import qualified Data.Aeson as Data.Aeson.Types
import qualified Data.Aeson as Data.Aeson.Types.FromJSON
import qualified Data.Aeson as Data.Aeson.Types.Internal
import qualified Data.Aeson as Data.Aeson.Types.ToJSON
import qualified Data.ByteString.Char8
import qualified Data.ByteString.Char8 as Data.ByteString.Internal
import qualified Data.Foldable
import qualified Data.Functor
import qualified Data.Maybe
import qualified Data.Scientific
import qualified Data.Text
import qualified Data.Text.Internal
import qualified Data.Time.Calendar as Data.Time.Calendar.Days
import qualified Data.Time.LocalTime as Data.Time.LocalTime.Internal.ZonedTime
import qualified GHC.Base
import qualified GHC.Classes
import qualified GHC.Int
import qualified GHC.Show
import qualified GHC.Types
import qualified StripeAPI.Common
import StripeAPI.TypeAlias
import {-# SOURCE #-} StripeAPI.Types.EmailSent
import qualified Prelude as GHC.Integer.Type
import qualified Prelude as GHC.Maybe
-- | Defines the object schema located at @components.schemas.refund_next_action_display_details@ in the specification.
data RefundNextActionDisplayDetails = RefundNextActionDisplayDetails
{ -- | email_sent:
refundNextActionDisplayDetailsEmailSent :: EmailSent,
-- | expires_at: The expiry timestamp.
refundNextActionDisplayDetailsExpiresAt :: GHC.Types.Int
}
deriving
( GHC.Show.Show,
GHC.Classes.Eq
)
instance Data.Aeson.Types.ToJSON.ToJSON RefundNextActionDisplayDetails where
toJSON obj = Data.Aeson.Types.Internal.object (Data.Foldable.concat (["email_sent" Data.Aeson.Types.ToJSON..= refundNextActionDisplayDetailsEmailSent obj] : ["expires_at" Data.Aeson.Types.ToJSON..= refundNextActionDisplayDetailsExpiresAt obj] : GHC.Base.mempty))
toEncoding obj = Data.Aeson.Encoding.Internal.pairs (GHC.Base.mconcat (Data.Foldable.concat (["email_sent" Data.Aeson.Types.ToJSON..= refundNextActionDisplayDetailsEmailSent obj] : ["expires_at" Data.Aeson.Types.ToJSON..= refundNextActionDisplayDetailsExpiresAt obj] : GHC.Base.mempty)))
instance Data.Aeson.Types.FromJSON.FromJSON RefundNextActionDisplayDetails where
parseJSON = Data.Aeson.Types.FromJSON.withObject "RefundNextActionDisplayDetails" (\obj -> (GHC.Base.pure RefundNextActionDisplayDetails GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..: "email_sent")) GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..: "expires_at"))
-- | Create a new 'RefundNextActionDisplayDetails' with all required fields.
mkRefundNextActionDisplayDetails ::
-- | 'refundNextActionDisplayDetailsEmailSent'
EmailSent ->
-- | 'refundNextActionDisplayDetailsExpiresAt'
GHC.Types.Int ->
RefundNextActionDisplayDetails
mkRefundNextActionDisplayDetails refundNextActionDisplayDetailsEmailSent refundNextActionDisplayDetailsExpiresAt =
RefundNextActionDisplayDetails
{ refundNextActionDisplayDetailsEmailSent = refundNextActionDisplayDetailsEmailSent,
refundNextActionDisplayDetailsExpiresAt = refundNextActionDisplayDetailsExpiresAt
}
| null | https://raw.githubusercontent.com/Haskell-OpenAPI-Code-Generator/Stripe-Haskell-Library/ba4401f083ff054f8da68c741f762407919de42f/src/StripeAPI/Types/RefundNextActionDisplayDetails.hs | haskell | # LANGUAGE MultiWayIf #
# LANGUAGE OverloadedStrings #
| Contains the types generated from the schema RefundNextActionDisplayDetails
# SOURCE #
| Defines the object schema located at @components.schemas.refund_next_action_display_details@ in the specification.
| email_sent:
| expires_at: The expiry timestamp.
| Create a new 'RefundNextActionDisplayDetails' with all required fields.
| 'refundNextActionDisplayDetailsEmailSent'
| 'refundNextActionDisplayDetailsExpiresAt' | CHANGE WITH CAUTION : This is a generated code file generated by -OpenAPI-Code-Generator/Haskell-OpenAPI-Client-Code-Generator .
module StripeAPI.Types.RefundNextActionDisplayDetails where
import qualified Control.Monad.Fail
import qualified Data.Aeson
import qualified Data.Aeson as Data.Aeson.Encoding.Internal
import qualified Data.Aeson as Data.Aeson.Types
import qualified Data.Aeson as Data.Aeson.Types.FromJSON
import qualified Data.Aeson as Data.Aeson.Types.Internal
import qualified Data.Aeson as Data.Aeson.Types.ToJSON
import qualified Data.ByteString.Char8
import qualified Data.ByteString.Char8 as Data.ByteString.Internal
import qualified Data.Foldable
import qualified Data.Functor
import qualified Data.Maybe
import qualified Data.Scientific
import qualified Data.Text
import qualified Data.Text.Internal
import qualified Data.Time.Calendar as Data.Time.Calendar.Days
import qualified Data.Time.LocalTime as Data.Time.LocalTime.Internal.ZonedTime
import qualified GHC.Base
import qualified GHC.Classes
import qualified GHC.Int
import qualified GHC.Show
import qualified GHC.Types
import qualified StripeAPI.Common
import StripeAPI.TypeAlias
import qualified Prelude as GHC.Integer.Type
import qualified Prelude as GHC.Maybe
data RefundNextActionDisplayDetails = RefundNextActionDisplayDetails
refundNextActionDisplayDetailsEmailSent :: EmailSent,
refundNextActionDisplayDetailsExpiresAt :: GHC.Types.Int
}
deriving
( GHC.Show.Show,
GHC.Classes.Eq
)
instance Data.Aeson.Types.ToJSON.ToJSON RefundNextActionDisplayDetails where
toJSON obj = Data.Aeson.Types.Internal.object (Data.Foldable.concat (["email_sent" Data.Aeson.Types.ToJSON..= refundNextActionDisplayDetailsEmailSent obj] : ["expires_at" Data.Aeson.Types.ToJSON..= refundNextActionDisplayDetailsExpiresAt obj] : GHC.Base.mempty))
toEncoding obj = Data.Aeson.Encoding.Internal.pairs (GHC.Base.mconcat (Data.Foldable.concat (["email_sent" Data.Aeson.Types.ToJSON..= refundNextActionDisplayDetailsEmailSent obj] : ["expires_at" Data.Aeson.Types.ToJSON..= refundNextActionDisplayDetailsExpiresAt obj] : GHC.Base.mempty)))
instance Data.Aeson.Types.FromJSON.FromJSON RefundNextActionDisplayDetails where
parseJSON = Data.Aeson.Types.FromJSON.withObject "RefundNextActionDisplayDetails" (\obj -> (GHC.Base.pure RefundNextActionDisplayDetails GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..: "email_sent")) GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..: "expires_at"))
mkRefundNextActionDisplayDetails ::
EmailSent ->
GHC.Types.Int ->
RefundNextActionDisplayDetails
mkRefundNextActionDisplayDetails refundNextActionDisplayDetailsEmailSent refundNextActionDisplayDetailsExpiresAt =
RefundNextActionDisplayDetails
{ refundNextActionDisplayDetailsEmailSent = refundNextActionDisplayDetailsEmailSent,
refundNextActionDisplayDetailsExpiresAt = refundNextActionDisplayDetailsExpiresAt
}
|
8f6c9251dcfafd9e3cd91c14dfe9128a6751eee5484bfe0513b4bec6841635aa | jimcrayne/jhc | tcfail168.hs |
Test trac # 719 ( should n't give the entire do block in the error message )
module ShouldFail where
foo = do
putChar
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
| null | https://raw.githubusercontent.com/jimcrayne/jhc/1ff035af3d697f9175f8761c8d08edbffde03b4e/regress/tests/1_typecheck/4_fail/ghc/tcfail168.hs | haskell |
Test trac # 719 ( should n't give the entire do block in the error message )
module ShouldFail where
foo = do
putChar
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
putChar 'a'
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.