_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
0e159116fdb49952db5b074480612d24c5c7fca0d4040f5b37ed2d833161762e | Oblosys/proxima | Derivation.hs | {-# OPTIONS_GHC -fglasgow-exts -fallow-incoherent-instances -fallow-overlapping-instances -fallow-undecidable-instances -fno-monomorphism-restriction #-}
module Derivation where
import Layers
import DPP_Lib
--import DPPClass_Lib
import Char
newtype InfTupleExplicit a b = InfTupleExplicit (a, (b, InfTupleExplicit a b))
newtype TupleStep a inft = I (a, inft)
type InfTuple2 a b = Fix ((TupleStep a) :.: (TupleStep b))
type InfTuple3 a b c = Fix ((TupleStep a) :.: (TupleStep b) :.: (TupleStep c))
inftup2 :: InfTuple2 Char Bool
inftup2 = Fix $ Comp (I ('c', I (True, inftup2)))
lft f = I . f
comp f g = (\x -> Comp $ (f . g) x)
fix :: (a->a) -> a
fix a = let fixa = a fixa
in fixa
fFix f = let fixf = Fix . f $ fFix f
in fixf
inftup2Compositional :: InfTuple2 Char Bool
inftup2Compositional = fFix $ lft a1 `comp` lft a2
where a1 n = ('c', n)
a2 n = (True, n)
inftup3Compositional :: InfTuple3 Char Bool Int
inftup3Compositional = fFix $ lft a1 `comp` lft a2 `comp` lft a3
where a1 n = ('c', n)
a2 n = (True, n)
a3 n = (8::Int, n)
compterm :: ((TupleStep Bool) :.: ((TupleStep Char) :.: (TupleStep Int))) ()
compterm = (Comp . I) ( True
, (Comp . I) ('a', I (0, ()))
)
newtype Fix f = Fix (f (Fix f))
newtype (:.:) f g ns = Comp (f (g ns))
newtype Step a b ns = Step { unStep :: (a -> (b, ns)) }
newtype DownStep a b ns = DownStep { unDownStep :: (a -> (b, ns)) }
type Layer doc pres gest upd =
Fix (Step doc pres :.: Step gest upd)
lift0, lift1, lift2, lift3, lift4, lift5 ::
Simple state map doc pres gest upd ->
state -> Layer doc pres gest upd
lift0 simple state = step1 state
where step1 hArg = Fix . Comp . Step $
\vArg -> let (pres, hRes) =
present simple hArg vArg
in (pres, step2 hRes)
step2 hArg = Step $
\vArg -> let (upd, hRes) =
interpret simple hArg vArg
in (upd, step1 hRes)
factorize out next step
lift1 simple state =
step1 (step2 (lift1 simple)) state
where step1 next hArg = Fix . Comp . Step $
\vArg -> let (pres, hRes) =
present simple hArg vArg
in (pres, next hRes)
step2 next hArg = Step $
\vArg -> let (upd, hRes) =
interpret simple hArg vArg
in (upd, next hRes)
-- now use liftStep to remove Step
liftStep f next horArgs = Step $
\vArg -> let (vertRes, horRes) = f horArgs vArg
in (vertRes, next horRes)
lift2 simple state =
step1 (step2 (lift2 simple)) state
where step1 next hArg = Fix . Comp $
liftStep (present simple) next hArg
step2 next hArg =
liftStep (interpret simple) next hArg
-- capture recursion with fix
lift3 simple = fix $ step1 . step2
where step1 next hArg = Fix . Comp $
liftStep (present simple) next hArg
step2 next hArg =
liftStep (interpret simple) next hArg
-- remove Fix with specifal fix
lfix f = fix f'
where f' n = (Fix . f n)
lift4 simple = lfix $ step1 . step2
where step1 next hArg = Comp $
liftStep (present simple) next hArg
step2 next hArg =
liftStep (interpret simple) next hArg
-- remove Comp with special composition
lcomp :: (b -> t -> (f (g ns))) ->
(a->b) ->
a -> t -> ((f :.: g) ns)
lcomp f g = (\n s -> Comp $ f n s) . g
f g = ( \n s - > Comp ( ( f . ) n s ) )
lift5 simple = lfix $ step1 `lcomp` step2
where step1 next args =
liftStep (present simple) next args
step2 next args =
liftStep (interpret simple) next args
-- rewrite, dropping parameters
lift :: Simple state map doc pres gest upd ->
state -> Layer doc pres gest upd
lift simple = lfix $ liftStep (present simple)
`lcomp` liftStep (interpret simple)
combine0, combine2, combine3, combine4, combine5, combine6 ::
Layer high med emed ehigh ->
Layer med low elow emed ->
Layer high low elow ehigh
combine0 upr lwr = step1 upr lwr
where step1 (Fix (Comp (Step upr)))
(Fix (Comp (Step lwr))) =
Fix . Comp . Step $
\high -> let (med, uprIntr) = upr high
(low, lwrIntr) = lwr med
in (low, step2 uprIntr lwrIntr)
step2 (Step upr) (Step lwr) = Step $
\low -> let (med, lwrPres) = lwr low
(high, uprPres) = upr med
in (high, step1 uprPres lwrPres)
-- pass next step as arg and rewrite to fix
combine2 = fix $ (step1 . step2)
where step1 next (Fix (Comp (Step upr)))
(Fix (Comp (Step lwr))) =
Fix . Comp . Step $
\high -> let (med, uprIntr) = upr high
(low, lwrIntr) = lwr med
in (low, next uprIntr lwrIntr)
step2 next (Step upr) (Step lwr) = Step $
\low -> let (med, lwrPres) = lwr low
(high, uprPres) = upr med
in (high, next uprPres lwrPres)
-- now we capture the up down with these functions:
combineStepDown :: (x -> y -> ns) ->
Step a b x -> Step b c y -> Step a c ns
combineStepDown next (Step upper) (Step lower) = Step $
\h -> let (m ,upperf) = upper h
(l, lowerf) = lower m
in (l, next upperf lowerf)
combineStepUp :: (x -> y -> ns) ->
Step b c x -> Step a b y -> Step a c ns
combineStepUp next (Step upper) (Step lower) = Step $
\l -> let (m, lowerf) = lower l
(h, upperf) = upper m
in (h, next upperf lowerf)
combine3 = fix $ (step1 . step2)
where step1 next (Fix (Comp upr)) (Fix (Comp lwr)) =
Fix . Comp $ combineStepDown next upr lwr
step2 = combineStepUp
combine4 = fix (\n (Fix u) (Fix l)-> Fix $
(step1 . step2) n u l)
where step1 next (Comp upr) (Comp lwr) = Comp $
combineStepDown next upr lwr
step2 = combineStepUp
--------------------------------------------------------
cfix f = fix f'
where f' n (Fix u) (Fix l) = Fix $ f n u l
combine5 = cfix (step1 . step2)
where step1 next (Comp upr) (Comp lwr) = Comp $
combineStepDown next upr lwr
step2 = combineStepUp
ccomp f g = f' . g
where f' m (Comp u) (Comp l) = Comp $ f m u l
combine6 = cfix $ step1 `ccomp` step2
where step1 next upr lwr =
combineStepDown next upr lwr
step2 = combineStepUp
combine :: Layer high med emed ehigh ->
Layer med low elow emed ->
Layer high low elow ehigh
combine = cfix $ combineStepDown `ccomp` combineStepUp
editLoop : : forall a . Layer Document Rendering ( EditRendering a ) ( EditDocument a ) - > Document - > IO ( )
editLoop ( Fix compPresentStep ) doc =
do { let presentStep = decomp compPresentStep
; let ( pres : : Rendering , interpretStep ) = unStep presentStep $ doc
; showRendering pres
; gesture : : EditRendering a < - getGesture
; let ( update : : a ,
presentStep ' : : Layer Document Rendering ( EditRendering a ) ( EditDocument a )
) = unStep interpretStep $ gesture
; let doc ' : : Document = updateDocument update doc
; editLoop presentStep ' doc '
}
editLoop :: forall a . Layer Document Rendering (EditRendering a) (EditDocument a) -> Document -> IO ()
editLoop (Fix compPresentStep) doc =
do { let presentStep = decomp compPresentStep
; let (pres :: Rendering , interpretStep) = unStep presentStep $ doc
; showRendering pres
; gesture :: EditRendering a <- getGesture
; let ( update :: EditDocument a ,
presentStep' :: Layer Document Rendering (EditRendering a) (EditDocument a)
) = unStep interpretStep $ gesture
; let doc' :: Document = updateDocument update doc
; editLoop presentStep' doc'
}
-}
-- no type sigs
editLoop :: Layer Document Rendering (EditRendering) (EditDocument) -> Document -> IO ()
editLoop (Fix (compPresentStep)) doc =
do { let presentStep = decomp2 compPresentStep
; let (pres , interpretStep) = unStep presentStep $ doc
; showRendering pres
; gesture <- getGesture
; let (update, presentStep') = unStep interpretStep $ gesture
; let doc' = updateDocument update doc
; editLoop presentStep' doc'
}
--main :: IO ()
main layer0 layer1 layer2 =
do { (state0, state1, state2) <- initStates
; doc <- initDoc
; let layers = lift layer0 state0
`combine` lift layer1 state1
`combine` lift layer2 state2
; editLoop layers doc
}
main4 state2 =
do { let layers4 = liftl4 foursteplayer state2
; let Fix comps = layers4
-- editStep = decomp4 comps
; editLoop4 layers4 (undefined :: Arrangement)
}
combinel4 = cfix $ combineStepDown `ccomp` combineStepUp `ccomp` combineStepDown `ccomp` combineStepUp
liftl4 l4 = lfix $ liftStep (present1 l4) `lcomp` liftStep (interpret1 l4) `lcomp`
liftStep (present2 l4) `lcomp` liftStep (interpret2 l4)
( liftl4,combinel4 ) = afix $ ( liftStep ( present1 l4 ) , combineStepDown )
foursteplayer :: FourStep State2 Mapping2 Arrangement Rendering (EditRendering) (EditArrangement)
foursteplayer = undefined
data FourStep state map doc pres gest upd =
FourStep { present1 :: LayerFn state doc (map, state) pres
, interpret1 :: LayerFn (map, state) gest state upd
, present2 :: LayerFn state doc (map, state) pres
, interpret2 :: LayerFn (map, state) gest state upd
}
unfixCompL f (Fix x) = f x
unfixComp = unfixCompL $ \(Comp (Comp (Comp ztep))) -> ztep
-- Explicit ones:
instance ( f : . : g : . : h ) t ( f ( g ( h t ) ) ) where
-- decomp (Comp (Comp f)) = f
instance ( f : . : g : . : h : . : i ) t ( f ( g ( h ( i t ) ) ) ) where
-- decomp (Comp (Comp (Comp f))) = f
type Layer4 doc pres gest upd =
Fix (Step doc pres :.: Step gest upd :.: Step doc pres :.: Step gest upd)
editLoop4 :: forall a . Layer4 Arrangement Rendering (EditRendering) (EditArrangement) -> Arrangement -> IO ()
editLoop4 (Fix comps) doc =
do { let Step presentStep = decomp4 comps
; let (pres::Rendering, Step interpretStep) = presentStep doc
; showRendering pres
; gesture :: EditRendering <- getGesture
; let (update::EditArrangement, Step presentStep2) = interpretStep gesture
; let doc'
= undefined update
; let (pres2 :: Rendering, Step interpret2Step) = presentStep2 doc'
; showRendering pres2
; gesture2 :: EditRendering <- getGesture
; let (update :: EditArrangement, presentStep') = interpret2Step gesture2
; editLoop4 presentStep' (undefined :: Arrangement)
}
--- A class to get rid of Fix and Comp applications
-- old comment : " This works , but without functional dependency , all polymorphism must be removed with explicit
-- type sigs . " now it does not work though ..
class comp where
decomp : : compx - > comp
instance ( ( f : . : ) t ) ( f ( g t ) ) where
decomp ( Comp f ) = f
instance ( ( f : . : g : . : h ) t ) ( f ( g ( h t ) ) ) where
decomp ( Comp ( Comp f ) ) = f
instance ( ( p : . : q : . : r : . : s ) t ) ( p ( q ( r ( s t ) ) ) ) where
decomp ( Comp ( Comp ( Comp f ) ) ) = f
-- old comment: "This works, but without functional dependency, all polymorphism must be removed with explicit
-- type sigs." now it does not work though..
class Decomp compx comp where
decomp :: compx -> comp
instance Decomp ((f :.: g) t) (f (g t)) where
decomp (Comp f) = f
instance Decomp ((f :.: g :.: h) t) (f (g (h t))) where
decomp (Comp (Comp f)) = f
instance Decomp ((p :.: q :.: r :.: s) t) (p (q (r (s t)))) where
decomp (Comp (Comp (Comp f))) = f
-}
decomp4 :: ((p :.: q :.: r :.: s) t) -> (p (q (r (s t))))
decomp4 (Comp (Comp (Comp step))) = step
--decomp4 = decomp
: : ( ( ( :-) : . : p : . : q : . : r ) t ) - > ( p ( q ( r t ) ) )
data (:-) a = CompNil a
term :: (Maybe :.: (:-)) ()
term = Comp $ Just (CompNil ())
decomp3 :: (((:-) :.: p :.: r) t) -> (p (r t))
decomp3 (Comp (Comp (CompNil step))) = step
decomp2 (Comp s) = s
-- this one does work!
-- fundeps seem impossible now :-(
class Decomp comp t decomp where -- | comp t -> decomp where
decomp :: comp t -> decomp
instance Decomp f t (f t) where
decomp f = f
instance Decomp f (g t) h => Decomp (f :.: g) t h where
decomp (Comp f) = decomp f
instance ( f : . : ) t ( f ( g t ) ) where
-- decomp (Comp f) = f
--instance Decomp ( :-) t t where
-- decomp ( x ) = x
--instance Decomp (:-) t t where
-- decomp (CompNil x) = x
-}
class x comp ( g : : * - > * ) | comp - > g where
decomp : : - > comp
instance ( f : . : ) t ( f ( g t ) ) g where
decomp ( Comp f ) = f
instance ( g t ) h i = > ( f : . : ) t where
decomp ( Comp f ) = decomp f
class Decomp compx x comp (g :: * -> *) | comp -> g where
decomp :: compx x -> comp
instance Decomp (f :.: g) t (f (g t)) g where
decomp (Comp f) = f
instance Decomp f (g t) h i => Decomp (f :.: g) t h g where
decomp (Comp f) = decomp f
-}
decompTest : : ( f ( ) , ( ) , t ) - > ( ( ( :-) : . : f : . : ) t ) - > ( f ( g t ) )
--decompTest dummy = decomp
decompTest dummy = decomp ( undefined : : ( ( :-) : . : f : . : ) t ) : : ( f ( g t ) )
--decompTest3 = decomp (undefined :: (f :.: g :.: h) t) :: (f (g (h t)))
newtype NilT t = NilT t
class Combine (comp :: * -> *) t f | comp t -> f where
combineC :: f
instance Combine NilT t ((NilT x) -> (NilT y) -> (x -> y -> t) -> NilT t) where
combineC = \(NilT x) (NilT y) next -> NilT (next x y)
instance Combine (Step a r :.: h) t
((Step a m :.: f) ft ->
(Step m r :.: g) gt ->
(f ft -> g gt -> h t) -> (Step a r :.: h) t) where
combineC = \(Comp (Step f)) (Comp (Step g)) next ->
Comp $ Step $ \h -> let (m ,upperf) = f h
(l, lowerf) = g m
in (l, next upperf lowerf)
{- Let's see if we can make lift automatically -}
: : ( b - > t - > ( f ( g ns ) ) ) - >
-- (a->b) ->
-- a -> t -> ((f :.: g) ns)
{-
gliftc1 r f = liftStep f
gliftc2 r f g = liftStep f `lcomp'` liftStep g
gliftc3 r f g h = liftStep f `lcomp'` liftStep g `lcomp` liftStep h
gliftc4 r f g h i = liftStep f `lcomp'` liftStep g `lcomp` liftStep h `lcomp'` liftStep i
-}
f1 a = a
f2 a b = a . b
f3 a b c = a . b . c
f0' r = r
f1' r a = (r . a)
f2' r b = f1' (r . b)
f3' r c = f2' (r . c)
class Comp (ctype :: * -> *) r f c where
compose :: ctype t -> r -> f -> c
instance Comp (f :.: g) (b->res) (a->b) (a->res) where
compose ctype r f = r . f
instance (Comp f (b -> res) (a -> b) (a -> res)) =>
Comp (f :.: g) (y->res) (b->y) ((a->b) -> (a->res)) where
compose ctype r f = compose (getLeftType ctype) (r . f)
fixCompose : : Comp x = > Fix x - >
fixCompose fix = let Fix f = fix
in compose (getCType fix)
getCType :: Fix ctype -> ctype t
getCType = undefined
getLeftType :: (f :.: g) t -> (f t')
getLeftType = undefined
test x = compose (undefined :: (x :.: y) t) :: (b->res) -> (a ->b) -> (a->res)
test2 = compose (undefined :: (x :.: y :.: z) t) :: (c -> d) -> (b->c) -> (a ->b) -> (a->d)
test' = fixCompose (undefined :: Layer doc pres gest upd) :: (b->c) -> (a ->b) -> (a->c)
dppMain = do { print $ compPeano (undefined :: ((:-) :.: x :.: y) ())
; print $ comp2
(toUpper) (toUpper) 'a'
; print $ composeC (Succ Zero) -- :: (c->res) -> (b->c) -> (a ->b) -> (a->res))
(toUpper) (toUpper) (toUpper) 'a'
; getLine
}
data Zero = Zero deriving Show
data Succ a = Succ a deriving Show
comp2 = composeC Zero
composeC Zero = \f g - > f .
composeC ( Succ Zero ) = f . . h
-- etc.
class CompC num r f c bereik | num r - > c , r - > bereik where
composeC : : num - > r - > f - > c
instance CompC Zero ( b->res ) ( a->b ) ( a->res ) res where
composeC ctype r f = r . f
instance forall a b y res n .
CompC n ( b - > res ) ( a - > b ) ( a - > res ) >
CompC ( Succ n ) ( y->res ) ( b->y ) ( ( a->b ) - > ( a->res ) ) res where
composeC ( Succ n ) r f = let comp = composeC : : n - > ( b - > res ) - > ( a - > b ) - > ( a - > res )
in comp n ( r . f )
class CompC num r f c bereik | num r -> c, r -> bereik where
composeC :: num -> r -> f -> c
instance CompC Zero (b->res) (a->b) (a->res) res where
composeC ctype r f = r . f
instance forall a b y res n .
CompC n (b -> res) (a -> b) (a -> res) res =>
CompC (Succ n) (y->res) (b->y) ((a->b) -> (a->res)) res where
composeC (Succ n) r f = let comp = composeC :: n -> (b -> res) -> (a -> b) -> (a -> res)
in comp n (r . f)
-}
-- ?? this also works now?
class CompC num r f c | num r -> c where
composeC :: num -> r -> f -> c
instance CompC Zero (b->res) (a->b) (a->res) where
composeC ctype r f = r . f
instance forall a b y res n .
CompC n (b -> res) (a -> b) (a -> res) =>
CompC (Succ n) (y->res) (b->y) ((a->b) -> (a->res)) where
composeC (Succ n) r f = let comp = composeC :: n -> (b -> res) -> (a -> b) -> (a -> res)
in comp n (r . f)
liftfinal' :: Simple state map doc pres gest upd ->
state -> Layer doc pres gest upd
liftfinal' simple = glift2 (present simple) (interpret simple)
liftl4' l4 = glift4 (present1 l4) (interpret1 l4) (present2 l4) (interpret2 l4)
glift1 = gfix1 (gliftc1 id)
glift2 = gfix2 (gliftc2 id)
glift3 = gfix3 (gliftc3 id)
glift4 = gfix4 (gliftc4 id)
gfix0 f = lfix f
gfix1 f x = gfix0 (f x)
gfix2 f x = gfix1 (f x)
gfix3 f x = gfix2 (f x)
gfix4 f x = gfix3 (f x)
--gfix1 c f = gfix0 c id f
gliftc1 r f = r . liftStep f
gliftc2 r f = gliftc1 (r `lcomp` liftStep f)
gliftc3 r f = gliftc2 (r `lcomp` liftStep f)
gliftc4 r f = gliftc3 (r `lcomp` liftStep f)
--glift4 = undefined
class CompPeano comp peano | comp -> peano where
compPeano :: comp -> peano
instance CompPeano ((:-) t) Zero where
compPeano (CompNil a) = Zero
instance CompPeano (f (g t)) n => CompPeano ((f:.:g) t) (Succ n) where
compPeano (Comp f) = Succ (compPeano f)
| null | https://raw.githubusercontent.com/Oblosys/proxima/f154dff2ccb8afe00eeb325d9d06f5e2a5ee7589/papers/Haskell%202008/Haskell/src/Derivation.hs | haskell | # OPTIONS_GHC -fglasgow-exts -fallow-incoherent-instances -fallow-overlapping-instances -fallow-undecidable-instances -fno-monomorphism-restriction #
import DPPClass_Lib
now use liftStep to remove Step
capture recursion with fix
remove Fix with specifal fix
remove Comp with special composition
rewrite, dropping parameters
pass next step as arg and rewrite to fix
now we capture the up down with these functions:
------------------------------------------------------
no type sigs
main :: IO ()
editStep = decomp4 comps
Explicit ones:
decomp (Comp (Comp f)) = f
decomp (Comp (Comp (Comp f))) = f
- A class to get rid of Fix and Comp applications
old comment : " This works , but without functional dependency , all polymorphism must be removed with explicit
type sigs . " now it does not work though ..
old comment: "This works, but without functional dependency, all polymorphism must be removed with explicit
type sigs." now it does not work though..
decomp4 = decomp
this one does work!
fundeps seem impossible now :-(
| comp t -> decomp where
decomp (Comp f) = f
instance Decomp ( :-) t t where
decomp ( x ) = x
instance Decomp (:-) t t where
decomp (CompNil x) = x
decompTest dummy = decomp
decompTest3 = decomp (undefined :: (f :.: g :.: h) t) :: (f (g (h t)))
Let's see if we can make lift automatically
(a->b) ->
a -> t -> ((f :.: g) ns)
gliftc1 r f = liftStep f
gliftc2 r f g = liftStep f `lcomp'` liftStep g
gliftc3 r f g h = liftStep f `lcomp'` liftStep g `lcomp` liftStep h
gliftc4 r f g h i = liftStep f `lcomp'` liftStep g `lcomp` liftStep h `lcomp'` liftStep i
:: (c->res) -> (b->c) -> (a ->b) -> (a->res))
etc.
?? this also works now?
gfix1 c f = gfix0 c id f
glift4 = undefined
| module Derivation where
import Layers
import DPP_Lib
import Char
newtype InfTupleExplicit a b = InfTupleExplicit (a, (b, InfTupleExplicit a b))
newtype TupleStep a inft = I (a, inft)
type InfTuple2 a b = Fix ((TupleStep a) :.: (TupleStep b))
type InfTuple3 a b c = Fix ((TupleStep a) :.: (TupleStep b) :.: (TupleStep c))
inftup2 :: InfTuple2 Char Bool
inftup2 = Fix $ Comp (I ('c', I (True, inftup2)))
lft f = I . f
comp f g = (\x -> Comp $ (f . g) x)
fix :: (a->a) -> a
fix a = let fixa = a fixa
in fixa
fFix f = let fixf = Fix . f $ fFix f
in fixf
inftup2Compositional :: InfTuple2 Char Bool
inftup2Compositional = fFix $ lft a1 `comp` lft a2
where a1 n = ('c', n)
a2 n = (True, n)
inftup3Compositional :: InfTuple3 Char Bool Int
inftup3Compositional = fFix $ lft a1 `comp` lft a2 `comp` lft a3
where a1 n = ('c', n)
a2 n = (True, n)
a3 n = (8::Int, n)
compterm :: ((TupleStep Bool) :.: ((TupleStep Char) :.: (TupleStep Int))) ()
compterm = (Comp . I) ( True
, (Comp . I) ('a', I (0, ()))
)
newtype Fix f = Fix (f (Fix f))
newtype (:.:) f g ns = Comp (f (g ns))
newtype Step a b ns = Step { unStep :: (a -> (b, ns)) }
newtype DownStep a b ns = DownStep { unDownStep :: (a -> (b, ns)) }
type Layer doc pres gest upd =
Fix (Step doc pres :.: Step gest upd)
lift0, lift1, lift2, lift3, lift4, lift5 ::
Simple state map doc pres gest upd ->
state -> Layer doc pres gest upd
lift0 simple state = step1 state
where step1 hArg = Fix . Comp . Step $
\vArg -> let (pres, hRes) =
present simple hArg vArg
in (pres, step2 hRes)
step2 hArg = Step $
\vArg -> let (upd, hRes) =
interpret simple hArg vArg
in (upd, step1 hRes)
factorize out next step
lift1 simple state =
step1 (step2 (lift1 simple)) state
where step1 next hArg = Fix . Comp . Step $
\vArg -> let (pres, hRes) =
present simple hArg vArg
in (pres, next hRes)
step2 next hArg = Step $
\vArg -> let (upd, hRes) =
interpret simple hArg vArg
in (upd, next hRes)
liftStep f next horArgs = Step $
\vArg -> let (vertRes, horRes) = f horArgs vArg
in (vertRes, next horRes)
lift2 simple state =
step1 (step2 (lift2 simple)) state
where step1 next hArg = Fix . Comp $
liftStep (present simple) next hArg
step2 next hArg =
liftStep (interpret simple) next hArg
lift3 simple = fix $ step1 . step2
where step1 next hArg = Fix . Comp $
liftStep (present simple) next hArg
step2 next hArg =
liftStep (interpret simple) next hArg
lfix f = fix f'
where f' n = (Fix . f n)
lift4 simple = lfix $ step1 . step2
where step1 next hArg = Comp $
liftStep (present simple) next hArg
step2 next hArg =
liftStep (interpret simple) next hArg
lcomp :: (b -> t -> (f (g ns))) ->
(a->b) ->
a -> t -> ((f :.: g) ns)
lcomp f g = (\n s -> Comp $ f n s) . g
f g = ( \n s - > Comp ( ( f . ) n s ) )
lift5 simple = lfix $ step1 `lcomp` step2
where step1 next args =
liftStep (present simple) next args
step2 next args =
liftStep (interpret simple) next args
lift :: Simple state map doc pres gest upd ->
state -> Layer doc pres gest upd
lift simple = lfix $ liftStep (present simple)
`lcomp` liftStep (interpret simple)
combine0, combine2, combine3, combine4, combine5, combine6 ::
Layer high med emed ehigh ->
Layer med low elow emed ->
Layer high low elow ehigh
combine0 upr lwr = step1 upr lwr
where step1 (Fix (Comp (Step upr)))
(Fix (Comp (Step lwr))) =
Fix . Comp . Step $
\high -> let (med, uprIntr) = upr high
(low, lwrIntr) = lwr med
in (low, step2 uprIntr lwrIntr)
step2 (Step upr) (Step lwr) = Step $
\low -> let (med, lwrPres) = lwr low
(high, uprPres) = upr med
in (high, step1 uprPres lwrPres)
combine2 = fix $ (step1 . step2)
where step1 next (Fix (Comp (Step upr)))
(Fix (Comp (Step lwr))) =
Fix . Comp . Step $
\high -> let (med, uprIntr) = upr high
(low, lwrIntr) = lwr med
in (low, next uprIntr lwrIntr)
step2 next (Step upr) (Step lwr) = Step $
\low -> let (med, lwrPres) = lwr low
(high, uprPres) = upr med
in (high, next uprPres lwrPres)
combineStepDown :: (x -> y -> ns) ->
Step a b x -> Step b c y -> Step a c ns
combineStepDown next (Step upper) (Step lower) = Step $
\h -> let (m ,upperf) = upper h
(l, lowerf) = lower m
in (l, next upperf lowerf)
combineStepUp :: (x -> y -> ns) ->
Step b c x -> Step a b y -> Step a c ns
combineStepUp next (Step upper) (Step lower) = Step $
\l -> let (m, lowerf) = lower l
(h, upperf) = upper m
in (h, next upperf lowerf)
combine3 = fix $ (step1 . step2)
where step1 next (Fix (Comp upr)) (Fix (Comp lwr)) =
Fix . Comp $ combineStepDown next upr lwr
step2 = combineStepUp
combine4 = fix (\n (Fix u) (Fix l)-> Fix $
(step1 . step2) n u l)
where step1 next (Comp upr) (Comp lwr) = Comp $
combineStepDown next upr lwr
step2 = combineStepUp
cfix f = fix f'
where f' n (Fix u) (Fix l) = Fix $ f n u l
combine5 = cfix (step1 . step2)
where step1 next (Comp upr) (Comp lwr) = Comp $
combineStepDown next upr lwr
step2 = combineStepUp
ccomp f g = f' . g
where f' m (Comp u) (Comp l) = Comp $ f m u l
combine6 = cfix $ step1 `ccomp` step2
where step1 next upr lwr =
combineStepDown next upr lwr
step2 = combineStepUp
combine :: Layer high med emed ehigh ->
Layer med low elow emed ->
Layer high low elow ehigh
combine = cfix $ combineStepDown `ccomp` combineStepUp
editLoop : : forall a . Layer Document Rendering ( EditRendering a ) ( EditDocument a ) - > Document - > IO ( )
editLoop ( Fix compPresentStep ) doc =
do { let presentStep = decomp compPresentStep
; let ( pres : : Rendering , interpretStep ) = unStep presentStep $ doc
; showRendering pres
; gesture : : EditRendering a < - getGesture
; let ( update : : a ,
presentStep ' : : Layer Document Rendering ( EditRendering a ) ( EditDocument a )
) = unStep interpretStep $ gesture
; let doc ' : : Document = updateDocument update doc
; editLoop presentStep ' doc '
}
editLoop :: forall a . Layer Document Rendering (EditRendering a) (EditDocument a) -> Document -> IO ()
editLoop (Fix compPresentStep) doc =
do { let presentStep = decomp compPresentStep
; let (pres :: Rendering , interpretStep) = unStep presentStep $ doc
; showRendering pres
; gesture :: EditRendering a <- getGesture
; let ( update :: EditDocument a ,
presentStep' :: Layer Document Rendering (EditRendering a) (EditDocument a)
) = unStep interpretStep $ gesture
; let doc' :: Document = updateDocument update doc
; editLoop presentStep' doc'
}
-}
editLoop :: Layer Document Rendering (EditRendering) (EditDocument) -> Document -> IO ()
editLoop (Fix (compPresentStep)) doc =
do { let presentStep = decomp2 compPresentStep
; let (pres , interpretStep) = unStep presentStep $ doc
; showRendering pres
; gesture <- getGesture
; let (update, presentStep') = unStep interpretStep $ gesture
; let doc' = updateDocument update doc
; editLoop presentStep' doc'
}
main layer0 layer1 layer2 =
do { (state0, state1, state2) <- initStates
; doc <- initDoc
; let layers = lift layer0 state0
`combine` lift layer1 state1
`combine` lift layer2 state2
; editLoop layers doc
}
main4 state2 =
do { let layers4 = liftl4 foursteplayer state2
; let Fix comps = layers4
; editLoop4 layers4 (undefined :: Arrangement)
}
combinel4 = cfix $ combineStepDown `ccomp` combineStepUp `ccomp` combineStepDown `ccomp` combineStepUp
liftl4 l4 = lfix $ liftStep (present1 l4) `lcomp` liftStep (interpret1 l4) `lcomp`
liftStep (present2 l4) `lcomp` liftStep (interpret2 l4)
( liftl4,combinel4 ) = afix $ ( liftStep ( present1 l4 ) , combineStepDown )
foursteplayer :: FourStep State2 Mapping2 Arrangement Rendering (EditRendering) (EditArrangement)
foursteplayer = undefined
data FourStep state map doc pres gest upd =
FourStep { present1 :: LayerFn state doc (map, state) pres
, interpret1 :: LayerFn (map, state) gest state upd
, present2 :: LayerFn state doc (map, state) pres
, interpret2 :: LayerFn (map, state) gest state upd
}
unfixCompL f (Fix x) = f x
unfixComp = unfixCompL $ \(Comp (Comp (Comp ztep))) -> ztep
instance ( f : . : g : . : h ) t ( f ( g ( h t ) ) ) where
instance ( f : . : g : . : h : . : i ) t ( f ( g ( h ( i t ) ) ) ) where
type Layer4 doc pres gest upd =
Fix (Step doc pres :.: Step gest upd :.: Step doc pres :.: Step gest upd)
editLoop4 :: forall a . Layer4 Arrangement Rendering (EditRendering) (EditArrangement) -> Arrangement -> IO ()
editLoop4 (Fix comps) doc =
do { let Step presentStep = decomp4 comps
; let (pres::Rendering, Step interpretStep) = presentStep doc
; showRendering pres
; gesture :: EditRendering <- getGesture
; let (update::EditArrangement, Step presentStep2) = interpretStep gesture
; let doc'
= undefined update
; let (pres2 :: Rendering, Step interpret2Step) = presentStep2 doc'
; showRendering pres2
; gesture2 :: EditRendering <- getGesture
; let (update :: EditArrangement, presentStep') = interpret2Step gesture2
; editLoop4 presentStep' (undefined :: Arrangement)
}
class comp where
decomp : : compx - > comp
instance ( ( f : . : ) t ) ( f ( g t ) ) where
decomp ( Comp f ) = f
instance ( ( f : . : g : . : h ) t ) ( f ( g ( h t ) ) ) where
decomp ( Comp ( Comp f ) ) = f
instance ( ( p : . : q : . : r : . : s ) t ) ( p ( q ( r ( s t ) ) ) ) where
decomp ( Comp ( Comp ( Comp f ) ) ) = f
class Decomp compx comp where
decomp :: compx -> comp
instance Decomp ((f :.: g) t) (f (g t)) where
decomp (Comp f) = f
instance Decomp ((f :.: g :.: h) t) (f (g (h t))) where
decomp (Comp (Comp f)) = f
instance Decomp ((p :.: q :.: r :.: s) t) (p (q (r (s t)))) where
decomp (Comp (Comp (Comp f))) = f
-}
decomp4 :: ((p :.: q :.: r :.: s) t) -> (p (q (r (s t))))
decomp4 (Comp (Comp (Comp step))) = step
: : ( ( ( :-) : . : p : . : q : . : r ) t ) - > ( p ( q ( r t ) ) )
data (:-) a = CompNil a
term :: (Maybe :.: (:-)) ()
term = Comp $ Just (CompNil ())
decomp3 :: (((:-) :.: p :.: r) t) -> (p (r t))
decomp3 (Comp (Comp (CompNil step))) = step
decomp2 (Comp s) = s
decomp :: comp t -> decomp
instance Decomp f t (f t) where
decomp f = f
instance Decomp f (g t) h => Decomp (f :.: g) t h where
decomp (Comp f) = decomp f
instance ( f : . : ) t ( f ( g t ) ) where
-}
class x comp ( g : : * - > * ) | comp - > g where
decomp : : - > comp
instance ( f : . : ) t ( f ( g t ) ) g where
decomp ( Comp f ) = f
instance ( g t ) h i = > ( f : . : ) t where
decomp ( Comp f ) = decomp f
class Decomp compx x comp (g :: * -> *) | comp -> g where
decomp :: compx x -> comp
instance Decomp (f :.: g) t (f (g t)) g where
decomp (Comp f) = f
instance Decomp f (g t) h i => Decomp (f :.: g) t h g where
decomp (Comp f) = decomp f
-}
decompTest : : ( f ( ) , ( ) , t ) - > ( ( ( :-) : . : f : . : ) t ) - > ( f ( g t ) )
decompTest dummy = decomp ( undefined : : ( ( :-) : . : f : . : ) t ) : : ( f ( g t ) )
newtype NilT t = NilT t
class Combine (comp :: * -> *) t f | comp t -> f where
combineC :: f
instance Combine NilT t ((NilT x) -> (NilT y) -> (x -> y -> t) -> NilT t) where
combineC = \(NilT x) (NilT y) next -> NilT (next x y)
instance Combine (Step a r :.: h) t
((Step a m :.: f) ft ->
(Step m r :.: g) gt ->
(f ft -> g gt -> h t) -> (Step a r :.: h) t) where
combineC = \(Comp (Step f)) (Comp (Step g)) next ->
Comp $ Step $ \h -> let (m ,upperf) = f h
(l, lowerf) = g m
in (l, next upperf lowerf)
: : ( b - > t - > ( f ( g ns ) ) ) - >
f1 a = a
f2 a b = a . b
f3 a b c = a . b . c
f0' r = r
f1' r a = (r . a)
f2' r b = f1' (r . b)
f3' r c = f2' (r . c)
class Comp (ctype :: * -> *) r f c where
compose :: ctype t -> r -> f -> c
instance Comp (f :.: g) (b->res) (a->b) (a->res) where
compose ctype r f = r . f
instance (Comp f (b -> res) (a -> b) (a -> res)) =>
Comp (f :.: g) (y->res) (b->y) ((a->b) -> (a->res)) where
compose ctype r f = compose (getLeftType ctype) (r . f)
fixCompose : : Comp x = > Fix x - >
fixCompose fix = let Fix f = fix
in compose (getCType fix)
getCType :: Fix ctype -> ctype t
getCType = undefined
getLeftType :: (f :.: g) t -> (f t')
getLeftType = undefined
test x = compose (undefined :: (x :.: y) t) :: (b->res) -> (a ->b) -> (a->res)
test2 = compose (undefined :: (x :.: y :.: z) t) :: (c -> d) -> (b->c) -> (a ->b) -> (a->d)
test' = fixCompose (undefined :: Layer doc pres gest upd) :: (b->c) -> (a ->b) -> (a->c)
dppMain = do { print $ compPeano (undefined :: ((:-) :.: x :.: y) ())
; print $ comp2
(toUpper) (toUpper) 'a'
(toUpper) (toUpper) (toUpper) 'a'
; getLine
}
data Zero = Zero deriving Show
data Succ a = Succ a deriving Show
comp2 = composeC Zero
composeC Zero = \f g - > f .
composeC ( Succ Zero ) = f . . h
class CompC num r f c bereik | num r - > c , r - > bereik where
composeC : : num - > r - > f - > c
instance CompC Zero ( b->res ) ( a->b ) ( a->res ) res where
composeC ctype r f = r . f
instance forall a b y res n .
CompC n ( b - > res ) ( a - > b ) ( a - > res ) >
CompC ( Succ n ) ( y->res ) ( b->y ) ( ( a->b ) - > ( a->res ) ) res where
composeC ( Succ n ) r f = let comp = composeC : : n - > ( b - > res ) - > ( a - > b ) - > ( a - > res )
in comp n ( r . f )
class CompC num r f c bereik | num r -> c, r -> bereik where
composeC :: num -> r -> f -> c
instance CompC Zero (b->res) (a->b) (a->res) res where
composeC ctype r f = r . f
instance forall a b y res n .
CompC n (b -> res) (a -> b) (a -> res) res =>
CompC (Succ n) (y->res) (b->y) ((a->b) -> (a->res)) res where
composeC (Succ n) r f = let comp = composeC :: n -> (b -> res) -> (a -> b) -> (a -> res)
in comp n (r . f)
-}
class CompC num r f c | num r -> c where
composeC :: num -> r -> f -> c
instance CompC Zero (b->res) (a->b) (a->res) where
composeC ctype r f = r . f
instance forall a b y res n .
CompC n (b -> res) (a -> b) (a -> res) =>
CompC (Succ n) (y->res) (b->y) ((a->b) -> (a->res)) where
composeC (Succ n) r f = let comp = composeC :: n -> (b -> res) -> (a -> b) -> (a -> res)
in comp n (r . f)
liftfinal' :: Simple state map doc pres gest upd ->
state -> Layer doc pres gest upd
liftfinal' simple = glift2 (present simple) (interpret simple)
liftl4' l4 = glift4 (present1 l4) (interpret1 l4) (present2 l4) (interpret2 l4)
glift1 = gfix1 (gliftc1 id)
glift2 = gfix2 (gliftc2 id)
glift3 = gfix3 (gliftc3 id)
glift4 = gfix4 (gliftc4 id)
gfix0 f = lfix f
gfix1 f x = gfix0 (f x)
gfix2 f x = gfix1 (f x)
gfix3 f x = gfix2 (f x)
gfix4 f x = gfix3 (f x)
gliftc1 r f = r . liftStep f
gliftc2 r f = gliftc1 (r `lcomp` liftStep f)
gliftc3 r f = gliftc2 (r `lcomp` liftStep f)
gliftc4 r f = gliftc3 (r `lcomp` liftStep f)
class CompPeano comp peano | comp -> peano where
compPeano :: comp -> peano
instance CompPeano ((:-) t) Zero where
compPeano (CompNil a) = Zero
instance CompPeano (f (g t)) n => CompPeano ((f:.:g) t) (Succ n) where
compPeano (Comp f) = Succ (compPeano f)
|
178174c88d1e0f36c66d490a4f3a24c453e8e53c4e1c1831d87d36a0ef97abd2 | cburgmer/greenyet | selection.clj | (ns greenyet.selection
(:require [clojure.string :as str]))
(defn- filter-by-systems [host-status-pairs selected-systems]
(let [systems (set (map str/lower-case selected-systems))]
(filter (fn [[{system :system} _]]
(contains? systems (str/lower-case system)))
host-status-pairs)))
(defn- filter-by-environments [host-status-pairs selected-environments]
(let [environments (set (map str/lower-case selected-environments))]
(filter (fn [[{environment :environment} _]]
(contains? environments (str/lower-case environment)))
host-status-pairs)))
(defn- remove-green-systems [host-status-pairs]
(->> host-status-pairs
(group-by (fn [[{system :system} _]] system))
(map second)
(remove (fn [hosts]
(every? (fn [[_ {color :color}]] (= :green color))
hosts)))
(apply concat)))
(defn filter-hosts [host-status-pairs selected-systems selected-environments hide-green]
(cond-> host-status-pairs
selected-systems (filter-by-systems selected-systems)
selected-environments (filter-by-environments selected-environments)
hide-green remove-green-systems))
| null | https://raw.githubusercontent.com/cburgmer/greenyet/b07e8257cafb779bbfe172de2c53db1053cfc495/src/greenyet/selection.clj | clojure | (ns greenyet.selection
(:require [clojure.string :as str]))
(defn- filter-by-systems [host-status-pairs selected-systems]
(let [systems (set (map str/lower-case selected-systems))]
(filter (fn [[{system :system} _]]
(contains? systems (str/lower-case system)))
host-status-pairs)))
(defn- filter-by-environments [host-status-pairs selected-environments]
(let [environments (set (map str/lower-case selected-environments))]
(filter (fn [[{environment :environment} _]]
(contains? environments (str/lower-case environment)))
host-status-pairs)))
(defn- remove-green-systems [host-status-pairs]
(->> host-status-pairs
(group-by (fn [[{system :system} _]] system))
(map second)
(remove (fn [hosts]
(every? (fn [[_ {color :color}]] (= :green color))
hosts)))
(apply concat)))
(defn filter-hosts [host-status-pairs selected-systems selected-environments hide-green]
(cond-> host-status-pairs
selected-systems (filter-by-systems selected-systems)
selected-environments (filter-by-environments selected-environments)
hide-green remove-green-systems))
| |
da52ad9d552f2882173796f6e78e30f475ec88363446107d1c6e336d73247595 | metaocaml/ber-metaocaml | pr9384.ml | (* TEST
* expect
*)
module M : sig
type 'a t := [< `A ] as 'a
val f : 'a -> 'a t
end = struct
let f x = x
end;;
[%%expect{|
Line 2, characters 2-28:
2 | type 'a t := [< `A ] as 'a
^^^^^^^^^^^^^^^^^^^^^^^^^^
Error: Destructive substitutions are not supported for constrained
types (other than when replacing a type constructor with
a type constructor with the same arguments).
|}]
type foo = { foo : 'a. ([< `A] as 'a) -> 'a }
module Foo (X : sig type 'a t := [< `A ] as 'a type foo2 = foo = { foo : 'a. 'a t -> 'a t } end) = struct
let f { X.foo } = foo
end;;
[%%expect{|
type foo = { foo : 'a. ([< `A ] as 'a) -> 'a; }
Line 3, characters 20-46:
3 | module Foo (X : sig type 'a t := [< `A ] as 'a type foo2 = foo = { foo : 'a. 'a t -> 'a t } end) = struct
^^^^^^^^^^^^^^^^^^^^^^^^^^
Error: Destructive substitutions are not supported for constrained
types (other than when replacing a type constructor with
a type constructor with the same arguments).
|}]
type bar = { bar : 'a. ([< `A] as 'a) -> 'a }
module Bar (X : sig type 'a t := 'a type bar2 = bar = { bar : 'a. ([< `A] as 'a) t -> 'a t } end) = struct
let f { X.bar } = bar
end;;
[%%expect{|
type bar = { bar : 'a. ([< `A ] as 'a) -> 'a; }
module Bar :
functor
(X : sig type bar2 = bar = { bar : 'a. ([< `A ] as 'a) -> 'a; } end) ->
sig val f : X.bar2 -> ([< `A ] as 'a) -> 'a end
|}]
| null | https://raw.githubusercontent.com/metaocaml/ber-metaocaml/4992d1f87fc08ccb958817926cf9d1d739caf3a2/testsuite/tests/typing-modules/pr9384.ml | ocaml | TEST
* expect
|
module M : sig
type 'a t := [< `A ] as 'a
val f : 'a -> 'a t
end = struct
let f x = x
end;;
[%%expect{|
Line 2, characters 2-28:
2 | type 'a t := [< `A ] as 'a
^^^^^^^^^^^^^^^^^^^^^^^^^^
Error: Destructive substitutions are not supported for constrained
types (other than when replacing a type constructor with
a type constructor with the same arguments).
|}]
type foo = { foo : 'a. ([< `A] as 'a) -> 'a }
module Foo (X : sig type 'a t := [< `A ] as 'a type foo2 = foo = { foo : 'a. 'a t -> 'a t } end) = struct
let f { X.foo } = foo
end;;
[%%expect{|
type foo = { foo : 'a. ([< `A ] as 'a) -> 'a; }
Line 3, characters 20-46:
3 | module Foo (X : sig type 'a t := [< `A ] as 'a type foo2 = foo = { foo : 'a. 'a t -> 'a t } end) = struct
^^^^^^^^^^^^^^^^^^^^^^^^^^
Error: Destructive substitutions are not supported for constrained
types (other than when replacing a type constructor with
a type constructor with the same arguments).
|}]
type bar = { bar : 'a. ([< `A] as 'a) -> 'a }
module Bar (X : sig type 'a t := 'a type bar2 = bar = { bar : 'a. ([< `A] as 'a) t -> 'a t } end) = struct
let f { X.bar } = bar
end;;
[%%expect{|
type bar = { bar : 'a. ([< `A ] as 'a) -> 'a; }
module Bar :
functor
(X : sig type bar2 = bar = { bar : 'a. ([< `A ] as 'a) -> 'a; } end) ->
sig val f : X.bar2 -> ([< `A ] as 'a) -> 'a end
|}]
|
1ab5666abdefa7ff7c5ee76e88391b700d02f097476956321a69b830007ea33e | ocaml-multicore/ocaml-tsan | float_compare.ml | (* TEST *)
let equal (x : float) (y : float) =
x, "=", y, (x = y)
[@@inline never]
let not_equal (x : float) (y : float) =
x, "!=", y, (x <> y)
[@@inline never]
let less_than (x : float) (y : float) =
x, "<", y, (x < y)
[@@inline never]
let not_less_than (x : float) (y : float) =
x, "!<", y, not (x < y)
[@@inline never]
let less_equal (x : float) (y : float) =
x, "<=", y, (x <= y)
[@@inline never]
let not_less_equal (x : float) (y : float) =
x, "!<=", y, not (x <= y)
[@@inline never]
let greater_than (x : float) (y : float) =
x, ">", y, (x > y)
[@@inline never]
let not_greater_than (x : float) (y : float) =
x, "!>", y, not (x > y)
[@@inline never]
let greater_equal (x : float) (y : float) =
x, ">=", y, (x >= y)
[@@inline never]
let not_greater_equal (x : float) (y : float) =
x, "!>=", y, not (x >= y)
[@@inline never]
let show (x, op, y, b) =
print_float x;
print_string " ";
print_string op;
print_string " ";
print_float y;
print_string ": ";
print_endline (string_of_bool b)
let print_line () =
print_endline "------------------"
let () = show (equal 1.0 2.0)
let () = show (equal 1.0 1.0)
let () = show (equal 2.0 1.0)
let () = show (equal 1.0 nan)
let () = print_line ()
let () = show (not_equal 1.0 2.0)
let () = show (not_equal 1.0 1.0)
let () = show (not_equal 2.0 1.0)
let () = show (not_equal 1.0 nan)
let () = print_line ()
let () = show (less_than 1.0 2.0)
let () = show (less_than 1.0 1.0)
let () = show (less_than 2.0 1.0)
let () = show (less_than 1.0 nan)
let () = print_line ()
let () = show (not_less_than 1.0 2.0)
let () = show (not_less_than 1.0 1.0)
let () = show (not_less_than 2.0 1.0)
let () = show (not_less_than 1.0 nan)
let () = print_line ()
let () = show (less_equal 1.0 2.0)
let () = show (less_equal 1.0 1.0)
let () = show (less_equal 2.0 1.0)
let () = show (less_equal 1.0 nan)
let () = print_line ()
let () = show (not_less_equal 1.0 2.0)
let () = show (not_less_equal 1.0 1.0)
let () = show (not_less_equal 2.0 1.0)
let () = show (not_less_equal 1.0 nan)
let () = print_line ()
let () = show (greater_than 1.0 2.0)
let () = show (greater_than 1.0 1.0)
let () = show (greater_than 2.0 1.0)
let () = show (greater_than 1.0 nan)
let () = print_line ()
let () = show (not_greater_than 1.0 2.0)
let () = show (not_greater_than 1.0 1.0)
let () = show (not_greater_than 2.0 1.0)
let () = show (not_greater_than 1.0 nan)
let () = print_line ()
let () = show (greater_equal 1.0 2.0)
let () = show (greater_equal 1.0 1.0)
let () = show (greater_equal 2.0 1.0)
let () = show (greater_equal 1.0 nan)
let () = print_line ()
let () = show (not_greater_equal 1.0 2.0)
let () = show (not_greater_equal 1.0 1.0)
let () = show (not_greater_equal 2.0 1.0)
let () = show (not_greater_equal 1.0 nan)
let () = print_line ()
| null | https://raw.githubusercontent.com/ocaml-multicore/ocaml-tsan/ae9c1502103845550162a49fcd3f76276cdfa866/testsuite/tests/basic-float/float_compare.ml | ocaml | TEST |
let equal (x : float) (y : float) =
x, "=", y, (x = y)
[@@inline never]
let not_equal (x : float) (y : float) =
x, "!=", y, (x <> y)
[@@inline never]
let less_than (x : float) (y : float) =
x, "<", y, (x < y)
[@@inline never]
let not_less_than (x : float) (y : float) =
x, "!<", y, not (x < y)
[@@inline never]
let less_equal (x : float) (y : float) =
x, "<=", y, (x <= y)
[@@inline never]
let not_less_equal (x : float) (y : float) =
x, "!<=", y, not (x <= y)
[@@inline never]
let greater_than (x : float) (y : float) =
x, ">", y, (x > y)
[@@inline never]
let not_greater_than (x : float) (y : float) =
x, "!>", y, not (x > y)
[@@inline never]
let greater_equal (x : float) (y : float) =
x, ">=", y, (x >= y)
[@@inline never]
let not_greater_equal (x : float) (y : float) =
x, "!>=", y, not (x >= y)
[@@inline never]
let show (x, op, y, b) =
print_float x;
print_string " ";
print_string op;
print_string " ";
print_float y;
print_string ": ";
print_endline (string_of_bool b)
let print_line () =
print_endline "------------------"
let () = show (equal 1.0 2.0)
let () = show (equal 1.0 1.0)
let () = show (equal 2.0 1.0)
let () = show (equal 1.0 nan)
let () = print_line ()
let () = show (not_equal 1.0 2.0)
let () = show (not_equal 1.0 1.0)
let () = show (not_equal 2.0 1.0)
let () = show (not_equal 1.0 nan)
let () = print_line ()
let () = show (less_than 1.0 2.0)
let () = show (less_than 1.0 1.0)
let () = show (less_than 2.0 1.0)
let () = show (less_than 1.0 nan)
let () = print_line ()
let () = show (not_less_than 1.0 2.0)
let () = show (not_less_than 1.0 1.0)
let () = show (not_less_than 2.0 1.0)
let () = show (not_less_than 1.0 nan)
let () = print_line ()
let () = show (less_equal 1.0 2.0)
let () = show (less_equal 1.0 1.0)
let () = show (less_equal 2.0 1.0)
let () = show (less_equal 1.0 nan)
let () = print_line ()
let () = show (not_less_equal 1.0 2.0)
let () = show (not_less_equal 1.0 1.0)
let () = show (not_less_equal 2.0 1.0)
let () = show (not_less_equal 1.0 nan)
let () = print_line ()
let () = show (greater_than 1.0 2.0)
let () = show (greater_than 1.0 1.0)
let () = show (greater_than 2.0 1.0)
let () = show (greater_than 1.0 nan)
let () = print_line ()
let () = show (not_greater_than 1.0 2.0)
let () = show (not_greater_than 1.0 1.0)
let () = show (not_greater_than 2.0 1.0)
let () = show (not_greater_than 1.0 nan)
let () = print_line ()
let () = show (greater_equal 1.0 2.0)
let () = show (greater_equal 1.0 1.0)
let () = show (greater_equal 2.0 1.0)
let () = show (greater_equal 1.0 nan)
let () = print_line ()
let () = show (not_greater_equal 1.0 2.0)
let () = show (not_greater_equal 1.0 1.0)
let () = show (not_greater_equal 2.0 1.0)
let () = show (not_greater_equal 1.0 nan)
let () = print_line ()
|
e434c4ba6e6f25a9a3e2da583a0b3c0209d9a6810af59061cf98b4bc52f33332 | exercism/common-lisp | pig-latin.lisp | (defpackage :pig-latin
(:use :cl)
(:export :translate))
(in-package :pig-latin)
(defun translate (phrase))
| null | https://raw.githubusercontent.com/exercism/common-lisp/3c159cf4511feab74a2f710b89540ab4f5116d80/exercises/practice/pig-latin/pig-latin.lisp | lisp | (defpackage :pig-latin
(:use :cl)
(:export :translate))
(in-package :pig-latin)
(defun translate (phrase))
| |
232276e1d18656bce71d41bbf591fd16e5dbfeadbd15854991bd18be8c53b23c | techascent/tvm-clj | topi.clj | (ns tvm-clj.impl.fns.topi
(:require [tvm-clj.impl.tvm-ns-fns :as tvm-ns-fns]))
(tvm-ns-fns/export-tvm-functions "topi") | null | https://raw.githubusercontent.com/techascent/tvm-clj/1088845bd613b4ba14b00381ffe3cdbd3d8b639e/src/tvm_clj/impl/fns/topi.clj | clojure | (ns tvm-clj.impl.fns.topi
(:require [tvm-clj.impl.tvm-ns-fns :as tvm-ns-fns]))
(tvm-ns-fns/export-tvm-functions "topi") | |
8ffec25515fc63ce636b0dad1512fc84974de4aae7c56a6862385971062c0bd9 | jesperes/aoc_erlang | aoc2019_day15.erl | Advent of Code solution for 2019 day 15 .
Created : 2019 - 12 - 15T10:32:09 + 00:00
-module(aoc2019_day15).
-include_lib("stdlib/include/assert.hrl").
-include("aoc_puzzle.hrl").
-export([parse/1, solve/1, info/0]).
-behavior(aoc_puzzle).
-spec info() -> aoc_puzzle().
info() ->
#aoc_puzzle{module = ?MODULE,
year = 2019,
day = 15,
name = "Oxygen System",
expected = {214, 344},
use_one_solver_fun = true,
has_input_file = true}.
-type input_type() :: any().
-type result_type() :: {integer(), integer()}.
-spec parse(Binary :: binary()) -> input_type().
parse(Binary) ->
intcode:parse(Binary).
-spec solve(Input :: input_type()) -> result_type().
solve(Prog) ->
TODO cleanup ; this throw thing to escape out of the first search is
a little yucky . We should be able to do both part 1 and 2 in the
%% same function.
Pid = start_intcode(Prog),
{Dist0, Grid0} =
try
explore(Pid,
#{pos => {0, 0},
dist => 0,
oxygen => not_found})
catch
{oxygen, Dist, Grid} ->
{Dist, Grid}
end,
Grid1 = fill_oxygen(Pid, Grid0),
OxygenDists =
lists:filtermap(fun ({{_, _}, V}) ->
{true, V};
(_) ->
false
end,
maps:to_list(Grid1)),
{Dist0, lists:max(OxygenDists)}.
start_intcode(Prog) ->
Parent = self(),
spawn(fun() ->
intcode:execute(Prog,
fun(State) -> {State, receive Input -> Input end} end,
fun(Output, State) ->
State ! Output,
State
end,
Parent)
end).
step_repair_robot(Dir, IntCodePid) ->
IntCodePid ! Dir,
receive
Output ->
Output
end.
explore(Pid, Grid0) ->
lists:foldl(fun(Dir, #{pos := Pos, dist := Dist} = Grid) ->
NewPos = move(Dir, Pos),
NewDist = Dist + 1,
case maps:is_key(NewPos, Grid) of
true -> Grid;
false ->
case step_repair_robot(Dir, Pid) of
0 -> maps:put(NewPos, $#, Grid); %% Wall
1 ->
%% New pos is space. Robot was moved.
NewGrid =
explore(Pid,
maps:merge(Grid,
#{NewPos => $.,
dist => NewDist,
pos => NewPos})),
%% We must move the robot back after exploring; it has
%% its own state in the intcode machine and does not
%% automatically rewind with our stack.
?assertEqual(1, step_repair_robot(opposite(Dir), Pid)),
maps:merge(NewGrid, #{pos => Pos, dist => Dist});
2 ->
%% Found oxygen source. Throw to wind back up (we want
%% to leave the intcode computer as is, and continue
%% with the oxygen-filling).
throw({oxygen,
NewDist,
#{pos => NewPos, %% Where we are
Time to fill oxygen
time => 0}})
end
end
end,
Grid0,
[1, 2, 3, 4]).
fill_oxygen(Pid, Grid0) ->
lists:foldl(fun(Dir, #{pos := Pos, time := Time} = Grid) ->
NewPos = move(Dir, Pos),
NewTime = Time + 1,
case maps:is_key(NewPos, Grid) of
true ->
T = maps:get(NewPos, Grid),
if Time < T -> maps:put(NewPos, Time, Grid);
true -> Grid
end;
false ->
case step_repair_robot(Dir, Pid) of
0 -> maps:put(NewPos, $#, Grid); %% Wall
1 ->
%% New pos is space. Robot was moved.
NewGrid =
fill_oxygen(Pid,
maps:merge(Grid,
#{NewPos => NewTime,
time => NewTime,
pos => NewPos})),
step_repair_robot(opposite(Dir), Pid),
maps:merge(NewGrid, #{pos => Pos, time => Time})
end
end
end,
Grid0,
[1, 2, 3, 4]).
move(1, {X, Y}) ->
{X, Y - 1}; %% north
move(2, {X, Y}) ->
{X, Y + 1}; %% south
move(3, {X, Y}) ->
{X - 1, Y}; %% west
move(4, {X, Y}) ->
{X + 1, Y}. %% east
opposite(1) ->
2;
opposite(2) ->
1;
opposite(3) ->
4;
opposite(4) ->
3.
| null | https://raw.githubusercontent.com/jesperes/aoc_erlang/ec0786088fb9ab886ee57e17ea0149ba3e91810a/src/2019/aoc2019_day15.erl | erlang | same function.
Wall
New pos is space. Robot was moved.
We must move the robot back after exploring; it has
its own state in the intcode machine and does not
automatically rewind with our stack.
Found oxygen source. Throw to wind back up (we want
to leave the intcode computer as is, and continue
with the oxygen-filling).
Where we are
Wall
New pos is space. Robot was moved.
north
south
west
east | Advent of Code solution for 2019 day 15 .
Created : 2019 - 12 - 15T10:32:09 + 00:00
-module(aoc2019_day15).
-include_lib("stdlib/include/assert.hrl").
-include("aoc_puzzle.hrl").
-export([parse/1, solve/1, info/0]).
-behavior(aoc_puzzle).
-spec info() -> aoc_puzzle().
info() ->
#aoc_puzzle{module = ?MODULE,
year = 2019,
day = 15,
name = "Oxygen System",
expected = {214, 344},
use_one_solver_fun = true,
has_input_file = true}.
-type input_type() :: any().
-type result_type() :: {integer(), integer()}.
-spec parse(Binary :: binary()) -> input_type().
parse(Binary) ->
intcode:parse(Binary).
-spec solve(Input :: input_type()) -> result_type().
solve(Prog) ->
TODO cleanup ; this throw thing to escape out of the first search is
a little yucky . We should be able to do both part 1 and 2 in the
Pid = start_intcode(Prog),
{Dist0, Grid0} =
try
explore(Pid,
#{pos => {0, 0},
dist => 0,
oxygen => not_found})
catch
{oxygen, Dist, Grid} ->
{Dist, Grid}
end,
Grid1 = fill_oxygen(Pid, Grid0),
OxygenDists =
lists:filtermap(fun ({{_, _}, V}) ->
{true, V};
(_) ->
false
end,
maps:to_list(Grid1)),
{Dist0, lists:max(OxygenDists)}.
start_intcode(Prog) ->
Parent = self(),
spawn(fun() ->
intcode:execute(Prog,
fun(State) -> {State, receive Input -> Input end} end,
fun(Output, State) ->
State ! Output,
State
end,
Parent)
end).
step_repair_robot(Dir, IntCodePid) ->
IntCodePid ! Dir,
receive
Output ->
Output
end.
explore(Pid, Grid0) ->
lists:foldl(fun(Dir, #{pos := Pos, dist := Dist} = Grid) ->
NewPos = move(Dir, Pos),
NewDist = Dist + 1,
case maps:is_key(NewPos, Grid) of
true -> Grid;
false ->
case step_repair_robot(Dir, Pid) of
1 ->
NewGrid =
explore(Pid,
maps:merge(Grid,
#{NewPos => $.,
dist => NewDist,
pos => NewPos})),
?assertEqual(1, step_repair_robot(opposite(Dir), Pid)),
maps:merge(NewGrid, #{pos => Pos, dist => Dist});
2 ->
throw({oxygen,
NewDist,
Time to fill oxygen
time => 0}})
end
end
end,
Grid0,
[1, 2, 3, 4]).
fill_oxygen(Pid, Grid0) ->
lists:foldl(fun(Dir, #{pos := Pos, time := Time} = Grid) ->
NewPos = move(Dir, Pos),
NewTime = Time + 1,
case maps:is_key(NewPos, Grid) of
true ->
T = maps:get(NewPos, Grid),
if Time < T -> maps:put(NewPos, Time, Grid);
true -> Grid
end;
false ->
case step_repair_robot(Dir, Pid) of
1 ->
NewGrid =
fill_oxygen(Pid,
maps:merge(Grid,
#{NewPos => NewTime,
time => NewTime,
pos => NewPos})),
step_repair_robot(opposite(Dir), Pid),
maps:merge(NewGrid, #{pos => Pos, time => Time})
end
end
end,
Grid0,
[1, 2, 3, 4]).
move(1, {X, Y}) ->
move(2, {X, Y}) ->
move(3, {X, Y}) ->
move(4, {X, Y}) ->
opposite(1) ->
2;
opposite(2) ->
1;
opposite(3) ->
4;
opposite(4) ->
3.
|
e1d10a97d31a33915ed0d8a4c1cfa57c565b7ec09e10aef5c698171d6d0b1222 | techascent/tech.ml | sparse_logreg_test.clj | (ns tech.v3.libs.smile.sparse-logreg-test
(:require [clojure.test :refer :all]
[tech.v3.dataset :as ds]
[tech.v3.dataset.modelling :as ds-mod]
[tech.v3.libs.smile.discrete-nb :as nb]
[tech.v3.libs.smile.nlp :as nlp]
[tech.v3.ml :as ml]
[tech.v3.libs.smile.sparse-logreg]))
(defn get-reviews []
(->
(ds/->dataset "test/data/reviews.csv.gz" {:key-fn keyword })
(ds/select-columns [:Text :Score])
(ds/update-column :Score #(map dec %))
(nlp/count-vectorize :Text :bow nlp/default-text->bow)
(nb/bow->SparseArray :bow :sparse #(nlp/->vocabulary-top-n % 100))
(ds-mod/set-inference-target :Score)))
(deftest does-not-crash
(let [reviews (get-reviews)
trained-model
(ml/train reviews {:model-type :smile.classification/sparse-logistic-regression
:n-sparse-columns 100
:sparse-column :sparse})]
(is (= [4 4 4 2]
(take 4
(:Score (ml/predict reviews trained-model)))))))
| null | https://raw.githubusercontent.com/techascent/tech.ml/7f2cc506980a05f0f8c85f8b1ff0cde6b9451f54/test/tech/v3/libs/smile/sparse_logreg_test.clj | clojure | (ns tech.v3.libs.smile.sparse-logreg-test
(:require [clojure.test :refer :all]
[tech.v3.dataset :as ds]
[tech.v3.dataset.modelling :as ds-mod]
[tech.v3.libs.smile.discrete-nb :as nb]
[tech.v3.libs.smile.nlp :as nlp]
[tech.v3.ml :as ml]
[tech.v3.libs.smile.sparse-logreg]))
(defn get-reviews []
(->
(ds/->dataset "test/data/reviews.csv.gz" {:key-fn keyword })
(ds/select-columns [:Text :Score])
(ds/update-column :Score #(map dec %))
(nlp/count-vectorize :Text :bow nlp/default-text->bow)
(nb/bow->SparseArray :bow :sparse #(nlp/->vocabulary-top-n % 100))
(ds-mod/set-inference-target :Score)))
(deftest does-not-crash
(let [reviews (get-reviews)
trained-model
(ml/train reviews {:model-type :smile.classification/sparse-logistic-regression
:n-sparse-columns 100
:sparse-column :sparse})]
(is (= [4 4 4 2]
(take 4
(:Score (ml/predict reviews trained-model)))))))
| |
462798799293adc243b48586f96d79f964093ac20ce6120ef4e958354645320f | deadpendency/deadpendency | Api.hs | # LANGUAGE DataKinds #
module DD.Serve.Api
( AppApi (..),
)
where
import DD.Handler.DependencyDeterminerHandler (DependencyDeterminerRoute)
import Servant.API.Generic ((:-))
newtype AppApi route = AppApi
{ dependencyDeterminer ::
route
:- DependencyDeterminerRoute
}
deriving stock (Generic)
| null | https://raw.githubusercontent.com/deadpendency/deadpendency/170d6689658f81842168b90aa3d9e235d416c8bd/apps/dependency-determiner/src/DD/Serve/Api.hs | haskell | # LANGUAGE DataKinds #
module DD.Serve.Api
( AppApi (..),
)
where
import DD.Handler.DependencyDeterminerHandler (DependencyDeterminerRoute)
import Servant.API.Generic ((:-))
newtype AppApi route = AppApi
{ dependencyDeterminer ::
route
:- DependencyDeterminerRoute
}
deriving stock (Generic)
| |
1a5c3430956ec2d9e1b9ffbf9a398e67c18ef562f84eb77be689679b7c8e0cc0 | daveray/dorothy | er.clj | Copyright ( c ) , 2011 . All rights reserved .
; The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 ( -1.0.php )
; which can be found in the file epl-v10.html at the root of this
; distribution.
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
; You must not remove this notice, or any other, from this software.
;
(ns dorothy.examples.er
(:use dorothy.core))
(defn -main []
(->
(graph :ER [
{:rankdir :LR}
(node-attrs {:shape :box})
:course :institute :student
(node-attrs {:shape :ellipse})
(subgraph [
[:node {:label "name"}]
:name0 :name1 :name2])
:code :grade :number
(node-attrs {:shape :diamond :style :filled :color :lightgrey})
"C-I" "S-C" "S-I"
; Edges
[:name0 :> :course]
[:code :> :course]
[:course :> "C-I" {:label "n" :len 1.00}]
["C-I" :> :institute {:label "1" :len 1.00}]
[:institute :> :name1]
[:institute :> "S-I" {:label "1" :len 1.00}]
["S-I" :> :student {:label "n" :len 1.00}]
[:student :> :grade]
[:student :> :name2]
[:student :> :number]
[:student :> "S-C" {:label "m" :len 1.00}]
["S-C" :> :course {:label "n" :len 1.00}]
{:label "\n\nEntity Relation Diagram\ndrawn by NEATO"
:fontsize 20}
])
dot
(show! {:layout :neato})))
| null | https://raw.githubusercontent.com/daveray/dorothy/207570804dfda2162a15b9ee55b5e76ec6e1ecfa/src/dorothy/examples/er.clj | clojure | The use and distribution terms for this software are covered by the
which can be found in the file epl-v10.html at the root of this
distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software.
Edges | Copyright ( c ) , 2011 . All rights reserved .
Eclipse Public License 1.0 ( -1.0.php )
(ns dorothy.examples.er
(:use dorothy.core))
(defn -main []
(->
(graph :ER [
{:rankdir :LR}
(node-attrs {:shape :box})
:course :institute :student
(node-attrs {:shape :ellipse})
(subgraph [
[:node {:label "name"}]
:name0 :name1 :name2])
:code :grade :number
(node-attrs {:shape :diamond :style :filled :color :lightgrey})
"C-I" "S-C" "S-I"
[:name0 :> :course]
[:code :> :course]
[:course :> "C-I" {:label "n" :len 1.00}]
["C-I" :> :institute {:label "1" :len 1.00}]
[:institute :> :name1]
[:institute :> "S-I" {:label "1" :len 1.00}]
["S-I" :> :student {:label "n" :len 1.00}]
[:student :> :grade]
[:student :> :name2]
[:student :> :number]
[:student :> "S-C" {:label "m" :len 1.00}]
["S-C" :> :course {:label "n" :len 1.00}]
{:label "\n\nEntity Relation Diagram\ndrawn by NEATO"
:fontsize 20}
])
dot
(show! {:layout :neato})))
|
c7bb1dc9614391465cade3611fffc46137d9594c87f16a15f28d54f010986f74 | liquidz/misaki | 02-directory-structure.html.clj | ; @layout post
; @title Directory Structure
(h2 "Default structure")
[:pre {:class "block"} "
root/
|
+- template/
| |
| +- posts/
| | |
| | +- YYYY-MM-DD-posttitle.html.clj
| |
| +- layouts/
| | |
| | +- default.clj
| |
| +- index.html.clj
|
+- public/
| |
| +- css/
| |
| +- js/
|
+- _config.clj"]
(h3 "root/")
(p "All public static files such as JavaScripts and StyleSheets are here. Compiled templates are also put here.")
(h3 "template/")
(p "Template root directory.")
(h3 "template/posts/")
(p "Blog posts data directory. All post files should be named **YYYY-MM-DD-posttitle.html.clj** as same as [Jekyll]() filename format.
This naming format can be configured **_config.clj**")
(h3 "template/layouts/")
(p "All layout files are here.")
(h3 "template/index.html.clj")
(p "Template file. When misaki compile templates, last extension(`.clj` in this case) is removed.
To edit templates, See [Edit Template](title:Edit Template).")
#-TEXT
index.html.clj => COMPILE => index.html
TEXT
(h3 "public/")
(p "Public root directory. Static files are here.")
(h3 "_config.clj")
(p "Configuration file for misaki.")
#-CLJ
{
;; directory setting
:public-dir "public/"
:tag-out-dir "tag/"
:template-dir "template/"
:post-dir "posts/"
:layout-dir "layouts/"
;; posts and tags url setting
;; default value: "/"
;; ex)
;; "/" => "/YYYY-MM/POST.html"
;; "/foo" => "/foo/YYYY-MM/POST.html"
:url-base "/"
;; dev server port
default value : 8080
:port 8080
;; site language
;; default value: "en"
:lang "en"
;; default site data
:site {:site-title "misaki"
:twitter "uochan"
:css ["+Sans"
"/css/main.css"]
:device-css ["/css/smartphone.css"]
:js ["/js/main.js"]}
;; post file compile hook
:compile-with-post ["index.html.clj" "atom.xml.clj"]
;; tag setting
:tag-layout "tag"
;; post setting
default value : # " ( "
:post-filename-regexp #"(\d{4})[-_](\d{1,2})[-_](\d{1,2})[-_](.+)$"
:post-filename-format "$(year)-$(month)/$(filename)"
;; post sort type (:date :name :title :date-desc :name-desc :title-desc)
;; default value: :date-desc
:post-sort-type :date-desc
;; clojurescript compile options
;; src-dir base is `:template-dir`
;; output-dir base is `:public-dir`
: cljs { : " cljs "
: output - to " js / main.js "
; :optimizations :whitespace
; :pretty-print true}
;; highlight setting
:code-highlight {:CLJ "lang-clj", :CLOJURE "lang-clj"}
;; flag for detailed log
;; default value: false
:detailed-log false
;; flag for error notification
;; default value: false
:notify? false
;; notify setting(OPTIONAL)
:notify-setting {;; title for fixing notification
;; default value: "$(filename)"
:fixed-title "$(filename)"
message for fixing notication
;; default value: "FIXED"
:fixed "FIXED"
;; title for failing notification
;; default value: "$(filename) : $(line)"
:failed-title "$(filename) : $(line)"
;; message for failing notification
;; default value: $(message)
:failed "$(message)"}
;; compiler setting
;; default value: "default"
:compiler "default"
}
CLJ
(ul [">= 0.2.6, Growl notify only support GNTP. If you want to use Growl notification with UDP, please checkout <= 0.2.5 version."
"See [Highlight Setting](title: Highlight Setting) about code highlight setting."])
(see-also ["Edit Template"
"Highlight Setting"])
| null | https://raw.githubusercontent.com/liquidz/misaki/b8104e632058e3b3da4487513d10e666e5914ec9/doc/.template/posts/02-directory-structure.html.clj | clojure | @layout post
@title Directory Structure
directory setting
posts and tags url setting
default value: "/"
ex)
"/" => "/YYYY-MM/POST.html"
"/foo" => "/foo/YYYY-MM/POST.html"
dev server port
site language
default value: "en"
default site data
post file compile hook
tag setting
post setting
post sort type (:date :name :title :date-desc :name-desc :title-desc)
default value: :date-desc
clojurescript compile options
src-dir base is `:template-dir`
output-dir base is `:public-dir`
:optimizations :whitespace
:pretty-print true}
highlight setting
flag for detailed log
default value: false
flag for error notification
default value: false
notify setting(OPTIONAL)
title for fixing notification
default value: "$(filename)"
default value: "FIXED"
title for failing notification
default value: "$(filename) : $(line)"
message for failing notification
default value: $(message)
compiler setting
default value: "default" |
(h2 "Default structure")
[:pre {:class "block"} "
root/
|
+- template/
| |
| +- posts/
| | |
| | +- YYYY-MM-DD-posttitle.html.clj
| |
| +- layouts/
| | |
| | +- default.clj
| |
| +- index.html.clj
|
+- public/
| |
| +- css/
| |
| +- js/
|
+- _config.clj"]
(h3 "root/")
(p "All public static files such as JavaScripts and StyleSheets are here. Compiled templates are also put here.")
(h3 "template/")
(p "Template root directory.")
(h3 "template/posts/")
(p "Blog posts data directory. All post files should be named **YYYY-MM-DD-posttitle.html.clj** as same as [Jekyll]() filename format.
This naming format can be configured **_config.clj**")
(h3 "template/layouts/")
(p "All layout files are here.")
(h3 "template/index.html.clj")
(p "Template file. When misaki compile templates, last extension(`.clj` in this case) is removed.
To edit templates, See [Edit Template](title:Edit Template).")
#-TEXT
index.html.clj => COMPILE => index.html
TEXT
(h3 "public/")
(p "Public root directory. Static files are here.")
(h3 "_config.clj")
(p "Configuration file for misaki.")
#-CLJ
{
:public-dir "public/"
:tag-out-dir "tag/"
:template-dir "template/"
:post-dir "posts/"
:layout-dir "layouts/"
:url-base "/"
default value : 8080
:port 8080
:lang "en"
:site {:site-title "misaki"
:twitter "uochan"
:css ["+Sans"
"/css/main.css"]
:device-css ["/css/smartphone.css"]
:js ["/js/main.js"]}
:compile-with-post ["index.html.clj" "atom.xml.clj"]
:tag-layout "tag"
default value : # " ( "
:post-filename-regexp #"(\d{4})[-_](\d{1,2})[-_](\d{1,2})[-_](.+)$"
:post-filename-format "$(year)-$(month)/$(filename)"
:post-sort-type :date-desc
: cljs { : " cljs "
: output - to " js / main.js "
:code-highlight {:CLJ "lang-clj", :CLOJURE "lang-clj"}
:detailed-log false
:notify? false
:fixed-title "$(filename)"
message for fixing notication
:fixed "FIXED"
:failed-title "$(filename) : $(line)"
:failed "$(message)"}
:compiler "default"
}
CLJ
(ul [">= 0.2.6, Growl notify only support GNTP. If you want to use Growl notification with UDP, please checkout <= 0.2.5 version."
"See [Highlight Setting](title: Highlight Setting) about code highlight setting."])
(see-also ["Edit Template"
"Highlight Setting"])
|
9d933361a7bcf822505f35868852f8a846d3933c7d4db63dfeb79094f9751d10 | padsproj/pads-haskell | First.hs | # LANGUAGE FlexibleContexts , TypeFamilies , TypeSynonymInstances , TemplateHaskell , QuasiQuotes ,
MultiParamTypeClasses , FlexibleInstances , UndecidableInstances ,
DeriveDataTypeable , ScopedTypeVariables #
MultiParamTypeClasses, FlexibleInstances, UndecidableInstances,
DeriveDataTypeable, ScopedTypeVariables #-}
module First where
import Language.Pads.Padsc
import Language.Pads.Testing
import FirstPads
import Language.Haskell.TH
import System.IO.Unsafe (unsafePerformIO)
import Data.Char as Char
import qualified Data.ByteString as B
import Data.Word
-- import qualified Text.Regex.ByteString as BRE
-- | Whitespace pattern: one or more tabs/spaces (or end of input),
-- with " " as the default/generation value.
ws = REd "[\t ]+|$" " "
---- PADS EXAMPLES
-- NOTE: throughout this file each @*_expects@ triple is
-- (parsed value, error count, remaining unconsumed input),
-- as demonstrated by e.g. myChar: "ab" ==> ('a', 0, "b").
[pads| |]
-- Simplest possible pads type: a single character.
[pads| type MyChar = Char |]
myChar_result = myChar_parseS "ab"
myChar_expects = ('a', 0,"b")
myChar_test = mkTestCase "myChar" myChar_expects myChar_result
-- Tuple type with a literal separator.
[pads| type IntPair = (Int, '|', Int) |]
intPair_result = intPair_parseS "12|23"
intPair_expects = ((12,23), 0,"")
intPair_test = mkTestCase "intPair" intPair_expects intPair_result
[pads| type Bar = (Int, ',', IntPair, ';', Int) |] -- reference to another named type
bar_result = bar_parseS "256,12|23;456:"
bar_expects = ((256, (12, 23), 456), 0, ":")
bar_test = mkTestCase "bar" bar_expects bar_result
[pads| type Bar2 = (Int, ',', (Int,':',Int), ';', Int) |] -- nested tuple type.
bar2_result = bar2_parseS "56,23:46;29"
bar2_expects = ((56,(23,46),29), 0 ,"")
bar2_test = mkTestCase "bar2" bar2_expects bar2_result
[pads| type BazR = Line (Int, ',', Int) |] -- type that consumes a line boundary.
bazr_result = bazR_parseS "33,33\n"
bazr_expects = ((33,33),0,"")
bazr_test = mkTestCase "bazr" bazr_expects bazr_result
{- Integer base type -}
[pads| type MyInt = Int |]
-- Integer base type: "23" parses completely with no errors.
myInt_result = myInt_parseS "23"
myInt_expects = (23,0,"")
myInt_test = mkTestCase "myInt" myInt_expects myInt_result
{- String base types -}
-- Haskell-level parameters spliced into the StringFW width expressions below.
testStrLen = 2

-- | One less than the argument (used so StrTy's width is 2 + (4-1) = 5).
computeLen = subtract 1
-- Fixed-width string whose width is a spliced Haskell expression (2 + 3 = 5).
[pads| type StrTy = StringFW <| testStrLen + (computeLen 4) |> |]
inputStrTy = "catdog"
strTy_results = strTy_parseS inputStrTy
strTy_expects = ("catdo", 0,"g")
strTy_test = mkTestCase "strTy" strTy_expects strTy_results
-- String terminated by (but not consuming past) the character 'o'.
[pads| type StrTy1 = StringC 'o' |]
strTy1_results = strTy1_parseS inputStrTy
strTy1_expects = ("catd",0,"og")
strTy1_test = mkTestCase "strTy1" strTy1_expects strTy1_results
[pads| type Baz = (StringFW 3,',',Int) |]
input_baz = "cat,123"
baz_results = baz_parseS input_baz
baz_expects = (("cat",123),0,"")
baz_test = mkTestCase "baz" baz_expects baz_results
{- Regular expression types -}
-- StringME: string matching a regular expression literal.
[pads| type StrME = StringME 'a+' |]
input_strME = "aaaab"
strME_results = strME_parseS input_strME
-- StringSE: string terminated by a regular expression (here "b" or "c").
[pads| type StrSE = StringSE <|RE "b|c"|> |]
input_strSE_1 = "aaaab"
input_strSE_2 = "aaaac"
strSE_results_1 = strSE_parseS input_strSE_1
strSE_results_2 = strSE_parseS input_strSE_2
-- Parameterized fixed-width string: width is (argument - 1).
[pads| type StrP1 (x::Int) = StringFW <|x - 1|> |]
input_strP1 = "abcd"
strP1_result = strP1_parseS 3 input_strP1
[pads| type StrHex = StringME '[0-9A-Fa-f]+' |]
input_strHex = "12abcds"
strHex_result = strHex_parseS input_strHex
-- Testing for Phex32FW, which is in Pads.Language.
-- Phex32FW n: fixed-width hexadecimal integer; "12bc" (width 4) is 0x12bc = 4796.
input_hex32FW = "12bc34"
phex32FW_results = phex32FW_parseS 4 input_hex32FW
phex32FW_expects = (4796, 0, "34")
phex32FW_test = mkTestCase "phex32FW" phex32FW_expects phex32FW_results
input2_hex32FW = "00bc34"
-- ((Phex32FW (188), Errors: 0), "34")
-- 'g' is not a hex digit, so this parse reports an error.
input3_hex32FW = "gbc34"
strhex32FW_result3 = phex32FW_parseS 4 input3_hex32FW -- Prints error message
-- Pair of fixed-width hex fields separated by a comma.
[pads| type HexPair = (Phex32FW 2, ',', Phex32FW 3) |]
input_hexpair = "aa,bbb"
hexpair_result = hexPair_parseS input_hexpair
{- Constrained types -}
-- Int constrained to [0, 256]; out-of-range values still parse but add one error.
[pads| type IntRange = constrain x :: Int where <| 0 <= x && x <= 256 |> |]
intRange24_input = "24"
intRange0_input = "0"
intRange256_input = "256"
intRangeLow_input = "-23"
intRangeHigh_input = "512"
intRangeBad_input = "aaa"
-- In-range values: zero errors.
result_intRange24 = intRange_parseS intRange24_input
expect_intRange24 = (24,0,"")
test_intRange24 = mkTestCase "IntRange24" expect_intRange24 result_intRange24
result_intRange0 = intRange_parseS intRange0_input
expect_intRange0 = (0,0,"")
test_intRange0 = mkTestCase "IntRange0" expect_intRange0 result_intRange0
result_intRange256 = intRange_parseS intRange256_input
expect_intRange256 = (256,0,"")
test_intRange256 = mkTestCase "IntRange256" expect_intRange256 result_intRange256
-- Out-of-range values: parsed value is kept, error count is 1.
result_intRangeLow = intRange_parseS intRangeLow_input
expect_intRangeLow = ((-23),1,"")
test_intRangeLow = mkTestCase "IntRangeLow" expect_intRangeLow result_intRangeLow
result_intRangeHigh = intRange_parseS intRangeHigh_input
expect_intRangeHigh = (512,1,"")
test_intRangeHigh = mkTestCase "IntRangeHigh" expect_intRangeHigh result_intRangeHigh
-- Non-numeric input: default value 0 with one error, input left unconsumed.
result_intRangeBad = intRange_parseS intRangeBad_input
expect_intRangeBad = (0,1,"aaa")
test_intRangeBad = mkTestCase "IntRangeBad" expect_intRangeBad result_intRangeBad
{- Note that the special variable "x" is in scope in the body of the predicate.
   md is the meta-data descriptor for the underlying type. -}
-- Parameterized constrained type: bounds come in as arguments, and the
-- predicate also requires the underlying Int to have parsed cleanly (md).
[pads| type IntRangeP (low::Int, high::Int) = constrain x :: Int where <| low <= x && x <= high && (numErrors md == 0) |> |]
result_intRangeP24 = intRangeP_parseS (0, 256) intRange24_input
expect_intRangeP24 = (24,0,"")
test_intRangeP24 = mkTestCase "IntRangeP24" expect_intRangeP24 result_intRangeP24
result_intRangeP0 = intRangeP_parseS (0, 256) intRange0_input
expect_intRangeP0 = (0,0,"")
test_intRangeP0 = mkTestCase "IntRangeP0" expect_intRangeP0 result_intRangeP0
result_intRangeP256 = intRangeP_parseS (0, 256) intRange256_input
expect_intRangeP256 = (256,0,"")
test_intRangeP256 = mkTestCase "IntRangeP256" expect_intRangeP256 result_intRangeP256
result_intRangePLow = intRangeP_parseS (0, 256) intRangeLow_input
expect_intRangePLow = ((-23), 1, "")
test_intRangePLow = mkTestCase "IntRangePLow" expect_intRangePLow result_intRangePLow
result_intRangePHigh = intRangeP_parseS (0, 256) intRangeHigh_input
expect_intRangePHigh = (512, 1,"")
test_intRangePHigh = mkTestCase "IntRangePHigh" expect_intRangePHigh result_intRangePHigh
-- Bad input counts two errors here: the Int parse failure plus the
-- failed constraint (numErrors md == 0).
result_intRangePBad = intRangeP_parseS (0, 256) intRangeBad_input
expect_intRangePBad = (0, 2,"aaa")
test_intRangePBad = mkTestCase "IntRangePBad" expect_intRangePBad result_intRangePBad
-- Record type whose second field is constrained by the first and a parameter.
[pads| data Record (bound::Int) = Record
    { i1 :: Int, 
      ',', i2 :: Int where <| i1 + i2 <= bound |> } |]
input_Record = "24,45"
result_Record = record_parseS 100 input_Record
expect_Record = (Record {i1 = 24, i2 = 45},0,"")
test_Record = mkTestCase "Record" expect_Record result_Record
-- Union: tries Numeric first, falls back to Alpha.
[pads| data Id = Numeric Int
            | Alpha (StringC ',') |]
input_IdInt = "23"
result_IdInt = id_parseS input_IdInt
expect_IdInt = (Numeric 23,0,"")
test_IdInt = mkTestCase "IdInt" expect_IdInt result_IdInt
input_IdStr = "hello"
result_IdStr = id_parseS input_IdStr
expect_IdStr = (Alpha ("hello"),0,"")
test_IdStr = mkTestCase "IdAlpha" expect_IdStr result_IdStr
-- Union with a constrained first branch; 23 > bound 10, so "23" falls
-- through to the Alpha2 branch instead of Numeric2.
[pads| data Id2 (bound::Int) = 
            Numeric2 (constrain n::Int where <| n <= bound |>) 
          | Alpha2 (StringC ',') |]
input_IdInt2 = "23"
result_IdInt2 = id2_parseS 10 input_IdInt2
expect_IdInt2 = (Alpha2 ("23"),0,"")
test_IdInt2 = mkTestCase "IdInt2" expect_IdInt2 result_IdInt2
input_IdStr2 = "hello"
result_IdStr2 = id2_parseS 10 input_IdStr2
expect_IdStr2 = (Alpha2 ("hello"),0,"")
test_IdStr2 = mkTestCase "IdAlpha2" expect_IdStr2 result_IdStr2
-- Union with a constrained first branch: "24" fails the IntRangeP (1,10)
-- constraint, so parsing falls through to the unconstrained Numeric3a branch.
[pads| data Id3 = Numeric3 (IntRangeP <|(1,10)|>)
               | Numeric3a Int
               | Lit3 ',' |]
input_IdInt3 = "24"
result_IdInt3 = id3_parseS input_IdInt3
expect_IdInt3 = (Numeric3a (24),0,"")
-- BUG FIX: this test previously compared expect_IdInt2 against result_IdInt2
-- (a copy-paste slip), so the Id3 integer case was never actually exercised.
test_IdInt3 = mkTestCase "IdInt3" expect_IdInt3 result_IdInt3
input_IdLit3 = ","
result_IdLit3 = id3_parseS input_IdLit3
expect_IdLit3 = (Lit3,0,"")
test_IdLit3 = mkTestCase "IdLit3" expect_IdLit3 result_IdLit3
-- Literal-branch union; the longer literal "ab" is tried first.
[pads| data Ab_or_a = AB "ab" | A "a" |]
input_AB = "ab"
result_Ab_or_a = ab_or_a_parseS input_AB
expect_Ab_or_a = (AB,0,"")
test_Ab_or_a = mkTestCase "Ab_or_a" expect_Ab_or_a result_Ab_or_a
[pads| data AB_test = AB_test { field_AB :: Ab_or_a , 'b'} |]
input_AB_test1 = "abb"
result_AB_test1 = aB_test_parseS input_AB_test1
expect_AB_test1 = (AB_test {field_AB = AB},0,"")
test_AB_test1 = mkTestCase "AB_test1" expect_AB_test1 result_AB_test1
-- "ab" commits to the AB branch, leaving no 'b' for the record literal:
-- pads does not backtrack here, hence one error.
input_AB_test2 = "ab"
result_AB_test2 = aB_test_parseS input_AB_test2
--expect_AB_test2 = (AB_test {field_AB = A},0,"") -- if backtracking
expect_AB_test2 = (AB_test {field_AB = AB},1,"")
test_AB_test2 = mkTestCase "AB_test2" expect_AB_test2 result_AB_test2
-- HTTP-like method enumeration and version record used by Request below.
[pads| data Method  = GET | PUT | LINK | UNLINK | POST
       data Version = Version 
            {"HTTP/",                    
             major :: Int, '.',   
             minor :: Int} 
|]
-- | Protocol sanity check used by the Request type below: LINK and UNLINK
-- requests are only legal for HTTP/1.0; every other method is unrestricted.
checkVersion :: Method -> Version -> Bool
checkVersion LINK   v = major v == 1 && minor v == 0
checkVersion UNLINK v = major v == 1 && minor v == 0
checkVersion _      _ = True
-- Quoted request line; the version field is validated against the method
-- via checkVersion.
[pads| data Request = Request 
            { '"',  method  :: Method       
            , ' ',  url     :: StringC ' '  
            , ' ',  version :: Version where <| checkVersion method version |> 
            , '"'                            
            } |]
input_method_get = "GET"
result_method_get = method_parseS input_method_get
expect_method_get = (GET,0,"")
test_method_get = mkTestCase "Method_get" expect_method_get result_method_get
input_method_put = "PUT"
result_method_put = method_parseS input_method_put
expect_method_put = (PUT,0,"")
test_method_put = mkTestCase "Method_put" expect_method_put result_method_put
input_method_link = "LINK"
result_method_link = method_parseS input_method_link
expect_method_link = (LINK,0,"")
test_method_link = mkTestCase "Method_link" expect_method_link result_method_link
input_method_post = "POST"
result_method_post = method_parseS input_method_post
expect_method_post = (POST,0,"")
test_method_post = mkTestCase "Method_post" expect_method_post result_method_post
input_version = "HTTP/1.2"
result_version = version_parseS input_version
expect_version = (Version {major = 1, minor = 2},0,"")
test_version = mkTestCase "Version" expect_version result_version
-- Good request: PUT is unrestricted, so no errors.
input_request_G = "\"PUT /www.google.com HTTP/1.0\""
result_request_G = request_parseS input_request_G
expect_request_G = (Request {method = PUT, url = "/www.google.com", version = Version {major = 1, minor = 0}}, 0, "")
test_request_G = mkTestCase "Request_G" expect_request_G result_request_G
-- Bad request: LINK with HTTP/1.3 violates checkVersion, giving one error.
input_request_B = "\"LINK /www.google.com HTTP/1.3\""
result_request_B = request_parseS input_request_B
expect_request_B = (Request {method = LINK, url = "/www.google.com", version = Version {major = 1, minor = 3}},1, "")
test_request_B = mkTestCase "Request_B" expect_request_B result_request_B
-- EOR consumes a record (line) boundary between the two Ints.
[pads| type Eor_Test = (Int, EOR, Int) |]
input_eor_test = "23\n56"
result_eor_test = eor_Test_parseS input_eor_test
expect_eor_test = ((23,56),0,"")
test_eor_test = mkTestCase "Eor_Test" expect_eor_test result_eor_test
-- EOF requires the input to be exhausted; trailing junk is an error.
[pads| type Eof_Test = (Int, EOR, Int, EOF) |]
input_eof_test_G = "23\n56"
result_eof_test_G = eof_Test_parseS input_eof_test_G
expect_eof_test_G = ((23,56),0,"")
test_eof_test_G = mkTestCase "Eof_TestG" expect_eof_test_G result_eof_test_G
input_eof_test_B = "23\n56ab"
result_eof_test_B = eof_Test_parseS input_eof_test_B
expect_eof_test_B = ((23,56), 1,"ab")
test_eof_test_B = mkTestCase "Eof_TestB" expect_eof_test_B result_eof_test_B
{- Restate after Maybe is implemented -}
-- Optional element between separators: present ("35") or absent ("").
[pads| type Opt_test = (Int, '|', Maybe Int, '|', Int) |]
input_opt_test_j = "34|35|56"
result_opt_test_j = opt_test_parseS input_opt_test_j
expect_opt_test_j = ((34,Just 35,56),0,"")
test_opt_test_j = mkTestCase "Opt_test_j" expect_opt_test_j result_opt_test_j
input_opt_test_n = "34||56"
result_opt_test_n = opt_test_parseS input_opt_test_n
expect_opt_test_n = ((34,Nothing,56),0,"")
test_opt_test_n = mkTestCase "Opt_test_n" expect_opt_test_n result_opt_test_n
{- LIST EXAMPLES -}
-- List with no separator and no terminator: repeats until an element fails.
[pads| type Entries_nosep_noterm = [StringFW 3] |]
input_entries_nosep_noterm = "123456789"
result_entries_nosep_noterm = entries_nosep_noterm_parseS input_entries_nosep_noterm
expect_entries_nosep_noterm = (["123","456","789"],0,"")
test_entries_nosep_noterm = mkTestCase "NoSep_NoTerm" expect_entries_nosep_noterm result_entries_nosep_noterm
input_entries_nosep_noterm' = "1234567890"
result_entries_nosep_noterm' = entries_nosep_noterm_parseS input_entries_nosep_noterm'
expect_entries_nosep_noterm' = (["123","456","789"],0,"0")
test_entries_nosep_noterm' = mkTestCase "NoSep_NoTerm'" expect_entries_nosep_noterm' result_entries_nosep_noterm'
-- Empty input yields the empty list without error.
[pads| type Entries_nosep_noterm2 = [Char] |]
input_entries_nosep_noterm2 = ""
result_entries_nosep_noterm2 = entries_nosep_noterm2_parseS input_entries_nosep_noterm2
expect_entries_nosep_noterm2 = ([],0,"")
test_entries_nosep_noterm2 = mkTestCase "NoSep_NoTerm2" expect_entries_nosep_noterm2 result_entries_nosep_noterm2
-- List of constrained elements stops at the first failing element ('5').
[pads| type EvenInt = constrain x :: Digit where <| x `mod` 2 == 0 |> 
       type EvenInts = [EvenInt] |]
input_evenInts = "2465"
result_evenInt = evenInt_parseS input_evenInts
expect_evenInt = ( 2,0,"465")
test_evenInt = mkTestCase "EvenInt" expect_evenInt result_evenInt
result_evenInts = evenInts_parseS input_evenInts
expect_evenInts = ([2,4,6],0,"5")
test_evenInts = mkTestCase "EvenInts" expect_evenInts result_evenInts
-- List with a separator but no terminator.
[pads| type DigitList = [Digit | ','] |]
input_digitListG = "1,2,3"
input_digitList2G = "1,2,3|fed"
input_digitListB = "1,b,3"
result_digitListG = digitList_parseS input_digitListG
expect_digitListG = ([1,2,3],0,"")
test_digitListG = mkTestCase "DigitListG" expect_digitListG result_digitListG
result_digitList2G = digitList_parseS input_digitList2G
expect_digitList2G = ([1,2,3],0,"|fed")
test_digitList2G = mkTestCase "DigitList2G" expect_digitList2G result_digitList2G
result_digitListB = digitList_parseS input_digitListB
expect_digitListB = ([1],0,",b,3")
test_digitListB = mkTestCase "DigitListB" expect_digitListB result_digitListB
-- Length-bounded list; a bad element is replaced by the default 0 plus an error.
[pads| type DigitListLen (x::Int) = [Digit] length <|x + 1 |> |]
input_digitListLenG = "123456"
input_digitListLenB = "12a456"
result_digitListLenG = digitListLen_parseS 4 input_digitListLenG
expect_digitListLenG = ([1,2,3,4,5],0,"6")
test_digitListLenG = mkTestCase "DigitListLenG" expect_digitListLenG result_digitListLenG
result_digitListLenB = digitListLen_parseS 4 input_digitListLenB
expect_digitListLenB = ([1,2,0,4,5],1 ,"6")
test_digitListLenB = mkTestCase "DigitListLenB" expect_digitListLenB result_digitListLenB
-- Length-bounded list with a multi-character separator.
[pads| type DigitListLenSep (x::Int) = [Digit | "ab" ] length <|x + 1|> |]
input_digitListLenSepG = "1ab2ab3ab4ab5ab6ab7ab"
input_digitListLenSepB = "1ab2ab3abDab5ab6ab7ab"
result_digitListLenSepG = digitListLenSep_parseS 4 input_digitListLenSepG
expect_digitListLenSepG = ([1,2,3,4,5],0,"ab6ab7ab")
test_digitListLenSepG = mkTestCase "DigitListLenSepG" expect_digitListLenSepG result_digitListLenSepG
result_digitListLenSepB = digitListLenSep_parseS 4 input_digitListLenSepB
expect_digitListLenSepB = ([1,2,3,0,5],1,"ab6ab7ab")
test_digitListLenSepB = mkTestCase "DigitListLenSepB" expect_digitListLenSepB result_digitListLenSepB
-- List terminated by end of record (newline).
[pads| type DigitListTerm = [Digit] terminator EOR|]
input_digitListTermG = "12345\nhello"
result_digitListTermG = digitListTerm_parseS input_digitListTermG
expect_digitListTermG = ([1,2,3,4,5],0,"hello")
test_digitListTermG = mkTestCase "DigitListTermG" expect_digitListTermG result_digitListTermG
input_digitListTermB = "12345,h"
result_digitListTermB = digitListTerm_parseS input_digitListTermB
expect_digitListTermB = ([1,2,3,4,5,0,0],2,"")
test_digitListTermB = mkTestCase "DigitListTermB" expect_digitListTermB result_digitListTermB
-- List with both a separator and a terminator literal.
[pads| type DigitListTermSep = [Digit | '|' ] terminator ';' |]
input_digitListTermSepG = "1|2|3|4|5|6;hello"
result_digitListTermSepG = digitListTermSep_parseS input_digitListTermSepG
expect_digitListTermSepG = ([1,2,3,4,5,6], 0,"hello")
test_digitListTermSepG = mkTestCase "digitListTermSepG" expect_digitListTermSepG result_digitListTermSepG
input_digitListTermSepB = "1|2|3|4|56;hello"
result_digitListTermSepB = digitListTermSep_parseS input_digitListTermSepB
expect_digitListTermSepB = ([1,2,3,4,5],1,"hello")
test_digitListTermSepB = mkTestCase "digitListTermSepB" expect_digitListTermSepB result_digitListTermSepB
-- Try: parse without consuming input (lookahead).
[pads| type TryTest = (Try Char, StringFW 3) |]
input_tryTest = "abc123"
result_tryTest = tryTest_parseS input_tryTest
expect_tryTest = (('a',"abc"),0,"123")
test_tryTest = mkTestCase "tryTest" expect_tryTest result_tryTest
[pads| type TryTestD = (Try Digit, StringFW 3) |]
input_tryTestDG = "123abc"
result_tryTestDG = tryTestD_parseS input_tryTestDG
expect_tryTestDG = ((1,"123"),0,"abc")
test_tryTestDG = mkTestCase "tryTestDG" expect_tryTestDG result_tryTestDG
-- Note that 'try_parseM' does not return an error when it fails to parse a
-- digit (and therefore uses the default value of "0") because a "try" parser
-- should fail silently, similar to how the 'try' combinator works in parsec.
input_tryTestDB = "abc123"
result_tryTestDB = tryTestD_parseS input_tryTestDB
expect_tryTestDB = ((0,"abc"),0,"123")
test_tryTestDB = mkTestCase "tryTestDB" expect_tryTestDB result_tryTestDB
{- Full result with metadata:
((TryTestD (0,"abc"),
(Errors: 1 Encountered a when expecting Digit. at: Line: 0, Offset:
0,(Errors: 1 Encountered a when expecting Digit. at: Line: 0, Offset:
0,Errors: 0))),"123")
XXX: we are getting a repeat error message because of change to how errors are
propagated. Need to work on cleaning up error reporting.
-}
-- A Try type can serve as a list terminator without consuming the digit.
[pads| type ListWithTry = ([Char] terminator (Try Digit), Digit) |]
input_ListWithTry = "cat123"
result_ListWithTry = listWithTry_parseS input_ListWithTry
expect_ListWithTry = ((['c', 'a', 't'],1),0,"23")
test_ListWithTry = mkTestCase "ListWithTry" expect_ListWithTry result_ListWithTry
-- Void matches nothing and contributes no component to the result tuple.
[pads| type WithVoid = (Char, ',', Void, '|') |]
input_WithVoid = "a,|rest"
result_WithVoid = withVoid_parseS input_WithVoid
expect_WithVoid = ('a',0,"rest")
test_WithVoid = mkTestCase "WithVoid" expect_WithVoid result_WithVoid
-- Union whose last branch (Void) always succeeds, consuming nothing.
[pads| data VoidOpt   = PDigit Digit | Pcolor "red" | Pnothing Void
       type VoidEntry = (VoidOpt, StringFW 3) |]
input_voidEntry1 = "9abcdef"
result_voidEntry1 = voidEntry_parseS input_voidEntry1
expect_voidEntry1 = ((PDigit 9,"abc"),0,"def")
test_voidEntry1 = mkTestCase "VoidEntry1" expect_voidEntry1 result_voidEntry1
input_voidEntry2 = "redabcdef"
result_voidEntry2 = voidEntry_parseS input_voidEntry2
expect_voidEntry2 = ((Pcolor,"abc"),0,"def")
test_voidEntry2 = mkTestCase "VoidEntry2" expect_voidEntry2 result_voidEntry2
input_voidEntry3 = "abcdef"
result_voidEntry3 = voidEntry_parseS input_voidEntry3
expect_voidEntry3 = ((Pnothing,"abc"),0,"def")
test_voidEntry3 = mkTestCase "VoidEntry3" expect_voidEntry3 result_voidEntry3
-- Case-dispatched union: the branch is chosen by the Haskell value 'which'.
[pads| data Switch (which :: Int) = 
  case <| which |> of
      0         -> Even Int where <| even `mod` 2 == 0 |> 
    | 1         -> Comma ','
    | otherwise -> Missing Void |]
input_switch0 = "2hello"
input_switch1 = ",hello"
input_switchOther = "hello"
result_switch0 = switch_parseS 0 input_switch0
expect_switch0 = (Even (2),0,"hello")
test_switch0 = mkTestCase "switch0" expect_switch0 result_switch0
result_switch1 = switch_parseS 1 input_switch1
expect_switch1 = (Comma,0,"hello")
test_switch1 = mkTestCase "switch1" expect_switch1 result_switch1
result_switchOther = switch_parseS 2 input_switchOther
expect_switchOther = (Missing,0,"hello")
test_switchOther = mkTestCase "switchOther" expect_switchOther result_switchOther
-- Built-in StringLn: reads up to (but not including) the newline.
result_stringln = stringLn_parseS "hello\ngoodbye"
expect_stringln = ("hello",0,"\ngoodbye")
test_stringln = mkTestCase "stringln" expect_stringln result_stringln
-- Dependent records: the body's format is selected by the header field.
[pads| data MyBody (which::Int) = 
  case <| which |> of
     0         -> First Int
   | 1         -> Second (StringC ',')
   | otherwise -> Other Void
       data MyEntry = MyEntry
          { header  :: Int, ','
          , body    :: MyBody header, ','
          , trailer :: Char}
       type MyData = [Line MyEntry] terminator EOF |]
input_myData = "0,23,a\n1,hello,b\n2,,c\n"
result_myData = myData_parseS input_myData
expect_myData = ([MyEntry {header = 0, body = First (23), trailer = 'a'},
                  MyEntry {header = 1, body = Second ("hello"), trailer = 'b'},
                  MyEntry {header = 2, body = Other, trailer = 'c'}],0, "")
test_myData = mkTestCase "MyData" expect_myData result_myData
-- Dependent field width: the name's length is given by the preceding number.
[pads| data HP = HP { student_num :: Int, ',', 
                      student_name :: StringFW student_num }
       type HP_data = [Line HP] terminator EOF |]
input_hp_data = "8,Hermione\n3,Ron\n5,Harry\n"
result_hp_data = hP_data_parseS input_hp_data
expect_hp_data = ([HP {student_num = 8, student_name = "Hermione"},
                   HP {student_num = 3, student_name = "Ron"},
                   HP {student_num = 5, student_name = "Harry"}], 0, "")
test_hp_data = mkTestCase "HP Data" expect_hp_data result_hp_data
-- Same data parsed from a file on disk (unsafePerformIO keeps the test pure-looking).
test_file = "examples/data/test_file"
result_hp_data_file_parse :: (HP_data, HP_data_md) = unsafePerformIO $ parseFileWith hP_data_parseM test_file
expect_hp_data_file_parse = 
  ( [HP {student_num = 8, student_name = "Hermione"},
     HP {student_num = 3, student_name = "Ron"},
     HP {student_num = 5, student_name = "Harry"}], 0)
test_hp_data_file_parse = mkFileTestCase "HP file" expect_hp_data_file_parse result_hp_data_file_parse
-- | Convert a String to a strict ByteString, one byte per character.
strToBS cs = B.pack (map chrToWord8 cs)
-- Text consumes the entire remaining input as an opaque blob.
[pads| newtype MyDoc = MyDoc Text |]
myDoc_input_file = "examples/data/test_file"
myDoc_result :: (MyDoc, MyDoc_md) = unsafePerformIO $ parseFile myDoc_input_file
myDoc_expects = (MyDoc (Text (strToBS "8,Hermione\n3,Ron\n5,Harry\n")),0)
myDoc_test = mkFileTestCase "myDoc" myDoc_expects myDoc_result
-- A Haskell identifier may be used as a literal inside a pads declaration.
acomma = ","
[pads| data LitRec = LitRec { fstField :: Int, acomma, sndField :: Int} |]
litRec_input = "12,34"
litRec_result = litRec_parseS litRec_input
litRec_expects = (LitRec {fstField = 12, sndField = 34},0,"")
litRec_test = mkTestCase "Haskell identifier literal" litRec_expects litRec_result
-- Regular-expression literal as a separator.
[pads| type WhiteSpace = (Int, '[ \t]+', Int) |]
whiteSpace_input = "12      34"
whiteSpace_result = whiteSpace_parseS whiteSpace_input
whiteSpace_expects = ((12,34),0,"")
whiteSpace_test = mkTestCase "regular expression literal" whiteSpace_expects whiteSpace_result
-- Haskell-bound regular expression (ws, defined near the top) as a separator.
[pads| type WhiteSpace2 = (Int, ws, Int) |]
whiteSpace2_input = "12      34"
whiteSpace2_result = whiteSpace2_parseS whiteSpace2_input
whiteSpace2_expects = ((12,34),0,"")
whiteSpace2_test = mkTestCase "Haskell expression regular expression literal" whiteSpace2_expects whiteSpace2_result
[pads| type RE_ty = (StringME '[tod]', ws, StringME 'a+') |]
rE_ty_input = "t aaaa"
rE_ty_result = rE_ty_parseS rE_ty_input
rE_ty_expects = (("t","aaaa"),0,"")
rE_ty_test = mkTestCase "regular expression abbreviation for StringME" rE_ty_expects rE_ty_result
-- Nested record disciplines: the inner partition uses Windows (\r\n) records.
[pads| type Disc = (Int, EOR, Int, EOR, (partition (Int, EOR, Int, EOR) using windows), Int, EOR) |]
disc_input = "1\n2\n3\r\n4\r\n5\n"
disc_result = disc_parseS disc_input
disc_expects = ((1,2,(3,4),5),0,"")
disc_test = mkTestCase "multiple record disciplines" disc_expects disc_result
-- Polymorphic pads type instantiated at Char.
[pads| data Exxy a = Exxy {exxy :: Int, aa :: a} 
       type ExxyInt = Exxy Char |]
exxy_input = "32635def"
exxy_result = exxyInt_parseS exxy_input
exxy_expects = (Exxy {exxy = 32635, aa = 'd'},0,"ef")
exxy_test = mkTestCase "label overlap" exxy_expects exxy_result
-- Fixed-width record discipline: records are 3-byte chunks, not newline-delimited.
[pads| type OneLine = [Char] terminator EOR
       type Lines   = [OneLine] terminator EOF
       type LinesFW = partition Lines using <| bytes 3 |> 
|]
linesFW_input = "123456789"
linesFW_result = linesFW_parseS linesFW_input
linesFW_expects = (["123","456","789"],0,"")
linesFW_test = mkTestCase "fixed-width record discpline" linesFW_expects linesFW_result
[pads| type Strs = [StringSE ws | ws] terminator EOR |]
strs_input = "0.700264\n"
strs_result = strs_parseS strs_input
-- Computed (non-parsed) field: uu is derived from vv after parsing.
[pads| data Vals = Vals { vv :: Int, uu = value <| even vv |> :: Bool, ww::Char} |]
vals_input = "123x"
vals_result = vals_parseS vals_input
vals_expects = (Vals {vv=123,uu=False,ww='x'},0,"")
vals_test = mkTestCase "values" vals_expects vals_result
[pads| data Vals2 = Vals2 { vv2 :: (Int,",",Int), 
                            uu2 = value <| fst vv2 `mod` snd vv2 == 0 |> :: Bool,
                            ww2 :: Char} |]
vals2_input = "12,3x"
vals2_result = vals2_parseS vals2_input
vals2_expects = (Vals2 {vv2=(12,3),uu2=True,ww2='x'},0,"")
vals2_test = mkTestCase "values" vals2_expects vals2_result
{-
[pads| data Vals3 a = Vals3 { vv3 :: (Int,",",Int),
                              uu3 = value <| [] |> :: [a],
                              ww3 :: Char} |]
vals3_input = "12,3x"
vals3_result = vals3_parseS vals3_input
vals3_expects = (Vals3 {vv3=(12,3),uu3=[],ww3='x'},0,"")
vals3_test = mkTestCase "values" vals3_expects vals3_result
-}
-- Floating point in plain and scientific notation, one per line.
[pads| type Doubles = [Double | EOR] terminator EOF |]
doubles_input = "12.34\n1\n-12.0\n1.3e4\n1.2e-2"
doubles_result = doubles_parseS doubles_input
doubles_expects = ([12.34,1.0,-12.0,13000.0,1.2e-2],0,"")
doubles_test = mkTestCase "doubles" doubles_expects doubles_result
-- StringSE with the "$" (end-of-line) regex: each element is a whole line.
[pads| type StringSEs = [StringSE <|RE "$"|> | EOR] terminator EOF |]
stringSEs_input = "12.34\n1\n-12.0\n1.3e4\n1.2e-2"
stringSEs_result = stringSEs_parseS stringSEs_input
stringSEs_expects = (["12.34","1","-12.0","1.3e4","1.2e-2"],0,"")
stringSEs_test = mkTestCase "stringSEs" stringSEs_expects stringSEs_result
-- Fixed-width lines; short or long lines contribute errors ("XXX" is the default).
[pads| type StringFWs = [StringFW 3| EOR] terminator EOF |]
stringFWs_input = "abc\nabcd\nab\nabc"
stringFWs_result = stringFWs_parseS stringFWs_input
stringFWs_expects = (["abc","abc","XXX","abc"],2,"")
stringFWs_test = mkTestCase "stringFWs" stringFWs_expects stringFWs_result
-- StringESC: stop characters ";," may be escaped with '!'.
[pads| type StringESCs = [(StringESC <| ('!', ";,") |>, '[;,]') | EOR] terminator EOF |]
stringESCs_input = "abc\na;\nb,\na!;bc,\na!,cd\nhe!"
stringESCs_result = stringESCs_parseS stringESCs_input
stringESCs_expects = (["abc","a","b","a;bc","a,cd","he!"],4, "")
stringESCs_test = mkTestCase "stringESCs" stringESCs_expects stringESCs_result
-- StringP: longest prefix satisfying a Haskell character predicate.
[pads| type StringPs = [StringP Char.isDigit | EOR] terminator EOF |]
stringPs_input = "123\na\n123a"
stringPs_result = stringPs_parseS stringPs_input
stringPs_expects = (["123","","123"],2, "")
stringPs_test = mkTestCase "stringPs" stringPs_expects stringPs_result
{- Bit-level functionality tests -}
-- Single bits parsed as Bools; "using none" disables record boundaries.
[pads| type BitBools = (partition [BitBool] using none) |]
bitBools_input = "a" -- binary 01100001
bitBools_result = bitBools_parseS bitBools_input
bitBools_expects = ([False,True,True,False,False,False,False,True], 0, "")
bitBools_test = mkTestCase "bitBools" bitBools_expects bitBools_result
bitBools_input2 = "a\n" -- binary 01100001 00001010
bitBools_result2 = bitBools_parseS bitBools_input2
bitBools_expects2 = ([False,True,True,False,False,False,False,True,
False,False,False,False,True,False,True,False], 0, "")
bitBools_test2 = mkTestCase "bitBools" bitBools_expects2 bitBools_result2
[pads| type IncompleteBitBools = (partition (BitBool,
BitBool,
BitBool) using none) |]
incompleteBitBools_input = "4"
incompleteBitBools_result = incompleteBitBools_parseS incompleteBitBools_input
incompleteBitBools_expects = ((False,False,True), 0, "4")
incompleteBitBools_test = mkTestCase "incompleteBitBools"
incompleteBitBools_expects
incompleteBitBools_result
[pads| type ArithPixel = (partition (Bits16 9,
Bits8 5,
Bits8 5,
Bits8 5,
Bits8 4,
Bits8 4) using none) |]
arithPixel_input = map word8ToChr [136,114,32,0]
arithPixel_result = arithPixel_parseS arithPixel_input
arithPixel_expects = ((272,28,17,0,0,0), 0, "")
arithPixel_test = mkTestCase "arithPixel" arithPixel_expects arithPixel_result
[pads| type Mixed = (partition (StringC ' ',
' ',
Bits8 4,
BitBool,
Bits8 3,
Char) using none) |]
mixed_input = "Hello " ++ (map word8ToChr [74]) ++ "c"
mixed_result = mixed_parseS mixed_input
mixed_expects = (("Hello",4,True,2,'c'), 0, "")
mixed_test = mkTestCase "mixed" mixed_expects mixed_result
[pads| type OddWidths = (partition (Bits32 19,
Bits64 39,
Bits8 1,
Bits8 5) using none) |]
oddWidths_input = map word8ToChr [104,46,174,3,185,8,6,158]
oddWidths_result = oddWidths_parseS oddWidths_input
oddWidths_expects = ((213365,240768000026,0,30), 0, "")
oddWidths_test = mkTestCase "oddWidths" oddWidths_expects oddWidths_result
[pads| type LargeWidths = (partition (Bits8 7,
BitField 89,
BitField 65) using none) |]
largeWidths_input = map word8ToChr [1,0,0,0,0,0,0,0,0,0,0,1,128,0,0,0,0,0,0,0,128]
largeWidths_result = largeWidths_parseS largeWidths_input
largeWidths_expects = ((0,309485009821345068724781057,18446744073709551617), 0, map word8ToChr [128])
largeWidths_test = mkTestCase "largeWidths" largeWidths_expects largeWidths_result
[pads| data EnumType (x :: Bits8) = case x of 0 -> ZERO {}
| 1 -> ONE {}
| 2 -> TWO {}
| _ -> OTHER {}
data Enumerate = Enumerate {x :: Bits8 3,
Bits8 5,
y :: EnumType x}
type Enumerated = (partition Enumerate using none) |]
enumerated_input = map word8ToChr [64]
enumerated_result = enumerated_parseS enumerated_input
enumerated_expects = (Enumerate {x = 2, y = TWO}, 0, "")
enumerated_test = mkTestCase "Enumerated" enumerated_expects enumerated_result
enumerated_input_wc = map word8ToChr [255]
enumerated_result_wc = enumerated_parseS enumerated_input_wc
enumerated_expects_wc = (Enumerate {x = 7, y = OTHER}, 0, "")
enumerated_test_wc = mkTestCase "EnumeratedWC" enumerated_expects_wc enumerated_result_wc
-- Same constructor-dispatch pattern as EnumType, but the discriminant is a
-- single bit parsed as BitBool (the final bit of the byte).
[pads| data EnumTypeBool (x' :: BitBool) = case x' of True -> ON {}
                                                    | False -> OFF {}
       data EnumerateBool = EnumerateBool {Bits8 7,
                                           x' :: BitBool,
                                           y' :: EnumTypeBool x'}
       type EnumeratedBool = (partition EnumerateBool using none) |]
-- 1 = 0000000 1: the low bit is set, so x' = True selects ON.
enumeratedBool_input = map word8ToChr [1]
enumeratedBool_result = enumeratedBool_parseS enumeratedBool_input
enumeratedBool_expects = (EnumerateBool {x' = True, y' = ON}, 0, "")
enumeratedBool_test = mkTestCase "EnumeratedBool" enumeratedBool_expects enumeratedBool_result
-- A CharNB read at a non-byte-aligned offset: 3 bits, an 8-bit char
-- straddling the byte boundary, then the remaining 5 bits.
[pads| type NBA_char = (partition (Bits8 3, CharNB, Bits8 5) using none) |]
nBA_char_input = map word8ToChr [70,181] -- 01000110 10110101
nBA_char_result = nBA_char_parseS nBA_char_input
-- Bit split 010 | 00110101 | 10101 -> (2, '5', 21); 0x35 = '5'.
-- (Restored binding: the original line had lost everything but its comment.)
nBA_char_expects = ((2,'5',21), 0, "") -- 010 00110101 10101
nBA_char_test = mkTestCase "NBA_char" nBA_char_expects nBA_char_result
-- Two CharNBs read back-to-back on byte boundaries: each input byte comes
-- back unchanged as a Char.
[pads| type NBA_char_aligned = (partition (CharNB, CharNB) using none)|]
nBA_char_aligned_input = map word8ToChr [70,181]
nBA_char_aligned_result = nBA_char_aligned_parseS nBA_char_aligned_input
nBA_char_aligned_expects = ((word8ToChr 70, word8ToChr 181), 0, "")
nBA_char_aligned_test = mkTestCase "NBA_char_aligned" nBA_char_aligned_expects nBA_char_aligned_result
-- A 2-byte ByteString read at a non-byte-aligned offset: 6 bits, 16 bits of
-- bytes straddling byte boundaries, then the final 2 bits.
[pads| type NBA_BS = (partition (Bits8 6, BytesNB 2, Bits8 2) using none) |]
-- Bit split of 01100011 11101010 00000011:
-- 011000 | 11111010 10000000 | 11 -> (24, B.pack [250,128], 3).
-- (Restored binding: the original line had lost everything but its comment.)
nBA_BS_input = map word8ToChr [99,234,3] -- 01100011 11101010 00000011
nBA_BS_result = nBA_BS_parseS nBA_BS_input
nBA_BS_expects = ((24, B.pack [250,128], 3), 0, "")
nBA_BS_test = mkTestCase "NBA_BS" nBA_BS_expects nBA_BS_result
-- BytesNB on a byte boundary: the 4 input bytes come back verbatim.
[pads| type NBA_BS_aligned = (partition (BytesNB 4) using none) |]
nBA_BS_aligned_input = map word8ToChr [9,8,7,255]
nBA_BS_aligned_result = nBA_BS_aligned_parseS nBA_BS_aligned_input
nBA_BS_aligned_expects = ((B.pack [9,8,7,255]), 0, "")
nBA_BS_aligned_test = mkTestCase "NBA_BS_aligned" nBA_BS_aligned_expects nBA_BS_aligned_result
-- Error path: BytesNB 1 on empty input yields the default single zero byte
-- and records one parse error.
[pads| type NBA_BS_empty = (partition (BytesNB 1) using none) |]
nBA_BS_empty_input = ""
nBA_BS_empty_result = nBA_BS_empty_parseS nBA_BS_empty_input
nBA_BS_empty_expects = ((B.singleton 0), 1, "")
nBA_BS_empty_test = mkTestCase "NBA_BS_empty" nBA_BS_empty_expects nBA_BS_empty_result
-- A fixed-width string read at a non-byte-aligned offset: 4 bits, a 3-char
-- string straddling byte boundaries, then 4 trailing bits.
[pads| type NBA_StringFW = (partition (Bits8 4, StringFWNB 3, Bits8 4) using none) |]
-- Bit layout: 1000 | 01100001 01100010 01100011 | 1111 -> (8, "abc", 15).
-- (Restored binding: the original line had lost everything but its comment.)
nBA_StringFW_input = map word8ToChr [134,22,38,63] -- 1000 0110 0001 0110 0010 0110 0011 1111
nBA_StringFW_result = nBA_StringFW_parseS nBA_StringFW_input
nBA_StringFW_expects = ((8,"abc",15),0,"")
nBA_StringFW_test = mkTestCase "NBA_StringFW" nBA_StringFW_expects nBA_StringFW_result
-- StringFWNB on a byte boundary: 15 bytes of 'a' (97) come back verbatim.
[pads| type NBA_StringFW_aligned = (partition (StringFWNB 15) using none) |]
nBA_StringFW_aligned_input = map word8ToChr (replicate 15 97)
nBA_StringFW_aligned_result = nBA_StringFW_aligned_parseS nBA_StringFW_aligned_input
nBA_StringFW_aligned_expects = (("aaaaaaaaaaaaaaa"),0,"")
nBA_StringFW_aligned_test = mkTestCase "NBA_StringFW_aligned" nBA_StringFW_aligned_expects nBA_StringFW_aligned_result
-- Error path: only 2 bytes available for StringFWNB 3 yields the default
-- string "XXX" and records one parse error.
[pads| type NBA_StringFW_err = (partition (StringFWNB 3) using none) |]
nBA_StringFW_err_input = map word8ToChr [99,99]
nBA_StringFW_err_result = nBA_StringFW_err_parseS nBA_StringFW_err_input
nBA_StringFW_err_expects = (("XXX"),1,"")
nBA_StringFW_err_test = mkTestCase "NBA_StringFW_err" nBA_StringFW_err_expects nBA_StringFW_err_result
-- A character-terminated string read at a non-byte-aligned offset: 2 bits,
-- chars up to (not including) 'z', the 'z' itself via CharNB, then 6 bits.
[pads| type NBA_StringC = (partition (Bits8 2, StringCNB 'z', CharNB, Bits8 6) using none) |]
-- Bit layout: 10 | 'x' 'y' (stop at 'z') | 'z' | 010101 -> (2,"xy",'z',21).
-- (Restored binding: the original line had lost everything but its comment.)
nBA_StringC_input = map word8ToChr [158,30,94,149] -- 10 011110 00 011110 01 011110 10 010101
nBA_StringC_result = nBA_StringC_parseS nBA_StringC_input
nBA_StringC_expects = ((2,"xy",'z',21),0,"")
nBA_StringC_test = mkTestCase "NBA_StringC" nBA_StringC_expects nBA_StringC_result
-- StringCNB on a byte boundary: parses "xy" and leaves the terminator 'z'
-- unconsumed in the residual input.
[pads| type NBA_StringC_aligned = (partition (StringCNB 'z') using none) |]
nBA_StringC_aligned_input = "xyz"
nBA_StringC_aligned_result = nBA_StringC_aligned_parseS nBA_StringC_aligned_input
nBA_StringC_aligned_expects = (("xy"),0,"z")
nBA_StringC_aligned_test = mkTestCase "NBA_StringC_aligned" nBA_StringC_aligned_expects nBA_StringC_aligned_result
-- Generate declarations from the programmatically built AST in padsExp
-- (defined elsewhere in this file), then check that AST against the
-- hand-written expected AST below, and that the generated parser works.
$(make_pads_declarations $ map snd padsExp)
-- Expected AST: a list of 4-char strings, one per record, terminated by EOF.
padsExp_ast =
  [ ("Halloween", PadsDeclType "Halloween" [] Nothing
      ( PList (PApp [PTycon ["StringFW"]] (Just (LitE (IntegerL 4))))
              (Just (PTycon ["EOR"]))
              (Just (LTerm (PTycon ["EOF"])))) Nothing)]
padsExp_input = "karl\njred\nmatt\nsam_"
padsExp_result = halloween_parseS padsExp_input
padsExp_expects = (["karl", "jred", "matt", "sam_"], 0, "")
-- Structural equality of the ASTs (uses assertEqual directly, not mkTestCase).
padsExp_test = TestCase (assertEqual "padsExp" padsExp padsExp_ast) -- mkTestCase "padsExp" padsExp padsExp_ast
padsExp_test2 = mkTestCase "padsExp" padsExp_expects padsExp_result
-- | Regression tests need to be run from the root directory of the pads-haskell
-- package because the data file paths in these test cases use paths relative to
-- the root.
-- Runs the full suite below and prints an HUnit summary.
test = runTestTT (TestList tests)
-- The full regression suite: every *_test binding defined in this file,
-- labelled by feature area.
tests = [ TestLabel "MyChar" myChar_test
        , TestLabel "IntPair" intPair_test
        , TestLabel "Bar" bar_test
        , TestLabel "Bar2" bar2_test
        , TestLabel "Bazr" bazr_test
        , TestLabel "MyInt" myInt_test
        , TestLabel "StrTy" strTy_test
        , TestLabel "StrTy1" strTy1_test
        , TestLabel "Baz" baz_test
        , TestLabel "Phex32FW" phex32FW_test
        , TestLabel "IntRange" test_intRange24
        , TestLabel "IntRange" test_intRange0
        , TestLabel "IntRange" test_intRange256
        , TestLabel "IntRange" test_intRangeLow
        , TestLabel "IntRange" test_intRangeHigh
        , TestLabel "IntRange" test_intRangeBad
        , TestLabel "IntRangeP" test_intRangeP24
        , TestLabel "IntRangeP" test_intRangeP0
        , TestLabel "IntRangeP" test_intRangeP256
        , TestLabel "IntRangeP" test_intRangePLow
        , TestLabel "IntRangeP" test_intRangePHigh
        , TestLabel "IntRangeP" test_intRangePBad
        , TestLabel "Record" test_Record
        , TestLabel "Id" test_IdInt
        , TestLabel "Id" test_IdStr
        , TestLabel "Id" test_IdInt2
        , TestLabel "Id" test_IdStr2
        , TestLabel "Id3" test_IdInt3
        , TestLabel "Id3" test_IdLit3
        , TestLabel "Ab_or_a" test_Ab_or_a
        , TestLabel "AB_test" test_AB_test1
        , TestLabel "AB_test" test_AB_test2
        , TestLabel "Method" test_method_get
        , TestLabel "Method" test_method_put
        , TestLabel "Method" test_method_link
        , TestLabel "Method" test_method_post
        , TestLabel "Version" test_version
        , TestLabel "Request" test_request_G
        , TestLabel "Request" test_request_B
        , TestLabel "Eor" test_eor_test
        , TestLabel "Eof" test_eof_test_G
        , TestLabel "Eof" test_eof_test_B
        , TestLabel "Opt" test_opt_test_j
        , TestLabel "Opt" test_opt_test_n
        , TestLabel "List" test_entries_nosep_noterm
        , TestLabel "List" test_entries_nosep_noterm'
        , TestLabel "List" test_entries_nosep_noterm2
        , TestLabel "List" test_evenInt
        , TestLabel "List" test_evenInts
        , TestLabel "List" test_digitListG
        , TestLabel "List" test_digitList2G
        , TestLabel "List" test_digitListB
        , TestLabel "List" test_digitListLenG
        , TestLabel "List" test_digitListLenB
        , TestLabel "List" test_digitListLenSepG
        , TestLabel "List" test_digitListLenSepB
        , TestLabel "List" test_digitListTermG
        , TestLabel "List" test_digitListTermB
        , TestLabel "List" test_digitListTermSepG
        , TestLabel "List" test_digitListTermSepB
        , TestLabel "Try" test_tryTest
        , TestLabel "Try" test_tryTestDG
        , TestLabel "Try" test_tryTestDB
        , TestLabel "Try" test_ListWithTry
        , TestLabel "Void" test_WithVoid
        , TestLabel "Void" test_voidEntry1
        , TestLabel "Void" test_voidEntry2
        , TestLabel "Void" test_voidEntry3
        , TestLabel "Switch" test_switch0
        , TestLabel "Switch" test_switch1
        , TestLabel "Switch" test_switchOther
        , TestLabel "Stringln" test_stringln
        , TestLabel "Compound" test_myData
        , TestLabel "Compound" test_hp_data
        , TestLabel "Doc" test_hp_data_file_parse
        , TestLabel "Doc" myDoc_test
        , TestLabel "Literal" litRec_test
        , TestLabel "Literal" whiteSpace_test
        , TestLabel "Literal" whiteSpace2_test
        , TestLabel "Regular Expression" rE_ty_test
        , TestLabel "Discipline" disc_test
        , TestLabel "Overlap" exxy_test
        , TestLabel "Discipline" linesFW_test
        , TestLabel "Values" vals_test
        , TestLabel "Values" vals2_test
        , TestLabel "Double" doubles_test
        , TestLabel "StringSE" stringSEs_test
        , TestLabel "StringFWs" stringFWs_test
        , TestLabel "StringESCs" stringESCs_test
        , TestLabel "StringPs" stringPs_test
        , TestLabel "PadsExp" padsExp_test
        , TestLabel "PadsExp2" padsExp_test2
        -- Bit-level (pads bit-field) tests follow.
        , TestLabel "BitBools" bitBools_test
        , TestLabel "BitBools" bitBools_test2
        , TestLabel "ArithPixel" arithPixel_test
        , TestLabel "IncompleteBitBools" incompleteBitBools_test
        , TestLabel "Mixed" mixed_test
        , TestLabel "OddWidths" oddWidths_test
        , TestLabel "LargeWidths" largeWidths_test
        , TestLabel "Enumerated" enumerated_test
        , TestLabel "EnumeratedWC" enumerated_test_wc
        , TestLabel "EnumeratedBool" enumeratedBool_test
        , TestLabel "NBA_char" nBA_char_test
        , TestLabel "NBA_char_aligned" nBA_char_aligned_test
        , TestLabel "NBA_BS" nBA_BS_test
        , TestLabel "NBA_BS_aligned" nBA_BS_aligned_test
        , TestLabel "NBA_BS_empty" nBA_BS_empty_test
        , TestLabel "NBA_StringFW" nBA_StringFW_test
        , TestLabel "NBA_StringFW_aligned" nBA_StringFW_aligned_test
        , TestLabel "NBA_StringFW_err" nBA_StringFW_err_test
        , TestLabel "NBA_StringC" nBA_StringC_test
        , TestLabel "NBA_StringC_aligned" nBA_StringC_aligned_test
        ]
| null | https://raw.githubusercontent.com/padsproj/pads-haskell/8dce6b2b28bf7d98028e67f6faa2be753a6ad691/examples/First.hs | haskell | -- PADS EXAMPLES
reference to another named type
nested tuple type.
type that consumes a line boundary.
String base types
Regular expression types
Prints error message
md is the meta-data descriptor for the underyling type.
expect_AB_test2 = (AB_test {field_AB = A},0,"") -- if backtracking
Restate after Maybe is implemented
LIST EXAMPLES
Note that 'try_parseM' does not return an error when it fails to parse a
digit (and therefore uses the default value of "0") because a "try" parser
should fail silently, similar to how the 'try' combinator works in parsec.
Bit-level functionality tests
binary 01100001 00001010
01000110 10110101
mkTestCase "padsExp" padsExp padsExp_ast
| Regression tests need to be run from the root directory of the pads-haskell
package because the data file paths in these test cases use paths relative to
the root. | # LANGUAGE FlexibleContexts , TypeFamilies , TypeSynonymInstances , TemplateHaskell , QuasiQuotes ,
MultiParamTypeClasses , FlexibleInstances , UndecidableInstances ,
DeriveDataTypeable , ScopedTypeVariables #
MultiParamTypeClasses, FlexibleInstances, UndecidableInstances,
DeriveDataTypeable, ScopedTypeVariables #-}
module First where
import Language.Pads.Padsc
import Language.Pads.Testing
import FirstPads
import Language.Haskell.TH
import System.IO.Unsafe (unsafePerformIO)
import Data.Char as Char
import qualified Data.ByteString as B
import Data.Word
import qualified Text . Regex . ByteString as BRE
ws = REd "[\t ]+|$" " "
[pads| |]
[pads| type MyChar = Char |]
myChar_result = myChar_parseS "ab"
myChar_expects = ('a', 0,"b")
myChar_test = mkTestCase "myChar" myChar_expects myChar_result
[pads| type IntPair = (Int, '|', Int) |]
intPair_result = intPair_parseS "12|23"
intPair_expects = ((12,23), 0,"")
intPair_test = mkTestCase "intPair" intPair_expects intPair_result
bar_result = bar_parseS "256,12|23;456:"
bar_expects = ((256, (12, 23), 456), 0, ":")
bar_test = mkTestCase "bar" bar_expects bar_result
bar2_result = bar2_parseS "56,23:46;29"
bar2_expects = ((56,(23,46),29), 0 ,"")
bar2_test = mkTestCase "bar2" bar2_expects bar2_result
bazr_result = bazR_parseS "33,33\n"
bazr_expects = ((33,33),0,"")
bazr_test = mkTestCase "bazr" bazr_expects bazr_result
Integer base type
myInt_result = myInt_parseS "23"
myInt_expects = (23,0,"")
myInt_test = mkTestCase "myInt" myInt_expects myInt_result
testStrLen = 2
computeLen x = x - 1
[pads| type StrTy = StringFW <| testStrLen + (computeLen 4) |> |]
inputStrTy = "catdog"
strTy_results = strTy_parseS inputStrTy
strTy_expects = ("catdo", 0,"g")
strTy_test = mkTestCase "strTy" strTy_expects strTy_results
[pads| type StrTy1 = StringC 'o' |]
strTy1_results = strTy1_parseS inputStrTy
strTy1_expects = ("catd",0,"og")
strTy1_test = mkTestCase "strTy1" strTy1_expects strTy1_results
[pads| type Baz = (StringFW 3,',',Int) |]
input_baz = "cat,123"
baz_results = baz_parseS input_baz
baz_expects = (("cat",123),0,"")
baz_test = mkTestCase "baz" baz_expects baz_results
[pads| type StrME = StringME 'a+' |]
input_strME = "aaaab"
strME_results = strME_parseS input_strME
[pads| type StrSE = StringSE <|RE "b|c"|> |]
input_strSE_1 = "aaaab"
input_strSE_2 = "aaaac"
strSE_results_1 = strSE_parseS input_strSE_1
strSE_results_2 = strSE_parseS input_strSE_2
[pads| type StrP1 (x::Int) = StringFW <|x - 1|> |]
input_strP1 = "abcd"
strP1_result = strP1_parseS 3 input_strP1
[pads| type StrHex = StringME '[0-9A-Fa-f]+' |]
input_strHex = "12abcds"
strHex_result = strHex_parseS input_strHex
Testing for Phex32FW , which is in Pads . Language .
input_hex32FW = "12bc34"
phex32FW_results = phex32FW_parseS 4 input_hex32FW
phex32FW_expects = (4796, 0, "34")
phex32FW_test = mkTestCase "phex32FW" phex32FW_expects phex32FW_results
input2_hex32FW = "00bc34"
( ( Phex32FW ( 188),Errors : 0),"34 " )
input3_hex32FW = "gbc34"
[pads| type HexPair = (Phex32FW 2, ',', Phex32FW 3) |]
input_hexpair = "aa,bbb"
hexpair_result = hexPair_parseS input_hexpair
Constrained types
[pads| type IntRange = constrain x :: Int where <| 0 <= x && x <= 256 |> |]
intRange24_input = "24"
intRange0_input = "0"
intRange256_input = "256"
intRangeLow_input = "-23"
intRangeHigh_input = "512"
intRangeBad_input = "aaa"
result_intRange24 = intRange_parseS intRange24_input
expect_intRange24 = (24,0,"")
test_intRange24 = mkTestCase "IntRange24" expect_intRange24 result_intRange24
result_intRange0 = intRange_parseS intRange0_input
expect_intRange0 = (0,0,"")
test_intRange0 = mkTestCase "IntRange0" expect_intRange0 result_intRange0
result_intRange256 = intRange_parseS intRange256_input
expect_intRange256 = (256,0,"")
test_intRange256 = mkTestCase "IntRange256" expect_intRange256 result_intRange256
result_intRangeLow = intRange_parseS intRangeLow_input
expect_intRangeLow = ((-23),1,"")
test_intRangeLow = mkTestCase "IntRangeLow" expect_intRangeLow result_intRangeLow
result_intRangeHigh = intRange_parseS intRangeHigh_input
expect_intRangeHigh = (512,1,"")
test_intRangeHigh = mkTestCase "IntRangeHigh" expect_intRangeHigh result_intRangeHigh
result_intRangeBad = intRange_parseS intRangeBad_input
expect_intRangeBad = (0,1,"aaa")
test_intRangeBad = mkTestCase "IntRangeBad" expect_intRangeBad result_intRangeBad
Note that the special variable " is in scope in the body of the predicate .
[pads| type IntRangeP (low::Int, high::Int) = constrain x :: Int where <| low <= x && x <= high && (numErrors md == 0) |> |]
result_intRangeP24 = intRangeP_parseS (0, 256) intRange24_input
expect_intRangeP24 = (24,0,"")
test_intRangeP24 = mkTestCase "IntRangeP24" expect_intRangeP24 result_intRangeP24
result_intRangeP0 = intRangeP_parseS (0, 256) intRange0_input
expect_intRangeP0 = (0,0,"")
test_intRangeP0 = mkTestCase "IntRangeP0" expect_intRangeP0 result_intRangeP0
result_intRangeP256 = intRangeP_parseS (0, 256) intRange256_input
expect_intRangeP256 = (256,0,"")
test_intRangeP256 = mkTestCase "IntRangeP256" expect_intRangeP256 result_intRangeP256
result_intRangePLow = intRangeP_parseS (0, 256) intRangeLow_input
expect_intRangePLow = ((-23), 1, "")
test_intRangePLow = mkTestCase "IntRangePLow" expect_intRangePLow result_intRangePLow
result_intRangePHigh = intRangeP_parseS (0, 256) intRangeHigh_input
expect_intRangePHigh = (512, 1,"")
test_intRangePHigh = mkTestCase "IntRangePHigh" expect_intRangePHigh result_intRangePHigh
result_intRangePBad = intRangeP_parseS (0, 256) intRangeBad_input
expect_intRangePBad = (0, 2,"aaa")
test_intRangePBad = mkTestCase "IntRangePBad" expect_intRangePBad result_intRangePBad
[pads| data Record (bound::Int) = Record
{ i1 :: Int,
',', i2 :: Int where <| i1 + i2 <= bound |> } |]
input_Record = "24,45"
result_Record = record_parseS 100 input_Record
expect_Record = (Record {i1 = 24, i2 = 45},0,"")
test_Record = mkTestCase "Record" expect_Record result_Record
[pads| data Id = Numeric Int
| Alpha (StringC ',') |]
input_IdInt = "23"
result_IdInt = id_parseS input_IdInt
expect_IdInt = (Numeric 23,0,"")
test_IdInt = mkTestCase "IdInt" expect_IdInt result_IdInt
input_IdStr = "hello"
result_IdStr = id_parseS input_IdStr
expect_IdStr = (Alpha ("hello"),0,"")
test_IdStr = mkTestCase "IdAlpha" expect_IdStr result_IdStr
[pads| data Id2 (bound::Int) =
Numeric2 (constrain n::Int where <| n <= bound |>)
| Alpha2 (StringC ',') |]
input_IdInt2 = "23"
result_IdInt2 = id2_parseS 10 input_IdInt2
expect_IdInt2 = (Alpha2 ("23"),0,"")
test_IdInt2 = mkTestCase "IdInt2" expect_IdInt2 result_IdInt2
input_IdStr2 = "hello"
result_IdStr2 = id2_parseS 10 input_IdStr2
expect_IdStr2 = (Alpha2 ("hello"),0,"")
test_IdStr2 = mkTestCase "IdAlpha2" expect_IdStr2 result_IdStr2
[pads| data Id3 = Numeric3 (IntRangeP <|(1,10)|>)
| Numeric3a Int
| Lit3 ',' |]
input_IdInt3 = "24"
result_IdInt3 = id3_parseS input_IdInt3
expect_IdInt3 = (Numeric3a (24),0,"")
test_IdInt3 = mkTestCase "IdInt3" expect_IdInt2 result_IdInt2
input_IdLit3 = ","
result_IdLit3 = id3_parseS input_IdLit3
expect_IdLit3 = (Lit3,0,"")
test_IdLit3 = mkTestCase "IdLit3" expect_IdLit3 result_IdLit3
[pads| data Ab_or_a = AB "ab" | A "a" |]
input_AB = "ab"
result_Ab_or_a = ab_or_a_parseS input_AB
expect_Ab_or_a = (AB,0,"")
test_Ab_or_a = mkTestCase "Ab_or_a" expect_Ab_or_a result_Ab_or_a
[pads| data AB_test = AB_test { field_AB :: Ab_or_a , 'b'} |]
input_AB_test1 = "abb"
result_AB_test1 = aB_test_parseS input_AB_test1
expect_AB_test1 = (AB_test {field_AB = AB},0,"")
test_AB_test1 = mkTestCase "AB_test1" expect_AB_test1 result_AB_test1
input_AB_test2 = "ab"
result_AB_test2 = aB_test_parseS input_AB_test2
expect_AB_test2 = (AB_test {field_AB = AB},1,"")
test_AB_test2 = mkTestCase "AB_test2" expect_AB_test2 result_AB_test2
[pads| data Method = GET | PUT | LINK | UNLINK | POST
data Version = Version
{"HTTP/"
, major :: Int, '.'
, minor :: Int}
|]
checkVersion :: Method -> Version -> Bool
checkVersion method version =
case method of
LINK -> major version == 1 && minor version == 0
UNLINK -> major version == 1 && minor version == 0
_ -> True
[pads| data Request = Request
{ '"', method :: Method
, ' ', url :: StringC ' '
, ' ', version :: Version where <| checkVersion method version |>
, '"'
} |]
input_method_get = "GET"
result_method_get = method_parseS input_method_get
expect_method_get = (GET,0,"")
test_method_get = mkTestCase "Method_get" expect_method_get result_method_get
input_method_put = "PUT"
result_method_put = method_parseS input_method_put
expect_method_put = (PUT,0,"")
test_method_put = mkTestCase "Method_put" expect_method_put result_method_put
input_method_link = "LINK"
result_method_link = method_parseS input_method_link
expect_method_link = (LINK,0,"")
test_method_link = mkTestCase "Method_link" expect_method_link result_method_link
input_method_post = "POST"
result_method_post = method_parseS input_method_post
expect_method_post = (POST,0,"")
test_method_post = mkTestCase "Method_post" expect_method_post result_method_post
input_version = "HTTP/1.2"
result_version = version_parseS input_version
expect_version = (Version {major = 1, minor = 2},0,"")
test_version = mkTestCase "Version" expect_version result_version
input_request_G = "\"PUT /www.google.com HTTP/1.0\""
result_request_G = request_parseS input_request_G
expect_request_G = (Request {method = PUT, url = "/www.google.com", version = Version {major = 1, minor = 0}}, 0, "")
test_request_G = mkTestCase "Request_G" expect_request_G result_request_G
input_request_B = "\"LINK /www.google.com HTTP/1.3\""
result_request_B = request_parseS input_request_B
expect_request_B = (Request {method = LINK, url = "/www.google.com", version = Version {major = 1, minor = 3}},1, "")
test_request_B = mkTestCase "Request_B" expect_request_B result_request_B
[pads| type Eor_Test = (Int, EOR, Int) |]
input_eor_test = "23\n56"
result_eor_test = eor_Test_parseS input_eor_test
expect_eor_test = ((23,56),0,"")
test_eor_test = mkTestCase "Eor_Test" expect_eor_test result_eor_test
[pads| type Eof_Test = (Int, EOR, Int, EOF) |]
input_eof_test_G = "23\n56"
result_eof_test_G = eof_Test_parseS input_eof_test_G
expect_eof_test_G = ((23,56),0,"")
test_eof_test_G = mkTestCase "Eof_TestG" expect_eof_test_G result_eof_test_G
input_eof_test_B = "23\n56ab"
result_eof_test_B = eof_Test_parseS input_eof_test_B
expect_eof_test_B = ((23,56), 1,"ab")
test_eof_test_B = mkTestCase "Eof_TestB" expect_eof_test_B result_eof_test_B
[pads| type Opt_test = (Int, '|', Maybe Int, '|', Int) |]
input_opt_test_j = "34|35|56"
result_opt_test_j = opt_test_parseS input_opt_test_j
expect_opt_test_j = ((34,Just 35,56),0,"")
test_opt_test_j = mkTestCase "Opt_test_j" expect_opt_test_j result_opt_test_j
input_opt_test_n = "34||56"
result_opt_test_n = opt_test_parseS input_opt_test_n
expect_opt_test_n = ((34,Nothing,56),0,"")
test_opt_test_n = mkTestCase "Opt_test_n" expect_opt_test_n result_opt_test_n
[pads| type Entries_nosep_noterm = [StringFW 3] |]
input_entries_nosep_noterm = "123456789"
result_entries_nosep_noterm = entries_nosep_noterm_parseS input_entries_nosep_noterm
expect_entries_nosep_noterm = (["123","456","789"],0,"")
test_entries_nosep_noterm = mkTestCase "NoSep_NoTerm" expect_entries_nosep_noterm result_entries_nosep_noterm
input_entries_nosep_noterm' = "1234567890"
result_entries_nosep_noterm' = entries_nosep_noterm_parseS input_entries_nosep_noterm'
expect_entries_nosep_noterm' = (["123","456","789"],0,"0")
test_entries_nosep_noterm' = mkTestCase "NoSep_NoTerm'" expect_entries_nosep_noterm' result_entries_nosep_noterm'
[pads| type Entries_nosep_noterm2 = [Char] |]
input_entries_nosep_noterm2 = ""
result_entries_nosep_noterm2 = entries_nosep_noterm2_parseS input_entries_nosep_noterm2
expect_entries_nosep_noterm2 = ([],0,"")
test_entries_nosep_noterm2 = mkTestCase "NoSep_NoTerm2" expect_entries_nosep_noterm2 result_entries_nosep_noterm2
[pads| type EvenInt = constrain x :: Digit where <| x `mod` 2 == 0 |>
type EvenInts = [EvenInt] |]
input_evenInts = "2465"
result_evenInt = evenInt_parseS input_evenInts
expect_evenInt = ( 2,0,"465")
test_evenInt = mkTestCase "EvenInt" expect_evenInt result_evenInt
result_evenInts = evenInts_parseS input_evenInts
expect_evenInts = ([2,4,6],0,"5")
test_evenInts = mkTestCase "EvenInts" expect_evenInts result_evenInts
[pads| type DigitList = [Digit | ','] |]
input_digitListG = "1,2,3"
input_digitList2G = "1,2,3|fed"
input_digitListB = "1,b,3"
result_digitListG = digitList_parseS input_digitListG
expect_digitListG = ([1,2,3],0,"")
test_digitListG = mkTestCase "DigitListG" expect_digitListG result_digitListG
result_digitList2G = digitList_parseS input_digitList2G
expect_digitList2G = ([1,2,3],0,"|fed")
test_digitList2G = mkTestCase "DigitList2G" expect_digitList2G result_digitList2G
result_digitListB = digitList_parseS input_digitListB
expect_digitListB = ([1],0,",b,3")
test_digitListB = mkTestCase "DigitListB" expect_digitListB result_digitListB
[pads| type DigitListLen (x::Int) = [Digit] length <|x + 1 |> |]
input_digitListLenG = "123456"
input_digitListLenB = "12a456"
result_digitListLenG = digitListLen_parseS 4 input_digitListLenG
expect_digitListLenG = ([1,2,3,4,5],0,"6")
test_digitListLenG = mkTestCase "DigitListLenG" expect_digitListLenG result_digitListLenG
result_digitListLenB = digitListLen_parseS 4 input_digitListLenB
expect_digitListLenB = ([1,2,0,4,5],1 ,"6")
test_digitListLenB = mkTestCase "DigitListLenB" expect_digitListLenB result_digitListLenB
[pads| type DigitListLenSep (x::Int) = [Digit | "ab" ] length <|x + 1|> |]
input_digitListLenSepG = "1ab2ab3ab4ab5ab6ab7ab"
input_digitListLenSepB = "1ab2ab3abDab5ab6ab7ab"
result_digitListLenSepG = digitListLenSep_parseS 4 input_digitListLenSepG
expect_digitListLenSepG = ([1,2,3,4,5],0,"ab6ab7ab")
test_digitListLenSepG = mkTestCase "DigitListLenSepG" expect_digitListLenSepG result_digitListLenSepG
result_digitListLenSepB = digitListLenSep_parseS 4 input_digitListLenSepB
expect_digitListLenSepB = ([1,2,3,0,5],1,"ab6ab7ab")
test_digitListLenSepB = mkTestCase "DigitListLenSepB" expect_digitListLenSepB result_digitListLenSepB
[pads| type DigitListTerm = [Digit] terminator EOR|]
input_digitListTermG = "12345\nhello"
result_digitListTermG = digitListTerm_parseS input_digitListTermG
expect_digitListTermG = ([1,2,3,4,5],0,"hello")
test_digitListTermG = mkTestCase "DigitListTermG" expect_digitListTermG result_digitListTermG
input_digitListTermB = "12345,h"
result_digitListTermB = digitListTerm_parseS input_digitListTermB
expect_digitListTermB = ([1,2,3,4,5,0,0],2,"")
test_digitListTermB = mkTestCase "DigitListTermB" expect_digitListTermB result_digitListTermB
[pads| type DigitListTermSep = [Digit | '|' ] terminator ';' |]
input_digitListTermSepG = "1|2|3|4|5|6;hello"
result_digitListTermSepG = digitListTermSep_parseS input_digitListTermSepG
expect_digitListTermSepG = ([1,2,3,4,5,6], 0,"hello")
test_digitListTermSepG = mkTestCase "digitListTermSepG" expect_digitListTermSepG result_digitListTermSepG
input_digitListTermSepB = "1|2|3|4|56;hello"
result_digitListTermSepB = digitListTermSep_parseS input_digitListTermSepB
expect_digitListTermSepB = ([1,2,3,4,5],1,"hello")
test_digitListTermSepB = mkTestCase "digitListTermSepB" expect_digitListTermSepB result_digitListTermSepB
[pads| type TryTest = (Try Char, StringFW 3) |]
input_tryTest = "abc123"
result_tryTest = tryTest_parseS input_tryTest
expect_tryTest = (('a',"abc"),0,"123")
test_tryTest = mkTestCase "tryTest" expect_tryTest result_tryTest
[pads| type TryTestD = (Try Digit, StringFW 3) |]
input_tryTestDG = "123abc"
result_tryTestDG = tryTestD_parseS input_tryTestDG
expect_tryTestDG = ((1,"123"),0,"abc")
test_tryTestDG = mkTestCase "tryTestDG" expect_tryTestDG result_tryTestDG
input_tryTestDB = "abc123"
result_tryTestDB = tryTestD_parseS input_tryTestDB
expect_tryTestDB = ((0,"abc"),0,"123")
test_tryTestDB = mkTestCase "tryTestDB" expect_tryTestDB result_tryTestDB
( ( TryTestD ( 0,"abc " ) ,
( Errors : 1 Encountered a when expecting Digit . at : Line : 0 , Offset :
0,(Errors : 1 Encountered a when expecting Digit . at : Line : 0 , Offset :
0,Errors : 0))),"123 " )
XXX : we are getting a repeat error message because of change to how errors are
propagated . Need to work on cleaning up error reporting .
(Errors: 1 Encountered a when expecting Digit. at: Line: 0, Offset:
0,(Errors: 1 Encountered a when expecting Digit. at: Line: 0, Offset:
0,Errors: 0))),"123")
XXX: we are getting a repeat error message because of change to how errors are
propagated. Need to work on cleaning up error reporting.
-}
[pads| type ListWithTry = ([Char] terminator (Try Digit), Digit) |]
input_ListWithTry = "cat123"
result_ListWithTry = listWithTry_parseS input_ListWithTry
expect_ListWithTry = ((['c', 'a', 't'],1),0,"23")
test_ListWithTry = mkTestCase "ListWithTry" expect_ListWithTry result_ListWithTry
[pads| type WithVoid = (Char, ',', Void, '|') |]
input_WithVoid = "a,|rest"
result_WithVoid = withVoid_parseS input_WithVoid
expect_WithVoid = ('a',0,"rest")
test_WithVoid = mkTestCase "WithVoid" expect_WithVoid result_WithVoid
[pads| data VoidOpt = PDigit Digit | Pcolor "red" | Pnothing Void
type VoidEntry = (VoidOpt, StringFW 3) |]
input_voidEntry1 = "9abcdef"
result_voidEntry1 = voidEntry_parseS input_voidEntry1
expect_voidEntry1 = ((PDigit 9,"abc"),0,"def")
test_voidEntry1 = mkTestCase "VoidEntry1" expect_voidEntry1 result_voidEntry1
input_voidEntry2 = "redabcdef"
result_voidEntry2 = voidEntry_parseS input_voidEntry2
expect_voidEntry2 = ((Pcolor,"abc"),0,"def")
test_voidEntry2 = mkTestCase "VoidEntry2" expect_voidEntry2 result_voidEntry2
input_voidEntry3 = "abcdef"
result_voidEntry3 = voidEntry_parseS input_voidEntry3
expect_voidEntry3 = ((Pnothing,"abc"),0,"def")
test_voidEntry3 = mkTestCase "VoidEntry3" expect_voidEntry3 result_voidEntry3
[pads| data Switch (which :: Int) =
case <| which |> of
0 -> Even Int where <| even `mod` 2 == 0 |>
| 1 -> Comma ','
| otherwise -> Missing Void |]
input_switch0 = "2hello"
input_switch1 = ",hello"
input_switchOther = "hello"
result_switch0 = switch_parseS 0 input_switch0
expect_switch0 = (Even (2),0,"hello")
test_switch0 = mkTestCase "switch0" expect_switch0 result_switch0
result_switch1 = switch_parseS 1 input_switch1
expect_switch1 = (Comma,0,"hello")
test_switch1 = mkTestCase "switch1" expect_switch1 result_switch1
result_switchOther = switch_parseS 2 input_switchOther
expect_switchOther = (Missing,0,"hello")
test_switchOther = mkTestCase "switchOther" expect_switchOther result_switchOther
result_stringln = stringLn_parseS "hello\ngoodbye"
expect_stringln = ("hello",0,"\ngoodbye")
test_stringln = mkTestCase "stringln" expect_stringln result_stringln
[pads| data MyBody (which::Int) =
case <| which |> of
0 -> First Int
| 1 -> Second (StringC ',')
| otherwise -> Other Void
data MyEntry = MyEntry
{ header :: Int, ','
, body :: MyBody header, ','
, trailer :: Char}
type MyData = [Line MyEntry] terminator EOF |]
input_myData = "0,23,a\n1,hello,b\n2,,c\n"
result_myData = myData_parseS input_myData
expect_myData = ([MyEntry {header = 0, body = First (23), trailer = 'a'},
MyEntry {header = 1, body = Second ("hello"), trailer = 'b'},
MyEntry {header = 2, body = Other, trailer = 'c'}],0, "")
test_myData = mkTestCase "MyData" expect_myData result_myData
[pads| data HP = HP { student_num :: Int, ',',
student_name :: StringFW student_num }
type HP_data = [Line HP] terminator EOF |]
input_hp_data = "8,Hermione\n3,Ron\n5,Harry\n"
result_hp_data = hP_data_parseS input_hp_data
expect_hp_data = ([HP {student_num = 8, student_name = "Hermione"},
HP {student_num = 3, student_name = "Ron"},
HP {student_num = 5, student_name = "Harry"}], 0, "")
test_hp_data = mkTestCase "HP Data" expect_hp_data result_hp_data
test_file = "examples/data/test_file"
result_hp_data_file_parse :: (HP_data, HP_data_md) = unsafePerformIO $ parseFileWith hP_data_parseM test_file
expect_hp_data_file_parse =
( [HP {student_num = 8, student_name = "Hermione"},
HP {student_num = 3, student_name = "Ron"},
HP {student_num = 5, student_name = "Harry"}], 0)
test_hp_data_file_parse = mkFileTestCase "HP file" expect_hp_data_file_parse result_hp_data_file_parse
strToBS = B.pack . (map chrToWord8)
[pads| newtype MyDoc = MyDoc Text |]
myDoc_input_file = "examples/data/test_file"
myDoc_result :: (MyDoc, MyDoc_md) = unsafePerformIO $ parseFile myDoc_input_file
myDoc_expects = (MyDoc (Text (strToBS "8,Hermione\n3,Ron\n5,Harry\n")),0)
myDoc_test = mkFileTestCase "myDoc" myDoc_expects myDoc_result
acomma = ","
[pads| data LitRec = LitRec { fstField :: Int, acomma, sndField :: Int} |]
litRec_input = "12,34"
litRec_result = litRec_parseS litRec_input
litRec_expects = (LitRec {fstField = 12, sndField = 34},0,"")
litRec_test = mkTestCase "Haskell identifier literal" litRec_expects litRec_result
[pads| type WhiteSpace = (Int, '[ \t]+', Int) |]
whiteSpace_input = "12 34"
whiteSpace_result = whiteSpace_parseS whiteSpace_input
whiteSpace_expects = ((12,34),0,"")
whiteSpace_test = mkTestCase "regular expression literal" whiteSpace_expects whiteSpace_result
[pads| type WhiteSpace2 = (Int, ws, Int) |]
whiteSpace2_input = "12 34"
whiteSpace2_result = whiteSpace2_parseS whiteSpace2_input
whiteSpace2_expects = ((12,34),0,"")
whiteSpace2_test = mkTestCase "Haskell expression regular expression literal" whiteSpace2_expects whiteSpace2_result
[pads| type RE_ty = (StringME '[tod]', ws, StringME 'a+') |]
rE_ty_input = "t aaaa"
rE_ty_result = rE_ty_parseS rE_ty_input
rE_ty_expects = (("t","aaaa"),0,"")
rE_ty_test = mkTestCase "regular expression abbreviation for StringME" rE_ty_expects rE_ty_result
[pads| type Disc = (Int, EOR, Int, EOR, (partition (Int, EOR, Int, EOR) using windows), Int, EOR) |]
disc_input = "1\n2\n3\r\n4\r\n5\n"
disc_result = disc_parseS disc_input
disc_expects = ((1,2,(3,4),5),0,"")
disc_test = mkTestCase "multiple record disciplines" disc_expects disc_result
[pads| data Exxy a = Exxy {exxy :: Int, aa :: a}
type ExxyInt = Exxy Char |]
exxy_input = "32635def"
exxy_result = exxyInt_parseS exxy_input
exxy_expects = (Exxy {exxy = 32635, aa = 'd'},0,"ef")
exxy_test = mkTestCase "label overlap" exxy_expects exxy_result
[pads| type OneLine = [Char] terminator EOR
type Lines = [OneLine] terminator EOF
type LinesFW = partition Lines using <| bytes 3 |>
|]
linesFW_input = "123456789"
linesFW_result = linesFW_parseS linesFW_input
linesFW_expects = (["123","456","789"],0,"")
linesFW_test = mkTestCase "fixed-width record discpline" linesFW_expects linesFW_result
[pads| type Strs = [StringSE ws | ws] terminator EOR |]
strs_input = "0.700264\n"
strs_result = strs_parseS strs_input
[pads| data Vals = Vals { vv :: Int, uu = value <| even vv |> :: Bool, ww::Char} |]
vals_input = "123x"
vals_result = vals_parseS vals_input
vals_expects = (Vals {vv=123,uu=False,ww='x'},0,"")
vals_test = mkTestCase "values" vals_expects vals_result
[pads| data Vals2 = Vals2 { vv2 :: (Int,",",Int),
uu2 = value <| fst vv2 `mod` snd vv2 == 0 |> :: Bool,
ww2 :: Char} |]
vals2_input = "12,3x"
vals2_result = vals2_parseS vals2_input
vals2_expects = (Vals2 {vv2=(12,3),uu2=True,ww2='x'},0,"")
vals2_test = mkTestCase "values" vals2_expects vals2_result
[ pads| data Vals3 a = Vals3 { vv3 : : ( Int,",",Int ) ,
uu3 = value < | [ ] | > : : [ a ] ,
ww3 : : } | ]
vals3_input = " 12,3x "
vals3_result = vals3_parseS vals3_input
vals3_expects = ( Vals3 { vv3=(12,3),uu3=[],ww3='x'},0 , " " )
vals3_test = mkTestCase " values " vals3_expects vals3_result
[pads| data Vals3 a = Vals3 { vv3 :: (Int,",",Int),
uu3 = value <| [] |> :: [a],
ww3 :: Char} |]
vals3_input = "12,3x"
vals3_result = vals3_parseS vals3_input
vals3_expects = (Vals3 {vv3=(12,3),uu3=[],ww3='x'},0,"")
vals3_test = mkTestCase "values" vals3_expects vals3_result
-}
[pads| type Doubles = [Double | EOR] terminator EOF |]
doubles_input = "12.34\n1\n-12.0\n1.3e4\n1.2e-2"
doubles_result = doubles_parseS doubles_input
doubles_expects = ([12.34,1.0,-12.0,13000.0,1.2e-2],0,"")
doubles_test = mkTestCase "doubles" doubles_expects doubles_result
[pads| type StringSEs = [StringSE <|RE "$"|> | EOR] terminator EOF |]
stringSEs_input = "12.34\n1\n-12.0\n1.3e4\n1.2e-2"
stringSEs_result = stringSEs_parseS stringSEs_input
stringSEs_expects = (["12.34","1","-12.0","1.3e4","1.2e-2"],0,"")
stringSEs_test = mkTestCase "stringSEs" stringSEs_expects stringSEs_result
[pads| type StringFWs = [StringFW 3| EOR] terminator EOF |]
stringFWs_input = "abc\nabcd\nab\nabc"
stringFWs_result = stringFWs_parseS stringFWs_input
stringFWs_expects = (["abc","abc","XXX","abc"],2,"")
stringFWs_test = mkTestCase "stringFWs" stringFWs_expects stringFWs_result
[pads| type StringESCs = [(StringESC <| ('!', ";,") |>, '[;,]') | EOR] terminator EOF |]
stringESCs_input = "abc\na;\nb,\na!;bc,\na!,cd\nhe!"
stringESCs_result = stringESCs_parseS stringESCs_input
stringESCs_expects = (["abc","a","b","a;bc","a,cd","he!"],4, "")
stringESCs_test = mkTestCase "stringESCs" stringESCs_expects stringESCs_result
[pads| type StringPs = [StringP Char.isDigit | EOR] terminator EOF |]
stringPs_input = "123\na\n123a"
stringPs_result = stringPs_parseS stringPs_input
stringPs_expects = (["123","","123"],2, "")
stringPs_test = mkTestCase "stringPs" stringPs_expects stringPs_result
[pads| type BitBools = (partition [BitBool] using none) |]
-- inputs reconstructed from the expected bit lists below:
-- 'a' = binary 01100001; '\n' = binary 00001010  -- TODO(review): verify against upstream
bitBools_input = "a"
bitBools_input2 = "a\n"
bitBools_result = bitBools_parseS bitBools_input
bitBools_expects = ([False,True,True,False,False,False,False,True], 0, "")
bitBools_test = mkTestCase "bitBools" bitBools_expects bitBools_result
bitBools_result2 = bitBools_parseS bitBools_input2
bitBools_expects2 = ([False,True,True,False,False,False,False,True,
False,False,False,False,True,False,True,False], 0, "")
bitBools_test2 = mkTestCase "bitBools" bitBools_expects2 bitBools_result2
[pads| type IncompleteBitBools = (partition (BitBool,
BitBool,
BitBool) using none) |]
incompleteBitBools_input = "4"
incompleteBitBools_result = incompleteBitBools_parseS incompleteBitBools_input
incompleteBitBools_expects = ((False,False,True), 0, "4")
incompleteBitBools_test = mkTestCase "incompleteBitBools"
incompleteBitBools_expects
incompleteBitBools_result
[pads| type ArithPixel = (partition (Bits16 9,
Bits8 5,
Bits8 5,
Bits8 5,
Bits8 4,
Bits8 4) using none) |]
arithPixel_input = map word8ToChr [136,114,32,0]
arithPixel_result = arithPixel_parseS arithPixel_input
arithPixel_expects = ((272,28,17,0,0,0), 0, "")
arithPixel_test = mkTestCase "arithPixel" arithPixel_expects arithPixel_result
[pads| type Mixed = (partition (StringC ' ',
' ',
Bits8 4,
BitBool,
Bits8 3,
Char) using none) |]
mixed_input = "Hello " ++ (map word8ToChr [74]) ++ "c"
mixed_result = mixed_parseS mixed_input
mixed_expects = (("Hello",4,True,2,'c'), 0, "")
mixed_test = mkTestCase "mixed" mixed_expects mixed_result
[pads| type OddWidths = (partition (Bits32 19,
Bits64 39,
Bits8 1,
Bits8 5) using none) |]
oddWidths_input = map word8ToChr [104,46,174,3,185,8,6,158]
oddWidths_result = oddWidths_parseS oddWidths_input
oddWidths_expects = ((213365,240768000026,0,30), 0, "")
oddWidths_test = mkTestCase "oddWidths" oddWidths_expects oddWidths_result
[pads| type LargeWidths = (partition (Bits8 7,
BitField 89,
BitField 65) using none) |]
largeWidths_input = map word8ToChr [1,0,0,0,0,0,0,0,0,0,0,1,128,0,0,0,0,0,0,0,128]
largeWidths_result = largeWidths_parseS largeWidths_input
largeWidths_expects = ((0,309485009821345068724781057,18446744073709551617), 0, map word8ToChr [128])
largeWidths_test = mkTestCase "largeWidths" largeWidths_expects largeWidths_result
[pads| data EnumType (x :: Bits8) = case x of 0 -> ZERO {}
| 1 -> ONE {}
| 2 -> TWO {}
| _ -> OTHER {}
data Enumerate = Enumerate {x :: Bits8 3,
Bits8 5,
y :: EnumType x}
type Enumerated = (partition Enumerate using none) |]
enumerated_input = map word8ToChr [64]
enumerated_result = enumerated_parseS enumerated_input
enumerated_expects = (Enumerate {x = 2, y = TWO}, 0, "")
enumerated_test = mkTestCase "Enumerated" enumerated_expects enumerated_result
enumerated_input_wc = map word8ToChr [255]
enumerated_result_wc = enumerated_parseS enumerated_input_wc
enumerated_expects_wc = (Enumerate {x = 7, y = OTHER}, 0, "")
enumerated_test_wc = mkTestCase "EnumeratedWC" enumerated_expects_wc enumerated_result_wc
[pads| data EnumTypeBool (x' :: BitBool) = case x' of True -> ON {}
| False -> OFF {}
data EnumerateBool = EnumerateBool {Bits8 7,
x' :: BitBool,
y' :: EnumTypeBool x'}
type EnumeratedBool = (partition EnumerateBool using none) |]
enumeratedBool_input = map word8ToChr [1]
enumeratedBool_result = enumeratedBool_parseS enumeratedBool_input
enumeratedBool_expects = (EnumerateBool {x' = True, y' = ON}, 0, "")
enumeratedBool_test = mkTestCase "EnumeratedBool" enumeratedBool_expects enumeratedBool_result
[pads| type NBA_char = (partition (Bits8 3, CharNB, Bits8 5) using none) |]
nBA_char_result = nBA_char_parseS nBA_char_input
-- input bits: 010 00110101 10101 packed as bytes [70,181]; expected parse is
-- (Bits8 3 = 2, CharNB = chr 53 = '5', Bits8 5 = 21)  -- TODO(review): verify
nBA_char_input = map word8ToChr [70,181]
nBA_char_expects = ((2, word8ToChr 53, 21), 0, "")
nBA_char_test = mkTestCase "NBA_char" nBA_char_expects nBA_char_result
[pads| type NBA_char_aligned = (partition (CharNB, CharNB) using none)|]
nBA_char_aligned_input = map word8ToChr [70,181]
nBA_char_aligned_result = nBA_char_aligned_parseS nBA_char_aligned_input
nBA_char_aligned_expects = ((word8ToChr 70, word8ToChr 181), 0, "")
nBA_char_aligned_test = mkTestCase "NBA_char_aligned" nBA_char_aligned_expects nBA_char_aligned_result
[pads| type NBA_BS = (partition (Bits8 6, BytesNB 2, Bits8 2) using none) |]
-- input bits: 01100011 11101010 00000011 (bytes [99,234,3]);
-- 6 bits = 24, next 16 bits = bytes [250,128], last 2 bits = 3
nBA_BS_input = map word8ToChr [99,234,3]
nBA_BS_result = nBA_BS_parseS nBA_BS_input
nBA_BS_expects = ((24, B.pack [250,128], 3), 0, "")
nBA_BS_test = mkTestCase "NBA_BS" nBA_BS_expects nBA_BS_result
[pads| type NBA_BS_aligned = (partition (BytesNB 4) using none) |]
nBA_BS_aligned_input = map word8ToChr [9,8,7,255]
nBA_BS_aligned_result = nBA_BS_aligned_parseS nBA_BS_aligned_input
nBA_BS_aligned_expects = ((B.pack [9,8,7,255]), 0, "")
nBA_BS_aligned_test = mkTestCase "NBA_BS_aligned" nBA_BS_aligned_expects nBA_BS_aligned_result
[pads| type NBA_BS_empty = (partition (BytesNB 1) using none) |]
nBA_BS_empty_input = ""
nBA_BS_empty_result = nBA_BS_empty_parseS nBA_BS_empty_input
nBA_BS_empty_expects = ((B.singleton 0), 1, "")
nBA_BS_empty_test = mkTestCase "NBA_BS_empty" nBA_BS_empty_expects nBA_BS_empty_result
[pads| type NBA_StringFW = (partition (Bits8 4, StringFWNB 3, Bits8 4) using none) |]
-- input bits: 1000 01100001 01100010 01100011 1111 (bytes [134,22,38,63]);
-- 4 bits = 8, three chars = "abc", 4 bits = 15
nBA_StringFW_input = map word8ToChr [134,22,38,63]
nBA_StringFW_result = nBA_StringFW_parseS nBA_StringFW_input
nBA_StringFW_expects = ((8,"abc",15),0,"")
nBA_StringFW_test = mkTestCase "NBA_StringFW" nBA_StringFW_expects nBA_StringFW_result
[pads| type NBA_StringFW_aligned = (partition (StringFWNB 15) using none) |]
nBA_StringFW_aligned_input = map word8ToChr (replicate 15 97)
nBA_StringFW_aligned_result = nBA_StringFW_aligned_parseS nBA_StringFW_aligned_input
nBA_StringFW_aligned_expects = (("aaaaaaaaaaaaaaa"),0,"")
nBA_StringFW_aligned_test = mkTestCase "NBA_StringFW_aligned" nBA_StringFW_aligned_expects nBA_StringFW_aligned_result
[pads| type NBA_StringFW_err = (partition (StringFWNB 3) using none) |]
nBA_StringFW_err_input = map word8ToChr [99,99]
nBA_StringFW_err_result = nBA_StringFW_err_parseS nBA_StringFW_err_input
nBA_StringFW_err_expects = (("XXX"),1,"")
nBA_StringFW_err_test = mkTestCase "NBA_StringFW_err" nBA_StringFW_err_expects nBA_StringFW_err_result
[pads| type NBA_StringC = (partition (Bits8 2, StringCNB 'z', CharNB, Bits8 6) using none) |]
-- input bits: 10 01111000 01111001 01111010 010101 (bytes [158,30,94,149]);
-- 2 bits = 2, "xy" up to the 'z' terminator, CharNB = 'z', 6 bits = 21
nBA_StringC_input = map word8ToChr [158,30,94,149]
nBA_StringC_result = nBA_StringC_parseS nBA_StringC_input
nBA_StringC_expects = ((2,"xy",'z',21),0,"")
nBA_StringC_test = mkTestCase "NBA_StringC" nBA_StringC_expects nBA_StringC_result
[pads| type NBA_StringC_aligned = (partition (StringCNB 'z') using none) |]
nBA_StringC_aligned_input = "xyz"
nBA_StringC_aligned_result = nBA_StringC_aligned_parseS nBA_StringC_aligned_input
nBA_StringC_aligned_expects = (("xy"),0,"z")
nBA_StringC_aligned_test = mkTestCase "NBA_StringC_aligned" nBA_StringC_aligned_expects nBA_StringC_aligned_result
$(make_pads_declarations $ map snd padsExp)
padsExp_ast =
[ ("Halloween", PadsDeclType "Halloween" [] Nothing
( PList (PApp [PTycon ["StringFW"]] (Just (LitE (IntegerL 4))))
(Just (PTycon ["EOR"]))
(Just (LTerm (PTycon ["EOF"])))) Nothing)]
padsExp_input = "karl\njred\nmatt\nsam_"
padsExp_result = halloween_parseS padsExp_input
padsExp_expects = (["karl", "jred", "matt", "sam_"], 0, "")
padsExp_test2 = mkTestCase "padsExp" padsExp_expects padsExp_result
test = runTestTT (TestList tests)
tests = [ TestLabel "MyChar" myChar_test
, TestLabel "IntPair" intPair_test
, TestLabel "Bar" bar_test
, TestLabel "Bar2" bar2_test
, TestLabel "Bazr" bazr_test
, TestLabel "MyInt" myInt_test
, TestLabel "StrTy" strTy_test
, TestLabel "StrTy1" strTy1_test
, TestLabel "Baz" baz_test
, TestLabel "Phex32FW" phex32FW_test
, TestLabel "IntRange" test_intRange24
, TestLabel "IntRange" test_intRange0
, TestLabel "IntRange" test_intRange256
, TestLabel "IntRange" test_intRangeLow
, TestLabel "IntRange" test_intRangeHigh
, TestLabel "IntRange" test_intRangeBad
, TestLabel "IntRangeP" test_intRangeP24
, TestLabel "IntRangeP" test_intRangeP0
, TestLabel "IntRangeP" test_intRangeP256
, TestLabel "IntRangeP" test_intRangePLow
, TestLabel "IntRangeP" test_intRangePHigh
, TestLabel "IntRangeP" test_intRangePBad
, TestLabel "Record" test_Record
, TestLabel "Id" test_IdInt
, TestLabel "Id" test_IdStr
, TestLabel "Id" test_IdInt2
, TestLabel "Id" test_IdStr2
, TestLabel "Id3" test_IdInt3
, TestLabel "Id3" test_IdLit3
, TestLabel "Ab_or_a" test_Ab_or_a
, TestLabel "AB_test" test_AB_test1
, TestLabel "AB_test" test_AB_test2
, TestLabel "Method" test_method_get
, TestLabel "Method" test_method_put
, TestLabel "Method" test_method_link
, TestLabel "Method" test_method_post
, TestLabel "Version" test_version
, TestLabel "Request" test_request_G
, TestLabel "Request" test_request_B
, TestLabel "Eor" test_eor_test
, TestLabel "Eof" test_eof_test_G
, TestLabel "Eof" test_eof_test_B
, TestLabel "Opt" test_opt_test_j
, TestLabel "Opt" test_opt_test_n
, TestLabel "List" test_entries_nosep_noterm
, TestLabel "List" test_entries_nosep_noterm'
, TestLabel "List" test_entries_nosep_noterm2
, TestLabel "List" test_evenInt
, TestLabel "List" test_evenInts
, TestLabel "List" test_digitListG
, TestLabel "List" test_digitList2G
, TestLabel "List" test_digitListB
, TestLabel "List" test_digitListLenG
, TestLabel "List" test_digitListLenB
, TestLabel "List" test_digitListLenSepG
, TestLabel "List" test_digitListLenSepB
, TestLabel "List" test_digitListTermG
, TestLabel "List" test_digitListTermB
, TestLabel "List" test_digitListTermSepG
, TestLabel "List" test_digitListTermSepB
, TestLabel "Try" test_tryTest
, TestLabel "Try" test_tryTestDG
, TestLabel "Try" test_tryTestDB
, TestLabel "Try" test_ListWithTry
, TestLabel "Void" test_WithVoid
, TestLabel "Void" test_voidEntry1
, TestLabel "Void" test_voidEntry2
, TestLabel "Void" test_voidEntry3
, TestLabel "Switch" test_switch0
, TestLabel "Switch" test_switch1
, TestLabel "Switch" test_switchOther
, TestLabel "Stringln" test_stringln
, TestLabel "Compound" test_myData
, TestLabel "Compound" test_hp_data
, TestLabel "Doc" test_hp_data_file_parse
, TestLabel "Doc" myDoc_test
, TestLabel "Literal" litRec_test
, TestLabel "Literal" whiteSpace_test
, TestLabel "Literal" whiteSpace2_test
, TestLabel "Regular Expression" rE_ty_test
, TestLabel "Discipline" disc_test
, TestLabel "Overlap" exxy_test
, TestLabel "Discipline" linesFW_test
, TestLabel "Values" vals_test
, TestLabel "Values" vals2_test
, TestLabel "Double" doubles_test
, TestLabel "StringSE" stringSEs_test
, TestLabel "StringFWs" stringFWs_test
, TestLabel "StringESCs" stringESCs_test
, TestLabel "StringPs" stringPs_test
, TestLabel "PadsExp" padsExp_test
, TestLabel "PadsExp2" padsExp_test2
, TestLabel "BitBools" bitBools_test
, TestLabel "BitBools" bitBools_test2
, TestLabel "ArithPixel" arithPixel_test
, TestLabel "IncompleteBitBools" incompleteBitBools_test
, TestLabel "Mixed" mixed_test
, TestLabel "OddWidths" oddWidths_test
, TestLabel "LargeWidths" largeWidths_test
, TestLabel "Enumerated" enumerated_test
, TestLabel "EnumeratedWC" enumerated_test_wc
, TestLabel "EnumeratedBool" enumeratedBool_test
, TestLabel "NBA_char" nBA_char_test
, TestLabel "NBA_char_aligned" nBA_char_aligned_test
, TestLabel "NBA_BS" nBA_BS_test
, TestLabel "NBA_BS_aligned" nBA_BS_aligned_test
, TestLabel "NBA_BS_empty" nBA_BS_empty_test
, TestLabel "NBA_StringFW" nBA_StringFW_test
, TestLabel "NBA_StringFW_aligned" nBA_StringFW_aligned_test
, TestLabel "NBA_StringFW_err" nBA_StringFW_err_test
, TestLabel "NBA_StringC" nBA_StringC_test
, TestLabel "NBA_StringC_aligned" nBA_StringC_aligned_test
]
|
4191d742ebc4355432b062a669faadafbac83bfbb25ccf1543de8f08e33f0fc7 | sunng87/rigui | math.cljc | (ns rigui.math)
(defn pow
  "Cross-platform exponentiation: base raised to the power p, as a double.
  Dispatches to java.lang.Math on the JVM and js/Math in ClojureScript."
  [base p]
  #?(:clj (Math/pow base p)
     :cljs (.pow js/Math base p)))
(defn log
  "Natural logarithm (base e) of v, as a double.
  Note: the argument is the value whose logarithm is taken, not a
  logarithm base -- the original parameter name `base` was misleading.
  Dispatches to java.lang.Math on the JVM and js/Math in ClojureScript."
  [v]
  #?(:clj (Math/log v)
     :cljs (.log js/Math v)))
(defn floor
  "Largest mathematical integer (as a double) less than or equal to v.
  Dispatches to java.lang.Math on the JVM and js/Math in ClojureScript."
  [v]
  #?(:clj (Math/floor v)
     :cljs (.floor js/Math v)))
| null | https://raw.githubusercontent.com/sunng87/rigui/1c695b8259dc8dd4a527298e8dbb9e0f738e517c/src/rigui/math.cljc | clojure | (ns rigui.math)
(defn pow [base p]
#?(:clj (Math/pow base p)
:cljs (.pow js/Math base p)))
(defn log [base]
#?(:clj (Math/log base)
:cljs (.log js/Math base)))
(defn floor [v]
#?(:clj (Math/floor v)
:cljs (.floor js/Math v)))
| |
7381fbcaa90fefb563095124c18971e8716eecce4d17c6e2f9219c413ed0091a | OCamlPro/alt-ergo | polynome.mli | (******************************************************************************)
(* *)
(* The Alt-Ergo theorem prover *)
Copyright ( C ) 2006 - 2013
(* *)
(* *)
(* *)
CNRS - INRIA - Universite Paris Sud
(* *)
This file is distributed under the terms of the Apache Software
(* License version 2.0 *)
(* *)
(* ------------------------------------------------------------------------ *)
(* *)
Alt - Ergo : The SMT Solver For Software Verification
Copyright ( C ) 2013 - 2018
(* *)
This file is distributed under the terms of the Apache Software
(* License version 2.0 *)
(* *)
(******************************************************************************)
(* Raised by arithmetic operations on polynomials -- presumably by [div]
   and [modulo] when the result is not numeric or the divisor may be
   zero.  NOTE(review): exact raisers are defined in the implementation;
   confirm there. *)
exception Not_a_num
exception Maybe_zero
(* Signature of the semantic values polynomials range over: the shared
   [Sig.X] interface extended with multiplication. *)
module type S = sig
  include Sig.X
  val mult : r -> r -> r
end
(* Polynomials with rational ([Numbers.Q.t]) coefficients over semantic
   values of type [r].  Comments below restored/translated from the
   original French. *)
module type T = sig
  type r
  type t
  val compare : t -> t -> int
  val equal : t -> t -> bool
  val hash : t -> int
  (* [create monomials constant ty] builds the polynomial with the given
     (coefficient, leaf) monomials, constant part and type. *)
  val create : (Numbers.Q.t * r) list -> Numbers.Q.t -> Ty.t-> t
  val add : t -> t -> t
  val sub : t -> t -> t
  val mult : t -> t -> t
  val mult_const : Numbers.Q.t -> t -> t
  val add_const : Numbers.Q.t -> t -> t
  (* NOTE(review): the boolean flag's meaning (exactness of the division?)
     must be confirmed against the implementation. *)
  val div : t -> t -> t * bool
  val modulo : t -> t -> t
  val is_const : t -> Numbers.Q.t option
  val is_empty : t -> bool
  val find : r -> t -> Numbers.Q.t
  val choose : t -> Numbers.Q.t * r
  val subst : r -> t -> t -> t
  val remove : r -> t -> t
  val to_list : t -> (Numbers.Q.t * r) list * Numbers.Q.t
  val leaves : t -> r list
  val print : Format.formatter -> t -> unit
  val type_info : t -> Ty.t
  val is_monomial : t -> (Numbers.Q.t * r * Numbers.Q.t) option
  (* lcm (PPMC) of the denominators of the coefficients and the constant *)
  val ppmc_denominators : t -> Numbers.Q.t
  (* gcd (PGCD) of the numerators of the coefficients and the constant *)
  val pgcd_numerators : t -> Numbers.Q.t
  (* returns the polynomial without its constant part, the constant, and
     the multiplicative constant:
     normal_form p = (p', c, d) <=> p = (p' + c) * d *)
  val normal_form : t -> t * Numbers.Q.t * Numbers.Q.t
  (* like [normal_form], but the sign is normalized as well *)
  val normal_form_pos : t -> t * Numbers.Q.t * Numbers.Q.t
  val abstract_selectors : t -> (r * r) list -> t * (r * r) list
  val separate_constant : t -> t * Numbers.Q.t
end
(* A polynomial signature that can additionally embed a polynomial into a
   semantic value and extract it back. *)
module type EXTENDED_Polynome = sig
  include T
  val extract : r -> t option
  val embed : t -> r
end
(* Functor building polynomials over a given semantic-value module. *)
module Make (X : S) : T with type r = X.r
| null | https://raw.githubusercontent.com/OCamlPro/alt-ergo/291523151417f4cd112744d740b58ab1e8a630b4/src/lib/reasoners/polynome.mli | ocaml | ****************************************************************************
The Alt-Ergo theorem prover
License version 2.0
------------------------------------------------------------------------
License version 2.0
**************************************************************************** | Copyright ( C ) 2006 - 2013
CNRS - INRIA - Universite Paris Sud
This file is distributed under the terms of the Apache Software
Alt - Ergo : The SMT Solver For Software Verification
Copyright ( C ) 2013 - 2018
This file is distributed under the terms of the Apache Software
exception Not_a_num
exception Maybe_zero
module type S = sig
include Sig.X
val mult : r -> r -> r
end
module type T = sig
type r
type t
val compare : t -> t -> int
val equal : t -> t -> bool
val hash : t -> int
val create : (Numbers.Q.t * r) list -> Numbers.Q.t -> Ty.t-> t
val add : t -> t -> t
val sub : t -> t -> t
val mult : t -> t -> t
val mult_const : Numbers.Q.t -> t -> t
val add_const : Numbers.Q.t -> t -> t
val div : t -> t -> t * bool
val modulo : t -> t -> t
val is_const : t -> Numbers.Q.t option
val is_empty : t -> bool
val find : r -> t -> Numbers.Q.t
val choose : t -> Numbers.Q.t * r
val subst : r -> t -> t -> t
val remove : r -> t -> t
val to_list : t -> (Numbers.Q.t * r) list * Numbers.Q.t
val leaves : t -> r list
val print : Format.formatter -> t -> unit
val type_info : t -> Ty.t
val is_monomial : t -> (Numbers.Q.t * r * Numbers.Q.t) option
PPMC coefficients constante
val ppmc_denominators : t -> Numbers.Q.t
PGCD des numerateurs des coefficients constante
val pgcd_numerators : t -> Numbers.Q.t
retourne constante
et la constante multiplicative :
normal_form p = ( p',c , d ) < = > p = ( p ' + c ) * d
et la constante multiplicative:
normal_form p = (p',c,d) <=> p = (p' + c) * d *)
val normal_form : t -> t * Numbers.Q.t * Numbers.Q.t
comme normal_form mais le signe est aussi normalise
val normal_form_pos : t -> t * Numbers.Q.t * Numbers.Q.t
val abstract_selectors : t -> (r * r) list -> t * (r * r) list
val separate_constant : t -> t * Numbers.Q.t
end
module type EXTENDED_Polynome = sig
include T
val extract : r -> t option
val embed : t -> r
end
module Make (X : S) : T with type r = X.r
|
cee0a4f6f9afc1643e328e38e481deaaeba610fa9e619ad608ed25d5aeb76f58 | mainland/nikola | Program.hs | # LANGUAGE FlexibleContexts #
# LANGUAGE ScopedTypeVariables #
-- |
-- Module : Data.Array.Nikola.Program
Copyright : ( c ) 2012
-- License : BSD-style
--
Maintainer : < >
-- Stability : experimental
-- Portability : non-portable
module Data.Array.Nikola.Program (
P,
seqK
) where
-- import Text.PrettyPrint.Mainland
import Data.Array.Nikola.Language.Monad
import Data.Array.Nikola.Language.Syntax hiding (Var, Exp)
import qualified Data.Array.Nikola.Language.Syntax as S
-- | Prepend a monadic action to the program being generated.
--
-- 'shift' captures the current continuation @k@; 'reset' runs @k x@ to
-- completion to obtain the remainder of the program @p2@, which is then
-- sequenced after the given expression @p1@ via 'seqE'.
-- NOTE(review): relies on 'P' being the delimited-continuation monad
-- from "Data.Array.Nikola.Language.Monad".
seqK :: S.Exp -> a -> P a
seqK p1 x = do
    shift $ \k -> do
    p2 <- reset $ k x
    return $ p1 `seqE` p2
Module : Data.Array.Nikola.Program
License : BSD-style
Stability : experimental
Portability : non-portable
import Text.PrettyPrint.Mainland
| Prepend a monadic action to the program being generated. | # LANGUAGE FlexibleContexts #
# LANGUAGE ScopedTypeVariables #
Copyright : ( c ) 2012
Maintainer : < >
module Data.Array.Nikola.Program (
P,
seqK
) where
import Data.Array.Nikola.Language.Monad
import Data.Array.Nikola.Language.Syntax hiding (Var, Exp)
import qualified Data.Array.Nikola.Language.Syntax as S
seqK :: S.Exp -> a -> P a
seqK p1 x = do
shift $ \k -> do
p2 <- reset $ k x
return $ p1 `seqE` p2
|
cf5aa00493d7ac88e85d4e2d87b2d7c1bdde7059ba93e9ed4332688ccd816d3c | threatgrid/ctia | feed_test.clj | (ns ctia.entity.feed-test
(:require
[clj-http.client :as client]
[clojure.edn :as edn]
[clojure.string :as string]
[clojure.test :refer [deftest is join-fixtures testing use-fixtures]]
[ctia.entity.feed :as sut]
[ctia.test-helpers.access-control :refer [access-control-test]]
[ctia.test-helpers.auth :refer [all-capabilities]]
[ctia.test-helpers.core :as helpers]
[ctia.test-helpers.crud :refer [entity-crud-test]]
[ctia.test-helpers.fake-whoami-service :as whoami-helpers]
[ctia.test-helpers.store :refer [test-for-each-store-with-app]]
[ctim.schemas.common :as c]
[schema.test :refer [validate-schemas]]))
(def new-feed-maximal
  "A feed populated with every optional field; used as the CRUD example
  (its :indicator_id is replaced with a real stored id at test time)."
  {:revision 0
   :schema_version c/ctim-schema-version
   :type "feed"
   :tlp "green"
   :timestamp #inst "2016-05-11T00:40:48.212-00:00"
   :external_references [{:source_name "source"
                          :external_id "T1067"
                          :url ""
                          :hashes ["#section1"]
                          :description "Description text"}]
   :external_ids ["-345"]
   :indicator_id "-345"
   :language "en"
   :title "title"
   :feed_type "indicator"
   :output :observables
   :lifetime {:start_time #inst "2016-01-01T01:01:01.000Z"
              :end_time #inst "2042-01-01T01:01:01.000Z"}})
(def new-feed-minimal
  "Smallest valid feed: required fields only."
  {:title "title"
   :feed_type "indicator"
   :output :judgements})
(def indicator
  "The indicator all generated judgements are related to; imported via
  the bundle and referenced by the feed under test (transient id is
  resolved to a stored id on import)."
  {:description
   "A lookup table for IPs (IPv4 and IPv6) that are considered suspicious by security analysts",
   :tags ["Suspicious IPs"],
   :valid_time
   {:start_time "2019-05-03T21:48:25.801Z",
    :end_time "2052-06-03T21:48:25.801Z"},
   :producer "Talos",
   :schema_version "1.0.11",
   :type "indicator",
   :source "Feed Indicator Example",
   :external_ids
   ["ctia-feed-indicator-test"],
   :short_description
   "Custom Suspicious IP Watchlist",
   :title "Custom Suspicious IP Watchlist",
   :indicator_type ["IP Watchlist"],
   :source_uri
   "-threat-intel-ctim.md",
   :id
   "transient:esa-indicator-ec95b042572a11894fffe553555c44f5c88c9199aad23a925bb959daa501338e",
   :severity "High",
   :tlp "amber",
   :confidence "High"})
(def base-judgement
  "Judgement template; `judgements` and `expired-judgements` stamp an
  :id, :valid_time and :observable onto it."
  {:schema_version "1.0.11",
   :type "judgement",
   :source "Feed Indicator Example",
   :external_ids
   ["ctia-feed-indicator-test"],
   :disposition 2,
   :source_uri
   "-threat-intel-ctim.md",
   :disposition_name "Malicious",
   :priority 95,
   :severity "High",
   :tlp "amber",
   :timestamp "2019-03-01T19:22:45.531Z",
   :confidence "High"})
(def judgements
  "100 judgements over IPs 187.75.42.0-99, each with a transient id and
  a validity window that has not yet expired."
  (for [n (range 100)]
    (merge base-judgement
           {:id (format "transient:esa-judgement-%03d" n)
            :valid_time {:start_time "2019-03-01T19:22:45.531Z"
                         :end_time "2052-03-31T19:22:45.531Z"}
            :observable {:type "ip"
                         :value (format "187.75.42.%d" n)}})))
(def expired-judgements
  "100 judgements over IPs 187.75.16.100-199 whose :valid_time ended in
  2019; the feed-view tests expect these to be absent from the output."
  (for [n (range 100 200)]
    (merge base-judgement
           {:id (format "transient:esa-judgement-%03d" n)
            :valid_time {:start_time "2019-03-01T19:22:45.531Z"
                         :end_time "2019-03-31T19:22:45.531Z"}
            :observable {:type "ip"
                         :value (format "187.75.16.%d" n)}})))
(def relationships
  "One element-of relationship per (non-expired) judgement, linking it to
  the indicator; relationship ids reuse the numeric suffix of the source
  judgement id."
  (map #(let [suffix (string/replace % #"transient:esa-judgement-" "")
              transient-id (str "transient:esa-relationship-" suffix)]
          {:id transient-id
           :source_ref %
           :schema_version "1.0.11",
           :target_ref
           "transient:esa-indicator-ec95b042572a11894fffe553555c44f5c88c9199aad23a925bb959daa501338e",
           :type "relationship",
           :external_ids
           ["ctia-feed-indicator-test"],
           :short_description
           "Judgement is part of indicator - 'Custom Malicious IP Watchlist'",
           :title "judgement/indicator relationship",
           :external_references [],
           :tlp "amber",
           :timestamp "2019-05-08T18:03:32.785Z",
           :relationship_type "element-of"})
       (map :id judgements)))
(def blocklist-bundle
  "Bundle imported at test start: the indicator, the 100 current
  judgements plus one duplicate-observable judgement, the 100 expired
  judgements, and the element-of relationships."
  {:type "bundle",
   :source "Feed Indicator Example",
   :source_uri
   "-threat-intel-ctim.md",
   :indicators [indicator],
   :judgements (let [duplicated-observable-value (-> judgements
                                                     first
                                                     (assoc :id "transient:esa-judgement-4340e8cc49ff428e21ad1467de4b40246eb0e3b8da96caa2f71f9fe54123d500"))]
                 (-> (conj judgements duplicated-observable-value)
                     (concat expired-judgements)))
   :relationships relationships})
;; Once per namespace: validate all schemas and run the fake whoami service.
(use-fixtures :once
  (join-fixtures [validate-schemas
                  whoami-helpers/fixture-server]))
(defn feed-view-tests
  "Extra tests run by the CRUD suite against a created feed: the
  unauthenticated text view, wrong-secret rejection, bad query params,
  the :judgements output mode, header-driven pagination, and the
  unpaginated full view."
  [app feed-id feed]
  (testing "GET /ctia/feed/:id/view?s=:secret"
    (let [feed-view-url-txt (:feed_view_url feed)
          ;; dropping the last character of the URL corrupts the secret
          feed-view-url-txt-wrong-secret (->> (drop-last feed-view-url-txt)
                                              (string/join ""))
          response-txt (client/get feed-view-url-txt {})
          response-txt-wrong-secret
          (client/get feed-view-url-txt-wrong-secret
                      {:throw-exceptions false
                       :headers {"Authorization" "45c1f5e3f05d0"}})
          response-body-txt (:body response-txt)
          response-body-txt-wrong-secret (:body response-txt-wrong-secret)
          ;; the view must list the sorted observable values of the
          ;; non-expired judgements, one per line
          expected-body (->> (map #(-> % :observable :value) judgements)
                             sort
                             (string/join "\n"))]
      (assert (not (string/blank? expected-body)))
      (is (= 200 (:status response-txt)))
      (is (= expected-body
             response-body-txt))
      (is (= 401 (:status response-txt-wrong-secret)))
      (is (= "wrong secret"
             response-body-txt-wrong-secret))
      (testing "bad request"
        (let [url-with-invalid-query-params
              (string/replace feed-view-url-txt #"s=" "invalid=")
              {:keys [body headers status]}
              (client/get url-with-invalid-query-params {:throw-exceptions false})]
          (is (= 400 status))
          (is (string/starts-with? (get headers "Content-Type") "text/plain"))
          (is (= "{:errors {:s missing-required-key, :invalid disallowed-key}}" body))))
      (testing "feed output judgements"
        ;; switch the feed to :judgements output and verify the JSON view
        (let [feed-update (assoc feed :output :judgements)
              updated-feed-response
              (helpers/PUT app
                           (str "ctia/feed/" (:short-id feed-id))
                           :body feed-update
                           :headers {"Authorization" "45c1f5e3f05d0"})
              updated-feed (:parsed-body updated-feed-response)]
          (is (= 200 (:status updated-feed-response)))
          (is (= (dissoc feed-update :feed_view_url)
                 (dissoc updated-feed
                         :feed_view_url)))
          (let [feed-view-url (:feed_view_url updated-feed)
                valid-response (client/get feed-view-url
                                           {:as :json})
                valid-response-body (:body valid-response)]
            (is (= 200 (:status valid-response)))
            (is (= (count judgements)
                   (count (:judgements valid-response-body))))
            (is (= (set (map :observable
                             judgements))
                   (set (map :observable
                             (:judgements valid-response-body)))))
            ;; teardown: restore the original feed for subsequent tests
            (is (= 200 (:status
                        (helpers/PUT app
                                     (str "ctia/feed/" (:short-id feed-id))
                                     :body feed
                                     :headers {"Authorization" "45c1f5e3f05d0"})))))))))
  (testing "pagination"
    ;; walk the view page by page following the X-Search_after header
    (let [feed-view-url (:feed_view_url feed)
          counter (atom 0)
          expected-response (into #{} (map #(-> % :observable :value)) judgements)
          response (loop [acc #{} limit 20 search-after []]
                     (let [{:keys [headers body]}
                           (client/get feed-view-url {:query-params {:limit limit
                                                                     :search_after search-after}})]
                       (swap! counter inc)
                       (if (contains? headers "X-Search_after")
                         (recur (into acc (string/split-lines body))
                                (edn/read-string (get headers "X-Limit"))
                                (edn/read-string (get headers "X-Search_after")))
                         acc)))]
      (is (= response expected-response))
      ;; 100 values / 20 per page = 5 pages, plus the final empty page
      (is (= (inc (/ (count expected-response) 20)) @counter))))
  (testing "when no pagination params - return entire collection"
    (let [feed-view-url (:feed_view_url feed)
          expected-response (into #{} (map #(-> % :observable :value)) judgements)
          response (let [{:keys [body]} (client/get feed-view-url)]
                     (into #{} (string/split-lines body)))]
      (is (= 100 (count response)))
      (is (= response expected-response)))))
(deftest test-feed-routes
  ;; Imports `blocklist-bundle`, recovers the stored indicator id, then
  ;; runs the generic entity CRUD suite (plus `feed-view-tests`) against
  ;; the feed entity for every configured store.
  (test-for-each-store-with-app
   (fn [app]
     (helpers/set-capabilities! app
                                "foouser"
                                ["foogroup"]
                                "user"
                                all-capabilities)
     (whoami-helpers/set-whoami-response app
                                         "45c1f5e3f05d0"
                                         "foouser"
                                         "foogroup"
                                         "user")
     (let [response (helpers/POST app
                                  "ctia/bundle/import"
                                  :body blocklist-bundle
                                  :headers {"Authorization" "45c1f5e3f05d0"})
           bundle-import-result (:parsed-body response)
           ;; the import resolves the indicator's transient id to a real one
           indicator-id (some->> (:results bundle-import-result)
                                 (filter #(= (:type %) :indicator))
                                 first
                                 :id)]
       (is (not (nil? indicator-id))
           "we successfully have an indicator id to test the view")
       (entity-crud-test
        (assoc sut/feed-entity
               :app app
               :example (assoc new-feed-maximal
                               :indicator_id
                               indicator-id)
               :search-field :title
               :update-field :title
               :invalid-test-field :title
               :delete-search-tests? false
               :headers {:Authorization "45c1f5e3f05d0"}
               :additional-tests feed-view-tests))))))
(deftest test-feed-routes-access-control
  ;; Runs the shared access-control matrix for the feed entity; the two
  ;; boolean flags are forwarded to `access-control-test` (see its
  ;; definition for their meaning -- TODO(review) confirm).
  (access-control-test "feed"
                       new-feed-minimal
                       true
                       true
                       test-for-each-store-with-app))
| null | https://raw.githubusercontent.com/threatgrid/ctia/6c11ba6a7c57a44de64c16601d3914f5b0cf308e/test/ctia/entity/feed_test.clj | clojure | teardown | (ns ctia.entity.feed-test
(:require
[clj-http.client :as client]
[clojure.edn :as edn]
[clojure.string :as string]
[clojure.test :refer [deftest is join-fixtures testing use-fixtures]]
[ctia.entity.feed :as sut]
[ctia.test-helpers.access-control :refer [access-control-test]]
[ctia.test-helpers.auth :refer [all-capabilities]]
[ctia.test-helpers.core :as helpers]
[ctia.test-helpers.crud :refer [entity-crud-test]]
[ctia.test-helpers.fake-whoami-service :as whoami-helpers]
[ctia.test-helpers.store :refer [test-for-each-store-with-app]]
[ctim.schemas.common :as c]
[schema.test :refer [validate-schemas]]))
(def new-feed-maximal
{:revision 0
:schema_version c/ctim-schema-version
:type "feed"
:tlp "green"
:timestamp #inst "2016-05-11T00:40:48.212-00:00"
:external_references [{:source_name "source"
:external_id "T1067"
:url ""
:hashes ["#section1"]
:description "Description text"}]
:external_ids ["-345"]
:indicator_id "-345"
:language "en"
:title "title"
:feed_type "indicator"
:output :observables
:lifetime {:start_time #inst "2016-01-01T01:01:01.000Z"
:end_time #inst "2042-01-01T01:01:01.000Z"}})
(def new-feed-minimal
{:title "title"
:feed_type "indicator"
:output :judgements})
(def indicator
{:description
"A lookup table for IPs (IPv4 and IPv6) that are considered suspicious by security analysts",
:tags ["Suspicious IPs"],
:valid_time
{:start_time "2019-05-03T21:48:25.801Z",
:end_time "2052-06-03T21:48:25.801Z"},
:producer "Talos",
:schema_version "1.0.11",
:type "indicator",
:source "Feed Indicator Example",
:external_ids
["ctia-feed-indicator-test"],
:short_description
"Custom Suspicious IP Watchlist",
:title "Custom Suspicious IP Watchlist",
:indicator_type ["IP Watchlist"],
:source_uri
"-threat-intel-ctim.md",
:id
"transient:esa-indicator-ec95b042572a11894fffe553555c44f5c88c9199aad23a925bb959daa501338e",
:severity "High",
:tlp "amber",
:confidence "High"})
(def base-judgement
{:schema_version "1.0.11",
:type "judgement",
:source "Feed Indicator Example",
:external_ids
["ctia-feed-indicator-test"],
:disposition 2,
:source_uri
"-threat-intel-ctim.md",
:disposition_name "Malicious",
:priority 95,
:severity "High",
:tlp "amber",
:timestamp "2019-03-01T19:22:45.531Z",
:confidence "High"})
(def judgements
(map #(let [transient-id (format "transient:esa-judgement-%03d" %)
ip (format "187.75.42.%d" %)]
(into base-judgement {:id transient-id
:valid_time {:start_time "2019-03-01T19:22:45.531Z",
:end_time "2052-03-31T19:22:45.531Z"}
:observable {:type "ip" :value ip}}))
(range 100)))
(def expired-judgements
(map #(let [transient-id (format "transient:esa-judgement-%03d" %)
ip (format "187.75.16.%d" %)]
(into base-judgement {:id transient-id
:valid_time {:start_time "2019-03-01T19:22:45.531Z",
:end_time "2019-03-31T19:22:45.531Z"}
:observable {:type "ip" :value ip}}))
(range 100 200)))
(def relationships
(map #(let [suffix (string/replace % #"transient:esa-judgement-" "")
transient-id (str "transient:esa-relationship-" suffix)]
{:id transient-id
:source_ref %
:schema_version "1.0.11",
:target_ref
"transient:esa-indicator-ec95b042572a11894fffe553555c44f5c88c9199aad23a925bb959daa501338e",
:type "relationship",
:external_ids
["ctia-feed-indicator-test"],
:short_description
"Judgement is part of indicator - 'Custom Malicious IP Watchlist'",
:title "judgement/indicator relationship",
:external_references [],
:tlp "amber",
:timestamp "2019-05-08T18:03:32.785Z",
:relationship_type "element-of"})
(map :id judgements)))
(def blocklist-bundle
{:type "bundle",
:source "Feed Indicator Example",
:source_uri
"-threat-intel-ctim.md",
:indicators [indicator],
:judgements (let [duplicated-observable-value (-> judgements
first
(assoc :id "transient:esa-judgement-4340e8cc49ff428e21ad1467de4b40246eb0e3b8da96caa2f71f9fe54123d500"))]
(-> (conj judgements duplicated-observable-value)
(concat expired-judgements)))
:relationships relationships})
(use-fixtures :once
(join-fixtures [validate-schemas
whoami-helpers/fixture-server]))
(defn feed-view-tests [app feed-id feed]
(testing "GET /ctia/feed/:id/view?s=:secret"
(let [feed-view-url-txt (:feed_view_url feed)
feed-view-url-txt-wrong-secret (->> (drop-last feed-view-url-txt)
(string/join ""))
response-txt (client/get feed-view-url-txt {})
response-txt-wrong-secret
(client/get feed-view-url-txt-wrong-secret
{:throw-exceptions false
:headers {"Authorization" "45c1f5e3f05d0"}})
response-body-txt (:body response-txt)
response-body-txt-wrong-secret (:body response-txt-wrong-secret)
expected-body (->> (map #(-> % :observable :value) judgements)
sort
(string/join "\n"))]
(assert (not (string/blank? expected-body)))
(is (= 200 (:status response-txt)))
(is (= expected-body
response-body-txt))
(is (= 401 (:status response-txt-wrong-secret)))
(is (= "wrong secret"
response-body-txt-wrong-secret))
(testing "bad request"
(let [url-with-invalid-query-params
(string/replace feed-view-url-txt #"s=" "invalid=")
{:keys [body headers status]}
(client/get url-with-invalid-query-params {:throw-exceptions false})]
(is (= 400 status))
(is (string/starts-with? (get headers "Content-Type") "text/plain"))
(is (= "{:errors {:s missing-required-key, :invalid disallowed-key}}" body))))
(testing "feed output judgements"
(let [feed-update (assoc feed :output :judgements)
updated-feed-response
(helpers/PUT app
(str "ctia/feed/" (:short-id feed-id))
:body feed-update
:headers {"Authorization" "45c1f5e3f05d0"})
updated-feed (:parsed-body updated-feed-response)]
(is (= 200 (:status updated-feed-response)))
(is (= (dissoc feed-update :feed_view_url)
(dissoc updated-feed
:feed_view_url)))
(let [feed-view-url (:feed_view_url updated-feed)
valid-response (client/get feed-view-url
{:as :json})
valid-response-body (:body valid-response)]
(is (= 200 (:status valid-response)))
(is (= (count judgements)
(count (:judgements valid-response-body))))
(is (= (set (map :observable
judgements))
(set (map :observable
(:judgements valid-response-body)))))
(is (= 200 (:status
(helpers/PUT app
(str "ctia/feed/" (:short-id feed-id))
:body feed
:headers {"Authorization" "45c1f5e3f05d0"})))))))))
(testing "pagination"
(let [feed-view-url (:feed_view_url feed)
counter (atom 0)
expected-response (into #{} (map #(-> % :observable :value)) judgements)
response (loop [acc #{} limit 20 search-after []]
(let [{:keys [headers body]}
(client/get feed-view-url {:query-params {:limit limit
:search_after search-after}})]
(swap! counter inc)
(if (contains? headers "X-Search_after")
(recur (into acc (string/split-lines body))
(edn/read-string (get headers "X-Limit"))
(edn/read-string (get headers "X-Search_after")))
acc)))]
(is (= response expected-response))
(is (= (inc (/ (count expected-response) 20)) @counter))))
(testing "when no pagination params - return entire collection"
(let [feed-view-url (:feed_view_url feed)
expected-response (into #{} (map #(-> % :observable :value)) judgements)
response (let [{:keys [body]} (client/get feed-view-url)]
(into #{} (string/split-lines body)))]
(is (= 100 (count response)))
(is (= response expected-response)))))
(deftest test-feed-routes
(test-for-each-store-with-app
(fn [app]
(helpers/set-capabilities! app
"foouser"
["foogroup"]
"user"
all-capabilities)
(whoami-helpers/set-whoami-response app
"45c1f5e3f05d0"
"foouser"
"foogroup"
"user")
(let [response (helpers/POST app
"ctia/bundle/import"
:body blocklist-bundle
:headers {"Authorization" "45c1f5e3f05d0"})
bundle-import-result (:parsed-body response)
indicator-id (some->> (:results bundle-import-result)
(filter #(= (:type %) :indicator))
first
:id)]
(is (not (nil? indicator-id))
"we successfully have an indicator id to test the view")
(entity-crud-test
(assoc sut/feed-entity
:app app
:example (assoc new-feed-maximal
:indicator_id
indicator-id)
:search-field :title
:update-field :title
:invalid-test-field :title
:delete-search-tests? false
:headers {:Authorization "45c1f5e3f05d0"}
:additional-tests feed-view-tests))))))
(deftest test-feed-routes-access-control
(access-control-test "feed"
new-feed-minimal
true
true
test-for-each-store-with-app))
|
ed5cd84c158438238110d50c3396dd822145a7f8590beb5a5f260c6aabcf0975 | asivitz/Hickory | Mesh.hs | # LANGUAGE OverloadedLists #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE DataKinds , PatternSynonyms #
{-# OPTIONS_GHC -Wno-deferred-out-of-scope-variables #-}
module Hickory.Vulkan.Mesh where
import Data.Binary
import Data.Vector.Binary ()
import Data.Vector.Storable as SV
import Data.Vector as V
import Data.Functor ((<&>))
import Vulkan (VertexInputBindingDescription (..), VertexInputRate (..), VertexInputAttributeDescription (..), Format (..), BufferCreateInfo(..), MemoryPropertyFlags, DeviceSize, Buffer, SharingMode (..), BufferUsageFlags, MemoryPropertyFlagBits (..), BufferUsageFlagBits (..), CommandBufferAllocateInfo(..), CommandBufferLevel (..), withCommandBuffers, SubmitInfo(..), BufferCopy(..), useCommandBuffer, cmdCopyBuffer, queueSubmit, commandBufferHandle, pattern COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, CommandBufferBeginInfo(..), queueWaitIdle, CommandBuffer
)
import Foreign (sizeOf, (.|.), castPtr)
import Hickory.Vulkan.Vulkan (mkAcquire, runAcquire)
import Vulkan.Zero (zero)
import VulkanMemoryAllocator (AllocationCreateInfo(requiredFlags), Allocator, Allocation, AllocationInfo, withMappedMemory)
import qualified VulkanMemoryAllocator as VMA
import Control.Exception (bracket)
import Foreign.Marshal.Array (copyArray)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Vulkan.CStruct.Extends (SomeStruct(..))
import Data.List (sortOn)
import Acquire.Acquire (Acquire)
import Hickory.Vulkan.Types (Mesh (..), Attribute (..), VulkanResources (..), DeviceContext (..), BufferedMesh (..))
writeMeshToFile :: FilePath -> Mesh -> IO ()
writeMeshToFile = encodeFile
loadMeshFromFile :: FilePath -> IO Mesh
loadMeshFromFile = decodeFile
instance Binary Attribute
instance Binary Mesh
attrStride :: Attribute -> Int
attrStride Position = 3
attrStride Normal = 3
attrStride TextureCoord = 2
attrStride Color = 4
attrStride BoneIndex = 1
attrStride MaterialIndex = 1
attrLocation :: Attribute -> Word32
attrLocation Position = 0
attrLocation Color = 1
attrLocation Normal = 2
attrLocation TextureCoord = 3
attrLocation BoneIndex = 4
attrLocation MaterialIndex = 5
attrFormat :: Attribute -> Format
attrFormat Position = FORMAT_R32G32B32_SFLOAT
attrFormat Normal = FORMAT_R32G32B32_SFLOAT
attrFormat TextureCoord = FORMAT_R32G32_SFLOAT
attrFormat Color = FORMAT_R32G32B32A32_SFLOAT
attrFormat BoneIndex = FORMAT_R32_SFLOAT
attrFormat MaterialIndex = FORMAT_R32_SFLOAT
pack :: Mesh -> SV.Vector Float
pack Mesh {..} = SV.concat $ fmap snd . sortOn (attrLocation . fst) $ vertices
numVerts :: Mesh -> Int
numVerts Mesh { vertices = ((attr, vec):_) } =
let (num, remainder) = SV.length vec `quotRem` attrStride attr
in if remainder == 0 then num else error "Invalid mesh. Attribute not evenly divisible by stride."
numVerts _ = 0
meshAttributes :: Mesh -> [Attribute]
meshAttributes = fmap fst . vertices
bindingDescriptions :: [Attribute] -> V.Vector VertexInputBindingDescription
bindingDescriptions attrs = V.fromList $ Prelude.zip [0..] attrs <&> \(i,a) -> VertexInputBindingDescription
{ binding = i
, stride = fromIntegral $ attrStride a * sizeOf (0 :: Float)
, inputRate = VERTEX_INPUT_RATE_VERTEX
}
attributeDescriptions :: [Attribute] -> V.Vector VertexInputAttributeDescription
attributeDescriptions attrs = V.fromList $ Prelude.zip [0..] attrs <&> \(i,a) -> VertexInputAttributeDescription
{ binding = i
, location = attrLocation a
, format = attrFormat a
, offset = 0
}
withBufferedMesh :: VulkanResources -> Mesh -> Acquire BufferedMesh
withBufferedMesh bag mesh@Mesh {..} = do
vertexBuffer <- withVertexBuffer bag (pack mesh)
indexBuffer <- traverse (withIndexBuffer bag) indices
pure BufferedMesh {..}
{- Buffer Utils -}
withBuffer' :: Allocator -> BufferUsageFlags -> MemoryPropertyFlags -> DeviceSize -> Acquire (Buffer,Allocation,AllocationInfo)
withBuffer' allocator usageFlags requiredFlags size = VMA.withBuffer allocator bufferCreateInfo allocInfo mkAcquire
where
bufferCreateInfo = zero
{ size = size
, usage = usageFlags
, sharingMode = SHARING_MODE_EXCLUSIVE
}
allocInfo = zero { requiredFlags = requiredFlags }
withVertexBuffer :: Storable a => VulkanResources -> SV.Vector a -> Acquire Buffer
withVertexBuffer bag = withBuffer bag BUFFER_USAGE_VERTEX_BUFFER_BIT
withIndexBuffer :: Storable a => VulkanResources -> SV.Vector a -> Acquire Buffer
withIndexBuffer bag = withBuffer bag BUFFER_USAGE_INDEX_BUFFER_BIT
vsizeOf :: Storable a => SV.Vector a -> Word32
vsizeOf v = fromIntegral $ SV.length v * sizeOf (SV.head v)
withBuffer :: Storable a => VulkanResources -> BufferUsageFlags -> SV.Vector a -> Acquire Buffer
withBuffer vr@VulkanResources {..} usageFlags dat = do
let bufferSize = fromIntegral $ vsizeOf dat
-- Rather than copying directly from CPU to GPU, we want the buffer to
-- live in memory only accesible from GPU for better peformance.
-- So we set up a staging buffer, transfer from host to staging,
-- and then go from staging to optimized memory.
-- Set up the staging buffer
(stagingBuffer, stagingAlloc, _) <- withBuffer' allocator
BUFFER_USAGE_TRANSFER_SRC_BIT
(MEMORY_PROPERTY_HOST_VISIBLE_BIT .|. MEMORY_PROPERTY_HOST_COHERENT_BIT)
bufferSize
liftIO $ withMappedMemory allocator stagingAlloc bracket \bptr -> do
SV.unsafeWith dat \vptr ->
copyArray (castPtr bptr) vptr (SV.length dat)
Set up the real buffer on the GPU and copy from the staging buffer
(buffer, _, _) <- withBuffer' allocator
(BUFFER_USAGE_TRANSFER_DST_BIT .|. usageFlags)
MEMORY_PROPERTY_DEVICE_LOCAL_BIT
bufferSize
copyBuffer vr stagingBuffer buffer bufferSize
pure buffer
copyBuffer :: MonadIO m => VulkanResources -> Buffer -> Buffer -> DeviceSize -> m ()
copyBuffer bag srcBuf dstBuf bufferSize = withSingleTimeCommands bag \commandBuffer -> do
let copyInfo :: BufferCopy
copyInfo = zero { size = bufferSize }
cmdCopyBuffer commandBuffer srcBuf dstBuf [copyInfo]
withSingleTimeCommands :: MonadIO m => VulkanResources -> (CommandBuffer -> IO ()) -> m ()
withSingleTimeCommands VulkanResources {..} f = liftIO $ runAcquire do
let DeviceContext {..} = deviceContext
-- Need a temporary command buffer for copy commands
commandBuffer <- V.head <$>
let commandBufferAllocateInfo :: CommandBufferAllocateInfo
commandBufferAllocateInfo = zero
{ commandPool = shortLivedCommandPool
, level = COMMAND_BUFFER_LEVEL_PRIMARY
, commandBufferCount = 1
}
in withCommandBuffers device commandBufferAllocateInfo mkAcquire
let beginInfo :: CommandBufferBeginInfo '[]
beginInfo = zero { flags = COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT }
useCommandBuffer commandBuffer beginInfo do
liftIO $ f commandBuffer
let submitInfo = zero { commandBuffers = [commandBufferHandle commandBuffer] }
queueSubmit graphicsQueue [SomeStruct submitInfo] zero
queueWaitIdle graphicsQueue
| null | https://raw.githubusercontent.com/asivitz/Hickory/2295504d3ba82054cc8b68db441b8e9f256ee5fa/core/Hickory/Vulkan/Mesh.hs | haskell | # OPTIONS_GHC -Wno-deferred-out-of-scope-variables #
Buffer Utils
Rather than copying directly from CPU to GPU, we want the buffer to
live in memory only accesible from GPU for better peformance.
So we set up a staging buffer, transfer from host to staging,
and then go from staging to optimized memory.
Set up the staging buffer
Need a temporary command buffer for copy commands | # LANGUAGE OverloadedLists #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE DataKinds , PatternSynonyms #
module Hickory.Vulkan.Mesh where
import Data.Binary
import Data.Vector.Binary ()
import Data.Vector.Storable as SV
import Data.Vector as V
import Data.Functor ((<&>))
import Vulkan (VertexInputBindingDescription (..), VertexInputRate (..), VertexInputAttributeDescription (..), Format (..), BufferCreateInfo(..), MemoryPropertyFlags, DeviceSize, Buffer, SharingMode (..), BufferUsageFlags, MemoryPropertyFlagBits (..), BufferUsageFlagBits (..), CommandBufferAllocateInfo(..), CommandBufferLevel (..), withCommandBuffers, SubmitInfo(..), BufferCopy(..), useCommandBuffer, cmdCopyBuffer, queueSubmit, commandBufferHandle, pattern COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, CommandBufferBeginInfo(..), queueWaitIdle, CommandBuffer
)
import Foreign (sizeOf, (.|.), castPtr)
import Hickory.Vulkan.Vulkan (mkAcquire, runAcquire)
import Vulkan.Zero (zero)
import VulkanMemoryAllocator (AllocationCreateInfo(requiredFlags), Allocator, Allocation, AllocationInfo, withMappedMemory)
import qualified VulkanMemoryAllocator as VMA
import Control.Exception (bracket)
import Foreign.Marshal.Array (copyArray)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Vulkan.CStruct.Extends (SomeStruct(..))
import Data.List (sortOn)
import Acquire.Acquire (Acquire)
import Hickory.Vulkan.Types (Mesh (..), Attribute (..), VulkanResources (..), DeviceContext (..), BufferedMesh (..))
writeMeshToFile :: FilePath -> Mesh -> IO ()
writeMeshToFile = encodeFile
loadMeshFromFile :: FilePath -> IO Mesh
loadMeshFromFile = decodeFile
instance Binary Attribute
instance Binary Mesh
attrStride :: Attribute -> Int
attrStride Position = 3
attrStride Normal = 3
attrStride TextureCoord = 2
attrStride Color = 4
attrStride BoneIndex = 1
attrStride MaterialIndex = 1
attrLocation :: Attribute -> Word32
attrLocation Position = 0
attrLocation Color = 1
attrLocation Normal = 2
attrLocation TextureCoord = 3
attrLocation BoneIndex = 4
attrLocation MaterialIndex = 5
attrFormat :: Attribute -> Format
attrFormat Position = FORMAT_R32G32B32_SFLOAT
attrFormat Normal = FORMAT_R32G32B32_SFLOAT
attrFormat TextureCoord = FORMAT_R32G32_SFLOAT
attrFormat Color = FORMAT_R32G32B32A32_SFLOAT
attrFormat BoneIndex = FORMAT_R32_SFLOAT
attrFormat MaterialIndex = FORMAT_R32_SFLOAT
pack :: Mesh -> SV.Vector Float
pack Mesh {..} = SV.concat $ fmap snd . sortOn (attrLocation . fst) $ vertices
numVerts :: Mesh -> Int
numVerts Mesh { vertices = ((attr, vec):_) } =
let (num, remainder) = SV.length vec `quotRem` attrStride attr
in if remainder == 0 then num else error "Invalid mesh. Attribute not evenly divisible by stride."
numVerts _ = 0
meshAttributes :: Mesh -> [Attribute]
meshAttributes = fmap fst . vertices
bindingDescriptions :: [Attribute] -> V.Vector VertexInputBindingDescription
bindingDescriptions attrs = V.fromList $ Prelude.zip [0..] attrs <&> \(i,a) -> VertexInputBindingDescription
{ binding = i
, stride = fromIntegral $ attrStride a * sizeOf (0 :: Float)
, inputRate = VERTEX_INPUT_RATE_VERTEX
}
attributeDescriptions :: [Attribute] -> V.Vector VertexInputAttributeDescription
attributeDescriptions attrs = V.fromList $ Prelude.zip [0..] attrs <&> \(i,a) -> VertexInputAttributeDescription
{ binding = i
, location = attrLocation a
, format = attrFormat a
, offset = 0
}
withBufferedMesh :: VulkanResources -> Mesh -> Acquire BufferedMesh
withBufferedMesh bag mesh@Mesh {..} = do
vertexBuffer <- withVertexBuffer bag (pack mesh)
indexBuffer <- traverse (withIndexBuffer bag) indices
pure BufferedMesh {..}
withBuffer' :: Allocator -> BufferUsageFlags -> MemoryPropertyFlags -> DeviceSize -> Acquire (Buffer,Allocation,AllocationInfo)
withBuffer' allocator usageFlags requiredFlags size = VMA.withBuffer allocator bufferCreateInfo allocInfo mkAcquire
where
bufferCreateInfo = zero
{ size = size
, usage = usageFlags
, sharingMode = SHARING_MODE_EXCLUSIVE
}
allocInfo = zero { requiredFlags = requiredFlags }
withVertexBuffer :: Storable a => VulkanResources -> SV.Vector a -> Acquire Buffer
withVertexBuffer bag = withBuffer bag BUFFER_USAGE_VERTEX_BUFFER_BIT
withIndexBuffer :: Storable a => VulkanResources -> SV.Vector a -> Acquire Buffer
withIndexBuffer bag = withBuffer bag BUFFER_USAGE_INDEX_BUFFER_BIT
vsizeOf :: Storable a => SV.Vector a -> Word32
vsizeOf v = fromIntegral $ SV.length v * sizeOf (SV.head v)
withBuffer :: Storable a => VulkanResources -> BufferUsageFlags -> SV.Vector a -> Acquire Buffer
withBuffer vr@VulkanResources {..} usageFlags dat = do
let bufferSize = fromIntegral $ vsizeOf dat
(stagingBuffer, stagingAlloc, _) <- withBuffer' allocator
BUFFER_USAGE_TRANSFER_SRC_BIT
(MEMORY_PROPERTY_HOST_VISIBLE_BIT .|. MEMORY_PROPERTY_HOST_COHERENT_BIT)
bufferSize
liftIO $ withMappedMemory allocator stagingAlloc bracket \bptr -> do
SV.unsafeWith dat \vptr ->
copyArray (castPtr bptr) vptr (SV.length dat)
Set up the real buffer on the GPU and copy from the staging buffer
(buffer, _, _) <- withBuffer' allocator
(BUFFER_USAGE_TRANSFER_DST_BIT .|. usageFlags)
MEMORY_PROPERTY_DEVICE_LOCAL_BIT
bufferSize
copyBuffer vr stagingBuffer buffer bufferSize
pure buffer
copyBuffer :: MonadIO m => VulkanResources -> Buffer -> Buffer -> DeviceSize -> m ()
copyBuffer bag srcBuf dstBuf bufferSize = withSingleTimeCommands bag \commandBuffer -> do
let copyInfo :: BufferCopy
copyInfo = zero { size = bufferSize }
cmdCopyBuffer commandBuffer srcBuf dstBuf [copyInfo]
withSingleTimeCommands :: MonadIO m => VulkanResources -> (CommandBuffer -> IO ()) -> m ()
withSingleTimeCommands VulkanResources {..} f = liftIO $ runAcquire do
let DeviceContext {..} = deviceContext
commandBuffer <- V.head <$>
let commandBufferAllocateInfo :: CommandBufferAllocateInfo
commandBufferAllocateInfo = zero
{ commandPool = shortLivedCommandPool
, level = COMMAND_BUFFER_LEVEL_PRIMARY
, commandBufferCount = 1
}
in withCommandBuffers device commandBufferAllocateInfo mkAcquire
let beginInfo :: CommandBufferBeginInfo '[]
beginInfo = zero { flags = COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT }
useCommandBuffer commandBuffer beginInfo do
liftIO $ f commandBuffer
let submitInfo = zero { commandBuffers = [commandBufferHandle commandBuffer] }
queueSubmit graphicsQueue [SomeStruct submitInfo] zero
queueWaitIdle graphicsQueue
|
4c0c12d4ff62b20b33903006ad2954d7ee7236a0e40925223a6d2106d0c5f325 | facebookarchive/pfff | glade.ml | (**************************************************************************)
(* Lablgtk *)
(* *)
(* This program is free software; you can redistribute it *)
and/or modify it under the terms of the GNU Library General
Public License as published by the Free Software Foundation
version 2 , with the exception described in file COPYING which
(* comes with the library. *)
(* *)
(* This program is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
GNU Library General Public License for more details .
(* *)
You should have received a copy of the GNU Library General
Public License along with this program ; if not , write to the
Free Software Foundation , Inc. , 59 Temple Place , Suite 330 ,
Boston , MA 02111 - 1307 USA
(* *)
(* *)
(**************************************************************************)
$ I d : glade.ml 1347 2007 - 06 - 20 07:40:34Z guesdon $
open StdLabels
open Gtk
(* GladeXML widget *)
type glade_xml = [`data|`glade_xml]
external init : unit -> unit = "ml_glade_init"
(* external gnome_init : unit -> unit = "ml_glade_gnome_init" *)
external create :
?file:string -> ?data:string ->
?root:string -> ?domain:string -> unit -> glade_xml obj
= "ml_glade_xml_new"
external _signal_autoconnect :
[> `glade_xml] obj ->
(string * unit obj * string * unit obj option * bool -> unit) -> unit
= "ml_glade_xml_signal_autoconnect_full"
let signal_autoconnect self ~f =
_signal_autoconnect self
(fun (handler, obj, signal, target, after) ->
f ~handler ~signal ~after ?target obj)
external _signal_connect :
[> `glade_xml] obj -> string ->
(string * unit obj * string * unit obj option * bool -> unit) -> unit
= "ml_glade_xml_signal_connect_full"
let signal_connect self ~handler ~f =
_signal_connect self handler
(fun (handler, obj, signal, target, after) ->
f ~signal ~after ?target obj)
external get_widget : [> `glade_xml] obj -> name:string -> widget obj
= "ml_glade_xml_get_widget"
external get_widget_name : [> `widget] obj -> string
= "ml_glade_get_widget_name"
external get_widget_tree : [> `widget] obj -> glade_xml obj
= "ml_glade_get_widget_tree"
let get_widget_msg ~name ?info xml =
try get_widget ~name xml
with Gpointer.Null ->
let name = match info with None -> name | Some s -> s^":"^name in
failwith ("Glade error: " ^ name ^ " is not accessible.")
Signal handlers
open Gobject
type handler =
[ `Simple of (unit -> unit)
| `Object of string * (unit obj -> unit)
| `Custom of (Closure.argv -> data_get list -> unit) ]
let ($) f g x = g (f x)
let gtk_bool b argv _ = Closure.set_result argv (`BOOL b)
let known_handlers : (string, handler) Hashtbl.t = Hashtbl.create 11
let add_handler ~name handler =
Hashtbl.add known_handlers name handler
open GtkBase
let _ = List.iter ~f:(fun (name,h) -> add_handler ~name h)
[ "gtk_widget_destroy",`Object ("GtkObject", Object.cast $ Object.destroy);
"gtk_main_quit", `Simple GtkMain.Main.quit;
"gtk_widget_show", `Object ("GtkWidget", Widget.cast $ Widget.show);
"gtk_widget_hide", `Object ("GtkWidget", Widget.cast $ Widget.hide);
"gtk_widget_grab_focus",
`Object ("GtkWidget",
Widget.cast $ fun w -> set Widget.P.has_focus w true);
"gtk_window_activate_default",
`Object ("GtkWindow", fun w -> ignore (GtkWindow.Window.activate_default
(GtkWindow.Window.cast w)));
"gtk_true", `Custom (gtk_bool true);
"gtk_false", `Custom (gtk_bool false);
]
open Printf
let check_handler ?target ?(name="<unknown>") handler =
match handler with
`Simple f ->
fun _ -> f ()
| `Object (cls, f) ->
begin match target with
None ->
eprintf "Glade-warning: %s requires an object argument.\n" name;
raise Not_found
| Some obj ->
if Gobject.is_a obj cls then
fun _ -> f obj
else begin
eprintf "Glade-warning: %s expects a %s argument.\n" name cls;
raise Not_found
end
end
| `Custom f ->
if target <> None then
eprintf "Glade-warning: %s does not take an object argument.\n" name;
fun argv -> f argv (Closure.get_args argv)
let bind_handlers ?(extra=[]) ?(warn=false) xml =
signal_autoconnect xml ~f:
begin fun ~handler:name ~signal ~after ?target obj ->
try
let handler =
try List.assoc name extra
with Not_found -> Hashtbl.find known_handlers name
in
let callback = check_handler ?target ~name handler in
ignore (GtkSignal.connect_by_name obj ~name:signal ~after
~callback:(Closure.create callback))
with Not_found ->
if warn then eprintf "Glade.bind_handlers: no handler for %s\n" name
end;
flush stderr
let bind_handler ~name ~handler ?(warn=true) xml =
let warn = ref warn in
signal_connect xml ~handler:name ~f:
begin fun ~signal ~after ?target obj ->
warn := false;
let callback = check_handler ?target ~name handler in
ignore (GtkSignal.connect_by_name obj ~name:signal ~after
~callback:(Closure.create callback))
end;
if !warn then begin
eprintf "Glade-warning: handler %s is not used\n" name;
flush stderr
end
(* To list bindings *)
let ($) f g x = g (f x)
let show_option sh = function None -> "None" | Some x -> "Some " ^ sh x
let print_binding oc ~handler ~signal ~after ?target obj =
Printf.fprintf oc "object=%s, signal=%s, handler=%s, target=%s\n"
(get_widget_name (GtkBase.Widget.cast obj)) signal handler
(show_option (GtkBase.Widget.cast $ get_widget_name) target)
let print_bindings oc xml =
signal_autoconnect xml ~f:(print_binding oc); flush oc
let trace_handlers oc xml =
signal_autoconnect xml ~f:
begin fun ~handler ~signal ~after ?target obj ->
let callback _ =
if signal = "" then
Printf.fprintf oc "Glade-debug: handler %s called\n" handler
else
Printf.fprintf oc
"Glade-debug: %s called by signal %s on widget %s\n"
handler signal (get_widget_name (GtkBase.Widget.cast obj));
flush oc
in
ignore (GtkSignal.connect_by_name obj ~name:signal ~after
~callback:(Closure.create callback))
end
(* class skeleton, for use in generated wrappers *)
let create ?file ?data ?root ?domain () =
init (); create ?file ?data ?root ?domain ()
class xml ?trace ?(autoconnect = true) (xmldata : glade_xml Gtk.obj) =
let () = match trace with Some oc -> trace_handlers oc xmldata | None -> () in
let () = if autoconnect then bind_handlers xmldata in
object (self)
val xml = xmldata
method xml = xmldata
method bind ~name ~callback =
bind_handler ~name ~handler:(`Simple callback) ~warn:true xmldata
end
| null | https://raw.githubusercontent.com/facebookarchive/pfff/ec21095ab7d445559576513a63314e794378c367/external/ocamlgtk/src/glade.ml | ocaml | ************************************************************************
Lablgtk
This program is free software; you can redistribute it
comes with the library.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
************************************************************************
GladeXML widget
external gnome_init : unit -> unit = "ml_glade_gnome_init"
To list bindings
class skeleton, for use in generated wrappers | and/or modify it under the terms of the GNU Library General
Public License as published by the Free Software Foundation
version 2 , with the exception described in file COPYING which
GNU Library General Public License for more details .
You should have received a copy of the GNU Library General
Public License along with this program ; if not , write to the
Free Software Foundation , Inc. , 59 Temple Place , Suite 330 ,
Boston , MA 02111 - 1307 USA
$ I d : glade.ml 1347 2007 - 06 - 20 07:40:34Z guesdon $
open StdLabels
open Gtk
type glade_xml = [`data|`glade_xml]
external init : unit -> unit = "ml_glade_init"
external create :
?file:string -> ?data:string ->
?root:string -> ?domain:string -> unit -> glade_xml obj
= "ml_glade_xml_new"
external _signal_autoconnect :
[> `glade_xml] obj ->
(string * unit obj * string * unit obj option * bool -> unit) -> unit
= "ml_glade_xml_signal_autoconnect_full"
let signal_autoconnect self ~f =
_signal_autoconnect self
(fun (handler, obj, signal, target, after) ->
f ~handler ~signal ~after ?target obj)
external _signal_connect :
[> `glade_xml] obj -> string ->
(string * unit obj * string * unit obj option * bool -> unit) -> unit
= "ml_glade_xml_signal_connect_full"
let signal_connect self ~handler ~f =
_signal_connect self handler
(fun (handler, obj, signal, target, after) ->
f ~signal ~after ?target obj)
external get_widget : [> `glade_xml] obj -> name:string -> widget obj
= "ml_glade_xml_get_widget"
external get_widget_name : [> `widget] obj -> string
= "ml_glade_get_widget_name"
external get_widget_tree : [> `widget] obj -> glade_xml obj
= "ml_glade_get_widget_tree"
let get_widget_msg ~name ?info xml =
try get_widget ~name xml
with Gpointer.Null ->
let name = match info with None -> name | Some s -> s^":"^name in
failwith ("Glade error: " ^ name ^ " is not accessible.")
Signal handlers
open Gobject
type handler =
[ `Simple of (unit -> unit)
| `Object of string * (unit obj -> unit)
| `Custom of (Closure.argv -> data_get list -> unit) ]
let ($) f g x = g (f x)
let gtk_bool b argv _ = Closure.set_result argv (`BOOL b)
let known_handlers : (string, handler) Hashtbl.t = Hashtbl.create 11
let add_handler ~name handler =
Hashtbl.add known_handlers name handler
open GtkBase
let _ = List.iter ~f:(fun (name,h) -> add_handler ~name h)
[ "gtk_widget_destroy",`Object ("GtkObject", Object.cast $ Object.destroy);
"gtk_main_quit", `Simple GtkMain.Main.quit;
"gtk_widget_show", `Object ("GtkWidget", Widget.cast $ Widget.show);
"gtk_widget_hide", `Object ("GtkWidget", Widget.cast $ Widget.hide);
"gtk_widget_grab_focus",
`Object ("GtkWidget",
Widget.cast $ fun w -> set Widget.P.has_focus w true);
"gtk_window_activate_default",
`Object ("GtkWindow", fun w -> ignore (GtkWindow.Window.activate_default
(GtkWindow.Window.cast w)));
"gtk_true", `Custom (gtk_bool true);
"gtk_false", `Custom (gtk_bool false);
]
open Printf
let check_handler ?target ?(name="<unknown>") handler =
match handler with
`Simple f ->
fun _ -> f ()
| `Object (cls, f) ->
begin match target with
None ->
eprintf "Glade-warning: %s requires an object argument.\n" name;
raise Not_found
| Some obj ->
if Gobject.is_a obj cls then
fun _ -> f obj
else begin
eprintf "Glade-warning: %s expects a %s argument.\n" name cls;
raise Not_found
end
end
| `Custom f ->
if target <> None then
eprintf "Glade-warning: %s does not take an object argument.\n" name;
fun argv -> f argv (Closure.get_args argv)
let bind_handlers ?(extra=[]) ?(warn=false) xml =
signal_autoconnect xml ~f:
begin fun ~handler:name ~signal ~after ?target obj ->
try
let handler =
try List.assoc name extra
with Not_found -> Hashtbl.find known_handlers name
in
let callback = check_handler ?target ~name handler in
ignore (GtkSignal.connect_by_name obj ~name:signal ~after
~callback:(Closure.create callback))
with Not_found ->
if warn then eprintf "Glade.bind_handlers: no handler for %s\n" name
end;
flush stderr
let bind_handler ~name ~handler ?(warn=true) xml =
let warn = ref warn in
signal_connect xml ~handler:name ~f:
begin fun ~signal ~after ?target obj ->
warn := false;
let callback = check_handler ?target ~name handler in
ignore (GtkSignal.connect_by_name obj ~name:signal ~after
~callback:(Closure.create callback))
end;
if !warn then begin
eprintf "Glade-warning: handler %s is not used\n" name;
flush stderr
end
let ($) f g x = g (f x)
let show_option sh = function None -> "None" | Some x -> "Some " ^ sh x
let print_binding oc ~handler ~signal ~after ?target obj =
Printf.fprintf oc "object=%s, signal=%s, handler=%s, target=%s\n"
(get_widget_name (GtkBase.Widget.cast obj)) signal handler
(show_option (GtkBase.Widget.cast $ get_widget_name) target)
let print_bindings oc xml =
signal_autoconnect xml ~f:(print_binding oc); flush oc
let trace_handlers oc xml =
signal_autoconnect xml ~f:
begin fun ~handler ~signal ~after ?target obj ->
let callback _ =
if signal = "" then
Printf.fprintf oc "Glade-debug: handler %s called\n" handler
else
Printf.fprintf oc
"Glade-debug: %s called by signal %s on widget %s\n"
handler signal (get_widget_name (GtkBase.Widget.cast obj));
flush oc
in
ignore (GtkSignal.connect_by_name obj ~name:signal ~after
~callback:(Closure.create callback))
end
let create ?file ?data ?root ?domain () =
init (); create ?file ?data ?root ?domain ()
class xml ?trace ?(autoconnect = true) (xmldata : glade_xml Gtk.obj) =
let () = match trace with Some oc -> trace_handlers oc xmldata | None -> () in
let () = if autoconnect then bind_handlers xmldata in
object (self)
val xml = xmldata
method xml = xmldata
method bind ~name ~callback =
bind_handler ~name ~handler:(`Simple callback) ~warn:true xmldata
end
|
20c9f89041510ac195d27aeba4e0d9ef6f68d5a5814ebf3b93a69525aa2757a7 | rabbitmq/rabbitmq-mqtt | java_SUITE.erl | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
%%
Copyright ( c ) 2007 - 2020 VMware , Inc. or its affiliates . All rights reserved .
%%
-module(java_SUITE).
-compile([export_all]).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-define(BASE_CONF_RABBIT, {rabbit, [{ssl_options, [{fail_if_no_peer_cert, false}]}]}).
-define(BASE_CONF_MQTT,
{rabbitmq_mqtt, [
{ssl_cert_login, true},
{allow_anonymous, false},
{sparkplug, true},
{tcp_listeners, []},
{ssl_listeners, []}
]}).
all() ->
[
{group, non_parallel_tests}
].
groups() ->
[
{non_parallel_tests, [], [
java
]}
].
suite() ->
[{timetrap, {seconds, 600}}].
%% -------------------------------------------------------------------
%% Testsuite setup/teardown.
%% -------------------------------------------------------------------
merge_app_env(Config) ->
{ok, Ssl} = q(Config, [erlang_node_config, rabbit, ssl_options]),
Ssl1 = lists:keyreplace(fail_if_no_peer_cert, 1, Ssl, {fail_if_no_peer_cert, false}),
Config1 = rabbit_ct_helpers:merge_app_env(Config, {rabbit, [{ssl_options, Ssl1}]}),
rabbit_ct_helpers:merge_app_env(Config1, ?BASE_CONF_MQTT).
init_per_suite(Config) ->
rabbit_ct_helpers:log_environment(),
Config1 = rabbit_ct_helpers:set_config(Config, [
{rmq_nodename_suffix, ?MODULE},
{rmq_certspwd, "bunnychow"},
{rmq_nodes_clustered, true},
{rmq_nodes_count, 3}
]),
rabbit_ct_helpers:run_setup_steps(Config1,
[ fun merge_app_env/1 ] ++
rabbit_ct_broker_helpers:setup_steps() ++
rabbit_ct_client_helpers:setup_steps()).
end_per_suite(Config) ->
rabbit_ct_helpers:run_teardown_steps(Config,
rabbit_ct_client_helpers:teardown_steps() ++
rabbit_ct_broker_helpers:teardown_steps()).
init_per_group(_, Config) ->
Config.
end_per_group(_, Config) ->
Config.
init_per_testcase(Testcase, Config) ->
CertsDir = ?config(rmq_certsdir, Config),
CertFile = filename:join([CertsDir, "client", "cert.pem"]),
{ok, CertBin} = file:read_file(CertFile),
[{'Certificate', Cert, not_encrypted}] = public_key:pem_decode(CertBin),
UserBin = rabbit_ct_broker_helpers:rpc(Config, 0,
rabbit_ssl,
peer_cert_auth_name,
[Cert]),
User = binary_to_list(UserBin),
{ok,_} = rabbit_ct_broker_helpers:rabbitmqctl(Config, 0, ["add_user", User, ""]),
{ok, _} = rabbit_ct_broker_helpers:rabbitmqctl(Config, 0, ["set_permissions", "-p", "/", User, ".*", ".*", ".*"]),
{ok, _} = rabbit_ct_broker_helpers:rabbitmqctl(Config, 0,
["set_topic_permissions", "-p", "/", "guest", "amq.topic",
% Write permission
"test-topic|test-retained-topic|{username}.{client_id}.a|^sp[AB]v\\d+___\\d+",
% Read permission
"test-topic|test-retained-topic|last-will|{username}.{client_id}.a|^sp[AB]v\\d+___\\d+"]),
rabbit_ct_helpers:testcase_started(Config, Testcase).
end_per_testcase(Testcase, Config) ->
rabbit_ct_helpers:testcase_finished(Config, Testcase).
%% -------------------------------------------------------------------
%% Testsuite cases
%% -------------------------------------------------------------------
java(Config) ->
CertsDir = rabbit_ct_helpers:get_config(Config, rmq_certsdir),
MqttPort = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_mqtt),
MqttPort2 = rabbit_ct_broker_helpers:get_node_config(Config, 1, tcp_port_mqtt),
MqttPort3 = rabbit_ct_broker_helpers:get_node_config(Config, 2, tcp_port_mqtt),
MqttSslPort = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_mqtt_tls),
AmqpPort = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_amqp),
os:putenv("SSL_CERTS_DIR", CertsDir),
os:putenv("MQTT_SSL_PORT", erlang:integer_to_list(MqttSslPort)),
os:putenv("MQTT_PORT", erlang:integer_to_list(MqttPort)),
os:putenv("MQTT_PORT_2", erlang:integer_to_list(MqttPort2)),
os:putenv("MQTT_PORT_3", erlang:integer_to_list(MqttPort3)),
os:putenv("AMQP_PORT", erlang:integer_to_list(AmqpPort)),
DataDir = rabbit_ct_helpers:get_config(Config, data_dir),
MakeResult = rabbit_ct_helpers:make(Config, DataDir, ["tests"]),
{ok, _} = MakeResult.
rpc(Config, M, F, A) ->
rabbit_ct_broker_helpers:rpc(Config, 0, M, F, A).
q(P, [K | Rem]) ->
case proplists:get_value(K, P) of
undefined -> undefined;
V -> q(V, Rem)
end;
q(P, []) -> {ok, P}.
| null | https://raw.githubusercontent.com/rabbitmq/rabbitmq-mqtt/817971bfec4461630a2ef7e27d4fa9f4c4fb8ff9/test/java_SUITE.erl | erlang |
-------------------------------------------------------------------
Testsuite setup/teardown.
-------------------------------------------------------------------
Write permission
Read permission
-------------------------------------------------------------------
Testsuite cases
------------------------------------------------------------------- | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
Copyright ( c ) 2007 - 2020 VMware , Inc. or its affiliates . All rights reserved .
-module(java_SUITE).
-compile([export_all]).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-define(BASE_CONF_RABBIT, {rabbit, [{ssl_options, [{fail_if_no_peer_cert, false}]}]}).
-define(BASE_CONF_MQTT,
{rabbitmq_mqtt, [
{ssl_cert_login, true},
{allow_anonymous, false},
{sparkplug, true},
{tcp_listeners, []},
{ssl_listeners, []}
]}).
all() ->
[
{group, non_parallel_tests}
].
groups() ->
[
{non_parallel_tests, [], [
java
]}
].
suite() ->
[{timetrap, {seconds, 600}}].
merge_app_env(Config) ->
{ok, Ssl} = q(Config, [erlang_node_config, rabbit, ssl_options]),
Ssl1 = lists:keyreplace(fail_if_no_peer_cert, 1, Ssl, {fail_if_no_peer_cert, false}),
Config1 = rabbit_ct_helpers:merge_app_env(Config, {rabbit, [{ssl_options, Ssl1}]}),
rabbit_ct_helpers:merge_app_env(Config1, ?BASE_CONF_MQTT).
init_per_suite(Config) ->
rabbit_ct_helpers:log_environment(),
Config1 = rabbit_ct_helpers:set_config(Config, [
{rmq_nodename_suffix, ?MODULE},
{rmq_certspwd, "bunnychow"},
{rmq_nodes_clustered, true},
{rmq_nodes_count, 3}
]),
rabbit_ct_helpers:run_setup_steps(Config1,
[ fun merge_app_env/1 ] ++
rabbit_ct_broker_helpers:setup_steps() ++
rabbit_ct_client_helpers:setup_steps()).
end_per_suite(Config) ->
rabbit_ct_helpers:run_teardown_steps(Config,
rabbit_ct_client_helpers:teardown_steps() ++
rabbit_ct_broker_helpers:teardown_steps()).
init_per_group(_, Config) ->
Config.
end_per_group(_, Config) ->
Config.
init_per_testcase(Testcase, Config) ->
CertsDir = ?config(rmq_certsdir, Config),
CertFile = filename:join([CertsDir, "client", "cert.pem"]),
{ok, CertBin} = file:read_file(CertFile),
[{'Certificate', Cert, not_encrypted}] = public_key:pem_decode(CertBin),
UserBin = rabbit_ct_broker_helpers:rpc(Config, 0,
rabbit_ssl,
peer_cert_auth_name,
[Cert]),
User = binary_to_list(UserBin),
{ok,_} = rabbit_ct_broker_helpers:rabbitmqctl(Config, 0, ["add_user", User, ""]),
{ok, _} = rabbit_ct_broker_helpers:rabbitmqctl(Config, 0, ["set_permissions", "-p", "/", User, ".*", ".*", ".*"]),
{ok, _} = rabbit_ct_broker_helpers:rabbitmqctl(Config, 0,
["set_topic_permissions", "-p", "/", "guest", "amq.topic",
"test-topic|test-retained-topic|{username}.{client_id}.a|^sp[AB]v\\d+___\\d+",
"test-topic|test-retained-topic|last-will|{username}.{client_id}.a|^sp[AB]v\\d+___\\d+"]),
rabbit_ct_helpers:testcase_started(Config, Testcase).
end_per_testcase(Testcase, Config) ->
rabbit_ct_helpers:testcase_finished(Config, Testcase).
java(Config) ->
CertsDir = rabbit_ct_helpers:get_config(Config, rmq_certsdir),
MqttPort = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_mqtt),
MqttPort2 = rabbit_ct_broker_helpers:get_node_config(Config, 1, tcp_port_mqtt),
MqttPort3 = rabbit_ct_broker_helpers:get_node_config(Config, 2, tcp_port_mqtt),
MqttSslPort = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_mqtt_tls),
AmqpPort = rabbit_ct_broker_helpers:get_node_config(Config, 0, tcp_port_amqp),
os:putenv("SSL_CERTS_DIR", CertsDir),
os:putenv("MQTT_SSL_PORT", erlang:integer_to_list(MqttSslPort)),
os:putenv("MQTT_PORT", erlang:integer_to_list(MqttPort)),
os:putenv("MQTT_PORT_2", erlang:integer_to_list(MqttPort2)),
os:putenv("MQTT_PORT_3", erlang:integer_to_list(MqttPort3)),
os:putenv("AMQP_PORT", erlang:integer_to_list(AmqpPort)),
DataDir = rabbit_ct_helpers:get_config(Config, data_dir),
MakeResult = rabbit_ct_helpers:make(Config, DataDir, ["tests"]),
{ok, _} = MakeResult.
rpc(Config, M, F, A) ->
rabbit_ct_broker_helpers:rpc(Config, 0, M, F, A).
q(P, [K | Rem]) ->
case proplists:get_value(K, P) of
undefined -> undefined;
V -> q(V, Rem)
end;
q(P, []) -> {ok, P}.
|
53bed05fde4b098bd78bcd6efea2f57e0102c2592827cfacdb3be07d91c60384 | c4-project/c4f | call.ml | This file is part of c4f .
Copyright ( c ) 2018 - 2022 C4 Project
c4 t itself is licensed under the MIT License . See the LICENSE file in the
project root for more information .
Parts of c4 t are based on code from the Herdtools7 project
( ) : see the LICENSE.herd file in the
project root for more information .
Copyright (c) 2018-2022 C4 Project
c4t itself is licensed under the MIT License. See the LICENSE file in the
project root for more information.
Parts of c4t are based on code from the Herdtools7 project
() : see the LICENSE.herd file in the
project root for more information. *)
open Base
open struct
module Tx = Travesty_base_exts
end
type t = {arguments: Expression.t list; function_id: C4f_common.C_id.t}
[@@deriving make, fields, sexp, compare, equal]
module On_expressions :
Travesty.Traversable_types.S0 with type t = t and type Elt.t = Expression.t =
Travesty.Traversable.Make0 (struct
type nonrec t = t
module Elt = Expression
module On (M : Applicative.S) = struct
module Ls = Tx.List.On (M)
let map_m (x : t) ~(f : Expression.t -> Expression.t M.t) : t M.t =
M.map (Ls.map_m ~f x.arguments) ~f:(fun arguments ->
make ~function_id:x.function_id ~arguments () )
end
end)
module On_addresses :
Travesty.Traversable_types.S0 with type t = t and type Elt.t = Address.t =
Travesty.Traversable.Chain0
(On_expressions)
(Expression_traverse.On_addresses)
| null | https://raw.githubusercontent.com/c4-project/c4f/8939477732861789abc807c8c1532a302b2848a5/lib/fir/src/call.ml | ocaml | This file is part of c4f .
Copyright ( c ) 2018 - 2022 C4 Project
c4 t itself is licensed under the MIT License . See the LICENSE file in the
project root for more information .
Parts of c4 t are based on code from the Herdtools7 project
( ) : see the LICENSE.herd file in the
project root for more information .
Copyright (c) 2018-2022 C4 Project
c4t itself is licensed under the MIT License. See the LICENSE file in the
project root for more information.
Parts of c4t are based on code from the Herdtools7 project
() : see the LICENSE.herd file in the
project root for more information. *)
open Base
open struct
module Tx = Travesty_base_exts
end
type t = {arguments: Expression.t list; function_id: C4f_common.C_id.t}
[@@deriving make, fields, sexp, compare, equal]
module On_expressions :
Travesty.Traversable_types.S0 with type t = t and type Elt.t = Expression.t =
Travesty.Traversable.Make0 (struct
type nonrec t = t
module Elt = Expression
module On (M : Applicative.S) = struct
module Ls = Tx.List.On (M)
let map_m (x : t) ~(f : Expression.t -> Expression.t M.t) : t M.t =
M.map (Ls.map_m ~f x.arguments) ~f:(fun arguments ->
make ~function_id:x.function_id ~arguments () )
end
end)
module On_addresses :
Travesty.Traversable_types.S0 with type t = t and type Elt.t = Address.t =
Travesty.Traversable.Chain0
(On_expressions)
(Expression_traverse.On_addresses)
| |
8d368c5eaa929ebe015b2cb56e2cc285f9fa43d44cc65bca6b5ed60c90bc6644 | cronokirby/haze | Tracker.hs | # LANGUAGE RecordWildCards #
|
Description : Contains functions related to trackers
This file provides a more abstract description of
the communication protocol with trackers . First it
specificies the data in a .torrent file with MetaInfo ,
then data sent to and returned from a tracker .
Description: Contains functions related to trackers
This file provides a more abstract description of
the communication protocol with trackers. First it
specificies the data in a .torrent file with MetaInfo,
then data sent to and returned from a tracker.
-}
module Haze.Tracker
( Tracker(..)
, TieredList
, MD5Sum(..)
, SHA1(..)
, SHAPieces(..)
, pieceHashesCorrectly
, FileInfo(..)
, FileItem(..)
, totalFileLength
, MetaInfo(..)
, totalFileSize
, squashedTrackers
, decodeMeta
, metaFromBytes
, UDPConnection(..)
, parseUDPConn
, UDPTrackerRequest
, newUDPRequest
, updateUDPTransID
, updateUDPConnID
, encodeUDPRequest
, Announce(..)
, AnnounceInfo(..)
, parseUDPAnnounce
, PeerID(..)
, peerIDBytes
, generatePeerID
, Peer(..)
, decodeAnnounce
, announceFromHTTP
, ReqEvent(..)
, TrackStatus(..)
, firstTrackStatus
, updateTrackStatus
, updateUDPTrackStatus
, TrackerRequest(..)
, newTrackerRequest
, updateTransactionID
, trackerQuery
)
where
import Relude
import qualified Crypto.Hash.SHA1 as SHA1
import qualified Data.Attoparsec.ByteString as AP
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BSC
import Data.Hashable ( Hashable(..) )
import qualified Data.HashMap.Strict as HM
import qualified Data.Text as T
import Data.Time.Clock ( DiffTime
, UTCTime
, getCurrentTime
, utctDayTime
)
import Data.Time.Clock.POSIX ( posixSecondsToUTCTime )
import Network.Socket ( HostName
, PortNumber
)
import Path ( Path
, Rel
, File
, Dir
, (</>)
)
import qualified Path
import Text.Show ( Show(..) )
import Data.TieredList ( TieredList
, makeTieredList
, tieredSingleton
)
import Haze.Bencoding ( Bencoding(..)
, Decoder(..)
, DecodeError(..)
, decode
, encode
, encodeBen
)
import Haze.Bits ( Bits
, encodeIntegralN
, packBytes
, parseInt
)
import Haze.Config ( Port(..) )
| Represents the URL for a torrent Tracker
This distinguishes between the different types of
supported clients . UDPTracker comes with a pre split
link and port , ready for socket connection .
This distinguishes between the different types of
supported clients. UDPTracker comes with a pre split
link and port, ready for socket connection.
-}
data Tracker
= HTTPTracker !Text
| UDPTracker !Text !Text
| UnknownTracker !Text
deriving (Show)
| Try and get the type of tracker from a URL
Makes a decision based on the presence of udp:// or
http:// or https:// in the url .
Will fail completely if none of these is found .
Makes a decision based on the presence of udp:// or
http:// or https:// in the url.
Will fail completely if none of these is found.
-}
trackerFromURL :: Text -> Tracker
trackerFromURL t | T.isPrefixOf "udp://" t = udpFromURL t
| T.isPrefixOf "http://" t = HTTPTracker t
| T.isPrefixOf "https://" t = HTTPTracker t
| otherwise = UnknownTracker t
where
udpFromURL t' = fromMaybe (UnknownTracker t) $ do
unPrefix <- T.stripPrefix "udp://" t'
let (url, port) = T.span (/= ':') unPrefix
return (UDPTracker url (T.drop 1 port))
-- | Represents the MD5 sum of a file
newtype MD5Sum = MD5Sum ByteString deriving (Show)
| Represents a 20 byte SHA1 hash
newtype SHA1 = SHA1 { getSHA1 :: ByteString } deriving (Eq, Show)
-- | Construct a new SHA1 hash from the data
makeSHA1 :: ByteString -> SHA1
makeSHA1 = SHA1 . SHA1.hash
| Represents the concatenation of multiple SHA pieces .
The integer represents the length of each piece , and the bytestring
the concatenation of all the hashes
The integer represents the length of each piece, and the bytestring
the concatenation of all the SHA1 hashes
-}
data SHAPieces = SHAPieces Int64 ByteString deriving (Eq)
instance Show SHAPieces where
show (SHAPieces i _) = "SHAPieces " ++ Relude.show i ++ " (..bytes)"
-- | Check whether or not a given piece hashes to the right value
pieceHashesCorrectly :: SHAPieces -> Int -> ByteString -> Bool
pieceHashesCorrectly (SHAPieces _ hashes) piece pieceBytes =
let (SHA1 pieceHash) = makeSHA1 pieceBytes
slottedHash = BS.take 20 $ BS.drop (20 * piece) hashes
in pieceHash == slottedHash
{- | Represents the information in the `info` of a metainfo file
A torrent can contain either a single file, or multiple files,
and what each file contains in the multi file mode is different than
the single file.
-}
data FileInfo
-- | A single file, with name, length, and md5 sum
= SingleFile !FileItem
-- | Multiple files, with directory name
| MultiFile !(Path Rel Dir) ![FileItem]
deriving (Show)
-- | Returns the total length of all files in the torrent
totalFileLength :: FileInfo -> Int64
totalFileLength fileInfo = case fileInfo of
SingleFile item -> itemLength item
MultiFile _ items -> sum $ map itemLength items
where
itemLength :: FileItem -> Int64
itemLength (FileItem _ l _) = l
| A single file in a multi file torrent
The Raw representation is a list of paths , but we concatenate
and verify the validity of those as an actual relative file path
during parsing . For example [ " dir " , " file.ext " ] will become " dir / file.ext "
The Raw representation is a list of paths, but we concatenate
and verify the validity of those as an actual relative file path
during parsing. For example ["dir", "file.ext"] will become "dir/file.ext"
-}
data FileItem = FileItem !(Path Rel File) !Int64 !(Maybe MD5Sum) deriving (Show)
{- | Represents the information in a .torrent file
Contains information about the files contained in the
torrent, and the trackers to use to connect to peers
seeding those files.
-}
data MetaInfo = MetaInfo
{ metaPieces :: !SHAPieces
, metaPrivate :: !Bool
, metaFile :: !FileInfo
, metaInfoHash :: !SHA1
, metaAnnounce :: !Tracker
, metaAnnounceList :: !(Maybe (TieredList Tracker))
, metaCreation :: !(Maybe UTCTime)
, metaComment :: !(Maybe Text)
, metaCreatedBy :: !(Maybe Text)
, metaEncoding :: !(Maybe Text)
}
deriving (Show)
{- | Make a tiered list of trackers no matter what
If the announce list isn't present, there will be a single
tier with just the given trackers. If the tracker list
is present, the single tracker is ignored.
-}
squashedTrackers :: MetaInfo -> TieredList Tracker
squashedTrackers MetaInfo {..} =
fromMaybe (tieredSingleton metaAnnounce) metaAnnounceList
-- | Try and decode a meta file from a bytestring
metaFromBytes :: ByteString -> Either DecodeError MetaInfo
metaFromBytes bs =
decode decodeMeta bs
>>= maybe (Left (DecodeError "Bad MetaInfo file")) Right
-- | Get the total size (bytes) of all the files in a torrent
totalFileSize :: MetaInfo -> Int64
totalFileSize meta = totalFileLength $ metaFile meta
type BenMap = HM.HashMap ByteString Bencoding
decodeMeta :: Decoder (Maybe MetaInfo)
decodeMeta = Decoder doDecode
where
doDecode (BMap mp) = do
info <- HM.lookup "info" mp
(metaPieces, metaPrivate, metaFile) <- getInfo info
let metaInfoHash = makeSHA1 (encode encodeBen info)
announceURL <- withKey "announce" mp tryText
let metaAnnounce = trackerFromURL announceURL
let metaAnnounceList = getAnnounces "announce-list" mp
let metaCreation = withKey "creation date" mp tryDate
let metaComment = withKey "comment" mp tryText
let metaCreatedBy = withKey "created by" mp tryText
let metaEncoding = withKey "encoding" mp tryText
return (MetaInfo { .. })
doDecode _ = Nothing
getBool :: ByteString -> BenMap -> Bool
getBool k mp = case HM.lookup k mp of
Just (BInt 1) -> True
_ -> False
getAnnounces :: ByteString -> BenMap -> Maybe (TieredList Tracker)
getAnnounces k mp = makeTieredList
<$> withKey k mp (traverse getTrackers <=< tryList)
where
getTrackers :: Bencoding -> Maybe [Tracker]
getTrackers = traverse (fmap trackerFromURL . tryText) <=< tryList
tryDate :: Bencoding -> Maybe UTCTime
tryDate (BInt i) = Just . posixSecondsToUTCTime $ fromInteger (toInteger i)
tryDate _ = Nothing
getInfo :: Bencoding -> Maybe (SHAPieces, Bool, FileInfo)
getInfo (BMap mp) = do
let private = getBool "private" mp
pieceLen <- withKey "piece length" mp tryInt
pieceHash <- withKey "pieces" mp tryBS
let sha = SHAPieces pieceLen pieceHash
file <- case HM.lookup "files" mp of
Nothing -> getSingle mp
Just files -> getMulti mp files
return (sha, private, file)
getInfo _ = Nothing
getFilePart :: BenMap -> Maybe (Int64, Maybe MD5Sum)
getFilePart mp = do
len <- withKey "length" mp tryInt
let md5 = MD5Sum <$> withKey "md5sum" mp tryBS
return (len, md5)
getSingle :: BenMap -> Maybe FileInfo
getSingle mp = do
name <- withKey "name" mp tryPath
(len, md5) <- getFilePart mp
path <- Path.parseRelFile name
return (SingleFile (FileItem path len md5))
getMulti :: BenMap -> Bencoding -> Maybe FileInfo
getMulti mp (BList l) = do
name <- withKey "name" mp tryPath
files <- traverse getFileItem l
dir <- Path.parseRelDir name
return (MultiFile dir files)
getMulti _ _ = Nothing
getFileItem :: Bencoding -> Maybe FileItem
getFileItem (BMap mp) = do
(len, md5) <- getFilePart mp
rawParts <- withKey "path" mp tryList
strings <- traverse tryPath rawParts >>= nonEmpty
dirs <- traverse Path.parseRelDir (init strings)
file <- Path.parseRelFile (last strings)
let joinedPath = foldr (</>) file dirs
return (FileItem joinedPath len md5)
getFileItem _ = Nothing
-- | Information sent to the tracker about the state of the request
data ReqEvent
-- | The request has just started
= ReqStarted
-- | The request has stopped
| ReqStopped
-- | The request has successfully downloaded everything
| ReqCompleted
-- | No new information about the request
| ReqEmpty
deriving (Show)
{- | Represents information about the health of the request.
The tracker is interested in this information, as well as the user.
-}
data TrackStatus = TrackStatus
{ trackUp :: !Int64 -- | The total number of bytes uploaded
-- | The total number of bytes downloaded
, trackDown :: !Int64
-- | the total number of bytes in the file left to download
, trackLeft :: !Int64
}
deriving (Show)
| Create the first track status given the torrent file
firstTrackStatus :: MetaInfo -> TrackStatus
firstTrackStatus meta = TrackStatus 0 0 (totalFileSize meta)
-- | Represents the information in a request to a tracker
data TrackerRequest = TrackerRequest
{ treqInfoHash :: !SHA1
-- | Represents the peer id for this client
, treqPeerID :: !ByteString
, treqPort :: !PortNumber
, treqStatus :: !TrackStatus
-- | Whether or not the client expects a compact response
, treqCompact :: !Bool
-- | The current state of this ongoing request
, treqEvent :: !ReqEvent
, treqNumWant :: !(Maybe Int)
-- | This is to be included if the tracker sent it
, treqTransactionID :: !(Maybe ByteString)
}
deriving (Show)
-- | Constructs the tracker request to be used at the start of a session
newTrackerRequest :: Port -> MetaInfo -> ByteString -> TrackerRequest
newTrackerRequest (Port port) meta@MetaInfo {..} peerID = TrackerRequest
metaInfoHash
peerID
(fromIntegral port)
(firstTrackStatus meta)
True
ReqStarted
Nothing
Nothing
updateTransactionID :: Maybe ByteString -> TrackerRequest -> TrackerRequest
updateTransactionID transID treq =
treq { treqTransactionID = transID, treqEvent = ReqEmpty }
-- | Update the tracking status of a request
updateTrackStatus :: TrackStatus -> TrackerRequest -> TrackerRequest
updateTrackStatus status treq = treq { treqStatus = status }
-- | Encodes a 'TrackerRequest' as query parameters
trackerQuery :: TrackerRequest -> [(ByteString, Maybe ByteString)]
trackerQuery TrackerRequest {..} =
map (\(a, b) -> (a, Just b))
$ [ ("info_hash" , getSHA1 treqInfoHash)
, ("peer_id" , treqPeerID)
, ("port" , Relude.show treqPort)
, ("uploaded" , Relude.show (trackUp treqStatus))
, ("downloaded", Relude.show (trackDown treqStatus))
, ("left" , Relude.show (trackLeft treqStatus))
, ("compact" , if treqCompact then "1" else "0")
]
++ eventQuery
++ maybe [] (\i -> [("numwant", Relude.show i)]) treqNumWant
++ maybe [] (\s -> [("trackerid", s)]) treqTransactionID
where
eventQuery = case treqEvent of
ReqStarted -> [("event", "started")]
ReqStopped -> [("event", "stopped")]
ReqCompleted -> [("event", "completed")]
ReqEmpty -> []
| A UDP tracker will send this after a connection
Contains a transaction ID and a connection ID
Contains a transaction ID and a connection ID
-}
data UDPConnection = UDPConnection ByteString ByteString
parseUDPConn :: AP.Parser UDPConnection
parseUDPConn = do
_ <- AP.string "\0\0\0\0"
trans <- AP.take 4
conn <- AP.take 8
return (UDPConnection trans conn)
| Represents a request to a UDP tracker
data UDPTrackerRequest =
UDPTrackerRequest ByteString TrackerRequest
| Construct a new UDP request .
newUDPRequest :: Port -> MetaInfo -> ByteString -> UDPConnection -> UDPTrackerRequest
newUDPRequest port meta peerID (UDPConnection trans conn) =
let trackerReq = newTrackerRequest port meta peerID
withTrans = trackerReq { treqTransactionID = Just trans }
in UDPTrackerRequest conn withTrans
| Updates the transaction ID in a UDP request
updateUDPTransID :: ByteString -> UDPTrackerRequest -> UDPTrackerRequest
updateUDPTransID transID (UDPTrackerRequest c treq) =
UDPTrackerRequest c (updateTransactionID (Just transID) treq)
| Updates the connection ID in a UDP request
updateUDPConnID :: ByteString -> UDPTrackerRequest -> UDPTrackerRequest
updateUDPConnID connID (UDPTrackerRequest _ treq) =
UDPTrackerRequest connID treq
| Update the tracking status of a UDP request
updateUDPTrackStatus :: TrackStatus -> UDPTrackerRequest -> UDPTrackerRequest
updateUDPTrackStatus status (UDPTrackerRequest c treq) =
UDPTrackerRequest c (updateTrackStatus status treq)
a UDP request as a bytestring
encodeUDPRequest :: UDPTrackerRequest -> ByteString
encodeUDPRequest (UDPTrackerRequest conn TrackerRequest {..}) =
conn
<> "\0\0\0\1"
-- The upstream tracker won't like this
<> fromMaybe "\0\0\0\0" treqTransactionID
<> getSHA1 treqInfoHash
<> treqPeerID
<> pack64 (trackUp treqStatus)
<> pack64 (trackDown treqStatus)
<> pack64 (trackLeft treqStatus)
<> pack32 eventNum
-- The IP address we hardcode (default)
<> "\0\0\0\0"
-- This should be sufficiently unique
<> BS.drop 16 treqPeerID
<> pack32 (fromMaybe (-1) treqNumWant)
<> packPort treqPort
where
pack64 :: Int64 -> ByteString
pack64 = BS.pack . encodeIntegralN 8
pack32 :: (Bits i, Integral i) => i -> ByteString
pack32 = BS.pack . encodeIntegralN 4
packPort :: PortNumber -> ByteString
packPort p = BS.drop 2 (pack32 (fromIntegral p :: Int))
eventNum :: Int32
eventNum = case treqEvent of
ReqEmpty -> 0
ReqCompleted -> 1
ReqStarted -> 2
ReqStopped -> 3
-- | Represents the announce response from a tracker
data Announce
-- | The request to the tracker was bad
= FailedAnnounce !Text
| GoodAnnounce !AnnounceInfo
deriving (Show)
-- | The information of a successful announce response
data AnnounceInfo = AnnounceInfo
{ annWarning :: !(Maybe Text) -- ^ A warning message
, annInterval :: !Int -- ^ Seconds between requests
-- | If present, the client must not act more frequently
, annMinInterval :: !(Maybe Int)
, annTransactionID :: !(Maybe ByteString)
-- | The number of peers with the complete file
, annSeeders :: !(Maybe Int)
-- | The number of peers without the complete file
, annLeechers :: !(Maybe Int)
, annPeers :: ![Peer]
}
deriving (Show)
-- | Represents an identifier we share with other peers
newtype PeerID = PeerID ByteString deriving (Eq, Show)
-- | Get the bytestring form of a peer id
peerIDBytes :: PeerID -> ByteString
peerIDBytes (PeerID bytes) = bytes
| Generates a peer i d from scratch .
Note that this should be generated before the first interaction with
a tracker , and not at every interaction with the tracker .
Uses the Azureus style i d , with HZ as the prefix , and then appends
a UTC timestamp , before then taking only the first 20 bytes .
Note that this should be generated before the first interaction with
a tracker, and not at every interaction with the tracker.
Uses the Azureus style id, with HZ as the prefix, and then appends
a UTC timestamp, before then taking only the first 20 bytes.
-}
generatePeerID :: MonadIO m => m PeerID
generatePeerID = liftIO $ do
secs <- getSeconds
let whole = "-HZ010-" <> Relude.show secs
cut = BS.take 20 whole
return (PeerID cut)
where
getSeconds :: MonadIO m => m DiffTime
getSeconds = liftIO $ utctDayTime <$> getCurrentTime
| Represents a peer in the swarm
A Peer can be hashed , which will use it 's peerID ,
if it has one , and the host name .
A Peer can be hashed, which will use it's peerID,
if it has one, and the host name.
-}
data Peer = Peer
{ peerID :: !(Maybe PeerID)
, peerHost :: !HostName
, peerPort :: !PortNumber
}
deriving (Show)
instance Eq Peer where
(Peer idA hostA _) == (Peer idB hostB _) = idA == idB && hostA == hostB
instance Hashable Peer where
hashWithSalt salt (Peer peerID host _) =
salt `hashWithSalt` (peerIDBytes <$> peerID) `hashWithSalt` host
| This reads a bytestring announce from HTTP
HTTP and UDP trackers differ in that HTTP trackers
will send back a bencoded bytestring to read the
announce information from , but UDP trackers will
send a bytestring without bencoding .
This parses the bencoded bytestring from HTTP .
HTTP and UDP trackers differ in that HTTP trackers
will send back a bencoded bytestring to read the
announce information from, but UDP trackers will
send a bytestring without bencoding.
This parses the bencoded bytestring from HTTP.
-}
announceFromHTTP :: ByteString -> Either DecodeError Announce
announceFromHTTP bs =
decode decodeAnnounce bs
>>= maybe (Left (DecodeError "Bad Announce Data")) Right
| Decode a bytestring as a list of Peer addresses
decodeBinaryPeers :: ByteString -> Maybe [Peer]
decodeBinaryPeers bs | BS.length bs `mod` 6 /= 0 = Nothing
| otherwise = Just . map makeHostAndPort $ makeChunks 6 bs
where
makeChunks :: Int -> ByteString -> [ByteString]
makeChunks size body
| BS.null body = []
| otherwise = BS.take size body : makeChunks size (BS.drop size body)
makePeerHost :: ByteString -> String
makePeerHost chunk =
intercalate "." . map Relude.show $ BS.unpack (BS.take 4 chunk)
makePeerPort :: ByteString -> PortNumber
makePeerPort chunk =
-- this is safe because of when we call this
packBytes (BS.unpack (BS.drop 4 chunk))
makeHostAndPort :: ByteString -> Peer
makeHostAndPort chnk = Peer Nothing (makePeerHost chnk) (makePeerPort chnk)
| Parse Announce information from a UDP tracker
parseUDPAnnounce :: AP.Parser Announce
parseUDPAnnounce = do
_ <- AP.string "\0\0\0\1"
annTransactionID <- Just <$> AP.take 4
let annWarning = Nothing
annInterval <- parseInt
let annMinInterval = Nothing
annLeechers <- Just <$> parseInt
annSeeders <- Just <$> parseInt
rest <- AP.takeByteString
case decodeBinaryPeers rest of
Nothing -> fail "Failed to decode binary peers"
Just annPeers -> return (GoodAnnounce AnnounceInfo { .. })
| A decoder for the Announce data
decodeAnnounce :: Decoder (Maybe Announce)
decodeAnnounce = Decoder doDecode
where
doDecode :: Bencoding -> Maybe Announce
doDecode (BMap mp) = case HM.lookup "failure reason" mp of
Just (BString s) -> Just (FailedAnnounce (decodeUtf8 s))
Nothing -> do
info <- decodeAnnounceInfo mp
return (GoodAnnounce info)
Just _ -> Nothing
doDecode _ = Nothing
decodeAnnounceInfo :: BenMap -> Maybe AnnounceInfo
decodeAnnounceInfo mp = do
let annWarning = withKey "warning message" mp tryText
annInterval <- withKey "interval" mp tryNum
let annMinInterval = withKey "min interval" mp tryNum
let annTransactionID = withKey "tracker id" mp tryBS
let annSeeders = withKey "complete" mp tryNum
let annLeechers = withKey "incomplete" mp tryNum
pInfo <- HM.lookup "peers" mp
annPeers <- dictPeers pInfo <|> binPeers pInfo
return (AnnounceInfo { .. })
-- | Decode peers given in the dictionary (non compact) format.
dictPeers :: Bencoding -> Maybe [Peer]
dictPeers ben = tryList ben >>= traverse fromDict
  where
    -- Build a single peer from its bencoded dictionary; "ip" and "port"
    -- are required, the peer id is optional.
    fromDict :: Bencoding -> Maybe Peer
    fromDict (BMap mp) =
        Peer (PeerID <$> withKey "peer id" mp tryBS)
            <$> (BSC.unpack <$> withKey "ip" mp tryBS)
            <*> withKey "port" mp tryNum
    fromDict _ = Nothing
-- | Decode peers given in the compact binary format.
binPeers :: Bencoding -> Maybe [Peer]
binPeers ben = case ben of
    BString packed -> decodeBinaryPeers packed
    _              -> Nothing
{- Decoding utilities -}
-- | Look up a key in a bencoding map, then run a decoding step on it.
withKey :: ByteString -> BenMap -> (Bencoding -> Maybe a) -> Maybe a
withKey key mp decoder = decoder =<< HM.lookup key mp
-- | Extract an integer from a bencoded value.
tryInt :: Bencoding -> Maybe Int64
tryInt (BInt i) = Just i
tryInt _ = Nothing
-- | Extract a bencoded integer at any numeric type.
tryNum :: Num n => Bencoding -> Maybe n
tryNum (BInt i) = Just (fromInteger (toInteger i))
tryNum _ = Nothing
-- | Extract a raw bytestring.
tryBS :: Bencoding -> Maybe ByteString
tryBS (BString bs) = Just bs
tryBS _ = Nothing
-- | Extract a bytestring as a file path.
tryPath :: Bencoding -> Maybe FilePath
tryPath = fmap BSC.unpack . tryBS
-- | Extract a bytestring as UTF-8 decoded text.
tryText :: Bencoding -> Maybe Text
tryText = fmap decodeUtf8 . tryBS
-- | Extract a bencoded list.
tryList :: Bencoding -> Maybe [Bencoding]
tryList (BList l) = Just l
tryList _ = Nothing
| null | https://raw.githubusercontent.com/cronokirby/haze/3cfbc9de8d923a541429f4a5cb1eb4151d5aea08/src/Haze/Tracker.hs | haskell | | Represents the MD5 sum of a file
| Construct a new SHA1 hash from the data
| Check whether or not a given piece hashes to the right value
| Represents the information in the `info` of a metainfo file
A torrent can contain either a single file, or multiple files,
and what each file contains in the multi file mode is different than
the single file.
| A single file, with name, length, and md5 sum
| Multiple files, with directory name
| Returns the total length of all files in the torrent
| Represents the information in a .torrent file
Contains information about the files contained in the
torrent, and the trackers to use to connect to peers
seeding those files.
| Make a tiered list of trackers no matter what
If the announce list isn't present, there will be a single
tier with just the given trackers. If the tracker list
is present, the single tracker is ignored.
| Try and decode a meta file from a bytestring
| Get the total size (bytes) of all the files in a torrent
| Information sent to the tracker about the state of the request
| The request has just started
| The request has stopped
| The request has successfully downloaded everything
| No new information about the request
| Represents information about the health of the request.
The tracker is interested in this information, as well as the user.
| The total number of bytes uploaded
| The total number of bytes downloaded
| the total number of bytes in the file left to download
| Represents the information in a request to a tracker
| Represents the peer id for this client
| Whether or not the client expects a compact response
| The current state of this ongoing request
| This is to be included if the tracker sent it
| Constructs the tracker request to be used at the start of a session
| Update the tracking status of a request
| Encodes a 'TrackerRequest' as query parameters
The upstream tracker won't like this
The IP address we hardcode (default)
This should be sufficiently unique
| Represents the announce response from a tracker
| The request to the tracker was bad
| The information of a successful announce response
^ A warning message
^ Seconds between requests
| If present, the client must not act more frequently
| The number of peers with the complete file
| The number of peers without the complete file
| Represents an identifier we share with other peers
| Get the bytestring form of a peer id
this is safe because of when we call this
Decoding utilities | # LANGUAGE RecordWildCards #
|
Description : Contains functions related to trackers
This file provides a more abstract description of
the communication protocol with trackers . First it
specificies the data in a .torrent file with MetaInfo ,
then data sent to and returned from a tracker .
Description: Contains functions related to trackers
This file provides a more abstract description of
the communication protocol with trackers. First it
specificies the data in a .torrent file with MetaInfo,
then data sent to and returned from a tracker.
-}
module Haze.Tracker
( Tracker(..)
, TieredList
, MD5Sum(..)
, SHA1(..)
, SHAPieces(..)
, pieceHashesCorrectly
, FileInfo(..)
, FileItem(..)
, totalFileLength
, MetaInfo(..)
, totalFileSize
, squashedTrackers
, decodeMeta
, metaFromBytes
, UDPConnection(..)
, parseUDPConn
, UDPTrackerRequest
, newUDPRequest
, updateUDPTransID
, updateUDPConnID
, encodeUDPRequest
, Announce(..)
, AnnounceInfo(..)
, parseUDPAnnounce
, PeerID(..)
, peerIDBytes
, generatePeerID
, Peer(..)
, decodeAnnounce
, announceFromHTTP
, ReqEvent(..)
, TrackStatus(..)
, firstTrackStatus
, updateTrackStatus
, updateUDPTrackStatus
, TrackerRequest(..)
, newTrackerRequest
, updateTransactionID
, trackerQuery
)
where
import Relude
import qualified Crypto.Hash.SHA1 as SHA1
import qualified Data.Attoparsec.ByteString as AP
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BSC
import Data.Hashable ( Hashable(..) )
import qualified Data.HashMap.Strict as HM
import qualified Data.Text as T
import Data.Time.Clock ( DiffTime
, UTCTime
, getCurrentTime
, utctDayTime
)
import Data.Time.Clock.POSIX ( posixSecondsToUTCTime )
import Network.Socket ( HostName
, PortNumber
)
import Path ( Path
, Rel
, File
, Dir
, (</>)
)
import qualified Path
import Text.Show ( Show(..) )
import Data.TieredList ( TieredList
, makeTieredList
, tieredSingleton
)
import Haze.Bencoding ( Bencoding(..)
, Decoder(..)
, DecodeError(..)
, decode
, encode
, encodeBen
)
import Haze.Bits ( Bits
, encodeIntegralN
, packBytes
, parseInt
)
import Haze.Config ( Port(..) )
| Represents the URL for a torrent Tracker
This distinguishes between the different types of
supported clients . UDPTracker comes with a pre split
link and port , ready for socket connection .
This distinguishes between the different types of
supported clients. UDPTracker comes with a pre split
link and port, ready for socket connection.
-}
data Tracker
= HTTPTracker !Text
| UDPTracker !Text !Text
| UnknownTracker !Text
deriving (Show)
| Try and get the type of tracker from a URL
Makes a decision based on the presence of udp:// or
http:// or https:// in the url .
Will fail completely if none of these is found .
Makes a decision based on the presence of udp:// or
http:// or https:// in the url.
Will fail completely if none of these is found.
-}
-- | Classify a tracker URL by its scheme.
--
-- UDP URLs are split into host and port; http(s) URLs are kept whole;
-- anything else is marked unknown.
trackerFromURL :: Text -> Tracker
trackerFromURL url
    | "udp://" `T.isPrefixOf` url   = udp
    | "http://" `T.isPrefixOf` url  = HTTPTracker url
    | "https://" `T.isPrefixOf` url = HTTPTracker url
    | otherwise                     = UnknownTracker url
  where
    -- Strip the scheme and break "host:port" at the first colon.
    udp = case T.stripPrefix "udp://" url of
        Nothing       -> UnknownTracker url
        Just unPrefix ->
            let (host, port) = T.span (/= ':') unPrefix
            in  UDPTracker host (T.drop 1 port)
newtype MD5Sum = MD5Sum ByteString deriving (Show)
| Represents a 20 byte SHA1 hash
newtype SHA1 = SHA1 { getSHA1 :: ByteString } deriving (Eq, Show)
makeSHA1 :: ByteString -> SHA1
makeSHA1 = SHA1 . SHA1.hash
| Represents the concatenation of multiple SHA pieces .
The integer represents the length of each piece , and the bytestring
the concatenation of all the hashes
The integer represents the length of each piece, and the bytestring
the concatenation of all the SHA1 hashes
-}
data SHAPieces = SHAPieces Int64 ByteString deriving (Eq)
instance Show SHAPieces where
show (SHAPieces i _) = "SHAPieces " ++ Relude.show i ++ " (..bytes)"
-- | Check whether a downloaded piece matches its recorded SHA1 hash.
--
-- Each hash occupies 20 bytes in the concatenated hash block, so piece
-- @i@'s expected hash sits at byte offset @20 * i@.
pieceHashesCorrectly :: SHAPieces -> Int -> ByteString -> Bool
pieceHashesCorrectly (SHAPieces _ hashes) piece pieceBytes =
    getSHA1 (makeSHA1 pieceBytes) == expected
  where
    expected = BS.take 20 (BS.drop (20 * piece) hashes)
data FileInfo
= SingleFile !FileItem
| MultiFile !(Path Rel Dir) ![FileItem]
deriving (Show)
-- | Total byte length of every file described by the torrent.
totalFileLength :: FileInfo -> Int64
totalFileLength info = case info of
    SingleFile item   -> size item
    MultiFile _ items -> sum [size it | it <- items]
  where
    -- The length is the second component of a FileItem.
    size :: FileItem -> Int64
    size (FileItem _ len _) = len
| A single file in a multi file torrent
The Raw representation is a list of paths , but we concatenate
and verify the validity of those as an actual relative file path
during parsing . For example [ " dir " , " file.ext " ] will become " dir / file.ext "
The Raw representation is a list of paths, but we concatenate
and verify the validity of those as an actual relative file path
during parsing. For example ["dir", "file.ext"] will become "dir/file.ext"
-}
data FileItem = FileItem !(Path Rel File) !Int64 !(Maybe MD5Sum) deriving (Show)
data MetaInfo = MetaInfo
{ metaPieces :: !SHAPieces
, metaPrivate :: !Bool
, metaFile :: !FileInfo
, metaInfoHash :: !SHA1
, metaAnnounce :: !Tracker
, metaAnnounceList :: !(Maybe (TieredList Tracker))
, metaCreation :: !(Maybe UTCTime)
, metaComment :: !(Maybe Text)
, metaCreatedBy :: !(Maybe Text)
, metaEncoding :: !(Maybe Text)
}
deriving (Show)
-- | The tiered tracker list, falling back to the single announce URL
-- when no announce-list was present in the metainfo.
squashedTrackers :: MetaInfo -> TieredList Tracker
squashedTrackers meta = case metaAnnounceList meta of
    Just tiers -> tiers
    Nothing    -> tieredSingleton (metaAnnounce meta)
metaFromBytes :: ByteString -> Either DecodeError MetaInfo
metaFromBytes bs =
decode decodeMeta bs
>>= maybe (Left (DecodeError "Bad MetaInfo file")) Right
totalFileSize :: MetaInfo -> Int64
totalFileSize meta = totalFileLength $ metaFile meta
type BenMap = HM.HashMap ByteString Bencoding
decodeMeta :: Decoder (Maybe MetaInfo)
decodeMeta = Decoder doDecode
where
doDecode (BMap mp) = do
info <- HM.lookup "info" mp
(metaPieces, metaPrivate, metaFile) <- getInfo info
let metaInfoHash = makeSHA1 (encode encodeBen info)
announceURL <- withKey "announce" mp tryText
let metaAnnounce = trackerFromURL announceURL
let metaAnnounceList = getAnnounces "announce-list" mp
let metaCreation = withKey "creation date" mp tryDate
let metaComment = withKey "comment" mp tryText
let metaCreatedBy = withKey "created by" mp tryText
let metaEncoding = withKey "encoding" mp tryText
return (MetaInfo { .. })
doDecode _ = Nothing
getBool :: ByteString -> BenMap -> Bool
getBool k mp = case HM.lookup k mp of
Just (BInt 1) -> True
_ -> False
getAnnounces :: ByteString -> BenMap -> Maybe (TieredList Tracker)
getAnnounces k mp = makeTieredList
<$> withKey k mp (traverse getTrackers <=< tryList)
where
getTrackers :: Bencoding -> Maybe [Tracker]
getTrackers = traverse (fmap trackerFromURL . tryText) <=< tryList
tryDate :: Bencoding -> Maybe UTCTime
tryDate (BInt i) = Just . posixSecondsToUTCTime $ fromInteger (toInteger i)
tryDate _ = Nothing
getInfo :: Bencoding -> Maybe (SHAPieces, Bool, FileInfo)
getInfo (BMap mp) = do
let private = getBool "private" mp
pieceLen <- withKey "piece length" mp tryInt
pieceHash <- withKey "pieces" mp tryBS
let sha = SHAPieces pieceLen pieceHash
file <- case HM.lookup "files" mp of
Nothing -> getSingle mp
Just files -> getMulti mp files
return (sha, private, file)
getInfo _ = Nothing
getFilePart :: BenMap -> Maybe (Int64, Maybe MD5Sum)
getFilePart mp = do
len <- withKey "length" mp tryInt
let md5 = MD5Sum <$> withKey "md5sum" mp tryBS
return (len, md5)
getSingle :: BenMap -> Maybe FileInfo
getSingle mp = do
name <- withKey "name" mp tryPath
(len, md5) <- getFilePart mp
path <- Path.parseRelFile name
return (SingleFile (FileItem path len md5))
getMulti :: BenMap -> Bencoding -> Maybe FileInfo
getMulti mp (BList l) = do
name <- withKey "name" mp tryPath
files <- traverse getFileItem l
dir <- Path.parseRelDir name
return (MultiFile dir files)
getMulti _ _ = Nothing
getFileItem :: Bencoding -> Maybe FileItem
getFileItem (BMap mp) = do
(len, md5) <- getFilePart mp
rawParts <- withKey "path" mp tryList
strings <- traverse tryPath rawParts >>= nonEmpty
dirs <- traverse Path.parseRelDir (init strings)
file <- Path.parseRelFile (last strings)
let joinedPath = foldr (</>) file dirs
return (FileItem joinedPath len md5)
getFileItem _ = Nothing
data ReqEvent
= ReqStarted
| ReqStopped
| ReqCompleted
| ReqEmpty
deriving (Show)
-- | The health counters we report to the tracker about this request.
--
-- NOTE: the record previously opened with a comma (the first field line
-- was lost); 'trackUp' is restored here — it is read by 'trackerQuery'
-- and 'encodeUDPRequest'.
data TrackStatus = TrackStatus
    { trackUp   :: !Int64 -- ^ The total number of bytes uploaded
    , trackDown :: !Int64 -- ^ The total number of bytes downloaded
    , trackLeft :: !Int64 -- ^ The number of bytes left to download
    }
    deriving (Show)
| Create the first track status given the torrent file
firstTrackStatus :: MetaInfo -> TrackStatus
firstTrackStatus meta = TrackStatus 0 0 (totalFileSize meta)
data TrackerRequest = TrackerRequest
{ treqInfoHash :: !SHA1
, treqPeerID :: !ByteString
, treqPort :: !PortNumber
, treqStatus :: !TrackStatus
, treqCompact :: !Bool
, treqEvent :: !ReqEvent
, treqNumWant :: !(Maybe Int)
, treqTransactionID :: !(Maybe ByteString)
}
deriving (Show)
newTrackerRequest :: Port -> MetaInfo -> ByteString -> TrackerRequest
newTrackerRequest (Port port) meta@MetaInfo {..} peerID = TrackerRequest
metaInfoHash
peerID
(fromIntegral port)
(firstTrackStatus meta)
True
ReqStarted
Nothing
Nothing
updateTransactionID :: Maybe ByteString -> TrackerRequest -> TrackerRequest
updateTransactionID transID treq =
treq { treqTransactionID = transID, treqEvent = ReqEmpty }
updateTrackStatus :: TrackStatus -> TrackerRequest -> TrackerRequest
updateTrackStatus status treq = treq { treqStatus = status }
trackerQuery :: TrackerRequest -> [(ByteString, Maybe ByteString)]
trackerQuery TrackerRequest {..} =
map (\(a, b) -> (a, Just b))
$ [ ("info_hash" , getSHA1 treqInfoHash)
, ("peer_id" , treqPeerID)
, ("port" , Relude.show treqPort)
, ("uploaded" , Relude.show (trackUp treqStatus))
, ("downloaded", Relude.show (trackDown treqStatus))
, ("left" , Relude.show (trackLeft treqStatus))
, ("compact" , if treqCompact then "1" else "0")
]
++ eventQuery
++ maybe [] (\i -> [("numwant", Relude.show i)]) treqNumWant
++ maybe [] (\s -> [("trackerid", s)]) treqTransactionID
where
eventQuery = case treqEvent of
ReqStarted -> [("event", "started")]
ReqStopped -> [("event", "stopped")]
ReqCompleted -> [("event", "completed")]
ReqEmpty -> []
| A UDP tracker will send this after a connection
Contains a transaction ID and a connection ID
Contains a transaction ID and a connection ID
-}
data UDPConnection = UDPConnection ByteString ByteString
parseUDPConn :: AP.Parser UDPConnection
parseUDPConn = do
_ <- AP.string "\0\0\0\0"
trans <- AP.take 4
conn <- AP.take 8
return (UDPConnection trans conn)
| Represents a request to a UDP tracker
data UDPTrackerRequest =
UDPTrackerRequest ByteString TrackerRequest
| Construct a new UDP request .
newUDPRequest :: Port -> MetaInfo -> ByteString -> UDPConnection -> UDPTrackerRequest
newUDPRequest port meta peerID (UDPConnection trans conn) =
let trackerReq = newTrackerRequest port meta peerID
withTrans = trackerReq { treqTransactionID = Just trans }
in UDPTrackerRequest conn withTrans
| Updates the transaction ID in a UDP request
updateUDPTransID :: ByteString -> UDPTrackerRequest -> UDPTrackerRequest
updateUDPTransID transID (UDPTrackerRequest c treq) =
UDPTrackerRequest c (updateTransactionID (Just transID) treq)
| Updates the connection ID in a UDP request
updateUDPConnID :: ByteString -> UDPTrackerRequest -> UDPTrackerRequest
updateUDPConnID connID (UDPTrackerRequest _ treq) =
UDPTrackerRequest connID treq
| Update the tracking status of a UDP request
updateUDPTrackStatus :: TrackStatus -> UDPTrackerRequest -> UDPTrackerRequest
updateUDPTrackStatus status (UDPTrackerRequest c treq) =
UDPTrackerRequest c (updateTrackStatus status treq)
a UDP request as a bytestring
encodeUDPRequest :: UDPTrackerRequest -> ByteString
encodeUDPRequest (UDPTrackerRequest conn TrackerRequest {..}) =
conn
<> "\0\0\0\1"
<> fromMaybe "\0\0\0\0" treqTransactionID
<> getSHA1 treqInfoHash
<> treqPeerID
<> pack64 (trackUp treqStatus)
<> pack64 (trackDown treqStatus)
<> pack64 (trackLeft treqStatus)
<> pack32 eventNum
<> "\0\0\0\0"
<> BS.drop 16 treqPeerID
<> pack32 (fromMaybe (-1) treqNumWant)
<> packPort treqPort
where
pack64 :: Int64 -> ByteString
pack64 = BS.pack . encodeIntegralN 8
pack32 :: (Bits i, Integral i) => i -> ByteString
pack32 = BS.pack . encodeIntegralN 4
packPort :: PortNumber -> ByteString
packPort p = BS.drop 2 (pack32 (fromIntegral p :: Int))
eventNum :: Int32
eventNum = case treqEvent of
ReqEmpty -> 0
ReqCompleted -> 1
ReqStarted -> 2
ReqStopped -> 3
data Announce
= FailedAnnounce !Text
| GoodAnnounce !AnnounceInfo
deriving (Show)
-- | The information in a successful announce response.
--
-- NOTE: the record previously opened with a comma; the 'annWarning' and
-- 'annInterval' fields are restored here — both are bound by
-- 'decodeAnnounceInfo' and 'parseUDPAnnounce' via RecordWildCards.
data AnnounceInfo = AnnounceInfo
    { annWarning :: !(Maybe Text)   -- ^ A warning message, if any
    , annInterval :: !Int           -- ^ Seconds to wait between requests
    , annMinInterval :: !(Maybe Int)
    , annTransactionID :: !(Maybe ByteString)
    , annSeeders :: !(Maybe Int)
    , annLeechers :: !(Maybe Int)
    , annPeers :: ![Peer]
    }
    deriving (Show)
newtype PeerID = PeerID ByteString deriving (Eq, Show)
peerIDBytes :: PeerID -> ByteString
peerIDBytes (PeerID bytes) = bytes
| Generates a peer i d from scratch .
Note that this should be generated before the first interaction with
a tracker , and not at every interaction with the tracker .
Uses the Azureus style i d , with HZ as the prefix , and then appends
a UTC timestamp , before then taking only the first 20 bytes .
Note that this should be generated before the first interaction with
a tracker, and not at every interaction with the tracker.
Uses the Azureus style id, with HZ as the prefix, and then appends
a UTC timestamp, before then taking only the first 20 bytes.
-}
generatePeerID :: MonadIO m => m PeerID
generatePeerID = liftIO $ do
secs <- getSeconds
let whole = "-HZ010-" <> Relude.show secs
cut = BS.take 20 whole
return (PeerID cut)
where
getSeconds :: MonadIO m => m DiffTime
getSeconds = liftIO $ utctDayTime <$> getCurrentTime
| Represents a peer in the swarm
A Peer can be hashed , which will use it 's peerID ,
if it has one , and the host name .
A Peer can be hashed, which will use it's peerID,
if it has one, and the host name.
-}
data Peer = Peer
{ peerID :: !(Maybe PeerID)
, peerHost :: !HostName
, peerPort :: !PortNumber
}
deriving (Show)
instance Eq Peer where
(Peer idA hostA _) == (Peer idB hostB _) = idA == idB && hostA == hostB
instance Hashable Peer where
hashWithSalt salt (Peer peerID host _) =
salt `hashWithSalt` (peerIDBytes <$> peerID) `hashWithSalt` host
| This reads a bytestring announce from HTTP
HTTP and UDP trackers differ in that HTTP trackers
will send back a bencoded bytestring to read the
announce information from , but UDP trackers will
send a bytestring without bencoding .
This parses the bencoded bytestring from HTTP .
HTTP and UDP trackers differ in that HTTP trackers
will send back a bencoded bytestring to read the
announce information from, but UDP trackers will
send a bytestring without bencoding.
This parses the bencoded bytestring from HTTP.
-}
announceFromHTTP :: ByteString -> Either DecodeError Announce
announceFromHTTP bs =
decode decodeAnnounce bs
>>= maybe (Left (DecodeError "Bad Announce Data")) Right
| Decode a bytestring as a list of Peer addresses
decodeBinaryPeers :: ByteString -> Maybe [Peer]
decodeBinaryPeers bs | BS.length bs `mod` 6 /= 0 = Nothing
| otherwise = Just . map makeHostAndPort $ makeChunks 6 bs
where
makeChunks :: Int -> ByteString -> [ByteString]
makeChunks size body
| BS.null body = []
| otherwise = BS.take size body : makeChunks size (BS.drop size body)
makePeerHost :: ByteString -> String
makePeerHost chunk =
intercalate "." . map Relude.show $ BS.unpack (BS.take 4 chunk)
makePeerPort :: ByteString -> PortNumber
makePeerPort chunk =
packBytes (BS.unpack (BS.drop 4 chunk))
makeHostAndPort :: ByteString -> Peer
makeHostAndPort chnk = Peer Nothing (makePeerHost chnk) (makePeerPort chnk)
| Parse Announce information from a UDP tracker
parseUDPAnnounce :: AP.Parser Announce
parseUDPAnnounce = do
_ <- AP.string "\0\0\0\1"
annTransactionID <- Just <$> AP.take 4
let annWarning = Nothing
annInterval <- parseInt
let annMinInterval = Nothing
annLeechers <- Just <$> parseInt
annSeeders <- Just <$> parseInt
rest <- AP.takeByteString
case decodeBinaryPeers rest of
Nothing -> fail "Failed to decode binary peers"
Just annPeers -> return (GoodAnnounce AnnounceInfo { .. })
| A decoder for the Announce data
decodeAnnounce :: Decoder (Maybe Announce)
decodeAnnounce = Decoder doDecode
where
doDecode :: Bencoding -> Maybe Announce
doDecode (BMap mp) = case HM.lookup "failure reason" mp of
Just (BString s) -> Just (FailedAnnounce (decodeUtf8 s))
Nothing -> do
info <- decodeAnnounceInfo mp
return (GoodAnnounce info)
Just _ -> Nothing
doDecode _ = Nothing
decodeAnnounceInfo :: BenMap -> Maybe AnnounceInfo
decodeAnnounceInfo mp = do
let annWarning = withKey "warning message" mp tryText
annInterval <- withKey "interval" mp tryNum
let annMinInterval = withKey "min interval" mp tryNum
let annTransactionID = withKey "tracker id" mp tryBS
let annSeeders = withKey "complete" mp tryNum
let annLeechers = withKey "incomplete" mp tryNum
pInfo <- HM.lookup "peers" mp
annPeers <- dictPeers pInfo <|> binPeers pInfo
return (AnnounceInfo { .. })
dictPeers :: Bencoding -> Maybe [Peer]
dictPeers = tryList >=> traverse getPeer
where
getPeer :: Bencoding -> Maybe Peer
getPeer (BMap mp) = do
let peerID = PeerID <$> withKey "peer id" mp tryBS
peerHost <- BSC.unpack <$> withKey "ip" mp tryBS
peerPort <- withKey "port" mp tryNum
return (Peer { .. })
getPeer _ = Nothing
binPeers :: Bencoding -> Maybe [Peer]
binPeers (BString bs) = decodeBinaryPeers bs
binPeers _ = Nothing
withKey :: ByteString -> BenMap -> (Bencoding -> Maybe a) -> Maybe a
withKey k mp next = HM.lookup k mp >>= next
tryInt :: Bencoding -> Maybe Int64
tryInt (BInt i) = Just i
tryInt _ = Nothing
tryNum :: Num n => Bencoding -> Maybe n
tryNum (BInt i) = Just (fromInteger (toInteger i))
tryNum _ = Nothing
tryBS :: Bencoding -> Maybe ByteString
tryBS (BString bs) = Just bs
tryBS _ = Nothing
tryPath :: Bencoding -> Maybe FilePath
tryPath = fmap BSC.unpack . tryBS
tryText :: Bencoding -> Maybe Text
tryText = fmap decodeUtf8 . tryBS
tryList :: Bencoding -> Maybe [Bencoding]
tryList (BList l) = Just l
tryList _ = Nothing
|
81b8710cce39eed30e7a56f207e84e726cf30458b8ea038d2104beb3578a3675 | xu-hao/QueryArrow | BuiltIn.hs | # LANGUAGE FlexibleContexts , OverloadedStrings #
module QueryArrow.SQL.BuiltIn where
import QueryArrow.DB.GenericDatabase
import QueryArrow.Syntax.Term
import QueryArrow.SQL.SQL
import QueryArrow.Mapping
import Data.Namespace.Namespace
import Data.Text (unpack)
import qualified Data.Text as T
import Data.Map.Strict (fromList, keys)
import Text.Read
-- | Convert a raw text value into a SQL text expression.
--
-- Values already wrapped in parentheses pass through unchanged; values
-- already single-quoted are only parenthesized; anything else is both
-- quoted and parenthesized. The original indexed character 0
-- unconditionally, which throws on empty input; an empty value is now
-- rendered as an empty quoted literal instead.
convertTextToSQLText :: T.Text -> SQLExpr
convertTextToSQLText str
    | T.null str         = SQLExprText "(\'\')"
    | T.head str == '('  = SQLExprText (unpack str)
    | T.head str == '\'' = SQLExprText ("(" ++ unpack str ++ ")")
    | otherwise          = SQLExprText ("(\'" ++ unpack str ++ "\')")
-- | Build an infix SQL function application.
--
-- When both operands are query parameters their SQL types are unknown,
-- so each side is cast to integer before applying the operator
-- (callers use this for the arithmetic builtins); otherwise the
-- operands are used as-is.
aSQLInfixFuncExpr :: String -> SQLExpr -> SQLExpr -> SQLExpr
aSQLInfixFuncExpr func a@(SQLParamExpr _) b@(SQLParamExpr _) =
  SQLInfixFuncExpr func (SQLCastExpr a "integer") (SQLCastExpr b "integer")
aSQLInfixFuncExpr func a b =
  SQLInfixFuncExpr func a b
sqlBuiltIn :: (String -> PredName) -> BuiltIn
sqlBuiltIn lookupPred =
BuiltIn ( fromList [
(lookupPred "le", simpleBuildIn "le" (\args ->
return (swhere (SQLCompCond "<=" (head args) (args !! 1))))),
(lookupPred "lt", simpleBuildIn "lt" (\ args ->
return (swhere (SQLCompCond "<" (head args) (args !! 1))))),
(lookupPred "eq", \args -> do
sqlExprs <- mapM sqlExprFromArg args
case sqlExprs of
[Left a, Left b] ->
return (swhere (SQLCompCond "=" a b))
[Left a, Right v] -> case a of
SQLParamExpr _ ->
error "eq: unsupported arguments, the first argument cannot be a param"
_ -> do
addVarRep v a
return mempty
_ -> error "eq: unsupported arguments, only the second argument can be unbounded"),
(lookupPred "ge", simpleBuildIn "le" (\args ->
return (swhere (SQLCompCond ">=" (head args) (args !! 1))))),
(lookupPred "gt", simpleBuildIn "lt" (\args ->
return (swhere (SQLCompCond ">" (head args) (args !! 1))))),
(lookupPred "ne", \args -> do
sqlExprs <- mapM sqlExprFromArg args
case sqlExprs of
[Left a, Left b] ->
return (swhere (SQLCompCond "<>" a b))
_ -> error "ne: unsupported arguments, no argument can be unbounded"),
(lookupPred "like", simpleBuildIn "like" (\args ->
return (swhere (SQLCompCond "LIKE" (head args) (args !! 1))))),
(lookupPred "not_like", simpleBuildIn "not_like" (\args ->
return (swhere (SQLCompCond "NOT LIKE" (head args) (args !! 1))))),
(lookupPred "like_regex", simpleBuildIn "like_regex" (\ args ->
return (swhere (SQLCompCond "~" (head args) (args !! 1))))),
(lookupPred "not_like_regex", simpleBuildIn "not_like_regex" (\ args ->
return (swhere (SQLCompCond "!~" (head args) (args !! 1))))),
(lookupPred "in", simpleBuildIn "in" (\args ->
let sql = swhere (case args !! 1 of
-- SQLStringConstExpr str | T.length str == 0 || str `T.index` 0 /= '{' -> SQLCompCond "in" (head args) (convertTextToSQLText str)
_ -> SQLCompCond "=" (head args) (SQLFuncExpr "ANY" [args !! 1]))
in
return sql)),
(lookupPred "add", repBuildIn (\ [Left a, Left b, Right v] -> [(v, aSQLInfixFuncExpr "+" a b)]
)),
(lookupPred "sub", repBuildIn (\ [Left a, Left b, Right v] -> [(v, aSQLInfixFuncExpr "-" a b)]
)),
(lookupPred "mul", repBuildIn (\ [Left a, Left b, Right v] -> [(v, aSQLInfixFuncExpr "*" a b)]
)),
(lookupPred "div", repBuildIn (\ [Left a, Left b, Right v] -> [(v, aSQLInfixFuncExpr "/" a b)]
)),
(lookupPred "mod", repBuildIn (\ [Left a, Left b, Right v] -> [(v, aSQLInfixFuncExpr "%" a b)]
)),
(lookupPred "exp", repBuildIn (\ [Left a, Left b, Right v] -> [(v, aSQLInfixFuncExpr "^" a b)]
)),
(lookupPred "concat", repBuildIn (\ [Left a, Left b, Right v] -> [(v, SQLInfixFuncExpr "||" a b)]
)),
(lookupPred "substr", repBuildIn (\ [Left a, Left b, Left c, Right v] -> [(v, SQLFuncExpr "substr" [a, b, c])]
)),
(lookupPred "replace", repBuildIn (\ [Left a, Left b, Left c, Right v] -> [(v, SQLFuncExpr "replace" [a, b, c])]
)),
(lookupPred "regex_replace", repBuildIn (\ [Left a, Left b, Left c, Right v] -> [(v, SQLFuncExpr "regexp_replace" [a, b, c, SQLStringConstExpr "g"])]
)),
(lookupPred "strlen", repBuildIn (\ [Left a, Right v] -> [(v, SQLFuncExpr "length" [a])]
))
])
| null | https://raw.githubusercontent.com/xu-hao/QueryArrow/4dd5b8a22c8ed2d24818de5b8bcaa9abc456ef0d/QueryArrow-db-sql-common/src/QueryArrow/SQL/BuiltIn.hs | haskell | SQLStringConstExpr str | T.length str == 0 || str `T.index` 0 /= '{' -> SQLCompCond "in" (head args) (convertTextToSQLText str) | # LANGUAGE FlexibleContexts , OverloadedStrings #
module QueryArrow.SQL.BuiltIn where
import QueryArrow.DB.GenericDatabase
import QueryArrow.Syntax.Term
import QueryArrow.SQL.SQL
import QueryArrow.Mapping
import Data.Namespace.Namespace
import Data.Text (unpack)
import qualified Data.Text as T
import Data.Map.Strict (fromList, keys)
import Text.Read
convertTextToSQLText :: T.Text -> SQLExpr
convertTextToSQLText str = SQLExprText (if str `T.index` 0 /= '(' then if str `T.index` 0 /= '\'' then "(\'" ++ unpack str ++ "\')" else "(" ++ unpack str ++ ")" else unpack str)
aSQLInfixFuncExpr :: String -> SQLExpr -> SQLExpr -> SQLExpr
aSQLInfixFuncExpr func a@(SQLParamExpr _) b@(SQLParamExpr _) =
SQLInfixFuncExpr func (SQLCastExpr a "integer") (SQLCastExpr b "integer")
aSQLInfixFuncExpr func a b =
SQLInfixFuncExpr func a b
-- | Built-in predicate table for the SQL backend.
--
-- Maps abstract predicate names (resolved via @lookupPred@) onto SQL
-- generators: comparison predicates produce WHERE conditions, while the
-- arithmetic and string predicates bind their (unbound) result variable
-- to a SQL expression through 'repBuildIn'.
sqlBuiltIn :: (String -> PredName) -> BuiltIn
sqlBuiltIn lookupPred =
   BuiltIn ( fromList [
       (lookupPred "le", simpleBuildIn "le" (\args ->
           return (swhere (SQLCompCond "<=" (head args) (args !! 1))))),
       (lookupPred "lt", simpleBuildIn "lt" (\ args ->
           return (swhere (SQLCompCond "<" (head args) (args !! 1))))),
       -- "eq" also supports binding: when the second argument is an
       -- unbound variable it is bound to the first argument's value
       -- instead of emitting a comparison.
       (lookupPred "eq", \args -> do
           sqlExprs <- mapM sqlExprFromArg args
           case sqlExprs of
               [Left a, Left b] ->
                   return (swhere (SQLCompCond "=" a b))
               [Left a, Right v] -> case a of
                   SQLParamExpr _ ->
                       error "eq: unsupported arguments, the first argument cannot be a param"
                   _ -> do
                       addVarRep v a
                       return mempty
               _ -> error "eq: unsupported arguments, only the second argument can be unbounded"),
       -- BUG FIX: these two entries were labelled "le"/"lt" (copy-paste
       -- from the entries above); the labels now match the predicates.
       (lookupPred "ge", simpleBuildIn "ge" (\args ->
           return (swhere (SQLCompCond ">=" (head args) (args !! 1))))),
       (lookupPred "gt", simpleBuildIn "gt" (\args ->
           return (swhere (SQLCompCond ">" (head args) (args !! 1))))),
       (lookupPred "ne", \args -> do
           sqlExprs <- mapM sqlExprFromArg args
           case sqlExprs of
               [Left a, Left b] ->
                   return (swhere (SQLCompCond "<>" a b))
               _ -> error "ne: unsupported arguments, no argument can be unbounded"),
       (lookupPred "like", simpleBuildIn "like" (\args ->
           return (swhere (SQLCompCond "LIKE" (head args) (args !! 1))))),
       (lookupPred "not_like", simpleBuildIn "not_like" (\args ->
           return (swhere (SQLCompCond "NOT LIKE" (head args) (args !! 1))))),
       (lookupPred "like_regex", simpleBuildIn "like_regex" (\ args ->
           return (swhere (SQLCompCond "~" (head args) (args !! 1))))),
       (lookupPred "not_like_regex", simpleBuildIn "not_like_regex" (\ args ->
           return (swhere (SQLCompCond "!~" (head args) (args !! 1))))),
       -- Membership test: x = ANY(collection).  (A vacuous one-armed
       -- case around the second argument was removed; it had no effect.)
       (lookupPred "in", simpleBuildIn "in" (\args ->
           return (swhere (SQLCompCond "=" (head args)
                               (SQLFuncExpr "ANY" [args !! 1]))))),
       -- Arithmetic: both operands bound, result variable unbound.
       -- 'aSQLInfixFuncExpr' inserts integer casts when both operands
       -- are parameters.
       (lookupPred "add", repBuildIn (\ [Left a, Left b, Right v] -> [(v, aSQLInfixFuncExpr "+" a b)]
           )),
       (lookupPred "sub", repBuildIn (\ [Left a, Left b, Right v] -> [(v, aSQLInfixFuncExpr "-" a b)]
           )),
       (lookupPred "mul", repBuildIn (\ [Left a, Left b, Right v] -> [(v, aSQLInfixFuncExpr "*" a b)]
           )),
       (lookupPred "div", repBuildIn (\ [Left a, Left b, Right v] -> [(v, aSQLInfixFuncExpr "/" a b)]
           )),
       (lookupPred "mod", repBuildIn (\ [Left a, Left b, Right v] -> [(v, aSQLInfixFuncExpr "%" a b)]
           )),
       (lookupPred "exp", repBuildIn (\ [Left a, Left b, Right v] -> [(v, aSQLInfixFuncExpr "^" a b)]
           )),
       -- String predicates map directly onto SQL functions/operators.
       (lookupPred "concat", repBuildIn (\ [Left a, Left b, Right v] -> [(v, SQLInfixFuncExpr "||" a b)]
           )),
       (lookupPred "substr", repBuildIn (\ [Left a, Left b, Left c, Right v] -> [(v, SQLFuncExpr "substr" [a, b, c])]
           )),
       (lookupPred "replace", repBuildIn (\ [Left a, Left b, Left c, Right v] -> [(v, SQLFuncExpr "replace" [a, b, c])]
           )),
       -- The "g" flag asks regexp_replace to replace all occurrences.
       (lookupPred "regex_replace", repBuildIn (\ [Left a, Left b, Left c, Right v] -> [(v, SQLFuncExpr "regexp_replace" [a, b, c, SQLStringConstExpr "g"])]
           )),
       (lookupPred "strlen", repBuildIn (\ [Left a, Right v] -> [(v, SQLFuncExpr "length" [a])]
           ))
       ])
|
6cad5874065083b460d8144e2575d0bbe360fc36df45648904dcf6edc58e3ec6 | alesaccoia/festival_flinger | cmu_us_fem_f0model.scm | ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; ;;;
Carnegie Mellon University ; ; ;
and and ; ; ;
Copyright ( c ) 1998 - 2000 ; ; ;
All Rights Reserved . ; ; ;
;;; ;;;
;;; Permission is hereby granted, free of charge, to use and distribute ;;;
;;; this software and its documentation without restriction, including ;;;
;;; without limitation the rights to use, copy, modify, merge, publish, ;;;
;;; distribute, sublicense, and/or sell copies of this work, and to ;;;
;;; permit persons to whom this work is furnished to do so, subject to ;;;
;;; the following conditions: ;;;
1 . The code must retain the above copyright notice , this list of ; ; ;
;;; conditions and the following disclaimer. ;;;
2 . Any modifications must be clearly marked as such . ; ; ;
3 . Original authors ' names are not deleted . ; ; ;
4 . The authors ' names are not used to endorse or promote products ; ; ;
;;; derived from this software without specific prior written ;;;
;;; permission. ;;;
;;; ;;;
CARNEGIE MELLON UNIVERSITY AND THE CONTRIBUTORS TO THIS WORK ; ; ;
;;; DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ;;;
;;; ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT ;;;
SHALL CARNEGIE MELLON UNIVERSITY NOR THE CONTRIBUTORS BE LIABLE ; ; ;
;;; FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES ;;;
WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , IN ; ; ;
;;; AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ;;;
;;; ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF ;;;
;;; THIS SOFTWARE. ;;;
;;; ;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
F0 model for English
;;;
;;; Load any necessary files here
(require 'f2bf0lr)
;; Linear-regression intonation parameters for this voice.  The model_*
;; values are the F0 statistics of the voice the LR model was trained
;; on; the target_* values are this speaker's statistics, used to map
;; predicted F0 onto the speaker's range.
(set! cmu_us_fem_int_lr_params
    '(
   ;; These numbers may be modified to match the speaker's range.
   (target_f0_mean 105) ;; speaker's mean F0 (Hz)
   (target_f0_std 14) ;; speaker's F0 standard deviation (range)
   ;; These numbers should remain as they are (training-data statistics).
   (model_f0_mean 170)
   (model_f0_std 34)))
(define (cmu_us_fem::select_f0model)
  "(cmu_us_fem::select_f0model)
Set up the F0 model for English."
  ;; Use the f2b linear-regression feature sets for the start, mid and
  ;; end F0 target points of each syllable (loaded from f2bf0lr above).
  (set! f0_lr_start f2b_f0_lr_start)
  (set! f0_lr_mid f2b_f0_lr_mid)
  (set! f0_lr_end f2b_f0_lr_end)
  ;; Rescale model predictions into this speaker's range.
  (set! int_lr_params cmu_us_fem_int_lr_params)
  (Parameter.set 'Int_Target_Method Int_Targets_LR)
  )
(define (cmu_us_fem::reset_f0model)
  "(cmu_us_fem::reset_f0model)
Reset F0 model information."
  ;; Nothing needs undoing; return t to signal success.
  t
  )
(provide 'cmu_us_fem_f0model)
| null | https://raw.githubusercontent.com/alesaccoia/festival_flinger/87345aad3a3230751a8ff479f74ba1676217accd/lib/voices/us/cmu_us_fem_cg/festvox/cmu_us_fem_f0model.scm | scheme |
;;;
; ;
; ;
; ;
; ;
;;;
Permission is hereby granted, free of charge, to use and distribute ;;;
this software and its documentation without restriction, including ;;;
without limitation the rights to use, copy, modify, merge, publish, ;;;
distribute, sublicense, and/or sell copies of this work, and to ;;;
permit persons to whom this work is furnished to do so, subject to ;;;
the following conditions: ;;;
; ;
conditions and the following disclaimer. ;;;
; ;
; ;
; ;
derived from this software without specific prior written ;;;
permission. ;;;
;;;
; ;
DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ;;;
ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT ;;;
; ;
FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES ;;;
; ;
AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ;;;
ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF ;;;
THIS SOFTWARE. ;;;
;;;
Load any necessary files here
These numbers may be modified the speakers range.
speaker's mean F0
speaker's range
These number should remain as they are | F0 model for English
(require 'f2bf0lr)
(set! cmu_us_fem_int_lr_params
'(
(model_f0_mean 170)
(model_f0_std 34)))
(define (cmu_us_fem::select_f0model)
"(cmu_us_fem::select_f0model)
Set up the F0 model for English."
(set! f0_lr_start f2b_f0_lr_start)
(set! f0_lr_mid f2b_f0_lr_mid)
(set! f0_lr_end f2b_f0_lr_end)
(set! int_lr_params cmu_us_fem_int_lr_params)
(Parameter.set 'Int_Target_Method Int_Targets_LR)
)
(define (cmu_us_fem::reset_f0model)
"(cmu_us_fem::reset_f0model)
Reset F0 model information."
t
)
(provide 'cmu_us_fem_f0model)
|
6808b49d345740d5bf348f2768d1b06c31d68a01a8a99e5b7c39e7407e451f79 | higherkindness/mu-haskell | Rpc.hs | # language ConstraintKinds #
# language DataKinds #
# language ExistentialQuantification #
{-# language FlexibleInstances #-}
# language GADTs #
# language MultiParamTypeClasses #
# language OverloadedStrings #
# language PolyKinds #
{-# language ScopedTypeVariables #-}
# language TypeApplications #
# language TypeFamilies #
{-# language TypeOperators #-}
# language UndecidableInstances #
{-|
Description : Protocol-independent declaration of services
This module defines a type-level language to describe
RPC-like microservices independently of the transport
and protocol.
-}
module Mu.Rpc (
Package', Package(..)
, Service', Service(..), Object, Union
, Method', Method(..), ObjectField
, LookupService, LookupMethod
, TypeRef(..), Argument', Argument(..), Return(..)
, TyInfo(..), RpcInfo(..), ReflectRpcInfo(..)
) where
import Data.Kind
import Data.Text (Text)
import qualified Data.Text as T
import GHC.TypeLits
import qualified Language.Haskell.TH as TH
import Network.HTTP.Types.Header
import Type.Reflection
import Mu.Schema
import Mu.Schema.Registry
-- | Packages whose names are given by type-level strings.
type Package' = Package Symbol Symbol Symbol (TypeRef Symbol)
-- | Services whose names are given by type-level strings.
type Service' = Service Symbol Symbol Symbol (TypeRef Symbol)
-- | Methods whose names are given by type-level strings.
type Method' = Method Symbol Symbol Symbol (TypeRef Symbol)
-- | Arguments whose names are given by type-level strings.
type Argument' = Argument Symbol Symbol (TypeRef Symbol)
-- | A package is a set of services.
data Package serviceName methodName argName tyRef
= Package (Maybe serviceName)
[Service serviceName methodName argName tyRef]
-- | A service is a set of methods.
data Service serviceName methodName argName tyRef
= Service serviceName
[Method serviceName methodName argName tyRef]
| OneOf serviceName [serviceName]
-- | A method is defined by its name, arguments, and return type.
data Method serviceName methodName argName tyRef
= Method methodName
[Argument serviceName argName tyRef]
(Return serviceName tyRef)
-- Synonyms for GraphQL
| An object is a set of fields , in GraphQL lingo .
type Object = 'Service
-- | A union is one of the objects.
type Union = 'OneOf
-- | A field in an object takes some input objects,
-- and returns a value or some other object,
in GraphQL lingo .
type ObjectField = 'Method
-- | Look up a service in a package definition using its name.
--   Walks the type-level service list; an exhausted list triggers a
--   custom compile-time 'TypeError'.
type family LookupService (ss :: [Service snm mnm anm tr]) (s :: snm)
            :: Service snm mnm anm tr where
  -- BUG FIX: the error text previously said "could not find method",
  -- copy-pasted from 'LookupMethod'; this family looks up a *service*.
  LookupService '[] s = TypeError ('Text "could not find service " ':<>: 'ShowType s)
  LookupService ('Service s ms ': ss) s = 'Service s ms
  LookupService ('OneOf s ms ': ss) s = 'OneOf s ms
  LookupService (other ': ss) s = LookupService ss s
-- | Look up a method in a service definition using its name.
--   Walks the type-level method list; an exhausted list triggers a
--   custom compile-time 'TypeError'.
type family LookupMethod (s :: [Method snm mnm anm tr]) (m :: mnm)
            :: Method snm mnm anm tr where
  LookupMethod '[] m = TypeError ('Text "could not find method " ':<>: 'ShowType m)
  LookupMethod ('Method m args r ': ms) m = 'Method m args r
  LookupMethod (other ': ms) m = LookupMethod ms m
-- | Defines a reference to a type, either primitive or coming from the schema.
' TypeRef 's are used to define arguments and result types .
data TypeRef serviceName where
-- | A primitive type.
PrimitiveRef :: Type -> TypeRef serviceName
-- | Chain with another service.
ObjectRef :: serviceName -> TypeRef serviceName
-- | Point to schema.
SchemaRef :: Schema typeName fieldName -> typeName -> TypeRef serviceName
-- | Registry subject, type to convert to, and preferred serialization version
RegistryRef :: Registry -> Type -> Nat -> TypeRef serviceName
-- | To be used only during TH generation!
THRef :: TH.Type -> TypeRef serviceName
Combinators found in the gRPC and GraphQL languages .
-- | Represents a list of values.
ListRef :: TypeRef serviceName -> TypeRef serviceName
-- | Represents a possibly-missing value.
OptionalRef :: TypeRef serviceName -> TypeRef serviceName
instance Show (TypeRef s) where
show _ = "ty"
-- | Defines the way in which arguments are handled.
data Argument serviceName argName tyRef where
-- | Use a single value.
ArgSingle :: Maybe argName
-> tyRef
-> Argument serviceName argName tyRef
-- | Consume a stream of values.
ArgStream :: Maybe argName
-> tyRef
-> Argument serviceName argName tyRef
-- | Defines the different possibilities for returning
-- information from a method.
data Return serviceName tyRef where
-- | Fire and forget.
RetNothing :: Return serviceName tyRef
-- | Return a single value.
RetSingle :: tyRef -> Return serviceName tyRef
-- | Return a stream of values.
RetStream :: tyRef -> Return serviceName tyRef
-- | Return a value or an error.
RetThrows :: tyRef -> tyRef -> Return serviceName tyRef
-- | Reflection
data RpcInfo i
= NoRpcInfo
| RpcInfo { packageInfo :: Package Text Text Text TyInfo
, serviceInfo :: Service Text Text Text TyInfo
, methodInfo :: Maybe (Method Text Text Text TyInfo)
, headers :: RequestHeaders
, extraInfo :: i
}
data TyInfo
= TyList TyInfo
| TyOption TyInfo
| TyTy Text
deriving (Show, Eq)
-- Renders as "pkg:Service:method", omitting the package prefix and/or
-- method suffix when they are absent.
instance Show (RpcInfo i) where
  show NoRpcInfo
    = "<no info>"
  show (RpcInfo (Package mbPkg _) svc mbMth _ _)
    = T.unpack (prefix <> svcName <> suffix)
    where
      prefix  = maybe "" (<> ":") mbPkg
      suffix  = maybe "" (\(Method mt _ _) -> ":" <> mt) mbMth
      svcName = case svc of
                  Service sv _ -> sv
                  OneOf sv _   -> sv
class ReflectRpcInfo (p :: Package') (s :: Service') (m :: Method') where
reflectRpcInfo :: Proxy p -> Proxy s -> Proxy m -> RequestHeaders -> i -> RpcInfo i
class ReflectService (s :: Service') where
reflectService :: Proxy s -> Service Text Text Text TyInfo
class ReflectMethod (m :: Method') where
reflectMethod :: Proxy m -> Method Text Text Text TyInfo
class ReflectArg (arg :: Argument') where
reflectArg :: Proxy arg -> Argument Text Text TyInfo
class ReflectReturn (r :: Return Symbol (TypeRef Symbol)) where
reflectReturn :: Proxy r -> Return Text TyInfo
class ReflectTyRef (r :: TypeRef Symbol) where
reflectTyRef :: Proxy r -> TyInfo
class KnownMaySymbol (m :: Maybe Symbol) where
maySymbolVal :: Proxy m -> Maybe Text
instance KnownMaySymbol 'Nothing where
maySymbolVal _ = Nothing
instance (KnownSymbol s) => KnownMaySymbol ('Just s) where
maySymbolVal _ = Just $ T.pack $ symbolVal (Proxy @s)
class KnownSymbols (m :: [Symbol]) where
symbolsVal :: Proxy m -> [Text]
instance KnownSymbols '[] where
symbolsVal _ = []
instance (KnownSymbol s, KnownSymbols ss) => KnownSymbols (s ': ss) where
symbolsVal _ = T.pack (symbolVal (Proxy @s)) : symbolsVal (Proxy @ss)
class ReflectServices (ss :: [Service']) where
reflectServices :: Proxy ss -> [Service Text Text Text TyInfo]
instance ReflectServices '[] where
reflectServices _ = []
instance (ReflectService s, ReflectServices ss)
=> ReflectServices (s ': ss) where
reflectServices _ = reflectService (Proxy @s) : reflectServices (Proxy @ss)
class ReflectMethods (ms :: [Method']) where
reflectMethods :: Proxy ms -> [Method Text Text Text TyInfo]
instance ReflectMethods '[] where
reflectMethods _ = []
instance (ReflectMethod m, ReflectMethods ms)
=> ReflectMethods (m ': ms) where
reflectMethods _ = reflectMethod (Proxy @m) : reflectMethods (Proxy @ms)
class ReflectArgs (ms :: [Argument']) where
reflectArgs :: Proxy ms -> [Argument Text Text TyInfo]
instance ReflectArgs '[] where
reflectArgs _ = []
instance (ReflectArg m, ReflectArgs ms)
=> ReflectArgs (m ': ms) where
reflectArgs _ = reflectArg (Proxy @m) : reflectArgs (Proxy @ms)
instance (KnownMaySymbol pname, ReflectServices ss, ReflectService s, ReflectMethod m)
=> ReflectRpcInfo ('Package pname ss) s m where
reflectRpcInfo _ ps pm req extra
= RpcInfo (Package (maySymbolVal (Proxy @pname))
(reflectServices (Proxy @ss)))
(reflectService ps) (Just (reflectMethod pm)) req extra
instance (KnownSymbol sname, ReflectMethods ms)
=> ReflectService ('Service sname ms) where
reflectService _
= Service (T.pack $ symbolVal (Proxy @sname))
(reflectMethods (Proxy @ms))
instance (KnownSymbol sname, KnownSymbols elts)
=> ReflectService ('OneOf sname elts) where
reflectService _
= OneOf (T.pack $ symbolVal (Proxy @sname))
(symbolsVal (Proxy @elts))
instance (KnownSymbol mname, ReflectArgs args, ReflectReturn r)
=> ReflectMethod ('Method mname args r) where
reflectMethod _
= Method (T.pack $ symbolVal (Proxy @mname))
(reflectArgs (Proxy @args)) (reflectReturn (Proxy @r))
instance (KnownMaySymbol aname, ReflectTyRef t)
=> ReflectArg ('ArgSingle aname t) where
reflectArg _
= ArgSingle (maySymbolVal (Proxy @aname)) (reflectTyRef (Proxy @t))
instance (KnownMaySymbol aname, ReflectTyRef t)
=> ReflectArg ('ArgStream aname t) where
reflectArg _
= ArgStream (maySymbolVal (Proxy @aname)) (reflectTyRef (Proxy @t))
instance ReflectReturn 'RetNothing where
reflectReturn _ = RetNothing
instance (ReflectTyRef t)
=> ReflectReturn ('RetSingle t) where
reflectReturn _ = RetSingle (reflectTyRef (Proxy @t))
instance (ReflectTyRef t)
=> ReflectReturn ('RetStream t) where
reflectReturn _ = RetStream (reflectTyRef (Proxy @t))
instance (ReflectTyRef e, ReflectTyRef t)
=> ReflectReturn ('RetThrows e t) where
reflectReturn _ = RetThrows (reflectTyRef (Proxy @e))
(reflectTyRef (Proxy @t))
instance ReflectTyRef t => ReflectTyRef ('ListRef t) where
reflectTyRef _ = TyList (reflectTyRef (Proxy @t))
instance ReflectTyRef t => ReflectTyRef ('OptionalRef t) where
reflectTyRef _ = TyOption (reflectTyRef (Proxy @t))
instance Typeable t => ReflectTyRef ('PrimitiveRef t) where
reflectTyRef _ = TyTy (T.pack $ show $ typeRep @t)
instance KnownSymbol s => ReflectTyRef ('ObjectRef s) where
reflectTyRef _ = TyTy (T.pack $ symbolVal $ Proxy @s)
instance KnownSymbol s => ReflectTyRef ('SchemaRef sch s) where
reflectTyRef _ = TyTy (T.pack $ symbolVal $ Proxy @s)
instance Typeable t => ReflectTyRef ('RegistryRef r t n) where
reflectTyRef _ = TyTy (T.pack $ show $ typeRep @t)
| null | https://raw.githubusercontent.com/higherkindness/mu-haskell/e41ba786f556cfac962e0f183b36bf9ae81d69e4/core/rpc/src/Mu/Rpc.hs | haskell | # language FlexibleInstances #
# language ScopedTypeVariables #
# language TypeOperators #
|
Description : Protocol-independent declaration of services
This module defines a type-level language to describe
RPC-like microservices independently of the transport
and protocol.
| Packages whose names are given by type-level strings.
| Services whose names are given by type-level strings.
| Methods whose names are given by type-level strings.
| Arguments whose names are given by type-level strings.
| A package is a set of services.
| A service is a set of methods.
| A method is defined by its name, arguments, and return type.
Synonyms for GraphQL
| A union is one of the objects.
| A field in an object takes some input objects,
and returns a value or some other object,
| Look up a service in a package definition using its name.
| Look up a method in a service definition using its name.
| Defines a reference to a type, either primitive or coming from the schema.
| A primitive type.
| Chain with another service.
| Point to schema.
| Registry subject, type to convert to, and preferred serialization version
| To be used only during TH generation!
| Represents a list of values.
| Represents a possibly-missing value.
| Defines the way in which arguments are handled.
| Use a single value.
| Consume a stream of values.
| Defines the different possibilities for returning
information from a method.
| Fire and forget.
| Return a single value.
| Return a stream of values.
| Return a value or an error.
| Reflection | # language ConstraintKinds #
# language DataKinds #
# language ExistentialQuantification #
# language GADTs #
# language MultiParamTypeClasses #
# language OverloadedStrings #
# language PolyKinds #
# language TypeApplications #
# language TypeFamilies #
# language UndecidableInstances #
module Mu.Rpc (
Package', Package(..)
, Service', Service(..), Object, Union
, Method', Method(..), ObjectField
, LookupService, LookupMethod
, TypeRef(..), Argument', Argument(..), Return(..)
, TyInfo(..), RpcInfo(..), ReflectRpcInfo(..)
) where
import Data.Kind
import Data.Text (Text)
import qualified Data.Text as T
import GHC.TypeLits
import qualified Language.Haskell.TH as TH
import Network.HTTP.Types.Header
import Type.Reflection
import Mu.Schema
import Mu.Schema.Registry
type Package' = Package Symbol Symbol Symbol (TypeRef Symbol)
type Service' = Service Symbol Symbol Symbol (TypeRef Symbol)
type Method' = Method Symbol Symbol Symbol (TypeRef Symbol)
type Argument' = Argument Symbol Symbol (TypeRef Symbol)
data Package serviceName methodName argName tyRef
= Package (Maybe serviceName)
[Service serviceName methodName argName tyRef]
data Service serviceName methodName argName tyRef
= Service serviceName
[Method serviceName methodName argName tyRef]
| OneOf serviceName [serviceName]
data Method serviceName methodName argName tyRef
= Method methodName
[Argument serviceName argName tyRef]
(Return serviceName tyRef)
| An object is a set of fields , in GraphQL lingo .
type Object = 'Service
type Union = 'OneOf
in GraphQL lingo .
type ObjectField = 'Method
type family LookupService (ss :: [Service snm mnm anm tr]) (s :: snm)
:: Service snm mnm anm tr where
LookupService '[] s = TypeError ('Text "could not find method " ':<>: 'ShowType s)
LookupService ('Service s ms ': ss) s = 'Service s ms
LookupService ('OneOf s ms ': ss) s = 'OneOf s ms
LookupService (other ': ss) s = LookupService ss s
type family LookupMethod (s :: [Method snm mnm anm tr]) (m :: mnm)
:: Method snm mnm anm tr where
LookupMethod '[] m = TypeError ('Text "could not find method " ':<>: 'ShowType m)
LookupMethod ('Method m args r ': ms) m = 'Method m args r
LookupMethod (other ': ms) m = LookupMethod ms m
' TypeRef 's are used to define arguments and result types .
data TypeRef serviceName where
PrimitiveRef :: Type -> TypeRef serviceName
ObjectRef :: serviceName -> TypeRef serviceName
SchemaRef :: Schema typeName fieldName -> typeName -> TypeRef serviceName
RegistryRef :: Registry -> Type -> Nat -> TypeRef serviceName
THRef :: TH.Type -> TypeRef serviceName
Combinators found in the gRPC and GraphQL languages .
ListRef :: TypeRef serviceName -> TypeRef serviceName
OptionalRef :: TypeRef serviceName -> TypeRef serviceName
instance Show (TypeRef s) where
show _ = "ty"
data Argument serviceName argName tyRef where
ArgSingle :: Maybe argName
-> tyRef
-> Argument serviceName argName tyRef
ArgStream :: Maybe argName
-> tyRef
-> Argument serviceName argName tyRef
data Return serviceName tyRef where
RetNothing :: Return serviceName tyRef
RetSingle :: tyRef -> Return serviceName tyRef
RetStream :: tyRef -> Return serviceName tyRef
RetThrows :: tyRef -> tyRef -> Return serviceName tyRef
data RpcInfo i
= NoRpcInfo
| RpcInfo { packageInfo :: Package Text Text Text TyInfo
, serviceInfo :: Service Text Text Text TyInfo
, methodInfo :: Maybe (Method Text Text Text TyInfo)
, headers :: RequestHeaders
, extraInfo :: i
}
data TyInfo
= TyList TyInfo
| TyOption TyInfo
| TyTy Text
deriving (Show, Eq)
instance Show (RpcInfo i) where
show NoRpcInfo
= "<no info>"
show (RpcInfo (Package p _) s m _ _)
= T.unpack $ showPkg p (showMth m (showSvc s))
where
showPkg Nothing = id
showPkg (Just pkg) = ((pkg <> ":") <>)
showMth Nothing = id
showMth (Just (Method mt _ _)) = (<> (":" <> mt))
showSvc (Service sv _) = sv
showSvc (OneOf sv _) = sv
class ReflectRpcInfo (p :: Package') (s :: Service') (m :: Method') where
reflectRpcInfo :: Proxy p -> Proxy s -> Proxy m -> RequestHeaders -> i -> RpcInfo i
class ReflectService (s :: Service') where
reflectService :: Proxy s -> Service Text Text Text TyInfo
class ReflectMethod (m :: Method') where
reflectMethod :: Proxy m -> Method Text Text Text TyInfo
class ReflectArg (arg :: Argument') where
reflectArg :: Proxy arg -> Argument Text Text TyInfo
class ReflectReturn (r :: Return Symbol (TypeRef Symbol)) where
reflectReturn :: Proxy r -> Return Text TyInfo
class ReflectTyRef (r :: TypeRef Symbol) where
reflectTyRef :: Proxy r -> TyInfo
class KnownMaySymbol (m :: Maybe Symbol) where
maySymbolVal :: Proxy m -> Maybe Text
instance KnownMaySymbol 'Nothing where
maySymbolVal _ = Nothing
instance (KnownSymbol s) => KnownMaySymbol ('Just s) where
maySymbolVal _ = Just $ T.pack $ symbolVal (Proxy @s)
class KnownSymbols (m :: [Symbol]) where
symbolsVal :: Proxy m -> [Text]
instance KnownSymbols '[] where
symbolsVal _ = []
instance (KnownSymbol s, KnownSymbols ss) => KnownSymbols (s ': ss) where
symbolsVal _ = T.pack (symbolVal (Proxy @s)) : symbolsVal (Proxy @ss)
class ReflectServices (ss :: [Service']) where
reflectServices :: Proxy ss -> [Service Text Text Text TyInfo]
instance ReflectServices '[] where
reflectServices _ = []
instance (ReflectService s, ReflectServices ss)
=> ReflectServices (s ': ss) where
reflectServices _ = reflectService (Proxy @s) : reflectServices (Proxy @ss)
class ReflectMethods (ms :: [Method']) where
reflectMethods :: Proxy ms -> [Method Text Text Text TyInfo]
instance ReflectMethods '[] where
reflectMethods _ = []
instance (ReflectMethod m, ReflectMethods ms)
=> ReflectMethods (m ': ms) where
reflectMethods _ = reflectMethod (Proxy @m) : reflectMethods (Proxy @ms)
class ReflectArgs (ms :: [Argument']) where
reflectArgs :: Proxy ms -> [Argument Text Text TyInfo]
instance ReflectArgs '[] where
reflectArgs _ = []
instance (ReflectArg m, ReflectArgs ms)
=> ReflectArgs (m ': ms) where
reflectArgs _ = reflectArg (Proxy @m) : reflectArgs (Proxy @ms)
instance (KnownMaySymbol pname, ReflectServices ss, ReflectService s, ReflectMethod m)
=> ReflectRpcInfo ('Package pname ss) s m where
reflectRpcInfo _ ps pm req extra
= RpcInfo (Package (maySymbolVal (Proxy @pname))
(reflectServices (Proxy @ss)))
(reflectService ps) (Just (reflectMethod pm)) req extra
instance (KnownSymbol sname, ReflectMethods ms)
=> ReflectService ('Service sname ms) where
reflectService _
= Service (T.pack $ symbolVal (Proxy @sname))
(reflectMethods (Proxy @ms))
instance (KnownSymbol sname, KnownSymbols elts)
=> ReflectService ('OneOf sname elts) where
reflectService _
= OneOf (T.pack $ symbolVal (Proxy @sname))
(symbolsVal (Proxy @elts))
instance (KnownSymbol mname, ReflectArgs args, ReflectReturn r)
=> ReflectMethod ('Method mname args r) where
reflectMethod _
= Method (T.pack $ symbolVal (Proxy @mname))
(reflectArgs (Proxy @args)) (reflectReturn (Proxy @r))
instance (KnownMaySymbol aname, ReflectTyRef t)
=> ReflectArg ('ArgSingle aname t) where
reflectArg _
= ArgSingle (maySymbolVal (Proxy @aname)) (reflectTyRef (Proxy @t))
instance (KnownMaySymbol aname, ReflectTyRef t)
=> ReflectArg ('ArgStream aname t) where
reflectArg _
= ArgStream (maySymbolVal (Proxy @aname)) (reflectTyRef (Proxy @t))
instance ReflectReturn 'RetNothing where
reflectReturn _ = RetNothing
instance (ReflectTyRef t)
=> ReflectReturn ('RetSingle t) where
reflectReturn _ = RetSingle (reflectTyRef (Proxy @t))
instance (ReflectTyRef t)
=> ReflectReturn ('RetStream t) where
reflectReturn _ = RetStream (reflectTyRef (Proxy @t))
instance (ReflectTyRef e, ReflectTyRef t)
=> ReflectReturn ('RetThrows e t) where
reflectReturn _ = RetThrows (reflectTyRef (Proxy @e))
(reflectTyRef (Proxy @t))
instance ReflectTyRef t => ReflectTyRef ('ListRef t) where
reflectTyRef _ = TyList (reflectTyRef (Proxy @t))
instance ReflectTyRef t => ReflectTyRef ('OptionalRef t) where
reflectTyRef _ = TyOption (reflectTyRef (Proxy @t))
instance Typeable t => ReflectTyRef ('PrimitiveRef t) where
reflectTyRef _ = TyTy (T.pack $ show $ typeRep @t)
instance KnownSymbol s => ReflectTyRef ('ObjectRef s) where
reflectTyRef _ = TyTy (T.pack $ symbolVal $ Proxy @s)
instance KnownSymbol s => ReflectTyRef ('SchemaRef sch s) where
reflectTyRef _ = TyTy (T.pack $ symbolVal $ Proxy @s)
instance Typeable t => ReflectTyRef ('RegistryRef r t n) where
reflectTyRef _ = TyTy (T.pack $ show $ typeRep @t)
|
ac97548ed8f6e822d8a2d8d388c65b76a86021faa84482a624c58de20ecba175 | FlowerWrong/mblog | see.erl | %% ---
Excerpted from " Programming Erlang , Second Edition " ,
published by The Pragmatic Bookshelf .
%% Copyrights apply to this code. It may not be used to create training material,
%% courses, books, articles, and the like. Contact us if you are in doubt.
%% We make no guarantees that this code is fit for any purpose.
%% Visit for more book information.
%%---
-module(see).
-export([main/0, % starts the system
load_module/1,
modules_loaded/0,
log_error/1,
make_server/3,
cast/2,
rpc/2,
change_behaviour/2,
keep_alive/2,
make_global/2,
on_exit/2,
on_halt/1,
stop_system/1,
every/3,
lookup/2,
read/0,
write/1,
env/1,
make_scripts/0
]).
-import(lists, [member/2, map/2, reverse/1]).
%% Boot the system: start the five core servers (io, code loader,
%% error logger, halt demon, environment), then load and run the module
%% named on the command line via -load Mod.
main() ->
    make_server(io,
		fun start_io/0, fun handle_io/2),
    %% The code server starts out knowing the modules already present.
    %% BUG FIX: "error_hander" was a typo -- make_scripts/0 primLoads
    %% error_handler, so modules_loaded/0 would have reported a module
    %% that does not exist.
    make_server(code,
		const([lists,error_handler,see|preloaded()]),
		fun handle_code/2),
    make_server(error_logger,
		const(0), fun handle_error_logger/2),
    make_server(halt_demon,
		const([]), fun handle_halt_demon/2),
    make_server(env,
		fun start_env/0, fun handle_env/2),
    Mod = get_module_name(),
    load_module(Mod),
    run(Mod).
run(Mod) ->
Pid = spawn_link(Mod, main, []),
on_exit(Pid, fun(Why) -> stop_system(Why) end).
load_module(Mod) ->
rpc(code, {load, Mod}).
modules_loaded() ->
rpc(code, modules_loaded).
handle_code(modules_loaded, Mods) ->
{length(Mods), Mods};
handle_code({load, Mod}, Mods) ->
case member(Mod, Mods) of
true ->
{already_loaded, Mods};
false ->
case primLoad(Mod) of
{ok,Mod} ->
{{ok,Mod}, [Mod|Mods]};
Error ->
{Error, Mods}
end
end.
primLoad(Module) ->
Str = atom_to_list(Module),
case erl_prim_loader:get_file(Str ++ ".beam") of
{ok, Bin, _FullName} ->
case erlang:load_module(Module, Bin) of
{module, Module} ->
{ok,Module};
{module, _} ->
{error, wrong_module_in_binary};
_Other ->
{error, {bad_object_code, Module}}
end;
_Error ->
{error, {cannot_locate, Module}}
end.
log_error(Error) -> cast(error_logger, {log, Error}).
handle_error_logger({log, Error}, N) ->
erlang:display({error, Error}),
{ok, N+1}.
on_halt(Fun) -> cast(halt_demon,{on_halt,Fun}).
stop_system(Why) -> cast(halt_demon,{stop_system,Why}).
handle_halt_demon({on_halt, Fun}, Funs) ->
{ok, [Fun|Funs]};
handle_halt_demon({stop_system, Why}, Funs) ->
case Why of
normal -> true;
_ -> erlang:display({stopping_system,Why})
end,
map(fun(F) -> F() end, Funs),
erlang:halt(),
{ok, []}.
read() -> rpc(io, read).
write(X) -> rpc(io, {write, X}).
start_io() ->
Port = open_port({fd,0,1}, [eof, binary]),
process_flag(trap_exit, true),
{false, Port}.
handle_io(read, {true, Port}) ->
{eof, {true, Port}};
handle_io(read, {false, Port}) ->
receive
{Port, {data, Bytes}} ->
{{ok, Bytes}, {false, Port}};
{Port, eof} ->
{eof, {true,Port}};
{'EXIT', Port, badsig} ->
handle_io(read, {false, Port});
{'EXIT', Port, _Why} ->
{eof, {true, Port}}
end;
handle_io({write,X}, {Flag,Port}) ->
Port ! {self(), {command, X}},
{ok, {Flag, Port}}.
env(Key) -> rpc(env, {lookup, Key}).
handle_env({lookup, Key}, Dict) ->
{lookup(Key, Dict), Dict}.
start_env() ->
Env = case init:get_argument(environment) of
{ok, [L]} ->
L;
error ->
fatal({missing, '-environment ...'})
end,
map(fun split_env/1, Env).
%% Split "KEY=VALUE" into {"KEY", "VALUE"}, splitting at the *first*
%% '=' only; a string without '=' yields {String, []}.
split_env(Str) ->
    case lists:splitwith(fun(C) -> C =/= $= end, Str) of
	{Key, [$= | Value]} -> {Key, Value};
	{Key, []}           -> {Key, []}
    end.
make_server(Name, FunD, FunH) ->
make_global(Name,
fun() ->
Data = FunD(),
server_loop(Name, Data, FunH)
end).
%% Generic server loop.  Data is the server state; Fun(Query, Data)
%% computes the reply and/or the new state.  Three message forms:
%%   {rpc, Pid, Q}  -- synchronous: Fun must return {Reply, NewData};
%%                     a crash inside Fun is reported back to the caller
%%                     as {Name, exit, Why}.
%%   {cast, Pid, Q} -- asynchronous: Fun must return NewData; a crash
%%                     inside Fun kills the *caller* and the server
%%                     keeps its old state.
%%   {eval, Fun1}   -- hot code swap: continue with a new handler.
server_loop(Name, Data, Fun) ->
    receive
	{rpc, Pid, Q} ->
	    case (catch Fun(Q, Data)) of
		{'EXIT', Why} ->
		    Pid ! {Name, exit, Why},
		    %% state unchanged after a failed query
		    server_loop(Name, Data, Fun);
		{Reply, Data1} ->
		    Pid ! {Name, Reply},
		    server_loop(Name, Data1, Fun)
	    end;
	{cast, Pid, Q} ->
	    case (catch Fun(Q, Data)) of
		{'EXIT', Why} ->
		    exit(Pid, Why),
		    server_loop(Name, Data, Fun);
		Data1 ->
		    server_loop(Name, Data1, Fun)
	    end;
	{eval, Fun1} ->
	    server_loop(Name, Data, Fun1)
    end.
%% Synchronous call to a registered server; blocks until the server
%% replies, re-raising any failure that occurred in its handler.
%% NOTE(review): the receive selects on the server *name* only, not on
%% a unique reference, so replies from overlapping calls to the same
%% server could in principle be paired with the wrong request --
%% confirm callers never interleave rpc/2 calls this way.
rpc(Name, Q) ->
    Name ! {rpc, self(), Q},
    receive
	{Name, Reply} ->
	    Reply;
	{Name, exit, Why} ->
	    exit(Why)
    end.
cast(Name, Q) ->
Name ! {cast, self(), Q}.
change_behaviour(Name, Fun) ->
Name ! {eval, Fun}.
const(C) -> fun() -> C end.
keep_alive(Name, Fun) ->
Pid = make_global(Name, Fun),
on_exit(Pid,
fun(_Exit) -> keep_alive(Name, Fun) end).
make_global(Name, Fun) ->
case whereis(Name) of
undefined ->
Self = self(),
Pid = spawn(fun() ->
make_global(Self,Name,Fun)
end),
receive
{Pid, ack} ->
Pid
end;
Pid ->
Pid
end.
%% Runs inside the freshly spawned process: try to register under Name.
%% Registration races with other spawners, so losing the race just
%% means acknowledging the parent and dying; on success, acknowledge
%% and become the server by calling Fun().
make_global(Pid, Name, Fun) ->
    %% BUG FIX: register/2 raises badarg when the name is already
    %% taken -- without `catch' the {'EXIT', _} clause could never
    %% match, the spawned process would crash before sending the ack,
    %% and the parent in make_global/2 would hang forever.
    case catch register(Name, self()) of
	{'EXIT', _} ->
	    Pid ! {self(), ack};
	_ ->
	    Pid ! {self(), ack},
	    Fun()
    end.
on_exit(Pid, Fun) ->
spawn(fun() ->
process_flag(trap_exit, true),
link(Pid),
receive
{'EXIT', Pid, Why} ->
Fun(Why)
end
end).
every(Pid, Time, Fun) ->
spawn(fun() ->
process_flag(trap_exit, true),
link(Pid),
every_loop(Pid, Time, Fun)
end).
every_loop(Pid, Time, Fun) ->
receive
{'EXIT', Pid, _Why} ->
true
after Time ->
Fun(),
every_loop(Pid, Time, Fun)
end.
get_module_name() ->
case init:get_argument(load) of
{ok, [[Arg]]} ->
module_name(Arg);
error ->
fatal({missing, '-load Mod'})
end.
%% Association-list lookup on the first tuple element.  Returns
%% {found, Value} (the second element of the matching tuple) or
%% not_found.
lookup(Key, L) ->
    case lists:keyfind(Key, 1, L) of
	false -> not_found;
	Tuple -> {found, element(2, Tuple)}
    end.
module_name(Str) ->
case (catch list_to_atom(Str)) of
{'EXIT', _} ->
log_error({bad_module_name,Str}),
stop_system(bad_start_module);
Mod -> Mod
end.
fatal(Term) ->
log_error({fatal, Term}),
stop_system({fatal, Term}).
preloaded() ->
[zlib,prim_file,prim_zip,prim_inet,erlang,otp_ring0,init,
erl_prim_loader].
make_scripts() ->
{ok, Cwd} = file:get_cwd(),
Script =
{script,{"see","1.0"},
[{preLoaded,preloaded()},
{progress,preloaded},
{path, [Cwd]},
{primLoad,
[lists,
error_handler,
see
]},
{kernel_load_completed},
{progress,kernel_load_completed},
{progress,started},
{apply,{see,main,[]}}
]},
io:format("Script:~p~n",[Script]),
file:write_file("see.boot", term_to_binary(Script)),
file:write_file("see",[
"#!/bin/sh\nerl ",
%%" -init_debug ",
" -boot ", Cwd, "/see ",
"-environment `printenv` -load $1\n"]),
os:cmd("chmod a+x see"),
init:stop(),
true.
| null | https://raw.githubusercontent.com/FlowerWrong/mblog/3233ede938d2019a7b57391405197ac19c805b27/categories/erlang/demo/jaerlang2_code/see/see.erl | erlang | ---
Copyrights apply to this code. It may not be used to create training material,
courses, books, articles, and the like. Contact us if you are in doubt.
We make no guarantees that this code is fit for any purpose.
Visit for more book information.
---
starts the system
" -init_debug ", | Excerpted from " Programming Erlang , Second Edition " ,
published by The Pragmatic Bookshelf .
-module(see).
load_module/1,
modules_loaded/0,
log_error/1,
make_server/3,
cast/2,
rpc/2,
change_behaviour/2,
keep_alive/2,
make_global/2,
on_exit/2,
on_halt/1,
stop_system/1,
every/3,
lookup/2,
read/0,
write/1,
env/1,
make_scripts/0
]).
-import(lists, [member/2, map/2, reverse/1]).
main() ->
make_server(io,
fun start_io/0, fun handle_io/2),
make_server(code,
const([lists,error_hander,see|preloaded()]),
fun handle_code/2),
make_server(error_logger,
const(0), fun handle_error_logger/2),
make_server(halt_demon,
const([]), fun handle_halt_demon/2),
make_server(env,
fun start_env/0, fun handle_env/2),
Mod = get_module_name(),
load_module(Mod),
run(Mod).
run(Mod) ->
Pid = spawn_link(Mod, main, []),
on_exit(Pid, fun(Why) -> stop_system(Why) end).
load_module(Mod) ->
rpc(code, {load, Mod}).
modules_loaded() ->
rpc(code, modules_loaded).
handle_code(modules_loaded, Mods) ->
{length(Mods), Mods};
handle_code({load, Mod}, Mods) ->
case member(Mod, Mods) of
true ->
{already_loaded, Mods};
false ->
case primLoad(Mod) of
{ok,Mod} ->
{{ok,Mod}, [Mod|Mods]};
Error ->
{Error, Mods}
end
end.
primLoad(Module) ->
Str = atom_to_list(Module),
case erl_prim_loader:get_file(Str ++ ".beam") of
{ok, Bin, _FullName} ->
case erlang:load_module(Module, Bin) of
{module, Module} ->
{ok,Module};
{module, _} ->
{error, wrong_module_in_binary};
_Other ->
{error, {bad_object_code, Module}}
end;
_Error ->
{error, {cannot_locate, Module}}
end.
log_error(Error) -> cast(error_logger, {log, Error}).
handle_error_logger({log, Error}, N) ->
erlang:display({error, Error}),
{ok, N+1}.
on_halt(Fun) -> cast(halt_demon,{on_halt,Fun}).
stop_system(Why) -> cast(halt_demon,{stop_system,Why}).
handle_halt_demon({on_halt, Fun}, Funs) ->
{ok, [Fun|Funs]};
handle_halt_demon({stop_system, Why}, Funs) ->
case Why of
normal -> true;
_ -> erlang:display({stopping_system,Why})
end,
map(fun(F) -> F() end, Funs),
erlang:halt(),
{ok, []}.
read() -> rpc(io, read).
write(X) -> rpc(io, {write, X}).
start_io() ->
Port = open_port({fd,0,1}, [eof, binary]),
process_flag(trap_exit, true),
{false, Port}.
handle_io(read, {true, Port}) ->
{eof, {true, Port}};
handle_io(read, {false, Port}) ->
receive
{Port, {data, Bytes}} ->
{{ok, Bytes}, {false, Port}};
{Port, eof} ->
{eof, {true,Port}};
{'EXIT', Port, badsig} ->
handle_io(read, {false, Port});
{'EXIT', Port, _Why} ->
{eof, {true, Port}}
end;
handle_io({write,X}, {Flag,Port}) ->
Port ! {self(), {command, X}},
{ok, {Flag, Port}}.
env(Key) -> rpc(env, {lookup, Key}).
handle_env({lookup, Key}, Dict) ->
{lookup(Key, Dict), Dict}.
start_env() ->
Env = case init:get_argument(environment) of
{ok, [L]} ->
L;
error ->
fatal({missing, '-environment ...'})
end,
map(fun split_env/1, Env).
split_env(Str) -> split_env(Str, []).
split_env([$=|T], L) -> {reverse(L), T};
split_env([], L) -> {reverse(L), []};
split_env([H|T], L) -> split_env(T, [H|L]).
make_server(Name, FunD, FunH) ->
make_global(Name,
fun() ->
Data = FunD(),
server_loop(Name, Data, FunH)
end).
server_loop(Name, Data, Fun) ->
receive
{rpc, Pid, Q} ->
case (catch Fun(Q, Data)) of
{'EXIT', Why} ->
Pid ! {Name, exit, Why},
server_loop(Name, Data, Fun);
{Reply, Data1} ->
Pid ! {Name, Reply},
server_loop(Name, Data1, Fun)
end;
{cast, Pid, Q} ->
case (catch Fun(Q, Data)) of
{'EXIT', Why} ->
exit(Pid, Why),
server_loop(Name, Data, Fun);
Data1 ->
server_loop(Name, Data1, Fun)
end;
{eval, Fun1} ->
server_loop(Name, Data, Fun1)
end.
rpc(Name, Q) ->
Name ! {rpc, self(), Q},
receive
{Name, Reply} ->
Reply;
{Name, exit, Why} ->
exit(Why)
end.
cast(Name, Q) ->
Name ! {cast, self(), Q}.
change_behaviour(Name, Fun) ->
Name ! {eval, Fun}.
const(C) -> fun() -> C end.
keep_alive(Name, Fun) ->
Pid = make_global(Name, Fun),
on_exit(Pid,
fun(_Exit) -> keep_alive(Name, Fun) end).
make_global(Name, Fun) ->
case whereis(Name) of
undefined ->
Self = self(),
Pid = spawn(fun() ->
make_global(Self,Name,Fun)
end),
receive
{Pid, ack} ->
Pid
end;
Pid ->
Pid
end.
make_global(Pid, Name, Fun) ->
case register(Name, self()) of
{'EXIT', _} ->
Pid ! {self(), ack};
_ ->
Pid ! {self(), ack},
Fun()
end.
on_exit(Pid, Fun) ->
spawn(fun() ->
process_flag(trap_exit, true),
link(Pid),
receive
{'EXIT', Pid, Why} ->
Fun(Why)
end
end).
every(Pid, Time, Fun) ->
spawn(fun() ->
process_flag(trap_exit, true),
link(Pid),
every_loop(Pid, Time, Fun)
end).
every_loop(Pid, Time, Fun) ->
receive
{'EXIT', Pid, _Why} ->
true
after Time ->
Fun(),
every_loop(Pid, Time, Fun)
end.
get_module_name() ->
case init:get_argument(load) of
{ok, [[Arg]]} ->
module_name(Arg);
error ->
fatal({missing, '-load Mod'})
end.
lookup(Key, L) ->
case lists:keysearch(Key, 1, L) of
{value, T} -> {found, element(2, T)};
false -> not_found
end.
module_name(Str) ->
case (catch list_to_atom(Str)) of
{'EXIT', _} ->
log_error({bad_module_name,Str}),
stop_system(bad_start_module);
Mod -> Mod
end.
fatal(Term) ->
log_error({fatal, Term}),
stop_system({fatal, Term}).
preloaded() ->
[zlib,prim_file,prim_zip,prim_inet,erlang,otp_ring0,init,
erl_prim_loader].
make_scripts() ->
{ok, Cwd} = file:get_cwd(),
Script =
{script,{"see","1.0"},
[{preLoaded,preloaded()},
{progress,preloaded},
{path, [Cwd]},
{primLoad,
[lists,
error_handler,
see
]},
{kernel_load_completed},
{progress,kernel_load_completed},
{progress,started},
{apply,{see,main,[]}}
]},
io:format("Script:~p~n",[Script]),
file:write_file("see.boot", term_to_binary(Script)),
file:write_file("see",[
"#!/bin/sh\nerl ",
" -boot ", Cwd, "/see ",
"-environment `printenv` -load $1\n"]),
os:cmd("chmod a+x see"),
init:stop(),
true.
|
ee4d5c7ce60bd97524abc0f16664e5ddc1e06f4399cebdbbf129fa91122a3cb5 | glondu/belenios | spool.mli | (**************************************************************************)
(* BELENIOS *)
(* *)
Copyright © 2012 - 2023
(* *)
(* This program is free software: you can redistribute it and/or modify *)
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation , either version 3 of the
(* License, or (at your option) any later version, with the additional *)
exemption that compiling , linking , and/or using OpenSSL is allowed .
(* *)
(* This program is distributed in the hope that it will be useful, but *)
(* WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU *)
(* Affero General Public License for more details. *)
(* *)
You should have received a copy of the GNU Affero General Public
(* License along with this program. If not, see *)
(* </>. *)
(**************************************************************************)
open Belenios_core.Serializable_t
open Web_serializable_t
type 'a t
type 'a list
val filename : 'a t -> string
val get : uuid:uuid -> 'a t -> 'a option Lwt.t
val get_default : default:'a -> uuid:uuid -> 'a t -> 'a Lwt.t
val get_raw_list : uuid:uuid -> string list -> string List.t option Lwt.t
val get_fold_s : uuid:uuid -> 'a list -> ('a -> 'b -> 'b Lwt.t) -> 'b -> 'b option Lwt.t
val get_fold_s_default : uuid:uuid -> 'a list -> ('a -> 'b -> 'b Lwt.t) -> 'b -> 'b Lwt.t
val set : uuid:uuid -> 'a t -> 'a -> unit Lwt.t
val set_list : uuid:uuid -> 'a list -> 'a List.t -> unit Lwt.t
val del : uuid:uuid -> 'a t -> unit Lwt.t
val draft : draft_election t
val hide_result : datetime t
val dates : election_dates t
val state : election_state t
val decryption_tokens : decryption_tokens t
val metadata : metadata t
val private_key : number t
val private_keys : string list
val skipped_shufflers : skipped_shufflers t
val shuffle_token : shuffle_token t
val extended_records : extended_record list
val records : string list
val credential_mappings : credential_mapping list
val audit_cache : audit_cache t
val chain_filename : uuid -> string
val chain : uuid -> string t
val last_event : last_event t
val get_voters : uuid:uuid -> voter_list option Lwt.t
| null | https://raw.githubusercontent.com/glondu/belenios/5306402c15c6a76438b13b8b9da0f45d02a0563d/src/web/server/common/spool.mli | ocaml | ************************************************************************
BELENIOS
This program is free software: you can redistribute it and/or modify
License, or (at your option) any later version, with the additional
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Affero General Public License for more details.
License along with this program. If not, see
</>.
************************************************************************ | Copyright © 2012 - 2023
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation , either version 3 of the
exemption that compiling , linking , and/or using OpenSSL is allowed .
You should have received a copy of the GNU Affero General Public
open Belenios_core.Serializable_t
open Web_serializable_t
type 'a t
type 'a list
val filename : 'a t -> string
val get : uuid:uuid -> 'a t -> 'a option Lwt.t
val get_default : default:'a -> uuid:uuid -> 'a t -> 'a Lwt.t
val get_raw_list : uuid:uuid -> string list -> string List.t option Lwt.t
val get_fold_s : uuid:uuid -> 'a list -> ('a -> 'b -> 'b Lwt.t) -> 'b -> 'b option Lwt.t
val get_fold_s_default : uuid:uuid -> 'a list -> ('a -> 'b -> 'b Lwt.t) -> 'b -> 'b Lwt.t
val set : uuid:uuid -> 'a t -> 'a -> unit Lwt.t
val set_list : uuid:uuid -> 'a list -> 'a List.t -> unit Lwt.t
val del : uuid:uuid -> 'a t -> unit Lwt.t
val draft : draft_election t
val hide_result : datetime t
val dates : election_dates t
val state : election_state t
val decryption_tokens : decryption_tokens t
val metadata : metadata t
val private_key : number t
val private_keys : string list
val skipped_shufflers : skipped_shufflers t
val shuffle_token : shuffle_token t
val extended_records : extended_record list
val records : string list
val credential_mappings : credential_mapping list
val audit_cache : audit_cache t
val chain_filename : uuid -> string
val chain : uuid -> string t
val last_event : last_event t
val get_voters : uuid:uuid -> voter_list option Lwt.t
|
6282460248eeb66a13db8d456ec559933a6bfb1d9ba83c53156145fb9f1231aa | district0x/district-designer | smart_contract_events.cljs | (ns tokens.shared.spec.smart-contract-events
(:require
[cljs.spec.alpha :as s]
[district-designer.shared.spec.ipfs-events :refer [ipfs-hash? address? edn? event-type]]
[district-designer.shared.spec.smart-contract-events]
[tokens.shared.spec.ipfs-events]))
(s/def ::token address?)
(s/def ::token-type #{:erc20
:erc721
:erc1155})
(s/def ::token-ipfs-abi ipfs-hash?)
(s/def ::token-version integer?)
(s/def ::token-name string?)
(s/def ::token-symbol string?)
(s/def ::base-uri string?)
(s/def ::decimal-units nat-int?)
(s/def ::owner address?)
(s/def :token-created/ipfs-data
(s/keys :req [:token-contract/fields
:token-contract/metadata-format
:token-contract/metadata-format-settings]))
(defmethod event-type :tokens/token-created [_]
(s/merge
:district-designer.shared.spec.ipfs-events/event-base
(s/keys :req-un [:district-designer.shared.spec.smart-contract-events/district
::token
::token-type
::token-ipfs-abi
::token-version
::token-name
::token-symbol
::base-uri
::decimal-units
::owner
:token-created/ipfs-data])))
(s/def ::new-factory address?)
(defmethod event-type :tokens/factory-updated [_]
(s/merge
:district-designer.shared.spec.ipfs-events/event-base
(s/keys :req-un [::new-factory
::token-type]))) | null | https://raw.githubusercontent.com/district0x/district-designer/3f2a585b866b6e173f2cb7b32ecadcffa33e2804/src/tokens/shared/spec/smart_contract_events.cljs | clojure | (ns tokens.shared.spec.smart-contract-events
(:require
[cljs.spec.alpha :as s]
[district-designer.shared.spec.ipfs-events :refer [ipfs-hash? address? edn? event-type]]
[district-designer.shared.spec.smart-contract-events]
[tokens.shared.spec.ipfs-events]))
(s/def ::token address?)
(s/def ::token-type #{:erc20
:erc721
:erc1155})
(s/def ::token-ipfs-abi ipfs-hash?)
(s/def ::token-version integer?)
(s/def ::token-name string?)
(s/def ::token-symbol string?)
(s/def ::base-uri string?)
(s/def ::decimal-units nat-int?)
(s/def ::owner address?)
(s/def :token-created/ipfs-data
(s/keys :req [:token-contract/fields
:token-contract/metadata-format
:token-contract/metadata-format-settings]))
(defmethod event-type :tokens/token-created [_]
(s/merge
:district-designer.shared.spec.ipfs-events/event-base
(s/keys :req-un [:district-designer.shared.spec.smart-contract-events/district
::token
::token-type
::token-ipfs-abi
::token-version
::token-name
::token-symbol
::base-uri
::decimal-units
::owner
:token-created/ipfs-data])))
(s/def ::new-factory address?)
(defmethod event-type :tokens/factory-updated [_]
(s/merge
:district-designer.shared.spec.ipfs-events/event-base
(s/keys :req-un [::new-factory
::token-type]))) | |
7bbe106137badc13f8e3da0fcf89d5c2fd8db310c82e3c0225130324757c9c05 | jimrthy/frereth | user.clj | (ns user
(:require
[clojure.repl :refer [doc]]
[clojure.spec.alpha :as s]
[clojure.tools.namespace.repl :as tools-ns :refer [set-refresh-dirs]]
[expound.alpha :as expound]
[mount.core :as mount]
;; this is the top-level dependent component...mount will find the rest via ns requires
[tracker.server-components.http-server :refer [http-server]]))
;; ==================== SERVER ====================
(set-refresh-dirs "src/main" "src/dev" "src/test")
;; Change the default output of spec to be more readable
(alter-var-root #'s/*explain-out* (constantly expound/printer))
(defn start
"Start the web server"
[] (mount/start))
(defn stop
"Stop the web server"
[] (mount/stop))
(defn restart
"Stop, reload code, and restart the server. If there is a compile error, use:
```
(tools-ns/refresh)
```
to recompile, and then use `start` once things are good."
[]
(stop)
(tools-ns/refresh :after 'user/start))
(comment
(start)
(stop)
(restart)
)
| null | https://raw.githubusercontent.com/jimrthy/frereth/e1c4a5c031355ff1ff3bb60741eb03dff2377e1d/apps/racing/sonic-forces/tracker/src/dev/user.clj | clojure | this is the top-level dependent component...mount will find the rest via ns requires
==================== SERVER ====================
Change the default output of spec to be more readable | (ns user
(:require
[clojure.repl :refer [doc]]
[clojure.spec.alpha :as s]
[clojure.tools.namespace.repl :as tools-ns :refer [set-refresh-dirs]]
[expound.alpha :as expound]
[mount.core :as mount]
[tracker.server-components.http-server :refer [http-server]]))
(set-refresh-dirs "src/main" "src/dev" "src/test")
(alter-var-root #'s/*explain-out* (constantly expound/printer))
(defn start
"Start the web server"
[] (mount/start))
(defn stop
"Stop the web server"
[] (mount/stop))
(defn restart
"Stop, reload code, and restart the server. If there is a compile error, use:
```
(tools-ns/refresh)
```
to recompile, and then use `start` once things are good."
[]
(stop)
(tools-ns/refresh :after 'user/start))
(comment
(start)
(stop)
(restart)
)
|
57632e26d3d7b699961e41dfb1485f1ba877b286db220649edb222de9b1a778f | NalaGinrut/guile-lua-rebirth | io.scm | Copyright ( C ) 2016
" Mu Lei " known as " NalaGinrut " < >
;; This file is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
;; (at your option) any later version.
;; This file is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;; GNU General Public License for more details.
You should have received a copy of the GNU General Public License
;; along with this program. If not, see </>.
(define-module (language lua stdlib io)
#:use-module (ice-9 match)
#:use-module (ice-9 format)
#:export (primitive:lua-print))
NOTE : ` print ' returns nothing ( unspecified in ) . A variable assigned to
;; `print' application result should get `nil' as its value.
(define (primitive:lua-print thunk)
(define (fix x)
(match x
((? boolean?) (if x 'true 'false))
('(marker nil) 'nil)
((? unspecified?) "")
(else x)))
(call-with-values
(lambda () (thunk))
(lambda args
(for-each (lambda (xx) (format #t "~a~/" (fix xx))) args)
(newline))))
| null | https://raw.githubusercontent.com/NalaGinrut/guile-lua-rebirth/ca52e0f481d3b1702810b822c7d316b18456d13e/language/lua/stdlib/io.scm | scheme | This file is free software: you can redistribute it and/or modify
(at your option) any later version.
This file is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program. If not, see </>.
`print' application result should get `nil' as its value. | Copyright ( C ) 2016
" Mu Lei " known as " NalaGinrut " < >
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
(define-module (language lua stdlib io)
#:use-module (ice-9 match)
#:use-module (ice-9 format)
#:export (primitive:lua-print))
NOTE : ` print ' returns nothing ( unspecified in ) . A variable assigned to
(define (primitive:lua-print thunk)
(define (fix x)
(match x
((? boolean?) (if x 'true 'false))
('(marker nil) 'nil)
((? unspecified?) "")
(else x)))
(call-with-values
(lambda () (thunk))
(lambda args
(for-each (lambda (xx) (format #t "~a~/" (fix xx))) args)
(newline))))
|
66e6d7549bc56ebfb9956edcf795ed38d96be9468ca9e6f4a943be2294098844 | clarkhillm/Hawkmoth | messageBox.clj | (ns
^{:author cWX205128}
framework.messageBox
(:use framework.managedResoreces)
(:import [java.util HashMap ArrayDeque]))
(def box (HashMap.))
(defn box_init [threadName] (.put box threadName (ArrayDeque.)))
(defn sendMessage [threadName message]
(if (nil? (.get box threadName)) (box_init threadName))
(if (string? message) (.add (.get box threadName) message)) (blocking_clear watcherName))
(defn getMessage [threadName]
(if (nil? (.get box threadName)) (box_init threadName))
(if-not (.isEmpty (.get box threadName))
(do
(def value (.removeFirst (.get box threadName)))
(blocking_clear watcherName)
value) nil))
(def messageAttachment (HashMap.))
(defn getAttachment [key]
(def attachment (.get messageAttachment key)) (.remove key) attachment)
(defn setAttachment [attachment]
(.put messageAttachment (.hashCode attachment) attachment) (.hashCode attachment))
| null | https://raw.githubusercontent.com/clarkhillm/Hawkmoth/608767bb86c380fadaf12f9201e2198e5766eabb/src/framework/messageBox.clj | clojure | (ns
^{:author cWX205128}
framework.messageBox
(:use framework.managedResoreces)
(:import [java.util HashMap ArrayDeque]))
(def box (HashMap.))
(defn box_init [threadName] (.put box threadName (ArrayDeque.)))
(defn sendMessage [threadName message]
(if (nil? (.get box threadName)) (box_init threadName))
(if (string? message) (.add (.get box threadName) message)) (blocking_clear watcherName))
(defn getMessage [threadName]
(if (nil? (.get box threadName)) (box_init threadName))
(if-not (.isEmpty (.get box threadName))
(do
(def value (.removeFirst (.get box threadName)))
(blocking_clear watcherName)
value) nil))
(def messageAttachment (HashMap.))
(defn getAttachment [key]
(def attachment (.get messageAttachment key)) (.remove key) attachment)
(defn setAttachment [attachment]
(.put messageAttachment (.hashCode attachment) attachment) (.hashCode attachment))
| |
325dc31c9a625eed20667b2c369a5e107f1ed50b92b55a745fa5f8978fe522ed | spawngrid/htoad | i18n_manager.erl | Author :
Created : Feb 26 , 2010
%% Description: TODO: Add description to dets_generator
-module(i18n_manager).
%%
%% Include files
%%
%% Exported Functions
%%
-export([generate_pos/1]).
-define(EPOT_TABLE,epos).
-define(EPOT_TABLE_FUZZY,epos_fuzzy).
%%
%% API Functions
%%
generate_pos([Lang,Files])->
io:format("~s -> ~s ~n",[Lang,Files]),
{ok, SplittedLocales} = string:tokens(Lang,","),
{ok, SplittedFiles} = string:tokens(Files, ","),
ProcessedFiles = sources_parser:parse(SplittedFiles),
io:format("Parsed tokens are ~p~n",[ProcessedFiles]),
BaseDir = "lang/default/",
PopulateTable = fun(Language)->
io:format("-------------------------Generating po file for ~s-------------------------~n",[Language]),
open_table(Language),
put(locale, Language),
insert_tokens(ProcessedFiles),
%%Recover already present translations
TranslationsForLanguage = po_scanner:scan(BaseDir ++ Language ++ "/gettext.po"),
io:format("Updating translations~n"),
insert_translations(TranslationsForLanguage),
Data = dets_data(),
io:format("Generating po file ~n"),
Fuzzy = dets_fuzzy(),
po_generator:generate_file(Language, Data, Fuzzy),
io:format("Closing files ~n"),
close_tables(Language),
io:format("All files closed ~n")
end,
lists:map(PopulateTable, SplittedLocales),
init:stop()
.
%%
%% Local Functions
%%
%% Open a temporal table for a given locale
open_table(Locale)->
Dir = "./lang/tmp/" ++ Locale,
io:format("Creating dir ~s~n",[Dir]),
file:del_dir(Dir),
file:make_dir(Dir),
OpenTable = fun({TableName, TableFile}) ->
File = Dir ++ TableFile,
case dets:open_file(TableName, [{file, File}]) of
{ok,Ref} -> io:format("Opened DETS ~p ~p~n",[TableName,Ref]);
_Error -> io:format("Error opening DETS~p~n",[_Error])
end
end,
lists:map(OpenTable, [{?EPOT_TABLE,"/epot.dets"},{?EPOT_TABLE_FUZZY,"/epot_fuzzy.dets"}]).
%%TODO better way to do cleanup
close_tables(Locale) ->
%%dets:delete_all_objects(?EPOT_TABLE),
ok = dets:close(?EPOT_TABLE),
ok = dets:close(?EPOT_TABLE_FUZZY),
file:delete("./lang/tmp/" ++ Locale ++ "/epot.dets"),
file:delete("./lang/tmp/" ++ Locale ++ "/epot_fuzzy.dets").
%%Get all data from dets table
dets_data() -> dets:foldl(fun(E, Acc) -> [E|Acc] end, [], ?EPOT_TABLE).
dets_fuzzy() -> dets:foldl(fun(E, Acc) -> [E|Acc] end, [], ?EPOT_TABLE_FUZZY).
insert_tokens([]) -> noop;
insert_tokens([{Id,{Fname,Line,_Col}}|Tail]) ->
insert_token(Id, Id, Fname, Line),
insert_tokens(Tail).
insert_token(Id, Translation,Fname,Line)->
FileInfo = get_file_info(Id), %%File info are all files where this string is present
AllFileReferences = lists:sort( [{Fname,Line} | FileInfo] ),
dets:insert(?EPOT_TABLE, {Id, Translation,AllFileReferences}).
insert_translations([]) -> noop;
insert_translations(L = [H|T]) ->
%%io:format("Remaining ~p~n",[L]),
case H of
{comment, _} ->
%%Comments are skipped
insert_translations(T);
_Other ->
[{id,Id}, {str,Str}|Tail] = L,
insert_translation(Id,Str),
insert_translations(Tail)
end.
insert_translation(Id, Translation) ->
io:format("Updating translation for ~p to ~p ~n",[Id,Translation]),
case Id of
[] ->
noop;
Id ->
case dets:lookup(?EPOT_TABLE,Id) of
[] ->
translation !
dets:insert(?EPOT_TABLE_FUZZY, {Id, Translation,fuzzy});
[{Id, _StoredTranslation,FileInfo}] ->
TODO check for translation unicity
io:format("Recovered translation for ~p ~p ~n",[Id,_StoredTranslation]),
dets:insert(?EPOT_TABLE, {Id, Translation,FileInfo})
end
end.
get_file_info(Key) ->
case dets:lookup(?EPOT_TABLE, Key) of
[] -> [];
[{_,_,Finfo}|_] -> Finfo
end.
| null | https://raw.githubusercontent.com/spawngrid/htoad/f0c7dfbd911b29fb0c406b7c26606f553af11194/deps/erlydtl/src/i18n/i18n_manager.erl | erlang | Description: TODO: Add description to dets_generator
Include files
Exported Functions
API Functions
Recover already present translations
Local Functions
Open a temporal table for a given locale
TODO better way to do cleanup
dets:delete_all_objects(?EPOT_TABLE),
Get all data from dets table
File info are all files where this string is present
io:format("Remaining ~p~n",[L]),
Comments are skipped | Author :
Created : Feb 26 , 2010
-module(i18n_manager).
-export([generate_pos/1]).
-define(EPOT_TABLE,epos).
-define(EPOT_TABLE_FUZZY,epos_fuzzy).
generate_pos([Lang,Files])->
io:format("~s -> ~s ~n",[Lang,Files]),
{ok, SplittedLocales} = string:tokens(Lang,","),
{ok, SplittedFiles} = string:tokens(Files, ","),
ProcessedFiles = sources_parser:parse(SplittedFiles),
io:format("Parsed tokens are ~p~n",[ProcessedFiles]),
BaseDir = "lang/default/",
PopulateTable = fun(Language)->
io:format("-------------------------Generating po file for ~s-------------------------~n",[Language]),
open_table(Language),
put(locale, Language),
insert_tokens(ProcessedFiles),
TranslationsForLanguage = po_scanner:scan(BaseDir ++ Language ++ "/gettext.po"),
io:format("Updating translations~n"),
insert_translations(TranslationsForLanguage),
Data = dets_data(),
io:format("Generating po file ~n"),
Fuzzy = dets_fuzzy(),
po_generator:generate_file(Language, Data, Fuzzy),
io:format("Closing files ~n"),
close_tables(Language),
io:format("All files closed ~n")
end,
lists:map(PopulateTable, SplittedLocales),
init:stop()
.
open_table(Locale)->
Dir = "./lang/tmp/" ++ Locale,
io:format("Creating dir ~s~n",[Dir]),
file:del_dir(Dir),
file:make_dir(Dir),
OpenTable = fun({TableName, TableFile}) ->
File = Dir ++ TableFile,
case dets:open_file(TableName, [{file, File}]) of
{ok,Ref} -> io:format("Opened DETS ~p ~p~n",[TableName,Ref]);
_Error -> io:format("Error opening DETS~p~n",[_Error])
end
end,
lists:map(OpenTable, [{?EPOT_TABLE,"/epot.dets"},{?EPOT_TABLE_FUZZY,"/epot_fuzzy.dets"}]).
close_tables(Locale) ->
ok = dets:close(?EPOT_TABLE),
ok = dets:close(?EPOT_TABLE_FUZZY),
file:delete("./lang/tmp/" ++ Locale ++ "/epot.dets"),
file:delete("./lang/tmp/" ++ Locale ++ "/epot_fuzzy.dets").
dets_data() -> dets:foldl(fun(E, Acc) -> [E|Acc] end, [], ?EPOT_TABLE).
dets_fuzzy() -> dets:foldl(fun(E, Acc) -> [E|Acc] end, [], ?EPOT_TABLE_FUZZY).
insert_tokens([]) -> noop;
insert_tokens([{Id,{Fname,Line,_Col}}|Tail]) ->
insert_token(Id, Id, Fname, Line),
insert_tokens(Tail).
insert_token(Id, Translation,Fname,Line)->
AllFileReferences = lists:sort( [{Fname,Line} | FileInfo] ),
dets:insert(?EPOT_TABLE, {Id, Translation,AllFileReferences}).
insert_translations([]) -> noop;
insert_translations(L = [H|T]) ->
case H of
{comment, _} ->
insert_translations(T);
_Other ->
[{id,Id}, {str,Str}|Tail] = L,
insert_translation(Id,Str),
insert_translations(Tail)
end.
insert_translation(Id, Translation) ->
io:format("Updating translation for ~p to ~p ~n",[Id,Translation]),
case Id of
[] ->
noop;
Id ->
case dets:lookup(?EPOT_TABLE,Id) of
[] ->
translation !
dets:insert(?EPOT_TABLE_FUZZY, {Id, Translation,fuzzy});
[{Id, _StoredTranslation,FileInfo}] ->
TODO check for translation unicity
io:format("Recovered translation for ~p ~p ~n",[Id,_StoredTranslation]),
dets:insert(?EPOT_TABLE, {Id, Translation,FileInfo})
end
end.
get_file_info(Key) ->
case dets:lookup(?EPOT_TABLE, Key) of
[] -> [];
[{_,_,Finfo}|_] -> Finfo
end.
|
d6fd392388529b848a11713084227b1e817841bdf46bd32d721800968f1e4b31 | g-andrade/quack | quic_data_kv.erl | -module(quic_data_kv).
-include("quic_data_kv.hrl").
%% ------------------------------------------------------------------
%% API Function Exports
%% ------------------------------------------------------------------
-export([fully_decode/1]).
-export([decode/1]).
-export([decode_tagged_values/1]).
-export([decode_tag_list/1]).
-export([encode/1]).
-export([encode_tagged_values/1]).
-export([encode_tag_list/1]).
%% ------------------------------------------------------------------
%% Type Exports
%% ------------------------------------------------------------------
-export_type([data_kv/0]).
%% ------------------------------------------------------------------
%% API Function Definitions
%% ------------------------------------------------------------------
fully_decode(Data) ->
{DataKv, <<>>} = decode(Data),
DataKv.
decode(<<PayloadTag:4/binary, Body/binary>>) ->
case decode_tagged_values(Body) of
incomplete -> incomplete;
{TaggedValuesMap, RemainingData} ->
{#data_kv{ tag = decode_tag(PayloadTag),
tagged_values = TaggedValuesMap },
RemainingData}
end;
decode(<<_/binary>>) ->
incomplete.
decode_tagged_values(<<TaggedValuesLength:2/little-unsigned-integer-unit:8, 0:16,
Body/binary>>) ->
EncodedTaggedValueEndOffsetsSize = 2 * 4 * TaggedValuesLength,
<<EncodedTaggedValueEndOffsets:EncodedTaggedValueEndOffsetsSize/binary,
EncodedValues/binary>> = Body,
BaseOffset = 0,
TaggedValueEndOffsets =
(fun F(<<EncodedTag:4/binary, EndOffset:4/little-unsigned-integer-unit:8, Next/binary>>) ->
[{decode_tag(EncodedTag), EndOffset - BaseOffset} | F(Next)];
F(<<>>) ->
[]
end)(EncodedTaggedValueEndOffsets),
{_LastElementTag, LastElementEndOffset} = lists:last(TaggedValueEndOffsets),
case LastElementEndOffset > iolist_size(EncodedValues) of
true -> incomplete;
false ->
{TaggedValuesList, FinalEndOffset} =
lists:mapfoldl(
fun ({Tag, EndOffset}, StartOffset) ->
{{Tag, binary:part(EncodedValues, StartOffset, EndOffset - StartOffset)},
EndOffset}
end,
0,
TaggedValueEndOffsets),
{maps:from_list(TaggedValuesList),
binary:part(EncodedValues, FinalEndOffset, byte_size(EncodedValues) - FinalEndOffset)}
end.
-spec decode_tag_list(binary()) -> [binary(), ...].
decode_tag_list(<<EncodedTag:4/binary, Next/binary>>) ->
[decode_tag(EncodedTag) | decode_tag_list(Next)];
decode_tag_list(<<>>) ->
[].
encode(#data_kv{ tag = PayloadTag,
tagged_values = TaggedValuesMap }) ->
[% message tag
encode_tag(PayloadTag, 4),
% body
encode_tagged_values(TaggedValuesMap)].
encode_tagged_values(TaggedValuesMap) ->
UnsortedTaggedValues = maps:to_list(TaggedValuesMap),
TaggedValues =
lists:sort(fun ({KeyA, _}, {KeyB, _}) ->
<<NumKeyA:4/little-unsigned-integer-unit:8>> = encode_tag(KeyA, 4),
<<NumKeyB:4/little-unsigned-integer-unit:8>> = encode_tag(KeyB, 4),
NumKeyA =< NumKeyB
end,
UnsortedTaggedValues),
Values = [Value || {_Tag, Value} <- TaggedValues],
{TaggedValueEndOffsets, _} =
lists:mapfoldl(
fun ({Tag, Value}, Acc) ->
NewAcc = Acc + iolist_size(Value),
{{Tag, NewAcc}, NewAcc}
end,
0,
TaggedValues),
number of keyvalue pairs with two - byte filler
[quic_util:encode_uint(length(TaggedValues), 2), 0, 0],
% tags and offsets
[[encode_tag(Tag, 4), quic_util:encode_uint(EndOffset, 4)]
|| {Tag, EndOffset} <- TaggedValueEndOffsets],
Values].
-spec encode_tag_list([binary(), ...]) -> [binary(), ...].
encode_tag_list(Tags) ->
[encode_tag(Tag, 4) || Tag <- Tags].
%% ------------------------------------------------------------------
%% Internal Function Definitions
%% ------------------------------------------------------------------
decode_tag(Tag) ->
TagStr = binary_to_list(Tag),
list_to_binary(lists:takewhile(fun (Char) -> Char =/= 0 end, TagStr)).
encode_tag(Tag, Size) when is_binary(Tag); is_list(Tag) ->
Filler = string:copies([0], Size - iolist_size(Tag)),
iolist_to_binary([Tag, Filler]).
| null | https://raw.githubusercontent.com/g-andrade/quack/b6decb23674dd97f6918d8ac637ede2ef7bcc507/src/quic_data_kv.erl | erlang | ------------------------------------------------------------------
API Function Exports
------------------------------------------------------------------
------------------------------------------------------------------
Type Exports
------------------------------------------------------------------
------------------------------------------------------------------
API Function Definitions
------------------------------------------------------------------
message tag
body
tags and offsets
------------------------------------------------------------------
Internal Function Definitions
------------------------------------------------------------------ | -module(quic_data_kv).
-include("quic_data_kv.hrl").
-export([fully_decode/1]).
-export([decode/1]).
-export([decode_tagged_values/1]).
-export([decode_tag_list/1]).
-export([encode/1]).
-export([encode_tagged_values/1]).
-export([encode_tag_list/1]).
-export_type([data_kv/0]).
fully_decode(Data) ->
{DataKv, <<>>} = decode(Data),
DataKv.
decode(<<PayloadTag:4/binary, Body/binary>>) ->
case decode_tagged_values(Body) of
incomplete -> incomplete;
{TaggedValuesMap, RemainingData} ->
{#data_kv{ tag = decode_tag(PayloadTag),
tagged_values = TaggedValuesMap },
RemainingData}
end;
decode(<<_/binary>>) ->
incomplete.
decode_tagged_values(<<TaggedValuesLength:2/little-unsigned-integer-unit:8, 0:16,
Body/binary>>) ->
EncodedTaggedValueEndOffsetsSize = 2 * 4 * TaggedValuesLength,
<<EncodedTaggedValueEndOffsets:EncodedTaggedValueEndOffsetsSize/binary,
EncodedValues/binary>> = Body,
BaseOffset = 0,
TaggedValueEndOffsets =
(fun F(<<EncodedTag:4/binary, EndOffset:4/little-unsigned-integer-unit:8, Next/binary>>) ->
[{decode_tag(EncodedTag), EndOffset - BaseOffset} | F(Next)];
F(<<>>) ->
[]
end)(EncodedTaggedValueEndOffsets),
{_LastElementTag, LastElementEndOffset} = lists:last(TaggedValueEndOffsets),
case LastElementEndOffset > iolist_size(EncodedValues) of
true -> incomplete;
false ->
{TaggedValuesList, FinalEndOffset} =
lists:mapfoldl(
fun ({Tag, EndOffset}, StartOffset) ->
{{Tag, binary:part(EncodedValues, StartOffset, EndOffset - StartOffset)},
EndOffset}
end,
0,
TaggedValueEndOffsets),
{maps:from_list(TaggedValuesList),
binary:part(EncodedValues, FinalEndOffset, byte_size(EncodedValues) - FinalEndOffset)}
end.
-spec decode_tag_list(binary()) -> [binary(), ...].
decode_tag_list(<<EncodedTag:4/binary, Next/binary>>) ->
[decode_tag(EncodedTag) | decode_tag_list(Next)];
decode_tag_list(<<>>) ->
[].
encode(#data_kv{ tag = PayloadTag,
tagged_values = TaggedValuesMap }) ->
encode_tag(PayloadTag, 4),
encode_tagged_values(TaggedValuesMap)].
encode_tagged_values(TaggedValuesMap) ->
UnsortedTaggedValues = maps:to_list(TaggedValuesMap),
TaggedValues =
lists:sort(fun ({KeyA, _}, {KeyB, _}) ->
<<NumKeyA:4/little-unsigned-integer-unit:8>> = encode_tag(KeyA, 4),
<<NumKeyB:4/little-unsigned-integer-unit:8>> = encode_tag(KeyB, 4),
NumKeyA =< NumKeyB
end,
UnsortedTaggedValues),
Values = [Value || {_Tag, Value} <- TaggedValues],
{TaggedValueEndOffsets, _} =
lists:mapfoldl(
fun ({Tag, Value}, Acc) ->
NewAcc = Acc + iolist_size(Value),
{{Tag, NewAcc}, NewAcc}
end,
0,
TaggedValues),
number of keyvalue pairs with two - byte filler
[quic_util:encode_uint(length(TaggedValues), 2), 0, 0],
[[encode_tag(Tag, 4), quic_util:encode_uint(EndOffset, 4)]
|| {Tag, EndOffset} <- TaggedValueEndOffsets],
Values].
-spec encode_tag_list([binary(), ...]) -> [binary(), ...].
encode_tag_list(Tags) ->
[encode_tag(Tag, 4) || Tag <- Tags].
decode_tag(Tag) ->
TagStr = binary_to_list(Tag),
list_to_binary(lists:takewhile(fun (Char) -> Char =/= 0 end, TagStr)).
encode_tag(Tag, Size) when is_binary(Tag); is_list(Tag) ->
Filler = string:copies([0], Size - iolist_size(Tag)),
iolist_to_binary([Tag, Filler]).
|
8452cbe282d238a92a67844b034dc0c2bb3c07f934155b8d245f4c1e083b3ad7 | rmloveland/scheme48-0.53 | define-primitive.scm | ; -*- Mode: Scheme; Syntax: Scheme; Package: Scheme; -*-
Copyright ( c ) 1993 - 1999 by and . See file COPYING .
These are hacked to ensure that all calls to INPUT - TYPE - PREDICATE and
INPUT - TYPE - COERCION are evaluated at load time ( because they do n't
; have readily reconstructed types).
(define-syntax define-primitive
(syntax-rules ()
((define-primitive opcode input-types action)
(define-consing-primitive opcode input-types #f action))
((define-primitive opcode input-types action returner)
(define-consing-primitive opcode input-types #f action returner))))
(define-syntax define-consing-primitive
(syntax-rules ()
((define-consing-primitive opcode input-types space-proc action)
(let ((proc (primitive-procedure-action input-types space-proc action)))
(define-opcode opcode (proc))))
((define-consing-primitive opcode input-types space-proc action returner)
(let ((proc (primitive-procedure-action input-types space-proc action returner)))
(define-opcode opcode (proc))))))
(define-syntax primitive-procedure-action
(lambda (exp rename compare)
(destructure (((p-p-b input-types space-proc action . returner-option) exp))
(let* ((nargs (length input-types))
(%action (rename 'action))
(%key (rename 'key))
(%ensure-space (rename 'ensure-space))
(%*val* (rename '*val*))
(%arg2 (rename 'arg2))
(%arg3 (rename 'arg3))
(%arg4 (rename 'arg4))
(%arg5 (rename 'arg5))
(%pop (rename 'pop))
(%let (rename 'let))
(%let* (rename 'let*))
(%lambda (rename 'lambda))
(%if (rename 'if))
(%and (rename 'and))
(%goto (rename 'goto))
(%input-type-predicate (rename 'input-type-predicate))
(%input-type-coercion (rename 'input-type-coercion))
(%raise-exception (rename 'raise-exception))
(%wrong-type-argument (rename 'wrong-type-argument))
(shorten (lambda (l1 l2)
(map (lambda (x1 x2) x2 x1) l1 l2)))
(places (reverse (shorten (list %*val* %arg2 %arg3 %arg4 %arg5)
input-types)))
(preds (reverse (shorten (map rename
'(pred1 pred2 pred3 pred4 pred5))
input-types)))
(x->ys (reverse (shorten (map rename
'(x->y1 x->y2 x->y3 x->y4 x->y5))
input-types))))
(if (> nargs 5)
(error "time to add more arguments to DEFINE-PRIMITIVE"))
`(,%let (,@(map (lambda (type pred)
`(,pred (,%input-type-predicate ,type)))
input-types
preds)
,@(map (lambda (type x->y)
`(,x->y (,%input-type-coercion ,type)))
input-types
x->ys)
(,%action ,action))
(,%lambda ()
(,%let* (,@(if space-proc
`((,%key (,%ensure-space (,space-proc ,%*val*))))
'())
,@(if (>= nargs 2) `((,%arg2 (,%pop))) `())
,@(if (>= nargs 3) `((,%arg3 (,%pop))) `())
,@(if (>= nargs 4) `((,%arg4 (,%pop))) `())
,@(if (>= nargs 5) `((,%arg5 (,%pop))) `())
)
(,%if (,%and ,@(map (lambda (pred place)
`(,pred ,place))
preds
places))
,(let ((yow `(,%action
,@(map (lambda (x->y place)
`(,x->y ,place))
x->ys
places)
,@(if space-proc `(,%key) '()))))
(if (null? returner-option)
yow
`(,%goto ,(car returner-option) ,yow)))
(,%raise-exception ,%wrong-type-argument
0
. ,places)))))))))
;----------------
; Checking inputs and coercing results
wins
(lambda (f) (f pred coercer)))
(define (input-type-predicate type) (type (lambda (x y) y x)))
(define (input-type-coercion type) (type (lambda (x y) x y)))
(define (no-coercion x) x)
(define any-> (input-type (lambda (x) x #t) no-coercion))
(define fixnum-> (input-type fixnum? extract-fixnum))
(define char-> (input-type vm-char? extract-char))
(define vm-char-> (input-type vm-char? no-coercion))
(define boolean-> (input-type vm-boolean? extract-boolean))
(define location-> (input-type location? no-coercion))
(define string-> (input-type vm-string? no-coercion))
(define vector-> (input-type vm-vector? no-coercion))
(define code-vector-> (input-type code-vector? no-coercion))
; Output coercion
(define (return val)
(set! *val* val)
(goto continue 0))
(define return-any return)
(define (return-boolean x)
(goto return (enter-boolean x)))
(define (return-fixnum x)
(goto return (enter-fixnum x)))
(define (return-char x)
(goto return (enter-char x)))
(define (return-unspecific x)
x ;ignored
(goto return unspecific-value))
(define (no-result)
(goto return unspecific-value))
| null | https://raw.githubusercontent.com/rmloveland/scheme48-0.53/1ae4531fac7150bd2af42d124da9b50dd1b89ec1/scheme/vm/define-primitive.scm | scheme | -*- Mode: Scheme; Syntax: Scheme; Package: Scheme; -*-
have readily reconstructed types).
----------------
Checking inputs and coercing results
Output coercion
ignored | Copyright ( c ) 1993 - 1999 by and . See file COPYING .
These are hacked to ensure that all calls to INPUT - TYPE - PREDICATE and
INPUT - TYPE - COERCION are evaluated at load time ( because they do n't
(define-syntax define-primitive
(syntax-rules ()
((define-primitive opcode input-types action)
(define-consing-primitive opcode input-types #f action))
((define-primitive opcode input-types action returner)
(define-consing-primitive opcode input-types #f action returner))))
(define-syntax define-consing-primitive
(syntax-rules ()
((define-consing-primitive opcode input-types space-proc action)
(let ((proc (primitive-procedure-action input-types space-proc action)))
(define-opcode opcode (proc))))
((define-consing-primitive opcode input-types space-proc action returner)
(let ((proc (primitive-procedure-action input-types space-proc action returner)))
(define-opcode opcode (proc))))))
(define-syntax primitive-procedure-action
(lambda (exp rename compare)
(destructure (((p-p-b input-types space-proc action . returner-option) exp))
(let* ((nargs (length input-types))
(%action (rename 'action))
(%key (rename 'key))
(%ensure-space (rename 'ensure-space))
(%*val* (rename '*val*))
(%arg2 (rename 'arg2))
(%arg3 (rename 'arg3))
(%arg4 (rename 'arg4))
(%arg5 (rename 'arg5))
(%pop (rename 'pop))
(%let (rename 'let))
(%let* (rename 'let*))
(%lambda (rename 'lambda))
(%if (rename 'if))
(%and (rename 'and))
(%goto (rename 'goto))
(%input-type-predicate (rename 'input-type-predicate))
(%input-type-coercion (rename 'input-type-coercion))
(%raise-exception (rename 'raise-exception))
(%wrong-type-argument (rename 'wrong-type-argument))
(shorten (lambda (l1 l2)
(map (lambda (x1 x2) x2 x1) l1 l2)))
(places (reverse (shorten (list %*val* %arg2 %arg3 %arg4 %arg5)
input-types)))
(preds (reverse (shorten (map rename
'(pred1 pred2 pred3 pred4 pred5))
input-types)))
(x->ys (reverse (shorten (map rename
'(x->y1 x->y2 x->y3 x->y4 x->y5))
input-types))))
(if (> nargs 5)
(error "time to add more arguments to DEFINE-PRIMITIVE"))
`(,%let (,@(map (lambda (type pred)
`(,pred (,%input-type-predicate ,type)))
input-types
preds)
,@(map (lambda (type x->y)
`(,x->y (,%input-type-coercion ,type)))
input-types
x->ys)
(,%action ,action))
(,%lambda ()
(,%let* (,@(if space-proc
`((,%key (,%ensure-space (,space-proc ,%*val*))))
'())
,@(if (>= nargs 2) `((,%arg2 (,%pop))) `())
,@(if (>= nargs 3) `((,%arg3 (,%pop))) `())
,@(if (>= nargs 4) `((,%arg4 (,%pop))) `())
,@(if (>= nargs 5) `((,%arg5 (,%pop))) `())
)
(,%if (,%and ,@(map (lambda (pred place)
`(,pred ,place))
preds
places))
,(let ((yow `(,%action
,@(map (lambda (x->y place)
`(,x->y ,place))
x->ys
places)
,@(if space-proc `(,%key) '()))))
(if (null? returner-option)
yow
`(,%goto ,(car returner-option) ,yow)))
(,%raise-exception ,%wrong-type-argument
0
. ,places)))))))))
wins
(lambda (f) (f pred coercer)))
(define (input-type-predicate type) (type (lambda (x y) y x)))
(define (input-type-coercion type) (type (lambda (x y) x y)))
(define (no-coercion x) x)
(define any-> (input-type (lambda (x) x #t) no-coercion))
(define fixnum-> (input-type fixnum? extract-fixnum))
(define char-> (input-type vm-char? extract-char))
(define vm-char-> (input-type vm-char? no-coercion))
(define boolean-> (input-type vm-boolean? extract-boolean))
(define location-> (input-type location? no-coercion))
(define string-> (input-type vm-string? no-coercion))
(define vector-> (input-type vm-vector? no-coercion))
(define code-vector-> (input-type code-vector? no-coercion))
(define (return val)
(set! *val* val)
(goto continue 0))
(define return-any return)
(define (return-boolean x)
(goto return (enter-boolean x)))
(define (return-fixnum x)
(goto return (enter-fixnum x)))
(define (return-char x)
(goto return (enter-char x)))
(define (return-unspecific x)
(goto return unspecific-value))
(define (no-result)
(goto return unspecific-value))
|
0b907a6c287875a91581b467d749bdb7bb7a6646f0bd47e856381b42c859e810 | TorXakis/TorXakis | ValExprImplsExtension.hs |
TorXakis - Model Based Testing
Copyright ( c ) 2015 - 2017 TNO and Radboud University
See LICENSE at root directory of this repository .
TorXakis - Model Based Testing
Copyright (c) 2015-2017 TNO and Radboud University
See LICENSE at root directory of this repository.
-}
-----------------------------------------------------------------------------
-- |
-- Module : ValExprImplsExtension
Copyright : ( c ) TNO and Radboud University
-- License : BSD3 (See LICENSE at root directory of this repository)
--
Maintainer : ( Embedded Systems Innovation by )
-- Stability : experimental
-- Portability : portable
--
-- This module defines extension of functions on and constructors of value expressions.
--
-----------------------------------------------------------------------------
module ValExprImplsExtension
* Derived Boolean operators
* * Or ( \/ )
cstrOr
-- ** Exclusive or (\|/)
, cstrXor
-- ** Implies (=>)
, cstrImplies
-- * Derived Integer operators:
-- ** Unary Plus
, cstrUnaryPlus
-- ** Unary Minus = negate single argument
, cstrUnaryMinus
* * Plus = Sum of two terms
, cstrPlus
-- ** Minus
, cstrMinus
* * Times = Product of two terms
, cstrTimes
-- ** Absolute value
, cstrAbs
-- * Derived Integer comparisons
-- ** Less than (<)
, cstrLT
-- ** Less Equal (<=)
, cstrLE
-- ** Greater Equal (>=)
, cstrGE
-- ** Greater Than (>)
, cstrGT
)
where
import qualified Data.Set as Set
import FreeMonoidX
import ValExprDefs
import ValExprImpls
-- | Apply operator Or (\\\/) on the provided set of value expressions.
Preconditions are /not/ checked .
cstrOr :: Ord v => Set.Set (ValExpr v) -> ValExpr v
a \/ b = = not ( not a /\ not b )
cstrOr = cstrNot . cstrAnd . Set.map cstrNot
| Apply operator ( \\\|/ ) on the provided set of value expressions .
Preconditions are /not/ checked .
cstrXor :: Ord v => ValExpr v -> ValExpr v -> ValExpr v
cstrXor a b = cstrOr (Set.fromList [ cstrAnd (Set.fromList [a, cstrNot b])
, cstrAnd (Set.fromList [cstrNot a, b])
])
-- | Apply operator Implies (=>) on the provided value expressions.
Preconditions are /not/ checked .
cstrImplies :: Ord v => ValExpr v -> ValExpr v -> ValExpr v
a = > b = = not a \/ b = = not ( a /\ not b )
cstrImplies a b = (cstrNot . cstrAnd) (Set.insert a (Set.singleton (cstrNot b)))
-- | Apply unary operator Plus on the provided value expression.
Preconditions are /not/ checked .
cstrUnaryPlus :: ValExpr v -> ValExpr v
cstrUnaryPlus = id
| Apply unary operator Minus on the provided value expression .
Preconditions are /not/ checked .
cstrUnaryMinus :: Ord v => ValExpr v -> ValExpr v
cstrUnaryMinus v = cstrSum (fromOccurListT [(v,-1)])
-- | Apply operator Add on the provided value expressions.
Preconditions are /not/ checked .
cstrPlus :: Ord v => ValExpr v -> ValExpr v -> ValExpr v
cstrPlus a b = cstrSum (fromListT [a,b])
| Apply operator Minus on the provided value expressions .
Preconditions are /not/ checked .
cstrMinus :: Ord v => ValExpr v -> ValExpr v -> ValExpr v
cstrMinus a b = cstrSum (fromOccurListT [(a,1),(b,-1)])
| Apply operator Times on the provided value expressions .
Preconditions are /not/ checked .
cstrTimes :: Ord v => ValExpr v -> ValExpr v -> ValExpr v
cstrTimes a b = cstrProduct (fromListT [a,b])
-- | Apply operator Absolute value (abs) on the provided value expression.
Preconditions are /not/ checked .
cstrAbs :: Ord v => ValExpr v -> ValExpr v
cstrAbs a = cstrITE (cstrGEZ a) a (cstrUnaryMinus a)
| Apply operator LT ( < ) on the provided value expression .
Preconditions are /not/ checked .
cstrLT :: Ord v => ValExpr v -> ValExpr v -> ValExpr v
-- a < b <==> a - b < 0 <==> Not ( a - b >= 0 )
cstrLT ve1 ve2 = cstrNot (cstrGEZ (cstrSum (fromOccurListT [(ve1,1),(ve2,-1)])))
| Apply operator GT ( > ) on the provided value expression .
Preconditions are /not/ checked .
cstrGT :: Ord v => ValExpr v -> ValExpr v -> ValExpr v
-- a > b <==> 0 > b - a <==> Not ( 0 <= b - a )
cstrGT ve1 ve2 = cstrNot (cstrGEZ (cstrSum (fromOccurListT [(ve1,-1),(ve2,1)])))
| Apply operator LE ( < =) on the provided value expression .
Preconditions are /not/ checked .
cstrLE :: Ord v => ValExpr v -> ValExpr v -> ValExpr v
-- a <= b <==> 0 <= b - a
cstrLE ve1 ve2 = cstrGEZ (cstrSum (fromOccurListT [(ve1,-1),(ve2,1)]))
| Apply operator GE ( > =) on the provided value expression .
Preconditions are /not/ checked .
cstrGE :: Ord v => ValExpr v -> ValExpr v -> ValExpr v
a > = b < = = > a - b > = 0
cstrGE ve1 ve2 = cstrGEZ (cstrSum (fromOccurListT [(ve1,1),(ve2,-1)]))
| null | https://raw.githubusercontent.com/TorXakis/TorXakis/038463824b3d358df6b6b3ff08732335b7dbdb53/sys/valexpr/src/ValExprImplsExtension.hs | haskell | ---------------------------------------------------------------------------
|
Module : ValExprImplsExtension
License : BSD3 (See LICENSE at root directory of this repository)
Stability : experimental
Portability : portable
This module defines extension of functions on and constructors of value expressions.
---------------------------------------------------------------------------
** Exclusive or (\|/)
** Implies (=>)
* Derived Integer operators:
** Unary Plus
** Unary Minus = negate single argument
** Minus
** Absolute value
* Derived Integer comparisons
** Less than (<)
** Less Equal (<=)
** Greater Equal (>=)
** Greater Than (>)
| Apply operator Or (\\\/) on the provided set of value expressions.
| Apply operator Implies (=>) on the provided value expressions.
| Apply unary operator Plus on the provided value expression.
| Apply operator Add on the provided value expressions.
| Apply operator Absolute value (abs) on the provided value expression.
a < b <==> a - b < 0 <==> Not ( a - b >= 0 )
a > b <==> 0 > b - a <==> Not ( 0 <= b - a )
a <= b <==> 0 <= b - a |
TorXakis - Model Based Testing
Copyright ( c ) 2015 - 2017 TNO and Radboud University
See LICENSE at root directory of this repository .
TorXakis - Model Based Testing
Copyright (c) 2015-2017 TNO and Radboud University
See LICENSE at root directory of this repository.
-}
Copyright : ( c ) TNO and Radboud University
Maintainer : ( Embedded Systems Innovation by )
module ValExprImplsExtension
* Derived Boolean operators
* * Or ( \/ )
cstrOr
, cstrXor
, cstrImplies
, cstrUnaryPlus
, cstrUnaryMinus
* * Plus = Sum of two terms
, cstrPlus
, cstrMinus
* * Times = Product of two terms
, cstrTimes
, cstrAbs
, cstrLT
, cstrLE
, cstrGE
, cstrGT
)
where
import qualified Data.Set as Set
import FreeMonoidX
import ValExprDefs
import ValExprImpls
Preconditions are /not/ checked .
cstrOr :: Ord v => Set.Set (ValExpr v) -> ValExpr v
a \/ b = = not ( not a /\ not b )
cstrOr = cstrNot . cstrAnd . Set.map cstrNot
| Apply operator ( \\\|/ ) on the provided set of value expressions .
Preconditions are /not/ checked .
cstrXor :: Ord v => ValExpr v -> ValExpr v -> ValExpr v
cstrXor a b = cstrOr (Set.fromList [ cstrAnd (Set.fromList [a, cstrNot b])
, cstrAnd (Set.fromList [cstrNot a, b])
])
Preconditions are /not/ checked .
cstrImplies :: Ord v => ValExpr v -> ValExpr v -> ValExpr v
a = > b = = not a \/ b = = not ( a /\ not b )
cstrImplies a b = (cstrNot . cstrAnd) (Set.insert a (Set.singleton (cstrNot b)))
Preconditions are /not/ checked .
cstrUnaryPlus :: ValExpr v -> ValExpr v
cstrUnaryPlus = id
| Apply unary operator Minus on the provided value expression .
Preconditions are /not/ checked .
cstrUnaryMinus :: Ord v => ValExpr v -> ValExpr v
cstrUnaryMinus v = cstrSum (fromOccurListT [(v,-1)])
Preconditions are /not/ checked .
cstrPlus :: Ord v => ValExpr v -> ValExpr v -> ValExpr v
cstrPlus a b = cstrSum (fromListT [a,b])
| Apply operator Minus on the provided value expressions .
Preconditions are /not/ checked .
cstrMinus :: Ord v => ValExpr v -> ValExpr v -> ValExpr v
cstrMinus a b = cstrSum (fromOccurListT [(a,1),(b,-1)])
| Apply operator Times on the provided value expressions .
Preconditions are /not/ checked .
cstrTimes :: Ord v => ValExpr v -> ValExpr v -> ValExpr v
cstrTimes a b = cstrProduct (fromListT [a,b])
Preconditions are /not/ checked .
cstrAbs :: Ord v => ValExpr v -> ValExpr v
cstrAbs a = cstrITE (cstrGEZ a) a (cstrUnaryMinus a)
| Apply operator LT ( < ) on the provided value expression .
Preconditions are /not/ checked .
cstrLT :: Ord v => ValExpr v -> ValExpr v -> ValExpr v
cstrLT ve1 ve2 = cstrNot (cstrGEZ (cstrSum (fromOccurListT [(ve1,1),(ve2,-1)])))
| Apply operator GT ( > ) on the provided value expression .
Preconditions are /not/ checked .
cstrGT :: Ord v => ValExpr v -> ValExpr v -> ValExpr v
cstrGT ve1 ve2 = cstrNot (cstrGEZ (cstrSum (fromOccurListT [(ve1,-1),(ve2,1)])))
| Apply operator LE ( < =) on the provided value expression .
Preconditions are /not/ checked .
cstrLE :: Ord v => ValExpr v -> ValExpr v -> ValExpr v
cstrLE ve1 ve2 = cstrGEZ (cstrSum (fromOccurListT [(ve1,-1),(ve2,1)]))
| Apply operator GE ( > =) on the provided value expression .
Preconditions are /not/ checked .
cstrGE :: Ord v => ValExpr v -> ValExpr v -> ValExpr v
a > = b < = = > a - b > = 0
cstrGE ve1 ve2 = cstrGEZ (cstrSum (fromOccurListT [(ve1,1),(ve2,-1)]))
|
d12c4f38e5eeef440eff173d4b4bfd68281bc47350647f553dbf53f3740cee33 | ocheron/cryptostore | PBES1.hs | -- |
-- Module : Data.Store.PKCS5.PBES1
-- License : BSD-style
Maintainer : < >
-- Stability : experimental
-- Portability : unknown
--
-- Password-Based Encryption Schemes
{-# LANGUAGE BangPatterns #-}
# LANGUAGE RecordWildCards #
# LANGUAGE FlexibleInstances #
{-# LANGUAGE GADTs #-}
# LANGUAGE MultiParamTypeClasses #
module Crypto.Store.PKCS5.PBES1
( PBEParameter(..)
, Key
, ProtectionPassword
, emptyNotTerminated
, fromProtectionPassword
, toProtectionPassword
, toProtectionPasswords
, pkcs5
, pkcs12
, pkcs12rc2
, pkcs12stream
, pkcs12mac
, rc4Combine
) where
import Basement.Block (Block)
import Basement.Compat.IsList
import Basement.Endianness
import qualified Basement.String as S
import Crypto.Cipher.Types
import qualified Crypto.Cipher.RC4 as RC4
import qualified Crypto.Hash as Hash
import Data.ASN1.Types
import Data.Bits
import Data.ByteArray (ByteArray, ByteArrayAccess)
import qualified Data.ByteArray as B
import Data.ByteString (ByteString)
import Data.Maybe (fromMaybe)
import Data.Memory.PtrMethods
import Data.String (IsString(..))
import Data.Word
import Foreign.Ptr (plusPtr)
import Foreign.Storable
import Crypto.Store.ASN1.Parse
import Crypto.Store.ASN1.Generate
import Crypto.Store.CMS.Algorithms
import Crypto.Store.CMS.Util
import Crypto.Store.Error
| A password stored as a sequence of UTF-8 bytes .
--
-- Some key-derivation functions add restrictions to what characters
-- are supported.
--
-- The data type provides a special value 'emptyNotTerminated' that is used
-- as alternate representation of empty passwords on some systems and that
-- produces encryption results different than an empty bytearray.
--
-- Conversion to/from a regular sequence of bytes is possible with functions
-- 'toProtectionPassword' and 'fromProtectionPassword'.
--
-- Beware: the 'fromString' implementation correctly handles multi-byte
characters , so here is not equivalent to the ' ByteString ' counterpart .
data ProtectionPassword = NullPassword | PasswordUTF8 ByteString
deriving Eq
instance Show ProtectionPassword where
showsPrec _ NullPassword = showString "emptyNotTerminated"
showsPrec d (PasswordUTF8 b) = showParen (d > 10) $
showString "toProtectionPassword " . showsPrec 11 b
instance IsString ProtectionPassword where
fromString = PasswordUTF8 . B.convert . S.toBytes S.UTF8 . fromString
instance ByteArrayAccess ProtectionPassword where
length = applyPP 0 B.length
withByteArray = B.withByteArray . fromProtectionPassword
applyPP :: a -> (ByteString -> a) -> ProtectionPassword -> a
applyPP d _ NullPassword = d
applyPP _ f (PasswordUTF8 b) = f b
-- | A value denoting an empty password, but having a special encoding when
-- deriving a symmetric key on some systems, like the certificate export
wizard on Windows .
--
-- This value is different from @'toProtectionPassword' ""@ and can be tried
-- when decrypting content with a password known to be empty.
emptyNotTerminated :: ProtectionPassword
emptyNotTerminated = NullPassword
| Extract the UTF-8 bytes in a password value .
fromProtectionPassword :: ProtectionPassword -> ByteString
fromProtectionPassword = applyPP B.empty id
| Build a password value from a sequence of UTF-8 bytes .
--
-- When the password is empty, the special value 'emptyNotTerminated' may
-- be tried as well.
toProtectionPassword :: ByteString -> ProtectionPassword
toProtectionPassword = PasswordUTF8
toProtectionPasswords :: ByteString -> [ProtectionPassword]
toProtectionPasswords bs
| B.null bs = [PasswordUTF8 B.empty, NullPassword]
| otherwise = [PasswordUTF8 bs]
-- | Secret key.
type Key = B.ScrubbedBytes
-- | PBES1 parameters.
data PBEParameter = PBEParameter
^ 8 - octet salt value
, pbeIterationCount :: Int -- ^ Iteration count
}
deriving (Show,Eq)
instance ASN1Elem e => ProduceASN1Object e PBEParameter where
asn1s PBEParameter{..} =
let salt = gOctetString pbeSalt
iters = gIntVal (toInteger pbeIterationCount)
in asn1Container Sequence (salt . iters)
instance Monoid e => ParseASN1Object e PBEParameter where
parse = onNextContainer Sequence $ do
OctetString salt <- getNext
IntVal iters <- getNext
return PBEParameter { pbeSalt = salt
, pbeIterationCount = fromInteger iters }
cbcWith :: (BlockCipher cipher, ByteArrayAccess iv)
=> ContentEncryptionCipher cipher -> iv -> ContentEncryptionParams
cbcWith cipher iv = ParamsCBC cipher getIV
where
getIV = fromMaybe (error "PKCS5: bad initialization vector") (makeIV iv)
rc2cbcWith :: ByteArrayAccess iv => Int -> iv -> ContentEncryptionParams
rc2cbcWith len iv = ParamsCBCRC2 len getIV
where
getIV = fromMaybe (error "PKCS5: bad RC2 initialization vector") (makeIV iv)
-- | RC4 encryption or decryption.
rc4Combine :: (ByteArrayAccess key, ByteArray ba) => key -> ba -> Either StoreError ba
rc4Combine key = Right . snd . RC4.combine (RC4.initialize key)
| Conversion to UCS2 from UTF-8 , ignoring non - BMP bits .
toUCS2 :: ByteArray bucs2 => ProtectionPassword -> Maybe bucs2
toUCS2 NullPassword = Just B.empty
toUCS2 (PasswordUTF8 pwdUTF8)
| B.null r = Just pwdUCS2
| otherwise = Nothing
where
(p, _, r) = S.fromBytes S.UTF8 $ B.snoc (B.convert pwdUTF8) 0
pwdBlock = fromList $ map ucs2 $ toList p :: Block (BE Word16)
pwdUCS2 = B.convert pwdBlock
ucs2 :: Char -> BE Word16
ucs2 = toBE . toEnum . fromEnum
-- PBES1, RFC 8018 section 6.1.2
-- | Apply PBKDF1 on the specified password and run an encryption or decryption
-- function on some input using derived key and IV.
pkcs5 :: (Hash.HashAlgorithm hash, BlockCipher cipher)
=> (StoreError -> result)
-> (Key -> ContentEncryptionParams -> ByteString -> result)
-> DigestProxy hash
-> ContentEncryptionCipher cipher
-> PBEParameter
-> ByteString
-> ProtectionPassword
-> result
pkcs5 failure encdec hashAlg cec pbeParam bs pwd
| proxyBlockSize cec /= 8 = failure (InvalidParameter "Invalid cipher block size")
| otherwise =
case pbkdf1 hashAlg (fromProtectionPassword pwd) pbeParam 16 of
Left err -> failure err
Right dk ->
let (key, iv) = B.splitAt 8 (dk :: Key)
in encdec key (cbcWith cec iv) bs
PBKDF1 , RFC 8018 section 5.1
pbkdf1 :: (Hash.HashAlgorithm hash, ByteArrayAccess password, ByteArray out)
=> DigestProxy hash
-> password
-> PBEParameter
-> Int
-> Either StoreError out
pbkdf1 hashAlg pwd PBEParameter{..} dkLen
| dkLen > B.length t1 = Left (InvalidParameter "Derived key too long")
| otherwise = Right (B.convert $ B.takeView tc dkLen)
where
a = hashFromProxy hashAlg
t1 = Hash.hashFinalize (Hash.hashUpdate (Hash.hashUpdate (Hash.hashInitWith a) pwd) pbeSalt)
tc = iterate (Hash.hashWith a) t1 !! pred pbeIterationCount
PKCS#12 encryption , RFC 7292 appendix B.2
| Apply PKCS # 12 derivation on the specified password and run an encryption
-- or decryption function on some input using derived key and IV.
pkcs12 :: (Hash.HashAlgorithm hash, BlockCipher cipher)
=> (StoreError -> result)
-> (Key -> ContentEncryptionParams -> ByteString -> result)
-> DigestProxy hash
-> ContentEncryptionCipher cipher
-> PBEParameter
-> ByteString
-> ProtectionPassword
-> result
pkcs12 failure encdec hashAlg cec pbeParam bs pwdUTF8 =
case toUCS2 pwdUTF8 of
Nothing -> failure passwordNotUTF8
Just pwdUCS2 ->
let ivLen = proxyBlockSize cec
iv = pkcs12Derive hashAlg pbeParam 2 pwdUCS2 ivLen :: B.Bytes
eScheme = cbcWith cec iv
keyLen = getMaximumKeySize eScheme
key = pkcs12Derive hashAlg pbeParam 1 pwdUCS2 keyLen :: Key
in encdec key eScheme bs
| Apply PKCS # 12 derivation on the specified password and run an encryption
-- or decryption function on some input using derived key and IV. This variant
uses an RC2 cipher with the EKL specified ( effective key length ) .
pkcs12rc2 :: Hash.HashAlgorithm hash
=> (StoreError -> result)
-> (Key -> ContentEncryptionParams -> ByteString -> result)
-> DigestProxy hash
-> Int
-> PBEParameter
-> ByteString
-> ProtectionPassword
-> result
pkcs12rc2 failure encdec hashAlg len pbeParam bs pwdUTF8 =
case toUCS2 pwdUTF8 of
Nothing -> failure passwordNotUTF8
Just pwdUCS2 ->
let ivLen = 8
iv = pkcs12Derive hashAlg pbeParam 2 pwdUCS2 ivLen :: B.Bytes
eScheme = rc2cbcWith len iv
keyLen = getMaximumKeySize eScheme
key = pkcs12Derive hashAlg pbeParam 1 pwdUCS2 keyLen :: Key
in encdec key eScheme bs
| Apply PKCS # 12 derivation on the specified password and run an encryption
-- or decryption function on some input using derived key. This variant does
-- not derive any IV and is required for RC4.
pkcs12stream :: Hash.HashAlgorithm hash
=> (StoreError -> result)
-> (Key -> ByteString -> result)
-> DigestProxy hash
-> Int
-> PBEParameter
-> ByteString
-> ProtectionPassword
-> result
pkcs12stream failure encdec hashAlg keyLen pbeParam bs pwdUTF8 =
case toUCS2 pwdUTF8 of
Nothing -> failure passwordNotUTF8
Just pwdUCS2 ->
let key = pkcs12Derive hashAlg pbeParam 1 pwdUCS2 keyLen :: Key
in encdec key bs
| Apply PKCS # 12 derivation on the specified password and run a MAC function
-- on some input using derived key.
pkcs12mac :: Hash.HashAlgorithm hash
=> (StoreError -> result)
-> (Key -> MACAlgorithm -> ByteString -> result)
-> DigestProxy hash
-> PBEParameter
-> ByteString
-> ProtectionPassword
-> result
pkcs12mac failure macFn hashAlg pbeParam bs pwdUTF8 =
case toUCS2 pwdUTF8 of
Nothing -> failure passwordNotUTF8
Just pwdUCS2 ->
let macAlg = HMAC hashAlg
keyLen = getMaximumKeySize macAlg
key = pkcs12Derive hashAlg pbeParam 3 pwdUCS2 keyLen :: Key
in macFn key macAlg bs
passwordNotUTF8 :: StoreError
passwordNotUTF8 = InvalidPassword "Provided password is not valid UTF-8"
pkcs12Derive :: (Hash.HashAlgorithm hash, ByteArray bout)
=> DigestProxy hash
-> PBEParameter
-> Word8
-> ByteString -- password (UCS2)
-> Int
-> bout
pkcs12Derive hashAlg PBEParameter{..} idByte pwdUCS2 n =
B.take n $ B.concat $ take c $ loop t (s `B.append` p)
where
a = hashFromProxy hashAlg
v = getV (DigestAlgorithm hashAlg)
u = Hash.hashDigestSize a
c = (n + u - 1) `div` u
d = B.replicate v idByte :: B.Bytes
t = Hash.hashUpdate (Hash.hashInitWith a) d
p = pwdUCS2 `extendedToMult` v
s = pbeSalt `extendedToMult` v
loop :: Hash.HashAlgorithm hash
=> Hash.Context hash -> ByteString -> [Hash.Digest hash]
loop x i = let z = Hash.hashFinalize (Hash.hashUpdate x i)
ai = iterate Hash.hash z !! pred pbeIterationCount
b = ai `extendedTo` v
j = B.concat $ map (add1 b) (chunks v i)
in ai : loop x j
getV :: DigestAlgorithm -> Int
getV (DigestAlgorithm MD2) = 64
getV (DigestAlgorithm MD4) = 64
getV (DigestAlgorithm MD5) = 64
getV (DigestAlgorithm SHA1) = 64
getV (DigestAlgorithm SHA224) = 64
getV (DigestAlgorithm SHA256) = 64
getV (DigestAlgorithm SHA384) = 128
getV (DigestAlgorithm SHA512) = 128
getV t = error ("pkcs12Derive: unsupported hash: " ++ show t)
hashFromProxy :: proxy a -> a
hashFromProxy _ = undefined
-- Split in chunks of size 'n'
chunks :: ByteArray ba => Int -> ba -> [ba]
chunks n bs
| len > n = let (c, cs) = B.splitAt n bs in c : chunks n cs
| len > 0 = [bs]
| otherwise = []
where
len = B.length bs
Concatenate copies of input ' bs ' to create output of length ' n '
-- bytes (the final copy may be truncated)
extendedTo :: (ByteArrayAccess bin, ByteArray bout) => bin -> Int -> bout
bs `extendedTo` n =
B.allocAndFreeze n $ \pout ->
B.withByteArray bs $ \pin -> do
mapM_ (\off -> memCopy (pout `plusPtr` off) pin len)
(enumFromThenTo 0 len (n - len))
memCopy (pout `plusPtr` (n - r)) pin r
where
len = B.length bs
r = n `mod` len
# NOINLINE extendedTo #
Concatenate copies of input ' bs ' to create output whose length is a
-- multiple of 'n' bytes (the final copy may be truncated). If input
-- is the empty string, so is the output.
extendedToMult :: ByteArray ba => ba -> Int -> ba
bs `extendedToMult` n
| len > n = bs `B.append` B.take (n - len `mod` n) bs
| len == n = bs
| len > 0 = bs `extendedTo` n
| otherwise = B.empty
where
len = B.length bs
Add two bytearrays ( considered as big - endian integers ) and increment the
result . Output has size of the first bytearray .
add1 :: ByteString -> ByteString -> ByteString
add1 a b =
B.allocAndFreeze alen $ \pc ->
B.withByteArray a $ \pa ->
B.withByteArray b $ \pb ->
loop3 pa pb pc alen blen 1
where
alen = B.length a
blen = B.length b
-- main loop when both 'a' and 'b' have remaining bytes
loop3 !pa !pb !pc !ma !mb !c
| ma == 0 = return ()
| mb == 0 = loop2 pa pc ma c
| otherwise = do
let na = pred ma
nb = pred mb
ba <- peekElemOff pa na
bb <- peekElemOff pb nb
let (cc, bc) = carryAdd3 c ba bb
pokeElemOff pc na bc
loop3 pa pb pc na nb cc
-- when 'b' is smaller and bytes are exhausted we propagate
-- carry on 'a' alone
loop2 !pa !pc !ma !c
| ma == 0 = return ()
| otherwise = do
let na = pred ma
ba <- peekElemOff pa na
let (cc, bc) = carryAdd2 c ba
pokeElemOff pc na bc
loop2 pa pc na cc
split16 :: Word16 -> (Word8, Word8)
split16 x = (fromIntegral (shiftR x 8), fromIntegral x)
carryAdd2 :: Word8 -> Word8 -> (Word8, Word8)
carryAdd2 a b = split16 (fromIntegral a + fromIntegral b)
carryAdd3 :: Word8 -> Word8 -> Word8 -> (Word8, Word8)
carryAdd3 a b c = split16 (fromIntegral a + fromIntegral b + fromIntegral c)
| null | https://raw.githubusercontent.com/ocheron/cryptostore/b4e742b4ed0146ddf510b765f64317b6f8be58d7/src/Crypto/Store/PKCS5/PBES1.hs | haskell | |
Module : Data.Store.PKCS5.PBES1
License : BSD-style
Stability : experimental
Portability : unknown
Password-Based Encryption Schemes
# LANGUAGE BangPatterns #
# LANGUAGE GADTs #
Some key-derivation functions add restrictions to what characters
are supported.
The data type provides a special value 'emptyNotTerminated' that is used
as alternate representation of empty passwords on some systems and that
produces encryption results different than an empty bytearray.
Conversion to/from a regular sequence of bytes is possible with functions
'toProtectionPassword' and 'fromProtectionPassword'.
Beware: the 'fromString' implementation correctly handles multi-byte
| A value denoting an empty password, but having a special encoding when
deriving a symmetric key on some systems, like the certificate export
This value is different from @'toProtectionPassword' ""@ and can be tried
when decrypting content with a password known to be empty.
When the password is empty, the special value 'emptyNotTerminated' may
be tried as well.
| Secret key.
| PBES1 parameters.
^ Iteration count
| RC4 encryption or decryption.
PBES1, RFC 8018 section 6.1.2
| Apply PBKDF1 on the specified password and run an encryption or decryption
function on some input using derived key and IV.
or decryption function on some input using derived key and IV.
or decryption function on some input using derived key and IV. This variant
or decryption function on some input using derived key. This variant does
not derive any IV and is required for RC4.
on some input using derived key.
password (UCS2)
Split in chunks of size 'n'
bytes (the final copy may be truncated)
multiple of 'n' bytes (the final copy may be truncated). If input
is the empty string, so is the output.
main loop when both 'a' and 'b' have remaining bytes
when 'b' is smaller and bytes are exhausted we propagate
carry on 'a' alone | Maintainer : < >
# LANGUAGE RecordWildCards #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
module Crypto.Store.PKCS5.PBES1
( PBEParameter(..)
, Key
, ProtectionPassword
, emptyNotTerminated
, fromProtectionPassword
, toProtectionPassword
, toProtectionPasswords
, pkcs5
, pkcs12
, pkcs12rc2
, pkcs12stream
, pkcs12mac
, rc4Combine
) where
import Basement.Block (Block)
import Basement.Compat.IsList
import Basement.Endianness
import qualified Basement.String as S
import Crypto.Cipher.Types
import qualified Crypto.Cipher.RC4 as RC4
import qualified Crypto.Hash as Hash
import Data.ASN1.Types
import Data.Bits
import Data.ByteArray (ByteArray, ByteArrayAccess)
import qualified Data.ByteArray as B
import Data.ByteString (ByteString)
import Data.Maybe (fromMaybe)
import Data.Memory.PtrMethods
import Data.String (IsString(..))
import Data.Word
import Foreign.Ptr (plusPtr)
import Foreign.Storable
import Crypto.Store.ASN1.Parse
import Crypto.Store.ASN1.Generate
import Crypto.Store.CMS.Algorithms
import Crypto.Store.CMS.Util
import Crypto.Store.Error
| A password stored as a sequence of UTF-8 bytes .
characters , so here is not equivalent to the ' ByteString ' counterpart .
data ProtectionPassword = NullPassword | PasswordUTF8 ByteString
deriving Eq
instance Show ProtectionPassword where
showsPrec _ NullPassword = showString "emptyNotTerminated"
showsPrec d (PasswordUTF8 b) = showParen (d > 10) $
showString "toProtectionPassword " . showsPrec 11 b
instance IsString ProtectionPassword where
fromString = PasswordUTF8 . B.convert . S.toBytes S.UTF8 . fromString
instance ByteArrayAccess ProtectionPassword where
length = applyPP 0 B.length
withByteArray = B.withByteArray . fromProtectionPassword
applyPP :: a -> (ByteString -> a) -> ProtectionPassword -> a
applyPP d _ NullPassword = d
applyPP _ f (PasswordUTF8 b) = f b
wizard on Windows .
emptyNotTerminated :: ProtectionPassword
emptyNotTerminated = NullPassword
| Extract the UTF-8 bytes in a password value .
fromProtectionPassword :: ProtectionPassword -> ByteString
fromProtectionPassword = applyPP B.empty id
| Build a password value from a sequence of UTF-8 bytes .
toProtectionPassword :: ByteString -> ProtectionPassword
toProtectionPassword = PasswordUTF8
toProtectionPasswords :: ByteString -> [ProtectionPassword]
toProtectionPasswords bs
| B.null bs = [PasswordUTF8 B.empty, NullPassword]
| otherwise = [PasswordUTF8 bs]
type Key = B.ScrubbedBytes
data PBEParameter = PBEParameter
^ 8 - octet salt value
}
deriving (Show,Eq)
instance ASN1Elem e => ProduceASN1Object e PBEParameter where
asn1s PBEParameter{..} =
let salt = gOctetString pbeSalt
iters = gIntVal (toInteger pbeIterationCount)
in asn1Container Sequence (salt . iters)
instance Monoid e => ParseASN1Object e PBEParameter where
parse = onNextContainer Sequence $ do
OctetString salt <- getNext
IntVal iters <- getNext
return PBEParameter { pbeSalt = salt
, pbeIterationCount = fromInteger iters }
cbcWith :: (BlockCipher cipher, ByteArrayAccess iv)
=> ContentEncryptionCipher cipher -> iv -> ContentEncryptionParams
cbcWith cipher iv = ParamsCBC cipher getIV
where
getIV = fromMaybe (error "PKCS5: bad initialization vector") (makeIV iv)
rc2cbcWith :: ByteArrayAccess iv => Int -> iv -> ContentEncryptionParams
rc2cbcWith len iv = ParamsCBCRC2 len getIV
where
getIV = fromMaybe (error "PKCS5: bad RC2 initialization vector") (makeIV iv)
rc4Combine :: (ByteArrayAccess key, ByteArray ba) => key -> ba -> Either StoreError ba
rc4Combine key = Right . snd . RC4.combine (RC4.initialize key)
| Conversion to UCS2 from UTF-8 , ignoring non - BMP bits .
toUCS2 :: ByteArray bucs2 => ProtectionPassword -> Maybe bucs2
toUCS2 NullPassword = Just B.empty
toUCS2 (PasswordUTF8 pwdUTF8)
| B.null r = Just pwdUCS2
| otherwise = Nothing
where
(p, _, r) = S.fromBytes S.UTF8 $ B.snoc (B.convert pwdUTF8) 0
pwdBlock = fromList $ map ucs2 $ toList p :: Block (BE Word16)
pwdUCS2 = B.convert pwdBlock
ucs2 :: Char -> BE Word16
ucs2 = toBE . toEnum . fromEnum
pkcs5 :: (Hash.HashAlgorithm hash, BlockCipher cipher)
=> (StoreError -> result)
-> (Key -> ContentEncryptionParams -> ByteString -> result)
-> DigestProxy hash
-> ContentEncryptionCipher cipher
-> PBEParameter
-> ByteString
-> ProtectionPassword
-> result
pkcs5 failure encdec hashAlg cec pbeParam bs pwd
| proxyBlockSize cec /= 8 = failure (InvalidParameter "Invalid cipher block size")
| otherwise =
case pbkdf1 hashAlg (fromProtectionPassword pwd) pbeParam 16 of
Left err -> failure err
Right dk ->
let (key, iv) = B.splitAt 8 (dk :: Key)
in encdec key (cbcWith cec iv) bs
PBKDF1 , RFC 8018 section 5.1
pbkdf1 :: (Hash.HashAlgorithm hash, ByteArrayAccess password, ByteArray out)
=> DigestProxy hash
-> password
-> PBEParameter
-> Int
-> Either StoreError out
pbkdf1 hashAlg pwd PBEParameter{..} dkLen
| dkLen > B.length t1 = Left (InvalidParameter "Derived key too long")
| otherwise = Right (B.convert $ B.takeView tc dkLen)
where
a = hashFromProxy hashAlg
t1 = Hash.hashFinalize (Hash.hashUpdate (Hash.hashUpdate (Hash.hashInitWith a) pwd) pbeSalt)
tc = iterate (Hash.hashWith a) t1 !! pred pbeIterationCount
PKCS#12 encryption , RFC 7292 appendix B.2
| Apply PKCS # 12 derivation on the specified password and run an encryption
pkcs12 :: (Hash.HashAlgorithm hash, BlockCipher cipher)
=> (StoreError -> result)
-> (Key -> ContentEncryptionParams -> ByteString -> result)
-> DigestProxy hash
-> ContentEncryptionCipher cipher
-> PBEParameter
-> ByteString
-> ProtectionPassword
-> result
pkcs12 failure encdec hashAlg cec pbeParam bs pwdUTF8 =
case toUCS2 pwdUTF8 of
Nothing -> failure passwordNotUTF8
Just pwdUCS2 ->
let ivLen = proxyBlockSize cec
iv = pkcs12Derive hashAlg pbeParam 2 pwdUCS2 ivLen :: B.Bytes
eScheme = cbcWith cec iv
keyLen = getMaximumKeySize eScheme
key = pkcs12Derive hashAlg pbeParam 1 pwdUCS2 keyLen :: Key
in encdec key eScheme bs
| Apply PKCS # 12 derivation on the specified password and run an encryption
uses an RC2 cipher with the EKL specified ( effective key length ) .
pkcs12rc2 :: Hash.HashAlgorithm hash
=> (StoreError -> result)
-> (Key -> ContentEncryptionParams -> ByteString -> result)
-> DigestProxy hash
-> Int
-> PBEParameter
-> ByteString
-> ProtectionPassword
-> result
pkcs12rc2 failure encdec hashAlg len pbeParam bs pwdUTF8 =
case toUCS2 pwdUTF8 of
Nothing -> failure passwordNotUTF8
Just pwdUCS2 ->
let ivLen = 8
iv = pkcs12Derive hashAlg pbeParam 2 pwdUCS2 ivLen :: B.Bytes
eScheme = rc2cbcWith len iv
keyLen = getMaximumKeySize eScheme
key = pkcs12Derive hashAlg pbeParam 1 pwdUCS2 keyLen :: Key
in encdec key eScheme bs
| Apply PKCS # 12 derivation on the specified password and run an encryption
pkcs12stream :: Hash.HashAlgorithm hash
=> (StoreError -> result)
-> (Key -> ByteString -> result)
-> DigestProxy hash
-> Int
-> PBEParameter
-> ByteString
-> ProtectionPassword
-> result
pkcs12stream failure encdec hashAlg keyLen pbeParam bs pwdUTF8 =
case toUCS2 pwdUTF8 of
Nothing -> failure passwordNotUTF8
Just pwdUCS2 ->
let key = pkcs12Derive hashAlg pbeParam 1 pwdUCS2 keyLen :: Key
in encdec key bs
| Apply PKCS # 12 derivation on the specified password and run a MAC function
pkcs12mac :: Hash.HashAlgorithm hash
=> (StoreError -> result)
-> (Key -> MACAlgorithm -> ByteString -> result)
-> DigestProxy hash
-> PBEParameter
-> ByteString
-> ProtectionPassword
-> result
pkcs12mac failure macFn hashAlg pbeParam bs pwdUTF8 =
case toUCS2 pwdUTF8 of
Nothing -> failure passwordNotUTF8
Just pwdUCS2 ->
let macAlg = HMAC hashAlg
keyLen = getMaximumKeySize macAlg
key = pkcs12Derive hashAlg pbeParam 3 pwdUCS2 keyLen :: Key
in macFn key macAlg bs
passwordNotUTF8 :: StoreError
passwordNotUTF8 = InvalidPassword "Provided password is not valid UTF-8"
pkcs12Derive :: (Hash.HashAlgorithm hash, ByteArray bout)
=> DigestProxy hash
-> PBEParameter
-> Word8
-> Int
-> bout
pkcs12Derive hashAlg PBEParameter{..} idByte pwdUCS2 n =
B.take n $ B.concat $ take c $ loop t (s `B.append` p)
where
a = hashFromProxy hashAlg
v = getV (DigestAlgorithm hashAlg)
u = Hash.hashDigestSize a
c = (n + u - 1) `div` u
d = B.replicate v idByte :: B.Bytes
t = Hash.hashUpdate (Hash.hashInitWith a) d
p = pwdUCS2 `extendedToMult` v
s = pbeSalt `extendedToMult` v
loop :: Hash.HashAlgorithm hash
=> Hash.Context hash -> ByteString -> [Hash.Digest hash]
loop x i = let z = Hash.hashFinalize (Hash.hashUpdate x i)
ai = iterate Hash.hash z !! pred pbeIterationCount
b = ai `extendedTo` v
j = B.concat $ map (add1 b) (chunks v i)
in ai : loop x j
getV :: DigestAlgorithm -> Int
getV (DigestAlgorithm MD2) = 64
getV (DigestAlgorithm MD4) = 64
getV (DigestAlgorithm MD5) = 64
getV (DigestAlgorithm SHA1) = 64
getV (DigestAlgorithm SHA224) = 64
getV (DigestAlgorithm SHA256) = 64
getV (DigestAlgorithm SHA384) = 128
getV (DigestAlgorithm SHA512) = 128
getV t = error ("pkcs12Derive: unsupported hash: " ++ show t)
hashFromProxy :: proxy a -> a
hashFromProxy _ = undefined
chunks :: ByteArray ba => Int -> ba -> [ba]
chunks n bs
| len > n = let (c, cs) = B.splitAt n bs in c : chunks n cs
| len > 0 = [bs]
| otherwise = []
where
len = B.length bs
Concatenate copies of input ' bs ' to create output of length ' n '
extendedTo :: (ByteArrayAccess bin, ByteArray bout) => bin -> Int -> bout
bs `extendedTo` n =
B.allocAndFreeze n $ \pout ->
B.withByteArray bs $ \pin -> do
mapM_ (\off -> memCopy (pout `plusPtr` off) pin len)
(enumFromThenTo 0 len (n - len))
memCopy (pout `plusPtr` (n - r)) pin r
where
len = B.length bs
r = n `mod` len
# NOINLINE extendedTo #
Concatenate copies of input ' bs ' to create output whose length is a
extendedToMult :: ByteArray ba => ba -> Int -> ba
bs `extendedToMult` n
| len > n = bs `B.append` B.take (n - len `mod` n) bs
| len == n = bs
| len > 0 = bs `extendedTo` n
| otherwise = B.empty
where
len = B.length bs
Add two bytearrays ( considered as big - endian integers ) and increment the
result . Output has size of the first bytearray .
add1 :: ByteString -> ByteString -> ByteString
add1 a b =
B.allocAndFreeze alen $ \pc ->
B.withByteArray a $ \pa ->
B.withByteArray b $ \pb ->
loop3 pa pb pc alen blen 1
where
alen = B.length a
blen = B.length b
loop3 !pa !pb !pc !ma !mb !c
| ma == 0 = return ()
| mb == 0 = loop2 pa pc ma c
| otherwise = do
let na = pred ma
nb = pred mb
ba <- peekElemOff pa na
bb <- peekElemOff pb nb
let (cc, bc) = carryAdd3 c ba bb
pokeElemOff pc na bc
loop3 pa pb pc na nb cc
loop2 !pa !pc !ma !c
| ma == 0 = return ()
| otherwise = do
let na = pred ma
ba <- peekElemOff pa na
let (cc, bc) = carryAdd2 c ba
pokeElemOff pc na bc
loop2 pa pc na cc
split16 :: Word16 -> (Word8, Word8)
split16 x = (fromIntegral (shiftR x 8), fromIntegral x)
carryAdd2 :: Word8 -> Word8 -> (Word8, Word8)
carryAdd2 a b = split16 (fromIntegral a + fromIntegral b)
carryAdd3 :: Word8 -> Word8 -> Word8 -> (Word8, Word8)
carryAdd3 a b c = split16 (fromIntegral a + fromIntegral b + fromIntegral c)
|
8c064b0af190b2cef040055fa5c4b8daf7ff942608a8673f649b7354e3d24611 | ml4tp/tcoq | summary.ml | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2017
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
open Pp
open CErrors
open Util
module Dyn = Dyn.Make(struct end)
type marshallable = [ `Yes | `No | `Shallow ]
type 'a summary_declaration = {
freeze_function : marshallable -> 'a;
unfreeze_function : 'a -> unit;
init_function : unit -> unit }
let summaries = ref Int.Map.empty
let mangle id = id ^ "-SUMMARY"
let internal_declare_summary hash sumname sdecl =
let (infun, outfun) = Dyn.Easy.make_dyn (mangle sumname) in
let dyn_freeze b = infun (sdecl.freeze_function b)
and dyn_unfreeze sum = sdecl.unfreeze_function (outfun sum)
and dyn_init = sdecl.init_function in
let ddecl = {
freeze_function = dyn_freeze;
unfreeze_function = dyn_unfreeze;
init_function = dyn_init }
in
summaries := Int.Map.add hash (sumname, ddecl) !summaries
let all_declared_summaries = ref Int.Set.empty
let summary_names = ref []
let name_of_summary name =
try List.assoc name !summary_names
with Not_found -> "summary name not found"
let declare_summary sumname decl =
let hash = String.hash sumname in
let () = if Int.Map.mem hash !summaries then
let (name, _) = Int.Map.find hash !summaries in
anomaly ~label:"Summary.declare_summary"
(str "Colliding summary names: " ++ str sumname ++ str " vs. " ++ str name)
in
all_declared_summaries := Int.Set.add hash !all_declared_summaries;
summary_names := (hash, sumname) :: !summary_names;
internal_declare_summary hash sumname decl
type frozen = {
summaries : (int * Dyn.t) list;
* Ordered list w.r.t . the first component .
ml_module : Dyn.t option;
(** Special handling of the ml_module summary. *)
}
let empty_frozen = { summaries = []; ml_module = None; }
let ml_modules = "ML-MODULES"
let ml_modules_summary = String.hash ml_modules
let freeze_summaries ~marshallable : frozen =
let fold id (_, decl) accu =
to debug missing Lazy.force
if marshallable < > ` No then begin
let i d , _ = Int.Map.find i d ! summaries in
prerr_endline ( " begin marshalling " ^ i d ) ;
ignore(Marshal.to_string ( decl.freeze_function marshallable ) [ ] ) ;
prerr_endline ( " end marshalling " ^ i d ) ;
end ;
/debug
if marshallable <> `No then begin
let id, _ = Int.Map.find id !summaries in
prerr_endline ("begin marshalling " ^ id);
ignore(Marshal.to_string (decl.freeze_function marshallable) []);
prerr_endline ("end marshalling " ^ id);
end;
/debug *)
let state = decl.freeze_function marshallable in
if Int.equal id ml_modules_summary then { accu with ml_module = Some state }
else { accu with summaries = (id, state) :: accu.summaries }
in
Int.Map.fold_right fold !summaries empty_frozen
let unfreeze_summaries fs =
The unfreezing of [ ml_modules_summary ] has to be anticipated since it
* may modify the content of [ summaries ] ny loading new ML modules
* may modify the content of [summaries] ny loading new ML modules *)
let (_, decl) =
try Int.Map.find ml_modules_summary !summaries
with Not_found -> anomaly (str "Undeclared summary " ++ str ml_modules)
in
let () = match fs.ml_module with
| None -> anomaly (str "Undeclared summary " ++ str ml_modules)
| Some state -> decl.unfreeze_function state
in
let fold id (_, decl) states =
if Int.equal id ml_modules_summary then states
else match states with
| [] ->
let () = decl.init_function () in
[]
| (nid, state) :: rstates ->
if Int.equal id nid then
let () = decl.unfreeze_function state in rstates
else
let () = decl.init_function () in states
in
let fold id decl state =
try fold id decl state
with e when CErrors.noncritical e ->
let e = CErrors.push e in
Printf.eprintf "Error unfrezing summay %s\n%s\n%!"
(name_of_summary id) (Pp.string_of_ppcmds (CErrors.iprint e));
iraise e
in
(** We rely on the order of the frozen list, and the order of folding *)
ignore (Int.Map.fold_left fold !summaries fs.summaries)
let init_summaries () =
Int.Map.iter (fun _ (_, decl) -> decl.init_function ()) !summaries
* For global tables registered statically before the end of coqtop
launch , the following empty [ init_function ] could be used .
launch, the following empty [init_function] could be used. *)
let nop () = ()
(** Selective freeze *)
type frozen_bits = (int * Dyn.t) list
let ids_of_string_list complement ids =
if not complement then List.map String.hash ids
else
let fold accu id =
let id = String.hash id in
Int.Set.remove id accu
in
let ids = List.fold_left fold !all_declared_summaries ids in
Int.Set.elements ids
let freeze_summary ~marshallable ?(complement=false) ids =
let ids = ids_of_string_list complement ids in
List.map (fun id ->
let (_, summary) = Int.Map.find id !summaries in
id, summary.freeze_function marshallable)
ids
let unfreeze_summary datas =
List.iter
(fun (id, data) ->
let (name, summary) = Int.Map.find id !summaries in
try summary.unfreeze_function data
with e ->
let e = CErrors.push e in
prerr_endline ("Exception unfreezing " ^ name);
iraise e)
datas
let surgery_summary { summaries; ml_module } bits =
let summaries = List.map (fun (id, _ as orig) ->
try id, List.assoc id bits
with Not_found -> orig)
summaries in
{ summaries; ml_module }
let project_summary { summaries; ml_module } ?(complement=false) ids =
let ids = ids_of_string_list complement ids in
List.filter (fun (id, _) -> List.mem id ids) summaries
let pointer_equal l1 l2 =
let ptr_equal d1 d2 =
let Dyn.Dyn (t1, x1) = d1 in
let Dyn.Dyn (t2, x2) = d2 in
match Dyn.eq t1 t2 with
| None -> false
| Some Refl -> x1 == x2
in
CList.for_all2eq
(fun (id1,v1) (id2,v2) -> id1 = id2 && ptr_equal v1 v2) l1 l2
(** All-in-one reference declaration + registration *)
let ref ?(freeze=fun _ r -> r) ~name x =
let r = ref x in
declare_summary name
{ freeze_function = (fun b -> freeze b !r);
unfreeze_function = ((:=) r);
init_function = (fun () -> r := x) };
r
module Local = struct
type 'a local_ref = ('a CEphemeron.key * string) ref
let (:=) r v = r := (CEphemeron.create v, snd !r)
let (!) r =
let key, name = !r in
try CEphemeron.get key
with CEphemeron.InvalidKey ->
let _, { init_function } =
Int.Map.find (String.hash (mangle name)) !summaries in
init_function ();
CEphemeron.get (fst !r)
let ref ?(freeze=fun x -> x) ~name init =
let r = Pervasives.ref (CEphemeron.create init, name) in
declare_summary name
{ freeze_function = (fun _ -> freeze !r);
unfreeze_function = ((:=) r);
init_function = (fun () -> r := init) };
r
end
let dump = Dyn.dump
| null | https://raw.githubusercontent.com/ml4tp/tcoq/7a78c31df480fba721648f277ab0783229c8bece/library/summary.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
* Special handling of the ml_module summary.
* We rely on the order of the frozen list, and the order of folding
* Selective freeze
* All-in-one reference declaration + registration | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2017
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
open Pp
open CErrors
open Util
module Dyn = Dyn.Make(struct end)
type marshallable = [ `Yes | `No | `Shallow ]
type 'a summary_declaration = {
freeze_function : marshallable -> 'a;
unfreeze_function : 'a -> unit;
init_function : unit -> unit }
let summaries = ref Int.Map.empty
let mangle id = id ^ "-SUMMARY"
let internal_declare_summary hash sumname sdecl =
let (infun, outfun) = Dyn.Easy.make_dyn (mangle sumname) in
let dyn_freeze b = infun (sdecl.freeze_function b)
and dyn_unfreeze sum = sdecl.unfreeze_function (outfun sum)
and dyn_init = sdecl.init_function in
let ddecl = {
freeze_function = dyn_freeze;
unfreeze_function = dyn_unfreeze;
init_function = dyn_init }
in
summaries := Int.Map.add hash (sumname, ddecl) !summaries
let all_declared_summaries = ref Int.Set.empty
let summary_names = ref []
let name_of_summary name =
try List.assoc name !summary_names
with Not_found -> "summary name not found"
let declare_summary sumname decl =
let hash = String.hash sumname in
let () = if Int.Map.mem hash !summaries then
let (name, _) = Int.Map.find hash !summaries in
anomaly ~label:"Summary.declare_summary"
(str "Colliding summary names: " ++ str sumname ++ str " vs. " ++ str name)
in
all_declared_summaries := Int.Set.add hash !all_declared_summaries;
summary_names := (hash, sumname) :: !summary_names;
internal_declare_summary hash sumname decl
type frozen = {
summaries : (int * Dyn.t) list;
* Ordered list w.r.t . the first component .
ml_module : Dyn.t option;
}
let empty_frozen = { summaries = []; ml_module = None; }
let ml_modules = "ML-MODULES"
let ml_modules_summary = String.hash ml_modules
let freeze_summaries ~marshallable : frozen =
let fold id (_, decl) accu =
to debug missing Lazy.force
if marshallable < > ` No then begin
let i d , _ = Int.Map.find i d ! summaries in
prerr_endline ( " begin marshalling " ^ i d ) ;
ignore(Marshal.to_string ( decl.freeze_function marshallable ) [ ] ) ;
prerr_endline ( " end marshalling " ^ i d ) ;
end ;
/debug
if marshallable <> `No then begin
let id, _ = Int.Map.find id !summaries in
prerr_endline ("begin marshalling " ^ id);
ignore(Marshal.to_string (decl.freeze_function marshallable) []);
prerr_endline ("end marshalling " ^ id);
end;
/debug *)
let state = decl.freeze_function marshallable in
if Int.equal id ml_modules_summary then { accu with ml_module = Some state }
else { accu with summaries = (id, state) :: accu.summaries }
in
Int.Map.fold_right fold !summaries empty_frozen
let unfreeze_summaries fs =
The unfreezing of [ ml_modules_summary ] has to be anticipated since it
* may modify the content of [ summaries ] ny loading new ML modules
* may modify the content of [summaries] ny loading new ML modules *)
let (_, decl) =
try Int.Map.find ml_modules_summary !summaries
with Not_found -> anomaly (str "Undeclared summary " ++ str ml_modules)
in
let () = match fs.ml_module with
| None -> anomaly (str "Undeclared summary " ++ str ml_modules)
| Some state -> decl.unfreeze_function state
in
let fold id (_, decl) states =
if Int.equal id ml_modules_summary then states
else match states with
| [] ->
let () = decl.init_function () in
[]
| (nid, state) :: rstates ->
if Int.equal id nid then
let () = decl.unfreeze_function state in rstates
else
let () = decl.init_function () in states
in
let fold id decl state =
try fold id decl state
with e when CErrors.noncritical e ->
let e = CErrors.push e in
Printf.eprintf "Error unfrezing summay %s\n%s\n%!"
(name_of_summary id) (Pp.string_of_ppcmds (CErrors.iprint e));
iraise e
in
ignore (Int.Map.fold_left fold !summaries fs.summaries)
let init_summaries () =
Int.Map.iter (fun _ (_, decl) -> decl.init_function ()) !summaries
* For global tables registered statically before the end of coqtop
launch , the following empty [ init_function ] could be used .
launch, the following empty [init_function] could be used. *)
let nop () = ()
type frozen_bits = (int * Dyn.t) list
let ids_of_string_list complement ids =
if not complement then List.map String.hash ids
else
let fold accu id =
let id = String.hash id in
Int.Set.remove id accu
in
let ids = List.fold_left fold !all_declared_summaries ids in
Int.Set.elements ids
let freeze_summary ~marshallable ?(complement=false) ids =
let ids = ids_of_string_list complement ids in
List.map (fun id ->
let (_, summary) = Int.Map.find id !summaries in
id, summary.freeze_function marshallable)
ids
let unfreeze_summary datas =
List.iter
(fun (id, data) ->
let (name, summary) = Int.Map.find id !summaries in
try summary.unfreeze_function data
with e ->
let e = CErrors.push e in
prerr_endline ("Exception unfreezing " ^ name);
iraise e)
datas
let surgery_summary { summaries; ml_module } bits =
let summaries = List.map (fun (id, _ as orig) ->
try id, List.assoc id bits
with Not_found -> orig)
summaries in
{ summaries; ml_module }
let project_summary { summaries; ml_module } ?(complement=false) ids =
let ids = ids_of_string_list complement ids in
List.filter (fun (id, _) -> List.mem id ids) summaries
let pointer_equal l1 l2 =
let ptr_equal d1 d2 =
let Dyn.Dyn (t1, x1) = d1 in
let Dyn.Dyn (t2, x2) = d2 in
match Dyn.eq t1 t2 with
| None -> false
| Some Refl -> x1 == x2
in
CList.for_all2eq
(fun (id1,v1) (id2,v2) -> id1 = id2 && ptr_equal v1 v2) l1 l2
let ref ?(freeze=fun _ r -> r) ~name x =
let r = ref x in
declare_summary name
{ freeze_function = (fun b -> freeze b !r);
unfreeze_function = ((:=) r);
init_function = (fun () -> r := x) };
r
module Local = struct
type 'a local_ref = ('a CEphemeron.key * string) ref
let (:=) r v = r := (CEphemeron.create v, snd !r)
let (!) r =
let key, name = !r in
try CEphemeron.get key
with CEphemeron.InvalidKey ->
let _, { init_function } =
Int.Map.find (String.hash (mangle name)) !summaries in
init_function ();
CEphemeron.get (fst !r)
let ref ?(freeze=fun x -> x) ~name init =
let r = Pervasives.ref (CEphemeron.create init, name) in
declare_summary name
{ freeze_function = (fun _ -> freeze !r);
unfreeze_function = ((:=) r);
init_function = (fun () -> r := init) };
r
end
let dump = Dyn.dump
|
7d4b26c602f6edfa4bfebaeaef52d65bc378ad0a88c680e658a8627a5b1c01af | thheller/shadow-cljs | relay_tcp.clj | (ns shadow.cljs.devtools.server.relay-tcp
(:require [shadow.jvm-log :as log]
[clojure.core.async :as async :refer (>!! <!!)]
[shadow.remote.relay.api :as relay]
[clojure.tools.reader.edn :as edn]
[shadow.cljs.devtools.api :as api])
(:import [java.net SocketException ServerSocket InetAddress]
[java.io BufferedWriter InputStreamReader OutputStreamWriter]
[clojure.lang LineNumberingPushbackReader]))
(defmacro ^:private thread
[^String name daemon & body]
`(doto (Thread. (fn [] ~@body) ~name)
(.setDaemon ~daemon)
(.start)))
(defn connection-loop [relay config socket]
(let [socket-in
(-> (.getInputStream socket)
(InputStreamReader.)
(LineNumberingPushbackReader.))
out
(-> (.getOutputStream socket)
(OutputStreamWriter.)
(BufferedWriter.))
to-relay
(async/chan 128)
from-relay
(async/chan 128)
relay-close
(relay/connect relay to-relay from-relay {:type :tcp-client})]
(thread
(str "shadow-cljs:tcp-relay:client-read")
(let [EOF (Object.)]
(try
(loop []
(let [res (edn/read {:eof EOF} socket-in)]
(if (identical? res EOF)
(do (async/close! to-relay)
(async/close! from-relay))
(when (>!! to-relay res)
(recur)))))
(catch Exception e
(log/debug-ex e ::read-ex)))))
(loop []
(when-some [msg (<!! from-relay)]
(when (try
(.write out (str (pr-str msg) "\n"))
(.flush out)
true
(catch SocketException se
;; writing to lost connection throws se
nil)
(catch Exception e
(log/warn-ex e ::socket-repl-ex)
nil))
(recur))))
(.close socket)))
(defn start
[relay
{:keys [host port]
:or {host "localhost"
port 0}
:as config}]
(let [addr
(InetAddress/getByName host) ;; nil returns loopback
server-socket
(ServerSocket. port 0 addr)
sockets-ref
(atom #{})
server-thread
(thread
(str "shadow-cljs:tcp-relay:accept") true
(try
(loop []
(when (not (.isClosed server-socket))
(try
(let [conn (.accept server-socket)]
(swap! sockets-ref conj conn)
(thread
(str "shadow-cljs:tcp-relay:client-loop") false
(connection-loop relay config conn)
(swap! sockets-ref disj conn)))
(catch SocketException _disconnect))
(recur)))))]
{:server-thread server-thread
:server-socket server-socket
:sockets-ref sockets-ref
:relay relay
:host host
:port (.getLocalPort server-socket)}))
(defn stop [{:keys [server-socket server-thread sockets-ref]}]
(.close server-socket)
(doseq [s @sockets-ref]
(.close s))
(.interrupt server-thread))
(comment
(def x (start (:relay (api/get-runtime!)) {:port 8201}))
(stop x))
| null | https://raw.githubusercontent.com/thheller/shadow-cljs/ba0a02aec050c6bc8db1932916009400f99d3cce/src/main/shadow/cljs/devtools/server/relay_tcp.clj | clojure | writing to lost connection throws se
nil returns loopback | (ns shadow.cljs.devtools.server.relay-tcp
(:require [shadow.jvm-log :as log]
[clojure.core.async :as async :refer (>!! <!!)]
[shadow.remote.relay.api :as relay]
[clojure.tools.reader.edn :as edn]
[shadow.cljs.devtools.api :as api])
(:import [java.net SocketException ServerSocket InetAddress]
[java.io BufferedWriter InputStreamReader OutputStreamWriter]
[clojure.lang LineNumberingPushbackReader]))
(defmacro ^:private thread
[^String name daemon & body]
`(doto (Thread. (fn [] ~@body) ~name)
(.setDaemon ~daemon)
(.start)))
(defn connection-loop [relay config socket]
(let [socket-in
(-> (.getInputStream socket)
(InputStreamReader.)
(LineNumberingPushbackReader.))
out
(-> (.getOutputStream socket)
(OutputStreamWriter.)
(BufferedWriter.))
to-relay
(async/chan 128)
from-relay
(async/chan 128)
relay-close
(relay/connect relay to-relay from-relay {:type :tcp-client})]
(thread
(str "shadow-cljs:tcp-relay:client-read")
(let [EOF (Object.)]
(try
(loop []
(let [res (edn/read {:eof EOF} socket-in)]
(if (identical? res EOF)
(do (async/close! to-relay)
(async/close! from-relay))
(when (>!! to-relay res)
(recur)))))
(catch Exception e
(log/debug-ex e ::read-ex)))))
(loop []
(when-some [msg (<!! from-relay)]
(when (try
(.write out (str (pr-str msg) "\n"))
(.flush out)
true
(catch SocketException se
nil)
(catch Exception e
(log/warn-ex e ::socket-repl-ex)
nil))
(recur))))
(.close socket)))
(defn start
  "Start the TCP endpoint for the relay.

   Listens on host:port (port 0 picks an ephemeral port; host defaults to
   \"localhost\") and runs `connection-loop` on a dedicated thread for each
   accepted client. Returns a map describing the running server — pass it
   to `stop` to shut everything down."
  [relay
   {:keys [host port]
    :or {host "localhost"
         port 0}
    :as config}]
  (let [addr
        ;; FIX: this binding had lost its value expression, producing an
        ;; odd-numbered let vector. Resolve the configured host; a nil
        ;; host would mean the loopback address.
        (InetAddress/getByName host)

        server-socket
        (ServerSocket. port 0 addr)

        ;; every accepted socket is tracked so `stop` can close them all
        sockets-ref
        (atom #{})

        server-thread
        (thread
          (str "shadow-cljs:tcp-relay:accept") true
          (try
            (loop []
              (when (not (.isClosed server-socket))
                (try
                  (let [conn (.accept server-socket)]
                    (swap! sockets-ref conj conn)
                    (thread
                      (str "shadow-cljs:tcp-relay:client-loop") false
                      (connection-loop relay config conn)
                      (swap! sockets-ref disj conn)))
                  ;; thrown by .accept when the server socket is closed
                  (catch SocketException _disconnect))
                (recur)))))]
    {:server-thread server-thread
     :server-socket server-socket
     :sockets-ref sockets-ref
     :relay relay
     :host host
     :port (.getLocalPort server-socket)}))
(defn stop
  "Shut down a server previously returned by `start`: close the listening
   socket, close every tracked client socket, then interrupt the accept
   thread."
  [{:keys [server-socket server-thread sockets-ref]}]
  (.close server-socket)
  (run! (fn [client-socket] (.close client-socket)) @sockets-ref)
  (.interrupt server-thread))
(comment
(def x (start (:relay (api/get-runtime!)) {:port 8201}))
(stop x))
|
-module(model_join).
-define(MAGIC_KEY, <<"$$ THIS IS SPECIAL $$">>).
-export([start_link/1, stop/1, join/0]).
%% Start the model under one of two transports:
%%   disterl -- server and client communicate through a message-passing
%%              middleman process (no network involved);
%%   tcp     -- the server listens via ranch on port 8765 and the client
%%              connects over a real TCP socket.
%% Both variants create the public `client' and `server' ETS tables and
%% install a server read-function mapping a key to {write,K,V} or
%% {delete,K}. The chosen client access config is stored in the
%% application environment for the client to pick up.
start_link(disterl) ->
    application:ensure_all_started(sdiff),
    ets:new(client, [named_table, public, set]),
    ets:new(server, [named_table, public, set]),
    {ok, SdiffServ} = sdiff_serv:start_link({local,server},
        fun(K) ->
            case ets:lookup(server, K) of
                [] -> {delete, K};
                [{_,V}] -> {write, K, V}
            end
        end),
    {ok, Middleman} = sdiff_access_msg_server:start_link(SdiffServ),
    register(server_middleman, Middleman),
    application:set_env(sdiff, config, {sdiff_access_msg_client, Middleman});
start_link(tcp) ->
    application:ensure_all_started(ranch),
    application:ensure_all_started(sdiff),
    ets:new(client, [named_table, public, set]),
    ets:new(server, [named_table, public, set]),
    {ok, _} = sdiff_serv:start_link({local,server},
        fun(K) ->
            case ets:lookup(server, K) of
                [] -> {delete, K};
                [{_,V}] -> {write, K, V}
            end
        end),
    Port = 8765,
    {ok,_} = ranch:start_listener(
        server, 5,
        ranch_tcp,
        [{port, Port},
         {nodelay,true},
         {max_connections, 1000}],
        sdiff_access_tcp_ranch_server,
        [server]),
    application:set_env(sdiff, config, {sdiff_access_tcp_client,
                                        {{127,0,0,1}, Port, [], 10000}}).
%% Tear down whatever start_link/1 created. Each process is unlinked
%% before being shut down so the caller does not die with it, and
%% wait_dead/1 guarantees the process is gone before the ETS tables
%% (owned by those processes' tree) are deleted.
stop(disterl) ->
    Middleman = whereis(server_middleman),
    Server = whereis(server),
    Client = whereis(client),
    lager:debug("links: ~p", [process_info(self(), links)]),
    lager:debug("shutdown: ~p", [[self(), Client, Middleman, Server]]),
    [unlink(Pid) || Pid <- [Client, Middleman, Server],
                    Pid =/= undefined],
    [begin
         exit(Pid, shutdown), wait_dead(Pid)
     end || Pid <- [Client, Middleman, Server],
            Pid =/= undefined],
    ets:delete(server),
    ets:delete(client);
stop(tcp) ->
    %% stop the client first, then the listener, then the server
    [begin unlink(Pid), exit(Pid, shutdown), wait_dead(Pid) end
     || Pid <- [whereis(client)], Pid =/= undefined],
    ranch:stop_listener(server),
    [begin unlink(Pid), exit(Pid, shutdown), wait_dead(Pid) end
     || Pid <- [whereis(server)], Pid =/= undefined],
    ets:delete(server),
    ets:delete(client).
%% Wait until client and server have converged, then return both tables
%% as maps {ClientMap, ServerMap} for comparison.
join() ->
    %% Synchronize a stream here. We do it by inserting a magic key with a unique
    %% value and waiting for it to come to the client, meaning everything in-
    %% between should be there.
    %% This relies on a property by which a stream of updates is in order.
    %% However, because things aren't interrupted by diff sequences, little
    %% this property only holds with synchronous diffs and not asynchronous ones.
    %% The other problem is that the connection process is asynchronous and can
    %% fail if it came too fast right after the first declaration of
    %% readiness. Because of this, we try to re-write the token value
    %% multiple times in the loop.
    %% (FIX: the "fail if it came too fast..." line above had lost its %%
    %% prefix, which made the module fail to compile.)
    Unique = term_to_binary(make_ref()),
    write_wait_for_value(?MAGIC_KEY, Unique, timer:seconds(3)),
    %% We're in sync. Turn the tables to maps
    {tab_to_map(client), tab_to_map(server)}.
%% Write Key=Val through the server and poll the client table until the
%% value has replicated. N is the remaining budget in milliseconds
%% (decremented by the 10ms poll interval); give up with a descriptive
%% error once it runs out. The value is re-written on every iteration,
%% on purpose -- see the comment in join/0.
write_wait_for_value(Key, Val, N) when N =< 0 ->
    error({join_timeout, {expected, Val, ets:lookup(client, Key)}});
write_wait_for_value(Key, Val, N) ->
    model_server:write(Key, Val),
    case ets:lookup(client, Key) of
        [{Key, Val}] ->
            ok;
        _ -> % oldval
            timer:sleep(10),
            write_wait_for_value(Key, Val, N-10)
    end.
%% Dump an ETS table to a map, dropping the synchronization magic key.
tab_to_map(Tid) ->
    maps:remove(?MAGIC_KEY, maps:from_list(ets:tab2list(Tid))).
%% Busy-wait (1ms granularity) until Pid has terminated.
wait_dead(Pid) ->
    IsAlive = is_process_alive(Pid),
    if
        IsAlive ->
            timer:sleep(1),
            wait_dead(Pid);
        true ->
            ok
    end.
| null | https://raw.githubusercontent.com/heroku/sdiff/15e3fb2974919b7349387288a89881c2850f3073/test/model_join.erl | erlang | Synchronize a stream here. We do it by inserting a magic key with a unique
value and waiting for it to come to the client, meaning everything in-
between should be there.
This relies on a property by which a stream of updates is in order.
However, because things aren't interrupted by diff sequences, little
this property only holds with synchronous diffs and not asynchronous ones.
The other problem is that the connection process is asynchronous and can
readiness. Because of this, we try to re-write the token value
multiple times in the loop.
We're in sync. Turn the tables to maps
oldval | -module(model_join).
-define(MAGIC_KEY, <<"$$ THIS IS SPECIAL $$">>).
-export([start_link/1, stop/1, join/0]).
start_link(disterl) ->
application:ensure_all_started(sdiff),
ets:new(client, [named_table, public, set]),
ets:new(server, [named_table, public, set]),
{ok, SdiffServ} = sdiff_serv:start_link({local,server},
fun(K) ->
case ets:lookup(server, K) of
[] -> {delete, K};
[{_,V}] -> {write, K, V}
end
end),
{ok, Middleman} = sdiff_access_msg_server:start_link(SdiffServ),
register(server_middleman, Middleman),
application:set_env(sdiff, config, {sdiff_access_msg_client, Middleman});
start_link(tcp) ->
application:ensure_all_started(ranch),
application:ensure_all_started(sdiff),
ets:new(client, [named_table, public, set]),
ets:new(server, [named_table, public, set]),
{ok, _} = sdiff_serv:start_link({local,server},
fun(K) ->
case ets:lookup(server, K) of
[] -> {delete, K};
[{_,V}] -> {write, K, V}
end
end),
Port = 8765,
{ok,_} = ranch:start_listener(
server, 5,
ranch_tcp,
[{port, Port},
{nodelay,true},
{max_connections, 1000}],
sdiff_access_tcp_ranch_server,
[server]),
application:set_env(sdiff, config, {sdiff_access_tcp_client,
{{127,0,0,1}, Port, [], 10000}}).
stop(disterl) ->
Middleman = whereis(server_middleman),
Server = whereis(server),
Client = whereis(client),
lager:debug("links: ~p", [process_info(self(), links)]),
lager:debug("shutdown: ~p", [[self(), Client, Middleman, Server]]),
[unlink(Pid) || Pid <- [Client, Middleman, Server],
Pid =/= undefined],
[begin
exit(Pid, shutdown), wait_dead(Pid)
end || Pid <- [Client, Middleman, Server],
Pid =/= undefined],
ets:delete(server),
ets:delete(client);
stop(tcp) ->
[begin unlink(Pid), exit(Pid, shutdown), wait_dead(Pid) end
|| Pid <- [whereis(client)], Pid =/= undefined],
ranch:stop_listener(server),
[begin unlink(Pid), exit(Pid, shutdown), wait_dead(Pid) end
|| Pid <- [whereis(server)], Pid =/= undefined],
ets:delete(server),
ets:delete(client).
join() ->
    %% FIX: the line below had lost its %% prefix (it is the tail of the
    %% explanatory comment), which made the module fail to compile.
    %% The connection process is asynchronous and can fail if it came too
    %% fast right after the first declaration of readiness, so the token
    %% value is re-written in a loop until it replicates.
    Unique = term_to_binary(make_ref()),
    write_wait_for_value(?MAGIC_KEY, Unique, timer:seconds(3)),
    {tab_to_map(client), tab_to_map(server)}.
write_wait_for_value(Key, Val, N) when N =< 0 ->
error({join_timeout, {expected, Val, ets:lookup(client, Key)}});
write_wait_for_value(Key, Val, N) ->
model_server:write(Key, Val),
case ets:lookup(client, Key) of
[{Key, Val}] ->
ok;
timer:sleep(10),
write_wait_for_value(Key, Val, N-10)
end.
tab_to_map(Tid) ->
maps:remove(?MAGIC_KEY, maps:from_list(ets:tab2list(Tid))).
wait_dead(Pid) ->
case is_process_alive(Pid) of
true -> timer:sleep(1), wait_dead(Pid);
false -> ok
end.
|
220f3e715f935d0e1db3e13f19ce33c2bc01919dc467559806870d4997c30628 | ocaml-flambda/flambda-backend | compilenv.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Gallium , INRIA Rocquencourt
, OCamlPro
and ,
(* *)
Copyright 2010 Institut National de Recherche en Informatique et
(* en Automatique *)
(* Copyright 2013--2016 OCamlPro SAS *)
Copyright 2014 - -2016 Jane Street Group LLC
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(* Compilation environments for compilation units *)
[@@@ocaml.warning "+a-4-9-40-41-42"]
open Config
open Cmx_format
module File_sections = Flambda_backend_utils.File_sections
module CU = Compilation_unit
type error =
Not_a_unit_info of string
| Corrupted_unit_info of string
| Illegal_renaming of CU.t * CU.t * string
exception Error of error
let global_infos_table =
(CU.Name.Tbl.create 17 : unit_infos option CU.Name.Tbl.t)
let export_infos_table =
(CU.Name.Tbl.create 10 : Export_info.t CU.Name.Tbl.t)
let reset_info_tables () =
CU.Name.Tbl.reset global_infos_table;
CU.Name.Tbl.reset export_infos_table
let imported_sets_of_closures_table =
(Set_of_closures_id.Tbl.create 10
: Simple_value_approx.function_declarations option
Set_of_closures_id.Tbl.t)
module CstMap =
Map.Make(struct
type t = Clambda.ustructured_constant
let compare = Clambda.compare_structured_constants
PR#6442 : it is incorrect to use Stdlib.compare on values of type t
because it compares " 0.0 " and " -0.0 " equal .
because it compares "0.0" and "-0.0" equal. *)
end)
module SymMap = Misc.Stdlib.String.Map
module String = Misc.Stdlib.String
type structured_constants =
{
strcst_shared: string CstMap.t;
strcst_all: Clambda.ustructured_constant SymMap.t;
}
let structured_constants_empty =
{
strcst_shared = CstMap.empty;
strcst_all = SymMap.empty;
}
let structured_constants = ref structured_constants_empty
let exported_constants = Hashtbl.create 17
let merged_environment = ref Export_info.empty
module Checks : sig
(* mutable state *)
type t = Cmx_format.checks
val create : unit -> t
val reset : t -> unit
val merge : t -> into:t -> unit
end = struct
type t = Cmx_format.checks
let create () =
{
ui_noalloc_functions = String.Set.empty;
}
let reset t =
t.ui_noalloc_functions <- String.Set.empty
let merge src ~into:dst =
if !Flambda_backend_flags.alloc_check
then (
dst.ui_noalloc_functions
<- String.Set.union dst.ui_noalloc_functions src.ui_noalloc_functions)
end
let cached_checks : Cmx_format.checks = Checks.create ()
let cache_checks c = Checks.merge c ~into:cached_checks
let default_ui_export_info =
if Config.flambda then
Cmx_format.Flambda1 Export_info.empty
else if Config.flambda2 then
Cmx_format.Flambda2 None
else
Cmx_format.Clambda Value_unknown
let current_unit =
{ ui_unit = CU.dummy;
ui_defines = [];
ui_imports_cmi = [];
ui_imports_cmx = [];
ui_generic_fns = { curry_fun = []; apply_fun = []; send_fun = [] };
ui_force_link = false;
ui_checks = Checks.create ();
ui_export_info = default_ui_export_info }
(* Reset all per-unit mutable state before compiling [compilation_unit]:
   the cache of imported .cmx infos, the imported-closures table, the
   accumulated checks, the generic-function lists, exported/structured
   constants and the merged Flambda export environment. Also makes
   [compilation_unit] the current unit. *)
let reset compilation_unit =
  CU.Name.Tbl.clear global_infos_table;
  Set_of_closures_id.Tbl.clear imported_sets_of_closures_table;
  Checks.reset cached_checks;
  CU.set_current (Some compilation_unit);
  current_unit.ui_unit <- compilation_unit;
  current_unit.ui_defines <- [compilation_unit];
  current_unit.ui_imports_cmi <- [];
  current_unit.ui_imports_cmx <- [];
  current_unit.ui_generic_fns <-
    { curry_fun = []; apply_fun = []; send_fun = [] };
  (* -linkall forces this unit to be linked even if unreferenced *)
  current_unit.ui_force_link <- !Clflags.link_everything;
  Checks.reset current_unit.ui_checks;
  Hashtbl.clear exported_constants;
  structured_constants := structured_constants_empty;
  current_unit.ui_export_info <- default_ui_export_info;
  merged_environment := Export_info.empty;
  CU.Name.Tbl.clear export_infos_table
let current_unit_infos () =
current_unit
(* Read a .cmx file: check the magic number, unmarshal the raw unit info,
   read the trailing CRC, and rehydrate the export info from the raw
   on-disk representation. Returns the unit info together with its CRC.
   NOTE: ownership of [ic] is handed to [File_sections.create], which is
   responsible for closing it -- do not add a [close_in] on the success
   path. *)
let read_unit_info filename =
  let ic = open_in_bin filename in
  try
    let buffer = really_input_string ic (String.length cmx_magic_number) in
    if buffer <> cmx_magic_number then begin
      close_in ic;
      raise(Error(Not_a_unit_info filename))
    end;
    let uir = (input_value ic : unit_infos_raw) in
    (* the serialized sections sit between the unit info and the CRC *)
    let first_section_offset = pos_in ic in
    seek_in ic (first_section_offset + uir.uir_sections_length);
    let crc = Digest.input ic in
    (* This consumes the channel *)
    let sections = File_sections.create uir.uir_section_toc filename ic ~first_section_offset in
    let export_info =
      match uir.uir_export_info with
      | Clambda_raw info -> Clambda info
      | Flambda1_raw info -> Flambda1 info
      | Flambda2_raw None -> Flambda2 None
      | Flambda2_raw (Some info) ->
        Flambda2 (Some (Flambda2_cmx.Flambda_cmx_format.from_raw ~sections info))
    in
    let ui = {
      ui_unit = uir.uir_unit;
      ui_defines = uir.uir_defines;
      ui_imports_cmi = uir.uir_imports_cmi |> Array.to_list;
      ui_imports_cmx = uir.uir_imports_cmx |> Array.to_list;
      ui_generic_fns = uir.uir_generic_fns;
      ui_export_info = export_info;
      ui_checks = uir.uir_checks;
      ui_force_link = uir.uir_force_link
    }
    in
    (ui, crc)
  with End_of_file | Failure _ ->
    close_in ic;
    raise(Error(Corrupted_unit_info(filename)))
(* Read the descriptor of a .cmxa library file.
   FIX: the previous version leaked [ic] whenever the magic-number check
   failed (the raise skipped [close_in]) or [input_value] raised; the
   channel is now closed on every path via [Fun.protect]. *)
let read_library_info filename =
  let ic = open_in_bin filename in
  Fun.protect
    ~finally:(fun () -> close_in ic)
    (fun () ->
      let buffer = really_input_string ic (String.length cmxa_magic_number) in
      if buffer <> cmxa_magic_number then
        raise(Error(Not_a_unit_info filename));
      (input_value ic : library_infos))
(* Read and cache info on global identifiers *)
let get_unit_info comp_unit =
(* If this fails, it likely means that someone didn't call
[CU.which_cmx_file]. *)
assert (CU.can_access_cmx_file comp_unit ~accessed_by:current_unit.ui_unit);
(* CR lmaurer: Surely this should just compare [comp_unit] to
[current_unit.ui_unit], but doing so seems to break Closure. We should fix
that. *)
if CU.Name.equal (CU.name comp_unit) (CU.name current_unit.ui_unit)
then
Some current_unit
else begin
let cmx_name = CU.name comp_unit in
try
CU.Name.Tbl.find global_infos_table cmx_name
with Not_found ->
let (infos, crc) =
if Env.is_imported_opaque cmx_name then (None, None)
else begin
try
let filename =
Load_path.find_uncap ((cmx_name |> CU.Name.to_string) ^ ".cmx") in
let (ui, crc) = read_unit_info filename in
if not (CU.equal ui.ui_unit comp_unit) then
raise(Error(Illegal_renaming(comp_unit, ui.ui_unit, filename)));
cache_checks ui.ui_checks;
(Some ui, Some crc)
with Not_found ->
let warn = Warnings.No_cmx_file (cmx_name |> CU.Name.to_string) in
Location.prerr_warning Location.none warn;
(None, None)
end
in
let import = Import_info.create_normal comp_unit ~crc in
current_unit.ui_imports_cmx <- import :: current_unit.ui_imports_cmx;
CU.Name.Tbl.add global_infos_table cmx_name infos;
infos
end
let which_cmx_file comp_unit =
CU.which_cmx_file comp_unit ~accessed_by:(CU.get_current_exn ())
let get_unit_export_info comp_unit =
match get_unit_info comp_unit with
| None -> None
| Some ui -> Some ui.ui_export_info
let get_global_info comp_unit =
get_unit_info (which_cmx_file comp_unit)
let get_global_export_info id =
match get_global_info id with
| None -> None
| Some ui -> Some ui.ui_export_info
let cache_unit_info ui =
cache_checks ui.ui_checks;
CU.Name.Tbl.add global_infos_table (CU.name ui.ui_unit) (Some ui)
(* Return the approximation of a global identifier *)
let get_clambda_approx ui =
assert(not Config.flambda);
match ui.ui_export_info with
| Flambda1 _ | Flambda2 _ -> assert false
| Clambda approx -> approx
let toplevel_approx :
(CU.t, Clambda.value_approximation) Hashtbl.t = Hashtbl.create 16
let record_global_approx_toplevel () =
Hashtbl.add toplevel_approx
current_unit.ui_unit
(get_clambda_approx current_unit)
let global_approx comp_unit =
try Hashtbl.find toplevel_approx comp_unit
with Not_found ->
match get_global_info comp_unit with
| None -> Clambda.Value_unknown
| Some ui -> get_clambda_approx ui
(* Register the approximation of the module being compiled *)
let set_global_approx approx =
assert(not Config.flambda);
current_unit.ui_export_info <- Clambda approx
Exporting and importing cross module information ( Flambda only )
let get_flambda_export_info ui =
assert(Config.flambda);
match ui.ui_export_info with
| Clambda _ | Flambda2 _ -> assert false
| Flambda1 ei -> ei
let set_export_info export_info =
assert(Config.flambda);
current_unit.ui_export_info <- Flambda1 export_info
let flambda2_set_export_info export_info =
assert(Config.flambda2);
current_unit.ui_export_info <- Flambda2 (Some export_info)
(* Flambda1: fetch (and cache) the export information for [comp_unit],
   merging it into the accumulated environment on first access. Returns
   [None] when no .cmx is available. The predefined-exception unit has no
   .cmx and is rejected outright. *)
let approx_for_global comp_unit =
  if CU.equal comp_unit CU.predef_exn
  then invalid_arg "approx_for_global with predef_exn compilation unit";
  let accessible_comp_unit = which_cmx_file comp_unit in
  let cmx_name = CU.name accessible_comp_unit in
  match CU.Name.Tbl.find export_infos_table cmx_name with
  | otherwise -> Some otherwise
  | exception Not_found ->
    match get_unit_info accessible_comp_unit with
    | None -> None
    | Some ui ->
      let exported = get_flambda_export_info ui in
      CU.Name.Tbl.add export_infos_table cmx_name exported;
      merged_environment := Export_info.merge !merged_environment exported;
      Some exported
let approx_env () = !merged_environment
(* Record that a currying function or application function is needed *)
(* Register a currying helper (kind/arity/result) to be emitted for this
   unit, skipping entries already recorded. *)
let need_curry_fun kind arity result =
  let gfns = current_unit.ui_generic_fns in
  let entry = (kind, arity, result) in
  if List.mem entry gfns.curry_fun then ()
  else
    current_unit.ui_generic_fns <-
      { gfns with curry_fun = entry :: gfns.curry_fun }
(* Register a generic application helper for this unit, skipping entries
   already recorded. Generic application always has at least one argument. *)
let need_apply_fun arity result mode =
  assert(List.compare_length_with arity 0 > 0);
  let gfns = current_unit.ui_generic_fns in
  let entry = (arity, result, mode) in
  if List.mem entry gfns.apply_fun then ()
  else
    current_unit.ui_generic_fns <-
      { gfns with apply_fun = entry :: gfns.apply_fun }
(* Register a message-send helper for this unit, skipping entries already
   recorded. *)
let need_send_fun arity result mode =
  let gfns = current_unit.ui_generic_fns in
  let entry = (arity, result, mode) in
  if List.mem entry gfns.send_fun then ()
  else
    current_unit.ui_generic_fns <-
      { gfns with send_fun = entry :: gfns.send_fun }
(* Write the description of the current unit *)
CR mshinwell : let 's think about this later , quadratic algorithm
let ensure_sharing_between_cmi_and_cmx_imports cmi_imports cmx_imports =
( * If a [ CU.t ] in the .cmx imports also occurs in the .cmi imports , use
the one in the .cmi imports , to increase sharing . ( Such a [ CU.t ] in
the imports may already have part of its value shared with the
first [ CU.Name.t ] component in the .cmi imports , c.f .
[ Persistent_env.ensure_crc_sharing ] , so it 's best to pick this [ CU.t ] . )
let ensure_sharing_between_cmi_and_cmx_imports cmi_imports cmx_imports =
(* If a [CU.t] in the .cmx imports also occurs in the .cmi imports, use
the one in the .cmi imports, to increase sharing. (Such a [CU.t] in
the .cmi imports may already have part of its value shared with the
first [CU.Name.t] component in the .cmi imports, c.f.
[Persistent_env.ensure_crc_sharing], so it's best to pick this [CU.t].) *)
List.map (fun ((comp_unit, crc) as import) ->
match
List.find_map (function
| _, None -> None
| _, Some (comp_unit', _) ->
if CU.equal comp_unit comp_unit' then Some comp_unit'
else None)
cmi_imports
with
| None -> import
| Some comp_unit -> comp_unit, crc)
cmx_imports
*)
let write_unit_info info filename =
let raw_export_info, sections =
match info.ui_export_info with
| Clambda info -> Clambda_raw info, File_sections.empty
| Flambda1 info -> Flambda1_raw info, File_sections.empty
| Flambda2 None -> Flambda2_raw None, File_sections.empty
| Flambda2 (Some info) ->
let info, sections = Flambda2_cmx.Flambda_cmx_format.to_raw info in
Flambda2_raw (Some info), sections
in
let serialized_sections, toc, total_length = File_sections.serialize sections in
let raw_info = {
uir_unit = info.ui_unit;
uir_defines = info.ui_defines;
uir_imports_cmi = Array.of_list info.ui_imports_cmi;
uir_imports_cmx = Array.of_list info.ui_imports_cmx;
uir_generic_fns = info.ui_generic_fns;
uir_export_info = raw_export_info;
uir_checks = info.ui_checks;
uir_force_link = info.ui_force_link;
uir_section_toc = toc;
uir_sections_length = total_length;
} in
let oc = open_out_bin filename in
output_string oc cmx_magic_number;
output_value oc raw_info;
Array.iter (output_string oc) serialized_sections;
flush oc;
let crc = Digest.file filename in
Digest.output oc crc;
close_out oc
let save_unit_info filename =
current_unit.ui_imports_cmi <- Env.imports();
write_unit_info current_unit filename
(* Capture / restore the structured-constant tables; used to roll back
   speculatively generated constants. *)
let snapshot () = !structured_constants
let backtrack s = structured_constants := s
(* Fresh linkage name (as a string) for a constant belonging to the unit
   currently being compiled. *)
let new_const_symbol () =
  let sym = Symbol.for_new_const_in_current_unit () in
  Linkage_name.to_string (Symbol.linkage_name sym)
(* Register structured constant [cst] and return its symbol. With
   ~shared:true, an identical constant registered earlier is reused;
   otherwise a fresh symbol is always minted. *)
let new_structured_constant cst ~shared =
  let { strcst_shared; strcst_all } = !structured_constants in
  (* record [cst] under [lbl], with [shared_map] as the new sharing map *)
  let register lbl shared_map =
    structured_constants :=
      { strcst_shared = shared_map;
        strcst_all = SymMap.add lbl cst strcst_all };
    lbl
  in
  if shared then
    match CstMap.find_opt cst strcst_shared with
    | Some existing -> existing
    | None ->
      let lbl = new_const_symbol () in
      register lbl (CstMap.add cst lbl strcst_shared)
  else
    let lbl = new_const_symbol () in
    register lbl strcst_shared
(* Mark symbol [s] as exported so its definition is emitted with global
   linkage. *)
let add_exported_constant s =
  Hashtbl.replace exported_constants s ()
(* Forget all registered structured constants. *)
let clear_structured_constants () =
  structured_constants := structured_constants_empty
(* Definition previously registered for symbol [s], if any. *)
let structured_constant_of_symbol s =
  SymMap.find_opt s (!structured_constants).strcst_all
let structured_constants () =
let provenance : Clambda.usymbol_provenance =
{ original_idents = [];
module_path =
(* CR-someday lmaurer: Properly construct a [Path.t] from the module name
with its pack prefix. *)
Path.Pident (Ident.create_persistent (Compilation_unit.Name.to_string (
Compilation_unit.name (Compilation_unit.get_current_exn ()))));
}
in
SymMap.bindings (!structured_constants).strcst_all
|> List.map
(fun (symbol, definition) ->
{
Clambda.symbol;
exported = Hashtbl.mem exported_constants symbol;
definition;
provenance = Some provenance;
})
let require_global global_ident =
ignore (get_global_info global_ident : Cmx_format.unit_infos option)
(* Error report *)
open Format
(* Pretty-print one [error] case for compiler diagnostics. *)
let report_error ppf = function
  | Not_a_unit_info filename ->
      fprintf ppf "%a@ is not a compilation unit description."
        Location.print_filename filename
  | Corrupted_unit_info filename ->
      fprintf ppf "Corrupted compilation unit description@ %a"
        Location.print_filename filename
  | Illegal_renaming(name, modname, filename) ->
      fprintf ppf "%a@ contains the description for unit\
                   @ %a when %a was expected"
        Location.print_filename filename
        CU.print name
        CU.print modname
let () =
Location.register_error_of_exn
(function
| Error err -> Some (Location.error_of_printer_file report_error err)
| _ -> None
)
| null | https://raw.githubusercontent.com/ocaml-flambda/flambda-backend/121bb5f26c0e844c5bf8eba2abb3c63110ee92d9/middle_end/compilenv.ml | ocaml | ************************************************************************
OCaml
en Automatique
Copyright 2013--2016 OCamlPro SAS
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Compilation environments for compilation units
mutable state
This consumes the channel
Read and cache info on global identifiers
If this fails, it likely means that someone didn't call
[CU.which_cmx_file].
CR lmaurer: Surely this should just compare [comp_unit] to
[current_unit.ui_unit], but doing so seems to break Closure. We should fix
that.
Return the approximation of a global identifier
Register the approximation of the module being compiled
Record that a currying function or application function is needed
Write the description of the current unit
If a [CU.t] in the .cmx imports also occurs in the .cmi imports, use
the one in the .cmi imports, to increase sharing. (Such a [CU.t] in
the .cmi imports may already have part of its value shared with the
first [CU.Name.t] component in the .cmi imports, c.f.
[Persistent_env.ensure_crc_sharing], so it's best to pick this [CU.t].)
CR-someday lmaurer: Properly construct a [Path.t] from the module name
with its pack prefix.
Error report | , projet Gallium , INRIA Rocquencourt
, OCamlPro
and ,
Copyright 2010 Institut National de Recherche en Informatique et
Copyright 2014 - -2016 Jane Street Group LLC
the GNU Lesser General Public License version 2.1 , with the
[@@@ocaml.warning "+a-4-9-40-41-42"]
open Config
open Cmx_format
module File_sections = Flambda_backend_utils.File_sections
module CU = Compilation_unit
type error =
Not_a_unit_info of string
| Corrupted_unit_info of string
| Illegal_renaming of CU.t * CU.t * string
exception Error of error
let global_infos_table =
(CU.Name.Tbl.create 17 : unit_infos option CU.Name.Tbl.t)
let export_infos_table =
(CU.Name.Tbl.create 10 : Export_info.t CU.Name.Tbl.t)
let reset_info_tables () =
CU.Name.Tbl.reset global_infos_table;
CU.Name.Tbl.reset export_infos_table
let imported_sets_of_closures_table =
(Set_of_closures_id.Tbl.create 10
: Simple_value_approx.function_declarations option
Set_of_closures_id.Tbl.t)
module CstMap =
Map.Make(struct
type t = Clambda.ustructured_constant
let compare = Clambda.compare_structured_constants
PR#6442 : it is incorrect to use Stdlib.compare on values of type t
because it compares " 0.0 " and " -0.0 " equal .
because it compares "0.0" and "-0.0" equal. *)
end)
module SymMap = Misc.Stdlib.String.Map
module String = Misc.Stdlib.String
type structured_constants =
{
strcst_shared: string CstMap.t;
strcst_all: Clambda.ustructured_constant SymMap.t;
}
let structured_constants_empty =
{
strcst_shared = CstMap.empty;
strcst_all = SymMap.empty;
}
let structured_constants = ref structured_constants_empty
let exported_constants = Hashtbl.create 17
let merged_environment = ref Export_info.empty
module Checks : sig
type t = Cmx_format.checks
val create : unit -> t
val reset : t -> unit
val merge : t -> into:t -> unit
end = struct
type t = Cmx_format.checks
let create () =
{
ui_noalloc_functions = String.Set.empty;
}
let reset t =
t.ui_noalloc_functions <- String.Set.empty
let merge src ~into:dst =
if !Flambda_backend_flags.alloc_check
then (
dst.ui_noalloc_functions
<- String.Set.union dst.ui_noalloc_functions src.ui_noalloc_functions)
end
let cached_checks : Cmx_format.checks = Checks.create ()
let cache_checks c = Checks.merge c ~into:cached_checks
let default_ui_export_info =
if Config.flambda then
Cmx_format.Flambda1 Export_info.empty
else if Config.flambda2 then
Cmx_format.Flambda2 None
else
Cmx_format.Clambda Value_unknown
let current_unit =
{ ui_unit = CU.dummy;
ui_defines = [];
ui_imports_cmi = [];
ui_imports_cmx = [];
ui_generic_fns = { curry_fun = []; apply_fun = []; send_fun = [] };
ui_force_link = false;
ui_checks = Checks.create ();
ui_export_info = default_ui_export_info }
let reset compilation_unit =
CU.Name.Tbl.clear global_infos_table;
Set_of_closures_id.Tbl.clear imported_sets_of_closures_table;
Checks.reset cached_checks;
CU.set_current (Some compilation_unit);
current_unit.ui_unit <- compilation_unit;
current_unit.ui_defines <- [compilation_unit];
current_unit.ui_imports_cmi <- [];
current_unit.ui_imports_cmx <- [];
current_unit.ui_generic_fns <-
{ curry_fun = []; apply_fun = []; send_fun = [] };
current_unit.ui_force_link <- !Clflags.link_everything;
Checks.reset current_unit.ui_checks;
Hashtbl.clear exported_constants;
structured_constants := structured_constants_empty;
current_unit.ui_export_info <- default_ui_export_info;
merged_environment := Export_info.empty;
CU.Name.Tbl.clear export_infos_table
let current_unit_infos () =
current_unit
let read_unit_info filename =
let ic = open_in_bin filename in
try
let buffer = really_input_string ic (String.length cmx_magic_number) in
if buffer <> cmx_magic_number then begin
close_in ic;
raise(Error(Not_a_unit_info filename))
end;
let uir = (input_value ic : unit_infos_raw) in
let first_section_offset = pos_in ic in
seek_in ic (first_section_offset + uir.uir_sections_length);
let crc = Digest.input ic in
let sections = File_sections.create uir.uir_section_toc filename ic ~first_section_offset in
let export_info =
match uir.uir_export_info with
| Clambda_raw info -> Clambda info
| Flambda1_raw info -> Flambda1 info
| Flambda2_raw None -> Flambda2 None
| Flambda2_raw (Some info) ->
Flambda2 (Some (Flambda2_cmx.Flambda_cmx_format.from_raw ~sections info))
in
let ui = {
ui_unit = uir.uir_unit;
ui_defines = uir.uir_defines;
ui_imports_cmi = uir.uir_imports_cmi |> Array.to_list;
ui_imports_cmx = uir.uir_imports_cmx |> Array.to_list;
ui_generic_fns = uir.uir_generic_fns;
ui_export_info = export_info;
ui_checks = uir.uir_checks;
ui_force_link = uir.uir_force_link
}
in
(ui, crc)
with End_of_file | Failure _ ->
close_in ic;
raise(Error(Corrupted_unit_info(filename)))
let read_library_info filename =
let ic = open_in_bin filename in
let buffer = really_input_string ic (String.length cmxa_magic_number) in
if buffer <> cmxa_magic_number then
raise(Error(Not_a_unit_info filename));
let infos = (input_value ic : library_infos) in
close_in ic;
infos
let get_unit_info comp_unit =
assert (CU.can_access_cmx_file comp_unit ~accessed_by:current_unit.ui_unit);
if CU.Name.equal (CU.name comp_unit) (CU.name current_unit.ui_unit)
then
Some current_unit
else begin
let cmx_name = CU.name comp_unit in
try
CU.Name.Tbl.find global_infos_table cmx_name
with Not_found ->
let (infos, crc) =
if Env.is_imported_opaque cmx_name then (None, None)
else begin
try
let filename =
Load_path.find_uncap ((cmx_name |> CU.Name.to_string) ^ ".cmx") in
let (ui, crc) = read_unit_info filename in
if not (CU.equal ui.ui_unit comp_unit) then
raise(Error(Illegal_renaming(comp_unit, ui.ui_unit, filename)));
cache_checks ui.ui_checks;
(Some ui, Some crc)
with Not_found ->
let warn = Warnings.No_cmx_file (cmx_name |> CU.Name.to_string) in
Location.prerr_warning Location.none warn;
(None, None)
end
in
let import = Import_info.create_normal comp_unit ~crc in
current_unit.ui_imports_cmx <- import :: current_unit.ui_imports_cmx;
CU.Name.Tbl.add global_infos_table cmx_name infos;
infos
end
let which_cmx_file comp_unit =
CU.which_cmx_file comp_unit ~accessed_by:(CU.get_current_exn ())
let get_unit_export_info comp_unit =
match get_unit_info comp_unit with
| None -> None
| Some ui -> Some ui.ui_export_info
let get_global_info comp_unit =
get_unit_info (which_cmx_file comp_unit)
let get_global_export_info id =
match get_global_info id with
| None -> None
| Some ui -> Some ui.ui_export_info
let cache_unit_info ui =
cache_checks ui.ui_checks;
CU.Name.Tbl.add global_infos_table (CU.name ui.ui_unit) (Some ui)
let get_clambda_approx ui =
assert(not Config.flambda);
match ui.ui_export_info with
| Flambda1 _ | Flambda2 _ -> assert false
| Clambda approx -> approx
let toplevel_approx :
(CU.t, Clambda.value_approximation) Hashtbl.t = Hashtbl.create 16
let record_global_approx_toplevel () =
Hashtbl.add toplevel_approx
current_unit.ui_unit
(get_clambda_approx current_unit)
let global_approx comp_unit =
try Hashtbl.find toplevel_approx comp_unit
with Not_found ->
match get_global_info comp_unit with
| None -> Clambda.Value_unknown
| Some ui -> get_clambda_approx ui
let set_global_approx approx =
assert(not Config.flambda);
current_unit.ui_export_info <- Clambda approx
Exporting and importing cross module information ( Flambda only )
let get_flambda_export_info ui =
assert(Config.flambda);
match ui.ui_export_info with
| Clambda _ | Flambda2 _ -> assert false
| Flambda1 ei -> ei
let set_export_info export_info =
assert(Config.flambda);
current_unit.ui_export_info <- Flambda1 export_info
let flambda2_set_export_info export_info =
assert(Config.flambda2);
current_unit.ui_export_info <- Flambda2 (Some export_info)
let approx_for_global comp_unit =
if CU.equal comp_unit CU.predef_exn
then invalid_arg "approx_for_global with predef_exn compilation unit";
let accessible_comp_unit = which_cmx_file comp_unit in
let cmx_name = CU.name accessible_comp_unit in
match CU.Name.Tbl.find export_infos_table cmx_name with
| otherwise -> Some otherwise
| exception Not_found ->
match get_unit_info accessible_comp_unit with
| None -> None
| Some ui ->
let exported = get_flambda_export_info ui in
CU.Name.Tbl.add export_infos_table cmx_name exported;
merged_environment := Export_info.merge !merged_environment exported;
Some exported
let approx_env () = !merged_environment
let need_curry_fun kind arity result =
let fns = current_unit.ui_generic_fns in
if not (List.mem (kind, arity, result) fns.curry_fun) then
current_unit.ui_generic_fns <-
{ fns with curry_fun = (kind, arity, result) :: fns.curry_fun }
let need_apply_fun arity result mode =
assert(List.compare_length_with arity 0 > 0);
let fns = current_unit.ui_generic_fns in
if not (List.mem (arity, result, mode) fns.apply_fun) then
current_unit.ui_generic_fns <-
{ fns with apply_fun = (arity, result, mode) :: fns.apply_fun }
let need_send_fun arity result mode =
let fns = current_unit.ui_generic_fns in
if not (List.mem (arity, result, mode) fns.send_fun) then
current_unit.ui_generic_fns <-
{ fns with send_fun = (arity, result, mode) :: fns.send_fun }
CR mshinwell : let 's think about this later , quadratic algorithm
let ensure_sharing_between_cmi_and_cmx_imports cmi_imports cmx_imports =
( * If a [ CU.t ] in the .cmx imports also occurs in the .cmi imports , use
the one in the .cmi imports , to increase sharing . ( Such a [ CU.t ] in
the imports may already have part of its value shared with the
first [ CU.Name.t ] component in the .cmi imports , c.f .
[ Persistent_env.ensure_crc_sharing ] , so it 's best to pick this [ CU.t ] . )
let ensure_sharing_between_cmi_and_cmx_imports cmi_imports cmx_imports =
List.map (fun ((comp_unit, crc) as import) ->
match
List.find_map (function
| _, None -> None
| _, Some (comp_unit', _) ->
if CU.equal comp_unit comp_unit' then Some comp_unit'
else None)
cmi_imports
with
| None -> import
| Some comp_unit -> comp_unit, crc)
cmx_imports
*)
let write_unit_info info filename =
let raw_export_info, sections =
match info.ui_export_info with
| Clambda info -> Clambda_raw info, File_sections.empty
| Flambda1 info -> Flambda1_raw info, File_sections.empty
| Flambda2 None -> Flambda2_raw None, File_sections.empty
| Flambda2 (Some info) ->
let info, sections = Flambda2_cmx.Flambda_cmx_format.to_raw info in
Flambda2_raw (Some info), sections
in
let serialized_sections, toc, total_length = File_sections.serialize sections in
let raw_info = {
uir_unit = info.ui_unit;
uir_defines = info.ui_defines;
uir_imports_cmi = Array.of_list info.ui_imports_cmi;
uir_imports_cmx = Array.of_list info.ui_imports_cmx;
uir_generic_fns = info.ui_generic_fns;
uir_export_info = raw_export_info;
uir_checks = info.ui_checks;
uir_force_link = info.ui_force_link;
uir_section_toc = toc;
uir_sections_length = total_length;
} in
let oc = open_out_bin filename in
output_string oc cmx_magic_number;
output_value oc raw_info;
Array.iter (output_string oc) serialized_sections;
flush oc;
let crc = Digest.file filename in
Digest.output oc crc;
close_out oc
let save_unit_info filename =
current_unit.ui_imports_cmi <- Env.imports();
write_unit_info current_unit filename
let snapshot () = !structured_constants
let backtrack s = structured_constants := s
let new_const_symbol () =
Symbol.for_new_const_in_current_unit ()
|> Symbol.linkage_name
|> Linkage_name.to_string
let new_structured_constant cst ~shared =
let {strcst_shared; strcst_all} = !structured_constants in
if shared then
try
CstMap.find cst strcst_shared
with Not_found ->
let lbl = new_const_symbol() in
structured_constants :=
{
strcst_shared = CstMap.add cst lbl strcst_shared;
strcst_all = SymMap.add lbl cst strcst_all;
};
lbl
else
let lbl = new_const_symbol() in
structured_constants :=
{
strcst_shared;
strcst_all = SymMap.add lbl cst strcst_all;
};
lbl
let add_exported_constant s =
Hashtbl.replace exported_constants s ()
let clear_structured_constants () =
structured_constants := structured_constants_empty
let structured_constant_of_symbol s =
SymMap.find_opt s (!structured_constants).strcst_all
let structured_constants () =
let provenance : Clambda.usymbol_provenance =
{ original_idents = [];
module_path =
Path.Pident (Ident.create_persistent (Compilation_unit.Name.to_string (
Compilation_unit.name (Compilation_unit.get_current_exn ()))));
}
in
SymMap.bindings (!structured_constants).strcst_all
|> List.map
(fun (symbol, definition) ->
{
Clambda.symbol;
exported = Hashtbl.mem exported_constants symbol;
definition;
provenance = Some provenance;
})
let require_global global_ident =
ignore (get_global_info global_ident : Cmx_format.unit_infos option)
open Format
let report_error ppf = function
| Not_a_unit_info filename ->
fprintf ppf "%a@ is not a compilation unit description."
Location.print_filename filename
| Corrupted_unit_info filename ->
fprintf ppf "Corrupted compilation unit description@ %a"
Location.print_filename filename
| Illegal_renaming(name, modname, filename) ->
fprintf ppf "%a@ contains the description for unit\
@ %a when %a was expected"
Location.print_filename filename
CU.print name
CU.print modname
let () =
Location.register_error_of_exn
(function
| Error err -> Some (Location.error_of_printer_file report_error err)
| _ -> None
)
|
c060c7a6d844eb0adbbe9e2736f7773e074eb5153ec80973b5c404932fbcd7c9 | agentm/project-m36 | HTML.hs | module ProjectM36.Relation.Show.HTML where
import ProjectM36.Base
import ProjectM36.Relation
import ProjectM36.Tuple
import ProjectM36.Atom
import ProjectM36.Attribute as A
import ProjectM36.AtomType
import qualified Data.List as L
import Data.Text (Text, pack)
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
#if __GLASGOW_HASKELL__ < 804
import Data.Monoid
#endif
attributesAsHTML :: Attributes -> Text
attributesAsHTML attrs = "<tr>" <> T.concat (map oneAttrHTML (A.toList attrs)) <> "</tr>"
where
oneAttrHTML attr = "<th>" <> prettyAttribute attr <> "</th>"
relationAsHTML :: Relation -> Text
-- web browsers don't display tables with empty cells or empty headers, so we have to insert some placeholders- it's not technically the same, but looks as expected in the browser
relationAsHTML rel@(Relation attrNameSet tupleSet)
| rel == relationTrue = pm36relcss <>
tablestart <>
"<tr><th></th></tr>" <>
"<tr><td></td></tr>" <>
tablefooter <> "</table>"
| rel == relationFalse = pm36relcss <>
tablestart <>
"<tr><th></th></tr>" <>
tablefooter <>
"</table>"
| otherwise = pm36relcss <>
tablestart <>
attributesAsHTML attrNameSet <>
tupleSetAsHTML tupleSet <>
tablefooter <>
"</table>"
where
pm36relcss = "<style>.pm36relation {empty-cells: show;} .pm36relation tbody td, .pm36relation th { border: 1px solid black;}</style>"
tablefooter = "<tfoot><tr><td colspan=\"100%\">" <> pack (show (cardinality rel)) <> " tuples</td></tr></tfoot>"
tablestart = "<table class=\"pm36relation\"\">"
writeHTML :: Text -> IO ()
writeHTML = TIO.writeFile "/home/agentm/rel.html"
writeRel :: Relation -> IO ()
writeRel = writeHTML . relationAsHTML
tupleAsHTML :: RelationTuple -> Text
tupleAsHTML tuple = "<tr>" <> T.concat (L.map tupleFrag (tupleAssocs tuple)) <> "</tr>"
where
tupleFrag tup = "<td>" <> atomAsHTML (snd tup) <> "</td>"
atomAsHTML (RelationAtom rel) = relationAsHTML rel
atomAsHTML (TextAtom t) = """ <> t <> """
atomAsHTML atom = atomToText atom
tupleSetAsHTML :: RelationTupleSet -> Text
tupleSetAsHTML tupSet = foldr folder "" (asList tupSet)
where
folder tuple acc = acc <> tupleAsHTML tuple
| null | https://raw.githubusercontent.com/agentm/project-m36/57a75b35e84bebf0945db6dae53350fda83f24b6/src/lib/ProjectM36/Relation/Show/HTML.hs | haskell | web browsers don't display tables with empty cells or empty headers, so we have to insert some placeholders- it's not technically the same, but looks as expected in the browser | module ProjectM36.Relation.Show.HTML where
import ProjectM36.Base
import ProjectM36.Relation
import ProjectM36.Tuple
import ProjectM36.Atom
import ProjectM36.Attribute as A
import ProjectM36.AtomType
import qualified Data.List as L
import Data.Text (Text, pack)
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
#if __GLASGOW_HASKELL__ < 804
import Data.Monoid
#endif
attributesAsHTML :: Attributes -> Text
attributesAsHTML attrs = "<tr>" <> T.concat (map oneAttrHTML (A.toList attrs)) <> "</tr>"
where
oneAttrHTML attr = "<th>" <> prettyAttribute attr <> "</th>"
relationAsHTML :: Relation -> Text
relationAsHTML rel@(Relation attrNameSet tupleSet)
| rel == relationTrue = pm36relcss <>
tablestart <>
"<tr><th></th></tr>" <>
"<tr><td></td></tr>" <>
tablefooter <> "</table>"
| rel == relationFalse = pm36relcss <>
tablestart <>
"<tr><th></th></tr>" <>
tablefooter <>
"</table>"
| otherwise = pm36relcss <>
tablestart <>
attributesAsHTML attrNameSet <>
tupleSetAsHTML tupleSet <>
tablefooter <>
"</table>"
where
pm36relcss = "<style>.pm36relation {empty-cells: show;} .pm36relation tbody td, .pm36relation th { border: 1px solid black;}</style>"
tablefooter = "<tfoot><tr><td colspan=\"100%\">" <> pack (show (cardinality rel)) <> " tuples</td></tr></tfoot>"
tablestart = "<table class=\"pm36relation\"\">"
writeHTML :: Text -> IO ()
writeHTML = TIO.writeFile "/home/agentm/rel.html"
writeRel :: Relation -> IO ()
writeRel = writeHTML . relationAsHTML
tupleAsHTML :: RelationTuple -> Text
tupleAsHTML tuple = "<tr>" <> T.concat (L.map tupleFrag (tupleAssocs tuple)) <> "</tr>"
where
tupleFrag tup = "<td>" <> atomAsHTML (snd tup) <> "</td>"
atomAsHTML (RelationAtom rel) = relationAsHTML rel
atomAsHTML (TextAtom t) = """ <> t <> """
atomAsHTML atom = atomToText atom
tupleSetAsHTML :: RelationTupleSet -> Text
tupleSetAsHTML tupSet = foldr folder "" (asList tupSet)
where
folder tuple acc = acc <> tupleAsHTML tuple
|
fc9e8469845a6c01283ec7905a1f06050617068b13cdc84a9a368eb59d91722f | babashka/nbb | common.cljs | (ns nbb.impl.common)
(def opts (atom nil))
| null | https://raw.githubusercontent.com/babashka/nbb/4d06aa142a5fb5baac48a8ad8e611d672f779b5f/src/nbb/impl/common.cljs | clojure | (ns nbb.impl.common)
(def opts (atom nil))
| |
f123edf5c44d618c41ad2c805889e24d05427c250b80de84528af4fd70c505b2 | input-output-hk/plutus | KnownTypeAst.hs | {-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
# LANGUAGE DefaultSignatures #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE PolyKinds #
# LANGUAGE StandaloneKindSignatures #
# LANGUAGE TypeApplications #
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
# LANGUAGE UndecidableInstances #
module PlutusCore.Builtin.KnownTypeAst
( TyNameRep (..)
, TyVarRep
, TyAppRep
, TyForallRep
, Hole
, RepHole
, TypeHole
, KnownBuiltinTypeAst
, KnownTypeAst (..)
, Delete
, Merge
) where
import PlutusCore.Builtin.Emitter
import PlutusCore.Builtin.KnownKind
import PlutusCore.Builtin.Polymorphism
import PlutusCore.Core
import PlutusCore.Evaluation.Result
import PlutusCore.MkPlc hiding (error)
import PlutusCore.Name
import Data.Kind qualified as GHC (Constraint, Type)
import Data.Proxy
import Data.Some.GADT qualified as GADT
import Data.Text qualified as Text
import Data.Type.Bool
import GHC.TypeLits
import Universe
Note [ Rep vs Type context ]
Say you define an @Id@ built - in function and specify its Haskell type signature :
i d : : forall a. a - > a
This gets picked up by the ' TypeScheme ' inference machinery , which detects @a@ and instantiates it
to @Opaque val Var0@ where is some concrete type ( the exact details do n't matter here )
representing a type variable of kind @*@ with the @0@ unique , so @id@ elaborates to
i d : : Opaque val Var0 - > Opaque val Var0
But consider also the case where you want to define @id@ only over lists . The signature of the
built - in function then is
idList : : forall a. Opaque val [ a ] - > Opaque val [ a ]
Now the ' Opaque ' is explicit and the ' TypeScheme ' inference machinery needs to go under it in order
to instantiate Which now does not get instantiated to an ' Opaque ' as before , since we 're
already inside an ' Opaque ' and can just use directly . So @idList@ elaborates to
idList : : Opaque val [ Var0 ] - > Opaque val [ Var0 ]
Now let 's make up some syntax for annotating contexts so that it 's clear what 's going on :
idList @Type |
: : ( @Type | Opaque val ( @Rep | [ Var0 ] ) )
- > ( @Type | Opaque val ( @Rep | [ Var0 ] ) )
' @ann | ' annotates everything to the right of it . The whole thing then reads as
1 . a builtin is always defined in the Type context
2 . @->@ preserves the Type context , i.e. it accepts it and passes it down to the domain and codomain
3 . @Opaque val@ switches the context from Type to Rep , i.e. it accepts the Type context , but
creates the Rep context for its argument that represents a type
So why the distinction ?
The difference between the Rep and the Type contexts that we 've seen so far is that in the Rep
context we do n't need any @Opaque@ , but this is a very superficial reason to keep the distinction
between contexts , since everything that is legal in the Type context is legal in the Rep context
as well . For example we could 've elaborated @idList@ into a bit more verbose
idList : : Opaque val [ Opaque val Var0 ] - > Opaque val [ Opaque val Var0 ]
and the world would n't end because of that , everything would work correctly .
The opposite however is not true : certain types that are legal in the Rep context are not legal in
the Type one and this is the reason why the distinction exists . The simplest example is
i d : : Var0 - > Var0
represents a type variable and it 's a data family with no inhabitants , so it does not
make sense to try to unlift a value of that type .
Now let 's say we added a @term@ argument to and said that when @Var0 term@ is a @GHC.Type@ ,
it has a @term@ inside , just like ' Opaque ' . Then we would be able to unlift it , but we also have
things like @TyAppRep@ , @TyForallRep@ and that set is open , any type can be represented
using such combinators and we can even name particular types , e.g. we could have @PlcListRep@ ,
so we 'd have to special - case @GHC.Type@ for each of them and it would be a huge mess .
So instead of mixing up types whose values are actually unliftable with types that are only used
for type checking , we keep the distinction explicit .
The barrier between and is the barrier between the Type and the Rep contexts and
that barrier must always be some explicit type constructor that switches the context from Type to
Rep. We 've only considered ' Opaque ' as an example of such type constructor , but we also have
' SomeConstant ' as another example .
Some type constructors turn any context into the Type one , for example ' ' and
' Emitter ' , although they are useless inside the Rep context , given that it 's only for type checking
and they do n't exist in the type language of .
These @*Rep@ data families like ' TyVarRep ' , ' TyAppRep ' etc all require the Rep context and preserve
it , since they 're only for representing types for type checking purposes .
We call a thing in a Rep or ' Type ' context a ' RepHole ' or ' TypeHole ' respectively . The reason for
the name is that the inference machinery looks at the thing and tries to instantiate it , like fill
a hole .
We could also have a third type of hole / context , Name , because binders bind names rather than
variables and so it makes sense to infer names sometimes , like for ' TyForallRep ' for example .
We do n't do that currently , because we do n't have such builtins anyway .
And there could be even fancier kinds of holes like " infer anything " for cases where the hole
is determined by some other part of the signature . We do n't have that either , for the same reason .
For the user defining a builtin this all is pretty much invisible .
Say you define an @Id@ built-in function and specify its Haskell type signature:
id :: forall a. a -> a
This gets picked up by the 'TypeScheme' inference machinery, which detects @a@ and instantiates it
to @Opaque val Var0@ where @Var0@ is some concrete type (the exact details don't matter here)
representing a Plutus type variable of kind @*@ with the @0@ unique, so @id@ elaborates to
id :: Opaque val Var0 -> Opaque val Var0
But consider also the case where you want to define @id@ only over lists. The signature of the
built-in function then is
idList :: forall a. Opaque val [a] -> Opaque val [a]
Now the 'Opaque' is explicit and the 'TypeScheme' inference machinery needs to go under it in order
to instantiate @a@. Which now does not get instantiated to an 'Opaque' as before, since we're
already inside an 'Opaque' and can just use @Var0@ directly. So @idList@ elaborates to
idList :: Opaque val [Var0] -> Opaque val [Var0]
Now let's make up some syntax for annotating contexts so that it's clear what's going on:
idList @Type |
:: (@Type | Opaque val (@Rep | [Var0]))
-> (@Type | Opaque val (@Rep | [Var0]))
'@ann |' annotates everything to the right of it. The whole thing then reads as
1. a builtin is always defined in the Type context
2. @->@ preserves the Type context, i.e. it accepts it and passes it down to the domain and codomain
3. @Opaque val@ switches the context from Type to Rep, i.e. it accepts the Type context, but
creates the Rep context for its argument that represents a Plutus type
So why the distinction?
The difference between the Rep and the Type contexts that we've seen so far is that in the Rep
context we don't need any @Opaque@, but this is a very superficial reason to keep the distinction
between contexts, since everything that is legal in the Type context is legal in the Rep context
as well. For example we could've elaborated @idList@ into a bit more verbose
idList :: Opaque val [Opaque val Var0] -> Opaque val [Opaque val Var0]
and the world wouldn't end because of that, everything would work correctly.
The opposite however is not true: certain types that are legal in the Rep context are not legal in
the Type one and this is the reason why the distinction exists. The simplest example is
id :: Var0 -> Var0
@Var0@ represents a Plutus type variable and it's a data family with no inhabitants, so it does not
make sense to try to unlift a value of that type.
Now let's say we added a @term@ argument to @Var0@ and said that when @Var0 term@ is a @GHC.Type@,
it has a @term@ inside, just like 'Opaque'. Then we would be able to unlift it, but we also have
things like @TyAppRep@, @TyForallRep@ and that set is open, any Plutus type can be represented
using such combinators and we can even name particular types, e.g. we could have @PlcListRep@,
so we'd have to special-case @GHC.Type@ for each of them and it would be a huge mess.
So instead of mixing up types whose values are actually unliftable with types that are only used
for type checking, we keep the distinction explicit.
The barrier between Haskell and Plutus is the barrier between the Type and the Rep contexts and
that barrier must always be some explicit type constructor that switches the context from Type to
Rep. We've only considered 'Opaque' as an example of such type constructor, but we also have
'SomeConstant' as another example.
Some type constructors turn any context into the Type one, for example 'EvaluationResult' and
'Emitter', although they are useless inside the Rep context, given that it's only for type checking
Plutus and they don't exist in the type language of Plutus.
These @*Rep@ data families like 'TyVarRep', 'TyAppRep' etc all require the Rep context and preserve
it, since they're only for representing Plutus types for type checking purposes.
We call a thing in a Rep or 'Type' context a 'RepHole' or 'TypeHole' respectively. The reason for
the name is that the inference machinery looks at the thing and tries to instantiate it, like fill
a hole.
We could also have a third type of hole/context, Name, because binders bind names rather than
variables and so it makes sense to infer names sometimes, like for 'TyForallRep' for example.
We don't do that currently, because we don't have such builtins anyway.
And there could be even fancier kinds of holes like "infer anything" for cases where the hole
is determined by some other part of the signature. We don't have that either, for the same reason.
For the user defining a builtin this all is pretty much invisible.
-}
-- See Note [Rep vs Type context].
-- | The kind of holes.
data Hole
-- See Note [Rep vs Type context].
-- | A hole in the Rep context.
type RepHole :: forall a hole. a -> hole
data family RepHole x
-- See Note [Rep vs Type context].
-- | A hole in the Type context.
type TypeHole :: forall hole. GHC.Type -> hole
data family TypeHole a
-- | For annotating an uninstantiated built-in type, so that it gets handled by the right instance
-- or type family.
type BuiltinHead :: forall k. k -> k
data family BuiltinHead f
-- | Take an iterated application of a built-in type and elaborate every function application
inside of it to ' ' , plus annotate the head with ' BuiltinHead ' .
-- The idea is that we don't need to process built-in types manually if we simply add some
-- annotations for instance resolution to look for. Think what we'd have to do manually for, say,
' ' : traverse the spine of the application and collect all the holes into a list , which is
-- troubling, because type applications are left-nested and lists are right-nested, so we'd have to
use accumulators or an explicit ' Reverse ' type family . And then we also have ' KnownTypeAst ' and
' ToBinds ' , so handling built - in types in a special way for each of those would be a hassle ,
especially given the fact that type - level is not exactly good at computing things .
With the ' ElaborateBuiltin ' approach we get ' KnownTypeAst ' , ' ' and ' ToBinds ' for free .
type ElaborateBuiltin :: forall k. k -> k
type family ElaborateBuiltin a where
ElaborateBuiltin (f x) = ElaborateBuiltin f `TyAppRep` x
ElaborateBuiltin f = BuiltinHead f
| A constraint for \"@a@ is a ' KnownTypeAst ' by means of being included in @uni@\ " .
type KnownBuiltinTypeAst uni a = KnownTypeAst uni (ElaborateBuiltin a)
type KnownTypeAst :: forall a. (GHC.Type -> GHC.Type) -> a -> GHC.Constraint
class KnownTypeAst uni x where
-- | Whether @x@ is a built-in type.
type IsBuiltin x :: Bool
type IsBuiltin x = IsBuiltin (ElaborateBuiltin x)
-- | Return every part of the type that can be a to-be-instantiated type variable.
For example , in @Integer@ there 's no such types and in @(a , b)@ it 's the two arguments
( @a@ and ) and the same applies to @a - > b@ ( to mention a type that is not built - in ) .
type ToHoles x :: [Hole]
type ToHoles x = ToHoles (ElaborateBuiltin x)
-- | Collect all unique variables (a variable consists of a textual name, a unique and a kind)
in an
type ToBinds x :: [GADT.Some TyNameRep]
type ToBinds x = ToBinds (ElaborateBuiltin x)
| The type representing @a@ used on the PLC side .
toTypeAst :: proxy x -> Type TyName uni ()
default toTypeAst :: KnownBuiltinTypeAst uni x => proxy x -> Type TyName uni ()
toTypeAst _ = toTypeAst $ Proxy @(ElaborateBuiltin x)
# INLINE toTypeAst #
instance KnownTypeAst uni a => KnownTypeAst uni (EvaluationResult a) where
type IsBuiltin (EvaluationResult a) = 'False
type ToHoles (EvaluationResult a) = '[TypeHole a]
type ToBinds (EvaluationResult a) = ToBinds a
toTypeAst _ = toTypeAst $ Proxy @a
# INLINE toTypeAst #
instance KnownTypeAst uni a => KnownTypeAst uni (Emitter a) where
type IsBuiltin (Emitter a) = 'False
type ToHoles (Emitter a) = '[TypeHole a]
type ToBinds (Emitter a) = ToBinds a
toTypeAst _ = toTypeAst $ Proxy @a
# INLINE toTypeAst #
instance KnownTypeAst uni rep => KnownTypeAst uni (SomeConstant uni rep) where
type IsBuiltin (SomeConstant uni rep) = 'False
type ToHoles (SomeConstant _ rep) = '[RepHole rep]
type ToBinds (SomeConstant _ rep) = ToBinds rep
toTypeAst _ = toTypeAst $ Proxy @rep
# INLINE toTypeAst #
instance KnownTypeAst uni rep => KnownTypeAst uni (Opaque val rep) where
type IsBuiltin (Opaque val rep) = 'False
type ToHoles (Opaque _ rep) = '[RepHole rep]
type ToBinds (Opaque _ rep) = ToBinds rep
toTypeAst _ = toTypeAst $ Proxy @rep
# INLINE toTypeAst #
toTyNameAst
:: forall text uniq. (KnownSymbol text, KnownNat uniq)
=> Proxy ('TyNameRep text uniq) -> TyName
toTyNameAst _ =
TyName $ Name
(Text.pack $ symbolVal @text Proxy)
(Unique . fromIntegral $ natVal @uniq Proxy)
# INLINE toTyNameAst #
instance uni `Contains` f => KnownTypeAst uni (BuiltinHead f) where
type IsBuiltin (BuiltinHead f) = 'True
type ToHoles (BuiltinHead f) = '[]
type ToBinds (BuiltinHead f) = '[]
toTypeAst _ = mkTyBuiltin @_ @f ()
# INLINE toTypeAst #
instance (KnownTypeAst uni a, KnownTypeAst uni b) => KnownTypeAst uni (a -> b) where
type IsBuiltin (a -> b) = 'False
type ToHoles (a -> b) = '[TypeHole a, TypeHole b]
type ToBinds (a -> b) = Merge (ToBinds a) (ToBinds b)
toTypeAst _ = TyFun () (toTypeAst $ Proxy @a) (toTypeAst $ Proxy @b)
# INLINE toTypeAst #
instance (name ~ 'TyNameRep text uniq, KnownSymbol text, KnownNat uniq) =>
KnownTypeAst uni (TyVarRep name) where
type IsBuiltin (TyVarRep name) = 'False
type ToHoles (TyVarRep name) = '[]
type ToBinds (TyVarRep name) = '[ 'GADT.Some name ]
toTypeAst _ = TyVar () . toTyNameAst $ Proxy @('TyNameRep text uniq)
# INLINE toTypeAst #
instance (KnownTypeAst uni fun, KnownTypeAst uni arg) => KnownTypeAst uni (TyAppRep fun arg) where
type IsBuiltin (TyAppRep fun arg) = IsBuiltin fun && IsBuiltin arg
type ToHoles (TyAppRep fun arg) = '[RepHole fun, RepHole arg]
type ToBinds (TyAppRep fun arg) = Merge (ToBinds fun) (ToBinds arg)
toTypeAst _ = TyApp () (toTypeAst $ Proxy @fun) (toTypeAst $ Proxy @arg)
# INLINE toTypeAst #
instance
( name ~ 'TyNameRep @kind text uniq, KnownSymbol text, KnownNat uniq
, KnownKind kind, KnownTypeAst uni a
) => KnownTypeAst uni (TyForallRep name a) where
type IsBuiltin (TyForallRep name a) = 'False
type ToHoles (TyForallRep name a) = '[RepHole a]
type ToBinds (TyForallRep name a) = Delete ('GADT.Some name) (ToBinds a)
toTypeAst _ =
TyForall ()
(toTyNameAst $ Proxy @('TyNameRep text uniq))
(demoteKind $ knownKind @kind)
(toTypeAst $ Proxy @a)
# INLINE toTypeAst #
Utils
| Delete all from a list .
type family Delete x xs :: [a] where
Delete _ '[] = '[]
Delete x (x ': xs) = Delete x xs
Delete x (y ': xs) = y ': Delete x xs
| Delete all elements appearing in the first list from the second one and concatenate the lists .
type family Merge xs ys :: [a] where
Merge '[] ys = ys
Merge (x ': xs) ys = x ': Delete x (Merge xs ys)
| null | https://raw.githubusercontent.com/input-output-hk/plutus/e00e617e7bdf8ff14d308d1ad5583a8739b0b000/plutus-core/plutus-core/src/PlutusCore/Builtin/KnownTypeAst.hs | haskell | # LANGUAGE ConstraintKinds #
# LANGUAGE DataKinds #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
See Note [Rep vs Type context].
| The kind of holes.
See Note [Rep vs Type context].
| A hole in the Rep context.
See Note [Rep vs Type context].
| A hole in the Type context.
| For annotating an uninstantiated built-in type, so that it gets handled by the right instance
or type family.
| Take an iterated application of a built-in type and elaborate every function application
The idea is that we don't need to process built-in types manually if we simply add some
annotations for instance resolution to look for. Think what we'd have to do manually for, say,
troubling, because type applications are left-nested and lists are right-nested, so we'd have to
| Whether @x@ is a built-in type.
| Return every part of the type that can be a to-be-instantiated type variable.
| Collect all unique variables (a variable consists of a textual name, a unique and a kind) | # LANGUAGE DefaultSignatures #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE PolyKinds #
# LANGUAGE StandaloneKindSignatures #
# LANGUAGE TypeApplications #
# LANGUAGE UndecidableInstances #
module PlutusCore.Builtin.KnownTypeAst
( TyNameRep (..)
, TyVarRep
, TyAppRep
, TyForallRep
, Hole
, RepHole
, TypeHole
, KnownBuiltinTypeAst
, KnownTypeAst (..)
, Delete
, Merge
) where
import PlutusCore.Builtin.Emitter
import PlutusCore.Builtin.KnownKind
import PlutusCore.Builtin.Polymorphism
import PlutusCore.Core
import PlutusCore.Evaluation.Result
import PlutusCore.MkPlc hiding (error)
import PlutusCore.Name
import Data.Kind qualified as GHC (Constraint, Type)
import Data.Proxy
import Data.Some.GADT qualified as GADT
import Data.Text qualified as Text
import Data.Type.Bool
import GHC.TypeLits
import Universe
Note [ Rep vs Type context ]
Say you define an @Id@ built - in function and specify its Haskell type signature :
i d : : forall a. a - > a
This gets picked up by the ' TypeScheme ' inference machinery , which detects @a@ and instantiates it
to @Opaque val Var0@ where is some concrete type ( the exact details do n't matter here )
representing a type variable of kind @*@ with the @0@ unique , so @id@ elaborates to
i d : : Opaque val Var0 - > Opaque val Var0
But consider also the case where you want to define @id@ only over lists . The signature of the
built - in function then is
idList : : forall a. Opaque val [ a ] - > Opaque val [ a ]
Now the ' Opaque ' is explicit and the ' TypeScheme ' inference machinery needs to go under it in order
to instantiate Which now does not get instantiated to an ' Opaque ' as before , since we 're
already inside an ' Opaque ' and can just use directly . So @idList@ elaborates to
idList : : Opaque val [ Var0 ] - > Opaque val [ Var0 ]
Now let 's make up some syntax for annotating contexts so that it 's clear what 's going on :
idList @Type |
: : ( @Type | Opaque val ( @Rep | [ Var0 ] ) )
- > ( @Type | Opaque val ( @Rep | [ Var0 ] ) )
' @ann | ' annotates everything to the right of it . The whole thing then reads as
1 . a builtin is always defined in the Type context
2 . @->@ preserves the Type context , i.e. it accepts it and passes it down to the domain and codomain
3 . @Opaque val@ switches the context from Type to Rep , i.e. it accepts the Type context , but
creates the Rep context for its argument that represents a type
So why the distinction ?
The difference between the Rep and the Type contexts that we 've seen so far is that in the Rep
context we do n't need any @Opaque@ , but this is a very superficial reason to keep the distinction
between contexts , since everything that is legal in the Type context is legal in the Rep context
as well . For example we could 've elaborated @idList@ into a bit more verbose
idList : : Opaque val [ Opaque val Var0 ] - > Opaque val [ Opaque val Var0 ]
and the world would n't end because of that , everything would work correctly .
The opposite however is not true : certain types that are legal in the Rep context are not legal in
the Type one and this is the reason why the distinction exists . The simplest example is
i d : : Var0 - > Var0
represents a type variable and it 's a data family with no inhabitants , so it does not
make sense to try to unlift a value of that type .
Now let 's say we added a @term@ argument to and said that when @Var0 term@ is a @GHC.Type@ ,
it has a @term@ inside , just like ' Opaque ' . Then we would be able to unlift it , but we also have
things like @TyAppRep@ , @TyForallRep@ and that set is open , any type can be represented
using such combinators and we can even name particular types , e.g. we could have @PlcListRep@ ,
so we 'd have to special - case @GHC.Type@ for each of them and it would be a huge mess .
So instead of mixing up types whose values are actually unliftable with types that are only used
for type checking , we keep the distinction explicit .
The barrier between and is the barrier between the Type and the Rep contexts and
that barrier must always be some explicit type constructor that switches the context from Type to
Rep. We 've only considered ' Opaque ' as an example of such type constructor , but we also have
' SomeConstant ' as another example .
Some type constructors turn any context into the Type one , for example ' ' and
' Emitter ' , although they are useless inside the Rep context , given that it 's only for type checking
and they do n't exist in the type language of .
These @*Rep@ data families like ' TyVarRep ' , ' TyAppRep ' etc all require the Rep context and preserve
it , since they 're only for representing types for type checking purposes .
We call a thing in a Rep or ' Type ' context a ' RepHole ' or ' TypeHole ' respectively . The reason for
the name is that the inference machinery looks at the thing and tries to instantiate it , like fill
a hole .
We could also have a third type of hole / context , Name , because binders bind names rather than
variables and so it makes sense to infer names sometimes , like for ' TyForallRep ' for example .
We do n't do that currently , because we do n't have such builtins anyway .
And there could be even fancier kinds of holes like " infer anything " for cases where the hole
is determined by some other part of the signature . We do n't have that either , for the same reason .
For the user defining a builtin this all is pretty much invisible .
Say you define an @Id@ built-in function and specify its Haskell type signature:
id :: forall a. a -> a
This gets picked up by the 'TypeScheme' inference machinery, which detects @a@ and instantiates it
to @Opaque val Var0@ where @Var0@ is some concrete type (the exact details don't matter here)
representing a Plutus type variable of kind @*@ with the @0@ unique, so @id@ elaborates to
id :: Opaque val Var0 -> Opaque val Var0
But consider also the case where you want to define @id@ only over lists. The signature of the
built-in function then is
idList :: forall a. Opaque val [a] -> Opaque val [a]
Now the 'Opaque' is explicit and the 'TypeScheme' inference machinery needs to go under it in order
to instantiate @a@. Which now does not get instantiated to an 'Opaque' as before, since we're
already inside an 'Opaque' and can just use @Var0@ directly. So @idList@ elaborates to
idList :: Opaque val [Var0] -> Opaque val [Var0]
Now let's make up some syntax for annotating contexts so that it's clear what's going on:
idList @Type |
:: (@Type | Opaque val (@Rep | [Var0]))
-> (@Type | Opaque val (@Rep | [Var0]))
'@ann |' annotates everything to the right of it. The whole thing then reads as
1. a builtin is always defined in the Type context
2. @->@ preserves the Type context, i.e. it accepts it and passes it down to the domain and codomain
3. @Opaque val@ switches the context from Type to Rep, i.e. it accepts the Type context, but
creates the Rep context for its argument that represents a Plutus type
So why the distinction?
The difference between the Rep and the Type contexts that we've seen so far is that in the Rep
context we don't need any @Opaque@, but this is a very superficial reason to keep the distinction
between contexts, since everything that is legal in the Type context is legal in the Rep context
as well. For example we could've elaborated @idList@ into a bit more verbose
idList :: Opaque val [Opaque val Var0] -> Opaque val [Opaque val Var0]
and the world wouldn't end because of that, everything would work correctly.
The opposite however is not true: certain types that are legal in the Rep context are not legal in
the Type one and this is the reason why the distinction exists. The simplest example is
id :: Var0 -> Var0
@Var0@ represents a Plutus type variable and it's a data family with no inhabitants, so it does not
make sense to try to unlift a value of that type.
Now let's say we added a @term@ argument to @Var0@ and said that when @Var0 term@ is a @GHC.Type@,
it has a @term@ inside, just like 'Opaque'. Then we would be able to unlift it, but we also have
things like @TyAppRep@, @TyForallRep@ and that set is open, any Plutus type can be represented
using such combinators and we can even name particular types, e.g. we could have @PlcListRep@,
so we'd have to special-case @GHC.Type@ for each of them and it would be a huge mess.
So instead of mixing up types whose values are actually unliftable with types that are only used
for type checking, we keep the distinction explicit.
The barrier between Haskell and Plutus is the barrier between the Type and the Rep contexts and
that barrier must always be some explicit type constructor that switches the context from Type to
Rep. We've only considered 'Opaque' as an example of such type constructor, but we also have
'SomeConstant' as another example.
Some type constructors turn any context into the Type one, for example 'EvaluationResult' and
'Emitter', although they are useless inside the Rep context, given that it's only for type checking
Plutus and they don't exist in the type language of Plutus.
These @*Rep@ data families like 'TyVarRep', 'TyAppRep' etc all require the Rep context and preserve
it, since they're only for representing Plutus types for type checking purposes.
We call a thing in a Rep or 'Type' context a 'RepHole' or 'TypeHole' respectively. The reason for
the name is that the inference machinery looks at the thing and tries to instantiate it, like fill
a hole.
We could also have a third type of hole/context, Name, because binders bind names rather than
variables and so it makes sense to infer names sometimes, like for 'TyForallRep' for example.
We don't do that currently, because we don't have such builtins anyway.
And there could be even fancier kinds of holes like "infer anything" for cases where the hole
is determined by some other part of the signature. We don't have that either, for the same reason.
For the user defining a builtin this all is pretty much invisible.
-}
-- | The kind of the different sorts of holes the elaboration machinery can
-- try to fill in (see the Note above for what the Rep and Type contexts are).
data Hole

-- | A hole in the Rep context: the argument only represents a Plutus type and
-- is used solely for type checking, hence @x@ is poly-kinded.
type RepHole :: forall a hole. a -> hole
data family RepHole x

-- | A hole in the Type context: the argument is an actual Haskell type of
-- kind 'GHC.Type'.
type TypeHole :: forall hole. GHC.Type -> hole
data family TypeHole a

-- | Marks the head of a built-in type application, so that instances can
-- dispatch on "this is a built-in type".
type BuiltinHead :: forall k. k -> k
data family BuiltinHead f
-- inside of it to 'TyAppRep', plus annotate the head with 'BuiltinHead'.
-- 'ToHoles': traverse the spine of the application and collect all the holes into a list, which is
-- use accumulators or an explicit 'Reverse' type family. And then we also have 'KnownTypeAst' and
-- 'ToBinds', so handling built-in types in a special way for each of those would be a hassle,
-- especially given the fact that type-level Haskell is not exactly good at computing things.
-- With the 'ElaborateBuiltin' approach we get 'KnownTypeAst', 'ToHoles' and 'ToBinds' for free.
-- | Elaborate a built-in type: walk the spine of a type application, wrap the
-- head in 'BuiltinHead' and turn each application into a 'TyAppRep'.
type ElaborateBuiltin :: forall k. k -> k
type family ElaborateBuiltin a where
    ElaborateBuiltin (f x) = ElaborateBuiltin f `TyAppRep` x
    ElaborateBuiltin f     = BuiltinHead f
-- | A constraint for "@a@ is a 'KnownTypeAst' by means of being included in @uni@".
type KnownBuiltinTypeAst uni a = KnownTypeAst uni (ElaborateBuiltin a)
-- | Computes the Plutus type AST corresponding to a Haskell type @x@, together
-- with the type-level information the elaboration machinery needs.
type KnownTypeAst :: forall a. (GHC.Type -> GHC.Type) -> a -> GHC.Constraint
class KnownTypeAst uni x where
    -- | Whether @x@ is a built-in type.
    type IsBuiltin x :: Bool
    type IsBuiltin x = IsBuiltin (ElaborateBuiltin x)

    -- | The types in @x@ that the elaboration machinery might want to
    -- instantiate. For example, in @Integer@ there's no such types and in
    -- @(a, b)@ it's the two arguments (@a@ and @b@) and the same applies to
    -- @a -> b@ (to mention a type that is not built-in).
    type ToHoles x :: [Hole]
    type ToHoles x = ToHoles (ElaborateBuiltin x)

    -- | The type variables contained in @x@, gathered in a list
    -- (see 'TyVarRep', which contributes a binding, and 'TyForallRep',
    -- which removes its own binder).
    type ToBinds x :: [GADT.Some TyNameRep]
    type ToBinds x = ToBinds (ElaborateBuiltin x)

    -- | The type representing @x@ used on the PLC side.
    toTypeAst :: proxy x -> Type TyName uni ()
    default toTypeAst :: KnownBuiltinTypeAst uni x => proxy x -> Type TyName uni ()
    toTypeAst _ = toTypeAst $ Proxy @(ElaborateBuiltin x)
    {-# INLINE toTypeAst #-}
-- | 'EvaluationResult' is not a built-in type: it keeps its argument in the
-- Type context and is erased in the resulting type AST.
instance KnownTypeAst uni a => KnownTypeAst uni (EvaluationResult a) where
    type IsBuiltin (EvaluationResult a) = 'False
    type ToHoles (EvaluationResult a) = '[TypeHole a]
    type ToBinds (EvaluationResult a) = ToBinds a
    toTypeAst _ = toTypeAst $ Proxy @a
    {-# INLINE toTypeAst #-}

-- | 'Emitter' behaves like 'EvaluationResult': erased in the type AST,
-- everything is delegated to the wrapped type.
instance KnownTypeAst uni a => KnownTypeAst uni (Emitter a) where
    type IsBuiltin (Emitter a) = 'False
    type ToHoles (Emitter a) = '[TypeHole a]
    type ToBinds (Emitter a) = ToBinds a
    toTypeAst _ = toTypeAst $ Proxy @a
    {-# INLINE toTypeAst #-}
-- | 'SomeConstant' switches the context from Type to Rep: its @rep@ argument
-- represents a Plutus type, hence the 'RepHole'.
instance KnownTypeAst uni rep => KnownTypeAst uni (SomeConstant uni rep) where
    type IsBuiltin (SomeConstant uni rep) = 'False
    type ToHoles (SomeConstant _ rep) = '[RepHole rep]
    type ToBinds (SomeConstant _ rep) = ToBinds rep
    toTypeAst _ = toTypeAst $ Proxy @rep
    {-# INLINE toTypeAst #-}

-- | 'Opaque' also switches the context from Type to Rep (it is the canonical
-- barrier between Haskell and Plutus, see the Note above).
instance KnownTypeAst uni rep => KnownTypeAst uni (Opaque val rep) where
    type IsBuiltin (Opaque val rep) = 'False
    type ToHoles (Opaque _ rep) = '[RepHole rep]
    type ToBinds (Opaque _ rep) = ToBinds rep
    toTypeAst _ = toTypeAst $ Proxy @rep
    {-# INLINE toTypeAst #-}
-- | Convert a type-level @'TyNameRep' text uniq@ into a term-level 'TyName',
-- demoting the symbol via 'symbolVal' and the unique via 'natVal'.
toTyNameAst
    :: forall text uniq. (KnownSymbol text, KnownNat uniq)
    => Proxy ('TyNameRep text uniq) -> TyName
toTyNameAst _ =
    TyName $ Name
        (Text.pack $ symbolVal @text Proxy)
        (Unique . fromIntegral $ natVal @uniq Proxy)
{-# INLINE toTyNameAst #-}
-- | The head of a built-in type application is a built-in type: no holes, no
-- bound variables, and the AST is the built-in type itself.
instance uni `Contains` f => KnownTypeAst uni (BuiltinHead f) where
    type IsBuiltin (BuiltinHead f) = 'True
    type ToHoles (BuiltinHead f) = '[]
    type ToBinds (BuiltinHead f) = '[]
    toTypeAst _ = mkTyBuiltin @_ @f ()
    {-# INLINE toTypeAst #-}

-- | @->@ preserves the Type context: domain and codomain are Type holes and
-- their bound variables get merged.
instance (KnownTypeAst uni a, KnownTypeAst uni b) => KnownTypeAst uni (a -> b) where
    type IsBuiltin (a -> b) = 'False
    type ToHoles (a -> b) = '[TypeHole a, TypeHole b]
    type ToBinds (a -> b) = Merge (ToBinds a) (ToBinds b)
    toTypeAst _ = TyFun () (toTypeAst $ Proxy @a) (toTypeAst $ Proxy @b)
    {-# INLINE toTypeAst #-}
-- | A type variable in the Rep context: no holes, but the variable itself is
-- recorded as a binding.
instance (name ~ 'TyNameRep text uniq, KnownSymbol text, KnownNat uniq) =>
        KnownTypeAst uni (TyVarRep name) where
    type IsBuiltin (TyVarRep name) = 'False
    type ToHoles (TyVarRep name) = '[]
    type ToBinds (TyVarRep name) = '[ 'GADT.Some name ]
    toTypeAst _ = TyVar () . toTyNameAst $ Proxy @('TyNameRep text uniq)
    {-# INLINE toTypeAst #-}

-- | Type application in the Rep context: both the function and the argument
-- are Rep holes.
instance (KnownTypeAst uni fun, KnownTypeAst uni arg) => KnownTypeAst uni (TyAppRep fun arg) where
    type IsBuiltin (TyAppRep fun arg) = IsBuiltin fun && IsBuiltin arg
    type ToHoles (TyAppRep fun arg) = '[RepHole fun, RepHole arg]
    type ToBinds (TyAppRep fun arg) = Merge (ToBinds fun) (ToBinds arg)
    toTypeAst _ = TyApp () (toTypeAst $ Proxy @fun) (toTypeAst $ Proxy @arg)
    {-# INLINE toTypeAst #-}
-- | Universal quantification in the Rep context: the bound variable is removed
-- from the bindings collected for the body.
instance
        ( name ~ 'TyNameRep @kind text uniq, KnownSymbol text, KnownNat uniq
        , KnownKind kind, KnownTypeAst uni a
        ) => KnownTypeAst uni (TyForallRep name a) where
    type IsBuiltin (TyForallRep name a) = 'False
    type ToHoles (TyForallRep name a) = '[RepHole a]
    type ToBinds (TyForallRep name a) = Delete ('GADT.Some name) (ToBinds a)
    toTypeAst _ =
        TyForall ()
            (toTyNameAst $ Proxy @('TyNameRep text uniq))
            (demoteKind $ knownKind @kind)
            (toTypeAst $ Proxy @a)
    {-# INLINE toTypeAst #-}
-- Utils

-- | Delete all @x@s from a list.
type family Delete x xs :: [a] where
    Delete _ '[]       = '[]
    Delete x (x ': xs) = Delete x xs
    Delete x (y ': xs) = y ': Delete x xs
-- | Delete all elements appearing in the first list from the second one and
-- concatenate the lists.
type family Merge xs ys :: [a] where
    Merge '[] ys       = ys
    Merge (x ': xs) ys = x ': Delete x (Merge xs ys)
|
6ea8d7154ce9e754f4597229a4e6bccb0d13b732f7d7fbc04ba37469fbbba199 | akc/hops | CLIProperties.hs | -- |
-- Copyright :
-- Maintainer : < >
-- License : BSD-3
--
import System.Process
-- | Entry point: delegate all CLI property tests to the shell script.
main = callCommand "tests/cli-properties.sh"
| null | https://raw.githubusercontent.com/akc/hops/fa5c0c731d586a50b86d2b0c08350d53769295e9/tests/CLIProperties.hs | haskell | |
License : BSD-3
| Copyright :
Maintainer : < >
import System.Process
-- | Entry point: delegate all CLI property tests to the shell script.
main = callCommand "tests/cli-properties.sh"
|
1fb79e7672c9d6e0a13ceb425ce6bb0d2739521563de07139a49f5433e6cc0ba | mwand/eopl3 | environments.scm | (module environments (lib "eopl.ss" "eopl")
(require "data-structures.scm")
(provide init-env empty-env extend-env apply-env)
;;;;;;;;;;;;;;;; initial environment ;;;;;;;;;;;;;;;;
;; init-env : () -> environment
;; (init-env) builds an environment in which i is bound to the
;; expressed value 1, v is bound to the expressed value 5, and x is
;; bound to the expressed value 10.
;; Builds the standard initial environment by chaining extend-env
;; onto the empty environment: i -> 1, v -> 5, x -> 10.
(define init-env
  (lambda ()
    (extend-env
     'i (num-val 1)
     (extend-env
      'v (num-val 5)
      (extend-env
       'x (num-val 10)
       (empty-env))))))
;;;;;;;;;;;;;;;; environment constructors and observers ;;;;;;;;;;;;;;;;
;; apply-env : Env * Sym -> ExpVal
;; Walks the environment from newest to oldest binding and returns the value
;; bound to search-sym; reports an error if the symbol is unbound.
(define apply-env
  (lambda (env search-sym)
    (cases environment env
      (empty-env ()
        ;; reached the end of the chain without finding the symbol
        (eopl:error 'apply-env "No binding for ~s" search-sym))
      (extend-env (bvar bval saved-env)
        (if (eqv? search-sym bvar)
            bval
            (apply-env saved-env search-sym)))
      (extend-env-rec (p-name b-var p-body saved-env)
        ;; a recursive binding closes its body over the *current* environment
        ;; (env, not saved-env), which is what makes the procedure recursive
        (if (eqv? search-sym p-name)
            (proc-val (procedure b-var p-body env))
            (apply-env saved-env search-sym))))))
) | null | https://raw.githubusercontent.com/mwand/eopl3/b50e015be7f021d94c1af5f0e3a05d40dd2b0cbf/chapter7/inferred/environments.scm | scheme | initial environment ;;;;;;;;;;;;;;;;
init-env : () -> environment
(init-env) builds an environment in which i is bound to the
environment constructors and observers ;;;;;;;;;;;;;;;; | (module environments (lib "eopl.ss" "eopl")
(require "data-structures.scm")
(provide init-env empty-env extend-env apply-env)
expressed value 1 , v is bound to the expressed value 5 , and x is
bound to the expressed value 10 .
(define init-env
(lambda ()
(extend-env
'i (num-val 1)
(extend-env
'v (num-val 5)
(extend-env
'x (num-val 10)
(empty-env))))))
(define apply-env
(lambda (env search-sym)
(cases environment env
(empty-env ()
(eopl:error 'apply-env "No binding for ~s" search-sym))
(extend-env (bvar bval saved-env)
(if (eqv? search-sym bvar)
bval
(apply-env saved-env search-sym)))
(extend-env-rec (p-name b-var p-body saved-env)
(if (eqv? search-sym p-name)
(proc-val (procedure b-var p-body env))
(apply-env saved-env search-sym))))))
) |
0494d2bf7450ddd7dd94622081011f691d9f0f9f25043c205c5f08416c78b627 | avsm/mirage-duniverse | bin_prot_test.ml | open Bigarray
open Printf
open OUnit
open Bin_prot
open Common
open Utils
open ReadError
open Type_class
open Bin_prot.Std
(* Minimal helpers over Bin_prot's [buf] (a char bigarray) used throughout the
   tests below. *)
module Bigstring = struct
  type t = buf

  let create = create_buf

  (* Copy [str] into a freshly allocated buffer of the same length. *)
  let of_string str =
    let len = String.length str in
    let buf = create len in
    blit_string_buf str buf ~len;
    buf

  let length buf = Array1.dim buf
end
(* [expect_exc test_exc f] evaluates [f ()] and reports whether it raised an
   exception accepted by [test_exc].  A normal return yields [false]. *)
let expect_exc test_exc f =
  match f () with
  | _ -> false
  | exception e -> test_exc e
(* Expect [f] to raise the bigarray bounds-check exception.
   NOTE(review): this matches the exact exception message, which is tied to
   the runtime's wording — confirm against the supported OCaml versions. *)
let expect_bounds_error f =
  let is_bounds_exc = function
    | Invalid_argument "index out of bounds" -> true
    | _ -> false
  in
  expect_exc is_bounds_exc f
(* Expect [f] to raise Bin_prot's [Buffer_short]. *)
let expect_buffer_short f =
  let exc = Buffer_short in
  expect_exc ((=) exc) f
(* Expect [f] to raise [Read_error (exp_re, exp_pos)] with exactly the given
   error constructor and position. *)
let expect_read_error exp_re exp_pos f =
  let test_exc = function
    | Read_error (re, pos) -> exp_re = re && exp_pos = pos
    | _ -> false
  in
  expect_exc test_exc f
(* [expect_no_error f] is [true] iff [f ()] returns without raising. *)
let expect_no_error f =
  match f () with
  | _ -> true
  | exception _ -> false
(* Check that [write] rejects an out-of-range [pos]: a negative position must
   raise the bounds-check exception and a position at the end of [buf] must
   raise [Buffer_short]. *)
let check_write_bounds_checks name buf write arg =
  (name ^ ": negative bound") @?
    expect_bounds_error (fun () -> write buf ~pos:~-1 arg);
  (name ^ ": positive bound") @?
    expect_buffer_short (fun () -> write buf ~pos:(Bigstring.length buf) arg)
(* Same as [check_write_bounds_checks], but for a reader taking a [pos_ref]. *)
let check_read_bounds_checks name buf read =
  (name ^ ": negative bound") @?
    expect_bounds_error (fun () -> read buf ~pos_ref:(ref ~-1));
  (name ^ ": positive bound") @?
    expect_buffer_short (fun () -> read buf ~pos_ref:(ref (Bigstring.length buf)))
(* Run [write] at [pos] and check that the returned position advanced by
   exactly [exp_len] bytes. *)
let check_write_result name buf pos write arg exp_len =
  let res_pos = write buf ~pos arg in
  sprintf "%s: returned wrong write position (%d, expected %d)"
    name res_pos (pos + exp_len)
  @? (res_pos = pos + exp_len)
(* Run [read] at [pos] and check both the decoded value ([exp_ret]) and that
   the position reference advanced by exactly [exp_len] bytes. *)
let check_read_result name buf pos read exp_ret exp_len =
  let pos_ref = ref pos in
  (name ^ ": returned wrong result") @? (read buf ~pos_ref = exp_ret);
  sprintf "%s: returned wrong read position (%d, expected %d)"
    name !pos_ref (pos + exp_len)
  @? (!pos_ref - pos = exp_len)
(* For every [(arg, str_arg, arg_len)] in [args]:
   - re-check bounds handling of [read]/[write] and round-trip the value with
     the expected size at positions 0..8;
   - check behaviour right at the end of [buf]: an operation that just fits
     must succeed (and read back the same value), and the same operation on a
     buffer one byte shorter must raise [Buffer_short]. *)
let check_all_args tp_name read write buf args =
  let write_name = "write_" ^ tp_name ^ " " in
  let read_name = "read_" ^ tp_name ^ " " in
  let buf_len = Bigstring.length buf in
  let act (arg, str_arg, arg_len) =
    let write_name_arg = write_name ^ str_arg in
    let read_name_arg = read_name ^ str_arg in
    for pos = 0 to 8 do
      check_write_bounds_checks write_name buf write arg;
      check_read_bounds_checks read_name buf read;
      check_write_result write_name_arg buf pos write arg arg_len;
      check_read_result read_name_arg buf pos read arg arg_len;
    done;
    (* the value occupies exactly the last [arg_len] bytes of the buffer *)
    (write_name_arg ^ ": write failed near bound") @? expect_no_error (fun () ->
      write buf ~pos:(buf_len - arg_len) arg);
    (read_name_arg ^ ": read failed near bound") @? expect_no_error (fun () ->
      if read buf ~pos_ref:(ref (buf_len - arg_len)) <> arg then
        failwith (read_name_arg ^ ": read near bound returned wrong result"));
    (* shrinking the buffer by one byte must now make both operations fail *)
    let small_buf = Array1.sub buf 0 (buf_len - 1) in
    (write_name_arg ^ ": write exceeds bound") @? expect_buffer_short (fun () ->
      write small_buf ~pos:(buf_len - arg_len) arg);
    (read_name_arg ^ ": read exceeds bound") @? expect_buffer_short (fun () ->
      read small_buf ~pos_ref:(ref (buf_len - arg_len)))
  in
  List.iter act args
(* Allocate an [n]-byte buffer pre-filled with 0xff so stale bytes are easy to
   spot. *)
let mk_buf n =
  let bstr = Bigstring.create n in
  for i = 0 to n - 1 do bstr.{i} <- '\255' done;
  bstr
(* Top-level driver: allocate a buffer big enough for the largest argument
   plus 8 bytes of slack, run the generic bounds checks once (using the first
   test case's value), then exercise every case via [check_all_args].
   [args] must be non-empty. *)
let check_all extra_buf_size tp_name read write args =
  let buf_len = extra_buf_size + 8 in
  let buf = mk_buf buf_len in
  match args with
  | [] -> assert false
  | (arg, _, _) :: _ ->
    let write_name = "write_" ^ tp_name in
    check_write_bounds_checks write_name buf write arg;
    let read_name = "read_" ^ tp_name in
    check_read_bounds_checks read_name buf read;
    check_all_args tp_name read write buf args
(* An [n]-character string of uniformly random bytes. *)
let random_string n =
  let random_byte _ = Char.chr (Random.int 256) in
  String.init n random_byte
(* Each helper builds a test case [(value, label, expected_size)] for one
   numeric type; the label is the value printed in hex. *)
let mk_int_test ~n ~len = n, Printf.sprintf "%x" n, len
let mk_nat0_test ~n ~len = Nat0.of_int n, Printf.sprintf "%x" n, len
(* floats are always serialized in 8 bytes *)
let mk_float_test n = n, Printf.sprintf "%g" n, 8
let mk_int32_test ~n ~len = n, Printf.sprintf "%lx" n, len
let mk_int64_test ~n ~len = n, Printf.sprintf "%Lx" n, len
let mk_nativeint_test ~n ~len = n, Printf.sprintf "%nx" n, len
(* Fortran-layout float vector of length [n] with vec.{i} = i (1-based). *)
let mk_gen_float_vec tp n =
  let vec = Array1.create tp fortran_layout n in
  for i = 1 to n do
    vec.{i} <- float i
  done;
  vec

let mk_float32_vec = mk_gen_float_vec float32
let mk_float64_vec = mk_gen_float_vec float64
(* An [n]-byte C-layout char bigarray filled with random bytes. *)
let mk_bigstring n =
  let bstr = Array1.create char c_layout n in
  for i = 0 to n - 1 do
    bstr.{i} <- Char.chr (Random.int 256)
  done;
  bstr
(* Fortran-layout [m] x [n] float matrix whose entries count up column by
   column: mat.{r, c} = (c - 1) * m + r. *)
let mk_gen_float_mat tp m n =
  let mat = Array2.create tp fortran_layout m n in
  let fn = float m in
  for c = 1 to n do
    let ofs = float (c - 1) *. fn in
    for r = 1 to m do
      mat.{r, c} <- ofs +. float r
    done;
  done;
  mat

let mk_float32_mat = mk_gen_float_mat float32
let mk_float64_mat = mk_gen_float_mat float64
let test =
"Bin_prot" >:::
[
"unit" >::
(fun () ->
check_all 1 "unit" Read.bin_read_unit Write.bin_write_unit
[
((), "()", 1);
];
);
"bool" >::
(fun () ->
check_all 1 "bool" Read.bin_read_bool Write.bin_write_bool
[
(true, "true", 1);
(false, "false", 1);
];
);
"string" >::
(fun () ->
check_all 66000 "string" Read.bin_read_string Write.bin_write_string
[
("", "\"\"", 1);
(random_string 1, "random 1", 1 + 1);
(random_string 10, "random 10", 10 + 1);
(random_string 127, "random 127", 127 + 1);
(random_string 128, "long 128", 128 + 3);
(random_string 65535, "long 65535", 65535 + 3);
(random_string 65536, "long 65536", 65536 + 5);
];
if Sys.word_size = 32 then
let bad_buf = Bigstring.of_string "\253\252\255\255\000" in
"String_too_long" @? expect_read_error String_too_long 0 (fun () ->
Read.bin_read_string bad_buf ~pos_ref:(ref 0));
let bad_buf = Bigstring.of_string "\253\251\255\255\000" in
"StringMaximimum" @? expect_buffer_short (fun () ->
Read.bin_read_string bad_buf ~pos_ref:(ref 0))
else
let bad_buf = Bigstring.of_string "\252\248\255\255\255\255\255\255\001" in
"String_too_long" @? expect_read_error String_too_long 0 (fun () ->
Read.bin_read_string bad_buf ~pos_ref:(ref 0));
let bad_buf = Bigstring.of_string "\252\247\255\255\255\255\255\255\001" in
"StringMaximimum" @? expect_buffer_short (fun () ->
Read.bin_read_string bad_buf ~pos_ref:(ref 0))
);
"char" >::
(fun () ->
check_all 1 "char" Read.bin_read_char Write.bin_write_char
[
('x', "x", 1);
('y', "y", 1);
];
);
"int" >::
(fun () ->
let small_int_tests =
[
mk_int_test ~n:~-0x01 ~len:2;
mk_int_test ~n: 0x00 ~len:1;
mk_int_test ~n: 0x01 ~len:1;
mk_int_test ~n:0x7e ~len:1;
mk_int_test ~n:0x7f ~len:1;
mk_int_test ~n:0x80 ~len:3;
mk_int_test ~n:0x81 ~len:3;
mk_int_test ~n:0x7ffe ~len:3;
mk_int_test ~n:0x7fff ~len:3;
mk_int_test ~n:0x8000 ~len:5;
mk_int_test ~n:0x8001 ~len:5;
mk_int_test ~n:0x3ffffffe ~len:5;
mk_int_test ~n:0x3fffffff ~len:5;
mk_int_test ~n:~-0x7f ~len:2;
mk_int_test ~n:~-0x80 ~len:2;
mk_int_test ~n:~-0x81 ~len:3;
mk_int_test ~n:~-0x82 ~len:3;
mk_int_test ~n:~-0x7fff ~len:3;
mk_int_test ~n:~-0x8000 ~len:3;
mk_int_test ~n:~-0x8001 ~len:5;
mk_int_test ~n:~-0x8002 ~len:5;
mk_int_test ~n:~-0x40000001 ~len:5;
mk_int_test ~n:~-0x40000000 ~len:5;
]
in
let all_int_tests =
if Sys.word_size = 32 then small_int_tests
else
mk_int_test ~n:(int_of_string "0x7ffffffe") ~len:5 ::
mk_int_test ~n:(int_of_string "0x7fffffff") ~len:5 ::
mk_int_test ~n:(int_of_string "0x80000000") ~len:9 ::
mk_int_test ~n:(int_of_string "0x80000001") ~len:9 ::
mk_int_test ~n:max_int ~len:9 ::
mk_int_test ~n:(int_of_string "-0x000000007fffffff") ~len:5 ::
mk_int_test ~n:(int_of_string "-0x0000000080000000") ~len:5 ::
mk_int_test ~n:(int_of_string "-0x0000000080000001") ~len:9 ::
mk_int_test ~n:(int_of_string "-0x0000000080000002") ~len:9 ::
mk_int_test ~n:min_int ~len:9 ::
small_int_tests
in
check_all 9 "int" Read.bin_read_int Write.bin_write_int
all_int_tests;
let bad_buf = Bigstring.of_string "\132" in
"Int_code" @? expect_read_error Int_code 0 (fun () ->
Read.bin_read_int bad_buf ~pos_ref:(ref 0));
if Sys.word_size = 32 then
let bad_buf = Bigstring.of_string "\253\255\255\255\064" in
"Int_overflow (positive)" @? expect_read_error Int_overflow 0 (fun () ->
Read.bin_read_int bad_buf ~pos_ref:(ref 0));
let bad_buf = Bigstring.of_string "\253\255\255\255\191" in
"Int_overflow (negative)" @? expect_read_error Int_overflow 0 (fun () ->
Read.bin_read_int bad_buf ~pos_ref:(ref 0))
else
let bad_buf = Bigstring.of_string "\252\255\255\255\255\255\255\255\064" in
"Int_overflow (positive)" @? expect_read_error Int_overflow 0 (fun () ->
Read.bin_read_int bad_buf ~pos_ref:(ref 0));
let bad_buf = Bigstring.of_string "\252\255\255\255\255\255\255\255\191" in
"Int_overflow (negative)" @? expect_read_error Int_overflow 0 (fun () ->
Read.bin_read_int bad_buf ~pos_ref:(ref 0))
);
"nat0" >::
(fun () ->
let small_int_tests =
[
mk_nat0_test ~n:0x00 ~len:1;
mk_nat0_test ~n:0x01 ~len:1;
mk_nat0_test ~n:0x7e ~len:1;
mk_nat0_test ~n:0x7f ~len:1;
mk_nat0_test ~n:0x80 ~len:3;
mk_nat0_test ~n:0x81 ~len:3;
mk_nat0_test ~n:0x7fff ~len:3;
mk_nat0_test ~n:0x8000 ~len:3;
mk_nat0_test ~n:0xffff ~len:3;
mk_nat0_test ~n:0x10000 ~len:5;
mk_nat0_test ~n:0x10001 ~len:5;
mk_nat0_test ~n:0x3ffffffe ~len:5;
mk_nat0_test ~n:0x3fffffff ~len:5;
]
in
let all_int_tests =
if Sys.word_size = 32 then small_int_tests
else
mk_nat0_test ~n:(int_of_string "0x7fffffff") ~len:5 ::
mk_nat0_test ~n:(int_of_string "0x80000000") ~len:5 ::
mk_nat0_test ~n:(int_of_string "0xffffffff") ~len:5 ::
mk_nat0_test ~n:(int_of_string "0x100000000") ~len:9 ::
mk_nat0_test ~n:(int_of_string "0x100000001") ~len:9 ::
mk_nat0_test ~n:max_int ~len:9 ::
small_int_tests
in
check_all 9 "nat0" Read.bin_read_nat0 Write.bin_write_nat0
all_int_tests;
let bad_buf = Bigstring.of_string "\128" in
"Nat0_code" @? expect_read_error Nat0_code 0 (fun () ->
Read.bin_read_nat0 bad_buf ~pos_ref:(ref 0));
if Sys.word_size = 32 then
let bad_buf = Bigstring.of_string "\253\255\255\255\064" in
"Nat0_overflow" @? expect_read_error Nat0_overflow 0 (fun () ->
Read.bin_read_nat0 bad_buf ~pos_ref:(ref 0))
else
let bad_buf = Bigstring.of_string "\252\255\255\255\255\255\255\255\064" in
"Nat0_overflow" @? expect_read_error Nat0_overflow 0 (fun () ->
Read.bin_read_nat0 bad_buf ~pos_ref:(ref 0))
);
"float" >::
(fun () ->
let float_tests =
[
mk_float_test 0.;
mk_float_test (-0.);
mk_float_test (-1.);
mk_float_test 1.;
mk_float_test infinity;
mk_float_test (-.infinity);
mk_float_test 1e-310; (* subnormal *)
mk_float_test (-1e-310); (* subnormal *)
mk_float_test 3.141595;
]
in
check_all 8 "float" Read.bin_read_float Write.bin_write_float
float_tests
);
"int32" >::
(fun () ->
let int32_tests =
[
mk_int32_test ~n:(-0x01l) ~len:2;
mk_int32_test ~n: 0x00l ~len:1;
mk_int32_test ~n: 0x01l ~len:1;
mk_int32_test ~n:0x7el ~len:1;
mk_int32_test ~n:0x7fl ~len:1;
mk_int32_test ~n:0x80l ~len:3;
mk_int32_test ~n:0x81l ~len:3;
mk_int32_test ~n:0x7ffel ~len:3;
mk_int32_test ~n:0x7fffl ~len:3;
mk_int32_test ~n:0x8000l ~len:5;
mk_int32_test ~n:0x8001l ~len:5;
mk_int32_test ~n:0x7ffffffel ~len:5;
mk_int32_test ~n:0x7fffffffl ~len:5;
mk_int32_test ~n:(-0x7fl) ~len:2;
mk_int32_test ~n:(-0x80l) ~len:2;
mk_int32_test ~n:(-0x81l) ~len:3;
mk_int32_test ~n:(-0x82l) ~len:3;
mk_int32_test ~n:(-0x7fffl) ~len:3;
mk_int32_test ~n:(-0x8000l) ~len:3;
mk_int32_test ~n:(-0x8001l) ~len:5;
mk_int32_test ~n:(-0x8002l) ~len:5;
mk_int32_test ~n:(-0x80000001l) ~len:5;
mk_int32_test ~n:(-0x80000000l) ~len:5;
]
in
check_all 5 "int32" Read.bin_read_int32 Write.bin_write_int32
int32_tests;
let bad_buf = Bigstring.of_string "\132" in
"Int32_code" @? expect_read_error Int32_code 0 (fun () ->
Read.bin_read_int32 bad_buf ~pos_ref:(ref 0))
);
"int64" >::
(fun () ->
let int64_tests =
[
mk_int64_test ~n:(-0x01L) ~len:2;
mk_int64_test ~n: 0x00L ~len:1;
mk_int64_test ~n: 0x01L ~len:1;
mk_int64_test ~n:0x7eL ~len:1;
mk_int64_test ~n:0x7fL ~len:1;
mk_int64_test ~n:0x80L ~len:3;
mk_int64_test ~n:0x81L ~len:3;
mk_int64_test ~n:0x7ffeL ~len:3;
mk_int64_test ~n:0x7fffL ~len:3;
mk_int64_test ~n:0x8000L ~len:5;
mk_int64_test ~n:0x8001L ~len:5;
mk_int64_test ~n:0x7ffffffeL ~len:5;
mk_int64_test ~n:0x7fffffffL ~len:5;
mk_int64_test ~n:0x80000000L ~len:9;
mk_int64_test ~n:0x80000001L ~len:9;
mk_int64_test ~n:0x7ffffffffffffffeL ~len:9;
mk_int64_test ~n:0x7fffffffffffffffL ~len:9;
mk_int64_test ~n:(-0x7fL) ~len:2;
mk_int64_test ~n:(-0x80L) ~len:2;
mk_int64_test ~n:(-0x81L) ~len:3;
mk_int64_test ~n:(-0x82L) ~len:3;
mk_int64_test ~n:(-0x7fffL) ~len:3;
mk_int64_test ~n:(-0x8000L) ~len:3;
mk_int64_test ~n:(-0x8001L) ~len:5;
mk_int64_test ~n:(-0x8002L) ~len:5;
mk_int64_test ~n:(-0x7fffffffL) ~len:5;
mk_int64_test ~n:(-0x80000000L) ~len:5;
mk_int64_test ~n:(-0x80000001L) ~len:9;
mk_int64_test ~n:(-0x80000002L) ~len:9;
mk_int64_test ~n:(-0x8000000000000001L) ~len:9;
mk_int64_test ~n:(-0x8000000000000000L) ~len:9;
]
in
check_all 9 "int64" Read.bin_read_int64 Write.bin_write_int64
int64_tests;
let bad_buf = Bigstring.of_string "\132" in
"Int64_code" @? expect_read_error Int64_code 0 (fun () ->
Read.bin_read_int64 bad_buf ~pos_ref:(ref 0))
);
"nativeint" >::
(fun () ->
let small_nativeint_tests =
[
mk_nativeint_test ~n:(-0x01n) ~len:2;
mk_nativeint_test ~n: 0x00n ~len:1;
mk_nativeint_test ~n: 0x01n ~len:1;
mk_nativeint_test ~n:0x7en ~len:1;
mk_nativeint_test ~n:0x7fn ~len:1;
mk_nativeint_test ~n:0x80n ~len:3;
mk_nativeint_test ~n:0x81n ~len:3;
mk_nativeint_test ~n:0x7ffen ~len:3;
mk_nativeint_test ~n:0x7fffn ~len:3;
mk_nativeint_test ~n:0x8000n ~len:5;
mk_nativeint_test ~n:0x8001n ~len:5;
mk_nativeint_test ~n:0x7ffffffen ~len:5;
mk_nativeint_test ~n:0x7fffffffn ~len:5;
mk_nativeint_test ~n:(-0x7fn) ~len:2;
mk_nativeint_test ~n:(-0x80n) ~len:2;
mk_nativeint_test ~n:(-0x81n) ~len:3;
mk_nativeint_test ~n:(-0x82n) ~len:3;
mk_nativeint_test ~n:(-0x7fffn) ~len:3;
mk_nativeint_test ~n:(-0x8000n) ~len:3;
mk_nativeint_test ~n:(-0x8001n) ~len:5;
mk_nativeint_test ~n:(-0x8002n) ~len:5;
mk_nativeint_test ~n:(-0x7fffffffn) ~len:5;
mk_nativeint_test ~n:(-0x80000000n) ~len:5;
]
in
let nativeint_tests =
if Sys.word_size = 32 then small_nativeint_tests
else
mk_nativeint_test ~n:0x80000000n ~len:9 ::
mk_nativeint_test ~n:0x80000001n ~len:9 ::
mk_nativeint_test ~n:(-0x80000001n) ~len:9 ::
mk_nativeint_test ~n:(-0x80000002n) ~len:9 ::
mk_nativeint_test ~n:(Nativeint.of_string "0x7ffffffffffffffe") ~len:9 ::
mk_nativeint_test ~n:(Nativeint.of_string "0x7fffffffffffffff") ~len:9 ::
mk_nativeint_test ~n:(Nativeint.of_string "-0x8000000000000001") ~len:9 ::
mk_nativeint_test ~n:(Nativeint.of_string "-0x8000000000000000") ~len:9 ::
small_nativeint_tests
in
let size = if Sys.word_size = 32 then 5 else 9 in
check_all size "nativeint"
Read.bin_read_nativeint Write.bin_write_nativeint
nativeint_tests;
let bad_buf = Bigstring.of_string "\251" in
"Nativeint_code" @? expect_read_error Nativeint_code 0 (fun () ->
Read.bin_read_nativeint bad_buf ~pos_ref:(ref 0));
if Sys.word_size = 32 then
let bad_buf = Bigstring.of_string "\252\255\255\255\255\255\255\255\255" in
"Nativeint_code (overflow)" @? expect_read_error Nativeint_code 0 (fun () ->
Read.bin_read_nativeint bad_buf ~pos_ref:(ref 0))
);
"ref" >::
(fun () ->
check_all 1 "ref"
(Read.bin_read_ref Read.bin_read_int)
(Write.bin_write_ref Write.bin_write_int)
[(ref 42, "ref 42", 1)];
);
"option" >::
(fun () ->
check_all 2 "option"
(Read.bin_read_option Read.bin_read_int)
(Write.bin_write_option Write.bin_write_int)
[
(Some 42, "Some 42", 2);
(None, "None", 1);
];
);
"pair" >::
(fun () ->
check_all 9 "pair"
(Read.bin_read_pair Read.bin_read_float Read.bin_read_int)
(Write.bin_write_pair Write.bin_write_float Write.bin_write_int)
[((3.141, 42), "(3.141, 42)", 9)];
);
"triple" >::
(fun () ->
check_all 14 "triple"
(Read.bin_read_triple
Read.bin_read_float Read.bin_read_int Read.bin_read_string)
(Write.bin_write_triple
Write.bin_write_float Write.bin_write_int Write.bin_write_string)
[((3.141, 42, "test"), "(3.141, 42, \"test\")", 14)];
);
"list" >::
(fun () ->
check_all 12 "list"
(Read.bin_read_list Read.bin_read_int)
(Write.bin_write_list Write.bin_write_int)
[
([42; -1; 200; 33000], "[42; -1; 200; 33000]", 12);
([], "[]", 1);
];
);
"array" >::
(fun () ->
let bin_read_int_array = Read.bin_read_array Read.bin_read_int in
check_all 12 "array"
bin_read_int_array
(Write.bin_write_array Write.bin_write_int)
[
([| 42; -1; 200; 33000 |], "[|42; -1; 200; 33000|]", 12);
([||], "[||]", 1);
];
if Sys.word_size = 32 then
let bad_buf = Bigstring.of_string "\253\000\000\064\000" in
"Array_too_long" @? expect_read_error Array_too_long 0 (fun () ->
bin_read_int_array bad_buf ~pos_ref:(ref 0));
let bad_buf = Bigstring.of_string "\253\255\255\063\000" in
"ArrayMaximimum" @? expect_buffer_short (fun () ->
bin_read_int_array bad_buf ~pos_ref:(ref 0))
else
let bad_buf = Bigstring.of_string "\252\000\000\000\000\000\000\064\000" in
"Array_too_long" @? expect_read_error Array_too_long 0 (fun () ->
bin_read_int_array bad_buf ~pos_ref:(ref 0));
let bad_buf = Bigstring.of_string "\252\255\255\255\255\255\255\063\000" in
"ArrayMaximimum" @? expect_buffer_short (fun () ->
bin_read_int_array bad_buf ~pos_ref:(ref 0))
);
"float_array" >::
(fun () ->
check_all 33 "float_array"
Read.bin_read_float_array Write.bin_write_float_array
[
([| 42.; -1.; 200.; 33000. |], "[|42.; -1.; 200.; 33000.|]", 33);
([||], "[||]", 1);
];
if Sys.word_size = 32 then
let bad_buf = Bigstring.of_string "\253\000\000\032\000" in
"Array_too_long (float)" @? expect_read_error Array_too_long 0 (fun () ->
Read.bin_read_float_array bad_buf ~pos_ref:(ref 0));
let bad_buf = Bigstring.of_string "\253\255\255\031\000" in
"ArrayMaximimum (float)" @? expect_buffer_short (fun () ->
Read.bin_read_float_array bad_buf ~pos_ref:(ref 0))
else
let bad_buf = Bigstring.of_string "\252\000\000\000\000\000\000\064\000" in
"Array_too_long (float)" @? expect_read_error Array_too_long 0 (fun () ->
Read.bin_read_float_array bad_buf ~pos_ref:(ref 0));
let bad_buf = Bigstring.of_string "\252\255\255\255\255\255\255\063\000" in
"ArrayMaximimum (float)" @? expect_buffer_short (fun () ->
Read.bin_read_float_array bad_buf ~pos_ref:(ref 0));
(* Test that the binary forms of [float array] and [float_array] are the same *)
let arrays =
let rec loop acc len =
if len < 0 then acc
else
let a = Array.init len (fun i -> float_of_int (i + len)) in
let txt = Printf.sprintf "float array, len = %d" len in
let buf = len * 8 + Size.bin_size_nat0 (Nat0.unsafe_of_int len) in
loop ((a, txt, buf) :: acc) (len - 1)
in
loop [] 255 in
let len = 255 * 8 + Size.bin_size_nat0 (Nat0.unsafe_of_int 255) in
check_all len "float array -> float_array"
Read.bin_read_float_array (Write.bin_write_array Write.bin_write_float)
arrays;
check_all len "float_array -> float array"
(Read.bin_read_array Read.bin_read_float) (Write.bin_write_float_array)
arrays;
(* Check that the canonical closures used in the short circuit test of float
arrays are indeed allocated closures as opposed to [compare] for example
which is a primitive. Even if it looks like a tautology, it is not (for
example, [compare == compare] is false). *)
assert (bin_write_float == bin_write_float);
assert (bin_read_float == bin_read_float);
assert (bin_size_float == bin_size_float);
);
"hashtbl" >::
(fun () ->
let bindings = List.rev [(42, 3.); (17, 2.); (42, 4.)] in
let htbl = Hashtbl.create (List.length bindings) in
List.iter (fun (k, v) -> Hashtbl.add htbl k v) bindings;
check_all 28 "hashtbl"
(Read.bin_read_hashtbl Read.bin_read_int Read.bin_read_float)
(Write.bin_write_hashtbl Write.bin_write_int Write.bin_write_float)
[
(htbl, "[(42, 3.); (17, 2.); (42, 4.)]", 28);
(Hashtbl.create 0, "[]", 1)
];
);
"float32_vec" >::
(fun () ->
let n = 128 in
let header = 3 in
let size = header + n * 4 in
let vec = mk_float32_vec n in
check_all size "float32_vec"
Read.bin_read_float32_vec
Write.bin_write_float32_vec
[
(vec, "[| ... |]", size);
(mk_float32_vec 0, "[||]", 1);
]
);
"float64_vec" >::
(fun () ->
let n = 127 in
let header = 1 in
let size = header + n * 8 in
let vec = mk_float64_vec n in
check_all size "float64_vec"
Read.bin_read_float64_vec
Write.bin_write_float64_vec
[
(vec, "[| ... |]", size);
(mk_float64_vec 0, "[||]", 1);
]
);
"vec" >::
(fun () ->
let n = 128 in
let header = 3 in
let size = header + n * 8 in
let vec = mk_float64_vec n in
check_all size "vec"
Read.bin_read_vec
Write.bin_write_vec
[
(vec, "[| ... |]", size);
(mk_float64_vec 0, "[||]", 1);
]
);
"float32_mat" >::
(fun () ->
let m = 128 in
let n = 127 in
let header = 3 + 1 in
let size = header + m * n * 4 in
let mat = mk_float32_mat m n in
check_all size "float32_mat"
Read.bin_read_float32_mat
Write.bin_write_float32_mat
[
(mat, "[| ... |]", size);
(mk_float32_mat 0 0, "[||]", 2);
]
);
"float64_mat" >::
(fun () ->
let m = 10 in
let n = 12 in
let header = 1 + 1 in
let size = header + m * n * 8 in
let mat = mk_float64_mat m n in
check_all size "float64_mat"
Read.bin_read_float64_mat
Write.bin_write_float64_mat
[
(mat, "[| ... |]", size);
(mk_float64_mat 0 0, "[||]", 2);
]
);
"mat" >::
(fun () ->
let m = 128 in
let n = 128 in
let header = 3 + 3 in
let size = header + m * n * 8 in
let mat = mk_float64_mat m n in
check_all size "mat"
Read.bin_read_mat
Write.bin_write_mat
[
(mat, "[| ... |]", size);
(mk_float64_mat 0 0, "[||]", 2);
]
);
"bigstring" >::
(fun () ->
let n = 128 in
let header = 3 in
let size = header + n in
let bstr = mk_bigstring n in
check_all size "bigstring"
Read.bin_read_bigstring
Write.bin_write_bigstring
[
(bstr, "[| ... |]", size);
(mk_bigstring 0, "[||]", 1);
]
);
"bigstring (big)" >::
(fun () ->
(* [n] is a 16bits integer that will be serialized differently depending on
whether it is considered as an integer or an unsigned integer. *)
let n = 40_000 in
let header = 3 in
let size = header + n in
let bstr = mk_bigstring n in
check_all size "bigstring"
Read.bin_read_bigstring
Write.bin_write_bigstring
[
(bstr, "[| ... |]", size);
(mk_bigstring 0, "[||]", 1);
]
);
"variant_tag" >::
(fun () ->
check_all 4 "variant_tag"
Read.bin_read_variant_int
Write.bin_write_variant_int
[
((Obj.magic `Foo : int), "`Foo", 4);
((Obj.magic `Bar : int), "`Bar", 4);
];
let bad_buf = Bigstring.of_string "\000\000\000\000" in
"Variant_tag" @? expect_read_error Variant_tag 0 (fun () ->
Read.bin_read_variant_int bad_buf ~pos_ref:(ref 0))
);
"int64_bits" >::
(fun () ->
check_all 8 "int64_bits"
Read.bin_read_int64_bits
Write.bin_write_int64_bits
[
(Int64.min_int, "min_int", 8);
(Int64.add Int64.min_int Int64.one, "min_int + 1", 8);
(Int64.minus_one, "-1", 8);
(Int64.zero, "0", 8);
(Int64.one, "1", 8);
(Int64.sub Int64.max_int Int64.one, "max_int - 1", 8);
(Int64.max_int, "max_int", 8);
];
);
"int_64bit" >::
(fun () ->
check_all 8 "int_64bit"
Read.bin_read_int_64bit
Write.bin_write_int_64bit
[
(min_int, "min_int", 8);
(min_int + 1, "min_int + 1", 8);
(-1, "-1", 8);
(0, "0", 8);
(1, "1", 8);
(max_int - 1, "max_int - 1", 8);
(max_int, "max_int", 8);
];
let bad_buf_max =
bin_dump bin_int64_bits.writer (Int64.succ (Int64.of_int max_int))
in
"Int_overflow (positive)" @? expect_read_error Int_overflow 0 (fun () ->
Read.bin_read_int_64bit bad_buf_max ~pos_ref:(ref 0));
let bad_buf_min =
bin_dump bin_int64_bits.writer (Int64.pred (Int64.of_int min_int))
in
"Int_overflow (negative)" @? expect_read_error Int_overflow 0 (fun () ->
Read.bin_read_int_64bit bad_buf_min ~pos_ref:(ref 0));
);
"network16_int" >::
(fun () ->
check_all 2 "network16_int"
Read.bin_read_network16_int
Write.bin_write_network16_int
[
(* No negative numbers - ambiguous on 64bit platforms *)
(0, "0", 2);
(1, "1", 2);
];
);
"network32_int" >::
(fun () ->
check_all 4 "network32_int"
Read.bin_read_network32_int
Write.bin_write_network32_int
[
(* No negative numbers - ambiguous on 64bit platforms *)
(0, "0", 4);
(1, "1", 4);
];
);
"network32_int32" >::
(fun () ->
check_all 4 "network32_int32"
Read.bin_read_network32_int32
Write.bin_write_network32_int32
[
(-1l, "-1", 4);
(0l, "0", 4);
(1l, "1", 4);
];
);
"network64_int" >::
(fun () ->
check_all 8 "network64_int"
Read.bin_read_network64_int
Write.bin_write_network64_int
[
(-1, "-1", 8);
(0, "0", 8);
(1, "1", 8);
];
);
"network64_int64" >::
(fun () ->
check_all 8 "network64_int64"
Read.bin_read_network64_int64
Write.bin_write_network64_int64
[
(-1L, "-1", 8);
(0L, "0", 8);
(1L, "1", 8);
];
);
]
(* Type definitions shared with the [Inline] module below, plus a round-trip
   test of [Utils.bin_dump].  Every type derives its bin_prot size, writer,
   reader and type-class values via [@@deriving bin_io]; the generated names
   ([bin_els], [bin_size_els], [bin_read_els], ...) are used further down. *)
module Common = struct
(* Simple product type. *)
type tuple = float * string * int64
[@@deriving bin_io]
(* Polymorphic record, including an option field. *)
type 'a record = { a : int; b : 'a; c : 'a option }
[@@deriving bin_io]
(* Single-field record (exercises the degenerate record case). *)
type 'a singleton_record = { y : 'a }
[@@deriving bin_io]
(* Variant carrying an inline record payload; the mutable field checks that
   mutability does not affect the generated serializers. *)
type 'a inline_record =
| IR of { mutable ir_a : int; ir_b : 'a; ir_c : 'a option }
| Other of int
[@@deriving bin_io]
(* Ordinary variant and the equivalent polymorphic variant. *)
type 'a sum = Foo | Bar of int | Bla of 'a * string
[@@deriving bin_io]
type 'a variant = [ `Foo | `Bar of int | `Bla of 'a * string ]
[@@deriving bin_io]
(* Deeply nested application of the types above, exercising the code
   generated for applications of parameterized type constructors. *)
type 'a poly_app = (tuple * int singleton_record * 'a record * 'a inline_record) variant sum list
[@@deriving bin_io]
(* Mutually recursive types. *)
type 'a rec_t1 = RecFoo1 of 'a rec_t2
and 'a rec_t2 = RecFoo2 of 'a poly_app * 'a rec_t1 | RecNone
[@@deriving bin_io]
(* Alias (exercises deriving through a type synonym). *)
type 'a poly_id = 'a rec_t1
[@@deriving bin_io]
type el = float poly_id
[@@deriving bin_io]
type els = el array
[@@deriving bin_io]
let test =
"Bin_prot_common" >:::
[
"Utils.bin_dump" >::
(fun () ->
(* Build a single deeply nested value that touches all of the types
   declared above. *)
let el =
let record = { a = 17; b = 2.78; c = None } in
let inline_record = IR {ir_a = 18; ir_b = 43210.; ir_c = None} in
let arg = (3.1, "foo", 42L), { y = 4321 }, record, inline_record in
let variant = `Bla (arg, "fdsa") in
let sum = Bla (variant, "asdf") in
let poly_app = [ sum ] in
RecFoo1 (RecFoo2 (poly_app, RecFoo1 RecNone))
in
let els = Array.make 10 el in
(* [~header:true] makes [bin_dump] prepend the payload size, read back
   below with [bin_read_int_64bit] (hence the expected position of 8). *)
let buf = bin_dump ~header:true bin_els.writer els in
let pos_ref = ref 0 in
let els_len = Read.bin_read_int_64bit buf ~pos_ref in
"pos_ref for length incorrect" @? (!pos_ref = 8);
"els_len disagrees with bin_size" @? (els_len = bin_size_els els);
(* Round trip: reading the dumped buffer must reproduce the value. *)
let new_els = bin_read_els buf ~pos_ref in
"new_els and els not equal" @? (els = new_els)
);
]
end
(* Tests that the inline ppx extension points ([%bin_writer ...],
   [%bin_reader ...], [%bin_type_class ...]) agree with the serializers
   generated by [@@deriving bin_io] on the corresponding type declaration. *)
module Inline = struct
(* For every value in [xs], build a test case checking that the inline
   writer, reader and type class produce the same sizes, bytes, values and
   final read positions as the derived type class [derived_tc]. *)
let compatible xs derived_tc inline_writer inline_reader inline_tc =
ListLabels.map xs ~f:(fun x ->
"" >:: fun () ->
(* Sizes must agree before any serialization is attempted. *)
"incorrect size from inline writer"
@? (derived_tc.writer.size x = inline_writer.size x);
"incorrect size from inline type class"
@? (derived_tc.writer.size x = inline_tc.writer.size x);
(* The serialized bytes must be identical. *)
let buf = bin_dump derived_tc.writer x in
"incorrect bin dump from inline writer"
@? (buf = bin_dump inline_writer x);
"incorrect bin dump from inline type class"
@? (buf = bin_dump inline_tc.writer x);
(* Read [buf] back and report both the value and how far the reader
   advanced, so position bookkeeping is checked too. *)
let val_and_len reader =
let pos_ref = ref 0 in
let x = reader.read buf ~pos_ref in
(x, !pos_ref)
in
let (_, len) = val_and_len derived_tc.reader in
let (x', len') = val_and_len inline_reader in
"incorrect value from inline reader" @? (x = x');
"incorrect length from inline reader" @? (len = len');
let (x', len') = val_and_len inline_tc.reader in
"incorrect value from inline type class" @? (x = x');
"incorrect length from inline type class" @? (len = len');
)
;;
(* Extension of a polymorphic variant from [Common] with an extra case. *)
type variant_extension = [ float Common.variant | `Baz of int * float ]
[@@deriving bin_io]
let test =
"Bin_prot.Inline" >::: [
(* Inline expressions naming the declared type... *)
"simple tuple" >:::
compatible
[(50.5, "hello", 1234L)]
Common.bin_tuple
[%bin_writer : Common.tuple]
[%bin_reader : Common.tuple]
[%bin_type_class : Common.tuple];
(* ...and inline expressions respelling the type structurally. *)
"redefine tuple" >:::
compatible
[(50.5, "hello", 1234L)]
Common.bin_tuple
[%bin_writer : float * string * int64]
[%bin_reader : float * string * int64]
[%bin_type_class : float * string * int64];
"simple variant" >:::
compatible
[`Foo; `Bar 8; `Bla (33.3, "world")]
(Common.bin_variant bin_float)
[%bin_writer : float Common.variant]
[%bin_reader : float Common.variant]
[%bin_type_class : float Common.variant];
"redefine variant" >:::
compatible
[`Foo; `Bar 8; `Bla (33.3, "world")]
(Common.bin_variant bin_float)
[%bin_writer : [`Foo | `Bar of int | `Bla of float * string]]
[%bin_reader : [`Foo | `Bar of int | `Bla of float * string]]
[%bin_type_class : [`Foo | `Bar of int | `Bla of float * string]];
(* Variant extension: the inline type must serialize identically to the
   derived [variant_extension] above. *)
"variant_extension" >:::
compatible
[`Foo; `Bar 8; `Bla (33.3, "world"); `Baz (17, 17.71)]
bin_variant_extension
[%bin_writer : [ float Common.variant | `Baz of int * float ]]
[%bin_reader : [ float Common.variant | `Baz of int * float ]]
[%bin_type_class : [ float Common.variant | `Baz of int * float ]];
(* Polymorphic variant used as a type parameter of another type. *)
"sub variant" >:::
compatible
[ { Common. y = `Foo }; { y = `Bar 42 }; { y = `Bla (42, "world") } ]
(Common.bin_singleton_record (Common.bin_variant bin_int))
[%bin_writer : [`Foo | `Bar of int | `Bla of int * string] Common.singleton_record]
[%bin_reader : [`Foo | `Bar of int | `Bla of int * string] Common.singleton_record]
[%bin_type_class : [`Foo | `Bar of int | `Bla of int * string] Common.singleton_record];
]
;;
end
| null | https://raw.githubusercontent.com/avsm/mirage-duniverse/983e115ff5a9fb37e3176c373e227e9379f0d777/ocaml_modules/ppx_bin_prot/test/bin_prot_test.ml | ocaml | subnormal
subnormal
Test that the binary forms of [float array] and [float_array] are the same
Check that the canonical closures used in the short circuit test of float
arrays are indeed allocated closures as opposed to [compare] for example
which is a primitive. Even if it looks like a tautology, it is not. (for
example, [compare == compare] is false. | open Bigarray
open Printf
open OUnit
open Bin_prot
open Common
open Utils
open ReadError
open Type_class
open Bin_prot.Std
module Bigstring = struct
type t = buf
let create = create_buf
let of_string str =
let len = String.length str in
let buf = create len in
blit_string_buf str buf ~len;
buf
let length buf = Array1.dim buf
end
let expect_exc test_exc f =
try
ignore (f ());
false
with
| exc -> test_exc exc
let expect_bounds_error f =
let test_exc = function
| Invalid_argument "index out of bounds" -> true
| _ -> false
in
expect_exc test_exc f
let expect_buffer_short f =
let exc = Buffer_short in
expect_exc ((=) exc) f
let expect_read_error exp_re exp_pos f =
let test_exc = function
| Read_error (re, pos) -> exp_re = re && exp_pos = pos
| _ -> false
in
expect_exc test_exc f
let expect_no_error f =
try
ignore (f ());
true
with
| _ -> false
let check_write_bounds_checks name buf write arg =
(name ^ ": negative bound") @?
expect_bounds_error (fun () -> write buf ~pos:~-1 arg);
(name ^ ": positive bound") @?
expect_buffer_short (fun () -> write buf ~pos:(Bigstring.length buf) arg)
let check_read_bounds_checks name buf read =
(name ^ ": negative bound") @?
expect_bounds_error (fun () -> read buf ~pos_ref:(ref ~-1));
(name ^ ": positive bound") @?
expect_buffer_short (fun () -> read buf ~pos_ref:(ref (Bigstring.length buf)))
let check_write_result name buf pos write arg exp_len =
let res_pos = write buf ~pos arg in
sprintf "%s: returned wrong write position (%d, expected %d)"
name res_pos (pos + exp_len)
@? (res_pos = pos + exp_len)
let check_read_result name buf pos read exp_ret exp_len =
let pos_ref = ref pos in
(name ^ ": returned wrong result") @? (read buf ~pos_ref = exp_ret);
sprintf "%s: returned wrong read position (%d, expected %d)"
name !pos_ref (pos + exp_len)
@? (!pos_ref - pos = exp_len)
let check_all_args tp_name read write buf args =
let write_name = "write_" ^ tp_name ^ " " in
let read_name = "read_" ^ tp_name ^ " " in
let buf_len = Bigstring.length buf in
let act (arg, str_arg, arg_len) =
let write_name_arg = write_name ^ str_arg in
let read_name_arg = read_name ^ str_arg in
for pos = 0 to 8 do
check_write_bounds_checks write_name buf write arg;
check_read_bounds_checks read_name buf read;
check_write_result write_name_arg buf pos write arg arg_len;
check_read_result read_name_arg buf pos read arg arg_len;
done;
(write_name_arg ^ ": write failed near bound") @? expect_no_error (fun () ->
write buf ~pos:(buf_len - arg_len) arg);
(read_name_arg ^ ": read failed near bound") @? expect_no_error (fun () ->
if read buf ~pos_ref:(ref (buf_len - arg_len)) <> arg then
failwith (read_name_arg ^ ": read near bound returned wrong result"));
let small_buf = Array1.sub buf 0 (buf_len - 1) in
(write_name_arg ^ ": write exceeds bound") @? expect_buffer_short (fun () ->
write small_buf ~pos:(buf_len - arg_len) arg);
(read_name_arg ^ ": read exceeds bound") @? expect_buffer_short (fun () ->
read small_buf ~pos_ref:(ref (buf_len - arg_len)))
in
List.iter act args
let mk_buf n =
let bstr = Bigstring.create n in
for i = 0 to n - 1 do bstr.{i} <- '\255' done;
bstr
let check_all extra_buf_size tp_name read write args =
let buf_len = extra_buf_size + 8 in
let buf = mk_buf buf_len in
match args with
| [] -> assert false
| (arg, _, _) :: _ ->
let write_name = "write_" ^ tp_name in
check_write_bounds_checks write_name buf write arg;
let read_name = "read_" ^ tp_name in
check_read_bounds_checks read_name buf read;
check_all_args tp_name read write buf args
let random_string n =
String.init n (fun _ -> Char.chr (Random.int 256))
let mk_int_test ~n ~len = n, Printf.sprintf "%x" n, len
let mk_nat0_test ~n ~len = Nat0.of_int n, Printf.sprintf "%x" n, len
let mk_float_test n = n, Printf.sprintf "%g" n, 8
let mk_int32_test ~n ~len = n, Printf.sprintf "%lx" n, len
let mk_int64_test ~n ~len = n, Printf.sprintf "%Lx" n, len
let mk_nativeint_test ~n ~len = n, Printf.sprintf "%nx" n, len
let mk_gen_float_vec tp n =
let vec = Array1.create tp fortran_layout n in
for i = 1 to n do
vec.{i} <- float i
done;
vec
let mk_float32_vec = mk_gen_float_vec float32
let mk_float64_vec = mk_gen_float_vec float64
let mk_bigstring n =
let bstr = Array1.create char c_layout n in
for i = 0 to n - 1 do
bstr.{i} <- Char.chr (Random.int 256)
done;
bstr
let mk_gen_float_mat tp m n =
let mat = Array2.create tp fortran_layout m n in
let fn = float m in
for c = 1 to n do
let ofs = float (c - 1) *. fn in
for r = 1 to m do
mat.{r, c} <- ofs +. float r
done;
done;
mat
let mk_float32_mat = mk_gen_float_mat float32
let mk_float64_mat = mk_gen_float_mat float64
let test =
"Bin_prot" >:::
[
"unit" >::
(fun () ->
check_all 1 "unit" Read.bin_read_unit Write.bin_write_unit
[
((), "()", 1);
];
);
"bool" >::
(fun () ->
check_all 1 "bool" Read.bin_read_bool Write.bin_write_bool
[
(true, "true", 1);
(false, "false", 1);
];
);
"string" >::
(fun () ->
check_all 66000 "string" Read.bin_read_string Write.bin_write_string
[
("", "\"\"", 1);
(random_string 1, "random 1", 1 + 1);
(random_string 10, "random 10", 10 + 1);
(random_string 127, "random 127", 127 + 1);
(random_string 128, "long 128", 128 + 3);
(random_string 65535, "long 65535", 65535 + 3);
(random_string 65536, "long 65536", 65536 + 5);
];
if Sys.word_size = 32 then
let bad_buf = Bigstring.of_string "\253\252\255\255\000" in
"String_too_long" @? expect_read_error String_too_long 0 (fun () ->
Read.bin_read_string bad_buf ~pos_ref:(ref 0));
let bad_buf = Bigstring.of_string "\253\251\255\255\000" in
"StringMaximimum" @? expect_buffer_short (fun () ->
Read.bin_read_string bad_buf ~pos_ref:(ref 0))
else
let bad_buf = Bigstring.of_string "\252\248\255\255\255\255\255\255\001" in
"String_too_long" @? expect_read_error String_too_long 0 (fun () ->
Read.bin_read_string bad_buf ~pos_ref:(ref 0));
let bad_buf = Bigstring.of_string "\252\247\255\255\255\255\255\255\001" in
"StringMaximimum" @? expect_buffer_short (fun () ->
Read.bin_read_string bad_buf ~pos_ref:(ref 0))
);
"char" >::
(fun () ->
check_all 1 "char" Read.bin_read_char Write.bin_write_char
[
('x', "x", 1);
('y', "y", 1);
];
);
"int" >::
(fun () ->
let small_int_tests =
[
mk_int_test ~n:~-0x01 ~len:2;
mk_int_test ~n: 0x00 ~len:1;
mk_int_test ~n: 0x01 ~len:1;
mk_int_test ~n:0x7e ~len:1;
mk_int_test ~n:0x7f ~len:1;
mk_int_test ~n:0x80 ~len:3;
mk_int_test ~n:0x81 ~len:3;
mk_int_test ~n:0x7ffe ~len:3;
mk_int_test ~n:0x7fff ~len:3;
mk_int_test ~n:0x8000 ~len:5;
mk_int_test ~n:0x8001 ~len:5;
mk_int_test ~n:0x3ffffffe ~len:5;
mk_int_test ~n:0x3fffffff ~len:5;
mk_int_test ~n:~-0x7f ~len:2;
mk_int_test ~n:~-0x80 ~len:2;
mk_int_test ~n:~-0x81 ~len:3;
mk_int_test ~n:~-0x82 ~len:3;
mk_int_test ~n:~-0x7fff ~len:3;
mk_int_test ~n:~-0x8000 ~len:3;
mk_int_test ~n:~-0x8001 ~len:5;
mk_int_test ~n:~-0x8002 ~len:5;
mk_int_test ~n:~-0x40000001 ~len:5;
mk_int_test ~n:~-0x40000000 ~len:5;
]
in
let all_int_tests =
if Sys.word_size = 32 then small_int_tests
else
mk_int_test ~n:(int_of_string "0x7ffffffe") ~len:5 ::
mk_int_test ~n:(int_of_string "0x7fffffff") ~len:5 ::
mk_int_test ~n:(int_of_string "0x80000000") ~len:9 ::
mk_int_test ~n:(int_of_string "0x80000001") ~len:9 ::
mk_int_test ~n:max_int ~len:9 ::
mk_int_test ~n:(int_of_string "-0x000000007fffffff") ~len:5 ::
mk_int_test ~n:(int_of_string "-0x0000000080000000") ~len:5 ::
mk_int_test ~n:(int_of_string "-0x0000000080000001") ~len:9 ::
mk_int_test ~n:(int_of_string "-0x0000000080000002") ~len:9 ::
mk_int_test ~n:min_int ~len:9 ::
small_int_tests
in
check_all 9 "int" Read.bin_read_int Write.bin_write_int
all_int_tests;
let bad_buf = Bigstring.of_string "\132" in
"Int_code" @? expect_read_error Int_code 0 (fun () ->
Read.bin_read_int bad_buf ~pos_ref:(ref 0));
if Sys.word_size = 32 then
let bad_buf = Bigstring.of_string "\253\255\255\255\064" in
"Int_overflow (positive)" @? expect_read_error Int_overflow 0 (fun () ->
Read.bin_read_int bad_buf ~pos_ref:(ref 0));
let bad_buf = Bigstring.of_string "\253\255\255\255\191" in
"Int_overflow (negative)" @? expect_read_error Int_overflow 0 (fun () ->
Read.bin_read_int bad_buf ~pos_ref:(ref 0))
else
let bad_buf = Bigstring.of_string "\252\255\255\255\255\255\255\255\064" in
"Int_overflow (positive)" @? expect_read_error Int_overflow 0 (fun () ->
Read.bin_read_int bad_buf ~pos_ref:(ref 0));
let bad_buf = Bigstring.of_string "\252\255\255\255\255\255\255\255\191" in
"Int_overflow (negative)" @? expect_read_error Int_overflow 0 (fun () ->
Read.bin_read_int bad_buf ~pos_ref:(ref 0))
);
"nat0" >::
(fun () ->
let small_int_tests =
[
mk_nat0_test ~n:0x00 ~len:1;
mk_nat0_test ~n:0x01 ~len:1;
mk_nat0_test ~n:0x7e ~len:1;
mk_nat0_test ~n:0x7f ~len:1;
mk_nat0_test ~n:0x80 ~len:3;
mk_nat0_test ~n:0x81 ~len:3;
mk_nat0_test ~n:0x7fff ~len:3;
mk_nat0_test ~n:0x8000 ~len:3;
mk_nat0_test ~n:0xffff ~len:3;
mk_nat0_test ~n:0x10000 ~len:5;
mk_nat0_test ~n:0x10001 ~len:5;
mk_nat0_test ~n:0x3ffffffe ~len:5;
mk_nat0_test ~n:0x3fffffff ~len:5;
]
in
let all_int_tests =
if Sys.word_size = 32 then small_int_tests
else
mk_nat0_test ~n:(int_of_string "0x7fffffff") ~len:5 ::
mk_nat0_test ~n:(int_of_string "0x80000000") ~len:5 ::
mk_nat0_test ~n:(int_of_string "0xffffffff") ~len:5 ::
mk_nat0_test ~n:(int_of_string "0x100000000") ~len:9 ::
mk_nat0_test ~n:(int_of_string "0x100000001") ~len:9 ::
mk_nat0_test ~n:max_int ~len:9 ::
small_int_tests
in
check_all 9 "nat0" Read.bin_read_nat0 Write.bin_write_nat0
all_int_tests;
let bad_buf = Bigstring.of_string "\128" in
"Nat0_code" @? expect_read_error Nat0_code 0 (fun () ->
Read.bin_read_nat0 bad_buf ~pos_ref:(ref 0));
if Sys.word_size = 32 then
let bad_buf = Bigstring.of_string "\253\255\255\255\064" in
"Nat0_overflow" @? expect_read_error Nat0_overflow 0 (fun () ->
Read.bin_read_nat0 bad_buf ~pos_ref:(ref 0))
else
let bad_buf = Bigstring.of_string "\252\255\255\255\255\255\255\255\064" in
"Nat0_overflow" @? expect_read_error Nat0_overflow 0 (fun () ->
Read.bin_read_nat0 bad_buf ~pos_ref:(ref 0))
);
"float" >::
(fun () ->
let float_tests =
[
mk_float_test 0.;
mk_float_test (-0.);
mk_float_test (-1.);
mk_float_test 1.;
mk_float_test infinity;
mk_float_test (-.infinity);
mk_float_test 3.141595;
]
in
check_all 8 "float" Read.bin_read_float Write.bin_write_float
float_tests
);
"int32" >::
(fun () ->
let int32_tests =
[
mk_int32_test ~n:(-0x01l) ~len:2;
mk_int32_test ~n: 0x00l ~len:1;
mk_int32_test ~n: 0x01l ~len:1;
mk_int32_test ~n:0x7el ~len:1;
mk_int32_test ~n:0x7fl ~len:1;
mk_int32_test ~n:0x80l ~len:3;
mk_int32_test ~n:0x81l ~len:3;
mk_int32_test ~n:0x7ffel ~len:3;
mk_int32_test ~n:0x7fffl ~len:3;
mk_int32_test ~n:0x8000l ~len:5;
mk_int32_test ~n:0x8001l ~len:5;
mk_int32_test ~n:0x7ffffffel ~len:5;
mk_int32_test ~n:0x7fffffffl ~len:5;
mk_int32_test ~n:(-0x7fl) ~len:2;
mk_int32_test ~n:(-0x80l) ~len:2;
mk_int32_test ~n:(-0x81l) ~len:3;
mk_int32_test ~n:(-0x82l) ~len:3;
mk_int32_test ~n:(-0x7fffl) ~len:3;
mk_int32_test ~n:(-0x8000l) ~len:3;
mk_int32_test ~n:(-0x8001l) ~len:5;
mk_int32_test ~n:(-0x8002l) ~len:5;
mk_int32_test ~n:(-0x80000001l) ~len:5;
mk_int32_test ~n:(-0x80000000l) ~len:5;
]
in
check_all 5 "int32" Read.bin_read_int32 Write.bin_write_int32
int32_tests;
let bad_buf = Bigstring.of_string "\132" in
"Int32_code" @? expect_read_error Int32_code 0 (fun () ->
Read.bin_read_int32 bad_buf ~pos_ref:(ref 0))
);
"int64" >::
(fun () ->
let int64_tests =
[
mk_int64_test ~n:(-0x01L) ~len:2;
mk_int64_test ~n: 0x00L ~len:1;
mk_int64_test ~n: 0x01L ~len:1;
mk_int64_test ~n:0x7eL ~len:1;
mk_int64_test ~n:0x7fL ~len:1;
mk_int64_test ~n:0x80L ~len:3;
mk_int64_test ~n:0x81L ~len:3;
mk_int64_test ~n:0x7ffeL ~len:3;
mk_int64_test ~n:0x7fffL ~len:3;
mk_int64_test ~n:0x8000L ~len:5;
mk_int64_test ~n:0x8001L ~len:5;
mk_int64_test ~n:0x7ffffffeL ~len:5;
mk_int64_test ~n:0x7fffffffL ~len:5;
mk_int64_test ~n:0x80000000L ~len:9;
mk_int64_test ~n:0x80000001L ~len:9;
mk_int64_test ~n:0x7ffffffffffffffeL ~len:9;
mk_int64_test ~n:0x7fffffffffffffffL ~len:9;
mk_int64_test ~n:(-0x7fL) ~len:2;
mk_int64_test ~n:(-0x80L) ~len:2;
mk_int64_test ~n:(-0x81L) ~len:3;
mk_int64_test ~n:(-0x82L) ~len:3;
mk_int64_test ~n:(-0x7fffL) ~len:3;
mk_int64_test ~n:(-0x8000L) ~len:3;
mk_int64_test ~n:(-0x8001L) ~len:5;
mk_int64_test ~n:(-0x8002L) ~len:5;
mk_int64_test ~n:(-0x7fffffffL) ~len:5;
mk_int64_test ~n:(-0x80000000L) ~len:5;
mk_int64_test ~n:(-0x80000001L) ~len:9;
mk_int64_test ~n:(-0x80000002L) ~len:9;
mk_int64_test ~n:(-0x8000000000000001L) ~len:9;
mk_int64_test ~n:(-0x8000000000000000L) ~len:9;
]
in
check_all 9 "int64" Read.bin_read_int64 Write.bin_write_int64
int64_tests;
let bad_buf = Bigstring.of_string "\132" in
"Int64_code" @? expect_read_error Int64_code 0 (fun () ->
Read.bin_read_int64 bad_buf ~pos_ref:(ref 0))
);
"nativeint" >::
(fun () ->
let small_nativeint_tests =
[
mk_nativeint_test ~n:(-0x01n) ~len:2;
mk_nativeint_test ~n: 0x00n ~len:1;
mk_nativeint_test ~n: 0x01n ~len:1;
mk_nativeint_test ~n:0x7en ~len:1;
mk_nativeint_test ~n:0x7fn ~len:1;
mk_nativeint_test ~n:0x80n ~len:3;
mk_nativeint_test ~n:0x81n ~len:3;
mk_nativeint_test ~n:0x7ffen ~len:3;
mk_nativeint_test ~n:0x7fffn ~len:3;
mk_nativeint_test ~n:0x8000n ~len:5;
mk_nativeint_test ~n:0x8001n ~len:5;
mk_nativeint_test ~n:0x7ffffffen ~len:5;
mk_nativeint_test ~n:0x7fffffffn ~len:5;
mk_nativeint_test ~n:(-0x7fn) ~len:2;
mk_nativeint_test ~n:(-0x80n) ~len:2;
mk_nativeint_test ~n:(-0x81n) ~len:3;
mk_nativeint_test ~n:(-0x82n) ~len:3;
mk_nativeint_test ~n:(-0x7fffn) ~len:3;
mk_nativeint_test ~n:(-0x8000n) ~len:3;
mk_nativeint_test ~n:(-0x8001n) ~len:5;
mk_nativeint_test ~n:(-0x8002n) ~len:5;
mk_nativeint_test ~n:(-0x7fffffffn) ~len:5;
mk_nativeint_test ~n:(-0x80000000n) ~len:5;
]
in
let nativeint_tests =
if Sys.word_size = 32 then small_nativeint_tests
else
mk_nativeint_test ~n:0x80000000n ~len:9 ::
mk_nativeint_test ~n:0x80000001n ~len:9 ::
mk_nativeint_test ~n:(-0x80000001n) ~len:9 ::
mk_nativeint_test ~n:(-0x80000002n) ~len:9 ::
mk_nativeint_test ~n:(Nativeint.of_string "0x7ffffffffffffffe") ~len:9 ::
mk_nativeint_test ~n:(Nativeint.of_string "0x7fffffffffffffff") ~len:9 ::
mk_nativeint_test ~n:(Nativeint.of_string "-0x8000000000000001") ~len:9 ::
mk_nativeint_test ~n:(Nativeint.of_string "-0x8000000000000000") ~len:9 ::
small_nativeint_tests
in
let size = if Sys.word_size = 32 then 5 else 9 in
check_all size "nativeint"
Read.bin_read_nativeint Write.bin_write_nativeint
nativeint_tests;
let bad_buf = Bigstring.of_string "\251" in
"Nativeint_code" @? expect_read_error Nativeint_code 0 (fun () ->
Read.bin_read_nativeint bad_buf ~pos_ref:(ref 0));
if Sys.word_size = 32 then
let bad_buf = Bigstring.of_string "\252\255\255\255\255\255\255\255\255" in
"Nativeint_code (overflow)" @? expect_read_error Nativeint_code 0 (fun () ->
Read.bin_read_nativeint bad_buf ~pos_ref:(ref 0))
);
"ref" >::
(fun () ->
check_all 1 "ref"
(Read.bin_read_ref Read.bin_read_int)
(Write.bin_write_ref Write.bin_write_int)
[(ref 42, "ref 42", 1)];
);
"option" >::
(fun () ->
check_all 2 "option"
(Read.bin_read_option Read.bin_read_int)
(Write.bin_write_option Write.bin_write_int)
[
(Some 42, "Some 42", 2);
(None, "None", 1);
];
);
"pair" >::
(fun () ->
check_all 9 "pair"
(Read.bin_read_pair Read.bin_read_float Read.bin_read_int)
(Write.bin_write_pair Write.bin_write_float Write.bin_write_int)
[((3.141, 42), "(3.141, 42)", 9)];
);
"triple" >::
(fun () ->
check_all 14 "triple"
(Read.bin_read_triple
Read.bin_read_float Read.bin_read_int Read.bin_read_string)
(Write.bin_write_triple
Write.bin_write_float Write.bin_write_int Write.bin_write_string)
[((3.141, 42, "test"), "(3.141, 42, \"test\")", 14)];
);
"list" >::
(fun () ->
check_all 12 "list"
(Read.bin_read_list Read.bin_read_int)
(Write.bin_write_list Write.bin_write_int)
[
([42; -1; 200; 33000], "[42; -1; 200; 33000]", 12);
([], "[]", 1);
];
);
"array" >::
(fun () ->
let bin_read_int_array = Read.bin_read_array Read.bin_read_int in
check_all 12 "array"
bin_read_int_array
(Write.bin_write_array Write.bin_write_int)
[
([| 42; -1; 200; 33000 |], "[|42; -1; 200; 33000|]", 12);
([||], "[||]", 1);
];
if Sys.word_size = 32 then
let bad_buf = Bigstring.of_string "\253\000\000\064\000" in
"Array_too_long" @? expect_read_error Array_too_long 0 (fun () ->
bin_read_int_array bad_buf ~pos_ref:(ref 0));
let bad_buf = Bigstring.of_string "\253\255\255\063\000" in
"ArrayMaximimum" @? expect_buffer_short (fun () ->
bin_read_int_array bad_buf ~pos_ref:(ref 0))
else
let bad_buf = Bigstring.of_string "\252\000\000\000\000\000\000\064\000" in
"Array_too_long" @? expect_read_error Array_too_long 0 (fun () ->
bin_read_int_array bad_buf ~pos_ref:(ref 0));
let bad_buf = Bigstring.of_string "\252\255\255\255\255\255\255\063\000" in
"ArrayMaximimum" @? expect_buffer_short (fun () ->
bin_read_int_array bad_buf ~pos_ref:(ref 0))
);
"float_array" >::
(fun () ->
check_all 33 "float_array"
Read.bin_read_float_array Write.bin_write_float_array
[
([| 42.; -1.; 200.; 33000. |], "[|42.; -1.; 200.; 33000.|]", 33);
([||], "[||]", 1);
];
if Sys.word_size = 32 then
let bad_buf = Bigstring.of_string "\253\000\000\032\000" in
"Array_too_long (float)" @? expect_read_error Array_too_long 0 (fun () ->
Read.bin_read_float_array bad_buf ~pos_ref:(ref 0));
let bad_buf = Bigstring.of_string "\253\255\255\031\000" in
"ArrayMaximimum (float)" @? expect_buffer_short (fun () ->
Read.bin_read_float_array bad_buf ~pos_ref:(ref 0))
else
let bad_buf = Bigstring.of_string "\252\000\000\000\000\000\000\064\000" in
"Array_too_long (float)" @? expect_read_error Array_too_long 0 (fun () ->
Read.bin_read_float_array bad_buf ~pos_ref:(ref 0));
let bad_buf = Bigstring.of_string "\252\255\255\255\255\255\255\063\000" in
"ArrayMaximimum (float)" @? expect_buffer_short (fun () ->
Read.bin_read_float_array bad_buf ~pos_ref:(ref 0));
let arrays =
let rec loop acc len =
if len < 0 then acc
else
let a = Array.init len (fun i -> float_of_int (i + len)) in
let txt = Printf.sprintf "float array, len = %d" len in
let buf = len * 8 + Size.bin_size_nat0 (Nat0.unsafe_of_int len) in
loop ((a, txt, buf) :: acc) (len - 1)
in
loop [] 255 in
let len = 255 * 8 + Size.bin_size_nat0 (Nat0.unsafe_of_int 255) in
check_all len "float array -> float_array"
Read.bin_read_float_array (Write.bin_write_array Write.bin_write_float)
arrays;
check_all len "float_array -> float array"
(Read.bin_read_array Read.bin_read_float) (Write.bin_write_float_array)
arrays;
assert (bin_write_float == bin_write_float);
assert (bin_read_float == bin_read_float);
assert (bin_size_float == bin_size_float);
);
"hashtbl" >::
(fun () ->
let bindings = List.rev [(42, 3.); (17, 2.); (42, 4.)] in
let htbl = Hashtbl.create (List.length bindings) in
List.iter (fun (k, v) -> Hashtbl.add htbl k v) bindings;
check_all 28 "hashtbl"
(Read.bin_read_hashtbl Read.bin_read_int Read.bin_read_float)
(Write.bin_write_hashtbl Write.bin_write_int Write.bin_write_float)
[
(htbl, "[(42, 3.); (17, 2.); (42, 4.)]", 28);
(Hashtbl.create 0, "[]", 1)
];
);
"float32_vec" >::
(fun () ->
let n = 128 in
let header = 3 in
let size = header + n * 4 in
let vec = mk_float32_vec n in
check_all size "float32_vec"
Read.bin_read_float32_vec
Write.bin_write_float32_vec
[
(vec, "[| ... |]", size);
(mk_float32_vec 0, "[||]", 1);
]
);
"float64_vec" >::
(fun () ->
let n = 127 in
let header = 1 in
let size = header + n * 8 in
let vec = mk_float64_vec n in
check_all size "float64_vec"
Read.bin_read_float64_vec
Write.bin_write_float64_vec
[
(vec, "[| ... |]", size);
(mk_float64_vec 0, "[||]", 1);
]
);
"vec" >::
(fun () ->
let n = 128 in
let header = 3 in
let size = header + n * 8 in
let vec = mk_float64_vec n in
check_all size "vec"
Read.bin_read_vec
Write.bin_write_vec
[
(vec, "[| ... |]", size);
(mk_float64_vec 0, "[||]", 1);
]
);
"float32_mat" >::
(fun () ->
let m = 128 in
let n = 127 in
let header = 3 + 1 in
let size = header + m * n * 4 in
let mat = mk_float32_mat m n in
check_all size "float32_mat"
Read.bin_read_float32_mat
Write.bin_write_float32_mat
[
(mat, "[| ... |]", size);
(mk_float32_mat 0 0, "[||]", 2);
]
);
"float64_mat" >::
(fun () ->
let m = 10 in
let n = 12 in
let header = 1 + 1 in
let size = header + m * n * 8 in
let mat = mk_float64_mat m n in
check_all size "float64_mat"
Read.bin_read_float64_mat
Write.bin_write_float64_mat
[
(mat, "[| ... |]", size);
(mk_float64_mat 0 0, "[||]", 2);
]
);
"mat" >::
(fun () ->
let m = 128 in
let n = 128 in
let header = 3 + 3 in
let size = header + m * n * 8 in
let mat = mk_float64_mat m n in
check_all size "mat"
Read.bin_read_mat
Write.bin_write_mat
[
(mat, "[| ... |]", size);
(mk_float64_mat 0 0, "[||]", 2);
]
);
"bigstring" >::
(fun () ->
let n = 128 in
let header = 3 in
let size = header + n in
let bstr = mk_bigstring n in
check_all size "bigstring"
Read.bin_read_bigstring
Write.bin_write_bigstring
[
(bstr, "[| ... |]", size);
(mk_bigstring 0, "[||]", 1);
]
);
"bigstring (big)" >::
(fun () ->
[ n ] is a 16bits integer that will be serialized differently depending on
whether it is considered as an integer or an unsigned integer .
whether it is considered as an integer or an unsigned integer. *)
let n = 40_000 in
let header = 3 in
let size = header + n in
let bstr = mk_bigstring n in
check_all size "bigstring"
Read.bin_read_bigstring
Write.bin_write_bigstring
[
(bstr, "[| ... |]", size);
(mk_bigstring 0, "[||]", 1);
]
);
"variant_tag" >::
(fun () ->
check_all 4 "variant_tag"
Read.bin_read_variant_int
Write.bin_write_variant_int
[
((Obj.magic `Foo : int), "`Foo", 4);
((Obj.magic `Bar : int), "`Bar", 4);
];
let bad_buf = Bigstring.of_string "\000\000\000\000" in
"Variant_tag" @? expect_read_error Variant_tag 0 (fun () ->
Read.bin_read_variant_int bad_buf ~pos_ref:(ref 0))
);
"int64_bits" >::
(fun () ->
check_all 8 "int64_bits"
Read.bin_read_int64_bits
Write.bin_write_int64_bits
[
(Int64.min_int, "min_int", 8);
(Int64.add Int64.min_int Int64.one, "min_int + 1", 8);
(Int64.minus_one, "-1", 8);
(Int64.zero, "0", 8);
(Int64.one, "1", 8);
(Int64.sub Int64.max_int Int64.one, "max_int - 1", 8);
(Int64.max_int, "max_int", 8);
];
);
"int_64bit" >::
(fun () ->
check_all 8 "int_64bit"
Read.bin_read_int_64bit
Write.bin_write_int_64bit
[
(min_int, "min_int", 8);
(min_int + 1, "min_int + 1", 8);
(-1, "-1", 8);
(0, "0", 8);
(1, "1", 8);
(max_int - 1, "max_int - 1", 8);
(max_int, "max_int", 8);
];
let bad_buf_max =
bin_dump bin_int64_bits.writer (Int64.succ (Int64.of_int max_int))
in
"Int_overflow (positive)" @? expect_read_error Int_overflow 0 (fun () ->
Read.bin_read_int_64bit bad_buf_max ~pos_ref:(ref 0));
let bad_buf_min =
bin_dump bin_int64_bits.writer (Int64.pred (Int64.of_int min_int))
in
"Int_overflow (negative)" @? expect_read_error Int_overflow 0 (fun () ->
Read.bin_read_int_64bit bad_buf_min ~pos_ref:(ref 0));
);
"network16_int" >::
(fun () ->
check_all 2 "network16_int"
Read.bin_read_network16_int
Write.bin_write_network16_int
[
No negative numbers - ambiguous on 64bit platforms
(0, "0", 2);
(1, "1", 2);
];
);
"network32_int" >::
(fun () ->
check_all 4 "network32_int"
Read.bin_read_network32_int
Write.bin_write_network32_int
[
No negative numbers - ambiguous on 64bit platforms
(0, "0", 4);
(1, "1", 4);
];
);
"network32_int32" >::
(fun () ->
check_all 4 "network32_int32"
Read.bin_read_network32_int32
Write.bin_write_network32_int32
[
(-1l, "-1", 4);
(0l, "0", 4);
(1l, "1", 4);
];
);
"network64_int" >::
(fun () ->
check_all 8 "network64_int"
Read.bin_read_network64_int
Write.bin_write_network64_int
[
(-1, "-1", 8);
(0, "0", 8);
(1, "1", 8);
];
);
"network64_int64" >::
(fun () ->
check_all 8 "network64_int64"
Read.bin_read_network64_int64
Write.bin_write_network64_int64
[
(-1L, "-1", 8);
(0L, "0", 8);
(1L, "1", 8);
];
);
]
module Common = struct
type tuple = float * string * int64
[@@deriving bin_io]
type 'a record = { a : int; b : 'a; c : 'a option }
[@@deriving bin_io]
type 'a singleton_record = { y : 'a }
[@@deriving bin_io]
type 'a inline_record =
| IR of { mutable ir_a : int; ir_b : 'a; ir_c : 'a option }
| Other of int
[@@deriving bin_io]
type 'a sum = Foo | Bar of int | Bla of 'a * string
[@@deriving bin_io]
type 'a variant = [ `Foo | `Bar of int | `Bla of 'a * string ]
[@@deriving bin_io]
type 'a poly_app = (tuple * int singleton_record * 'a record * 'a inline_record) variant sum list
[@@deriving bin_io]
type 'a rec_t1 = RecFoo1 of 'a rec_t2
and 'a rec_t2 = RecFoo2 of 'a poly_app * 'a rec_t1 | RecNone
[@@deriving bin_io]
type 'a poly_id = 'a rec_t1
[@@deriving bin_io]
type el = float poly_id
[@@deriving bin_io]
type els = el array
[@@deriving bin_io]
let test =
"Bin_prot_common" >:::
[
"Utils.bin_dump" >::
(fun () ->
let el =
let record = { a = 17; b = 2.78; c = None } in
let inline_record = IR {ir_a = 18; ir_b = 43210.; ir_c = None} in
let arg = (3.1, "foo", 42L), { y = 4321 }, record, inline_record in
let variant = `Bla (arg, "fdsa") in
let sum = Bla (variant, "asdf") in
let poly_app = [ sum ] in
RecFoo1 (RecFoo2 (poly_app, RecFoo1 RecNone))
in
let els = Array.make 10 el in
let buf = bin_dump ~header:true bin_els.writer els in
let pos_ref = ref 0 in
let els_len = Read.bin_read_int_64bit buf ~pos_ref in
"pos_ref for length incorrect" @? (!pos_ref = 8);
"els_len disagrees with bin_size" @? (els_len = bin_size_els els);
let new_els = bin_read_els buf ~pos_ref in
"new_els and els not equal" @? (els = new_els)
);
]
end
module Inline = struct
let compatible xs derived_tc inline_writer inline_reader inline_tc =
ListLabels.map xs ~f:(fun x ->
"" >:: fun () ->
"incorrect size from inline writer"
@? (derived_tc.writer.size x = inline_writer.size x);
"incorrect size from inline type class"
@? (derived_tc.writer.size x = inline_tc.writer.size x);
let buf = bin_dump derived_tc.writer x in
"incorrect bin dump from inline writer"
@? (buf = bin_dump inline_writer x);
"incorrect bin dump from inline type class"
@? (buf = bin_dump inline_tc.writer x);
let val_and_len reader =
let pos_ref = ref 0 in
let x = reader.read buf ~pos_ref in
(x, !pos_ref)
in
let (_, len) = val_and_len derived_tc.reader in
let (x', len') = val_and_len inline_reader in
"incorrect value from inline reader" @? (x = x');
"incorrect length from inline reader" @? (len = len');
let (x', len') = val_and_len inline_tc.reader in
"incorrect value from inline type class" @? (x = x');
"incorrect length from inline type class" @? (len = len');
)
;;
type variant_extension = [ float Common.variant | `Baz of int * float ]
[@@deriving bin_io]
let test =
"Bin_prot.Inline" >::: [
"simple tuple" >:::
compatible
[(50.5, "hello", 1234L)]
Common.bin_tuple
[%bin_writer : Common.tuple]
[%bin_reader : Common.tuple]
[%bin_type_class : Common.tuple];
"redefine tuple" >:::
compatible
[(50.5, "hello", 1234L)]
Common.bin_tuple
[%bin_writer : float * string * int64]
[%bin_reader : float * string * int64]
[%bin_type_class : float * string * int64];
"simple variant" >:::
compatible
[`Foo; `Bar 8; `Bla (33.3, "world")]
(Common.bin_variant bin_float)
[%bin_writer : float Common.variant]
[%bin_reader : float Common.variant]
[%bin_type_class : float Common.variant];
"redefine variant" >:::
compatible
[`Foo; `Bar 8; `Bla (33.3, "world")]
(Common.bin_variant bin_float)
[%bin_writer : [`Foo | `Bar of int | `Bla of float * string]]
[%bin_reader : [`Foo | `Bar of int | `Bla of float * string]]
[%bin_type_class : [`Foo | `Bar of int | `Bla of float * string]];
"variant_extension" >:::
compatible
[`Foo; `Bar 8; `Bla (33.3, "world"); `Baz (17, 17.71)]
bin_variant_extension
[%bin_writer : [ float Common.variant | `Baz of int * float ]]
[%bin_reader : [ float Common.variant | `Baz of int * float ]]
[%bin_type_class : [ float Common.variant | `Baz of int * float ]];
"sub variant" >:::
compatible
[ { Common. y = `Foo }; { y = `Bar 42 }; { y = `Bla (42, "world") } ]
(Common.bin_singleton_record (Common.bin_variant bin_int))
[%bin_writer : [`Foo | `Bar of int | `Bla of int * string] Common.singleton_record]
[%bin_reader : [`Foo | `Bar of int | `Bla of int * string] Common.singleton_record]
[%bin_type_class : [`Foo | `Bar of int | `Bla of int * string] Common.singleton_record];
]
;;
end
|
9e74f7f29f7b1649e326eb69675f21dcb6657589ea64e049d0dab18feb0338f5 | cbaggers/nineveh | axis.lisp | (in-package :nineveh.graphing)
Based on excellent article here :
;; -high-frequency-functions-using-a-gpu/
;;
;; We don't use dithering on this version
(defun-g axis ((uv :vec2) (xy-range :vec4) (axis-style :vec4))
(let* ((axis-thickness (w axis-style))
(axis-color (v! (s~ axis-style :xyz) 1))
(diff (/ (s~ xy-range :xz) (- (s~ xy-range :yw) (s~ xy-range :xz))))
(uv (+ uv diff)))
(+ (* axis-color (smoothstep axis-thickness 0 (abs (x uv))))
(* axis-color (smoothstep axis-thickness 0 (abs (y uv)))))))
| null | https://raw.githubusercontent.com/cbaggers/nineveh/0a10a84669cd9d1c584f54b9eab062986a5f1c47/graphing/axis.lisp | lisp | -high-frequency-functions-using-a-gpu/
We don't use dithering on this version | (in-package :nineveh.graphing)
Based on excellent article here :
(defun-g axis ((uv :vec2) (xy-range :vec4) (axis-style :vec4))
(let* ((axis-thickness (w axis-style))
(axis-color (v! (s~ axis-style :xyz) 1))
(diff (/ (s~ xy-range :xz) (- (s~ xy-range :yw) (s~ xy-range :xz))))
(uv (+ uv diff)))
(+ (* axis-color (smoothstep axis-thickness 0 (abs (x uv))))
(* axis-color (smoothstep axis-thickness 0 (abs (y uv)))))))
|
46cb0a1b736273ef14320204c79222479e4f5c79d1590500944b9c4719b11e3a | maxlapshin/fix | fix_reader_tests.erl | -module(fix_reader_tests).
-include_lib("eunit/include/eunit.hrl").
-include_lib("fix/include/business.hrl").
% -include_lib("trader/include/trader.hrl").
-compile(export_all).
Needed to keep test order and to ensure broken test does not affect other tests with left
reader_test_() ->
% Labels increase as line number increase. Use that fact for sorting
{ok, {_, [{labeled_exports, LExports}]}} = beam_lib:chunks(code:which(?MODULE), [labeled_exports]),
SLExports = lists:keysort(3, LExports),
TestFunctions = [F || {F,0,_} <- SLExports,
lists:prefix("test_", atom_to_list(F))],
{foreach, fun setup/0, fun cleanup/1,
[{atom_to_list(F), fun ?MODULE:F/0} || F <- TestFunctions] }.
setup() ->
% prepare
application:set_env(fix, fix_test_read, [
{host,"127.0.0.1"},
{port,6789},
{password,"TestPw"},
{target,"TestTarget"},
{sender,"TestSender"},
{heartbeat,30}
]),
fix_test_server:start(6789, []),
[].
cleanup(Mods) ->
% error_logger:delete_report_handler(sasl_report_tty_h),
error_logger : ) ,
fix_test_server:stop(),
meck:unload(Mods).
stop_normal_exitmsg(Pid) ->
monitor(process, Pid),
Pid ! {'EXIT', self(), normal},
?assertEqual(normal, receive {'DOWN', _, _, Pid, Reason} -> Reason after 200 -> error end),
ok.
test_read_conn_connect() ->
% start and check start is successful
StartResult = fix_read_conn:start_link(fix_test_read, []),
?assertMatch({ok, _}, StartResult),
{ok, R} = StartResult,
?assertEqual(ok, fix_read_conn:connect(R, [])),
stop_normal_exitmsg(R),
ok.
test_read_conn_connect_error() ->
application:set_env(fix, fix_test_read, [
{host,"127.0.0.1"},
{port,6788},
{password,"TestPw"},
{target,"TestTarget"},
{sender,"TestSender"},
{heartbeat,30}
]),
{ok, R} = fix_read_conn:start_link(fix_test_read, []),
?assertEqual({error, {connect, econnrefused}}, fix_read_conn:connect(R, [])),
stop_normal_exitmsg(R),
ok.
test_read_conn_logon_timeout() ->
fix_test_server:stop(),
fix_test_server:start(6789, [{on_fix, fun
(logon, _) -> timer:sleep(1000);
(_,_) -> ok
end}]),
We know that fix_read_conn uses gen_server timeout for logon
meck : expect(gen_tcp , send , fun(_S , _ D ) - > self ( ) ! timeout , ok end ) ,
{ok, R} = fix_read_conn:start_link(fix_test_read, []),
?assertEqual({error, logon_timeout}, fix_read_conn:connect(R, [])),
stop_normal_exitmsg(R),
ok.
test_supervision_tree() ->
% start supervisor where readers go
ReadersSupStartResult = supervisor:start_link({local, fix_readers_sup}, fix_sup, [readers]),
?assertMatch({ok, _}, ReadersSupStartResult),
{ok, ReadersSup} = ReadersSupStartResult,
% Start reader and sync to ensure it completes startup
?assertMatch({ok, _}, fix_sup:start_reader(fix_test_read)),
?assertMatch([{fix_test_read, _, supervisor, _}], supervisor:which_children(fix_readers_sup)),
timer:sleep(10),
(catch gen_server:call(fix_test_read, sync)),
% Check reader supervisor children list
SupName = fix_sup:read_sup(fix_test_read),
?assertMatch([
{connection, _, worker, _},
{stocks, _, supervisor, _},
{manager, _, worker, _} ], supervisor:which_children(SupName)),
stop_normal_exitmsg(ReadersSup),
ok.
test_manager_schedules_reconnect() ->
Tester = self(),
meck:new(fix_read_manager, [passthrough]),
meck:expect(fix_read_manager, connect_after, fun (T) ->
Ref = erlang:make_ref(),
Msg = {timeout, Ref, connect},
case T of
0 -> self() ! Msg;
_ -> Tester ! {connect_timer, T, self(), Msg}
end,
Ref
end),
Make it fail for first time
fix_test_server:stop(),
{ok, ReadersSup} = supervisor:start_link({local, fix_readers_sup}, fix_sup, [readers]),
{ok, _} = fix_sup:start_reader(fix_test_read),
% Ensure manager set timeout for next connect
Timers = receive_timers(100),
?assertMatch([_], Timers),
% "Repair" connection and send timeout event
fix_test_server:start(6789, []),
{_, _, Mgr, TmoMsg} = hd(Timers),
Mgr ! TmoMsg,
Sync to manager and ensure no more reconnects scheduled
?assertEqual(connected, fix_read_manager:status(Mgr)),
?assertEqual([], receive_timers(20)),
Ensure connect attempts were made from two different Pids ( which means connection was restarted only once )
? assertMatch ( [ _ , _ ] , lists : , : history(gen_tcp ) ) ) ,
% Turn off verbose error logging
% Now simulate error in connection
erlang:exit(whereis(fix_test_server), normal),
% Ensure new connection is started without timeout
?assertEqual([], receive_timers(40)),
?assertEqual(connected, fix_read_manager:status(Mgr)),
stop_normal_exitmsg(ReadersSup),
meck:unload(fix_read_manager),
ok.
receive_timers(Timeout) ->
receive
{connect_timer, _, _, _} = Timer ->
[Timer | receive_timers(10)]
after
Timeout -> []
end.
test_stock_subscribe() ->
Self = self(),
fix_test_server:stop(),
fix_test_server:start(6789, [{on_fix, fun
(market_data_request,Msg) ->
Exchange = proplists:get_value(security_exchange, Msg),
Symbol = proplists:get_value(symbol, Msg),
MdReqId = proplists:get_value(md_req_id, Msg),
Self ! {subscribe, Exchange, Symbol, MdReqId};
(_,_) -> ok
end}]),
% Pre-requisites
?assert(whereis('MICEX.TEST') == undefined),
{ok, ReadersSup} = supervisor:start_link({local, fix_readers_sup}, fix_sup, [readers]),
% Request reader's stock and ensure it is started
{ok, _} = fix_reader:stock(fix_test_read, 'MICEX.TEST'),
?assertNot(whereis('MICEX.TEST') == undefined),
ID1 = receive
{subscribe, <<"MICEX">>, <<"TEST">>, ID1_} -> ID1_
after
5 -> error(havent_subscribed_micex_test)
end,
receive
{subscribe, <<"MICEX">>, <<"TEST">>, _} -> error(double_subscribe_micex_test)
after
0 -> ok
end,
Add one more stock . Now it uses already started reader
{ok, _} = fix_reader:stock(fix_test_read, 'MICEX.TEST2'),
ID2 = receive
{subscribe, <<"MICEX">>, <<"TEST2">>, ID2_} -> ID2_
after
5 -> error(havent_subscribed_micex_test2)
end,
receive
{subscribe, <<"MICEX">>, <<"TEST2">>, _} -> error(double_subscribe_micex_test2)
after
0 -> ok
end,
?assertNotEqual(ID1, ID2),
stop_normal_exitmsg(ReadersSup),
ok.
test_stock_route() ->
Construct event proxy for monitoring stock events
ok = meck:new(test_ev_proxy, [non_strict]),
ok = meck:expect(test_ev_proxy, init, fun(Arg) -> {ok, Arg} end),
ok = meck:expect(test_ev_proxy, handle_event, fun(Ev, [Dest, Self]) -> Dest ! {event, Self, Ev}, {ok, [Dest, Self]} end),
ok = meck:expect(test_ev_proxy, handle_info, fun(Ev, [Dest, Self]) -> Dest ! {info, Self, Ev}, {ok, [Dest, Self]} end),
% Pre-requisites
{ok, ReadersSup} = supervisor:start_link({local, fix_readers_sup}, fix_sup, [readers]),
% Start stocks
{ok, _} = fix_reader:stock(fix_test_read, 'MICEX.TEST'),
{ok, _} = fix_reader:stock(fix_test_read, 'MICEX.TEST2'),
% Install proxy
gen_event:add_handler('MICEX.TEST', test_ev_proxy, [self(), 'MICEX.TEST']),
gen_event:add_handler('MICEX.TEST2', test_ev_proxy, [self(), 'MICEX.TEST2']),
% Get IDs
SentRequests2 = [ Msg || { _ , { gen_tcp , send , [ fake_socket , Packet ] } , _ } < - meck : ) ,
% {ok, #market_data_request{} = Msg, _, _} <- [fix:decode(iolist_to_binary(Packet))]],
% [ID1] = [ReqID || #market_data_request{md_req_id = ReqID, fields = Fields} <- SentRequests2,
lists : member({symbol,<<"TEST " > > } , ) ] ,
Send some for first stock
% Conn = fix_read_conn:id(fix_test_read),
MDEntries = [ [ , trade } , { md_entry_px , 18.2 } , { md_entry_size , 14 } ] ] ,
FixMessage = # market_data_snapshot_full_refresh{md_req_id = ID1 , md_entries = MDEntries } ,
Conn ! { test_messages , [ FixMessage ] } ,
receive
{event, 'MICEX.TEST', #market_data_snapshot_full_refresh{symbol = 'MICEX.TEST'}} -> ok
after
100 -> error(timeout_routing)
end,
stop_normal_exitmsg(ReadersSup),
meck:unload(test_ev_proxy),
ok.
test_invalid_instrument() ->
{ok, ReadersSup} = supervisor:start_link({local, fix_readers_sup}, fix_sup, [readers]),
% Start stocks
{ok, _} = fix_reader:stock(fix_test_read, 'MICEX.TEST'),
{ok, _} = fix_reader:stock(fix_test_read, 'MICEX.TEST2'),
Manager = whereis(fix_test_read),
Connection = whereis(fix_test_read_conn),
?assertMatch({fix_connection, P} when is_pid(P), {fix_connection, Connection}),
?assertEqual(ok, fix_read_conn:status(Connection)),
?assertEqual(connected, fix_read_manager:status(Manager)),
?assertEqual({error, rejected}, fix_reader:stock(fix_test_read, 'MICEX.INVALID')),
?assertEqual({error, rejected}, fix_reader:subscribe(fix_test_read, 'MICEX.INVALID')),
timer : sleep(1000 ) ,
% ?assertEqual(connected, fix_read_manager:status(Manager)),
timer : sleep(1000 ) ,
?assertEqual(ok, fix_read_conn:status(Connection)),
stop_normal_exitmsg(ReadersSup),
ok.
receive_msgs(_, 0) -> [];
receive_msgs(Timeout, Count) ->
receive
Msg -> [Msg | receive_msgs(Timeout, Count - 1)]
after
Timeout -> []
end.
| null | https://raw.githubusercontent.com/maxlapshin/fix/4b5208c7c11528fe477954f48152a1922cf17630/test/fix_reader_tests.erl | erlang | -include_lib("trader/include/trader.hrl").
Labels increase as line number increase. Use that fact for sorting
prepare
error_logger:delete_report_handler(sasl_report_tty_h),
start and check start is successful
start supervisor where readers go
Start reader and sync to ensure it completes startup
Check reader supervisor children list
Ensure manager set timeout for next connect
"Repair" connection and send timeout event
Turn off verbose error logging
Now simulate error in connection
Ensure new connection is started without timeout
Pre-requisites
Request reader's stock and ensure it is started
Pre-requisites
Start stocks
Install proxy
Get IDs
{ok, #market_data_request{} = Msg, _, _} <- [fix:decode(iolist_to_binary(Packet))]],
[ID1] = [ReqID || #market_data_request{md_req_id = ReqID, fields = Fields} <- SentRequests2,
Conn = fix_read_conn:id(fix_test_read),
Start stocks
?assertEqual(connected, fix_read_manager:status(Manager)), | -module(fix_reader_tests).
-include_lib("eunit/include/eunit.hrl").
-include_lib("fix/include/business.hrl").
-compile(export_all).
Needed to keep test order and to ensure broken test does not affect other tests with left
reader_test_() ->
{ok, {_, [{labeled_exports, LExports}]}} = beam_lib:chunks(code:which(?MODULE), [labeled_exports]),
SLExports = lists:keysort(3, LExports),
TestFunctions = [F || {F,0,_} <- SLExports,
lists:prefix("test_", atom_to_list(F))],
{foreach, fun setup/0, fun cleanup/1,
[{atom_to_list(F), fun ?MODULE:F/0} || F <- TestFunctions] }.
setup() ->
application:set_env(fix, fix_test_read, [
{host,"127.0.0.1"},
{port,6789},
{password,"TestPw"},
{target,"TestTarget"},
{sender,"TestSender"},
{heartbeat,30}
]),
fix_test_server:start(6789, []),
[].
cleanup(Mods) ->
error_logger : ) ,
fix_test_server:stop(),
meck:unload(Mods).
stop_normal_exitmsg(Pid) ->
monitor(process, Pid),
Pid ! {'EXIT', self(), normal},
?assertEqual(normal, receive {'DOWN', _, _, Pid, Reason} -> Reason after 200 -> error end),
ok.
test_read_conn_connect() ->
StartResult = fix_read_conn:start_link(fix_test_read, []),
?assertMatch({ok, _}, StartResult),
{ok, R} = StartResult,
?assertEqual(ok, fix_read_conn:connect(R, [])),
stop_normal_exitmsg(R),
ok.
test_read_conn_connect_error() ->
application:set_env(fix, fix_test_read, [
{host,"127.0.0.1"},
{port,6788},
{password,"TestPw"},
{target,"TestTarget"},
{sender,"TestSender"},
{heartbeat,30}
]),
{ok, R} = fix_read_conn:start_link(fix_test_read, []),
?assertEqual({error, {connect, econnrefused}}, fix_read_conn:connect(R, [])),
stop_normal_exitmsg(R),
ok.
test_read_conn_logon_timeout() ->
fix_test_server:stop(),
fix_test_server:start(6789, [{on_fix, fun
(logon, _) -> timer:sleep(1000);
(_,_) -> ok
end}]),
We know that fix_read_conn uses gen_server timeout for logon
meck : expect(gen_tcp , send , fun(_S , _ D ) - > self ( ) ! timeout , ok end ) ,
{ok, R} = fix_read_conn:start_link(fix_test_read, []),
?assertEqual({error, logon_timeout}, fix_read_conn:connect(R, [])),
stop_normal_exitmsg(R),
ok.
test_supervision_tree() ->
ReadersSupStartResult = supervisor:start_link({local, fix_readers_sup}, fix_sup, [readers]),
?assertMatch({ok, _}, ReadersSupStartResult),
{ok, ReadersSup} = ReadersSupStartResult,
?assertMatch({ok, _}, fix_sup:start_reader(fix_test_read)),
?assertMatch([{fix_test_read, _, supervisor, _}], supervisor:which_children(fix_readers_sup)),
timer:sleep(10),
(catch gen_server:call(fix_test_read, sync)),
SupName = fix_sup:read_sup(fix_test_read),
?assertMatch([
{connection, _, worker, _},
{stocks, _, supervisor, _},
{manager, _, worker, _} ], supervisor:which_children(SupName)),
stop_normal_exitmsg(ReadersSup),
ok.
test_manager_schedules_reconnect() ->
Tester = self(),
meck:new(fix_read_manager, [passthrough]),
meck:expect(fix_read_manager, connect_after, fun (T) ->
Ref = erlang:make_ref(),
Msg = {timeout, Ref, connect},
case T of
0 -> self() ! Msg;
_ -> Tester ! {connect_timer, T, self(), Msg}
end,
Ref
end),
Make it fail for first time
fix_test_server:stop(),
{ok, ReadersSup} = supervisor:start_link({local, fix_readers_sup}, fix_sup, [readers]),
{ok, _} = fix_sup:start_reader(fix_test_read),
Timers = receive_timers(100),
?assertMatch([_], Timers),
fix_test_server:start(6789, []),
{_, _, Mgr, TmoMsg} = hd(Timers),
Mgr ! TmoMsg,
Sync to manager and ensure no more reconnects scheduled
?assertEqual(connected, fix_read_manager:status(Mgr)),
?assertEqual([], receive_timers(20)),
Ensure connect attempts were made from two different Pids ( which means connection was restarted only once )
? assertMatch ( [ _ , _ ] , lists : , : history(gen_tcp ) ) ) ,
erlang:exit(whereis(fix_test_server), normal),
?assertEqual([], receive_timers(40)),
?assertEqual(connected, fix_read_manager:status(Mgr)),
stop_normal_exitmsg(ReadersSup),
meck:unload(fix_read_manager),
ok.
receive_timers(Timeout) ->
receive
{connect_timer, _, _, _} = Timer ->
[Timer | receive_timers(10)]
after
Timeout -> []
end.
test_stock_subscribe() ->
Self = self(),
fix_test_server:stop(),
fix_test_server:start(6789, [{on_fix, fun
(market_data_request,Msg) ->
Exchange = proplists:get_value(security_exchange, Msg),
Symbol = proplists:get_value(symbol, Msg),
MdReqId = proplists:get_value(md_req_id, Msg),
Self ! {subscribe, Exchange, Symbol, MdReqId};
(_,_) -> ok
end}]),
?assert(whereis('MICEX.TEST') == undefined),
{ok, ReadersSup} = supervisor:start_link({local, fix_readers_sup}, fix_sup, [readers]),
{ok, _} = fix_reader:stock(fix_test_read, 'MICEX.TEST'),
?assertNot(whereis('MICEX.TEST') == undefined),
ID1 = receive
{subscribe, <<"MICEX">>, <<"TEST">>, ID1_} -> ID1_
after
5 -> error(havent_subscribed_micex_test)
end,
receive
{subscribe, <<"MICEX">>, <<"TEST">>, _} -> error(double_subscribe_micex_test)
after
0 -> ok
end,
Add one more stock . Now it uses already started reader
{ok, _} = fix_reader:stock(fix_test_read, 'MICEX.TEST2'),
ID2 = receive
{subscribe, <<"MICEX">>, <<"TEST2">>, ID2_} -> ID2_
after
5 -> error(havent_subscribed_micex_test2)
end,
receive
{subscribe, <<"MICEX">>, <<"TEST2">>, _} -> error(double_subscribe_micex_test2)
after
0 -> ok
end,
?assertNotEqual(ID1, ID2),
stop_normal_exitmsg(ReadersSup),
ok.
test_stock_route() ->
Construct event proxy for monitoring stock events
ok = meck:new(test_ev_proxy, [non_strict]),
ok = meck:expect(test_ev_proxy, init, fun(Arg) -> {ok, Arg} end),
ok = meck:expect(test_ev_proxy, handle_event, fun(Ev, [Dest, Self]) -> Dest ! {event, Self, Ev}, {ok, [Dest, Self]} end),
ok = meck:expect(test_ev_proxy, handle_info, fun(Ev, [Dest, Self]) -> Dest ! {info, Self, Ev}, {ok, [Dest, Self]} end),
{ok, ReadersSup} = supervisor:start_link({local, fix_readers_sup}, fix_sup, [readers]),
{ok, _} = fix_reader:stock(fix_test_read, 'MICEX.TEST'),
{ok, _} = fix_reader:stock(fix_test_read, 'MICEX.TEST2'),
gen_event:add_handler('MICEX.TEST', test_ev_proxy, [self(), 'MICEX.TEST']),
gen_event:add_handler('MICEX.TEST2', test_ev_proxy, [self(), 'MICEX.TEST2']),
SentRequests2 = [ Msg || { _ , { gen_tcp , send , [ fake_socket , Packet ] } , _ } < - meck : ) ,
lists : member({symbol,<<"TEST " > > } , ) ] ,
Send some for first stock
MDEntries = [ [ , trade } , { md_entry_px , 18.2 } , { md_entry_size , 14 } ] ] ,
FixMessage = # market_data_snapshot_full_refresh{md_req_id = ID1 , md_entries = MDEntries } ,
Conn ! { test_messages , [ FixMessage ] } ,
receive
{event, 'MICEX.TEST', #market_data_snapshot_full_refresh{symbol = 'MICEX.TEST'}} -> ok
after
100 -> error(timeout_routing)
end,
stop_normal_exitmsg(ReadersSup),
meck:unload(test_ev_proxy),
ok.
test_invalid_instrument() ->
{ok, ReadersSup} = supervisor:start_link({local, fix_readers_sup}, fix_sup, [readers]),
{ok, _} = fix_reader:stock(fix_test_read, 'MICEX.TEST'),
{ok, _} = fix_reader:stock(fix_test_read, 'MICEX.TEST2'),
Manager = whereis(fix_test_read),
Connection = whereis(fix_test_read_conn),
?assertMatch({fix_connection, P} when is_pid(P), {fix_connection, Connection}),
?assertEqual(ok, fix_read_conn:status(Connection)),
?assertEqual(connected, fix_read_manager:status(Manager)),
?assertEqual({error, rejected}, fix_reader:stock(fix_test_read, 'MICEX.INVALID')),
?assertEqual({error, rejected}, fix_reader:subscribe(fix_test_read, 'MICEX.INVALID')),
timer : sleep(1000 ) ,
timer : sleep(1000 ) ,
?assertEqual(ok, fix_read_conn:status(Connection)),
stop_normal_exitmsg(ReadersSup),
ok.
receive_msgs(_, 0) -> [];
receive_msgs(Timeout, Count) ->
receive
Msg -> [Msg | receive_msgs(Timeout, Count - 1)]
after
Timeout -> []
end.
|
044bdbfd64d9e9beef230b47bc70476631733075269f98465e7cb29a2e0645e7 | AbstractMachinesLab/caramel | ast_mapper.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, LexiFi
(* *)
Copyright 2012 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
A generic Parsetree mapping class
[ @@@ocaml.warning " +9 " ]
( * Ensure that record patterns do n't miss any field .
[@@@ocaml.warning "+9"]
(* Ensure that record patterns don't miss any field. *)
*)
open Parsetree
open Ast_helper
open Location
type mapper = {
attribute: mapper -> attribute -> attribute;
attributes: mapper -> attribute list -> attribute list;
case: mapper -> case -> case;
cases: mapper -> case list -> case list;
class_declaration: mapper -> class_declaration -> class_declaration;
class_description: mapper -> class_description -> class_description;
class_expr: mapper -> class_expr -> class_expr;
class_field: mapper -> class_field -> class_field;
class_signature: mapper -> class_signature -> class_signature;
class_structure: mapper -> class_structure -> class_structure;
class_type: mapper -> class_type -> class_type;
class_type_declaration: mapper -> class_type_declaration
-> class_type_declaration;
class_type_field: mapper -> class_type_field -> class_type_field;
constructor_declaration: mapper -> constructor_declaration
-> constructor_declaration;
expr: mapper -> expression -> expression;
extension: mapper -> extension -> extension;
extension_constructor: mapper -> extension_constructor
-> extension_constructor;
include_declaration: mapper -> include_declaration -> include_declaration;
include_description: mapper -> include_description -> include_description;
label_declaration: mapper -> label_declaration -> label_declaration;
location: mapper -> Location.t -> Location.t;
module_binding: mapper -> module_binding -> module_binding;
module_declaration: mapper -> module_declaration -> module_declaration;
module_expr: mapper -> module_expr -> module_expr;
module_type: mapper -> module_type -> module_type;
module_type_declaration: mapper -> module_type_declaration
-> module_type_declaration;
open_description: mapper -> open_description -> open_description;
pat: mapper -> pattern -> pattern;
payload: mapper -> payload -> payload;
signature: mapper -> signature -> signature;
signature_item: mapper -> signature_item -> signature_item;
structure: mapper -> structure -> structure;
structure_item: mapper -> structure_item -> structure_item;
typ: mapper -> core_type -> core_type;
type_declaration: mapper -> type_declaration -> type_declaration;
type_extension: mapper -> type_extension -> type_extension;
type_kind: mapper -> type_kind -> type_kind;
value_binding: mapper -> value_binding -> value_binding;
value_description: mapper -> value_description -> value_description;
with_constraint: mapper -> with_constraint -> with_constraint;
}
let map_fst f (x, y) = (f x, y)
let map_snd f (x, y) = (x, f y)
let map_tuple f1 f2 (x, y) = (f1 x, f2 y)
let map_tuple3 f1 f2 f3 (x, y, z) = (f1 x, f2 y, f3 z)
let map_opt f = function None -> None | Some x -> Some (f x)
let map_loc sub {loc; txt} = {loc = sub.location sub loc; txt}
module T = struct
(* Type expressions for the core language *)
let row_field sub = function
| Rtag (l, attrs, b, tl) ->
Rtag (l, sub.attributes sub attrs, b, List.map (sub.typ sub) tl)
| Rinherit t -> Rinherit (sub.typ sub t)
let map sub {ptyp_desc = desc; ptyp_loc = loc; ptyp_attributes = attrs} =
let open Typ in
let loc = sub.location sub loc in
let attrs = sub.attributes sub attrs in
match desc with
| Ptyp_any -> any ~loc ~attrs ()
| Ptyp_var s -> var ~loc ~attrs s
| Ptyp_arrow (lab, t1, t2) ->
arrow ~loc ~attrs lab (sub.typ sub t1) (sub.typ sub t2)
| Ptyp_tuple tyl -> tuple ~loc ~attrs (List.map (sub.typ sub) tyl)
| Ptyp_constr (lid, tl) ->
constr ~loc ~attrs (map_loc sub lid) (List.map (sub.typ sub) tl)
| Ptyp_object (l, o) ->
let f (s, a, t) =
(map_loc sub s, sub.attributes sub a, sub.typ sub t) in
object_ ~loc ~attrs (List.map f l) o
| Ptyp_class (lid, tl) ->
class_ ~loc ~attrs (map_loc sub lid) (List.map (sub.typ sub) tl)
| Ptyp_alias (t, s) -> alias ~loc ~attrs (sub.typ sub t) s
| Ptyp_variant (rl, b, ll) ->
variant ~loc ~attrs (List.map (row_field sub) rl) b ll
| Ptyp_poly (sl, t) -> poly ~loc ~attrs
(List.map (map_loc sub) sl) (sub.typ sub t)
| Ptyp_package (lid, l) ->
package ~loc ~attrs (map_loc sub lid)
(List.map (map_tuple (map_loc sub) (sub.typ sub)) l)
| Ptyp_extension x -> extension ~loc ~attrs (sub.extension sub x)
let map_type_declaration sub
{ptype_name; ptype_params; ptype_cstrs;
ptype_kind;
ptype_private;
ptype_manifest;
ptype_attributes;
ptype_loc} =
Type.mk (map_loc sub ptype_name)
~params:(List.map (map_fst (sub.typ sub)) ptype_params)
~priv:ptype_private
~cstrs:(List.map
(map_tuple3 (sub.typ sub) (sub.typ sub) (sub.location sub))
ptype_cstrs)
~kind:(sub.type_kind sub ptype_kind)
?manifest:(map_opt (sub.typ sub) ptype_manifest)
~loc:(sub.location sub ptype_loc)
~attrs:(sub.attributes sub ptype_attributes)
let map_type_kind sub = function
| Ptype_abstract -> Ptype_abstract
| Ptype_variant l ->
Ptype_variant (List.map (sub.constructor_declaration sub) l)
| Ptype_record l -> Ptype_record (List.map (sub.label_declaration sub) l)
| Ptype_open -> Ptype_open
let map_constructor_arguments sub = function
| Pcstr_tuple l -> Pcstr_tuple (List.map (sub.typ sub) l)
| Pcstr_record l ->
Pcstr_record (List.map (sub.label_declaration sub) l)
let map_type_extension sub
{ptyext_path; ptyext_params;
ptyext_constructors;
ptyext_private;
ptyext_attributes} =
Te.mk
(map_loc sub ptyext_path)
(List.map (sub.extension_constructor sub) ptyext_constructors)
~params:(List.map (map_fst (sub.typ sub)) ptyext_params)
~priv:ptyext_private
~attrs:(sub.attributes sub ptyext_attributes)
let map_extension_constructor_kind sub = function
Pext_decl(ctl, cto) ->
Pext_decl(map_constructor_arguments sub ctl, map_opt (sub.typ sub) cto)
| Pext_rebind li ->
Pext_rebind (map_loc sub li)
  (* Map a single extension-constructor declaration. *)
  let map_extension_constructor sub
      {pext_name;
       pext_kind;
       pext_loc;
       pext_attributes} =
    Te.constructor
      (map_loc sub pext_name)
      (map_extension_constructor_kind sub pext_kind)
      ~loc:(sub.location sub pext_loc)
      ~attrs:(sub.attributes sub pext_attributes)
end
module CT = struct
  (* Type expressions for the class language *)
  (* Map a class type: the location and attributes are rewritten
     first, then each constructor's sub-terms go through [sub]. *)
  let map sub {pcty_loc = loc; pcty_desc = desc; pcty_attributes = attrs} =
    let open Cty in
    let loc = sub.location sub loc in
    let attrs = sub.attributes sub attrs in
    match desc with
    | Pcty_constr (lid, tys) ->
        constr ~loc ~attrs (map_loc sub lid) (List.map (sub.typ sub) tys)
    | Pcty_signature x -> signature ~loc ~attrs (sub.class_signature sub x)
    | Pcty_arrow (lab, t, ct) ->
        arrow ~loc ~attrs lab (sub.typ sub t) (sub.class_type sub ct)
    | Pcty_extension x -> extension ~loc ~attrs (sub.extension sub x)
  (* Map a class-signature field (inherit, val, method, ...).
     Mutability/privacy/virtual flags are copied through unchanged. *)
  let map_field sub {pctf_desc = desc; pctf_loc = loc; pctf_attributes = attrs}
    =
    let open Ctf in
    let loc = sub.location sub loc in
    let attrs = sub.attributes sub attrs in
    match desc with
    | Pctf_inherit ct -> inherit_ ~loc ~attrs (sub.class_type sub ct)
    | Pctf_val (s, m, v, t) ->
        val_ ~loc ~attrs (map_loc sub s) m v (sub.typ sub t)
    | Pctf_method (s, p, v, t) ->
        method_ ~loc ~attrs (map_loc sub s) p v (sub.typ sub t)
    | Pctf_constraint (t1, t2) ->
        constraint_ ~loc ~attrs (sub.typ sub t1) (sub.typ sub t2)
    | Pctf_attribute x -> attribute ~loc (sub.attribute sub x)
    | Pctf_extension x -> extension ~loc ~attrs (sub.extension sub x)
  (* Map a whole class signature: self type plus all its fields. *)
  let map_signature sub {pcsig_self; pcsig_fields} =
    Csig.mk
      (sub.typ sub pcsig_self)
      (List.map (sub.class_type_field sub) pcsig_fields)
end
module MT = struct
  (* Type expressions for the module language *)
  (* Map a module type, one case per [Pmty_*] constructor. *)
  let map sub {pmty_desc = desc; pmty_loc = loc; pmty_attributes = attrs} =
    let open Mty in
    let loc = sub.location sub loc in
    let attrs = sub.attributes sub attrs in
    match desc with
    | Pmty_ident s -> ident ~loc ~attrs (map_loc sub s)
    | Pmty_alias s -> alias ~loc ~attrs (map_loc sub s)
    | Pmty_signature sg -> signature ~loc ~attrs (sub.signature sub sg)
    | Pmty_functor (s, mt1, mt2) ->
        functor_ ~loc ~attrs (map_loc sub s)
          (Misc.may_map (sub.module_type sub) mt1)
          (sub.module_type sub mt2)
    | Pmty_with (mt, l) ->
        with_ ~loc ~attrs (sub.module_type sub mt)
          (List.map (sub.with_constraint sub) l)
    | Pmty_typeof me -> typeof_ ~loc ~attrs (sub.module_expr sub me)
    | Pmty_extension x -> extension ~loc ~attrs (sub.extension sub x)
  (* Map one [with ...] constraint of a [Pmty_with] node. *)
  let map_with_constraint sub = function
    | Pwith_type (lid, d) ->
        Pwith_type (map_loc sub lid, sub.type_declaration sub d)
    | Pwith_module (lid, lid2) ->
        Pwith_module (map_loc sub lid, map_loc sub lid2)
    | Pwith_typesubst d -> Pwith_typesubst (sub.type_declaration sub d)
    | Pwith_modsubst (s, lid) ->
        Pwith_modsubst (map_loc sub s, map_loc sub lid)
  (* Map one signature item, one case per [Psig_*] constructor. *)
  let map_signature_item sub {psig_desc = desc; psig_loc = loc} =
    let open Sig in
    let loc = sub.location sub loc in
    match desc with
    | Psig_value vd -> value ~loc (sub.value_description sub vd)
    | Psig_type (rf, l) -> type_ ~loc rf (List.map (sub.type_declaration sub) l)
    | Psig_typext te -> type_extension ~loc (sub.type_extension sub te)
    | Psig_exception ed -> exception_ ~loc (sub.extension_constructor sub ed)
    | Psig_module x -> module_ ~loc (sub.module_declaration sub x)
    | Psig_recmodule l ->
        rec_module ~loc (List.map (sub.module_declaration sub) l)
    | Psig_modtype x -> modtype ~loc (sub.module_type_declaration sub x)
    | Psig_open x -> open_ ~loc (sub.open_description sub x)
    | Psig_include x -> include_ ~loc (sub.include_description sub x)
    | Psig_class l -> class_ ~loc (List.map (sub.class_description sub) l)
    | Psig_class_type l ->
        class_type ~loc (List.map (sub.class_type_declaration sub) l)
    | Psig_extension (x, attrs) ->
        extension ~loc (sub.extension sub x) ~attrs:(sub.attributes sub attrs)
    | Psig_attribute x -> attribute ~loc (sub.attribute sub x)
end
module M = struct
  (* Value expressions for the module language *)
  (* Map a module expression, one case per [Pmod_*] constructor. *)
  let map sub {pmod_loc = loc; pmod_desc = desc; pmod_attributes = attrs} =
    let open Mod in
    let loc = sub.location sub loc in
    let attrs = sub.attributes sub attrs in
    match desc with
    | Pmod_ident x -> ident ~loc ~attrs (map_loc sub x)
    | Pmod_structure str -> structure ~loc ~attrs (sub.structure sub str)
    | Pmod_functor (arg, arg_ty, body) ->
        functor_ ~loc ~attrs (map_loc sub arg)
          (Misc.may_map (sub.module_type sub) arg_ty)
          (sub.module_expr sub body)
    | Pmod_apply (m1, m2) ->
        apply ~loc ~attrs (sub.module_expr sub m1) (sub.module_expr sub m2)
    | Pmod_constraint (m, mty) ->
        constraint_ ~loc ~attrs (sub.module_expr sub m)
          (sub.module_type sub mty)
    | Pmod_unpack e -> unpack ~loc ~attrs (sub.expr sub e)
    | Pmod_extension x -> extension ~loc ~attrs (sub.extension sub x)
  (* Map one structure item, one case per [Pstr_*] constructor. *)
  let map_structure_item sub {pstr_loc = loc; pstr_desc = desc} =
    let open Str in
    let loc = sub.location sub loc in
    match desc with
    | Pstr_eval (x, attrs) ->
        eval ~loc ~attrs:(sub.attributes sub attrs) (sub.expr sub x)
    | Pstr_value (r, vbs) -> value ~loc r (List.map (sub.value_binding sub) vbs)
    | Pstr_primitive vd -> primitive ~loc (sub.value_description sub vd)
    | Pstr_type (rf, l) -> type_ ~loc rf (List.map (sub.type_declaration sub) l)
    | Pstr_typext te -> type_extension ~loc (sub.type_extension sub te)
    | Pstr_exception ed -> exception_ ~loc (sub.extension_constructor sub ed)
    | Pstr_module x -> module_ ~loc (sub.module_binding sub x)
    | Pstr_recmodule l -> rec_module ~loc (List.map (sub.module_binding sub) l)
    | Pstr_modtype x -> modtype ~loc (sub.module_type_declaration sub x)
    | Pstr_open x -> open_ ~loc (sub.open_description sub x)
    | Pstr_class l -> class_ ~loc (List.map (sub.class_declaration sub) l)
    | Pstr_class_type l ->
        class_type ~loc (List.map (sub.class_type_declaration sub) l)
    | Pstr_include x -> include_ ~loc (sub.include_declaration sub x)
    | Pstr_extension (x, attrs) ->
        extension ~loc (sub.extension sub x) ~attrs:(sub.attributes sub attrs)
    | Pstr_attribute x -> attribute ~loc (sub.attribute sub x)
end
module E = struct
  (* Value expressions for the core language *)
  (* Map an expression, one case per [Pexp_*] constructor.  Constants,
     labels and flags are copied unchanged; every embedded expression,
     pattern, type, module expression and identifier goes through the
     corresponding [sub] hook. *)
  let map sub {pexp_loc = loc; pexp_desc = desc; pexp_attributes = attrs} =
    let open Exp in
    let loc = sub.location sub loc in
    let attrs = sub.attributes sub attrs in
    match desc with
    | Pexp_ident x -> ident ~loc ~attrs (map_loc sub x)
    | Pexp_constant x -> constant ~loc ~attrs x
    | Pexp_let (r, vbs, e) ->
        let_ ~loc ~attrs r (List.map (sub.value_binding sub) vbs)
          (sub.expr sub e)
    | Pexp_fun (lab, def, p, e) ->
        fun_ ~loc ~attrs lab (map_opt (sub.expr sub) def) (sub.pat sub p)
          (sub.expr sub e)
    | Pexp_function pel -> function_ ~loc ~attrs (sub.cases sub pel)
    | Pexp_apply (e, l) ->
        apply ~loc ~attrs (sub.expr sub e) (List.map (map_snd (sub.expr sub)) l)
    | Pexp_match (e, pel) ->
        match_ ~loc ~attrs (sub.expr sub e) (sub.cases sub pel)
    | Pexp_try (e, pel) -> try_ ~loc ~attrs (sub.expr sub e) (sub.cases sub pel)
    | Pexp_tuple el -> tuple ~loc ~attrs (List.map (sub.expr sub) el)
    | Pexp_construct (lid, arg) ->
        construct ~loc ~attrs (map_loc sub lid) (map_opt (sub.expr sub) arg)
    | Pexp_variant (lab, eo) ->
        variant ~loc ~attrs lab (map_opt (sub.expr sub) eo)
    | Pexp_record (l, eo) ->
        record ~loc ~attrs (List.map (map_tuple (map_loc sub) (sub.expr sub)) l)
          (map_opt (sub.expr sub) eo)
    | Pexp_field (e, lid) ->
        field ~loc ~attrs (sub.expr sub e) (map_loc sub lid)
    | Pexp_setfield (e1, lid, e2) ->
        setfield ~loc ~attrs (sub.expr sub e1) (map_loc sub lid)
          (sub.expr sub e2)
    | Pexp_array el -> array ~loc ~attrs (List.map (sub.expr sub) el)
    | Pexp_ifthenelse (e1, e2, e3) ->
        ifthenelse ~loc ~attrs (sub.expr sub e1) (sub.expr sub e2)
          (map_opt (sub.expr sub) e3)
    | Pexp_sequence (e1, e2) ->
        sequence ~loc ~attrs (sub.expr sub e1) (sub.expr sub e2)
    | Pexp_while (e1, e2) ->
        while_ ~loc ~attrs (sub.expr sub e1) (sub.expr sub e2)
    | Pexp_for (p, e1, e2, d, e3) ->
        for_ ~loc ~attrs (sub.pat sub p) (sub.expr sub e1) (sub.expr sub e2) d
          (sub.expr sub e3)
    | Pexp_coerce (e, t1, t2) ->
        coerce ~loc ~attrs (sub.expr sub e) (map_opt (sub.typ sub) t1)
          (sub.typ sub t2)
    | Pexp_constraint (e, t) ->
        constraint_ ~loc ~attrs (sub.expr sub e) (sub.typ sub t)
    | Pexp_send (e, s) ->
        send ~loc ~attrs (sub.expr sub e) (map_loc sub s)
    | Pexp_new lid -> new_ ~loc ~attrs (map_loc sub lid)
    | Pexp_setinstvar (s, e) ->
        setinstvar ~loc ~attrs (map_loc sub s) (sub.expr sub e)
    | Pexp_override sel ->
        override ~loc ~attrs
          (List.map (map_tuple (map_loc sub) (sub.expr sub)) sel)
    | Pexp_letmodule (s, me, e) ->
        letmodule ~loc ~attrs (map_loc sub s) (sub.module_expr sub me)
          (sub.expr sub e)
    | Pexp_letexception (cd, e) ->
        letexception ~loc ~attrs
          (sub.extension_constructor sub cd)
          (sub.expr sub e)
    | Pexp_assert e -> assert_ ~loc ~attrs (sub.expr sub e)
    | Pexp_lazy e -> lazy_ ~loc ~attrs (sub.expr sub e)
    | Pexp_poly (e, t) ->
        poly ~loc ~attrs (sub.expr sub e) (map_opt (sub.typ sub) t)
    | Pexp_object cls -> object_ ~loc ~attrs (sub.class_structure sub cls)
    | Pexp_newtype (s, e) ->
        newtype ~loc ~attrs (map_loc sub s) (sub.expr sub e)
    | Pexp_pack me -> pack ~loc ~attrs (sub.module_expr sub me)
    | Pexp_open (ovf, lid, e) ->
        open_ ~loc ~attrs ovf (map_loc sub lid) (sub.expr sub e)
    | Pexp_extension x -> extension ~loc ~attrs (sub.extension sub x)
    | Pexp_unreachable -> unreachable ~loc ~attrs ()
end
module P = struct
  (* Patterns *)
  (* Map a pattern, one case per [Ppat_*] constructor.  Constants and
     the closed flag of record patterns are copied unchanged. *)
  let map sub {ppat_desc = desc; ppat_loc = loc; ppat_attributes = attrs} =
    let open Pat in
    let loc = sub.location sub loc in
    let attrs = sub.attributes sub attrs in
    match desc with
    | Ppat_any -> any ~loc ~attrs ()
    | Ppat_var s -> var ~loc ~attrs (map_loc sub s)
    | Ppat_alias (p, s) -> alias ~loc ~attrs (sub.pat sub p) (map_loc sub s)
    | Ppat_constant c -> constant ~loc ~attrs c
    | Ppat_interval (c1, c2) -> interval ~loc ~attrs c1 c2
    | Ppat_tuple pl -> tuple ~loc ~attrs (List.map (sub.pat sub) pl)
    | Ppat_construct (l, p) ->
        construct ~loc ~attrs (map_loc sub l) (map_opt (sub.pat sub) p)
    | Ppat_variant (l, p) -> variant ~loc ~attrs l (map_opt (sub.pat sub) p)
    | Ppat_record (lpl, cf) ->
        record ~loc ~attrs
          (List.map (map_tuple (map_loc sub) (sub.pat sub)) lpl) cf
    | Ppat_array pl -> array ~loc ~attrs (List.map (sub.pat sub) pl)
    | Ppat_or (p1, p2) -> or_ ~loc ~attrs (sub.pat sub p1) (sub.pat sub p2)
    | Ppat_constraint (p, t) ->
        constraint_ ~loc ~attrs (sub.pat sub p) (sub.typ sub t)
    | Ppat_type s -> type_ ~loc ~attrs (map_loc sub s)
    | Ppat_lazy p -> lazy_ ~loc ~attrs (sub.pat sub p)
    | Ppat_unpack s -> unpack ~loc ~attrs (map_loc sub s)
    | Ppat_open (lid,p) -> open_ ~loc ~attrs (map_loc sub lid) (sub.pat sub p)
    | Ppat_exception p -> exception_ ~loc ~attrs (sub.pat sub p)
    | Ppat_extension x -> extension ~loc ~attrs (sub.extension sub x)
end
module CE = struct
  (* Value expressions for the class language *)
  (* Map a class expression, one case per [Pcl_*] constructor. *)
  let map sub {pcl_loc = loc; pcl_desc = desc; pcl_attributes = attrs} =
    let open Cl in
    let loc = sub.location sub loc in
    let attrs = sub.attributes sub attrs in
    match desc with
    | Pcl_constr (lid, tys) ->
        constr ~loc ~attrs (map_loc sub lid) (List.map (sub.typ sub) tys)
    | Pcl_structure s ->
        structure ~loc ~attrs (sub.class_structure sub s)
    | Pcl_fun (lab, e, p, ce) ->
        fun_ ~loc ~attrs lab
          (map_opt (sub.expr sub) e)
          (sub.pat sub p)
          (sub.class_expr sub ce)
    | Pcl_apply (ce, l) ->
        apply ~loc ~attrs (sub.class_expr sub ce)
          (List.map (map_snd (sub.expr sub)) l)
    | Pcl_let (r, vbs, ce) ->
        let_ ~loc ~attrs r (List.map (sub.value_binding sub) vbs)
          (sub.class_expr sub ce)
    | Pcl_constraint (ce, ct) ->
        constraint_ ~loc ~attrs (sub.class_expr sub ce) (sub.class_type sub ct)
    | Pcl_extension x -> extension ~loc ~attrs (sub.extension sub x)
  (* Map the body of a class field: a concrete expression (override
     flag preserved) or a virtual type. *)
  let map_kind sub = function
    | Cfk_concrete (o, e) -> Cfk_concrete (o, sub.expr sub e)
    | Cfk_virtual t -> Cfk_virtual (sub.typ sub t)
  (* Map a class-structure field (inherit, val, method, ...). *)
  let map_field sub {pcf_desc = desc; pcf_loc = loc; pcf_attributes = attrs} =
    let open Cf in
    let loc = sub.location sub loc in
    let attrs = sub.attributes sub attrs in
    match desc with
    | Pcf_inherit (o, ce, s) ->
        inherit_ ~loc ~attrs o (sub.class_expr sub ce)
          (map_opt (map_loc sub) s)
    | Pcf_val (s, m, k) -> val_ ~loc ~attrs (map_loc sub s) m (map_kind sub k)
    | Pcf_method (s, p, k) ->
        method_ ~loc ~attrs (map_loc sub s) p (map_kind sub k)
    | Pcf_constraint (t1, t2) ->
        constraint_ ~loc ~attrs (sub.typ sub t1) (sub.typ sub t2)
    | Pcf_initializer e -> initializer_ ~loc ~attrs (sub.expr sub e)
    | Pcf_attribute x -> attribute ~loc (sub.attribute sub x)
    | Pcf_extension x -> extension ~loc ~attrs (sub.extension sub x)
  (* Map a class body: self pattern plus all its fields. *)
  let map_structure sub {pcstr_self; pcstr_fields} =
    {
      pcstr_self = sub.pat sub pcstr_self;
      pcstr_fields = List.map (sub.class_field sub) pcstr_fields;
    }
  (* Shared shape of class declarations/descriptions/type declarations;
     [f] maps the payload ([class_expr] or [class_type]). *)
  let class_infos sub f {pci_virt; pci_params = pl; pci_name; pci_expr;
                         pci_loc; pci_attributes} =
    Ci.mk
      ~virt:pci_virt
      ~params:(List.map (map_fst (sub.typ sub)) pl)
      (map_loc sub pci_name)
      (f pci_expr)
      ~loc:(sub.location sub pci_loc)
      ~attrs:(sub.attributes sub pci_attributes)
end
(* Now, a generic AST mapper, to be extended to cover all kinds and
   cases of the OCaml grammar.  The default behavior of the mapper is
   the identity. *)
(* The identity mapper: every hook rebuilds its node by recursively
   invoking the hooks of [this] (not of [default_mapper] itself), so a
   user can override any single field and still have the whole tree
   traversed. *)
let default_mapper =
  {
    structure = (fun this l -> List.map (this.structure_item this) l);
    structure_item = M.map_structure_item;
    module_expr = M.map;
    signature = (fun this l -> List.map (this.signature_item this) l);
    signature_item = MT.map_signature_item;
    module_type = MT.map;
    with_constraint = MT.map_with_constraint;
    class_declaration =
      (fun this -> CE.class_infos this (this.class_expr this));
    class_expr = CE.map;
    class_field = CE.map_field;
    class_structure = CE.map_structure;
    class_type = CT.map;
    class_type_field = CT.map_field;
    class_signature = CT.map_signature;
    class_type_declaration =
      (fun this -> CE.class_infos this (this.class_type this));
    class_description =
      (fun this -> CE.class_infos this (this.class_type this));
    type_declaration = T.map_type_declaration;
    type_kind = T.map_type_kind;
    typ = T.map;
    type_extension = T.map_type_extension;
    extension_constructor = T.map_extension_constructor;
    value_description =
      (fun this {pval_name; pval_type; pval_prim; pval_loc;
                 pval_attributes} ->
        Val.mk
          (map_loc this pval_name)
          (this.typ this pval_type)
          ~attrs:(this.attributes this pval_attributes)
          ~loc:(this.location this pval_loc)
          ~prim:pval_prim
      );
    pat = P.map;
    expr = E.map;
    module_declaration =
      (fun this {pmd_name; pmd_type; pmd_attributes; pmd_loc} ->
         Md.mk
           (map_loc this pmd_name)
           (this.module_type this pmd_type)
           ~attrs:(this.attributes this pmd_attributes)
           ~loc:(this.location this pmd_loc)
      );
    module_type_declaration =
      (fun this {pmtd_name; pmtd_type; pmtd_attributes; pmtd_loc} ->
         Mtd.mk
           (map_loc this pmtd_name)
           ?typ:(map_opt (this.module_type this) pmtd_type)
           ~attrs:(this.attributes this pmtd_attributes)
           ~loc:(this.location this pmtd_loc)
      );
    module_binding =
      (fun this {pmb_name; pmb_expr; pmb_attributes; pmb_loc} ->
         Mb.mk (map_loc this pmb_name) (this.module_expr this pmb_expr)
           ~attrs:(this.attributes this pmb_attributes)
           ~loc:(this.location this pmb_loc)
      );
    open_description =
      (fun this {popen_lid; popen_override; popen_attributes; popen_loc} ->
         Opn.mk (map_loc this popen_lid)
           ~override:popen_override
           ~loc:(this.location this popen_loc)
           ~attrs:(this.attributes this popen_attributes)
      );
    include_description =
      (fun this {pincl_mod; pincl_attributes; pincl_loc} ->
         Incl.mk (this.module_type this pincl_mod)
           ~loc:(this.location this pincl_loc)
           ~attrs:(this.attributes this pincl_attributes)
      );
    include_declaration =
      (fun this {pincl_mod; pincl_attributes; pincl_loc} ->
         Incl.mk (this.module_expr this pincl_mod)
           ~loc:(this.location this pincl_loc)
           ~attrs:(this.attributes this pincl_attributes)
      );
    value_binding =
      (fun this {pvb_pat; pvb_expr; pvb_attributes; pvb_loc} ->
         Vb.mk
           (this.pat this pvb_pat)
           (this.expr this pvb_expr)
           ~loc:(this.location this pvb_loc)
           ~attrs:(this.attributes this pvb_attributes)
      );
    constructor_declaration =
      (fun this {pcd_name; pcd_args; pcd_res; pcd_loc; pcd_attributes} ->
        Type.constructor
          (map_loc this pcd_name)
          ~args:(T.map_constructor_arguments this pcd_args)
          ?res:(map_opt (this.typ this) pcd_res)
          ~loc:(this.location this pcd_loc)
          ~attrs:(this.attributes this pcd_attributes)
      );
    label_declaration =
      (fun this {pld_name; pld_type; pld_loc; pld_mutable; pld_attributes} ->
         Type.field
           (map_loc this pld_name)
           (this.typ this pld_type)
           ~mut:pld_mutable
           ~loc:(this.location this pld_loc)
           ~attrs:(this.attributes this pld_attributes)
      );
    cases = (fun this l -> List.map (this.case this) l);
    case =
      (fun this {pc_lhs; pc_guard; pc_rhs} ->
         {
           pc_lhs = this.pat this pc_lhs;
           pc_guard = map_opt (this.expr this) pc_guard;
           pc_rhs = this.expr this pc_rhs;
         }
      );
    (* Locations are left untouched by default. *)
    location = (fun _this l -> l);
    extension = (fun this (s, e) -> (map_loc this s, this.payload this e));
    attribute = (fun this (s, e) -> (map_loc this s, this.payload this e));
    attributes = (fun this l -> List.map (this.attribute this) l);
    payload =
      (fun this -> function
         | PStr x -> PStr (this.structure this x)
         | PSig x -> PSig (this.signature this x)
         | PTyp x -> PTyp (this.typ this x)
         | PPat (x, g) -> PPat (this.pat this x, map_opt (this.expr this) g)
      );
  }
(* Encode a [Location.error] as an "ocaml.error" extension node: the
   main message, then the highlight message, then each sub-error as a
   nested extension (hence [rec]). *)
let rec extension_of_error {loc; msg; if_highlight; sub} =
  { loc; txt = "ocaml.error" },
  PStr ([Str.eval (Exp.constant (Pconst_string (msg, None)));
         Str.eval (Exp.constant (Pconst_string (if_highlight, None)))] @
        (List.map (fun ext -> Str.extension (extension_of_error ext)) sub))
(* Encode warning text [s] at [loc] as an "ocaml.ppwarning" attribute. *)
let attribute_of_warning loc s =
  { loc; txt = "ocaml.ppwarning" },
  PStr ([Str.eval ~loc (Exp.constant (Pconst_string (s, None)))])
(* String-keyed map.  [Map.Make (String)] uses [String.compare], which
   orders strings exactly like the polymorphic [compare] the previous
   inline functor argument used, but without going through the generic
   comparison runtime. *)
module StringMap = Map.Make (String)
(* Global cookie table: values set by the driver / rewriters and
   round-tripped through the ppx context attribute. *)
let cookies = ref StringMap.empty
(* [get_cookie k] is the value bound to [k], or [None] if unset.
   [find_opt] (stdlib >= 4.05, matching this source tree) replaces the
   previous [try ... with Not_found] dance. *)
let get_cookie k = StringMap.find_opt k !cookies
(* [set_cookie k v] binds [k] to [v], replacing any previous binding. *)
let set_cookie k v =
  cookies := StringMap.add k v !cookies
(* Name of the tool driving the rewriting, as recorded in the ppx
   context; "_none_" until [PpxContext.restore] fills it in. *)
let tool_name_ref = ref "_none_"
let tool_name () = !tool_name_ref
(* Serialisation of global compiler state (tool name, include dirs,
   load path, open modules, package, debug flag, cookies) to and from
   a [@@@ocaml.ppx.context] attribute, so the state survives the round
   trip through external rewriter processes. *)
module PpxContext = struct
  open Longident
  open Asttypes
  open Ast_helper
  (* Ghost-located identifier used for the encoded record fields. *)
  let lid name = { txt = Lident name; loc = Location.none }
  let make_string x = Exp.constant (Pconst_string (x, None))
  let make_bool x =
    if x
    then Exp.construct (lid "true") None
    else Exp.construct (lid "false") None
  (* Encode a list as nested (::)/[] constructor applications. *)
  let rec make_list f lst =
    match lst with
    | x :: rest ->
      Exp.construct (lid "::") (Some (Exp.tuple [f x; make_list f rest]))
    | [] ->
      Exp.construct (lid "[]") None
  let make_pair f1 f2 (x1, x2) =
    Exp.tuple [f1 x1; f2 x2]
  let make_option f opt =
    match opt with
    | Some x -> Exp.construct (lid "Some") (Some (f x))
    | None -> Exp.construct (lid "None") None
  (* Encode the current cookie table as a (name, expression) list. *)
  let get_cookies () =
    lid "cookies",
    make_list (make_pair make_string (fun x -> x))
      (StringMap.bindings !cookies)
  (* Wrap encoded fields into the attribute payload. *)
  let mk fields =
    { txt = "ocaml.ppx.context"; loc = Location.none },
    Parsetree.PStr [Str.eval (Exp.record fields None)]
  (* Snapshot the current global state into an attribute. *)
  let make ~tool_name () =
    let fields =
      [
        lid "tool_name", make_string tool_name;
        lid "include_dirs", make_list make_string !Clflags.include_dirs;
        lid "load_path", make_list make_string !Config.load_path;
        lid "open_modules", make_list make_string !Clflags.open_modules;
        lid "for_package", make_option make_string !Clflags.for_package;
        lid "debug", make_bool !Clflags.debug;
        get_cookies ()
      ]
    in
    mk fields
  (* Extract the record fields from a context payload; anything else
     is a malformed attribute and is reported as an internal error. *)
  let get_fields = function
    | PStr [{pstr_desc = Pstr_eval
                 ({ pexp_desc = Pexp_record (fields, None) }, [])}] ->
        fields
    | _ ->
        raise_errorf "Internal error: invalid [@@@ocaml.ppx.context] syntax"
  (* Decode the fields and write them back into the global refs.
     Unknown field names are silently ignored for forward
     compatibility. *)
  let restore fields =
    let field name payload =
      let rec get_string = function
        | { pexp_desc = Pexp_constant (Pconst_string (str, None)) } -> str
        | _ -> raise_errorf "Internal error: invalid [@@@ocaml.ppx.context \
                             { %s }] string syntax" name
      and get_bool pexp =
        match pexp with
        | {pexp_desc = Pexp_construct ({txt = Longident.Lident "true"},
                                       None)} ->
            true
        | {pexp_desc = Pexp_construct ({txt = Longident.Lident "false"},
                                       None)} ->
            false
        | _ -> raise_errorf "Internal error: invalid [@@@ocaml.ppx.context \
                             { %s }] bool syntax" name
      and get_list elem = function
        | {pexp_desc =
             Pexp_construct ({txt = Longident.Lident "::"},
                             Some {pexp_desc = Pexp_tuple [exp; rest]}) } ->
            elem exp :: get_list elem rest
        | {pexp_desc =
             Pexp_construct ({txt = Longident.Lident "[]"}, None)} ->
            []
        | _ -> raise_errorf "Internal error: invalid [@@@ocaml.ppx.context \
                             { %s }] list syntax" name
      and get_pair f1 f2 = function
        | {pexp_desc = Pexp_tuple [e1; e2]} ->
            (f1 e1, f2 e2)
        | _ -> raise_errorf "Internal error: invalid [@@@ocaml.ppx.context \
                             { %s }] pair syntax" name
      and get_option elem = function
        | { pexp_desc =
              Pexp_construct ({ txt = Longident.Lident "Some" }, Some exp) } ->
            Some (elem exp)
        | { pexp_desc =
              Pexp_construct ({ txt = Longident.Lident "None" }, None) } ->
            None
        | _ -> raise_errorf "Internal error: invalid [@@@ocaml.ppx.context \
                             { %s }] option syntax" name
      in
      match name with
      | "tool_name" ->
          tool_name_ref := get_string payload
      | "include_dirs" ->
          Clflags.include_dirs := get_list get_string payload
      | "load_path" ->
          Config.load_path := get_list get_string payload
      | "open_modules" ->
          Clflags.open_modules := get_list get_string payload
      | "for_package" ->
          Clflags.for_package := get_option get_string payload
      | "debug" ->
          Clflags.debug := get_bool payload
      | "cookies" ->
          let l = get_list (get_pair get_string (fun x -> x)) payload in
          cookies :=
            List.fold_left
              (fun s (k, v) -> StringMap.add k v s) StringMap.empty
              l
      | _ ->
          ()
    in
    List.iter (function ({txt=Lident name}, x) -> field name x | _ -> ()) fields
  (* Replace the "cookies" field with the current cookie table, keeping
     all other fields as-is. *)
  let update_cookies fields =
    let fields =
      List.filter
        (function ({txt=Lident "cookies"}, _) -> false | _ -> true)
        fields
    in
    fields @ [get_cookies ()]
end
(* Build the [@@@ocaml.ppx.context] attribute capturing the current
   global compiler state; alias of [PpxContext.make]. *)
let ppx_context = PpxContext.make
(* Turn an exception into an error-extension node; re-raised if
   [error_of_exn] does not recognise it as a compiler error. *)
let ext_of_exn exn =
  match error_of_exn exn with
  | Some error -> extension_of_error error
  | None -> raise exn
(* Read a marshalled AST from [source], run [mapper ()] over it, and
   marshal the result to [target].  Handles both implementation and
   interface files (dispatching on the magic number).  The ppx context
   attribute, if present at the head of the AST, is stripped, used to
   restore global state, refreshed with the current cookies and put
   back.  Any exception raised by the mapper is converted into an
   error-extension node embedded in the output AST. *)
let apply_lazy ~source ~target mapper =
  let implem ast =
    let fields, ast =
      match ast with
      | {pstr_desc = Pstr_attribute ({txt = "ocaml.ppx.context"}, x)} :: l ->
          PpxContext.get_fields x, l
      | _ -> [], ast
    in
    PpxContext.restore fields;
    let ast =
      try
        let mapper = mapper () in
        mapper.structure mapper ast
      with exn ->
        [{pstr_desc = Pstr_extension (ext_of_exn exn, []);
          pstr_loc = Location.none}]
    in
    let fields = PpxContext.update_cookies fields in
    Str.attribute (PpxContext.mk fields) :: ast
  in
  let iface ast =
    let fields, ast =
      match ast with
      | {psig_desc = Psig_attribute ({txt = "ocaml.ppx.context"}, x)} :: l ->
          PpxContext.get_fields x, l
      | _ -> [], ast
    in
    PpxContext.restore fields;
    let ast =
      try
        let mapper = mapper () in
        mapper.signature mapper ast
      with exn ->
        [{psig_desc = Psig_extension (ext_of_exn exn, []);
          psig_loc = Location.none}]
    in
    let fields = PpxContext.update_cookies fields in
    Sig.attribute (PpxContext.mk fields) :: ast
  in
  let ic = open_in_bin source in
  (* Both magic numbers have the same length, so reading the length of
     the implementation magic works for interfaces too. *)
  let magic =
    really_input_string ic (String.length Config.ast_impl_magic_number)
  in
  let rewrite transform =
    Location.input_name := input_value ic;
    let ast = input_value ic in
    close_in ic;
    let ast = transform ast in
    let oc = open_out_bin target in
    output_string oc magic;
    output_value oc !Location.input_name;
    output_value oc ast;
    close_out oc
  and fail () =
    close_in ic;
    failwith "Ast_mapper: OCaml version mismatch or malformed input";
  in
  if magic = Config.ast_impl_magic_number then
    rewrite (implem : structure -> structure)
  else if magic = Config.ast_intf_magic_number then
    rewrite (iface : signature -> signature)
  else fail ()
(* Strip a leading [@@@ocaml.ppx.context] attribute from a structure;
   when [restore] is true, replay the global state recorded in it
   first.  Structures without such a head item are returned as-is. *)
let drop_ppx_context_str ~restore items =
  match items with
  | {pstr_desc =
       Pstr_attribute ({Location.txt = "ocaml.ppx.context"}, payload)}
    :: rest ->
      (if restore then PpxContext.restore (PpxContext.get_fields payload));
      rest
  | _ -> items
(* Signature counterpart of [drop_ppx_context_str]: strip a leading
   [@@@ocaml.ppx.context] attribute, optionally restoring the state it
   records. *)
let drop_ppx_context_sig ~restore items =
  match items with
  | {psig_desc =
       Psig_attribute ({Location.txt = "ocaml.ppx.context"}, payload)}
    :: rest ->
      (if restore then PpxContext.restore (PpxContext.get_fields payload));
      rest
  | _ -> items
(* Prepend a fresh [@@@ocaml.ppx.context] attribute (capturing the
   current global state under [tool_name]) to a structure. *)
let add_ppx_context_str ~tool_name ast =
  let ctx = ppx_context ~tool_name () in
  Ast_helper.Str.attribute ctx :: ast
(* Signature counterpart of [add_ppx_context_str]. *)
let add_ppx_context_sig ~tool_name ast =
  let ctx = ppx_context ~tool_name () in
  Ast_helper.Sig.attribute ctx :: ast
(* Apply an eagerly-built [mapper] to the marshalled AST in [source],
   writing the result to [target]; thin wrapper over [apply_lazy]. *)
let apply ~source ~target mapper =
  let delayed () = mapper in
  apply_lazy ~source ~target delayed
(* Command-line entry point for a standalone ppx rewriter:
   [prog <extra_args> <infile> <outfile>].  The extra arguments
   (argv.(1) .. argv.(n-3)) are passed to [mapper]; the last two
   arguments name the input and output AST files.  An exception raised
   while constructing the mapper is captured and re-raised during
   rewriting, so it is reported as an error node inside the output AST
   rather than killing the process (PR #6463). *)
let run_main mapper =
  try
    let a = Sys.argv in
    let n = Array.length a in
    if n > 2 then
      let mapper () =
        try mapper (Array.to_list (Array.sub a 1 (n - 3)))
        with exn ->
          (* PR #6463 *)
          let f _ _ = raise exn in
          {default_mapper with structure = f; signature = f}
      in
      apply_lazy ~source:a.(n - 2) ~target:a.(n - 1) mapper
    else begin
      Printf.eprintf "Usage: %s [extra_args] <infile> <outfile>\n%!"
                     Sys.executable_name;
      exit 2
    end
  with exn ->
    prerr_endline (Printexc.to_string exn);
    exit 2
(* Hook invoked by [register]; the default runs the rewriter as the
   main program, but drivers may replace it to collect rewriters. *)
let register_function = ref (fun _name f -> run_main f)
(* Register a named rewriter through the current [register_function]. *)
let register name f = !register_function name f
| null | https://raw.githubusercontent.com/AbstractMachinesLab/caramel/7d4e505d6032e22a630d2e3bd7085b77d0efbb0c/vendor/ocaml-lsp-1.4.0/ocaml-lsp-server/vendor/merlin/upstream/ocaml_405/parsing/ast_mapper.ml | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Ensure that record patterns don't miss any field.
Type expressions for the core language
Type expressions for the class language
Type expressions for the module language
Value expressions for the module language
Value expressions for the core language
Patterns
Value expressions for the class language
PR #6463 | , LexiFi
Copyright 2012 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
A generic Parsetree mapping class
[ @@@ocaml.warning " +9 " ]
( * Ensure that record patterns do n't miss any field .
[@@@ocaml.warning "+9"]
*)
open Parsetree
open Ast_helper
open Location
type mapper = {
attribute: mapper -> attribute -> attribute;
attributes: mapper -> attribute list -> attribute list;
case: mapper -> case -> case;
cases: mapper -> case list -> case list;
class_declaration: mapper -> class_declaration -> class_declaration;
class_description: mapper -> class_description -> class_description;
class_expr: mapper -> class_expr -> class_expr;
class_field: mapper -> class_field -> class_field;
class_signature: mapper -> class_signature -> class_signature;
class_structure: mapper -> class_structure -> class_structure;
class_type: mapper -> class_type -> class_type;
class_type_declaration: mapper -> class_type_declaration
-> class_type_declaration;
class_type_field: mapper -> class_type_field -> class_type_field;
constructor_declaration: mapper -> constructor_declaration
-> constructor_declaration;
expr: mapper -> expression -> expression;
extension: mapper -> extension -> extension;
extension_constructor: mapper -> extension_constructor
-> extension_constructor;
include_declaration: mapper -> include_declaration -> include_declaration;
include_description: mapper -> include_description -> include_description;
label_declaration: mapper -> label_declaration -> label_declaration;
location: mapper -> Location.t -> Location.t;
module_binding: mapper -> module_binding -> module_binding;
module_declaration: mapper -> module_declaration -> module_declaration;
module_expr: mapper -> module_expr -> module_expr;
module_type: mapper -> module_type -> module_type;
module_type_declaration: mapper -> module_type_declaration
-> module_type_declaration;
open_description: mapper -> open_description -> open_description;
pat: mapper -> pattern -> pattern;
payload: mapper -> payload -> payload;
signature: mapper -> signature -> signature;
signature_item: mapper -> signature_item -> signature_item;
structure: mapper -> structure -> structure;
structure_item: mapper -> structure_item -> structure_item;
typ: mapper -> core_type -> core_type;
type_declaration: mapper -> type_declaration -> type_declaration;
type_extension: mapper -> type_extension -> type_extension;
type_kind: mapper -> type_kind -> type_kind;
value_binding: mapper -> value_binding -> value_binding;
value_description: mapper -> value_description -> value_description;
with_constraint: mapper -> with_constraint -> with_constraint;
}
let map_fst f (x, y) = (f x, y)
let map_snd f (x, y) = (x, f y)
let map_tuple f1 f2 (x, y) = (f1 x, f2 y)
let map_tuple3 f1 f2 f3 (x, y, z) = (f1 x, f2 y, f3 z)
let map_opt f = function None -> None | Some x -> Some (f x)
let map_loc sub {loc; txt} = {loc = sub.location sub loc; txt}
module T = struct
let row_field sub = function
| Rtag (l, attrs, b, tl) ->
Rtag (l, sub.attributes sub attrs, b, List.map (sub.typ sub) tl)
| Rinherit t -> Rinherit (sub.typ sub t)
let map sub {ptyp_desc = desc; ptyp_loc = loc; ptyp_attributes = attrs} =
let open Typ in
let loc = sub.location sub loc in
let attrs = sub.attributes sub attrs in
match desc with
| Ptyp_any -> any ~loc ~attrs ()
| Ptyp_var s -> var ~loc ~attrs s
| Ptyp_arrow (lab, t1, t2) ->
arrow ~loc ~attrs lab (sub.typ sub t1) (sub.typ sub t2)
| Ptyp_tuple tyl -> tuple ~loc ~attrs (List.map (sub.typ sub) tyl)
| Ptyp_constr (lid, tl) ->
constr ~loc ~attrs (map_loc sub lid) (List.map (sub.typ sub) tl)
| Ptyp_object (l, o) ->
let f (s, a, t) =
(map_loc sub s, sub.attributes sub a, sub.typ sub t) in
object_ ~loc ~attrs (List.map f l) o
| Ptyp_class (lid, tl) ->
class_ ~loc ~attrs (map_loc sub lid) (List.map (sub.typ sub) tl)
| Ptyp_alias (t, s) -> alias ~loc ~attrs (sub.typ sub t) s
| Ptyp_variant (rl, b, ll) ->
variant ~loc ~attrs (List.map (row_field sub) rl) b ll
| Ptyp_poly (sl, t) -> poly ~loc ~attrs
(List.map (map_loc sub) sl) (sub.typ sub t)
| Ptyp_package (lid, l) ->
package ~loc ~attrs (map_loc sub lid)
(List.map (map_tuple (map_loc sub) (sub.typ sub)) l)
| Ptyp_extension x -> extension ~loc ~attrs (sub.extension sub x)
let map_type_declaration sub
{ptype_name; ptype_params; ptype_cstrs;
ptype_kind;
ptype_private;
ptype_manifest;
ptype_attributes;
ptype_loc} =
Type.mk (map_loc sub ptype_name)
~params:(List.map (map_fst (sub.typ sub)) ptype_params)
~priv:ptype_private
~cstrs:(List.map
(map_tuple3 (sub.typ sub) (sub.typ sub) (sub.location sub))
ptype_cstrs)
~kind:(sub.type_kind sub ptype_kind)
?manifest:(map_opt (sub.typ sub) ptype_manifest)
~loc:(sub.location sub ptype_loc)
~attrs:(sub.attributes sub ptype_attributes)
let map_type_kind sub = function
| Ptype_abstract -> Ptype_abstract
| Ptype_variant l ->
Ptype_variant (List.map (sub.constructor_declaration sub) l)
| Ptype_record l -> Ptype_record (List.map (sub.label_declaration sub) l)
| Ptype_open -> Ptype_open
let map_constructor_arguments sub = function
| Pcstr_tuple l -> Pcstr_tuple (List.map (sub.typ sub) l)
| Pcstr_record l ->
Pcstr_record (List.map (sub.label_declaration sub) l)
let map_type_extension sub
{ptyext_path; ptyext_params;
ptyext_constructors;
ptyext_private;
ptyext_attributes} =
Te.mk
(map_loc sub ptyext_path)
(List.map (sub.extension_constructor sub) ptyext_constructors)
~params:(List.map (map_fst (sub.typ sub)) ptyext_params)
~priv:ptyext_private
~attrs:(sub.attributes sub ptyext_attributes)
let map_extension_constructor_kind sub = function
Pext_decl(ctl, cto) ->
Pext_decl(map_constructor_arguments sub ctl, map_opt (sub.typ sub) cto)
| Pext_rebind li ->
Pext_rebind (map_loc sub li)
let map_extension_constructor sub
{pext_name;
pext_kind;
pext_loc;
pext_attributes} =
Te.constructor
(map_loc sub pext_name)
(map_extension_constructor_kind sub pext_kind)
~loc:(sub.location sub pext_loc)
~attrs:(sub.attributes sub pext_attributes)
end
(* Mapper cases for class types. *)
module CT = struct
  (* Map a class-type expression: visit location and attributes first, then
     rebuild the node through the [Cty] smart constructors, recursing via
     the [sub] mapper record. *)
  let map sub {pcty_loc = loc; pcty_desc = desc; pcty_attributes = attrs} =
    let open Cty in
    let loc = sub.location sub loc in
    let attrs = sub.attributes sub attrs in
    match desc with
    | Pcty_constr (lid, tys) ->
        constr ~loc ~attrs (map_loc sub lid) (List.map (sub.typ sub) tys)
    | Pcty_signature x -> signature ~loc ~attrs (sub.class_signature sub x)
    | Pcty_arrow (lab, t, ct) ->
        arrow ~loc ~attrs lab (sub.typ sub t) (sub.class_type sub ct)
    | Pcty_extension x -> extension ~loc ~attrs (sub.extension sub x)
  (* Map one class-signature field: inherit, val, method, constraint,
     attribute or extension. *)
  let map_field sub {pctf_desc = desc; pctf_loc = loc; pctf_attributes = attrs}
    =
    let open Ctf in
    let loc = sub.location sub loc in
    let attrs = sub.attributes sub attrs in
    match desc with
    | Pctf_inherit ct -> inherit_ ~loc ~attrs (sub.class_type sub ct)
    | Pctf_val (s, m, v, t) ->
        val_ ~loc ~attrs (map_loc sub s) m v (sub.typ sub t)
    | Pctf_method (s, p, v, t) ->
        method_ ~loc ~attrs (map_loc sub s) p v (sub.typ sub t)
    | Pctf_constraint (t1, t2) ->
        constraint_ ~loc ~attrs (sub.typ sub t1) (sub.typ sub t2)
    | Pctf_attribute x -> attribute ~loc (sub.attribute sub x)
    | Pctf_extension x -> extension ~loc ~attrs (sub.extension sub x)
  (* Map a whole class signature: its self type plus every field. *)
  let map_signature sub {pcsig_self; pcsig_fields} =
    Csig.mk
      (sub.typ sub pcsig_self)
      (List.map (sub.class_type_field sub) pcsig_fields)
end
(* Mapper cases for module types, "with" constraints and signature items. *)
module MT = struct
  (* Map a module-type expression via the [Mty] smart constructors. *)
  let map sub {pmty_desc = desc; pmty_loc = loc; pmty_attributes = attrs} =
    let open Mty in
    let loc = sub.location sub loc in
    let attrs = sub.attributes sub attrs in
    match desc with
    | Pmty_ident s -> ident ~loc ~attrs (map_loc sub s)
    | Pmty_alias s -> alias ~loc ~attrs (map_loc sub s)
    | Pmty_signature sg -> signature ~loc ~attrs (sub.signature sub sg)
    | Pmty_functor (s, mt1, mt2) ->
        functor_ ~loc ~attrs (map_loc sub s)
          (Misc.may_map (sub.module_type sub) mt1)
          (sub.module_type sub mt2)
    | Pmty_with (mt, l) ->
        with_ ~loc ~attrs (sub.module_type sub mt)
          (List.map (sub.with_constraint sub) l)
    | Pmty_typeof me -> typeof_ ~loc ~attrs (sub.module_expr sub me)
    | Pmty_extension x -> extension ~loc ~attrs (sub.extension sub x)
  (* Map a [with ...] constraint: type/module equations and substitutions. *)
  let map_with_constraint sub = function
    | Pwith_type (lid, d) ->
        Pwith_type (map_loc sub lid, sub.type_declaration sub d)
    | Pwith_module (lid, lid2) ->
        Pwith_module (map_loc sub lid, map_loc sub lid2)
    | Pwith_typesubst d -> Pwith_typesubst (sub.type_declaration sub d)
    | Pwith_modsubst (s, lid) ->
        Pwith_modsubst (map_loc sub s, map_loc sub lid)
  (* Map one signature item via the [Sig] smart constructors. *)
  let map_signature_item sub {psig_desc = desc; psig_loc = loc} =
    let open Sig in
    let loc = sub.location sub loc in
    match desc with
    | Psig_value vd -> value ~loc (sub.value_description sub vd)
    | Psig_type (rf, l) -> type_ ~loc rf (List.map (sub.type_declaration sub) l)
    | Psig_typext te -> type_extension ~loc (sub.type_extension sub te)
    | Psig_exception ed -> exception_ ~loc (sub.extension_constructor sub ed)
    | Psig_module x -> module_ ~loc (sub.module_declaration sub x)
    | Psig_recmodule l ->
        rec_module ~loc (List.map (sub.module_declaration sub) l)
    | Psig_modtype x -> modtype ~loc (sub.module_type_declaration sub x)
    | Psig_open x -> open_ ~loc (sub.open_description sub x)
    | Psig_include x -> include_ ~loc (sub.include_description sub x)
    | Psig_class l -> class_ ~loc (List.map (sub.class_description sub) l)
    | Psig_class_type l ->
        class_type ~loc (List.map (sub.class_type_declaration sub) l)
    | Psig_extension (x, attrs) ->
        extension ~loc (sub.extension sub x) ~attrs:(sub.attributes sub attrs)
    | Psig_attribute x -> attribute ~loc (sub.attribute sub x)
end
(* Mapper cases for module expressions and structure items. *)
module M = struct
  (* Map a module expression via the [Mod] smart constructors. *)
  let map sub {pmod_loc = loc; pmod_desc = desc; pmod_attributes = attrs} =
    let open Mod in
    let loc = sub.location sub loc in
    let attrs = sub.attributes sub attrs in
    match desc with
    | Pmod_ident x -> ident ~loc ~attrs (map_loc sub x)
    | Pmod_structure str -> structure ~loc ~attrs (sub.structure sub str)
    | Pmod_functor (arg, arg_ty, body) ->
        functor_ ~loc ~attrs (map_loc sub arg)
          (Misc.may_map (sub.module_type sub) arg_ty)
          (sub.module_expr sub body)
    | Pmod_apply (m1, m2) ->
        apply ~loc ~attrs (sub.module_expr sub m1) (sub.module_expr sub m2)
    | Pmod_constraint (m, mty) ->
        constraint_ ~loc ~attrs (sub.module_expr sub m)
          (sub.module_type sub mty)
    | Pmod_unpack e -> unpack ~loc ~attrs (sub.expr sub e)
    | Pmod_extension x -> extension ~loc ~attrs (sub.extension sub x)
  (* Map one structure item via the [Str] smart constructors. *)
  let map_structure_item sub {pstr_loc = loc; pstr_desc = desc} =
    let open Str in
    let loc = sub.location sub loc in
    match desc with
    | Pstr_eval (x, attrs) ->
        eval ~loc ~attrs:(sub.attributes sub attrs) (sub.expr sub x)
    | Pstr_value (r, vbs) -> value ~loc r (List.map (sub.value_binding sub) vbs)
    | Pstr_primitive vd -> primitive ~loc (sub.value_description sub vd)
    | Pstr_type (rf, l) -> type_ ~loc rf (List.map (sub.type_declaration sub) l)
    | Pstr_typext te -> type_extension ~loc (sub.type_extension sub te)
    | Pstr_exception ed -> exception_ ~loc (sub.extension_constructor sub ed)
    | Pstr_module x -> module_ ~loc (sub.module_binding sub x)
    | Pstr_recmodule l -> rec_module ~loc (List.map (sub.module_binding sub) l)
    | Pstr_modtype x -> modtype ~loc (sub.module_type_declaration sub x)
    | Pstr_open x -> open_ ~loc (sub.open_description sub x)
    | Pstr_class l -> class_ ~loc (List.map (sub.class_declaration sub) l)
    | Pstr_class_type l ->
        class_type ~loc (List.map (sub.class_type_declaration sub) l)
    | Pstr_include x -> include_ ~loc (sub.include_declaration sub x)
    | Pstr_extension (x, attrs) ->
        extension ~loc (sub.extension sub x) ~attrs:(sub.attributes sub attrs)
    | Pstr_attribute x -> attribute ~loc (sub.attribute sub x)
end
(* Mapper case for expressions: one branch per [Pexp_*] constructor,
   rebuilding each node with the [Exp] smart constructors. *)
module E = struct
  let map sub {pexp_loc = loc; pexp_desc = desc; pexp_attributes = attrs} =
    let open Exp in
    (* Location and attributes are mapped once, up front, for every case. *)
    let loc = sub.location sub loc in
    let attrs = sub.attributes sub attrs in
    match desc with
    | Pexp_ident x -> ident ~loc ~attrs (map_loc sub x)
    | Pexp_constant x -> constant ~loc ~attrs x
    | Pexp_let (r, vbs, e) ->
        let_ ~loc ~attrs r (List.map (sub.value_binding sub) vbs)
          (sub.expr sub e)
    | Pexp_fun (lab, def, p, e) ->
        fun_ ~loc ~attrs lab (map_opt (sub.expr sub) def) (sub.pat sub p)
          (sub.expr sub e)
    | Pexp_function pel -> function_ ~loc ~attrs (sub.cases sub pel)
    | Pexp_apply (e, l) ->
        apply ~loc ~attrs (sub.expr sub e) (List.map (map_snd (sub.expr sub)) l)
    | Pexp_match (e, pel) ->
        match_ ~loc ~attrs (sub.expr sub e) (sub.cases sub pel)
    | Pexp_try (e, pel) -> try_ ~loc ~attrs (sub.expr sub e) (sub.cases sub pel)
    | Pexp_tuple el -> tuple ~loc ~attrs (List.map (sub.expr sub) el)
    | Pexp_construct (lid, arg) ->
        construct ~loc ~attrs (map_loc sub lid) (map_opt (sub.expr sub) arg)
    | Pexp_variant (lab, eo) ->
        variant ~loc ~attrs lab (map_opt (sub.expr sub) eo)
    | Pexp_record (l, eo) ->
        record ~loc ~attrs (List.map (map_tuple (map_loc sub) (sub.expr sub)) l)
          (map_opt (sub.expr sub) eo)
    | Pexp_field (e, lid) ->
        field ~loc ~attrs (sub.expr sub e) (map_loc sub lid)
    | Pexp_setfield (e1, lid, e2) ->
        setfield ~loc ~attrs (sub.expr sub e1) (map_loc sub lid)
          (sub.expr sub e2)
    | Pexp_array el -> array ~loc ~attrs (List.map (sub.expr sub) el)
    | Pexp_ifthenelse (e1, e2, e3) ->
        ifthenelse ~loc ~attrs (sub.expr sub e1) (sub.expr sub e2)
          (map_opt (sub.expr sub) e3)
    | Pexp_sequence (e1, e2) ->
        sequence ~loc ~attrs (sub.expr sub e1) (sub.expr sub e2)
    | Pexp_while (e1, e2) ->
        while_ ~loc ~attrs (sub.expr sub e1) (sub.expr sub e2)
    | Pexp_for (p, e1, e2, d, e3) ->
        for_ ~loc ~attrs (sub.pat sub p) (sub.expr sub e1) (sub.expr sub e2) d
          (sub.expr sub e3)
    | Pexp_coerce (e, t1, t2) ->
        coerce ~loc ~attrs (sub.expr sub e) (map_opt (sub.typ sub) t1)
          (sub.typ sub t2)
    | Pexp_constraint (e, t) ->
        constraint_ ~loc ~attrs (sub.expr sub e) (sub.typ sub t)
    | Pexp_send (e, s) ->
        send ~loc ~attrs (sub.expr sub e) (map_loc sub s)
    | Pexp_new lid -> new_ ~loc ~attrs (map_loc sub lid)
    | Pexp_setinstvar (s, e) ->
        setinstvar ~loc ~attrs (map_loc sub s) (sub.expr sub e)
    | Pexp_override sel ->
        override ~loc ~attrs
          (List.map (map_tuple (map_loc sub) (sub.expr sub)) sel)
    | Pexp_letmodule (s, me, e) ->
        letmodule ~loc ~attrs (map_loc sub s) (sub.module_expr sub me)
          (sub.expr sub e)
    | Pexp_letexception (cd, e) ->
        letexception ~loc ~attrs
          (sub.extension_constructor sub cd)
          (sub.expr sub e)
    | Pexp_assert e -> assert_ ~loc ~attrs (sub.expr sub e)
    | Pexp_lazy e -> lazy_ ~loc ~attrs (sub.expr sub e)
    | Pexp_poly (e, t) ->
        poly ~loc ~attrs (sub.expr sub e) (map_opt (sub.typ sub) t)
    | Pexp_object cls -> object_ ~loc ~attrs (sub.class_structure sub cls)
    | Pexp_newtype (s, e) ->
        newtype ~loc ~attrs (map_loc sub s) (sub.expr sub e)
    | Pexp_pack me -> pack ~loc ~attrs (sub.module_expr sub me)
    | Pexp_open (ovf, lid, e) ->
        open_ ~loc ~attrs ovf (map_loc sub lid) (sub.expr sub e)
    | Pexp_extension x -> extension ~loc ~attrs (sub.extension sub x)
    | Pexp_unreachable -> unreachable ~loc ~attrs ()
end
(* Mapper case for patterns: one branch per [Ppat_*] constructor. *)
module P = struct
  let map sub {ppat_desc = desc; ppat_loc = loc; ppat_attributes = attrs} =
    let open Pat in
    let loc = sub.location sub loc in
    let attrs = sub.attributes sub attrs in
    match desc with
    | Ppat_any -> any ~loc ~attrs ()
    | Ppat_var s -> var ~loc ~attrs (map_loc sub s)
    | Ppat_alias (p, s) -> alias ~loc ~attrs (sub.pat sub p) (map_loc sub s)
    | Ppat_constant c -> constant ~loc ~attrs c
    | Ppat_interval (c1, c2) -> interval ~loc ~attrs c1 c2
    | Ppat_tuple pl -> tuple ~loc ~attrs (List.map (sub.pat sub) pl)
    | Ppat_construct (l, p) ->
        construct ~loc ~attrs (map_loc sub l) (map_opt (sub.pat sub) p)
    | Ppat_variant (l, p) -> variant ~loc ~attrs l (map_opt (sub.pat sub) p)
    | Ppat_record (lpl, cf) ->
        record ~loc ~attrs
          (List.map (map_tuple (map_loc sub) (sub.pat sub)) lpl) cf
    | Ppat_array pl -> array ~loc ~attrs (List.map (sub.pat sub) pl)
    | Ppat_or (p1, p2) -> or_ ~loc ~attrs (sub.pat sub p1) (sub.pat sub p2)
    | Ppat_constraint (p, t) ->
        constraint_ ~loc ~attrs (sub.pat sub p) (sub.typ sub t)
    | Ppat_type s -> type_ ~loc ~attrs (map_loc sub s)
    | Ppat_lazy p -> lazy_ ~loc ~attrs (sub.pat sub p)
    | Ppat_unpack s -> unpack ~loc ~attrs (map_loc sub s)
    | Ppat_open (lid,p) -> open_ ~loc ~attrs (map_loc sub lid) (sub.pat sub p)
    | Ppat_exception p -> exception_ ~loc ~attrs (sub.pat sub p)
    | Ppat_extension x -> extension ~loc ~attrs (sub.extension sub x)
end
(* Mapper cases for class expressions, class fields and class_infos. *)
module CE = struct
  (* Map a class expression via the [Cl] smart constructors. *)
  let map sub {pcl_loc = loc; pcl_desc = desc; pcl_attributes = attrs} =
    let open Cl in
    let loc = sub.location sub loc in
    let attrs = sub.attributes sub attrs in
    match desc with
    | Pcl_constr (lid, tys) ->
        constr ~loc ~attrs (map_loc sub lid) (List.map (sub.typ sub) tys)
    | Pcl_structure s ->
        structure ~loc ~attrs (sub.class_structure sub s)
    | Pcl_fun (lab, e, p, ce) ->
        fun_ ~loc ~attrs lab
          (map_opt (sub.expr sub) e)
          (sub.pat sub p)
          (sub.class_expr sub ce)
    | Pcl_apply (ce, l) ->
        apply ~loc ~attrs (sub.class_expr sub ce)
          (List.map (map_snd (sub.expr sub)) l)
    | Pcl_let (r, vbs, ce) ->
        let_ ~loc ~attrs r (List.map (sub.value_binding sub) vbs)
          (sub.class_expr sub ce)
    | Pcl_constraint (ce, ct) ->
        constraint_ ~loc ~attrs (sub.class_expr sub ce) (sub.class_type sub ct)
    | Pcl_extension x -> extension ~loc ~attrs (sub.extension sub x)
  (* Map the kind of a class field: concrete (expression) or virtual (type). *)
  let map_kind sub = function
    | Cfk_concrete (o, e) -> Cfk_concrete (o, sub.expr sub e)
    | Cfk_virtual t -> Cfk_virtual (sub.typ sub t)
  (* Map one field of a class structure. *)
  let map_field sub {pcf_desc = desc; pcf_loc = loc; pcf_attributes = attrs} =
    let open Cf in
    let loc = sub.location sub loc in
    let attrs = sub.attributes sub attrs in
    match desc with
    | Pcf_inherit (o, ce, s) ->
        inherit_ ~loc ~attrs o (sub.class_expr sub ce)
          (map_opt (map_loc sub) s)
    | Pcf_val (s, m, k) -> val_ ~loc ~attrs (map_loc sub s) m (map_kind sub k)
    | Pcf_method (s, p, k) ->
        method_ ~loc ~attrs (map_loc sub s) p (map_kind sub k)
    | Pcf_constraint (t1, t2) ->
        constraint_ ~loc ~attrs (sub.typ sub t1) (sub.typ sub t2)
    | Pcf_initializer e -> initializer_ ~loc ~attrs (sub.expr sub e)
    | Pcf_attribute x -> attribute ~loc (sub.attribute sub x)
    | Pcf_extension x -> extension ~loc ~attrs (sub.extension sub x)
  (* Map a class structure: self pattern plus all fields. *)
  let map_structure sub {pcstr_self; pcstr_fields} =
    {
      pcstr_self = sub.pat sub pcstr_self;
      pcstr_fields = List.map (sub.class_field sub) pcstr_fields;
    }
  (* Map a [class_infos] wrapper; [f] maps the payload (expression or type),
     so the same helper serves declarations and descriptions alike. *)
  let class_infos sub f {pci_virt; pci_params = pl; pci_name; pci_expr;
                         pci_loc; pci_attributes} =
    Ci.mk
      ~virt:pci_virt
      ~params:(List.map (map_fst (sub.typ sub)) pl)
      (map_loc sub pci_name)
      (f pci_expr)
      ~loc:(sub.location sub pci_loc)
      ~attrs:(sub.attributes sub pci_attributes)
end
(* Now, a generic AST mapper, to be extended to cover all kinds and
   cases of the OCaml grammar. The default behavior of the mapper is
   the identity. *)
(* The identity mapper: every field delegates to the per-syntactic-class
   helpers above (or inline lambdas), each of which rebuilds the node while
   recursing through [this], so overriding any one field affects the whole
   traversal. *)
let default_mapper =
  {
    structure = (fun this l -> List.map (this.structure_item this) l);
    structure_item = M.map_structure_item;
    module_expr = M.map;
    signature = (fun this l -> List.map (this.signature_item this) l);
    signature_item = MT.map_signature_item;
    module_type = MT.map;
    with_constraint = MT.map_with_constraint;
    class_declaration =
      (fun this -> CE.class_infos this (this.class_expr this));
    class_expr = CE.map;
    class_field = CE.map_field;
    class_structure = CE.map_structure;
    class_type = CT.map;
    class_type_field = CT.map_field;
    class_signature = CT.map_signature;
    class_type_declaration =
      (fun this -> CE.class_infos this (this.class_type this));
    class_description =
      (fun this -> CE.class_infos this (this.class_type this));
    type_declaration = T.map_type_declaration;
    type_kind = T.map_type_kind;
    typ = T.map;
    type_extension = T.map_type_extension;
    extension_constructor = T.map_extension_constructor;
    value_description =
      (fun this {pval_name; pval_type; pval_prim; pval_loc;
                 pval_attributes} ->
        Val.mk
          (map_loc this pval_name)
          (this.typ this pval_type)
          ~attrs:(this.attributes this pval_attributes)
          ~loc:(this.location this pval_loc)
          ~prim:pval_prim
      );
    pat = P.map;
    expr = E.map;
    module_declaration =
      (fun this {pmd_name; pmd_type; pmd_attributes; pmd_loc} ->
         Md.mk
           (map_loc this pmd_name)
           (this.module_type this pmd_type)
           ~attrs:(this.attributes this pmd_attributes)
           ~loc:(this.location this pmd_loc)
      );
    module_type_declaration =
      (fun this {pmtd_name; pmtd_type; pmtd_attributes; pmtd_loc} ->
         Mtd.mk
           (map_loc this pmtd_name)
           ?typ:(map_opt (this.module_type this) pmtd_type)
           ~attrs:(this.attributes this pmtd_attributes)
           ~loc:(this.location this pmtd_loc)
      );
    module_binding =
      (fun this {pmb_name; pmb_expr; pmb_attributes; pmb_loc} ->
         Mb.mk (map_loc this pmb_name) (this.module_expr this pmb_expr)
           ~attrs:(this.attributes this pmb_attributes)
           ~loc:(this.location this pmb_loc)
      );
    open_description =
      (fun this {popen_lid; popen_override; popen_attributes; popen_loc} ->
         Opn.mk (map_loc this popen_lid)
           ~override:popen_override
           ~loc:(this.location this popen_loc)
           ~attrs:(this.attributes this popen_attributes)
      );
    include_description =
      (fun this {pincl_mod; pincl_attributes; pincl_loc} ->
         Incl.mk (this.module_type this pincl_mod)
           ~loc:(this.location this pincl_loc)
           ~attrs:(this.attributes this pincl_attributes)
      );
    include_declaration =
      (fun this {pincl_mod; pincl_attributes; pincl_loc} ->
         Incl.mk (this.module_expr this pincl_mod)
           ~loc:(this.location this pincl_loc)
           ~attrs:(this.attributes this pincl_attributes)
      );
    value_binding =
      (fun this {pvb_pat; pvb_expr; pvb_attributes; pvb_loc} ->
         Vb.mk
           (this.pat this pvb_pat)
           (this.expr this pvb_expr)
           ~loc:(this.location this pvb_loc)
           ~attrs:(this.attributes this pvb_attributes)
      );
    constructor_declaration =
      (fun this {pcd_name; pcd_args; pcd_res; pcd_loc; pcd_attributes} ->
         Type.constructor
           (map_loc this pcd_name)
           ~args:(T.map_constructor_arguments this pcd_args)
           ?res:(map_opt (this.typ this) pcd_res)
           ~loc:(this.location this pcd_loc)
           ~attrs:(this.attributes this pcd_attributes)
      );
    label_declaration =
      (fun this {pld_name; pld_type; pld_loc; pld_mutable; pld_attributes} ->
         Type.field
           (map_loc this pld_name)
           (this.typ this pld_type)
           ~mut:pld_mutable
           ~loc:(this.location this pld_loc)
           ~attrs:(this.attributes this pld_attributes)
      );
    cases = (fun this l -> List.map (this.case this) l);
    case =
      (fun this {pc_lhs; pc_guard; pc_rhs} ->
         {
           pc_lhs = this.pat this pc_lhs;
           pc_guard = map_opt (this.expr this) pc_guard;
           pc_rhs = this.expr this pc_rhs;
         }
      );
    (* Locations are left untouched by default. *)
    location = (fun _this l -> l);
    extension = (fun this (s, e) -> (map_loc this s, this.payload this e));
    attribute = (fun this (s, e) -> (map_loc this s, this.payload this e));
    attributes = (fun this l -> List.map (this.attribute this) l);
    payload =
      (fun this -> function
         | PStr x -> PStr (this.structure this x)
         | PSig x -> PSig (this.signature this x)
         | PTyp x -> PTyp (this.typ this x)
         | PPat (x, g) -> PPat (this.pat this x, map_opt (this.expr this) g)
      );
  }
(* Encode an error as an "ocaml.error" extension node: the message, the
   highlighted variant, then every sub-error, recursively encoded. *)
let rec extension_of_error {loc; msg; if_highlight; sub} =
  { loc; txt = "ocaml.error" },
  PStr ([Str.eval (Exp.constant (Pconst_string (msg, None)));
         Str.eval (Exp.constant (Pconst_string (if_highlight, None)))] @
        (List.map (fun ext -> Str.extension (extension_of_error ext)) sub))
(* Build an [@ocaml.ppwarning "..."] attribute carrying the warning text [s]
   at location [loc]. *)
let attribute_of_warning loc s =
  let name = { loc; txt = "ocaml.ppwarning" } in
  let body = Str.eval ~loc (Exp.constant (Pconst_string (s, None))) in
  (name, PStr [body])
(* String-keyed map, used for the ppx cookie store below. *)
module StringMap = Map.Make(struct
    type t = string
    let compare = compare
end)
(* Mutable cookie store, propagated between ppx rewriters through the
   "ocaml.ppx.context" attribute. *)
let cookies = ref StringMap.empty
(* Look up cookie [k]; [None] when unset. *)
let get_cookie k =
  try Some (StringMap.find k !cookies)
  with Not_found -> None
let set_cookie k v =
  cookies := StringMap.add k v !cookies
(* Name of the tool driving this rewriter, as restored from the context. *)
let tool_name_ref = ref "_none_"
let tool_name () = !tool_name_ref
(* Serialization of the ppx driver context (tool name, search paths, flags,
   cookies) as a record expression inside an "ocaml.ppx.context" attribute,
   plus the inverse decoding that restores global state from it. *)
module PpxContext = struct
  open Longident
  open Asttypes
  open Ast_helper
  (* Encoding helpers: turn plain values into Parsetree expressions. *)
  let lid name = { txt = Lident name; loc = Location.none }
  let make_string x = Exp.constant (Pconst_string (x, None))
  let make_bool x =
    if x
    then Exp.construct (lid "true") None
    else Exp.construct (lid "false") None
  let rec make_list f lst =
    match lst with
    | x :: rest ->
      Exp.construct (lid "::") (Some (Exp.tuple [f x; make_list f rest]))
    | [] ->
      Exp.construct (lid "[]") None
  let make_pair f1 f2 (x1, x2) =
    Exp.tuple [f1 x1; f2 x2]
  let make_option f opt =
    match opt with
    | Some x -> Exp.construct (lid "Some") (Some (f x))
    | None -> Exp.construct (lid "None") None
  (* Encode the cookie store as a (string * expression) assoc list; the
     cookie values are already expressions, hence the identity function. *)
  let get_cookies () =
    lid "cookies",
    make_list (make_pair make_string (fun x -> x))
      (StringMap.bindings !cookies)
  (* Wrap [fields] into the full "ocaml.ppx.context" attribute. *)
  let mk fields =
    { txt = "ocaml.ppx.context"; loc = Location.none },
    Parsetree.PStr [Str.eval (Exp.record fields None)]
  (* Snapshot the current global configuration into a context attribute. *)
  let make ~tool_name () =
    let fields =
      [
        lid "tool_name", make_string tool_name;
        lid "include_dirs", make_list make_string !Clflags.include_dirs;
        lid "load_path", make_list make_string !Config.load_path;
        lid "open_modules", make_list make_string !Clflags.open_modules;
        lid "for_package", make_option make_string !Clflags.for_package;
        lid "debug", make_bool !Clflags.debug;
        get_cookies ()
      ]
    in
    mk fields
  (* Extract the record fields from a context payload; any other shape is an
     internal error. *)
  let get_fields = function
    | PStr [{pstr_desc = Pstr_eval
                 ({ pexp_desc = Pexp_record (fields, None) }, [])}] ->
        fields
    | _ ->
        raise_errorf "Internal error: invalid [@@@ocaml.ppx.context] syntax"
  (* Decode [fields] and write each recognized entry back into the
     corresponding global reference; unknown fields are ignored. *)
  let restore fields =
    let field name payload =
      (* Decoding helpers, mirror images of the [make_*] encoders above. *)
      let rec get_string = function
        | { pexp_desc = Pexp_constant (Pconst_string (str, None)) } -> str
        | _ -> raise_errorf "Internal error: invalid [@@@ocaml.ppx.context \
                             { %s }] string syntax" name
      and get_bool pexp =
        match pexp with
        | {pexp_desc = Pexp_construct ({txt = Longident.Lident "true"},
                                       None)} ->
            true
        | {pexp_desc = Pexp_construct ({txt = Longident.Lident "false"},
                                       None)} ->
            false
        | _ -> raise_errorf "Internal error: invalid [@@@ocaml.ppx.context \
                             { %s }] bool syntax" name
      and get_list elem = function
        | {pexp_desc =
             Pexp_construct ({txt = Longident.Lident "::"},
                             Some {pexp_desc = Pexp_tuple [exp; rest]}) } ->
            elem exp :: get_list elem rest
        | {pexp_desc =
             Pexp_construct ({txt = Longident.Lident "[]"}, None)} ->
            []
        | _ -> raise_errorf "Internal error: invalid [@@@ocaml.ppx.context \
                             { %s }] list syntax" name
      and get_pair f1 f2 = function
        | {pexp_desc = Pexp_tuple [e1; e2]} ->
            (f1 e1, f2 e2)
        | _ -> raise_errorf "Internal error: invalid [@@@ocaml.ppx.context \
                             { %s }] pair syntax" name
      and get_option elem = function
        | { pexp_desc =
              Pexp_construct ({ txt = Longident.Lident "Some" }, Some exp) } ->
            Some (elem exp)
        | { pexp_desc =
              Pexp_construct ({ txt = Longident.Lident "None" }, None) } ->
            None
        | _ -> raise_errorf "Internal error: invalid [@@@ocaml.ppx.context \
                             { %s }] option syntax" name
      in
      match name with
      | "tool_name" ->
          tool_name_ref := get_string payload
      | "include_dirs" ->
          Clflags.include_dirs := get_list get_string payload
      | "load_path" ->
          Config.load_path := get_list get_string payload
      | "open_modules" ->
          Clflags.open_modules := get_list get_string payload
      | "for_package" ->
          Clflags.for_package := get_option get_string payload
      | "debug" ->
          Clflags.debug := get_bool payload
      | "cookies" ->
          let l = get_list (get_pair get_string (fun x -> x)) payload in
          cookies :=
            List.fold_left
              (fun s (k, v) -> StringMap.add k v s) StringMap.empty
              l
      | _ ->
          ()
    in
    List.iter (function ({txt=Lident name}, x) -> field name x | _ -> ()) fields
  (* Refresh the "cookies" field of an existing context with the current
     cookie store, keeping all other fields intact. *)
  let update_cookies fields =
    let fields =
      List.filter
        (function ({txt=Lident "cookies"}, _) -> false | _ -> true)
        fields
    in
    fields @ [get_cookies ()]
end
let ppx_context = PpxContext.make
(* Convert an exception into an error extension node, re-raising when the
   exception is not a recognized error. *)
let ext_of_exn exn =
  match error_of_exn exn with
  | Some error -> extension_of_error error
  | None -> raise exn
(* Read a marshalled AST from [source], run [mapper ()] over it, and write
   the rewritten AST (preceded by a refreshed ppx context attribute) to
   [target]. Errors raised by the mapper are embedded in the output as
   error extension nodes rather than aborting. *)
let apply_lazy ~source ~target mapper =
  (* Rewrite an implementation: peel off the context attribute, restore
     global state from it, map, then re-attach an updated context. *)
  let implem ast =
    let fields, ast =
      match ast with
      | {pstr_desc = Pstr_attribute ({txt = "ocaml.ppx.context"}, x)} :: l ->
          PpxContext.get_fields x, l
      | _ -> [], ast
    in
    PpxContext.restore fields;
    let ast =
      try
        let mapper = mapper () in
        mapper.structure mapper ast
      with exn ->
        [{pstr_desc = Pstr_extension (ext_of_exn exn, []);
          pstr_loc  = Location.none}]
    in
    let fields = PpxContext.update_cookies fields in
    Str.attribute (PpxContext.mk fields) :: ast
  in
  (* Same as [implem], for interfaces. *)
  let iface ast =
    let fields, ast =
      match ast with
      | {psig_desc = Psig_attribute ({txt = "ocaml.ppx.context"}, x)} :: l ->
          PpxContext.get_fields x, l
      | _ -> [], ast
    in
    PpxContext.restore fields;
    let ast =
      try
        let mapper = mapper () in
        mapper.signature mapper ast
      with exn ->
        [{psig_desc = Psig_extension (ext_of_exn exn, []);
          psig_loc  = Location.none}]
    in
    let fields = PpxContext.update_cookies fields in
    Sig.attribute (PpxContext.mk fields) :: ast
  in
  let ic = open_in_bin source in
  (* NOTE: both magic numbers are assumed to have the same length here. *)
  let magic =
    really_input_string ic (String.length Config.ast_impl_magic_number)
  in
  let rewrite transform =
    Location.input_name := input_value ic;
    let ast = input_value ic in
    close_in ic;
    let ast = transform ast in
    let oc = open_out_bin target in
    output_string oc magic;
    output_value oc !Location.input_name;
    output_value oc ast;
    close_out oc
  and fail () =
    close_in ic;
    (* Fixed: removed the stray trailing ';' after [failwith ...], which is a
       syntax error immediately before [in]. *)
    failwith "Ast_mapper: OCaml version mismatch or malformed input"
  in
  if magic = Config.ast_impl_magic_number then
    rewrite (implem : structure -> structure)
  else if magic = Config.ast_intf_magic_number then
    rewrite (iface : signature -> signature)
  else fail ()
(* Strip a leading "ocaml.ppx.context" attribute from a structure;
   when [restore] is true, also load its fields into the global state. *)
let drop_ppx_context_str ~restore = function
  | {pstr_desc = Pstr_attribute({Location.txt = "ocaml.ppx.context"}, a)}
    :: items ->
      if restore then
        PpxContext.restore (PpxContext.get_fields a);
      items
  | items -> items
(* Same as [drop_ppx_context_str], for signatures. *)
let drop_ppx_context_sig ~restore = function
  | {psig_desc = Psig_attribute({Location.txt = "ocaml.ppx.context"}, a)}
    :: items ->
      if restore then
        PpxContext.restore (PpxContext.get_fields a);
      items
  | items -> items
(* Prepend a freshly-built ppx context attribute to a structure/signature. *)
let add_ppx_context_str ~tool_name ast =
  Ast_helper.Str.attribute (ppx_context ~tool_name ()) :: ast
let add_ppx_context_sig ~tool_name ast =
  Ast_helper.Sig.attribute (ppx_context ~tool_name ()) :: ast
(* Eager variant of [apply_lazy]: run [mapper] on [source], write [target]. *)
let apply ~source ~target mapper =
  apply_lazy ~source ~target (fun () -> mapper)
(* Standalone ppx entry point: argv is [prog; extra_args...; infile; outfile].
   [mapper] receives the extra args; any exception it raises is deferred into
   a mapper whose first use re-raises it (so it ends up as an error node). *)
let run_main mapper =
  try
    let a = Sys.argv in
    let n = Array.length a in
    if n > 2 then
      let mapper () =
        (* Array.sub a 1 (n - 3): the args strictly between the program name
           and the final <infile> <outfile> pair. *)
        try mapper (Array.to_list (Array.sub a 1 (n - 3)))
        with exn ->
          (* PR #7178: do not raise the exception here; set up a mapper that
             raises it on first use instead. *)
          let f _ _ = raise exn in
          {default_mapper with structure = f; signature = f}
      in
      apply_lazy ~source:a.(n - 2) ~target:a.(n - 1) mapper
    else begin
      Printf.eprintf "Usage: %s [extra_args] <infile> <outfile>\n%!"
                     Sys.executable_name;
      exit 2
    end
  with exn ->
    prerr_endline (Printexc.to_string exn);
    exit 2
(* Hook used by drivers to override how rewriters are registered; defaults
   to running the mapper as a standalone executable. *)
let register_function = ref (fun _name f -> run_main f)
let register name f = !register_function name f
|
57c0223affa6f05dd18e87a60b8c71f74370cbffdc0cc8b4d58cee881d0c6465 | digitallyinduced/ihp | TypeInfo.hs | # LANGUAGE TemplateHaskell #
{-|
Module: IHP.Postgres.TypeInfo
Description: Extension Of The Database.PostgreSQL.Simple.TypeInfo Module
Copyright: (c) digitally induced GmbH, 2021
-}
module IHP.Postgres.TypeInfo where
import Database.PostgreSQL.Simple.FromField
-- | 'TypeInfo' describing PostgreSQL's built-in @tsvector@ type, for use
--   with postgresql-simple's field decoding.
tsvector :: TypeInfo
tsvector = Basic {
    typoid = tsvectorOid,
    typcategory = 'U',
    typdelim = ',',
    typname = "tsvector"
}
-- `SELECT oid, typname FROM pg_type WHERE typname ~ 'tsvector';`
-- | OID of the built-in @tsvector@ type, obtained with the query above.
tsvectorOid :: Oid
tsvectorOid = Oid 3614
{-# INLINE tsvector #-}
| null | https://raw.githubusercontent.com/digitallyinduced/ihp/c287c5b7c5212ea5e45f162391ffbc48c9f65172/IHP/Postgres/TypeInfo.hs | haskell | `SELECT oid, typname FROM pg_type WHERE typname ~ 'tsvector';` | # LANGUAGE TemplateHaskell #
|
Module : IHP.Postgres . TypeInfo
Description : Extension Of The Database . PostgreSQL.Simple . TypeInfo Module
Copyright : ( c ) digitally induced GmbH , 2021
Module: IHP.Postgres.TypeInfo
Description: Extension Of The Database.PostgreSQL.Simple.TypeInfo Module
Copyright: (c) digitally induced GmbH, 2021
-}
module IHP.Postgres.TypeInfo where
import Database.PostgreSQL.Simple.FromField
-- | 'TypeInfo' describing PostgreSQL's built-in @tsvector@ type, for use
--   with postgresql-simple's field decoding.
tsvector :: TypeInfo
tsvector = Basic {
    typoid = tsvectorOid,
    typcategory = 'U',
    typdelim = ',',
    typname = "tsvector"
}
-- `SELECT oid, typname FROM pg_type WHERE typname ~ 'tsvector';`
-- | OID of the built-in @tsvector@ type, obtained with the query above.
tsvectorOid :: Oid
tsvectorOid = Oid 3614
|
3e14b38ff35275023b805631d0bdecfb9a5654287d84c8502dbf6966c56196b6 | 2600hz-archive/whistle | ecallmgr_amqp_pool.erl | %%%-------------------------------------------------------------------
@author < >
( C ) 2011 , VoIP INC
%%% @doc
Manage a pool of amqp queues
%%% @end
Created : 28 Mar 2011 by < >
%%%-------------------------------------------------------------------
%% gen_server that manages a pool of reusable AMQP request workers; the
%% pool grows on demand and is periodically shrunk back toward its
%% original size (see reduce_labor_force handling below).
-module(ecallmgr_amqp_pool).
-behaviour(gen_server).
%% API
-export([start_link/0, start_link/1, route_req/1, route_req/2, reg_query/1, reg_query/2, media_req/1, media_req/2]).
-export([authn_req/1, authn_req/2, authz_req/1, authz_req/2]).
-export([worker_free/3, worker_count/0]).
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
         terminate/2, code_change/3]).
-include("ecallmgr.hrl").
-define(SERVER, ?MODULE).
%% Initial (and minimum) number of pooled workers.
-define(WORKER_COUNT, 10).
%% Default gen_server:call timeout for the request API, in ms.
-define(DEFAULT_TIMEOUT, 5000).
%% every X ms, compare RequestsPer to WorkerCount
%% If RP < WC, reduce Ws by max(WC-RP, OrigWC)
-define(BACKOFF_PERIOD, 1000). % arbitrary at this point
-record(state, {
          worker_count = ?WORKER_COUNT :: integer()
          ,orig_worker_count = ?WORKER_COUNT :: integer() % scale back workers after a period of time
          ,workers = queue:new() :: queue()
          ,requests_per = 0 :: non_neg_integer()
          ,elapsed_micro_per = 0 :: non_neg_integer()
         }).
%%%===================================================================
%%% API
%%%===================================================================
%%--------------------------------------------------------------------
%% @doc
%% Starts the server
%%
%% @spec start_link() -> {ok, Pid} | ignore | {error, Error}
%% @end
%%--------------------------------------------------------------------
%% @doc Start the pool registered locally, with the default worker count.
start_link() ->
    gen_server:start_link({local, ?SERVER}, ?MODULE, [?WORKER_COUNT], []).
%% @doc Start the pool with an explicit initial worker count.
start_link(WorkerCount) ->
    gen_server:start_link({local, ?SERVER}, ?MODULE, [WorkerCount], []).
%% @doc Publish an authn request through a pooled worker; blocks the caller
%% until the worker replies or Timeout (ms) elapses.
authn_req(Prop) ->
    authn_req(Prop, ?DEFAULT_TIMEOUT).
authn_req(Prop, Timeout) ->
    gen_server:call(?SERVER, {request, Prop, fun wh_api:authn_req/1, get(callid)
                              ,fun(JSON) -> amqp_util:callmgr_publish(JSON, <<"application/json">>, ?KEY_AUTHN_REQ) end
                             }, Timeout).
%% @doc Same as authn_req/1,2 but for authorization requests.
authz_req(Prop) ->
    authz_req(Prop, ?DEFAULT_TIMEOUT).
authz_req(Prop, Timeout) ->
    gen_server:call(?SERVER, {request, Prop, fun wh_api:authz_req/1, get(callid)
                              ,fun(JSON) -> amqp_util:callmgr_publish(JSON, <<"application/json">>, ?KEY_AUTHZ_REQ) end
                             }, Timeout).
%% @doc Publish a route request through a pooled worker.
route_req(Prop) ->
    route_req(Prop, ?DEFAULT_TIMEOUT).
route_req(Prop, Timeout) ->
    gen_server:call(?SERVER, {request, Prop, fun wh_api:route_req/1, get(callid)
                              ,fun(JSON) -> amqp_util:callmgr_publish(JSON, <<"application/json">>, ?KEY_ROUTE_REQ) end
                             }, Timeout).
%% @doc Publish a registration query through a pooled worker.
reg_query(Prop) ->
    reg_query(Prop, ?DEFAULT_TIMEOUT).
reg_query(Prop, Timeout) ->
    gen_server:call(?SERVER, {request, Prop, fun wh_api:reg_query/1, get(callid)
                              ,fun(JSON) -> amqp_util:callmgr_publish(JSON, <<"application/json">>, ?KEY_REG_QUERY) end
                             }, Timeout).
%% @doc Publish a media request (via the call-event exchange) through a
%% pooled worker.
media_req(Prop) ->
    media_req(Prop, ?DEFAULT_TIMEOUT).
media_req(Prop, Timeout) ->
    gen_server:call(?SERVER, {request, Prop, fun wh_api:media_req/1, get(callid)
                              ,fun(JSON) -> amqp_util:callevt_publish(JSON) end
                             }, Timeout).
%% @doc Called by a worker when it finishes a request; Elapsed is the time
%% the request took (used by the pool-shrinking heuristic).
worker_free(Srv, Worker, Elapsed) ->
    gen_server:cast(Srv, {worker_free, Worker, Elapsed}).
%% @doc Number of workers currently running under the worker supervisor.
worker_count() ->
    ecallmgr_amqp_pool_worker_sup:worker_count().
%%%===================================================================
%%% gen_server callbacks
%%%===================================================================
%%--------------------------------------------------------------------
@private
%% @doc
%% Initializes the server
%%
%% @spec init(Args) -> {ok, State} |
%%                     {ok, State, Timeout} |
%%                     ignore |
%%                     {stop, Reason}
%% @end
%%--------------------------------------------------------------------
%% @doc Trap exits (so worker deaths arrive as 'EXIT' messages), release any
%% stale workers from a previous incarnation, schedule the first
%% pool-shrinking tick, and start Count fresh workers.
init([Count]) ->
    process_flag(trap_exit, true),
    'ok' = ecallmgr_amqp_pool_worker_sup:release_all(),
    erlang:send_after(?BACKOFF_PERIOD, self(), reduce_labor_force),
    Ws = lists:foldr(fun(W, Ws0) -> queue:in(W, Ws0) end, queue:new(), [ start_worker() || _ <- lists:seq(1, Count) ]),
    {ok, #state{worker_count=Count, workers=Ws, orig_worker_count=Count}}.
%%--------------------------------------------------------------------
@private
%% @doc
%% Handling call messages
%%
%% @spec handle_call(Request, From, State) ->
%%                                   {reply, Reply, State} |
%%                                   {reply, Reply, State, Timeout} |
%%                                   {noreply, State} |
%%                                   {noreply, State, Timeout} |
%% {stop, Reason, Reply, State} |
%% {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
%% @doc Dispatch a request to an idle worker; the worker replies to From
%% directly, so this server returns {noreply, ...}. When the idle queue is
%% empty, a new worker is started on the spot (pool grows on demand).
handle_call({request, Prop, ApiFun, CallId, PubFun}, From, #state{workers=W, worker_count=WC, requests_per=RP}=State) ->
    case queue:out(W) of
        {{value, Worker}, W1} ->
            ecallmgr_amqp_pool_worker:start_req(Worker, Prop, ApiFun, CallId, PubFun, From, self()),
            {noreply, State#state{workers=W1, requests_per=RP+1}, hibernate};
        {empty, _} ->
            Worker = start_worker(),
            ?LOG("starting additional worker ~p", [Worker]),
            ecallmgr_amqp_pool_worker:start_req(Worker, Prop, ApiFun, CallId, PubFun, From, self()),
            {noreply, State#state{worker_count=WC+1, requests_per=RP+1}, hibernate}
    end.
%%--------------------------------------------------------------------
@private
%% @doc
%% Handling cast messages
%%
%% @spec handle_cast(Msg, State) -> {noreply, State} |
%%                                  {noreply, State, Timeout} |
%% {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
%% @doc worker_free casts are funneled into handle_info for one code path.
handle_cast({worker_free, _Worker, _Elapsed}=Req, State) ->
    handle_info(Req, State).
%%--------------------------------------------------------------------
@private
%% @doc
%% Handling all non call/cast messages
%%
%% @spec handle_info(Info, State) -> {noreply, State} |
%%                                   {noreply, State, Timeout} |
%% {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
%% @doc Pool bookkeeping:
%%   * worker_free  - return worker to the idle queue, accumulate elapsed time
%%   * 'EXIT'       - drop the dead worker (replacing it when below the
%%                    original pool size)
%%   * reduce_labor_force - periodic tick that shrinks an over-grown pool
%%                    based on observed demand during the last period.
handle_info({worker_free, W, Elapsed}, #state{workers=Ws, elapsed_micro_per=EMP}=State) ->
    {noreply, State#state{workers=queue:in(W, Ws), elapsed_micro_per=EMP+Elapsed}, hibernate};
handle_info({'EXIT', W, _Reason}, #state{workers=Ws, worker_count=WC, orig_worker_count=OWC}=State) when WC < OWC ->
    ?LOG("Worker down: ~p", [_Reason]),
    %% Below the original pool size: replace the dead worker immediately.
    Ws1 = queue:in(start_worker(), queue:filter(fun(W1) when W =:= W1 -> false; (_) -> true end, Ws)),
    {noreply, State#state{workers=Ws1, worker_count=worker_count()}, hibernate};
handle_info({'EXIT', W, _Reason}, #state{workers=Ws}=State) ->
    ?LOG("Worker down: ~p", [_Reason]),
    Ws1 = queue:filter(fun(W1) when W =:= W1 -> false; (_) -> true end, Ws),
    {noreply, State#state{workers=Ws1, worker_count=worker_count()}, hibernate};
handle_info(reduce_labor_force
            ,#state{workers=Ws, worker_count=WC, requests_per=RP, orig_worker_count=OWC, elapsed_micro_per=EMP}=State)
  when RP > 0 andalso EMP > 0 andalso WC > OWC ->
    %% average micro per request; clamped to >= 1 because EMP div RP is 0
    %% whenever EMP < RP, which would crash the 1/AvgMicro division below.
    AvgMicro = case EMP div RP of 0 -> 1; A -> A end,
    ?LOG("Req per ~b: ~b", [?BACKOFF_PERIOD, RP]),
    ?LOG("Avg micro per req: ~b (~b total micro)", [AvgMicro, EMP]),
    WsNeeded = round((1 / AvgMicro) * (?BACKOFF_PERIOD * 1000)), % avg workers needed
    ?LOG("WsNeeded: ~b (have ~b)", [WsNeeded, WC]),
    erlang:send_after(?BACKOFF_PERIOD, self(), reduce_labor_force),
    case round((WC - WsNeeded) * 0.1) of
        Reduce when Reduce > 0 ->
            ?LOG_SYS("Reducing worker count from ~b by ~b", [WC, Reduce]),
            Ws1 = reduce_workers(Ws, Reduce, OWC),
            {noreply, State#state{workers=Ws1, worker_count=worker_count(), requests_per=0, elapsed_micro_per=0}, hibernate};
        _Other ->
            ?LOG_SYS("Not reducing workers (~b suggested)", [_Other]),
            {noreply, State#state{requests_per=0, elapsed_micro_per=0}}
    end;
handle_info(reduce_labor_force, #state{requests_per=RP, worker_count=WC, orig_worker_count=OWC, workers=Ws}=State) ->
    %% No timing data this period: shrink based on raw request count alone.
    erlang:send_after(?BACKOFF_PERIOD, self(), reduce_labor_force),
    case round((WC - RP) * 0.1) of
        Reduce when Reduce > 0 andalso WC > OWC ->
            ?LOG("Reducing worker count from ~b by ~b", [WC, Reduce]),
            Ws1 = reduce_workers(Ws, Reduce, OWC),
            ?LOG("Queue len before ~b and after ~b", [queue:len(Ws), queue:len(Ws1)]),
            {noreply, State#state{requests_per=0, elapsed_micro_per=0, workers=Ws1, worker_count=worker_count()}, hibernate};
        _Else ->
            {noreply, State#state{requests_per=0, elapsed_micro_per=0}, hibernate}
    end;
handle_info(_Info, State) ->
    ?LOG("Unhandled message: ~p", [_Info]),
    {noreply, State}.
%% @doc Stop up to Reduce idle workers, never dropping the queue below the
%% original worker count OWC; returns the trimmed queue.
reduce_workers(Ws, Reduce, OWC) ->
    lists:foldl(fun(_, Q0) ->
                        case queue:len(Q0) =< OWC of
                            true -> Q0;
                            false ->
                                {{value, W}, Q1} = queue:out(Q0),
                                ecallmgr_amqp_pool_worker:stop(W),
                                Q1
                        end
                end, Ws, lists:seq(1,Reduce)).
%%--------------------------------------------------------------------
@private
%% @doc
%% This function is called by a gen_server when it is about to
%% terminate. It should be the opposite of Module:init/1 and do any
%% necessary cleaning up. When it returns, the gen_server terminates
with . The return value is ignored .
%%
, State ) - > void ( )
%% @end
%%--------------------------------------------------------------------
terminate(_Reason, _State) ->
?LOG("Terminating: ~p", [_Reason]).
%%--------------------------------------------------------------------
@private
%% @doc
%% Convert process state when code is changed
%%
, State , Extra ) - > { ok , NewState }
%% @end
%%--------------------------------------------------------------------
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%%===================================================================
Internal functions
%%%===================================================================
start_worker() ->
{ok, Pid} = ecallmgr_amqp_pool_worker_sup:start_child(),
link(Pid),
?LOG("Worker ~p started", [Pid]),
Pid.
| null | https://raw.githubusercontent.com/2600hz-archive/whistle/1a256604f0d037fac409ad5a55b6b17e545dcbf9/ecallmgr/src/ecallmgr_amqp_pool.erl | erlang | -------------------------------------------------------------------
@doc
@end
-------------------------------------------------------------------
API
gen_server callbacks
If RP < WC, reduce Ws by max(WC-RP, OrigWC)
arbitrary at this point
scale back workers after a period of time
===================================================================
API
===================================================================
--------------------------------------------------------------------
@doc
Starts the server
@end
--------------------------------------------------------------------
===================================================================
gen_server callbacks
===================================================================
--------------------------------------------------------------------
@doc
Initializes the server
ignore |
{stop, Reason}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Handling call messages
{reply, Reply, State} |
{stop, Reason, Reply, State} |
{stop, Reason, State}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Handling cast messages
{stop, Reason, State}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Handling all non call/cast messages
{stop, Reason, State}
@end
--------------------------------------------------------------------
average micro per request
avg workers needed
--------------------------------------------------------------------
@doc
This function is called by a gen_server when it is about to
terminate. It should be the opposite of Module:init/1 and do any
necessary cleaning up. When it returns, the gen_server terminates
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Convert process state when code is changed
@end
--------------------------------------------------------------------
===================================================================
=================================================================== | @author < >
( C ) 2011 , VoIP INC
Manage a pool of amqp queues
Created : 28 Mar 2011 by < >
-module(ecallmgr_amqp_pool).
-behaviour(gen_server).
-export([start_link/0, start_link/1, route_req/1, route_req/2, reg_query/1, reg_query/2, media_req/1, media_req/2]).
-export([authn_req/1, authn_req/2, authz_req/1, authz_req/2]).
-export([worker_free/3, worker_count/0]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-include("ecallmgr.hrl").
-define(SERVER, ?MODULE).
-define(WORKER_COUNT, 10).
-define(DEFAULT_TIMEOUT, 5000).
every X ms , compare RequestsPer to WorkerCount
-record(state, {
worker_count = ?WORKER_COUNT :: integer()
,workers = queue:new() :: queue()
,requests_per = 0 :: non_neg_integer()
,elapsed_micro_per = 0 :: non_neg_integer()
}).
( ) - > { ok , Pid } | ignore | { error , Error }
start_link() ->
gen_server:start_link({local, ?SERVER}, ?MODULE, [?WORKER_COUNT], []).
start_link(WorkerCount) ->
gen_server:start_link({local, ?SERVER}, ?MODULE, [WorkerCount], []).
authn_req(Prop) ->
authn_req(Prop, ?DEFAULT_TIMEOUT).
authn_req(Prop, Timeout) ->
gen_server:call(?SERVER, {request, Prop, fun wh_api:authn_req/1, get(callid)
,fun(JSON) -> amqp_util:callmgr_publish(JSON, <<"application/json">>, ?KEY_AUTHN_REQ) end
}, Timeout).
authz_req(Prop) ->
authz_req(Prop, ?DEFAULT_TIMEOUT).
authz_req(Prop, Timeout) ->
gen_server:call(?SERVER, {request, Prop, fun wh_api:authz_req/1, get(callid)
,fun(JSON) -> amqp_util:callmgr_publish(JSON, <<"application/json">>, ?KEY_AUTHZ_REQ) end
}, Timeout).
route_req(Prop) ->
route_req(Prop, ?DEFAULT_TIMEOUT).
route_req(Prop, Timeout) ->
gen_server:call(?SERVER, {request, Prop, fun wh_api:route_req/1, get(callid)
,fun(JSON) -> amqp_util:callmgr_publish(JSON, <<"application/json">>, ?KEY_ROUTE_REQ) end
}, Timeout).
reg_query(Prop) ->
reg_query(Prop, ?DEFAULT_TIMEOUT).
reg_query(Prop, Timeout) ->
gen_server:call(?SERVER, {request, Prop, fun wh_api:reg_query/1, get(callid)
,fun(JSON) -> amqp_util:callmgr_publish(JSON, <<"application/json">>, ?KEY_REG_QUERY) end
}, Timeout).
media_req(Prop) ->
media_req(Prop, ?DEFAULT_TIMEOUT).
media_req(Prop, Timeout) ->
gen_server:call(?SERVER, {request, Prop, fun wh_api:media_req/1, get(callid)
,fun(JSON) -> amqp_util:callevt_publish(JSON) end
}, Timeout).
worker_free(Srv, Worker, Elapsed) ->
gen_server:cast(Srv, {worker_free, Worker, Elapsed}).
worker_count() ->
ecallmgr_amqp_pool_worker_sup:worker_count().
@private
) - > { ok , State } |
{ ok , State , Timeout } |
init([Count]) ->
process_flag(trap_exit, true),
'ok' = ecallmgr_amqp_pool_worker_sup:release_all(),
erlang:send_after(?BACKOFF_PERIOD, self(), reduce_labor_force),
Ws = lists:foldr(fun(W, Ws0) -> queue:in(W, Ws0) end, queue:new(), [ start_worker() || _ <- lists:seq(1, Count) ]),
{ok, #state{worker_count=Count, workers=Ws, orig_worker_count=Count}}.
@private
, From , State ) - >
{ reply , Reply , State , Timeout } |
{ noreply , State } |
{ noreply , State , Timeout } |
handle_call({request, Prop, ApiFun, CallId, PubFun}, From, #state{workers=W, worker_count=WC, requests_per=RP}=State) ->
case queue:out(W) of
{{value, Worker}, W1} ->
ecallmgr_amqp_pool_worker:start_req(Worker, Prop, ApiFun, CallId, PubFun, From, self()),
{noreply, State#state{workers=W1, requests_per=RP+1}, hibernate};
{empty, _} ->
Worker = start_worker(),
?LOG("starting additional worker ~p", [Worker]),
ecallmgr_amqp_pool_worker:start_req(Worker, Prop, ApiFun, CallId, PubFun, From, self()),
{noreply, State#state{worker_count=WC+1, requests_per=RP+1}, hibernate}
end.
@private
@spec handle_cast(Msg , State ) - > { noreply , State } |
{ noreply , State , Timeout } |
handle_cast({worker_free, _Worker, _Elapsed}=Req, State) ->
handle_info(Req, State).
@private
, State ) - > { noreply , State } |
{ noreply , State , Timeout } |
handle_info({worker_free, W, Elapsed}, #state{workers=Ws, elapsed_micro_per=EMP}=State) ->
{noreply, State#state{workers=queue:in(W, Ws), elapsed_micro_per=EMP+Elapsed}, hibernate};
handle_info({'EXIT', W, _Reason}, #state{workers=Ws, worker_count=WC, orig_worker_count=OWC}=State) when WC < OWC ->
?LOG("Worker down: ~p", [_Reason]),
Ws1 = queue:in(start_worker(), queue:filter(fun(W1) when W =:= W1 -> false; (_) -> true end, Ws)),
{noreply, State#state{workers=Ws1, worker_count=worker_count()}, hibernate};
handle_info({'EXIT', W, _Reason}, #state{workers=Ws}=State) ->
?LOG("Worker down: ~p", [_Reason]),
Ws1 = queue:filter(fun(W1) when W =:= W1 -> false; (_) -> true end, Ws),
{noreply, State#state{workers=Ws1, worker_count=worker_count()}, hibernate};
handle_info(reduce_labor_force
,#state{workers=Ws, worker_count=WC, requests_per=RP, orig_worker_count=OWC, elapsed_micro_per=EMP}=State)
when RP > 0 andalso EMP > 0 andalso WC > OWC ->
?LOG("Req per ~b: ~b", [?BACKOFF_PERIOD, RP]),
?LOG("Avg micro per req: ~b (~b total micro)", [AvgMicro, EMP]),
?LOG("WsNeeded: ~b (have ~b)", [WsNeeded, WC]),
erlang:send_after(?BACKOFF_PERIOD, self(), reduce_labor_force),
case round((WC - WsNeeded) * 0.1) of
Reduce when Reduce > 0 ->
?LOG_SYS("Reducing worker count from ~b by ~b", [WC, Reduce]),
Ws1 = reduce_workers(Ws, Reduce, OWC),
{noreply, State#state{workers=Ws1, worker_count=worker_count(), requests_per=0, elapsed_micro_per=0}, hibernate};
_Other ->
?LOG_SYS("Not reducing workers (~b suggested)", [_Other]),
{noreply, State#state{requests_per=0, elapsed_micro_per=0}}
end;
handle_info(reduce_labor_force, #state{requests_per=RP, worker_count=WC, orig_worker_count=OWC, workers=Ws}=State) ->
erlang:send_after(?BACKOFF_PERIOD, self(), reduce_labor_force),
case round((WC - RP) * 0.1) of
Reduce when Reduce > 0 andalso WC > OWC ->
?LOG("Reducing worker count from ~b by ~b", [WC, Reduce]),
Ws1 = reduce_workers(Ws, Reduce, OWC),
?LOG("Queue len before ~b and after ~b", [queue:len(Ws), queue:len(Ws1)]),
{noreply, State#state{requests_per=0, elapsed_micro_per=0, workers=Ws1, worker_count=worker_count()}, hibernate};
_Else ->
{noreply, State#state{requests_per=0, elapsed_micro_per=0}, hibernate}
end;
handle_info(_Info, State) ->
?LOG("Unhandled message: ~p", [_Info]),
{noreply, State}.
reduce_workers(Ws, Reduce, OWC) ->
lists:foldl(fun(_, Q0) ->
case queue:len(Q0) =< OWC of
true -> Q0;
false ->
{{value, W}, Q1} = queue:out(Q0),
ecallmgr_amqp_pool_worker:stop(W),
Q1
end
end, Ws, lists:seq(1,Reduce)).
@private
with . The return value is ignored .
, State ) - > void ( )
terminate(_Reason, _State) ->
?LOG("Terminating: ~p", [_Reason]).
@private
, State , Extra ) - > { ok , NewState }
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
Internal functions
start_worker() ->
{ok, Pid} = ecallmgr_amqp_pool_worker_sup:start_child(),
link(Pid),
?LOG("Worker ~p started", [Pid]),
Pid.
|
0f4e392421b27245743768918c75f4933908df3bee4d8fd1ff53f5cc895e3ff4 | bsansouci/reasonglexampleproject | seq.ml | (***********************************************************************)
(* *)
(* Objective Caml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1999 - 2004 ,
Institut National de Recherche en Informatique et en Automatique .
(* Distributed only by permission. *)
(* *)
(***********************************************************************)
$ I d : seq.ml , v 1.3 2004/09/21 18:15:46 weis Exp $
open Images
let load_sequence_as_pixmaps ~window file =
let seq = load_sequence file [] in
let seq = unoptimize_sequence seq in
List.map
(fun frame ->
Ximage.pixmap_of_image window None
frame.frame_image, frame.frame_delay)
seq.seq_frames
| null | https://raw.githubusercontent.com/bsansouci/reasonglexampleproject/4ecef2cdad3a1a157318d1d64dba7def92d8a924/vendor/camlimages/examples/liv/seq.ml | ocaml | *********************************************************************
Objective Caml
Distributed only by permission.
********************************************************************* | , projet Cristal , INRIA Rocquencourt
Copyright 1999 - 2004 ,
Institut National de Recherche en Informatique et en Automatique .
$ I d : seq.ml , v 1.3 2004/09/21 18:15:46 weis Exp $
open Images
let load_sequence_as_pixmaps ~window file =
let seq = load_sequence file [] in
let seq = unoptimize_sequence seq in
List.map
(fun frame ->
Ximage.pixmap_of_image window None
frame.frame_image, frame.frame_delay)
seq.seq_frames
|
f50a9f187a13b5d3cb5465cd7784ffa2a858574482cd3051559019723e3a6faf | l3kn/lisp-compiler-llvm | llvm.scm | (defn emit-alloca (var)
(puts (format " ~A = alloca i64" (list var))))
(defn emit-store (value in)
(puts (format " store i64 ~A, i64* ~A" (list value in))))
(defn emit-load (var from)
(puts (format " ~A = load i64, i64* ~A" (list var from))))
(defn emit-copy (var var_)
(puts (format " ~A = add i64 ~A, 0" (list var var_))))
(defn emit-label (name)
(puts (string-append name ":")))
(defn emit-br1 (label)
(puts (string-append " br label %" label)))
(defn emit-ret (val)
(puts (string-append " ret i64 " val)))
(defn emit-call0 (var name)
(puts (format " ~A = call i64 ~A()" (list var name))))
(defn emit-call1 (var name arg)
(puts (format " ~A = call i64 ~A(i64 ~A)" (list var name arg))))
(defn emit-call2 (var name arg1 arg2)
(puts (format " ~A = call i64 ~A(i64 ~A, i64 ~A)" (list var name arg1 arg2))))
(defn emit-call3 (var name arg1 arg2 arg3)
(puts (format " ~A = call i64 ~A(i64 ~A, i64 ~A, i64 ~A)" (list var name arg1 arg2 arg3))))
| null | https://raw.githubusercontent.com/l3kn/lisp-compiler-llvm/4d9a734e882eed5c4da7f4cd2a779d867359511b/llvm.scm | scheme | (defn emit-alloca (var)
(puts (format " ~A = alloca i64" (list var))))
(defn emit-store (value in)
(puts (format " store i64 ~A, i64* ~A" (list value in))))
(defn emit-load (var from)
(puts (format " ~A = load i64, i64* ~A" (list var from))))
(defn emit-copy (var var_)
(puts (format " ~A = add i64 ~A, 0" (list var var_))))
(defn emit-label (name)
(puts (string-append name ":")))
(defn emit-br1 (label)
(puts (string-append " br label %" label)))
(defn emit-ret (val)
(puts (string-append " ret i64 " val)))
(defn emit-call0 (var name)
(puts (format " ~A = call i64 ~A()" (list var name))))
(defn emit-call1 (var name arg)
(puts (format " ~A = call i64 ~A(i64 ~A)" (list var name arg))))
(defn emit-call2 (var name arg1 arg2)
(puts (format " ~A = call i64 ~A(i64 ~A, i64 ~A)" (list var name arg1 arg2))))
(defn emit-call3 (var name arg1 arg2 arg3)
(puts (format " ~A = call i64 ~A(i64 ~A, i64 ~A, i64 ~A)" (list var name arg1 arg2 arg3))))
| |
de2b12a12e6d31985b6c9a60c849275b96b5b9bd35de18731b1ff2ba4ad5933b | rbkmoney/hellgate | hg_cash_range.erl | -module(hg_cash_range).
-include_lib("damsel/include/dmsl_domain_thrift.hrl").
-include("domain.hrl").
-export([marshal/1]).
-export([unmarshal/1]).
-export([is_inside/2]).
-export([is_subrange/2]).
-export([intersect/2]).
-type cash_range() :: dmsl_domain_thrift:'CashRange'().
-type cash() :: dmsl_domain_thrift:'Cash'().
-spec is_inside(cash(), cash_range()) -> within | {exceeds, lower | upper}.
is_inside(Cash, #domain_CashRange{lower = Lower, upper = Upper}) ->
case
{
compare_cash(fun erlang:'>'/2, Cash, Lower),
compare_cash(fun erlang:'<'/2, Cash, Upper)
}
of
{true, true} ->
within;
{false, true} ->
{exceeds, lower};
{true, false} ->
{exceeds, upper};
_ ->
{error, incompatible}
end.
-spec is_subrange(cash_range(), cash_range()) -> true | false.
is_subrange(
#domain_CashRange{lower = Lower1, upper = Upper1},
#domain_CashRange{lower = Lower2, upper = Upper2}
) ->
compare_bound(fun erlang:'>'/2, Lower1, Lower2) and
compare_bound(fun erlang:'<'/2, Upper1, Upper2).
-spec intersect(cash_range(), cash_range()) -> cash_range() | undefined.
intersect(
#domain_CashRange{lower = Lower1, upper = Upper1},
#domain_CashRange{lower = Lower2, upper = Upper2}
) ->
Lower3 = intersect_bounds(fun erlang:'>'/2, Lower1, Lower2),
Upper3 = intersect_bounds(fun erlang:'<'/2, Upper1, Upper2),
case compare_bound(fun erlang:'<'/2, Lower3, Upper3) of
true ->
#domain_CashRange{lower = Lower3, upper = Upper3};
false ->
undefined
end.
%%
intersect_bounds(F, Lower1, Lower2) ->
case compare_bound(F, Lower1, Lower2) of
true ->
Lower1;
false ->
Lower2
end.
compare_bound(_, {exclusive, Cash}, {exclusive, Cash}) ->
true;
compare_bound(F, {_, Cash}, Bound) ->
compare_cash(F, Cash, Bound) == true orelse false.
compare_cash(_, V, {inclusive, V}) ->
true;
compare_cash(F, ?cash(A, C), {_, ?cash(Am, C)}) ->
F(A, Am);
compare_cash(_, _, _) ->
error.
%% Marshalling
-spec marshal(cash_range()) -> hg_msgpack_marshalling:value().
marshal(CashRange) ->
marshal(cash_range, CashRange).
marshal(cash_range, #domain_CashRange{
lower = Lower,
upper = Upper
}) ->
[2, [marshal(cash_bound, Lower), marshal(cash_bound, Upper)]];
marshal(cash_bound, {Exclusiveness, Cash}) ->
[marshal(exclusiveness, Exclusiveness), hg_cash:marshal(Cash)];
marshal(exclusiveness, inclusive) ->
<<"inclusive">>;
marshal(exclusiveness, exclusive) ->
<<"exclusive">>.
%% Unmarshalling
-spec unmarshal(hg_msgpack_marshalling:value()) -> cash_range().
unmarshal(CashRange) ->
unmarshal(cash_range, CashRange).
unmarshal(cash_range, [2, [Lower, Upper]]) ->
#domain_CashRange{
lower = unmarshal(cash_bound, Lower),
upper = unmarshal(cash_bound, Upper)
};
unmarshal(cash_bound, [Exclusiveness, Cash]) ->
{unmarshal(exclusiveness, Exclusiveness), hg_cash:unmarshal(Cash)};
unmarshal(exclusiveness, <<"inclusive">>) ->
inclusive;
unmarshal(exclusiveness, <<"exclusive">>) ->
exclusive;
unmarshal(cash_range, [1, {'domain_CashRange', Upper, Lower}]) ->
#domain_CashRange{
lower = unmarshal(cash_bound_legacy, Lower),
upper = unmarshal(cash_bound_legacy, Upper)
};
unmarshal(cash_bound_legacy, {Exclusiveness, Cash}) when Exclusiveness == exclusive; Exclusiveness == inclusive ->
{Exclusiveness, hg_cash:unmarshal([1, Cash])}.
| null | https://raw.githubusercontent.com/rbkmoney/hellgate/4159f30c726d550c86251f86f00deec73f191c2b/apps/hellgate/src/hg_cash_range.erl | erlang |
Marshalling
Unmarshalling | -module(hg_cash_range).
-include_lib("damsel/include/dmsl_domain_thrift.hrl").
-include("domain.hrl").
-export([marshal/1]).
-export([unmarshal/1]).
-export([is_inside/2]).
-export([is_subrange/2]).
-export([intersect/2]).
-type cash_range() :: dmsl_domain_thrift:'CashRange'().
-type cash() :: dmsl_domain_thrift:'Cash'().
-spec is_inside(cash(), cash_range()) -> within | {exceeds, lower | upper}.
is_inside(Cash, #domain_CashRange{lower = Lower, upper = Upper}) ->
case
{
compare_cash(fun erlang:'>'/2, Cash, Lower),
compare_cash(fun erlang:'<'/2, Cash, Upper)
}
of
{true, true} ->
within;
{false, true} ->
{exceeds, lower};
{true, false} ->
{exceeds, upper};
_ ->
{error, incompatible}
end.
-spec is_subrange(cash_range(), cash_range()) -> true | false.
is_subrange(
#domain_CashRange{lower = Lower1, upper = Upper1},
#domain_CashRange{lower = Lower2, upper = Upper2}
) ->
compare_bound(fun erlang:'>'/2, Lower1, Lower2) and
compare_bound(fun erlang:'<'/2, Upper1, Upper2).
-spec intersect(cash_range(), cash_range()) -> cash_range() | undefined.
intersect(
#domain_CashRange{lower = Lower1, upper = Upper1},
#domain_CashRange{lower = Lower2, upper = Upper2}
) ->
Lower3 = intersect_bounds(fun erlang:'>'/2, Lower1, Lower2),
Upper3 = intersect_bounds(fun erlang:'<'/2, Upper1, Upper2),
case compare_bound(fun erlang:'<'/2, Lower3, Upper3) of
true ->
#domain_CashRange{lower = Lower3, upper = Upper3};
false ->
undefined
end.
intersect_bounds(F, Lower1, Lower2) ->
case compare_bound(F, Lower1, Lower2) of
true ->
Lower1;
false ->
Lower2
end.
compare_bound(_, {exclusive, Cash}, {exclusive, Cash}) ->
true;
compare_bound(F, {_, Cash}, Bound) ->
compare_cash(F, Cash, Bound) == true orelse false.
compare_cash(_, V, {inclusive, V}) ->
true;
compare_cash(F, ?cash(A, C), {_, ?cash(Am, C)}) ->
F(A, Am);
compare_cash(_, _, _) ->
error.
-spec marshal(cash_range()) -> hg_msgpack_marshalling:value().
marshal(CashRange) ->
marshal(cash_range, CashRange).
marshal(cash_range, #domain_CashRange{
lower = Lower,
upper = Upper
}) ->
[2, [marshal(cash_bound, Lower), marshal(cash_bound, Upper)]];
marshal(cash_bound, {Exclusiveness, Cash}) ->
[marshal(exclusiveness, Exclusiveness), hg_cash:marshal(Cash)];
marshal(exclusiveness, inclusive) ->
<<"inclusive">>;
marshal(exclusiveness, exclusive) ->
<<"exclusive">>.
-spec unmarshal(hg_msgpack_marshalling:value()) -> cash_range().
unmarshal(CashRange) ->
unmarshal(cash_range, CashRange).
unmarshal(cash_range, [2, [Lower, Upper]]) ->
#domain_CashRange{
lower = unmarshal(cash_bound, Lower),
upper = unmarshal(cash_bound, Upper)
};
unmarshal(cash_bound, [Exclusiveness, Cash]) ->
{unmarshal(exclusiveness, Exclusiveness), hg_cash:unmarshal(Cash)};
unmarshal(exclusiveness, <<"inclusive">>) ->
inclusive;
unmarshal(exclusiveness, <<"exclusive">>) ->
exclusive;
unmarshal(cash_range, [1, {'domain_CashRange', Upper, Lower}]) ->
#domain_CashRange{
lower = unmarshal(cash_bound_legacy, Lower),
upper = unmarshal(cash_bound_legacy, Upper)
};
unmarshal(cash_bound_legacy, {Exclusiveness, Cash}) when Exclusiveness == exclusive; Exclusiveness == inclusive ->
{Exclusiveness, hg_cash:unmarshal([1, Cash])}.
|
95e6dbca764ef9cb96362d1ea5e74b85528f3072c961728e43c91f0411066b60 | sgbj/MaximaSharp | zunm2r.lisp | ;;; Compiled by f2cl version:
( " f2cl1.l , v 2edcbd958861 2012/05/30 03:34:52 toy $ "
" f2cl2.l , v 96616d88fb7e 2008/02/22 22:19:34 rtoy $ "
" f2cl3.l , v 96616d88fb7e 2008/02/22 22:19:34 rtoy $ "
" f2cl4.l , v 96616d88fb7e 2008/02/22 22:19:34 rtoy $ "
" f2cl5.l , v 3fe93de3be82 2012/05/06 02:17:14 toy $ "
" f2cl6.l , v 1d5cbacbb977 2008/08/24 00:56:27 rtoy $ "
" macros.l , v 3fe93de3be82 2012/05/06 02:17:14 toy $ " )
;;; Using Lisp CMU Common Lisp 20d (20D Unicode)
;;;
;;; Options: ((:prune-labels nil) (:auto-save t) (:relaxed-array-decls t)
;;; (:coerce-assigns :as-needed) (:array-type ':array)
;;; (:array-slicing t) (:declare-common nil)
;;; (:float-format double-float))
(in-package :lapack)
(let* ((one (f2cl-lib:cmplx 1.0 0.0)))
(declare (type (f2cl-lib:complex16) one) (ignorable one))
(defun zunm2r (side trans m n k a lda tau c ldc work info)
(declare (type (array f2cl-lib:complex16 (*)) work c tau a)
(type (f2cl-lib:integer4) info ldc lda k n m)
(type (simple-string *) trans side))
(f2cl-lib:with-multi-array-data
((side character side-%data% side-%offset%)
(trans character trans-%data% trans-%offset%)
(a f2cl-lib:complex16 a-%data% a-%offset%)
(tau f2cl-lib:complex16 tau-%data% tau-%offset%)
(c f2cl-lib:complex16 c-%data% c-%offset%)
(work f2cl-lib:complex16 work-%data% work-%offset%))
(prog ((aii #C(0.0 0.0)) (taui #C(0.0 0.0)) (i 0) (i1 0) (i2 0) (i3 0)
(ic 0) (jc 0) (mi 0) (ni 0) (nq 0) (left nil) (notran nil))
(declare (type (f2cl-lib:complex16) aii taui)
(type (f2cl-lib:integer4) i i1 i2 i3 ic jc mi ni nq)
(type f2cl-lib:logical left notran))
(setf info 0)
(setf left (lsame side "L"))
(setf notran (lsame trans "N"))
(cond
(left
(setf nq m))
(t
(setf nq n)))
(cond
((and (not left) (not (lsame side "R")))
(setf info -1))
((and (not notran) (not (lsame trans "C")))
(setf info -2))
((< m 0)
(setf info -3))
((< n 0)
(setf info -4))
((or (< k 0) (> k nq))
(setf info -5))
((< lda (max (the f2cl-lib:integer4 1) (the f2cl-lib:integer4 nq)))
(setf info -7))
((< ldc (max (the f2cl-lib:integer4 1) (the f2cl-lib:integer4 m)))
(setf info -10)))
(cond
((/= info 0)
(xerbla "ZUNM2R" (f2cl-lib:int-sub info))
(go end_label)))
(if (or (= m 0) (= n 0) (= k 0)) (go end_label))
(cond
((or (and left (not notran)) (and (not left) notran))
(setf i1 1)
(setf i2 k)
(setf i3 1))
(t
(setf i1 k)
(setf i2 1)
(setf i3 -1)))
(cond
(left
(setf ni n)
(setf jc 1))
(t
(setf mi m)
(setf ic 1)))
(f2cl-lib:fdo (i i1 (f2cl-lib:int-add i i3))
((> i i2) nil)
(tagbody
(cond
(left
(setf mi (f2cl-lib:int-add (f2cl-lib:int-sub m i) 1))
(setf ic i))
(t
(setf ni (f2cl-lib:int-add (f2cl-lib:int-sub n i) 1))
(setf jc i)))
(cond
(notran
(setf taui (f2cl-lib:fref tau-%data% (i) ((1 *)) tau-%offset%)))
(t
(setf taui
(coerce
(f2cl-lib:dconjg
(f2cl-lib:fref tau-%data% (i) ((1 *)) tau-%offset%))
'f2cl-lib:complex16))))
(setf aii
(f2cl-lib:fref a-%data% (i i) ((1 lda) (1 *)) a-%offset%))
(setf (f2cl-lib:fref a-%data% (i i) ((1 lda) (1 *)) a-%offset%)
one)
(zlarf side mi ni
(f2cl-lib:array-slice a-%data%
f2cl-lib:complex16
(i i)
((1 lda) (1 *))
a-%offset%)
1 taui
(f2cl-lib:array-slice c-%data%
f2cl-lib:complex16
(ic jc)
((1 ldc) (1 *))
c-%offset%)
ldc work)
(setf (f2cl-lib:fref a-%data% (i i) ((1 lda) (1 *)) a-%offset%)
aii)
label10))
(go end_label)
end_label
(return (values nil nil nil nil nil nil nil nil nil nil nil info))))))
(in-package #-gcl #:cl-user #+gcl "CL-USER")
#+#.(cl:if (cl:find-package '#:f2cl) '(and) '(or))
(eval-when (:load-toplevel :compile-toplevel :execute)
(setf (gethash 'fortran-to-lisp::zunm2r
fortran-to-lisp::*f2cl-function-info*)
(fortran-to-lisp::make-f2cl-finfo
:arg-types '((simple-string) (simple-string)
(fortran-to-lisp::integer4) (fortran-to-lisp::integer4)
(fortran-to-lisp::integer4)
(array fortran-to-lisp::complex16 (*))
(fortran-to-lisp::integer4)
(array fortran-to-lisp::complex16 (*))
(array fortran-to-lisp::complex16 (*))
(fortran-to-lisp::integer4)
(array fortran-to-lisp::complex16 (*))
(fortran-to-lisp::integer4))
:return-values '(nil nil nil nil nil nil nil nil nil nil nil
fortran-to-lisp::info)
:calls '(fortran-to-lisp::zlarf fortran-to-lisp::xerbla
fortran-to-lisp::lsame))))
| null | https://raw.githubusercontent.com/sgbj/MaximaSharp/75067d7e045b9ed50883b5eb09803b4c8f391059/Test/bin/Debug/Maxima-5.30.0/share/maxima/5.30.0/share/lapack/lapack/zunm2r.lisp | lisp | Compiled by f2cl version:
Using Lisp CMU Common Lisp 20d (20D Unicode)
Options: ((:prune-labels nil) (:auto-save t) (:relaxed-array-decls t)
(:coerce-assigns :as-needed) (:array-type ':array)
(:array-slicing t) (:declare-common nil)
(:float-format double-float)) | ( " f2cl1.l , v 2edcbd958861 2012/05/30 03:34:52 toy $ "
" f2cl2.l , v 96616d88fb7e 2008/02/22 22:19:34 rtoy $ "
" f2cl3.l , v 96616d88fb7e 2008/02/22 22:19:34 rtoy $ "
" f2cl4.l , v 96616d88fb7e 2008/02/22 22:19:34 rtoy $ "
" f2cl5.l , v 3fe93de3be82 2012/05/06 02:17:14 toy $ "
" f2cl6.l , v 1d5cbacbb977 2008/08/24 00:56:27 rtoy $ "
" macros.l , v 3fe93de3be82 2012/05/06 02:17:14 toy $ " )
(in-package :lapack)
(let* ((one (f2cl-lib:cmplx 1.0 0.0)))
(declare (type (f2cl-lib:complex16) one) (ignorable one))
(defun zunm2r (side trans m n k a lda tau c ldc work info)
(declare (type (array f2cl-lib:complex16 (*)) work c tau a)
(type (f2cl-lib:integer4) info ldc lda k n m)
(type (simple-string *) trans side))
(f2cl-lib:with-multi-array-data
((side character side-%data% side-%offset%)
(trans character trans-%data% trans-%offset%)
(a f2cl-lib:complex16 a-%data% a-%offset%)
(tau f2cl-lib:complex16 tau-%data% tau-%offset%)
(c f2cl-lib:complex16 c-%data% c-%offset%)
(work f2cl-lib:complex16 work-%data% work-%offset%))
(prog ((aii #C(0.0 0.0)) (taui #C(0.0 0.0)) (i 0) (i1 0) (i2 0) (i3 0)
(ic 0) (jc 0) (mi 0) (ni 0) (nq 0) (left nil) (notran nil))
(declare (type (f2cl-lib:complex16) aii taui)
(type (f2cl-lib:integer4) i i1 i2 i3 ic jc mi ni nq)
(type f2cl-lib:logical left notran))
(setf info 0)
(setf left (lsame side "L"))
(setf notran (lsame trans "N"))
(cond
(left
(setf nq m))
(t
(setf nq n)))
(cond
((and (not left) (not (lsame side "R")))
(setf info -1))
((and (not notran) (not (lsame trans "C")))
(setf info -2))
((< m 0)
(setf info -3))
((< n 0)
(setf info -4))
((or (< k 0) (> k nq))
(setf info -5))
((< lda (max (the f2cl-lib:integer4 1) (the f2cl-lib:integer4 nq)))
(setf info -7))
((< ldc (max (the f2cl-lib:integer4 1) (the f2cl-lib:integer4 m)))
(setf info -10)))
(cond
((/= info 0)
(xerbla "ZUNM2R" (f2cl-lib:int-sub info))
(go end_label)))
(if (or (= m 0) (= n 0) (= k 0)) (go end_label))
(cond
((or (and left (not notran)) (and (not left) notran))
(setf i1 1)
(setf i2 k)
(setf i3 1))
(t
(setf i1 k)
(setf i2 1)
(setf i3 -1)))
(cond
(left
(setf ni n)
(setf jc 1))
(t
(setf mi m)
(setf ic 1)))
(f2cl-lib:fdo (i i1 (f2cl-lib:int-add i i3))
((> i i2) nil)
(tagbody
(cond
(left
(setf mi (f2cl-lib:int-add (f2cl-lib:int-sub m i) 1))
(setf ic i))
(t
(setf ni (f2cl-lib:int-add (f2cl-lib:int-sub n i) 1))
(setf jc i)))
(cond
(notran
(setf taui (f2cl-lib:fref tau-%data% (i) ((1 *)) tau-%offset%)))
(t
(setf taui
(coerce
(f2cl-lib:dconjg
(f2cl-lib:fref tau-%data% (i) ((1 *)) tau-%offset%))
'f2cl-lib:complex16))))
(setf aii
(f2cl-lib:fref a-%data% (i i) ((1 lda) (1 *)) a-%offset%))
(setf (f2cl-lib:fref a-%data% (i i) ((1 lda) (1 *)) a-%offset%)
one)
(zlarf side mi ni
(f2cl-lib:array-slice a-%data%
f2cl-lib:complex16
(i i)
((1 lda) (1 *))
a-%offset%)
1 taui
(f2cl-lib:array-slice c-%data%
f2cl-lib:complex16
(ic jc)
((1 ldc) (1 *))
c-%offset%)
ldc work)
(setf (f2cl-lib:fref a-%data% (i i) ((1 lda) (1 *)) a-%offset%)
aii)
label10))
(go end_label)
end_label
(return (values nil nil nil nil nil nil nil nil nil nil nil info))))))
(in-package #-gcl #:cl-user #+gcl "CL-USER")
#+#.(cl:if (cl:find-package '#:f2cl) '(and) '(or))
(eval-when (:load-toplevel :compile-toplevel :execute)
(setf (gethash 'fortran-to-lisp::zunm2r
fortran-to-lisp::*f2cl-function-info*)
(fortran-to-lisp::make-f2cl-finfo
:arg-types '((simple-string) (simple-string)
(fortran-to-lisp::integer4) (fortran-to-lisp::integer4)
(fortran-to-lisp::integer4)
(array fortran-to-lisp::complex16 (*))
(fortran-to-lisp::integer4)
(array fortran-to-lisp::complex16 (*))
(array fortran-to-lisp::complex16 (*))
(fortran-to-lisp::integer4)
(array fortran-to-lisp::complex16 (*))
(fortran-to-lisp::integer4))
:return-values '(nil nil nil nil nil nil nil nil nil nil nil
fortran-to-lisp::info)
:calls '(fortran-to-lisp::zlarf fortran-to-lisp::xerbla
fortran-to-lisp::lsame))))
|
a52c5504957a1f3da22303bb3ae8bdeed5cad9a7a1aaa63c47507aeab6de2a60 | tisnik/clojure-examples | project.clj | ;
( C ) Copyright 2015 , 2020
;
; All rights reserved. This program and the accompanying materials
; are made available under the terms of the Eclipse Public License v1.0
; which accompanies this distribution, and is available at
-v10.html
;
; Contributors:
;
(defproject seesaw10 "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.10.1"]
[seesaw "1.4.5"]]
:plugins [[lein-codox "0.10.7"]
[test2junit "1.1.0"]
[ lein - test - out " 0.3.1 " ]
[lein-cloverage "1.0.7-SNAPSHOT"]
[lein-kibit "0.1.8"]
[lein-clean-m2 "0.1.2"]
[lein-project-edn "0.3.0"]
[lein-marginalia "0.9.1"]]
:main ^:skip-aot seesaw10.core
:target-path "target/%s"
:profiles {:uberjar {:aot :all}})
| null | https://raw.githubusercontent.com/tisnik/clojure-examples/cf9b3484a332ebd93bc585f051802c333ebde3df/seesaw10/project.clj | clojure |
All rights reserved. This program and the accompanying materials
are made available under the terms of the Eclipse Public License v1.0
which accompanies this distribution, and is available at
Contributors:
| ( C ) Copyright 2015 , 2020
-v10.html
(defproject seesaw10 "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.10.1"]
[seesaw "1.4.5"]]
:plugins [[lein-codox "0.10.7"]
[test2junit "1.1.0"]
[ lein - test - out " 0.3.1 " ]
[lein-cloverage "1.0.7-SNAPSHOT"]
[lein-kibit "0.1.8"]
[lein-clean-m2 "0.1.2"]
[lein-project-edn "0.3.0"]
[lein-marginalia "0.9.1"]]
:main ^:skip-aot seesaw10.core
:target-path "target/%s"
:profiles {:uberjar {:aot :all}})
|
bdacd7a4bbd8a015be7ae403f8e64087d4f501f11f10e710ba25b25201f1de53 | mput/sicp-solutions | 2_10.rkt | #lang racket
Solution for exercise 2_10 .
(require rackunit "2_07.rkt")
(provide div-interval)
(define (div-interval x y)
(if (and (< (lower-bound y) 0) (> (upper-bound y) 0))
(error "Second interval is crossing zerro")
(mul-interval x
(make-interval (/ 1.0 (upper-bound y))
(/ 1.0 (lower-bound y))))))
| null | https://raw.githubusercontent.com/mput/sicp-solutions/fe12ad2b6f17c99978c8fe04b2495005986b8496/solutions/2_10.rkt | racket | #lang racket
Solution for exercise 2_10 .
(require rackunit "2_07.rkt")
(provide div-interval)
(define (div-interval x y)
(if (and (< (lower-bound y) 0) (> (upper-bound y) 0))
(error "Second interval is crossing zerro")
(mul-interval x
(make-interval (/ 1.0 (upper-bound y))
(/ 1.0 (lower-bound y))))))
| |
c76f5e47aeb0c281060009ea7d7218fb6b31ced7469c5c803e9d99e1cfbaa472 | tchoutri/pg-entity | Main.hs | module Main where
import Data.Pool (createPool, withResource)
import qualified Database.PostgreSQL.Simple as PG
import qualified Database.Postgres.Temp as Postgres.Temp
import qualified EntitySpec
import qualified GenericsSpec
import Optics.Core
import Test.Tasty (TestTree, defaultMain, testGroup)
import Utils
main :: IO ()
main = do
env <- getTestEnvironment
withResource (env ^. #pool) migrate
spec <- traverse (`runTestM` env) specs
defaultMain . testGroup "pg-entity tests" $ spec
specs :: [TestM TestTree]
specs =
[ GenericsSpec.spec
, EntitySpec.spec
]
getTestEnvironment :: IO TestEnv
getTestEnvironment = do
eitherDb <- Postgres.Temp.start
case eitherDb of
Right db -> do
pool <-
createPool
(PG.connectPostgreSQL $ Postgres.Temp.toConnectionString db)
PG.close
1
100000000
50
pure TestEnv{..}
Left _ -> error "meh"
| null | https://raw.githubusercontent.com/tchoutri/pg-entity/b5727da1bb655ec5be41c146524bf22e168f7274/test/Main.hs | haskell | module Main where
import Data.Pool (createPool, withResource)
import qualified Database.PostgreSQL.Simple as PG
import qualified Database.Postgres.Temp as Postgres.Temp
import qualified EntitySpec
import qualified GenericsSpec
import Optics.Core
import Test.Tasty (TestTree, defaultMain, testGroup)
import Utils
main :: IO ()
main = do
env <- getTestEnvironment
withResource (env ^. #pool) migrate
spec <- traverse (`runTestM` env) specs
defaultMain . testGroup "pg-entity tests" $ spec
specs :: [TestM TestTree]
specs =
[ GenericsSpec.spec
, EntitySpec.spec
]
getTestEnvironment :: IO TestEnv
getTestEnvironment = do
eitherDb <- Postgres.Temp.start
case eitherDb of
Right db -> do
pool <-
createPool
(PG.connectPostgreSQL $ Postgres.Temp.toConnectionString db)
PG.close
1
100000000
50
pure TestEnv{..}
Left _ -> error "meh"
| |
a35a2bfa1ad45124a2cdb2ef7d8102e498b2e877769bc87acced18aee1083d02 | vikram/lisplibraries | httpd.lisp | ;; -*- lisp -*-
(in-package :it.bese.ucw)
;;;; ** A Trivial HTTP Server
We do n't actually expect anyone to use this backend but 1 ) it 's
convenient when getting starting and 2 ) the mod_lisp backend
;;;; reuses most of it.
(eval-when (:compile-toplevel :load-toplevel :execute)
(defclass httpd-backend (backend)
((port :accessor port :initarg :port :initform 8080)
(host :accessor host :initarg :host :initform "127.0.0.1")
(socket :accessor socket)
(server :accessor server :initarg :server)
(handlers :accessor handlers :initform '())))
(defclass httpd-message (message)
((headers :accessor headers :initform '())
(network-stream :accessor network-stream :initarg :network-stream)))
(defclass httpd-request (httpd-message request)
((parameters :accessor parameters :initform '())
(raw-uri :accessor raw-uri :initform nil)
(query-path :accessor query-path :initform nil)
(raw-body :accessor raw-body :initform nil)
(peer-address :accessor peer-address :initform nil)
(http-method :accessor http-method :initform nil)))
(defclass httpd-response (httpd-message response)
((request :accessor request :initarg :request :initform nil)
(html-stream :accessor html-stream :initform (make-string-output-stream))
(status :accessor status :initform "200 OK")
(external-format :accessor external-format :initform nil)
(content :accessor content :initform nil))))
;;;; Backend methods
(defmethod initialize-backend ((backend httpd-backend) &key server &allow-other-keys)
(when (and (null *mime-types*) (probe-file *default-mime-types-file*))
(read-mime-types *default-mime-types-file*))
(setf (server backend) server)
backend)
(defmethod startup-backend :before ((backend httpd-backend) &rest initargs)
(declare (ignore initargs))
(setf (socket backend) (open-server :host (host backend) :port (port backend))))
(defmethod handle-request ((backend httpd-backend) (request httpd-request) (response httpd-response))
(ucw.backend.info "Handling request from ~S for ~S" (peer-address request) (raw-uri request))
(or (block handle
(dolist* ((can-match handler url-base) (handlers backend))
(declare (ignore url-base))
(when (funcall can-match (query-path request))
(funcall handler request response)
(return-from handle t)))
nil)
(handle-request (server backend) request response)
;; if we get here there's no handler defined for the request
(handle-404 backend request response)))
(defmethod handle-404 ((backend httpd-backend) (request httpd-request) (response httpd-response))
(setf (get-header response "Status") "404 Not Found"
(get-header response "Content-Type") "text/html")
(with-yaclml-stream (html-stream response)
(<:html
(<:head (<:title "404 Not Found"))
(<:body
(<:p (<:as-html (raw-uri request)) " not found."))))
(close-request request)
(send-response response))
(defmethod send-error ((stream stream) message)
"Ignore trying to read the request or anything. Just send an
error message. This is a very low level bailout function."
(let ((response (make-response stream)))
(setf (get-header response "Status") "500 Internal Server Error"
(get-header response "Content-Type") "text/html")
(with-yaclml-stream (html-stream response)
(<:html
(<:head (<:title "500 Internal Server Error"))
(<:body
(<:p "Server Error.")
(<:p (<:as-html message)))))
(send-response response)))
;;;; The single threaded server
(defun abort-backend-request (&optional condition)
(ucw.backend.info "Gracefully aborting httpd request because: ~S" condition)
(throw 'abort-backend-request nil))
(defmethod startup-backend ((backend httpd-backend) &rest init-args)
"Start the RERL."
(declare (ignore init-args))
(let (stream peer-address request response)
(labels ((serve-one-request ()
(multiple-value-setq (stream peer-address)
(accept-connection (socket backend) :element-type '(unsigned-byte 8)))
(setf request (read-request backend stream)
response (make-response request)
(peer-address request) peer-address)
(handle-request backend request response))
(handle-request-error (condition)
(ucw.backend.error "While handling a request on ~S: ~A" stream condition)
(when *debug-on-error*
(restart-case
(swank:swank-debugger-hook condition nil)
(kill-worker ()
:report "Kill this worker."
(values))))
(abort-backend-request condition))
(handle-request/internal ()
(catch 'abort-backend-request
(handler-bind ((stream-error (lambda (c)
(when (eq (stream-error-stream c) stream)
(abort-backend-request c))))
(error #'handle-request-error))
(unwind-protect
(serve-one-request)
(ignore-errors (close stream)))))))
(unwind-protect
(loop (handle-request/internal))
(ignore-errors
(swank-backend:close-socket (socket backend))))))
backend)
(defmethod shutdown-backend ((backend httpd-backend) &rest init-args)
"This would stop the single therad httpd backend if that made any sense.
Stopping the single therad backend requires nothing more than
getting STARTUP-BACKEND to return (either normally or by chosing
you implementation's abort restart after a break)."
(declare (ignore init-args))
backend)
(defmethod publish-directory ((backend httpd-backend) directory-pathname url-base)
(push (list (lambda (request-url)
(ucw.backend.dribble "Trying to match '~S' under url-base '~S' to serve it as a file from '~S'"
request-url url-base directory-pathname)
(starts-with request-url url-base))
(lambda (request response)
(aif (map-query-path-to-file (query-path request)
url-base
directory-pathname)
(progn
(ucw.backend.debug "Serving '~S' as a file under url-base '~S'" it url-base)
(serve-file it request response))
(progn
(ucw.backend.debug "Failed to serve '~S' as a file under url-base '~S'" (query-path request) url-base)
(handle-404 backend request response))))
url-base)
(handlers backend)))
;;;; Message headers methods
(defmethod get-header ((message httpd-message) header-name)
(cdr (assoc header-name (headers message) :test #'string-equal)))
(defmethod (setf get-header) (value (message httpd-message) header-name)
(aif (assoc header-name (headers message) :test #'string-equal)
(setf (cdr it) value)
(push (cons header-name value) (headers message)))
value)
(defmethod add-header ((message httpd-message) header-name value)
(push (cons header-name value) (headers message))
value)
(defmethod delete-header ((message httpd-message) header-name)
(setf (headers message)
(delete-if #'(lambda (item)
(string-equal (car item)
header-name))
(headers message))))
;;;; Request handling
(defun read-line-from-network (stream &optional (eof-error-p t))
"A simple state machine which reads chars from STREAM until it
gets a CR-LF sequence or the end of the stream."
(declare (optimize (speed 3)))
(let ((buffer (make-array 50
:element-type '(unsigned-byte 8)
:adjustable t
:fill-pointer 0)))
(labels ((read-next-char ()
(let ((byte (read-byte stream eof-error-p stream)))
(if (eq stream byte)
(return-from read-line-from-network buffer)
(return-from read-next-char byte))))
(cr ()
(let ((next-byte (read-next-char)))
(case next-byte
LF
(return-from read-line-from-network buffer))
(t ;; add both the cr and this char to the buffer
(vector-push-extend #.+carriage-return+ buffer)
(vector-push-extend next-byte buffer)
(next)))))
(next ()
(let ((next-byte (read-next-char)))
(case next-byte
(#.+carriage-return+ ;; CR
(cr))
LF
(return-from read-line-from-network buffer))
(t
(vector-push-extend next-byte buffer)
(next))))))
(next))))
(defun accumulate-parameters (assoc-list)
"Accumulates same parameters into lists. Otherwise
multiple-selection lists won't have a list value and
<ucw:select would fail."
(let ((result '()))
(dolist* ((name . value) assoc-list)
(unless (string= name "")
(aif (assoc name result :test #'string=)
(if (and (cdr it) (listp (cdr it)))
(setf (cdr it) (cons value (cdr it)))
(setf (cdr it) (list value (cdr it))))
(push (cons name value) result))))
;;; reverse the (cdr it) so that writer lambda's see the values
;;; in correct order.
(dolist (it result)
(when (and (cdr it) (listp (cdr it)))
(setf (cdr it) (nreverse (cdr it)))))
;;; rever the result so that map-parameters see the request
;;; parameters in correct order.
(nreverse result)))
(defmethod read-request ((backend httpd-backend) stream)
"Reads an HTTP request message from STREAM. Returns a httpd-request object."
(let* ((request (make-instance 'httpd-request :network-stream stream))
(line (read-line-from-network stream))
(pieces (split-on-space line)))
(ucw.backend.dribble "In read-request, first line in :us-ascii is ~S, pieces are ~S"
(octets-to-string line :us-ascii) pieces)
(destructuring-bind (http-method uri &optional protocol) pieces
(declare (ignore protocol))
(setf (raw-uri request) (coerce (octets-to-string uri #.(or (external-format-for :url) :iso-8859-1)) 'simple-string)
(http-method request) (coerce (octets-to-string http-method :us-ascii) 'simple-string)
(headers request) (read-request-headers stream))
(aif (position #\? (raw-uri request))
(setf (query-path request) (make-displaced-array (raw-uri request) 0 it)
(parameters request) (parse-query-parameters
(make-displaced-array (raw-uri request)
(1+ it))))
(setf (query-path request) (raw-uri request)
(parameters request) '()))
(setf (query-path request) (unescape-as-uri (query-path request)))
(setf (parameters request) (append (parameters request)
(accumulate-parameters
(parse-request-body stream
(get-header request "Content-Length")
(get-header request "Content-Type"))))))
request))
(defmethod get-parameter ((request httpd-request) name)
(loop
with value = '()
for (k . v) in (parameters request)
when (string= k name)
do (if value
(if (consp value)
(push v value)
(setf value (list v value)))
(setf value v))
finally (return value)))
(defmethod map-parameters ((request httpd-request) lambda)
(dolist* ((name . value) (parameters request))
(unless (string= name "")
(funcall lambda name (if (stringp value)
(copy-seq value)
value)))))
(defun read-request-headers (stream)
(iterate
(for header-line = (read-line-from-network stream))
(until (= 0 (length header-line)))
(for (name . value) = (split-header-line header-line))
(collect (cons (octets-to-string name :us-ascii)
(octets-to-string value :iso-8859-1)))))
(defmethod close-request ((request httpd-request))
request)
;;;; Response objects
(defmethod make-response ((request httpd-request))
(make-instance 'httpd-response
:request request
:network-stream (network-stream request)))
(defmethod make-response ((stream stream))
(make-instance 'httpd-response :network-stream stream))
(defmethod clear-response ((response httpd-response))
(setf (html-stream response) (make-string-output-stream)
(headers response) '()))
;;;; httpd-response objects special case the "Status" header.
(defmethod get-header ((response httpd-response) header-name)
(if (string= "Status" header-name)
(status response)
(call-next-method)))
(defmethod (setf get-header) (value (response httpd-response) header-name)
(if (string= "Status" header-name)
(setf (status response) value)
(call-next-method)))
(defun write-crlf (stream)
(write-byte 13 stream)
(write-byte 10 stream))
(defun write-header-line (name value stream)
(write-sequence (string-to-octets name :us-ascii) stream)
;; ": "
(write-byte 58 stream)
(write-byte 32 stream)
(write-sequence (string-to-octets value :iso-8859-1) stream)
(write-crlf stream))
(defmethod encoding ((response httpd-response))
(or (external-format response)
(call-next-method)))
(defun httpd-send-headers (response &optional calculate-content-length-from-body)
(ucw.backend.dribble "Sending headers for ~S (Status: ~S)." response (status response))
(let ((stream (network-stream response)))
(write-sequence #.(string-to-octets "HTTP/1.1 " :us-ascii) stream)
(write-sequence (string-to-octets (status response) :us-ascii) stream)
(write-byte 32 stream)
(write-crlf stream)
(dolist* ((name . value) (headers response))
(unless (null value)
(ucw.backend.dribble "Sending header ~S: ~S" name value)
(write-header-line name value stream)))
(when calculate-content-length-from-body
(setf (content response)
(string-to-octets (get-output-stream-string (html-stream response))
(encoding response)))
(unless (assoc "Content-Length" (headers response) :test #'string-equal)
Content - Length may be defined and NIL , we do n't print header then
(write-header-line "Content-Length" (princ-to-string (length (content response))) stream)))
(write-crlf stream)
response))
(defmethod send-headers ((response httpd-response))
(httpd-send-headers response nil))
(defmethod send-response ((response httpd-response))
(httpd-send-headers response t)
(unless (and (request response)
(string= "HEAD" (http-method (request response))))
(ucw.backend.dribble "HTTPD: Sending ~S (~D bytes) as body"
(content response) (length (content response)))
(write-sequence (content response) (network-stream response))))
;;;; Debugging the backend
(defparameter *httpd-trace-functions*
'(swank-backend:send
swank-backend:spawn
swank-backend:receive
initialize-backend
startup-backend
shutdown-backend
abort-backend-request
close-request
send-response
httpd-send-headers
httpd-controller-loop
httpd-worker-loop/handle
httpd-worker-loop
httpd-accept-loop
get-header
(setf get-header)
add-header
read-request
parse-query-parameters
parse-request-body
rfc2388-callback
grab-param
read-line-from-network
split-on-space
split-header-line
read-request-headers
make-response
get-parameter
map-parameters
rfc2388:parse-header-value
rfc2388:get-header))
(defun trace-httpd-backend ()
(eval `(trace ,@*httpd-trace-functions*)))
(defun untrace-httpd-backend ()
(eval `(untrace ,@*httpd-trace-functions*)))
Copyright ( c ) 2005 - 2006 Edward
;; All rights reserved.
;;
;; Redistribution and use in source and binary forms, with or without
;; modification, are permitted provided that the following conditions are
;; met:
;;
;; - Redistributions of source code must retain the above copyright
;; notice, this list of conditions and the following disclaimer.
;;
;; - Redistributions in binary form must reproduce the above copyright
;; notice, this list of conditions and the following disclaimer in the
;; documentation and/or other materials provided with the distribution.
;;
- Neither the name of , nor , nor the names
;; of its contributors may be used to endorse or promote products
;; derived from this software without specific prior written permission.
;;
;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
;; LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
;; A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR ANY DIRECT , INDIRECT , INCIDENTAL ,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT
LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE ,
;; DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
;; (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
| null | https://raw.githubusercontent.com/vikram/lisplibraries/105e3ef2d165275eb78f36f5090c9e2cdd0754dd/site/ucw-boxset/ucw_ajax/src/backend/httpd.lisp | lisp | -*- lisp -*-
** A Trivial HTTP Server
reuses most of it.
Backend methods
if we get here there's no handler defined for the request
The single threaded server
Message headers methods
Request handling
add both the cr and this char to the buffer
CR
reverse the (cdr it) so that writer lambda's see the values
in correct order.
rever the result so that map-parameters see the request
parameters in correct order.
Response objects
httpd-response objects special case the "Status" header.
": "
Debugging the backend
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
LOSS OF USE ,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
(in-package :it.bese.ucw)
We do n't actually expect anyone to use this backend but 1 ) it 's
convenient when getting starting and 2 ) the mod_lisp backend
(eval-when (:compile-toplevel :load-toplevel :execute)
(defclass httpd-backend (backend)
((port :accessor port :initarg :port :initform 8080)
(host :accessor host :initarg :host :initform "127.0.0.1")
(socket :accessor socket)
(server :accessor server :initarg :server)
(handlers :accessor handlers :initform '())))
(defclass httpd-message (message)
((headers :accessor headers :initform '())
(network-stream :accessor network-stream :initarg :network-stream)))
(defclass httpd-request (httpd-message request)
((parameters :accessor parameters :initform '())
(raw-uri :accessor raw-uri :initform nil)
(query-path :accessor query-path :initform nil)
(raw-body :accessor raw-body :initform nil)
(peer-address :accessor peer-address :initform nil)
(http-method :accessor http-method :initform nil)))
(defclass httpd-response (httpd-message response)
((request :accessor request :initarg :request :initform nil)
(html-stream :accessor html-stream :initform (make-string-output-stream))
(status :accessor status :initform "200 OK")
(external-format :accessor external-format :initform nil)
(content :accessor content :initform nil))))
(defmethod initialize-backend ((backend httpd-backend) &key server &allow-other-keys)
(when (and (null *mime-types*) (probe-file *default-mime-types-file*))
(read-mime-types *default-mime-types-file*))
(setf (server backend) server)
backend)
(defmethod startup-backend :before ((backend httpd-backend) &rest initargs)
(declare (ignore initargs))
(setf (socket backend) (open-server :host (host backend) :port (port backend))))
(defmethod handle-request ((backend httpd-backend) (request httpd-request) (response httpd-response))
(ucw.backend.info "Handling request from ~S for ~S" (peer-address request) (raw-uri request))
(or (block handle
(dolist* ((can-match handler url-base) (handlers backend))
(declare (ignore url-base))
(when (funcall can-match (query-path request))
(funcall handler request response)
(return-from handle t)))
nil)
(handle-request (server backend) request response)
(handle-404 backend request response)))
(defmethod handle-404 ((backend httpd-backend) (request httpd-request) (response httpd-response))
(setf (get-header response "Status") "404 Not Found"
(get-header response "Content-Type") "text/html")
(with-yaclml-stream (html-stream response)
(<:html
(<:head (<:title "404 Not Found"))
(<:body
(<:p (<:as-html (raw-uri request)) " not found."))))
(close-request request)
(send-response response))
(defmethod send-error ((stream stream) message)
"Ignore trying to read the request or anything. Just send an
error message. This is a very low level bailout function."
(let ((response (make-response stream)))
(setf (get-header response "Status") "500 Internal Server Error"
(get-header response "Content-Type") "text/html")
(with-yaclml-stream (html-stream response)
(<:html
(<:head (<:title "500 Internal Server Error"))
(<:body
(<:p "Server Error.")
(<:p (<:as-html message)))))
(send-response response)))
(defun abort-backend-request (&optional condition)
(ucw.backend.info "Gracefully aborting httpd request because: ~S" condition)
(throw 'abort-backend-request nil))
(defmethod startup-backend ((backend httpd-backend) &rest init-args)
"Start the RERL."
(declare (ignore init-args))
(let (stream peer-address request response)
(labels ((serve-one-request ()
(multiple-value-setq (stream peer-address)
(accept-connection (socket backend) :element-type '(unsigned-byte 8)))
(setf request (read-request backend stream)
response (make-response request)
(peer-address request) peer-address)
(handle-request backend request response))
(handle-request-error (condition)
(ucw.backend.error "While handling a request on ~S: ~A" stream condition)
(when *debug-on-error*
(restart-case
(swank:swank-debugger-hook condition nil)
(kill-worker ()
:report "Kill this worker."
(values))))
(abort-backend-request condition))
(handle-request/internal ()
(catch 'abort-backend-request
(handler-bind ((stream-error (lambda (c)
(when (eq (stream-error-stream c) stream)
(abort-backend-request c))))
(error #'handle-request-error))
(unwind-protect
(serve-one-request)
(ignore-errors (close stream)))))))
(unwind-protect
(loop (handle-request/internal))
(ignore-errors
(swank-backend:close-socket (socket backend))))))
backend)
(defmethod shutdown-backend ((backend httpd-backend) &rest init-args)
"This would stop the single therad httpd backend if that made any sense.
Stopping the single therad backend requires nothing more than
getting STARTUP-BACKEND to return (either normally or by chosing
you implementation's abort restart after a break)."
(declare (ignore init-args))
backend)
(defmethod publish-directory ((backend httpd-backend) directory-pathname url-base)
(push (list (lambda (request-url)
(ucw.backend.dribble "Trying to match '~S' under url-base '~S' to serve it as a file from '~S'"
request-url url-base directory-pathname)
(starts-with request-url url-base))
(lambda (request response)
(aif (map-query-path-to-file (query-path request)
url-base
directory-pathname)
(progn
(ucw.backend.debug "Serving '~S' as a file under url-base '~S'" it url-base)
(serve-file it request response))
(progn
(ucw.backend.debug "Failed to serve '~S' as a file under url-base '~S'" (query-path request) url-base)
(handle-404 backend request response))))
url-base)
(handlers backend)))
(defmethod get-header ((message httpd-message) header-name)
(cdr (assoc header-name (headers message) :test #'string-equal)))
(defmethod (setf get-header) (value (message httpd-message) header-name)
(aif (assoc header-name (headers message) :test #'string-equal)
(setf (cdr it) value)
(push (cons header-name value) (headers message)))
value)
(defmethod add-header ((message httpd-message) header-name value)
(push (cons header-name value) (headers message))
value)
(defmethod delete-header ((message httpd-message) header-name)
(setf (headers message)
(delete-if #'(lambda (item)
(string-equal (car item)
header-name))
(headers message))))
(defun read-line-from-network (stream &optional (eof-error-p t))
"A simple state machine which reads chars from STREAM until it
gets a CR-LF sequence or the end of the stream."
(declare (optimize (speed 3)))
(let ((buffer (make-array 50
:element-type '(unsigned-byte 8)
:adjustable t
:fill-pointer 0)))
(labels ((read-next-char ()
(let ((byte (read-byte stream eof-error-p stream)))
(if (eq stream byte)
(return-from read-line-from-network buffer)
(return-from read-next-char byte))))
(cr ()
(let ((next-byte (read-next-char)))
(case next-byte
LF
(return-from read-line-from-network buffer))
(vector-push-extend #.+carriage-return+ buffer)
(vector-push-extend next-byte buffer)
(next)))))
(next ()
(let ((next-byte (read-next-char)))
(case next-byte
(cr))
LF
(return-from read-line-from-network buffer))
(t
(vector-push-extend next-byte buffer)
(next))))))
(next))))
(defun accumulate-parameters (assoc-list)
"Accumulates same parameters into lists. Otherwise
multiple-selection lists won't have a list value and
<ucw:select would fail."
(let ((result '()))
(dolist* ((name . value) assoc-list)
(unless (string= name "")
(aif (assoc name result :test #'string=)
(if (and (cdr it) (listp (cdr it)))
(setf (cdr it) (cons value (cdr it)))
(setf (cdr it) (list value (cdr it))))
(push (cons name value) result))))
(dolist (it result)
(when (and (cdr it) (listp (cdr it)))
(setf (cdr it) (nreverse (cdr it)))))
(nreverse result)))
(defmethod read-request ((backend httpd-backend) stream)
"Reads an HTTP request message from STREAM. Returns a httpd-request object."
(let* ((request (make-instance 'httpd-request :network-stream stream))
(line (read-line-from-network stream))
(pieces (split-on-space line)))
(ucw.backend.dribble "In read-request, first line in :us-ascii is ~S, pieces are ~S"
(octets-to-string line :us-ascii) pieces)
(destructuring-bind (http-method uri &optional protocol) pieces
(declare (ignore protocol))
(setf (raw-uri request) (coerce (octets-to-string uri #.(or (external-format-for :url) :iso-8859-1)) 'simple-string)
(http-method request) (coerce (octets-to-string http-method :us-ascii) 'simple-string)
(headers request) (read-request-headers stream))
(aif (position #\? (raw-uri request))
(setf (query-path request) (make-displaced-array (raw-uri request) 0 it)
(parameters request) (parse-query-parameters
(make-displaced-array (raw-uri request)
(1+ it))))
(setf (query-path request) (raw-uri request)
(parameters request) '()))
(setf (query-path request) (unescape-as-uri (query-path request)))
(setf (parameters request) (append (parameters request)
(accumulate-parameters
(parse-request-body stream
(get-header request "Content-Length")
(get-header request "Content-Type"))))))
request))
(defmethod get-parameter ((request httpd-request) name)
(loop
with value = '()
for (k . v) in (parameters request)
when (string= k name)
do (if value
(if (consp value)
(push v value)
(setf value (list v value)))
(setf value v))
finally (return value)))
(defmethod map-parameters ((request httpd-request) lambda)
(dolist* ((name . value) (parameters request))
(unless (string= name "")
(funcall lambda name (if (stringp value)
(copy-seq value)
value)))))
(defun read-request-headers (stream)
(iterate
(for header-line = (read-line-from-network stream))
(until (= 0 (length header-line)))
(for (name . value) = (split-header-line header-line))
(collect (cons (octets-to-string name :us-ascii)
(octets-to-string value :iso-8859-1)))))
(defmethod close-request ((request httpd-request))
request)
(defmethod make-response ((request httpd-request))
(make-instance 'httpd-response
:request request
:network-stream (network-stream request)))
(defmethod make-response ((stream stream))
(make-instance 'httpd-response :network-stream stream))
(defmethod clear-response ((response httpd-response))
(setf (html-stream response) (make-string-output-stream)
(headers response) '()))
(defmethod get-header ((response httpd-response) header-name)
(if (string= "Status" header-name)
(status response)
(call-next-method)))
(defmethod (setf get-header) (value (response httpd-response) header-name)
(if (string= "Status" header-name)
(setf (status response) value)
(call-next-method)))
(defun write-crlf (stream)
(write-byte 13 stream)
(write-byte 10 stream))
(defun write-header-line (name value stream)
(write-sequence (string-to-octets name :us-ascii) stream)
(write-byte 58 stream)
(write-byte 32 stream)
(write-sequence (string-to-octets value :iso-8859-1) stream)
(write-crlf stream))
(defmethod encoding ((response httpd-response))
(or (external-format response)
(call-next-method)))
(defun httpd-send-headers (response &optional calculate-content-length-from-body)
(ucw.backend.dribble "Sending headers for ~S (Status: ~S)." response (status response))
(let ((stream (network-stream response)))
(write-sequence #.(string-to-octets "HTTP/1.1 " :us-ascii) stream)
(write-sequence (string-to-octets (status response) :us-ascii) stream)
(write-byte 32 stream)
(write-crlf stream)
(dolist* ((name . value) (headers response))
(unless (null value)
(ucw.backend.dribble "Sending header ~S: ~S" name value)
(write-header-line name value stream)))
(when calculate-content-length-from-body
(setf (content response)
(string-to-octets (get-output-stream-string (html-stream response))
(encoding response)))
(unless (assoc "Content-Length" (headers response) :test #'string-equal)
Content - Length may be defined and NIL , we do n't print header then
(write-header-line "Content-Length" (princ-to-string (length (content response))) stream)))
(write-crlf stream)
response))
(defmethod send-headers ((response httpd-response))
(httpd-send-headers response nil))
(defmethod send-response ((response httpd-response))
(httpd-send-headers response t)
(unless (and (request response)
(string= "HEAD" (http-method (request response))))
(ucw.backend.dribble "HTTPD: Sending ~S (~D bytes) as body"
(content response) (length (content response)))
(write-sequence (content response) (network-stream response))))
(defparameter *httpd-trace-functions*
'(swank-backend:send
swank-backend:spawn
swank-backend:receive
initialize-backend
startup-backend
shutdown-backend
abort-backend-request
close-request
send-response
httpd-send-headers
httpd-controller-loop
httpd-worker-loop/handle
httpd-worker-loop
httpd-accept-loop
get-header
(setf get-header)
add-header
read-request
parse-query-parameters
parse-request-body
rfc2388-callback
grab-param
read-line-from-network
split-on-space
split-header-line
read-request-headers
make-response
get-parameter
map-parameters
rfc2388:parse-header-value
rfc2388:get-header))
(defun trace-httpd-backend ()
(eval `(trace ,@*httpd-trace-functions*)))
(defun untrace-httpd-backend ()
(eval `(untrace ,@*httpd-trace-functions*)))
Copyright ( c ) 2005 - 2006 Edward
- Neither the name of , nor , nor the names
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
OWNER OR ANY DIRECT , INDIRECT , INCIDENTAL ,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT
THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
|
32ab6da6dd6a3e69f94e62e0f4e8ba30f8a0dc9cc5c2146b0b5dbad4bb6230f6 | zenspider/schemers | compiler_bug_1.scm | #!/usr/bin/env csi -s
(require rackunit)
(require-library compiler)
(import compiler)
(assert-compile 11 '(+ 1 (* 2 3) 4))
(assert-assemble
11
'((assign proc (op lookup-variable-value) (const +) (reg env))
(save proc)
(assign val (const 4))
(assign argl (op list) (reg val))
(save argl) ; NOTE: added to fix bug
(assign proc (op lookup-variable-value) (const *) (reg env))
(assign val (const 3))
FIX : BUG ! drops ( 4 )
(assign val (const 2))
(assign argl (op cons) (reg val) (reg argl))
(test (op primitive-procedure?) (reg proc))
(branch (label primitive-branch1))
compiled-branch2
(assign continue (label after-call3))
(assign val (op compiled-procedure-entry) (reg proc))
(goto (reg val))
primitive-branch1
(assign val (op apply-primitive-procedure) (reg proc) (reg argl))
after-call3
(restore argl) ; NOTE: added to fix bug
(assign argl (op cons) (reg val) (reg argl)) ; NOTE: switched to cons
(assign val (const 1))
(assign argl (op cons) (reg val) (reg argl))
(restore proc)
(test (op primitive-procedure?) (reg proc))
(branch (label primitive-branch4))
compiled-branch5
(assign continue (label after-call6))
(assign val (op compiled-procedure-entry) (reg proc))
(goto (reg val))
primitive-branch4
(assign val (op apply-primitive-procedure) (reg proc) (reg argl))
after-call6))
| null | https://raw.githubusercontent.com/zenspider/schemers/2939ca553ac79013a4c3aaaec812c1bad3933b16/sicp/ch_5/compiler_bug_1.scm | scheme | NOTE: added to fix bug
NOTE: added to fix bug
NOTE: switched to cons | #!/usr/bin/env csi -s
(require rackunit)
(require-library compiler)
(import compiler)
(assert-compile 11 '(+ 1 (* 2 3) 4))
(assert-assemble
11
'((assign proc (op lookup-variable-value) (const +) (reg env))
(save proc)
(assign val (const 4))
(assign argl (op list) (reg val))
(assign proc (op lookup-variable-value) (const *) (reg env))
(assign val (const 3))
FIX : BUG ! drops ( 4 )
(assign val (const 2))
(assign argl (op cons) (reg val) (reg argl))
(test (op primitive-procedure?) (reg proc))
(branch (label primitive-branch1))
compiled-branch2
(assign continue (label after-call3))
(assign val (op compiled-procedure-entry) (reg proc))
(goto (reg val))
primitive-branch1
(assign val (op apply-primitive-procedure) (reg proc) (reg argl))
after-call3
(assign val (const 1))
(assign argl (op cons) (reg val) (reg argl))
(restore proc)
(test (op primitive-procedure?) (reg proc))
(branch (label primitive-branch4))
compiled-branch5
(assign continue (label after-call6))
(assign val (op compiled-procedure-entry) (reg proc))
(goto (reg val))
primitive-branch4
(assign val (op apply-primitive-procedure) (reg proc) (reg argl))
after-call6))
|
1ccad15fd54d2a26fc08a7fc4f2b3d4a4553400f195317d28c981b411c188449 | google/haskell-trainings | Tests.hs | Copyright 2021 Google LLC
--
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- -2.0
--
-- Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
# LANGUAGE CPP #
module Tests (check) where
import Data.Function (on)
import Data.IntMap (IntMap)
import qualified Data.IntMap as IntMap
import Data.List (nub)
import qualified Data.Map as Map
import Text.Printf (printf)
#ifdef SOLUTION
import CodeSolution
import ColorSolution
import ColorMapSolution
import DoSolution
import ErrorOrSolution
#else
import Code
import Color
import ColorMap
import Do
import ErrorOr
#endif
import Internal (test, Tests)
import qualified Internal (check)
Check one section of the codelab
check :: Int -> IO ()
check x = case IntMap.lookup x testMap of
Just (file, tests) -> doCheck file tests
_ -> noSuchSection
-- When we check a code section, we print the file under test and then the
-- results of all tests from there
doCheck :: String -> Tests -> IO ()
doCheck file tests = do
putStr "\n\nChecking code from " >> putStrLn file
Internal.check tests
-- If user supplies invalid arguments, we should print the valid arguments and
-- the files under test
noSuchSection :: IO ()
noSuchSection = do
putStrLn "\n\nRequested invalid section. Available sections are:"
mapM_ displaySection $ IntMap.toAscList testMap
where
displaySection (k, (f, _)) = printf "\t%d -> %s\n" k f
-- We record a mapping section -> (file, tests)
testMap :: IntMap (String, Tests)
testMap = IntMap.fromList
[ (1, ("src/Color.hs", colorTests))
, (2, ("src/ColorMap.hs", colorMapTests))
, (3, ("src/ErrorOr.hs", errorOrTests))
, (4, ("src/Code.hs", codeTests))
, (5, ("src/Do.hs", doTests))
]
colorTests :: Tests
colorTests =
[ test "allColors contains Red" True $ elem Red allColors
, test "allColors contains Yellow" True $ elem Yellow allColors
, test "allColors contains Green" True $ elem Green allColors
, test "allColors contains Cyan" True $ elem Cyan allColors
, test "allColors contains Blue" True $ elem Blue allColors
, test "allColors contains Magenta" True $ elem Magenta allColors
, test "allColors size is 6" 6 $ length allColors
, test "show Red" "R" $ show Red
, test "concatMap show allColors" "RYGCBM" $ concatMap show allColors
, test "allColors starts with Red" Red $ head allColors
, test "allColors ends with Magenta" Magenta $ last allColors
]
colorMapTests :: Tests
colorMapTests =
[ test "getIntOr0 (Just 42)" 42 $ getIntOr0 (Just 42)
, test "getIntOr0 Nothing" 0 $ getIntOr0 Nothing
, test "getCount on empty map" 0 $ getCount Cyan Map.empty
, test "getCount on map" 2 $ getCount Cyan (mk Cyan 2)
, test "add color in empty map" (mk Blue 1) $ addColorToMap Blue Map.empty
, test "add color in map" (mk Blue 3) $ addColorToMap Blue (mk Blue 2)
]
where
mk c x = Map.fromList [(c, x)]
errorOrTests :: Tests
errorOrTests =
[ test "wrapValue on Int" (Value 42) $ wrapValue 42
, test "wrapValue on String" (Value "foo") $ wrapValue "foo"
, test "fmapValue show on Int" (Value $ show 42) $ fmapValue show (Value 42)
, test "fmapValue length on String" (Value $ length "foo") $ fmapValue length (Value "foo")
, test "fmapValue show on Error" (Error "OH NOES") $ fmapValue id (Error "OH NOES" :: ErrorOr String)
, test "apValue function on value" (Value "42") $ apValue (Value show :: ErrorOr (Int -> String)) (Value 42)
, test "apValue function on error" (Error "WAT") $ apValue (Value show :: ErrorOr (Int -> String)) (Error "WAT")
, test "apValue error on value" (Error "OH NOES") $ apValue (Error "OH NOES" :: ErrorOr (Int -> String)) (Value 42)
, test "apValue error on error" (Error "OH NOES") $ apValue (Error "OH NOES" :: ErrorOr (Int -> String)) (Error "WAT")
, test "bindValue on good Int" (Value 42) $ bindValue fun (Value 42)
, test "bindValue on bad Int" (Error "ODD X") $ bindValue fun (Value 21)
, test "bindValue on Error" (Error "OH NOES") $ bindValue fun (Error "OH NOES")
, test "readColor 'R'" (Value Red) $ readColor 'R'
, test "readColor 'Y'" (Value Yellow) $ readColor 'Y'
, test "readColor 'G'" (Value Green) $ readColor 'G'
, test "readColor 'C'" (Value Cyan) $ readColor 'C'
, test "readColor 'B'" (Value Blue) $ readColor 'B'
, test "readColor 'M'" (Value Magenta) $ readColor 'M'
, test "readColor 'Z'" (Error "'Z' is not a proper color.") $ readColor 'Z'
]
where
fun x = if even x then Value x else Error "ODD X"
codeTests :: Tests
codeTests =
[ test "# codes of size 0: 1" 1 $ length $ allCodes 0
, test "# codes of size 1: 6" 6 $ length $ allCodes 1
, test "# codes of size 4: 1296" 1296 $ length $ allCodes 4
, test "all codes 0 have size 0" [0] $ nub $ length <$> allCodes 0
, test "all codes 1 have size 1" [1] $ nub $ length <$> allCodes 1
, test "all codes 4 have size 4" [4] $ nub $ length <$> allCodes 4
, test "no duplicated codes" True $ on (==) length (allCodes 4) (nub $ allCodes 4)
, test "empty code -> empty map" Map.empty $ codeToMap []
, test "[C,R,C] -> {R: 1, C: 2}" (Map.fromList [(Red, 1), (Cyan, 2)]) $ codeToMap [Cyan, Red, Cyan]
, test "countBlacks [R,Y,G,B] [B,R,Y,G]" 0 $ countBlacks [Red, Yellow, Green, Blue] [Blue, Red, Yellow, Green]
, test "countBlacks [R,Y,G,B] [R,B,G,Y]" 2 $ countBlacks [Red, Yellow, Green, Blue] [Red, Blue, Green, Yellow]
, test "countBlacks [B,B,C,G] [Y,B,G,C]" 1 $ countBlacks [Blue, Blue, Cyan, Green] [Yellow, Blue, Green, Cyan]
, test "countBlacks [B,B,C,G] [B,B,C,G]" 4 $ countBlacks [Blue, Blue, Cyan, Green] [Blue, Blue, Cyan, Green]
, test "countTotal [C,R,B,M] [Y,R,G,G]" 1 $ countTotal [Cyan, Red, Blue, Magenta] [Yellow, Red, Green, Green]
, test "countTotal [C,R,B,M] [Y,Y,C,M]" 2 $ countTotal [Cyan, Red, Blue, Magenta] [Yellow, Yellow, Cyan, Magenta]
, test "countTotal [C,R,B,M] [Y,R,C,M]" 3 $ countTotal [Cyan, Red, Blue, Magenta] [Yellow, Red, Cyan, Magenta]
, test "countTotal [B,B,C,G] [Y,B,G,C]" 3 $ countTotal [Blue, Blue, Cyan, Green] [Yellow, Blue, Green, Cyan]
, test "countTotal [B,B,C,G] [B,B,C,G]" 4 $ countTotal [Blue, Blue, Cyan, Green] [Blue, Blue, Cyan, Green]
, test "countScore [B,B,C,G] [R,R,R,R]" (Score 0 0) $ countScore [Blue, Blue, Cyan, Green] [Red, Red, Red, Red]
, test "countScore [B,B,C,G] [Y,B,G,C]" (Score 1 2) $ countScore [Blue, Blue, Cyan, Green] [Yellow, Blue, Green, Cyan]
, test "countScore [B,B,C,G] [B,B,C,G]" (Score 4 0) $ countScore [Blue, Blue, Cyan, Green] [Blue, Blue, Cyan, Green]
]
doTests :: Tests
doTests =
[ test "# codes of size 0: 1" 1 $ length $ allCodesDo 0
, test "# codes of size 1: 6" 6 $ length $ allCodesDo 1
, test "# codes of size 4: 1296" 1296 $ length $ allCodesDo 4
, test "all codes 0 have size 0" [0] $ nub $ length <$> allCodesDo 0
, test "all codes 1 have size 1" [1] $ nub $ length <$> allCodesDo 1
, test "all codes 4 have size 4" [4] $ nub $ length <$> allCodesDo 4
, test "no duplicated codes" True $ on (==) length (allCodesDo 4) (nub $ allCodesDo 4)
, test "len: 0: []" [] $ duplicatesList 0
, test "len: 3: [1, 1, 2, 2, 3, 3]" [1, 1, 2, 2, 3, 3] $ duplicatesList 3
, test "len: 0: []" [] $ oddlyDuplicateList 0
, test "len: 3: [1, 1, 2, 3, 3]" [1, 1, 2, 3, 3] $ oddlyDuplicateList 3
, test "len: 5: [1, 1, 2, 3, 3, 4, 5, 5]" [1, 1, 2, 3, 3, 4, 5, 5] $ oddlyDuplicateList 5
]
| null | https://raw.githubusercontent.com/google/haskell-trainings/c0d0017f179cede0ee7b18a271ee52208441e2e5/haskell_102/codelab/01_mastermind/src/Tests.hs | haskell |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
When we check a code section, we print the file under test and then the
results of all tests from there
If user supplies invalid arguments, we should print the valid arguments and
the files under test
We record a mapping section -> (file, tests) | Copyright 2021 Google LLC
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
# LANGUAGE CPP #
module Tests (check) where
import Data.Function (on)
import Data.IntMap (IntMap)
import qualified Data.IntMap as IntMap
import Data.List (nub)
import qualified Data.Map as Map
import Text.Printf (printf)
#ifdef SOLUTION
import CodeSolution
import ColorSolution
import ColorMapSolution
import DoSolution
import ErrorOrSolution
#else
import Code
import Color
import ColorMap
import Do
import ErrorOr
#endif
import Internal (test, Tests)
import qualified Internal (check)
Check one section of the codelab
check :: Int -> IO ()
check x = case IntMap.lookup x testMap of
Just (file, tests) -> doCheck file tests
_ -> noSuchSection
doCheck :: String -> Tests -> IO ()
doCheck file tests = do
putStr "\n\nChecking code from " >> putStrLn file
Internal.check tests
noSuchSection :: IO ()
noSuchSection = do
putStrLn "\n\nRequested invalid section. Available sections are:"
mapM_ displaySection $ IntMap.toAscList testMap
where
displaySection (k, (f, _)) = printf "\t%d -> %s\n" k f
testMap :: IntMap (String, Tests)
testMap = IntMap.fromList
[ (1, ("src/Color.hs", colorTests))
, (2, ("src/ColorMap.hs", colorMapTests))
, (3, ("src/ErrorOr.hs", errorOrTests))
, (4, ("src/Code.hs", codeTests))
, (5, ("src/Do.hs", doTests))
]
colorTests :: Tests
colorTests =
[ test "allColors contains Red" True $ elem Red allColors
, test "allColors contains Yellow" True $ elem Yellow allColors
, test "allColors contains Green" True $ elem Green allColors
, test "allColors contains Cyan" True $ elem Cyan allColors
, test "allColors contains Blue" True $ elem Blue allColors
, test "allColors contains Magenta" True $ elem Magenta allColors
, test "allColors size is 6" 6 $ length allColors
, test "show Red" "R" $ show Red
, test "concatMap show allColors" "RYGCBM" $ concatMap show allColors
, test "allColors starts with Red" Red $ head allColors
, test "allColors ends with Magenta" Magenta $ last allColors
]
colorMapTests :: Tests
colorMapTests =
[ test "getIntOr0 (Just 42)" 42 $ getIntOr0 (Just 42)
, test "getIntOr0 Nothing" 0 $ getIntOr0 Nothing
, test "getCount on empty map" 0 $ getCount Cyan Map.empty
, test "getCount on map" 2 $ getCount Cyan (mk Cyan 2)
, test "add color in empty map" (mk Blue 1) $ addColorToMap Blue Map.empty
, test "add color in map" (mk Blue 3) $ addColorToMap Blue (mk Blue 2)
]
where
mk c x = Map.fromList [(c, x)]
errorOrTests :: Tests
errorOrTests =
[ test "wrapValue on Int" (Value 42) $ wrapValue 42
, test "wrapValue on String" (Value "foo") $ wrapValue "foo"
, test "fmapValue show on Int" (Value $ show 42) $ fmapValue show (Value 42)
, test "fmapValue length on String" (Value $ length "foo") $ fmapValue length (Value "foo")
, test "fmapValue show on Error" (Error "OH NOES") $ fmapValue id (Error "OH NOES" :: ErrorOr String)
, test "apValue function on value" (Value "42") $ apValue (Value show :: ErrorOr (Int -> String)) (Value 42)
, test "apValue function on error" (Error "WAT") $ apValue (Value show :: ErrorOr (Int -> String)) (Error "WAT")
, test "apValue error on value" (Error "OH NOES") $ apValue (Error "OH NOES" :: ErrorOr (Int -> String)) (Value 42)
, test "apValue error on error" (Error "OH NOES") $ apValue (Error "OH NOES" :: ErrorOr (Int -> String)) (Error "WAT")
, test "bindValue on good Int" (Value 42) $ bindValue fun (Value 42)
, test "bindValue on bad Int" (Error "ODD X") $ bindValue fun (Value 21)
, test "bindValue on Error" (Error "OH NOES") $ bindValue fun (Error "OH NOES")
, test "readColor 'R'" (Value Red) $ readColor 'R'
, test "readColor 'Y'" (Value Yellow) $ readColor 'Y'
, test "readColor 'G'" (Value Green) $ readColor 'G'
, test "readColor 'C'" (Value Cyan) $ readColor 'C'
, test "readColor 'B'" (Value Blue) $ readColor 'B'
, test "readColor 'M'" (Value Magenta) $ readColor 'M'
, test "readColor 'Z'" (Error "'Z' is not a proper color.") $ readColor 'Z'
]
where
fun x = if even x then Value x else Error "ODD X"
codeTests :: Tests
codeTests =
[ test "# codes of size 0: 1" 1 $ length $ allCodes 0
, test "# codes of size 1: 6" 6 $ length $ allCodes 1
, test "# codes of size 4: 1296" 1296 $ length $ allCodes 4
, test "all codes 0 have size 0" [0] $ nub $ length <$> allCodes 0
, test "all codes 1 have size 1" [1] $ nub $ length <$> allCodes 1
, test "all codes 4 have size 4" [4] $ nub $ length <$> allCodes 4
, test "no duplicated codes" True $ on (==) length (allCodes 4) (nub $ allCodes 4)
, test "empty code -> empty map" Map.empty $ codeToMap []
, test "[C,R,C] -> {R: 1, C: 2}" (Map.fromList [(Red, 1), (Cyan, 2)]) $ codeToMap [Cyan, Red, Cyan]
, test "countBlacks [R,Y,G,B] [B,R,Y,G]" 0 $ countBlacks [Red, Yellow, Green, Blue] [Blue, Red, Yellow, Green]
, test "countBlacks [R,Y,G,B] [R,B,G,Y]" 2 $ countBlacks [Red, Yellow, Green, Blue] [Red, Blue, Green, Yellow]
, test "countBlacks [B,B,C,G] [Y,B,G,C]" 1 $ countBlacks [Blue, Blue, Cyan, Green] [Yellow, Blue, Green, Cyan]
, test "countBlacks [B,B,C,G] [B,B,C,G]" 4 $ countBlacks [Blue, Blue, Cyan, Green] [Blue, Blue, Cyan, Green]
, test "countTotal [C,R,B,M] [Y,R,G,G]" 1 $ countTotal [Cyan, Red, Blue, Magenta] [Yellow, Red, Green, Green]
, test "countTotal [C,R,B,M] [Y,Y,C,M]" 2 $ countTotal [Cyan, Red, Blue, Magenta] [Yellow, Yellow, Cyan, Magenta]
, test "countTotal [C,R,B,M] [Y,R,C,M]" 3 $ countTotal [Cyan, Red, Blue, Magenta] [Yellow, Red, Cyan, Magenta]
, test "countTotal [B,B,C,G] [Y,B,G,C]" 3 $ countTotal [Blue, Blue, Cyan, Green] [Yellow, Blue, Green, Cyan]
, test "countTotal [B,B,C,G] [B,B,C,G]" 4 $ countTotal [Blue, Blue, Cyan, Green] [Blue, Blue, Cyan, Green]
, test "countScore [B,B,C,G] [R,R,R,R]" (Score 0 0) $ countScore [Blue, Blue, Cyan, Green] [Red, Red, Red, Red]
, test "countScore [B,B,C,G] [Y,B,G,C]" (Score 1 2) $ countScore [Blue, Blue, Cyan, Green] [Yellow, Blue, Green, Cyan]
, test "countScore [B,B,C,G] [B,B,C,G]" (Score 4 0) $ countScore [Blue, Blue, Cyan, Green] [Blue, Blue, Cyan, Green]
]
doTests :: Tests
doTests =
[ test "# codes of size 0: 1" 1 $ length $ allCodesDo 0
, test "# codes of size 1: 6" 6 $ length $ allCodesDo 1
, test "# codes of size 4: 1296" 1296 $ length $ allCodesDo 4
, test "all codes 0 have size 0" [0] $ nub $ length <$> allCodesDo 0
, test "all codes 1 have size 1" [1] $ nub $ length <$> allCodesDo 1
, test "all codes 4 have size 4" [4] $ nub $ length <$> allCodesDo 4
, test "no duplicated codes" True $ on (==) length (allCodesDo 4) (nub $ allCodesDo 4)
, test "len: 0: []" [] $ duplicatesList 0
, test "len: 3: [1, 1, 2, 2, 3, 3]" [1, 1, 2, 2, 3, 3] $ duplicatesList 3
, test "len: 0: []" [] $ oddlyDuplicateList 0
, test "len: 3: [1, 1, 2, 3, 3]" [1, 1, 2, 3, 3] $ oddlyDuplicateList 3
, test "len: 5: [1, 1, 2, 3, 3, 4, 5, 5]" [1, 1, 2, 3, 3, 4, 5, 5] $ oddlyDuplicateList 5
]
|
8a79d8b416f323032994a6d4e76545041b5b7b23dc85acadac0018c48d5b9e92 | NorfairKing/yamlparse-applicative | IO.hs | # LANGUAGE FlexibleInstances #
{-# LANGUAGE GADTs #-}
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
# LANGUAGE ScopedTypeVariables #
module YamlParse.Applicative.IO where
import qualified Data.ByteString as SB
import qualified Data.Text as T
import qualified Data.Yaml as Yaml
import Path
import Path.IO
import System.Exit
import YamlParse.Applicative.Class
import YamlParse.Applicative.Explain
import YamlParse.Applicative.Parser
import YamlParse.Applicative.Pretty
| Helper function to read a config file for a type in ' YamlSchema '
readConfigFile :: (YamlSchema a, Yaml.FromJSON a) => Path r File -> IO (Maybe a)
readConfigFile p = readFirstConfigFile [p]
| Helper function to read the first in a list of config files
readFirstConfigFile :: forall a r. (Yaml.FromJSON a, YamlSchema a) => [Path r File] -> IO (Maybe a)
readFirstConfigFile files = go files
where
go :: [Path r File] -> IO (Maybe a)
go =
\case
[] -> pure Nothing
(p : ps) -> do
mc <- forgivingAbsence $ SB.readFile $ toFilePath p
case mc of
Nothing -> go ps
Just contents ->
case Yaml.decodeEither' contents of
Left err -> do
let failedMsgs =
[ "Failed to parse yaml file",
toFilePath p,
"with error:",
Yaml.prettyPrintParseException err
]
triedFilesMsgs = case files of
[] -> []
[f] -> ["While parsing file: " <> toFilePath f]
fs -> "While parsing files:" : map (("* " <>) . toFilePath) fs
referenceMsgs =
[ "Reference: ",
T.unpack $ prettyColourisedSchema $ explainParser (yamlSchema :: YamlParser a)
]
die $
unlines $
concat
[ failedMsgs,
triedFilesMsgs,
referenceMsgs
]
Right (ViaYamlSchema conf) -> pure $ Just conf
| null | https://raw.githubusercontent.com/NorfairKing/yamlparse-applicative/a250d9ea4b0c10dd6a96174f4942f73ec9652a2a/yamlparse-applicative/src/YamlParse/Applicative/IO.hs | haskell | # LANGUAGE GADTs #
# LANGUAGE OverloadedStrings #
# LANGUAGE RankNTypes # | # LANGUAGE FlexibleInstances #
# LANGUAGE LambdaCase #
# LANGUAGE ScopedTypeVariables #
module YamlParse.Applicative.IO where
import qualified Data.ByteString as SB
import qualified Data.Text as T
import qualified Data.Yaml as Yaml
import Path
import Path.IO
import System.Exit
import YamlParse.Applicative.Class
import YamlParse.Applicative.Explain
import YamlParse.Applicative.Parser
import YamlParse.Applicative.Pretty
| Helper function to read a config file for a type in ' YamlSchema '
readConfigFile :: (YamlSchema a, Yaml.FromJSON a) => Path r File -> IO (Maybe a)
readConfigFile p = readFirstConfigFile [p]
| Helper function to read the first in a list of config files
readFirstConfigFile :: forall a r. (Yaml.FromJSON a, YamlSchema a) => [Path r File] -> IO (Maybe a)
readFirstConfigFile files = go files
where
go :: [Path r File] -> IO (Maybe a)
go =
\case
[] -> pure Nothing
(p : ps) -> do
mc <- forgivingAbsence $ SB.readFile $ toFilePath p
case mc of
Nothing -> go ps
Just contents ->
case Yaml.decodeEither' contents of
Left err -> do
let failedMsgs =
[ "Failed to parse yaml file",
toFilePath p,
"with error:",
Yaml.prettyPrintParseException err
]
triedFilesMsgs = case files of
[] -> []
[f] -> ["While parsing file: " <> toFilePath f]
fs -> "While parsing files:" : map (("* " <>) . toFilePath) fs
referenceMsgs =
[ "Reference: ",
T.unpack $ prettyColourisedSchema $ explainParser (yamlSchema :: YamlParser a)
]
die $
unlines $
concat
[ failedMsgs,
triedFilesMsgs,
referenceMsgs
]
Right (ViaYamlSchema conf) -> pure $ Just conf
|
1983fe5b8c0e03176ed05a0ef63b6687483433152d1dc4e103f9be079504fbf4 | charlieg/Sparser | then-and-else.lisp | ;;; -*- Mode:LISP; Syntax:Common-Lisp; Package:SPARSER -*-
copyright ( c ) 1990 Content Technologies Inc.
copyright ( c ) 1992,1993 -- all rights reserved
;;;
;;; File: "then&else"
;;; Module: "tools:basics:syntactic sugar"
Version : 2.0 June 1990
(in-package :sparser)
(defmacro else (&body forms)
"Helps make `if' forms self-documenting when they get
complex and you can't always parse the parens by eye."
`(progn ,@forms) )
(defmacro then (&body forms)
"Helps make `if' forms self-documenting when they get
complex and you can't always parse the parens by eye."
`(progn ,@forms) )
| null | https://raw.githubusercontent.com/charlieg/Sparser/b9bb7d01d2e40f783f3214fc104062db3d15e608/Sparser/code/s/tools/basics/syntactic-sugar/then-and-else.lisp | lisp | -*- Mode:LISP; Syntax:Common-Lisp; Package:SPARSER -*-
File: "then&else"
Module: "tools:basics:syntactic sugar" | copyright ( c ) 1990 Content Technologies Inc.
copyright ( c ) 1992,1993 -- all rights reserved
Version : 2.0 June 1990
(in-package :sparser)
(defmacro else (&body forms)
"Helps make `if' forms self-documenting when they get
complex and you can't always parse the parens by eye."
`(progn ,@forms) )
(defmacro then (&body forms)
"Helps make `if' forms self-documenting when they get
complex and you can't always parse the parens by eye."
`(progn ,@forms) )
|
387711fcc91ef26348cdd9625eab418df51f0a6cc27896701307cb1ccba72191 | project-oak/hafnium-verification | FbThreadSafety.mli |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
val is_custom_init : Tenv.t -> Procname.t -> bool
val is_logging_method : Procname.t -> bool
val get_fbthreadsafe_class_annot : Procname.t -> Tenv.t -> (string * string) option
val message_fbthreadsafe_class : string -> string -> string
| null | https://raw.githubusercontent.com/project-oak/hafnium-verification/6071eff162148e4d25a0fedaea003addac242ace/experiments/ownership-inference/infer/infer/src/opensource/FbThreadSafety.mli | ocaml |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
val is_custom_init : Tenv.t -> Procname.t -> bool
val is_logging_method : Procname.t -> bool
val get_fbthreadsafe_class_annot : Procname.t -> Tenv.t -> (string * string) option
val message_fbthreadsafe_class : string -> string -> string
| |
f1c1159a153d17dde846c53d61a238a1bebdf11e9507e84a73c31c66c3cc222d | kupl/FixML | sol.ml | type formula =
| True
| False
| Not of formula
| AndAlso of formula * formula
| OrElse of formula * formula
| Imply of formula * formula
| Equal of exp * exp
and exp =
| Num of int
| Plus of exp * exp
| Minus of exp * exp
let rec exp_eval : exp -> int
= fun exp ->
match exp with
| Num n -> n
| Plus (e1, e2) -> (exp_eval e1) + (exp_eval e2)
| Minus (e1, e2) -> (exp_eval e1) - (exp_eval e2)
let rec eval : formula -> bool
= fun f ->
match f with
| True -> true
| False -> false
| Not f -> not (eval f)
| AndAlso (f1, f2) -> (eval f1) && (eval f2)
| OrElse (f1, f2) -> (eval f1) || (eval f2)
| Imply (f1, f2) -> (not (eval f1)) || (eval f2)
| Equal (e1, e2) -> (exp_eval e1) = (exp_eval e2) | null | https://raw.githubusercontent.com/kupl/FixML/0a032a733d68cd8ccc8b1034d2908cd43b241fce/benchmarks/formula/formula1/sol.ml | ocaml | type formula =
| True
| False
| Not of formula
| AndAlso of formula * formula
| OrElse of formula * formula
| Imply of formula * formula
| Equal of exp * exp
and exp =
| Num of int
| Plus of exp * exp
| Minus of exp * exp
let rec exp_eval : exp -> int
= fun exp ->
match exp with
| Num n -> n
| Plus (e1, e2) -> (exp_eval e1) + (exp_eval e2)
| Minus (e1, e2) -> (exp_eval e1) - (exp_eval e2)
let rec eval : formula -> bool
= fun f ->
match f with
| True -> true
| False -> false
| Not f -> not (eval f)
| AndAlso (f1, f2) -> (eval f1) && (eval f2)
| OrElse (f1, f2) -> (eval f1) || (eval f2)
| Imply (f1, f2) -> (not (eval f1)) || (eval f2)
| Equal (e1, e2) -> (exp_eval e1) = (exp_eval e2) | |
2a79c2f5226c422851265300c7079fc6747f0d79f7589ff81d09cfe2fdd12942 | pavankumarbn/DroneGUIROS | _package_FlightAnim.lisp | (cl:in-package ardrone_autonomy-srv)
(cl:export '(TYPE-VAL
TYPE
DURATION-VAL
DURATION
RESULT-VAL
RESULT
)) | null | https://raw.githubusercontent.com/pavankumarbn/DroneGUIROS/745320d73035bc50ac4fea2699e22586e10be800/devel/share/common-lisp/ros/ardrone_autonomy/srv/_package_FlightAnim.lisp | lisp | (cl:in-package ardrone_autonomy-srv)
(cl:export '(TYPE-VAL
TYPE
DURATION-VAL
DURATION
RESULT-VAL
RESULT
)) | |
f93e0ba5ebcee822aa133c46a61d20e6c134267c1e86d0db4e65ed9a8497ee91 | onaio/milia | submissions_test.cljs | (ns milia.api.submissions-test
(:require-macros [cljs.test :refer [is deftest async]]
[cljs.core.async.macros :refer [go]])
(:require [milia.api.submissions :as sub]
[cljs.test :as test]
[cljs.core.async :refer [<! >! chan take!]]))
(deftest test-get-stats
[]
(is (= (type (sub/get-stats 1 "_submitted_by" "submitted-by"))
cljs.core.async.impl.channels/ManyToManyChannel)))
| null | https://raw.githubusercontent.com/onaio/milia/c68b612bd640aa2499e4ac2f907a7ef793d0820a/test/cljs/milia/api/submissions_test.cljs | clojure | (ns milia.api.submissions-test
(:require-macros [cljs.test :refer [is deftest async]]
[cljs.core.async.macros :refer [go]])
(:require [milia.api.submissions :as sub]
[cljs.test :as test]
[cljs.core.async :refer [<! >! chan take!]]))
(deftest test-get-stats
[]
(is (= (type (sub/get-stats 1 "_submitted_by" "submitted-by"))
cljs.core.async.impl.channels/ManyToManyChannel)))
| |
1d4be80b2bf2c12d86bfa80052216a170c19203c8d5d4685968c1ff31f127b8b | roman01la/clojurescript-workshop | core.cljs | ;; Atom is a reference type and is used for shared state
;; define an atom which holds a value of `0`
(def state (atom 0))
set atom 's value to ` 100 `
100
;; update atom's value with a function
101
;; read atom's value
101
101
;; observe chages
(add-watch state :logger (fn [key st old-val new-val]
(println "New value is:" new-val)))
" New value is : 102 "
" New value is : 103 "
(remove-watch state :logger)
| null | https://raw.githubusercontent.com/roman01la/clojurescript-workshop/48b02266d65cae8113edd4ce34c4ab282ad256d1/04.atom/core.cljs | clojure | Atom is a reference type and is used for shared state
define an atom which holds a value of `0`
update atom's value with a function
read atom's value
observe chages |
(def state (atom 0))
set atom 's value to ` 100 `
100
101
101
101
(add-watch state :logger (fn [key st old-val new-val]
(println "New value is:" new-val)))
" New value is : 102 "
" New value is : 103 "
(remove-watch state :logger)
|
eba9e026cdd197715685e929e4c436f4268c6f5fca6554e11210da95d8f8e821 | jsa-aerial/hanasu | client.clj | (ns aerial.hanasu.client
(:require [http.async.client :as http]
[http.async.client.websocket :as wss]
[clojure.core.async :as async]
[msgpack.core :as mpk]
[msgpack.clojure-extensions]
[clojure.data.json :as json]
[aerial.hanasu.common :refer [update-cdb get-cdb]]))
(def send! wss/send)
#_(async/go-loop [packet (async/<! (get-cdb :bp-chan))]
(let [[ws msg] packet]
(send! ws :byte msg)))
#_(async/put! (get-cdb :bp-chan)
[ws (mpk/pack {:op :reset :payload {:msgsnt 0}})])
(defn send-msg
[ws msg & {:keys [encode] :or {encode :binary}}]
(if (>= (get-cdb [ws :msgsnt])
(get-cdb [ws :bpsize]))
(async/>!! (get-cdb [ws :chan])
{:op :bpwait
:payload {:ws ws :msg msg :encode encode
:msgsnt (get-cdb [ws :msgsnt])}})
(let [msg {:op :msg :payload msg}
emsg (if (= encode :binary)
(mpk/pack msg)
(json/write-str msg))
enc (if (= encode :binary) :byte :text)]
(wss/send ws enc emsg)
(update-cdb [ws :msgsnt] inc)
(async/>!! (get-cdb [ws :chan])
{:op :sent
:payload {:ws ws :msg msg
:msgsnt (get-cdb [:conns ws :msgsnt])}}))))
(defn receive [ws msg]
(let [msg (if (bytes? msg)
(mpk/unpack msg)
(json/read-str msg))]
(case (or (msg :op) (msg "op"))
:set
(let [bpsize (-> msg :payload :bpsize)
msgrcv (-> msg :payload :msgrcv)]
(update-cdb [ws :msgrcv] msgrcv, [ws :bpsize] bpsize))
:reset
(do (update-cdb [ws :msgsnt] (-> msg :payload :msgsnt))
(async/>!! (get-cdb [ws :chan])
{:op :bpresume
:payload msg}))
(:msg "msg")
(let [rcvd (get-cdb [ws :msgrcv])
data (or (msg :payload) (msg "payload"))]
(if (>= (inc rcvd) (get-cdb [ws :bpsize]))
(do (update-cdb [ws :msgrcv] 0)
(send! ws :byte (mpk/pack {:op :reset :payload {:msgsnt 0}})))
(update-cdb [ws :msgrcv] inc))
(async/>!! (get-cdb [ws :chan])
{:op :msg, :payload {:ws ws :data data}}))
;; Else
(prn "Client Receive Handler - unknown OP " msg))))
(defn on-open [ws]
(println "Client OPEN " ws)
(async/>!! (get-cdb :open-chan) ws))
(defn on-rmtclose [ws code reason]
(println "Client CLOSE " :code code :reason reason :ws ws)
(let [client (get-cdb [ws :client])
client-chan (get-cdb [ws :chan])]
(when (http/open? client)
(http/close client)
(async/>!! client-chan
{:op :close :payload {:ws ws :code code :reason reason}}))))
(defn on-error [ws err]
(let [client-rec (get-cdb ws)]
(async/>!! (client-rec :chan) {:op :error :payload {:ws ws :err err}})))
(defn open-connection
[url]
(let [client (http/create-client)
client-chan (async/chan (async/buffer 19))
_ (update-cdb client-chan {:client client :url url :chan client-chan})
ws (http/websocket client
url
:open on-open
:close on-rmtclose
:error on-error
:text receive
:byte receive
)]
(let [ws (async/<!! (get-cdb :open-chan))
client-rec (get-cdb client-chan)
client-rec (assoc client-rec :ws ws :bpsize 0 :msgrcv 0 :msgsnt 0)]
bogus second call of open callback
(update-cdb client-chan client-rec ws client-rec)
(async/>!! client-chan {:op :open :payload ws}))
client-chan))
(defn close-connection [ws]
(let [client (get-cdb [ws :client])
client-chan (get-cdb [ws :chan])]
(update-cdb client-chan :rm ws :rm)
(http/close client)))
;;; Testing comment area
;;;
(comment
(def client (http/create-client))
(def ws (http/websocket client
"ws:3000/ws"
:open on-open
:close on-close
:error on-error
:text receive
:byte receive
))
(http/close client)
(send!
ws :text (json/write-str
{:type "broadcast", :payload {:client "Clojure"}}))
(send!
ws :text (json/write-str
{:type "echo", :payload {:client "Clojure"}}))
(send!
ws :byte (mpk/pack
{:type "broadcast", :payload {:client "Clojure"}}))
)
| null | https://raw.githubusercontent.com/jsa-aerial/hanasu/7c04e43b64095ed597c5bc65250d244412272ec8/src/clj/aerial/hanasu/client.clj | clojure | Else
Testing comment area
| (ns aerial.hanasu.client
(:require [http.async.client :as http]
[http.async.client.websocket :as wss]
[clojure.core.async :as async]
[msgpack.core :as mpk]
[msgpack.clojure-extensions]
[clojure.data.json :as json]
[aerial.hanasu.common :refer [update-cdb get-cdb]]))
(def send! wss/send)
#_(async/go-loop [packet (async/<! (get-cdb :bp-chan))]
(let [[ws msg] packet]
(send! ws :byte msg)))
#_(async/put! (get-cdb :bp-chan)
[ws (mpk/pack {:op :reset :payload {:msgsnt 0}})])
(defn send-msg
[ws msg & {:keys [encode] :or {encode :binary}}]
(if (>= (get-cdb [ws :msgsnt])
(get-cdb [ws :bpsize]))
(async/>!! (get-cdb [ws :chan])
{:op :bpwait
:payload {:ws ws :msg msg :encode encode
:msgsnt (get-cdb [ws :msgsnt])}})
(let [msg {:op :msg :payload msg}
emsg (if (= encode :binary)
(mpk/pack msg)
(json/write-str msg))
enc (if (= encode :binary) :byte :text)]
(wss/send ws enc emsg)
(update-cdb [ws :msgsnt] inc)
(async/>!! (get-cdb [ws :chan])
{:op :sent
:payload {:ws ws :msg msg
:msgsnt (get-cdb [:conns ws :msgsnt])}}))))
(defn receive [ws msg]
(let [msg (if (bytes? msg)
(mpk/unpack msg)
(json/read-str msg))]
(case (or (msg :op) (msg "op"))
:set
(let [bpsize (-> msg :payload :bpsize)
msgrcv (-> msg :payload :msgrcv)]
(update-cdb [ws :msgrcv] msgrcv, [ws :bpsize] bpsize))
:reset
(do (update-cdb [ws :msgsnt] (-> msg :payload :msgsnt))
(async/>!! (get-cdb [ws :chan])
{:op :bpresume
:payload msg}))
(:msg "msg")
(let [rcvd (get-cdb [ws :msgrcv])
data (or (msg :payload) (msg "payload"))]
(if (>= (inc rcvd) (get-cdb [ws :bpsize]))
(do (update-cdb [ws :msgrcv] 0)
(send! ws :byte (mpk/pack {:op :reset :payload {:msgsnt 0}})))
(update-cdb [ws :msgrcv] inc))
(async/>!! (get-cdb [ws :chan])
{:op :msg, :payload {:ws ws :data data}}))
(prn "Client Receive Handler - unknown OP " msg))))
(defn on-open [ws]
(println "Client OPEN " ws)
(async/>!! (get-cdb :open-chan) ws))
(defn on-rmtclose [ws code reason]
(println "Client CLOSE " :code code :reason reason :ws ws)
(let [client (get-cdb [ws :client])
client-chan (get-cdb [ws :chan])]
(when (http/open? client)
(http/close client)
(async/>!! client-chan
{:op :close :payload {:ws ws :code code :reason reason}}))))
(defn on-error [ws err]
(let [client-rec (get-cdb ws)]
(async/>!! (client-rec :chan) {:op :error :payload {:ws ws :err err}})))
(defn open-connection
[url]
(let [client (http/create-client)
client-chan (async/chan (async/buffer 19))
_ (update-cdb client-chan {:client client :url url :chan client-chan})
ws (http/websocket client
url
:open on-open
:close on-rmtclose
:error on-error
:text receive
:byte receive
)]
(let [ws (async/<!! (get-cdb :open-chan))
client-rec (get-cdb client-chan)
client-rec (assoc client-rec :ws ws :bpsize 0 :msgrcv 0 :msgsnt 0)]
bogus second call of open callback
(update-cdb client-chan client-rec ws client-rec)
(async/>!! client-chan {:op :open :payload ws}))
client-chan))
(defn close-connection [ws]
(let [client (get-cdb [ws :client])
client-chan (get-cdb [ws :chan])]
(update-cdb client-chan :rm ws :rm)
(http/close client)))
(comment
(def client (http/create-client))
(def ws (http/websocket client
"ws:3000/ws"
:open on-open
:close on-close
:error on-error
:text receive
:byte receive
))
(http/close client)
(send!
ws :text (json/write-str
{:type "broadcast", :payload {:client "Clojure"}}))
(send!
ws :text (json/write-str
{:type "echo", :payload {:client "Clojure"}}))
(send!
ws :byte (mpk/pack
{:type "broadcast", :payload {:client "Clojure"}}))
)
|
e353fe90113500c98a70dc47e8756c36611cbb83aee92868767dab8b32e7b4f9 | whalliburton/academy | randomness.lisp | (in-package :academy)
(defun random-element (sequence)
(elt sequence (random (length sequence))))
(defun random-booleans (num &optional (probability 2))
(loop for i from 1 to num
collect (zerop (random probability))))
(defmacro random-do (&rest statements)
(let ((length (length statements)))
`(case (random ,length)
,@(loop for x from 0 to length
for statement in statements
collect `(,x ,statement)))))
(defun random-word ()
(unless *words* (load-words))
(aref *words* (random (length *words*))))
Randomize with entropy hopefully taken from a " real " world source .
(setf *random-state* (make-random-state t))
| null | https://raw.githubusercontent.com/whalliburton/academy/87a1a13ffbcd60d8553e42e647c59486c761e8cf/randomness.lisp | lisp | (in-package :academy)
(defun random-element (sequence)
(elt sequence (random (length sequence))))
(defun random-booleans (num &optional (probability 2))
(loop for i from 1 to num
collect (zerop (random probability))))
(defmacro random-do (&rest statements)
(let ((length (length statements)))
`(case (random ,length)
,@(loop for x from 0 to length
for statement in statements
collect `(,x ,statement)))))
(defun random-word ()
(unless *words* (load-words))
(aref *words* (random (length *words*))))
Randomize with entropy hopefully taken from a " real " world source .
(setf *random-state* (make-random-state t))
| |
0eb7754a1f8944a1a9b3b6c385e24b01b362038eb0e4d323d640a34d290646f8 | input-output-hk/rscoin-haskell | Glade.hs | {-# LANGUAGE OverloadedStrings #-}
module GUI.RSCoin.Glade
( GladeMainWindow (..)
, AddContactWindow (..)
, importGlade
) where
import GUI.RSCoin.MainWindow (AddContactWindow (..))
import qualified RSCoin.Core as C
import qualified Graphics.UI.Gtk as G
import Paths_rscoin (getDataFileName)
data GladeMainWindow = GladeMainWindow
{ gWindow :: G.Window
, gNotebookMain :: G.Notebook
, gProgressBarUpdate :: G.ProgressBar
, gLabelSync :: G.Label
, gTreeViewWallet :: G.TreeView
, gBoxRSCoinLogo :: G.Box
, gBoxWalletHeaderWrapper :: G.Box
, gBoxWalletHeader :: G.Box
, gLabelCurrentBalance :: G.Label
, gLabelUnconfirmedBalance :: G.Label
, gLabelTransactionsNumber :: G.Label
, gLabelCurrentAccount :: G.Label
, gEntryPayTo :: G.Entry
, gButtonChooseContacts :: G.Button
, gSpinButtonSendAmount :: G.SpinButton
, gButtonConfirmSend :: G.Button
, gButtonClearSend :: G.Button
, gTreeViewContactsView :: G.TreeView
, gButtonAddContact :: G.Button
, gButtonRemoveContact :: G.Button
, gLabelContactsNum :: G.Label
, gTreeViewAddressesView :: G.TreeView
, gButtonGenerateAddress :: G.Button
, gButtonCopyAddress :: G.Button
}
makeBuilder :: FilePath -> IO G.Builder
makeBuilder path =
do C.logDebug "Initializing glade builder"
builder <- G.builderNew
G.builderAddFromFile builder path
return builder
importGlade :: IO (GladeMainWindow, AddContactWindow)
importGlade = do
C.logDebug "Loading Glade layout"
uiPath <- getDataFileName "resources/RSCoinMain.glade"
builder <- makeBuilder uiPath
let getWidget :: G.GObjectClass c => (G.GObject -> c) -> String -> IO c
getWidget = G.builderGetObject builder
getWindow = getWidget G.castToWindow
getNotebook = getWidget G.castToNotebook
getLabel = getWidget G.castToLabel
getEntry = getWidget G.castToEntry
getButton = getWidget G.castToButton
getSpinButton = getWidget G.castToSpinButton
getBox = getWidget G.castToBox
getProgressBar = getWidget G.castToProgressBar
getView = getWidget G.castToTreeView
C.logDebug "Getting widgets out of GTK"
gmw <- GladeMainWindow
<$> getWindow "mainWindow"
<*> getNotebook "mainNotebook"
<*> getProgressBar "updateProgressBar"
<*> getLabel "syncLabel"
<*> getView "walletTreeView"
<*> getBox "rscoinLogo"
<*> getBox "walletHeaderWrapper"
<*> getBox "walletHeaderBox"
<*> getLabel "currentBalanceLabel"
<*> getLabel "unconfirmedBalanceLabel"
<*> getLabel "transactionsNumberLabel"
<*> getLabel "currentAccountLabel"
<*> getEntry "payToEntry"
<*> getButton "chooseContactsButton"
<*> getSpinButton "sendAmountSpinButton"
<*> getButton "confirmSendButton"
<*> getButton "clearSendButton"
<*> getView "contactsView"
<*> getButton "addContactButton"
<*> getButton "removeContactButton"
<*> getLabel "contactsNumLabel"
<*> getView "addressesView"
<*> getButton "generateAddressButton"
<*> getButton "copyAddressButton"
acw <- AddContactWindow
<$> getWindow "addContactWindow"
<*> getEntry "nameEntry"
<*> getEntry "addressEntry"
<*> getButton "okButton"
<*> getButton "cancelButton"
return (gmw, acw)
| null | https://raw.githubusercontent.com/input-output-hk/rscoin-haskell/109d8f6f226e9d0b360fcaac14c5a90da112a810/src/User/GUI/RSCoin/Glade.hs | haskell | # LANGUAGE OverloadedStrings # |
module GUI.RSCoin.Glade
( GladeMainWindow (..)
, AddContactWindow (..)
, importGlade
) where
import GUI.RSCoin.MainWindow (AddContactWindow (..))
import qualified RSCoin.Core as C
import qualified Graphics.UI.Gtk as G
import Paths_rscoin (getDataFileName)
data GladeMainWindow = GladeMainWindow
{ gWindow :: G.Window
, gNotebookMain :: G.Notebook
, gProgressBarUpdate :: G.ProgressBar
, gLabelSync :: G.Label
, gTreeViewWallet :: G.TreeView
, gBoxRSCoinLogo :: G.Box
, gBoxWalletHeaderWrapper :: G.Box
, gBoxWalletHeader :: G.Box
, gLabelCurrentBalance :: G.Label
, gLabelUnconfirmedBalance :: G.Label
, gLabelTransactionsNumber :: G.Label
, gLabelCurrentAccount :: G.Label
, gEntryPayTo :: G.Entry
, gButtonChooseContacts :: G.Button
, gSpinButtonSendAmount :: G.SpinButton
, gButtonConfirmSend :: G.Button
, gButtonClearSend :: G.Button
, gTreeViewContactsView :: G.TreeView
, gButtonAddContact :: G.Button
, gButtonRemoveContact :: G.Button
, gLabelContactsNum :: G.Label
, gTreeViewAddressesView :: G.TreeView
, gButtonGenerateAddress :: G.Button
, gButtonCopyAddress :: G.Button
}
makeBuilder :: FilePath -> IO G.Builder
makeBuilder path =
do C.logDebug "Initializing glade builder"
builder <- G.builderNew
G.builderAddFromFile builder path
return builder
importGlade :: IO (GladeMainWindow, AddContactWindow)
importGlade = do
C.logDebug "Loading Glade layout"
uiPath <- getDataFileName "resources/RSCoinMain.glade"
builder <- makeBuilder uiPath
let getWidget :: G.GObjectClass c => (G.GObject -> c) -> String -> IO c
getWidget = G.builderGetObject builder
getWindow = getWidget G.castToWindow
getNotebook = getWidget G.castToNotebook
getLabel = getWidget G.castToLabel
getEntry = getWidget G.castToEntry
getButton = getWidget G.castToButton
getSpinButton = getWidget G.castToSpinButton
getBox = getWidget G.castToBox
getProgressBar = getWidget G.castToProgressBar
getView = getWidget G.castToTreeView
C.logDebug "Getting widgets out of GTK"
gmw <- GladeMainWindow
<$> getWindow "mainWindow"
<*> getNotebook "mainNotebook"
<*> getProgressBar "updateProgressBar"
<*> getLabel "syncLabel"
<*> getView "walletTreeView"
<*> getBox "rscoinLogo"
<*> getBox "walletHeaderWrapper"
<*> getBox "walletHeaderBox"
<*> getLabel "currentBalanceLabel"
<*> getLabel "unconfirmedBalanceLabel"
<*> getLabel "transactionsNumberLabel"
<*> getLabel "currentAccountLabel"
<*> getEntry "payToEntry"
<*> getButton "chooseContactsButton"
<*> getSpinButton "sendAmountSpinButton"
<*> getButton "confirmSendButton"
<*> getButton "clearSendButton"
<*> getView "contactsView"
<*> getButton "addContactButton"
<*> getButton "removeContactButton"
<*> getLabel "contactsNumLabel"
<*> getView "addressesView"
<*> getButton "generateAddressButton"
<*> getButton "copyAddressButton"
acw <- AddContactWindow
<$> getWindow "addContactWindow"
<*> getEntry "nameEntry"
<*> getEntry "addressEntry"
<*> getButton "okButton"
<*> getButton "cancelButton"
return (gmw, acw)
|
b5f32a41cbcbd637c8da103c30dc18f2b31457ac51950fcecb71c5289dda5041 | wdebeaum/DeepSemLex | tether.lisp | ;;;;
;;;; W::tether
;;;;
(define-words :pos W::v :TEMPL AGENT-AFFECTED-XP-NP-TEMPL
:words (
(W::tether
(SENSES
((meta-data :origin calo :entry-date 20031230 :change-date nil :comments html-purchasing-corpus)
(LF-PARENT ONT::ATTACH)
(SEM (F::Aspect F::bounded) (F::Time-span F::Atomic))
(TEMPL AGENT-AFFECTED-AFFECTED1-XP-OPTIONAL-TEMPL (xp (% W::pp (W::ptype W::to))))
)
)
)
))
| null | https://raw.githubusercontent.com/wdebeaum/DeepSemLex/ce0e7523dd2b1ebd42b9e88ffbcfdb0fd339aaee/trips/src/LexiconManager/Data/new/tether.lisp | lisp |
W::tether
|
(define-words :pos W::v :TEMPL AGENT-AFFECTED-XP-NP-TEMPL
:words (
(W::tether
(SENSES
((meta-data :origin calo :entry-date 20031230 :change-date nil :comments html-purchasing-corpus)
(LF-PARENT ONT::ATTACH)
(SEM (F::Aspect F::bounded) (F::Time-span F::Atomic))
(TEMPL AGENT-AFFECTED-AFFECTED1-XP-OPTIONAL-TEMPL (xp (% W::pp (W::ptype W::to))))
)
)
)
))
|
af70f3ae9eab82d6fccc3d06fe1e6cff131caa43f1864504cf517766d6b7ad12 | noitcudni/google-search-console-bulk-url-removal | core.cljs | (ns google-webmaster-tools-bulk-url-removal.content-script.core
(:require-macros [cljs.core.async.macros :refer [go go-loop]])
(:require [cljs.core.async :refer [<! >! put! chan] :as async]
[hipo.core :as hipo]
[dommy.core :refer-macros [sel sel1] :as dommy]
[testdouble.cljs.csv :as csv]
;; [cognitect.transit :as t]
[chromex.logging :refer-macros [log info warn error group group-end]]
[chromex.protocols.chrome-port :refer [post-message!]]
[chromex.ext.runtime :as runtime :refer-macros [connect]]
[google-webmaster-tools-bulk-url-removal.content-script.common :as common]
[google-webmaster-tools-bulk-url-removal.background.storage :refer [clear-victims! print-victims update-storage
current-removal-attempt get-bad-victims]]
[cljs-time.core :as t]
[cljs-time.coerce :as tc]
[cemerick.url :refer [url]]
[domina :refer [single-node nodes style styles]]
[domina.xpath :refer [xpath]]
[domina.events :refer [dispatch!]]
))
(defn sync-node-helper
"This is unfortunate. alts! doens't close other channels"
[dom-fn & xpath-strs]
(go-loop []
(let [n (->> xpath-strs
(map (fn [xpath-str]
(dom-fn (xpath xpath-str))
))
(filter #(some? %))
first)]
(if (nil? n)
(do (<! (async/timeout 300))
(recur))
n)
)))
(def sync-single-node (partial sync-node-helper single-node))
(def sync-nodes (partial sync-node-helper nodes))
(defn scrape-xhr-data! []
"grab the xhr injected data and clean up the extra dom"
[]
(go
(let [injected-dom (<! (sync-single-node "//div[@id='__interceptedData']"))]
(dommy/text injected-dom))))
(defn cleanup-xhr-data! []
(go
(doseq [n (<! (sync-nodes "//div[@id='__interceptedData']"))]
(.remove n))))
;; default to Temporarily remove and Remove this URL only
(defn exec-new-removal-request
"url-method: :remove-url vs :clear-cached
url-type: :url-only vs :prefix
Possible return value in a channel
1. :not-in-property
2. :duplicate-request
3. :malform-url
4. :success
"
[url url-method url-type]
(let [ch (chan)
url-type-str (cond (= url-type "prefix") "Remove all URLs with this prefix"
(= url-type "url-only") "Remove this URL only")
next-button-xpath "//span[contains(text(), 'Temporarily remove URL')]/../../../../../../descendant::span[contains(text(), 'Next')]/../.."]
(go
(cond (and (not= url-method "remove-url") (not= url-method "clear-cached"))
(>! ch :erroneous-url-method)
(and (not= url-type "url-only") (not= url-type "prefix"))
(>! ch :erroneous-url-type)
:else
(do #_(.click (single-node (xpath "//span[contains(text(), 'New Request')]")))
(.click (<! (sync-single-node "//span[contains(text(), 'New Request')]")))
;; wait for the modal dialog to show
(<! (sync-single-node "//div[@aria-label='New Request']"))
;; Who cares? Click on all the radiobuttons
(doseq [n (<! (sync-nodes (str "//label[contains(text(), '" url-type-str "')]/div")))]
(.click n))
(doseq [n (<! (sync-nodes "//input[@placeholder='Enter URL']"))]
(do
(.click n)
(domina/set-value! n url)))
NOTE : Need to click one of the tabs to get next to show
;; Increment the wait time in between clicking on the `Clear cached URL` and the `Temporarily remove URL` tabs.
;; Don't stop until the next button is clickable
(loop [next-nodes (nodes (xpath next-button-xpath))
iter-cnt 1]
(when (->> next-nodes
(every? (fn [n]
(= (-> n
js/window.getComputedStyle
(aget "backgroundColor")) "rgba(0, 0, 0, 0.12)"))))
(cond (= url-method "remove-url")
(do
(.click (<! (sync-single-node "//span[contains(text(), 'Clear cached URL')]")))
(<! (async/timeout (* iter-cnt 300)))
(.click (<! (sync-single-node "//span[contains(text(), 'Temporarily remove URL')]")))
(recur (nodes (xpath next-button-xpath)) (inc iter-cnt))
)
(= url-method "clear-cached")
(do (.click (<! (sync-single-node "//span[contains(text(), 'Clear cached URL')]")))
(<! (async/timeout (* iter-cnt 300)))
(recur (nodes (xpath next-button-xpath)) (inc iter-cnt)))
:else
;; trigger skip-error
(prn "Need to skip-error due to url-method : " url-method) ;;xxx
)
))
NOTE : there are two next buttons . One on each tab . Ideally , I 'll use to distill down to the ONE .
;; I can only narrow it down for now. So, just loop through and click on all of them.
(doseq [n (<! (sync-nodes next-button-xpath))]
(.click n))
;; Wait for the next dialog
(<! (sync-single-node "//div[contains(text(), 'URL not in property')]"
"//div[contains(text(), 'Clear cached URL?')]"
"//div[contains(text(), 'Remove URL?')]"
"//div[contains(text(), 'Remove all URLs with this prefix?')]"
"//div[contains(text(), 'Remove entire site?')]"))
(prn "Yay, the next dialog is here !!! --> " url) ;;xxx
;; Check for "URL not in property"
(if-let [not-in-properity-node (single-node (xpath "//div[contains(text(), 'URL not in property')]"))]
;; Oops, not in the right domain
(do
(.click (<! (sync-single-node "//span[contains(text(), 'Close')]")))
(.click (<! (sync-single-node "//span[contains(text(), 'cancel')]")))
(>! ch :not-in-property))
;; NOTE: may encounter
1 . Duplicate request
2 . Malform URL
;; These show up as a modal dialog. Need to check for them
;; Check for post submit modal dialog
(do
(<! (cleanup-xhr-data!))
(prn "about to click on submit request")
(.click (<! (sync-single-node "//span[contains(text(), 'Submit request')]")))
(let [xhr-data (<! (scrape-xhr-data!))
_ (prn "xhr-data: " (subs xhr-data 0 (min 1024 (count xhr-data))))]
(if (clojure.string/includes? (subs xhr-data 0 (min 1024 (count xhr-data))) "SubmitRemovalError")
(let [err-ch (sync-single-node "//div[contains(text(), 'Duplicate request')]"
"//div[contains(text(), 'Malformed URL')]")
_ (<! err-ch)
dup-req-node (single-node (xpath "//div[contains(text(), 'Duplicate request')]"))
malform-url-node (single-node (xpath "//div[contains(text(), 'Malformed URL')]"))]
(cond (not (nil? dup-req-node)) (do
(.click (<! (sync-single-node "//span[contains(text(), 'Close')]")))
(>! ch :duplicate-request))
(not (nil? malform-url-node)) (do
(.click (<! (sync-single-node "//span[contains(text(), 'Close')]")))
(>! ch :malform-url))
))
(>! ch :success)
))
)))
))
ch))
; -- a message loop ---------------------------------------------------------------------------------------------------------
(defn process-message! [chan message]
(let [{:keys [type] :as whole-msg} (common/unmarshall message)]
(prn "CONTENT SCRIPT: process-message!: " whole-msg)
(cond (= type :done-init-victims) (do
(go
;; clean up the injected xhr data
(<! (cleanup-xhr-data!))
(post-message! chan (common/marshall {:type :next-victim}))))
(= type :remove-url) (do (prn "handling :remove-url")
(go
(let [{:keys [victim removal-method url-type]} whole-msg
request-status (<! (exec-new-removal-request victim
removal-method url-type))
;; NOTE: This timeout is here to prevent a race condition.
;; For reasons unbeknownst to me, a successful submission can
results in 2 responses . We do n't care which one comes back first
;; so long as one of them does.
;;
However , the second call may come back later than expected .
;; Even though we clean up the ejected dom right before clicking on
;; the submit button. It's entirely possible that the previously successful
;; submission comes back right after the clean up, resulting in
;; the extension getting stuck.
;;
The timeout is here to allow for plenty of time for the second call
;; to come back.
_ (<! (async/timeout 1500))]
(prn "request-status: " request-status)
(if (or (= :success request-status) (= :duplicate-request request-status))
(post-message! chan (common/marshall {:type :success
:url victim}))
(post-message! chan (common/marshall {:type :skip-error
:reason request-status
:url victim
})))
)))
(= type :done) (js/alert "DONE with bulk url removals!")
)
))
(defn ensure-english-setting []
(let [url-parts (url (.. js/window -location -href))]
(when-not (= "en" (get-in url-parts [:query "hl"]))
(js/alert "Bulk URL Removal extension works properly only in English. Press OK to set the language to English.")
(set! (.. js/window -location -href) (str (assoc-in url-parts [:query "hl"] "en")))
)))
; -- main entry point -------------------------------------------------------------------------------------------------------
(defn init! []
(let [_ (log "CONTENT SCRIPT: init")
background-port (runtime/connect)
_ (prn "single-node: "(single-node (xpath "//span[contains(text(), 'Hello world')]"))) ;;xxx
_ (prn "nodes: " (nodes (xpath "//label[contains(text(), 'hello')]/div"))) ;;xxx
]
(go
(ensure-english-setting)
(common/connect-to-background-page! background-port process-message!)
)
))
| null | https://raw.githubusercontent.com/noitcudni/google-search-console-bulk-url-removal/0f922c7301e26fba0c7ec75e73e4f8df29962348/src/content_script/google_webmaster_tools_bulk_url_removal/content_script/core.cljs | clojure | [cognitect.transit :as t]
default to Temporarily remove and Remove this URL only
wait for the modal dialog to show
Who cares? Click on all the radiobuttons
Increment the wait time in between clicking on the `Clear cached URL` and the `Temporarily remove URL` tabs.
Don't stop until the next button is clickable
trigger skip-error
xxx
I can only narrow it down for now. So, just loop through and click on all of them.
Wait for the next dialog
xxx
Check for "URL not in property"
Oops, not in the right domain
NOTE: may encounter
These show up as a modal dialog. Need to check for them
Check for post submit modal dialog
-- a message loop ---------------------------------------------------------------------------------------------------------
clean up the injected xhr data
NOTE: This timeout is here to prevent a race condition.
For reasons unbeknownst to me, a successful submission can
so long as one of them does.
Even though we clean up the ejected dom right before clicking on
the submit button. It's entirely possible that the previously successful
submission comes back right after the clean up, resulting in
the extension getting stuck.
to come back.
-- main entry point -------------------------------------------------------------------------------------------------------
xxx
xxx | (ns google-webmaster-tools-bulk-url-removal.content-script.core
(:require-macros [cljs.core.async.macros :refer [go go-loop]])
(:require [cljs.core.async :refer [<! >! put! chan] :as async]
[hipo.core :as hipo]
[dommy.core :refer-macros [sel sel1] :as dommy]
[testdouble.cljs.csv :as csv]
[chromex.logging :refer-macros [log info warn error group group-end]]
[chromex.protocols.chrome-port :refer [post-message!]]
[chromex.ext.runtime :as runtime :refer-macros [connect]]
[google-webmaster-tools-bulk-url-removal.content-script.common :as common]
[google-webmaster-tools-bulk-url-removal.background.storage :refer [clear-victims! print-victims update-storage
current-removal-attempt get-bad-victims]]
[cljs-time.core :as t]
[cljs-time.coerce :as tc]
[cemerick.url :refer [url]]
[domina :refer [single-node nodes style styles]]
[domina.xpath :refer [xpath]]
[domina.events :refer [dispatch!]]
))
(defn sync-node-helper
"This is unfortunate. alts! doens't close other channels"
[dom-fn & xpath-strs]
(go-loop []
(let [n (->> xpath-strs
(map (fn [xpath-str]
(dom-fn (xpath xpath-str))
))
(filter #(some? %))
first)]
(if (nil? n)
(do (<! (async/timeout 300))
(recur))
n)
)))
(def sync-single-node (partial sync-node-helper single-node))
(def sync-nodes (partial sync-node-helper nodes))
(defn scrape-xhr-data! []
"grab the xhr injected data and clean up the extra dom"
[]
(go
(let [injected-dom (<! (sync-single-node "//div[@id='__interceptedData']"))]
(dommy/text injected-dom))))
(defn cleanup-xhr-data! []
(go
(doseq [n (<! (sync-nodes "//div[@id='__interceptedData']"))]
(.remove n))))
(defn exec-new-removal-request
"url-method: :remove-url vs :clear-cached
url-type: :url-only vs :prefix
Possible return value in a channel
1. :not-in-property
2. :duplicate-request
3. :malform-url
4. :success
"
[url url-method url-type]
(let [ch (chan)
url-type-str (cond (= url-type "prefix") "Remove all URLs with this prefix"
(= url-type "url-only") "Remove this URL only")
next-button-xpath "//span[contains(text(), 'Temporarily remove URL')]/../../../../../../descendant::span[contains(text(), 'Next')]/../.."]
(go
(cond (and (not= url-method "remove-url") (not= url-method "clear-cached"))
(>! ch :erroneous-url-method)
(and (not= url-type "url-only") (not= url-type "prefix"))
(>! ch :erroneous-url-type)
:else
(do #_(.click (single-node (xpath "//span[contains(text(), 'New Request')]")))
(.click (<! (sync-single-node "//span[contains(text(), 'New Request')]")))
(<! (sync-single-node "//div[@aria-label='New Request']"))
(doseq [n (<! (sync-nodes (str "//label[contains(text(), '" url-type-str "')]/div")))]
(.click n))
(doseq [n (<! (sync-nodes "//input[@placeholder='Enter URL']"))]
(do
(.click n)
(domina/set-value! n url)))
NOTE : Need to click one of the tabs to get next to show
(loop [next-nodes (nodes (xpath next-button-xpath))
iter-cnt 1]
(when (->> next-nodes
(every? (fn [n]
(= (-> n
js/window.getComputedStyle
(aget "backgroundColor")) "rgba(0, 0, 0, 0.12)"))))
(cond (= url-method "remove-url")
(do
(.click (<! (sync-single-node "//span[contains(text(), 'Clear cached URL')]")))
(<! (async/timeout (* iter-cnt 300)))
(.click (<! (sync-single-node "//span[contains(text(), 'Temporarily remove URL')]")))
(recur (nodes (xpath next-button-xpath)) (inc iter-cnt))
)
(= url-method "clear-cached")
(do (.click (<! (sync-single-node "//span[contains(text(), 'Clear cached URL')]")))
(<! (async/timeout (* iter-cnt 300)))
(recur (nodes (xpath next-button-xpath)) (inc iter-cnt)))
:else
)
))
NOTE : there are two next buttons . One on each tab . Ideally , I 'll use to distill down to the ONE .
(doseq [n (<! (sync-nodes next-button-xpath))]
(.click n))
(<! (sync-single-node "//div[contains(text(), 'URL not in property')]"
"//div[contains(text(), 'Clear cached URL?')]"
"//div[contains(text(), 'Remove URL?')]"
"//div[contains(text(), 'Remove all URLs with this prefix?')]"
"//div[contains(text(), 'Remove entire site?')]"))
(if-let [not-in-properity-node (single-node (xpath "//div[contains(text(), 'URL not in property')]"))]
(do
(.click (<! (sync-single-node "//span[contains(text(), 'Close')]")))
(.click (<! (sync-single-node "//span[contains(text(), 'cancel')]")))
(>! ch :not-in-property))
1 . Duplicate request
2 . Malform URL
(do
(<! (cleanup-xhr-data!))
(prn "about to click on submit request")
(.click (<! (sync-single-node "//span[contains(text(), 'Submit request')]")))
(let [xhr-data (<! (scrape-xhr-data!))
_ (prn "xhr-data: " (subs xhr-data 0 (min 1024 (count xhr-data))))]
(if (clojure.string/includes? (subs xhr-data 0 (min 1024 (count xhr-data))) "SubmitRemovalError")
(let [err-ch (sync-single-node "//div[contains(text(), 'Duplicate request')]"
"//div[contains(text(), 'Malformed URL')]")
_ (<! err-ch)
dup-req-node (single-node (xpath "//div[contains(text(), 'Duplicate request')]"))
malform-url-node (single-node (xpath "//div[contains(text(), 'Malformed URL')]"))]
(cond (not (nil? dup-req-node)) (do
(.click (<! (sync-single-node "//span[contains(text(), 'Close')]")))
(>! ch :duplicate-request))
(not (nil? malform-url-node)) (do
(.click (<! (sync-single-node "//span[contains(text(), 'Close')]")))
(>! ch :malform-url))
))
(>! ch :success)
))
)))
))
ch))
(defn process-message! [chan message]
(let [{:keys [type] :as whole-msg} (common/unmarshall message)]
(prn "CONTENT SCRIPT: process-message!: " whole-msg)
(cond (= type :done-init-victims) (do
(go
(<! (cleanup-xhr-data!))
(post-message! chan (common/marshall {:type :next-victim}))))
(= type :remove-url) (do (prn "handling :remove-url")
(go
(let [{:keys [victim removal-method url-type]} whole-msg
request-status (<! (exec-new-removal-request victim
removal-method url-type))
results in 2 responses . We do n't care which one comes back first
However , the second call may come back later than expected .
The timeout is here to allow for plenty of time for the second call
_ (<! (async/timeout 1500))]
(prn "request-status: " request-status)
(if (or (= :success request-status) (= :duplicate-request request-status))
(post-message! chan (common/marshall {:type :success
:url victim}))
(post-message! chan (common/marshall {:type :skip-error
:reason request-status
:url victim
})))
)))
(= type :done) (js/alert "DONE with bulk url removals!")
)
))
(defn ensure-english-setting []
(let [url-parts (url (.. js/window -location -href))]
(when-not (= "en" (get-in url-parts [:query "hl"]))
(js/alert "Bulk URL Removal extension works properly only in English. Press OK to set the language to English.")
(set! (.. js/window -location -href) (str (assoc-in url-parts [:query "hl"] "en")))
)))
(defn init! []
(let [_ (log "CONTENT SCRIPT: init")
background-port (runtime/connect)
]
(go
(ensure-english-setting)
(common/connect-to-background-page! background-port process-message!)
)
))
|
b3dd4f1c0a3e673292155ff6d4c85adee62960594092d78999902e12f3315164 | blindglobe/clocc | lalr.lisp | ;;;; -*- Mode: Lisp; Syntax: Common-Lisp; Package: LALR; -*-
(provide 'lalr)
(defpackage "LALR"
(:export "DEFINE-GRAMMAR"))
(in-package "LALR")
(declaim (optimize (speed 3)))
;;; lalr.lisp
;;;
This is an LALR parser generator .
( c ) 1988 .
This is * not * the property of Xerox Corporation !
Modified to cache the first terminals , the epsilon derivations
;;; the rules that expand a category, and the items that expand
;;; a category
;;; There is a sample grammar at the end of this file.
;;; Use your text-editor to search for "Test grammar" to find it.
( in - package ' LALR )
( export ' ( make - parser lalr - parser * lalr - debug * grammar parse ) )
( shadow ' ( first rest ) )
( defmacro first ( x ) ` ( car , x ) )
( ( x ) ` ( cdr , x ) )
The external interface is MAKE - PARSER . It takes three arguments , a
CFG grammar , a list of the lexical or terminal categories , and an
;;; atomic end marker. It produces a list which is the Lisp code for
;;; an LALR(1) parser for that grammar. If that list is compiled, then
the function LALR - PARSER is defined . LALR - PARSER is a function with
two arguments , NEXT - INPUT and PARSE - ERROR .
;;;
The first argument to LALR - PARSER , NEXT - INPUT must be a function with
zero arguments ; every time NEXT - INPUT is called it should return
two values , the first is the category of the next lexical
form in the input and the second is the value of that form .
Each call to NEXT - INPUT should advance one lexical item in the
input . When the input is consumed , NEXT - INPUT should return a
;;; CONS whose CAR is the atomic end marker used in the call to MAKE-PARSER.
;;;
The second argument to LALR - PARSER , PARSE - ERROR will be called
;;; if the parse fails because the input is ill-formed.
;;;
;;;
;;; There is a sample at the end of this file.
;;; definitions of constants and global variables used
(defconstant *TOPCAT* '$Start)
(defvar *ENDMARKER*)
(defvar glex)
(defvar grules)
(defvar gstart)
(defvar gstarts)
(defvar gcats)
(defvar gfirsts)
(defvar gepsilons)
(defvar gexpansions)
(defvar *lalr-debug* NIL "Inserts debugging code into parser if non-NIL")
(defvar stateList '())
(defvar *first-terminals-cache* nil)
(defmacro fixnum= (x y) `(eq ,x ,y))
(defun make-parser (grammar lex endMarker &key (name 'lalr-parser))
"Takes a grammar and produces the Lisp code for a parser for that grammar"
(setq *ENDMARKER* endMarker)
;;; cache some data that will be useful later
(setq glex lex)
(setq gstart (caar grammar))
(setq grules (let ((i 0))
(mapcar #'(lambda (r) (transform-rule r (incf i)))
grammar)))
(setq gcats (get-all-cats))
(progn
(setq gexpansions (make-hash-table :test #'eq))
(setq *first-terminals-cache* (make-hash-table :test #'equal))
(dolist (cat gcats)
(setf (gethash cat gexpansions) (compute-expansion cat))))
(setq gepsilons (remove-if-not #'derives-eps gcats))
(progn
(setq gstarts (make-hash-table :test #'eq))
(setf (gethash *ENDMARKER* gstarts) (list *ENDMARKER*))
(dolist (cat gcats)
(setf (gethash cat gstarts) (first-terms (list cat)))) )
;;; now actually build the parser
(build-table)
(when (and (listp *lalr-debug*) (member 'print-table *lalr-debug*))
(Print-Table stateList))
(format T "~%; Table ready (total of ~R rules --> ~R states)."
(length grammar)
(length stateList))
(format T "~%; Dumping:")
(build-parser name))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
;;; Rules and Grammars
;;;
(defstruct rule no mother daughters action)
(defun transform-rule (rule no)
(make-rule :no no
:mother (first rule)
:daughters (butlast (cddr rule))
:action (car (last rule))))
(defun compute-expansion (cat)
(remove-if-not #'(lambda (rule)
(eq (rule-mother rule) cat))
grules))
(defmacro expand (cat)
`(gethash ,cat gexpansions) )
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
;;; Properties of grammars
(defun get-all-cats ()
(labels ((try (dejaVu cat)
(if (find cat dejaVu)
dejaVu
(tryRules (cons cat dejaVu) (compute-expansion cat))))
(tryRules (dejaVu rules)
(if rules
(tryRules (tryCats dejaVu (rule-daughters (car rules)))
(cdr rules))
dejaVu))
(tryCats (dejaVu cats)
(if cats
(tryCats (try dejaVu (car cats)) (cdr cats))
dejaVu)))
(try '() gstart)))
(defun derives-eps (c)
"t if c can be rewritten as the null string"
(labels ((try (dejaVu cat)
(unless (find cat dejaVu)
(some #'(lambda (r)
(every #'(lambda (c1) (try (cons cat dejaVu) c1))
(rule-daughters r)))
(expand cat)))))
(try '() c)))
(defmacro derives-epsilon (c)
"looks up the cache to see if c derives the null string"
`(member ,c gepsilons))
(defun first-terms (catList)
"the leading terminals of an expansion of catList"
(labels ((firstDs (cats)
(if cats
(if (derives-epsilon (car cats))
(cons (car cats) (firstDs (cdr cats)))
(list (car cats)))))
(try (dejaVu cat)
(if (member cat dejaVu)
dejaVu
(tryList (cons cat dejaVu)
(mapcan #'(lambda (r)
(firstDs (rule-daughters r)))
(expand cat)))))
(tryList (dejaVu cats)
(if cats
(tryList (try dejaVu (car cats)) (cdr cats))
dejaVu)))
(remove-if-not #'(lambda (term)
(or (eq *ENDMARKER* term)
(find term glex)))
(tryList '() (firstDs catList)))))
(defun first-terminals (cat-list)
(if cat-list
(if (derives-epsilon (first cat-list))
(union (gethash (first cat-list) gstarts)
(first-terminals (rest cat-list)))
(gethash (first cat-list) gstarts))
'()))
#+IGNORE
(defun first-terminals* (cat-list-0 cat-1)
(let ((key (cons cat-list-0 cat-1)))
(multiple-value-bind (v found?) (gethash key *first-terminals-cache*)
(cond (found? v)
(t (setf (gethash key *first-terminals-cache*)
(block foo
(let ((res nil))
(dolist (c0 cat-list-0)
(setf res (union res (gethash c0 gstarts)))
(unless (derives-epsilon c0)
(return-from foo res)))
(union res (gethash cat-1 gstarts)) )))) ))))
(defmacro first-terminals* (cat-list-0 cat-1)
`(let ((cat-list-0 ,cat-list-0)
(cat-1 ,cat-1))
(let ((key (cons cat-list-0 cat-1)))
(multiple-value-bind (v found?) (gethash key *first-terminals-cache*)
(cond (found? v)
(t (setf (gethash key *first-terminals-cache*)
(block foo
(let ((res nil))
(dolist (c0 cat-list-0)
(setf res (union res (gethash c0 gstarts)))
(unless (derives-epsilon c0)
(return-from foo res)))
(union res (gethash cat-1 gstarts)) )))) )))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
;;; LALR(1) parsing table constructor
;;;
(defstruct item rule pos la)
(defmacro item-daughters (i) `(rule-daughters (item-rule ,i)))
(defmacro item-right (i) `(nthcdr (item-pos ,i) (item-daughters ,i)))
(defmacro item-equal (i1 i2)
`(and (eq (item-rule ,i1) (item-rule ,i2))
(fixnum= (item-pos ,i1) (item-pos ,i2))
(eq (item-la ,i1) (item-la ,i2))))
(defmacro item-core-equal (c1 c2)
"T if the cores of c1 and c2 are equal"
`(and (eq (item-rule ,c1) (item-rule ,c2))
(fixnum= (item-pos ,c1) (item-pos ,c2))))
(defun close-items (items)
"computes the closure of a set of items"
(declare (optimize (speed 3)))
(do ((to-do items))
((null to-do) items)
(let ((i (pop to-do)))
(let ((rgt (item-right i)))
(when rgt
(dolist (la (first-terminals* (rest rgt) (item-la i) ))
(dolist (r (expand (first rgt)))
(unless (dolist (i items)
(if (and (eq r (item-rule i))
(eq (item-la i) la)
(fixnum= (item-pos i) 0))
(return t)))
(let ((new (make-item :rule r :pos 0 :la la)))
(push new items)
(push new to-do))))))))))
(defun shift-items (items cat)
"shifts a set of items over cat"
(labels ((shift-item (item)
(if (eq (first (item-right item)) cat)
(make-item :rule (item-rule item)
:pos (1+ (item-pos item))
:la (item-la item)))))
(let ((new-items '()))
(dolist (i items)
(let ((n (shift-item i)))
(if n
(push n new-items))))
new-items)))
(defun items-right (items)
"returns the set of categories appearing to the right of the dot"
(let ((right '()))
(dolist (i items)
(let ((d (first (item-right i))))
(if (and d (not (find d right)))
(push d right))))
right))
(defun compact-items (items)
  "Collapses items sharing the same core into compact items whose LA
slot holds the list of all look-aheads seen for that core.  The result
is sorted (destructively) by rule number."
  (let ((result '()))
    (dolist (item items)
      ;; Look for an already-collected item with the same core.
      (let ((match (dolist (candidate result)
                     (when (item-core-equal candidate item)
                       (return candidate)))))
        (if match
            ;; Same core seen before: just accumulate the look-ahead.
            (push (item-la item) (item-la match))
            (push (make-item :rule (item-rule item)
                             :pos (item-pos item)
                             :la (list (item-la item)))
                  result))))
    (sort result #'<
          :key #'(lambda (ci) (rule-no (item-rule ci))))))
;; Inverse of COMPACT-ITEMS: one item per (core, look-ahead) pair.
;; NOTE(review): the expansion binds the literal symbols ITEMS, CI and
;; LA, so a CITEMS argument form mentioning any of those names would be
;; captured.  Harmless for the callers in this file, which only pass
;; (state-citems state).
(defmacro expand-citems (citems)
  "expands a list of compact items into items"
  `(let ((items '()))
     (dolist (ci ,citems)
       (dolist (la (item-la ci))
         (push (make-item :rule (item-rule ci)
                          :pos (item-pos ci)
                          :la la)
               items)))
     items))
(defun subsumes-citems (ci1s ci2s)
  "T if the sorted compact-item set CI2S subsumes CI1S: same length,
pairwise equal cores, and each CI1S look-ahead set contained in the
corresponding CI2S one."
  (and (fixnum= (length ci1s) (length ci2s))
       ;; Walk both (equal-length, identically sorted) lists in step.
       (do ((as ci1s (cdr as))
            (bs ci2s (cdr bs)))
           ((null as) t)
         (let ((a (car as))
               (b (car bs)))
           (unless (and (item-core-equal a b)
                        (subsetp (item-la a) (item-la b)))
             (return nil))))))
(defun merge-citems (ci1s ci2s)
  "Destructively adds the look-aheads of CI1S into the corresponding
items of CI2S, which should subsume CI1S.  Returns CI2S."
  (mapc #'(lambda (ci1 ci2)
            (setf (item-la ci2)
                  (nunion (item-la ci1) (item-la ci2))))
        ci1s ci2s)
  ci2s)
;;; The actual table construction functions
;; A parser state: its generated NAME (an interned symbol, also used as
;; a LABELS function name in the emitted parser), the compact items
;; that define it, its outgoing SHIFTS, and a CONFLICT slot.
(defstruct state name citems shifts conflict)
;; A shift edge: on category CAT, go to the state named WHERE.
(defstruct shift cat where)
;; Counter behind the generated state names STATE-0, STATE-1, ...;
;; reset by BUILD-TABLE and incremented by ADD-STATE.
(defparameter nextStateNo -1)
; (defun lookup (citems)
;   "finds a state with the same core items as citems if it exits"
;   (find-if #'(lambda (state)
;                (and (= (length citems) (length (state-citems state)))
;                     (every #'(lambda (ci1 ci2)
;                                (item-core-equal ci1 ci2))
;                            citems (state-citems state))
;                    ))
;            stateList))
(defun lookup (citems)
  "Finds an existing state whose core items match CITEMS, or NIL."
  (dolist (state stateList)
    (when (and (fixnum= (length citems) (length (state-citems state)))
               ;; Both lists are sorted by rule number, so a pairwise
               ;; core comparison suffices.
               (do ((rest1 citems (cdr rest1))
                    (rest2 (state-citems state) (cdr rest2)))
                   ((null rest1) t)
                 (unless (item-core-equal (car rest1) (car rest2))
                   (return nil))))
      (return state))))
(defun add-state (citems)
  "Creates a fresh state holding CITEMS, pushes it onto stateList and
returns it.  The state name STATE-n comes from nextStateNo."
  (let ((state (make-state
                :name (intern (format nil "STATE-~D" (incf nextStateNo)))
                :citems citems)))
    (push state stateList)
    state))
(defun get-state-name (items)
  "Returns the name of the state for this set of items, creating it or
percolating new look-aheads through it as required."
  (let* ((citems (compact-items items))
         (state (lookup citems)))
    (if (null state)
        ;; Unseen core: make a fresh state and expand its successors.
        (progn
          (setq state (add-state citems))
          (build-state state items))
        ;; Known core: when ITEMS brings new look-aheads, merge them in
        ;; and push them through the successor states; otherwise the
        ;; existing state already covers us and nothing happens.
        (unless (subsumes-citems citems (state-citems state))
          (merge-citems citems (state-citems state))
          (follow-state items)))
    (state-name state)))
(defun build-state (state items)
  "Creates the states reachable from STATE, recording one shift edge
per category to the right of a dot in the closure of ITEMS."
  (let ((closed (close-items items)))
    (dolist (cat (items-right closed))
      (let ((edge (make-shift :cat cat
                              :where (get-state-name
                                      (shift-items closed cat)))))
        (push edge (state-shifts state))))))
(defun follow-state (items)
  "Percolates look-ahead onto the descendant states of this state.
Called for its effect only; returns NIL."
  (let ((closed (close-items items)))
    (mapc #'(lambda (cat)
              (get-state-name (shift-items closed cat)))
          (items-right closed))
    nil))
(defun build-table ()
  "Actually builds the table: resets the global state registry, then
drives the construction from the augmented start item
$Start -> . gstart, *ENDMARKER*.  Leaves stateList in creation order."
  (setq stateList '()
        nextStateNo -1)
  (let ((start-item (make-item
                     :rule (make-rule :no 0
                                      :mother *TOPCAT*
                                      :daughters (list gstart))
                     :pos 0
                     :la *ENDMARKER*)))
    (get-state-name (list start-item)))
  ;; States were pushed, so reverse to recover creation order.
  (setq stateList (nreverse stateList)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
;;; LALR(1) parsing table printer
;;;
(defun print-table (stateList)
  "Prints a human-readable dump of the state table: for each state its
compact items (dotted rules with look-aheads), shift edges, and reduce
actions."
  (dolist (state stateList)
    (format t "~%~%~a:" (state-name state))
    ;; Dotted rules: mother --> done . remaining, look-aheads.
    (dolist (citem (state-citems state))
      (let ((daughters (rule-daughters (item-rule citem)))
            (pos (item-pos citem)))
        (format t "~% ~a -->~{ ~a~} .~{ ~a~}, ~{~a ~}"
                (rule-mother (item-rule citem))
                (subseq daughters 0 pos)
                (subseq daughters pos)
                (item-la citem))))
    (dolist (shift (state-shifts state))
      (format t "~% On ~a shift ~a" (shift-cat shift) (shift-where shift)))
    ;; Reduce actions are the closure items with the dot at the end.
    (dolist (reduce (compact-items
                     (delete-if #'(lambda (i) (item-right i))
                                (close-items
                                 (expand-citems (state-citems state))))))
      (format t "~% On~{ ~a~} reduce~{ ~a~} --> ~a"
              (item-la reduce)
              (rule-daughters (item-rule reduce))
              (rule-mother (item-rule reduce)))))
  (format t "~%"))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
;;; LALR(1) parser constructor
;;;
(defun translate-State (state)
  "translates a state into lisp code that could appear in a labels form"
  ;; Progress indicator printed while dumping the parser.
  (format T " ~(~S~)" (state-name state))
  ;; Reduce actions: closure items whose dot has reached the end.
  (let ((reduces (compact-items
                  (delete-if #'(lambda (i) (item-right i))
                             (close-items
                              (expand-citems (state-citems state))))))
        (symbolsSoFar '())) ; to ensure that a symbol never occurs twice
    (labels ((translateShift (shift)
               ;; One CASE clause: on this category, shift and jump to
               ;; the destination state's LABELS function.
               (push (shift-cat shift) symbolsSoFar)
               `(,(shift-cat shift)
                 ,@(when *lalr-debug*
                     `((when *lalr-debug*
                         (princ ,(format nil "Shift ~a to ~a~%"
                                         (shift-cat shift) (shift-where shift))))))
                 (shift-from #',(state-name state))
                 (,(shift-where shift))))
             (translateReduce (item)
               ;; A look-ahead already claimed by an earlier clause is a
               ;; conflict: warn, then drop the clashing look-aheads so
               ;; the earlier (shift) clause wins.
               (when (intersection (item-la item) symbolsSoFar)
                 (format t "Warning, Not LALR(1)!!: ~a, ~a --> ~{~a ~}~%"
                         (state-name state)
                         (rule-mother (item-rule item))
                         (rule-daughters (item-rule item)))
                 (setf (item-la item)
                       (nset-difference (item-la item)
                                        symbolsSoFar)))
               (dolist (la (item-la item))
                 (push la symbolsSoFar))
               ;; CASE clause keyed on the remaining look-ahead list.
               `(,(item-la item)
                 ,@(when *lalr-debug*
                     `((when *lalr-debug*
                         (princ ,(format nil "Reduce ~{~a ~} --> ~a~%"
                                         (rule-daughters (item-rule item))
                                         (rule-mother (item-rule item)))))))
                 (reduce-cat #',(state-name state)
                             ',(rule-mother (item-rule item))
                             ,(item-pos item)
                             ,(rule-action (item-rule item))))))
      ;; The emitted LABELS binding: (STATE-n () (case (input-peek) ...)).
      `(,(state-name state) ()
        (case (input-peek)
          ,@(mapcar #'translateShift (state-shifts state))
          ,@(mapcar #'translateReduce reduces)
          (otherwise (funcall parse-error)))))))
;;; next-input performs lexical analysis.  It must return two values:
;;; the category and the value.
(defun build-parser (name)
  "returns an lalr(1) parser. next-input must return 2 values!"
  ;; Emits a DEFUN form for the whole parser.  The state functions
  ;; produced by TRANSLATE-STATE are spliced in below as LABELS
  ;; bindings; they call each other and the helpers INPUT-PEEK /
  ;; SHIFT-FROM / REDUCE-CAT by name.
  `(defun ,name (next-input parse-error)
    (let ((cat-la '()) ; category lookahead
          (val-la '()) ; value lookahead
          (val-stack '()) ; value stack
          (state-stack '())) ; state stack
      (labels ((input-peek ()
                 ;; Fill the one-token look-ahead buffer on demand from
                 ;; NEXT-INPUT and return the pending category.
                 (unless cat-la
                   (setf (values cat-la val-la) (funcall next-input)
                         cat-la (list cat-la)
                         val-la (list val-la)))
                 (first cat-la))
               (shift-from (name)
                 ;; Consume the look-ahead token, moving its value onto
                 ;; the value stack and remembering the current state.
                 (push name state-stack)
                 (pop cat-la)
                 (push (pop val-la) val-stack))
               (reduce-cat (name cat ndaughters action)
                 ;; Pop NDAUGHTERS values, apply the rule ACTION, and
                 ;; re-enter the uncovered state with CAT as look-ahead.
                 ;; Reducing to the top category ends the parse.
                 (if (eq cat ',*TOPCAT*)
                     (pop val-stack)
                     (let ((daughter-values '())
                           (state name))
                       (dotimes (i ndaughters)
                         (push (pop val-stack) daughter-values)
                         (setq state (pop state-stack)))
                       (push cat cat-la)
                       (push (apply action daughter-values) val-la)
                       (funcall state))))
               ,@(mapcar #'translate-State stateList))
        ;; Start the machine in the first (initial) state.
        (,(state-name (first stateList)))))))
(defmacro define-grammar (name lex-forms &rest grammar)
  ;; Convenience wrapper: MAKE-PARSER runs at macro-expansion time and
  ;; yields the parser's DEFUN form, using :EOF as the end marker.
  (make-parser grammar lex-forms :eof :name name))
| null | https://raw.githubusercontent.com/blindglobe/clocc/a50bb75edb01039b282cf320e4505122a59c59a7/src/gui/clx/docs/lalr.lisp | lisp | -*- Mode: Lisp; Syntax: Common-Lisp; Package: LALR; -*-
lalr.lisp
the rules that expand a category, and the items that expand
a category
There is a sample grammar at the end of this file.
Use your text-editor to search for "Test grammar" to find it.
atomic end marker. It produces a list which is the Lisp code for
an LALR(1) parser for that grammar. If that list is compiled, then
every time NEXT - INPUT is called it should return
CONS whose CAR is the atomic end marker used in the call to MAKE-PARSER.
if the parse fails because the input is ill-formed.
There is a sample at the end of this file.
definitions of constants and global variables used
cache some data that will be useful later
now actually build the parser
Rules and Grammars
Properties of grammars
LALR(1) parsing table constructor
The actual table construction functions
(find-if #'(lambda (state)
citems (state-citems state))
))
stateList))
LALR(1) parsing table printer
LALR(1) parser constructor
to ensure that a symbol never occurs twice
the category and the value.
category lookahead
value lookahead
value stack
state stack |
(provide 'lalr)
(defpackage "LALR"
(:export "DEFINE-GRAMMAR"))
(in-package "LALR")
(declaim (optimize (speed 3)))
This is an LALR parser generator .
( c ) 1988 .
This is * not * the property of Xerox Corporation !
Modified to cache the first terminals , the epsilon derivations
( in - package ' LALR )
( export ' ( make - parser lalr - parser * lalr - debug * grammar parse ) )
( shadow ' ( first rest ) )
( defmacro first ( x ) ` ( car , x ) )
( ( x ) ` ( cdr , x ) )
The external interface is MAKE - PARSER . It takes three arguments , a
CFG grammar , a list of the lexical or terminal categories , and an
the function LALR - PARSER is defined . LALR - PARSER is a function with
two arguments , NEXT - INPUT and PARSE - ERROR .
The first argument to LALR - PARSER , NEXT - INPUT must be a function with
two values , the first is the category of the next lexical
form in the input and the second is the value of that form .
Each call to NEXT - INPUT should advance one lexical item in the
input . When the input is consumed , NEXT - INPUT should return a
The second argument to LALR - PARSER , PARSE - ERROR will be called
(defconstant *TOPCAT* '$Start)
(defvar *ENDMARKER*)
(defvar glex)
(defvar grules)
(defvar gstart)
(defvar gstarts)
(defvar gcats)
(defvar gfirsts)
(defvar gepsilons)
(defvar gexpansions)
(defvar *lalr-debug* NIL "Inserts debugging code into parser if non-NIL")
(defvar stateList '())
(defvar *first-terminals-cache* nil)
(defmacro fixnum= (x y) `(eq ,x ,y))
(defun make-parser (grammar lex endMarker &key (name 'lalr-parser))
"Takes a grammar and produces the Lisp code for a parser for that grammar"
(setq *ENDMARKER* endMarker)
(setq glex lex)
(setq gstart (caar grammar))
(setq grules (let ((i 0))
(mapcar #'(lambda (r) (transform-rule r (incf i)))
grammar)))
(setq gcats (get-all-cats))
(progn
(setq gexpansions (make-hash-table :test #'eq))
(setq *first-terminals-cache* (make-hash-table :test #'equal))
(dolist (cat gcats)
(setf (gethash cat gexpansions) (compute-expansion cat))))
(setq gepsilons (remove-if-not #'derives-eps gcats))
(progn
(setq gstarts (make-hash-table :test #'eq))
(setf (gethash *ENDMARKER* gstarts) (list *ENDMARKER*))
(dolist (cat gcats)
(setf (gethash cat gstarts) (first-terms (list cat)))) )
(build-table)
(when (and (listp *lalr-debug*) (member 'print-table *lalr-debug*))
(Print-Table stateList))
(format T "~%; Table ready (total of ~R rules --> ~R states)."
(length grammar)
(length stateList))
(format T "~%; Dumping:")
(build-parser name))
(defstruct rule no mother daughters action)
(defun transform-rule (rule no)
(make-rule :no no
:mother (first rule)
:daughters (butlast (cddr rule))
:action (car (last rule))))
(defun compute-expansion (cat)
(remove-if-not #'(lambda (rule)
(eq (rule-mother rule) cat))
grules))
(defmacro expand (cat)
`(gethash ,cat gexpansions) )
(defun get-all-cats ()
(labels ((try (dejaVu cat)
(if (find cat dejaVu)
dejaVu
(tryRules (cons cat dejaVu) (compute-expansion cat))))
(tryRules (dejaVu rules)
(if rules
(tryRules (tryCats dejaVu (rule-daughters (car rules)))
(cdr rules))
dejaVu))
(tryCats (dejaVu cats)
(if cats
(tryCats (try dejaVu (car cats)) (cdr cats))
dejaVu)))
(try '() gstart)))
(defun derives-eps (c)
"t if c can be rewritten as the null string"
(labels ((try (dejaVu cat)
(unless (find cat dejaVu)
(some #'(lambda (r)
(every #'(lambda (c1) (try (cons cat dejaVu) c1))
(rule-daughters r)))
(expand cat)))))
(try '() c)))
(defmacro derives-epsilon (c)
"looks up the cache to see if c derives the null string"
`(member ,c gepsilons))
(defun first-terms (catList)
"the leading terminals of an expansion of catList"
(labels ((firstDs (cats)
(if cats
(if (derives-epsilon (car cats))
(cons (car cats) (firstDs (cdr cats)))
(list (car cats)))))
(try (dejaVu cat)
(if (member cat dejaVu)
dejaVu
(tryList (cons cat dejaVu)
(mapcan #'(lambda (r)
(firstDs (rule-daughters r)))
(expand cat)))))
(tryList (dejaVu cats)
(if cats
(tryList (try dejaVu (car cats)) (cdr cats))
dejaVu)))
(remove-if-not #'(lambda (term)
(or (eq *ENDMARKER* term)
(find term glex)))
(tryList '() (firstDs catList)))))
(defun first-terminals (cat-list)
(if cat-list
(if (derives-epsilon (first cat-list))
(union (gethash (first cat-list) gstarts)
(first-terminals (rest cat-list)))
(gethash (first cat-list) gstarts))
'()))
#+IGNORE
(defun first-terminals* (cat-list-0 cat-1)
(let ((key (cons cat-list-0 cat-1)))
(multiple-value-bind (v found?) (gethash key *first-terminals-cache*)
(cond (found? v)
(t (setf (gethash key *first-terminals-cache*)
(block foo
(let ((res nil))
(dolist (c0 cat-list-0)
(setf res (union res (gethash c0 gstarts)))
(unless (derives-epsilon c0)
(return-from foo res)))
(union res (gethash cat-1 gstarts)) )))) ))))
(defmacro first-terminals* (cat-list-0 cat-1)
`(let ((cat-list-0 ,cat-list-0)
(cat-1 ,cat-1))
(let ((key (cons cat-list-0 cat-1)))
(multiple-value-bind (v found?) (gethash key *first-terminals-cache*)
(cond (found? v)
(t (setf (gethash key *first-terminals-cache*)
(block foo
(let ((res nil))
(dolist (c0 cat-list-0)
(setf res (union res (gethash c0 gstarts)))
(unless (derives-epsilon c0)
(return-from foo res)))
(union res (gethash cat-1 gstarts)) )))) )))))
(defstruct item rule pos la)
(defmacro item-daughters (i) `(rule-daughters (item-rule ,i)))
(defmacro item-right (i) `(nthcdr (item-pos ,i) (item-daughters ,i)))
(defmacro item-equal (i1 i2)
`(and (eq (item-rule ,i1) (item-rule ,i2))
(fixnum= (item-pos ,i1) (item-pos ,i2))
(eq (item-la ,i1) (item-la ,i2))))
(defmacro item-core-equal (c1 c2)
"T if the cores of c1 and c2 are equal"
`(and (eq (item-rule ,c1) (item-rule ,c2))
(fixnum= (item-pos ,c1) (item-pos ,c2))))
(defun close-items (items)
"computes the closure of a set of items"
(declare (optimize (speed 3)))
(do ((to-do items))
((null to-do) items)
(let ((i (pop to-do)))
(let ((rgt (item-right i)))
(when rgt
(dolist (la (first-terminals* (rest rgt) (item-la i) ))
(dolist (r (expand (first rgt)))
(unless (dolist (i items)
(if (and (eq r (item-rule i))
(eq (item-la i) la)
(fixnum= (item-pos i) 0))
(return t)))
(let ((new (make-item :rule r :pos 0 :la la)))
(push new items)
(push new to-do))))))))))
(defun shift-items (items cat)
"shifts a set of items over cat"
(labels ((shift-item (item)
(if (eq (first (item-right item)) cat)
(make-item :rule (item-rule item)
:pos (1+ (item-pos item))
:la (item-la item)))))
(let ((new-items '()))
(dolist (i items)
(let ((n (shift-item i)))
(if n
(push n new-items))))
new-items)))
(defun items-right (items)
"returns the set of categories appearing to the right of the dot"
(let ((right '()))
(dolist (i items)
(let ((d (first (item-right i))))
(if (and d (not (find d right)))
(push d right))))
right))
(defun compact-items (items)
"collapses items with the same core to compact items"
(let ((soFar '()))
(dolist (i items)
(let ((ci (dolist (s soFar)
(if (item-core-equal s i)
(return s)))))
(if ci
(push (item-la i) (item-la ci))
(push (make-item :rule (item-rule i)
:pos (item-pos i)
:la (list (item-la i)))
soFar))))
(sort soFar #'<
:key #'(lambda (i) (rule-no (item-rule i))))))
(defmacro expand-citems (citems)
"expands a list of compact items into items"
`(let ((items '()))
(dolist (ci ,citems)
(dolist (la (item-la ci))
(push (make-item :rule (item-rule ci)
:pos (item-pos ci)
:la la)
items)))
items))
(defun subsumes-citems (ci1s ci2s)
"T if the sorted set of items ci2s subsumes the sorted set ci1s"
(and (fixnum= (length ci1s) (length ci2s))
(every #'(lambda (ci1 ci2)
(and (item-core-equal ci1 ci2)
(subsetp (item-la ci1) (item-la ci2))))
ci1s ci2s)))
(defun merge-citems (ci1s ci2s)
"Adds the las of ci1s to ci2s. ci2s should subsume ci1s"
(mapcar #'(lambda (ci1 ci2)
(setf (item-la ci2) (nunion (item-la ci1) (item-la ci2))))
ci1s ci2s)
ci2s)
(defstruct state name citems shifts conflict)
(defstruct shift cat where)
(defparameter nextStateNo -1)
( defun lookup ( )
" finds a state with the same core items as if it exits "
( and (= ( length ) ( length ( state - citems state ) ) )
( every # ' ( lambda ( )
( item - core - equal ) )
(defun lookup (citems)
"finds a state with the same core items as citems if it exits"
(dolist (state stateList)
(if (and (fixnum= (length citems) (length (state-citems state)))
(do ((ci1s citems (cdr ci1s))
(ci2s (state-citems state) (cdr ci2s)))
((null ci1s) t)
(unless (item-core-equal (car ci1s) (car ci2s))
(return nil))))
(return state))))
(defun add-state (citems)
"creates a new state and adds it to the state list"
(let ((newState
(make-state :name (intern (format nil "STATE-~D" (incf nextStateNo)))
:citems citems)))
(push newState stateList)
newState))
(defun get-state-name (items)
"returns the state name for this set of items"
(let* ((citems (compact-items items))
(state (lookup citems)))
(cond ((null state)
(setq state (add-state citems))
(build-state state items))
((subsumes-citems citems (state-citems state))
nil)
(t
(merge-citems citems (state-citems state))
(follow-state items)))
(state-name state)))
(defun build-state (state items)
"creates the states that this state can goto"
(let ((closure (close-items items)))
(dolist (cat (items-right closure))
(push (make-shift :cat cat
:where (get-state-name (shift-items closure cat)))
(state-shifts state)))))
(defun follow-state (items)
"percolates look-ahead onto descendant states of this state"
(let ((closure (close-items items)))
(dolist (cat (items-right closure))
(get-state-name (shift-items closure cat)))))
(defun build-table ()
"Actually builds the table"
(setq stateList '())
(setq nextStateNo -1)
(get-state-name (list (make-item :rule (make-rule :no 0
:mother *TOPCAT*
:daughters (list gstart))
:pos 0
:la *ENDMARKER*)))
(setq stateList (nreverse stateList)))
(defun print-table (stateList)
"Prints the state table"
(dolist (state stateList)
(format t "~%~%~a:" (state-name state))
(dolist (citem (state-citems state))
(format t "~% ~a -->~{ ~a~} .~{ ~a~}, ~{~a ~}"
(rule-mother (item-rule citem))
(subseq (rule-daughters (item-rule citem)) 0 (item-pos citem))
(subseq (rule-daughters (item-rule citem)) (item-pos citem))
(item-la citem)))
(dolist (shift (state-shifts state))
(format t "~% On ~a shift ~a" (shift-cat shift) (shift-where shift)))
(dolist (reduce (compact-items
(delete-if #'(lambda (i) (item-right i))
(close-items
(expand-citems (state-citems state))))))
(format t "~% On~{ ~a~} reduce~{ ~a~} --> ~a"
(item-la reduce)
(rule-daughters (item-rule reduce))
(rule-mother (item-rule reduce)))))
(format t "~%"))
(defun translate-State (state)
"translates a state into lisp code that could appear in a labels form"
(format T " ~(~S~)" (state-name state))
(let ((reduces (compact-items
(delete-if #'(lambda (i) (item-right i))
(close-items
(expand-citems (state-citems state))))))
(labels ((translateShift (shift)
(push (shift-cat shift) symbolsSoFar)
`(,(shift-cat shift)
,@(when *lalr-debug*
`((when *lalr-debug*
(princ ,(format nil "Shift ~a to ~a~%"
(shift-cat shift) (shift-where shift))))))
(shift-from #',(state-name state))
(,(shift-where shift))))
(translateReduce (item)
(when (intersection (item-la item) symbolsSoFar)
(format t "Warning, Not LALR(1)!!: ~a, ~a --> ~{~a ~}~%"
(state-name state)
(rule-mother (item-rule item))
(rule-daughters (item-rule item)))
(setf (item-la item)
(nset-difference (item-la item)
symbolsSoFar)))
(dolist (la (item-la item))
(push la symbolsSoFar))
`(,(item-la item)
,@(when *lalr-debug*
`((when *lalr-debug*
(princ ,(format nil "Reduce ~{~a ~} --> ~a~%"
(rule-daughters (item-rule item))
(rule-mother (item-rule item)))))))
(reduce-cat #',(state-name state)
',(rule-mother (item-rule item))
,(item-pos item)
,(rule-action (item-rule item))))))
`(,(state-name state) ()
(case (input-peek)
,@(mapcar #'translateShift (state-shifts state))
,@(mapcar #'translateReduce reduces)
(otherwise (funcall parse-error)))))))
next - input performs lexical analysis . It must return two values .
(defun build-parser (name)
"returns an lalr(1) parser. next-input must return 2 values!"
`(defun ,name (next-input parse-error)
(labels ((input-peek ()
(unless cat-la
(setf (values cat-la val-la) (funcall next-input)
cat-la (list cat-la)
val-la (list val-la)))
(first cat-la))
(shift-from (name)
(push name state-stack)
(pop cat-la)
(push (pop val-la) val-stack))
(reduce-cat (name cat ndaughters action)
(if (eq cat ',*TOPCAT*)
(pop val-stack)
(let ((daughter-values '())
(state name))
(dotimes (i ndaughters)
(push (pop val-stack) daughter-values)
(setq state (pop state-stack)))
(push cat cat-la)
(push (apply action daughter-values) val-la)
(funcall state))))
,@(mapcar #'translate-State stateList))
(,(state-name (first stateList)))))))
(defmacro define-grammar (name lex-forms &rest grammar)
(make-parser grammar lex-forms :eof :name name))
|
fea6dcc8265a0c298499f126c316cf6c6631bcb05dd772b1a83e74863fe591cb | spechub/Hets | Logic_Hybrid.hs | # LANGUAGE MultiParamTypeClasses , TypeSynonymInstances , FlexibleInstances #
|
Module : / Logic_Hybrid.hs
Description : Instance of class Logic for Hybrid CASL
Instance of class Logic for hybrid logic .
Module : ./Hybrid/Logic_Hybrid.hs
Description : Instance of class Logic for Hybrid CASL
Instance of class Logic for hybrid logic.
-}
module Hybrid.Logic_Hybrid where
import Logic.Logic
import Hybrid.AS_Hybrid
import Hybrid.HybridSign
import Hybrid.ATC_Hybrid ()
import Hybrid.Parse_AS
import Hybrid.Print_AS
import Hybrid.StatAna
import CASL.Sign
import CASL.Morphism
import CASL.SymbolMapAnalysis
import CASL.AS_Basic_CASL
import CASL.Parse_AS_Basic
import CASL.MapSentence
import CASL.SimplifySen
import CASL.SymbolParser
import CASL.Taxonomy
import CASL.ToDoc
import CASL.Logic_CASL ()
data Hybrid = Hybrid deriving Show
instance Language Hybrid where
description _ = "Hybrid CASL\n" ++
"Extends an abitrary logic with at/modal operators."
type HSign = Sign H_FORMULA HybridSign
type HybridMor = Morphism H_FORMULA HybridSign (DefMorExt HybridSign)
type HybridFORMULA = FORMULA H_FORMULA
instance SignExtension HybridSign where
isSubSignExtension = isSubHybridSign
instance Syntax Hybrid H_BASIC_SPEC Symbol SYMB_ITEMS SYMB_MAP_ITEMS where
parse_basic_spec Hybrid = Just $ basicSpec hybrid_reserved_words
parse_symb_items Hybrid = Just . const $ symbItems hybrid_reserved_words
parse_symb_map_items Hybrid = Just . const $ symbMapItems hybrid_reserved_words
-- Hybrid logic
map_H_FORMULA :: MapSen H_FORMULA HybridSign (DefMorExt HybridSign)
map_H_FORMULA mor (BoxOrDiamond b m f ps) =
let newM = case m of
Simple_mod _ -> m
Term_mod t -> let newT = mapTerm map_H_FORMULA mor t
in Term_mod newT
newF = mapSen map_H_FORMULA mor f
in BoxOrDiamond b newM newF ps
map_H_FORMULA mor (At n f ps) = At n (mapSen map_H_FORMULA mor f) ps
map_H_FORMULA mor (Univ n f ps) = Univ n (mapSen map_H_FORMULA mor f) ps
map_H_FORMULA mor (Exist n f ps) = Exist n (mapSen map_H_FORMULA mor f) ps
map_H_FORMULA _ (Here n ps) = Here n ps
instance Sentences Hybrid HybridFORMULA HSign HybridMor Symbol where
map_sen Hybrid h = return . mapSen map_H_FORMULA h
sym_of Hybrid = symOf
symmap_of Hybrid = morphismToSymbMap
sym_name Hybrid = symName
simplify_sen Hybrid = simplifySen minExpForm simHybrid
print_sign Hybrid sig = printSign
(printHybridSign $ simplifySen minExpForm simHybrid sig) sig
print_named Hybrid = printTheoryFormula
-- simplifySen for ExtFORMULA
simHybrid :: Sign H_FORMULA HybridSign -> H_FORMULA -> H_FORMULA
simHybrid sign (BoxOrDiamond b md form pos) =
let mod' = case md of
Term_mod term -> Term_mod $ rmTypesT minExpForm
simHybrid sign term
t -> t
in BoxOrDiamond b mod' (simplifySen minExpForm simHybrid sign form) pos
simHybrid sign (At n f ps) =
At n (simplifySen minExpForm simHybrid sign f) ps
simHybrid sign (Univ n f ps) =
Univ n (simplifySen minExpForm simHybrid sign f) ps
simHybrid sign (Exist n f ps) =
Exist n (simplifySen minExpForm simHybrid sign f) ps
simHybrid _ (Here n ps) = Here n ps
rmTypesExt :: a -> b -> b
rmTypesExt _ f = f
instance StaticAnalysis Hybrid H_BASIC_SPEC HybridFORMULA
SYMB_ITEMS SYMB_MAP_ITEMS
HSign
HybridMor
Symbol RawSymbol where
basic_analysis Hybrid = Just basicHybridAnalysis
stat_symb_map_items Hybrid = statSymbMapItems
stat_symb_items Hybrid = statSymbItems
symbol_to_raw Hybrid = symbolToRaw
id_to_raw Hybrid = idToRaw
matches Hybrid = CASL.Morphism.matches
empty_signature Hybrid = emptySign emptyHybridSign
signature_union Hybrid s = return . addSig addHybridSign s
intersection Hybrid s = return . interSig interHybridSign s
morphism_union Hybrid = plainMorphismUnion addHybridSign
final_union Hybrid = finalUnion addHybridSign
is_subsig Hybrid = isSubSig isSubHybridSign
subsig_inclusion Hybrid = sigInclusion emptyMorExt
cogenerated_sign Hybrid = cogeneratedSign emptyMorExt
generated_sign Hybrid = generatedSign emptyMorExt
induced_from_morphism Hybrid = inducedFromMorphism emptyMorExt
induced_from_to_morphism Hybrid = inducedFromToMorphism
emptyMorExt isSubHybridSign diffHybridSign
theory_to_taxonomy Hybrid = convTaxo
instance Logic Hybrid ()
H_BASIC_SPEC HybridFORMULA SYMB_ITEMS SYMB_MAP_ITEMS
HSign
HybridMor
Symbol RawSymbol () where
stability _ = Experimental
empty_proof_tree _ = ()
| null | https://raw.githubusercontent.com/spechub/Hets/fc26b4947bf52be6baf7819a75d7e7f127e290dd/Hybrid/Logic_Hybrid.hs | haskell | Hybrid logic
simplifySen for ExtFORMULA | # LANGUAGE MultiParamTypeClasses , TypeSynonymInstances , FlexibleInstances #
|
Module : / Logic_Hybrid.hs
Description : Instance of class Logic for Hybrid CASL
Instance of class Logic for hybrid logic .
Module : ./Hybrid/Logic_Hybrid.hs
Description : Instance of class Logic for Hybrid CASL
Instance of class Logic for hybrid logic.
-}
module Hybrid.Logic_Hybrid where
import Logic.Logic
import Hybrid.AS_Hybrid
import Hybrid.HybridSign
import Hybrid.ATC_Hybrid ()
import Hybrid.Parse_AS
import Hybrid.Print_AS
import Hybrid.StatAna
import CASL.Sign
import CASL.Morphism
import CASL.SymbolMapAnalysis
import CASL.AS_Basic_CASL
import CASL.Parse_AS_Basic
import CASL.MapSentence
import CASL.SimplifySen
import CASL.SymbolParser
import CASL.Taxonomy
import CASL.ToDoc
import CASL.Logic_CASL ()
data Hybrid = Hybrid deriving Show
instance Language Hybrid where
description _ = "Hybrid CASL\n" ++
"Extends an abitrary logic with at/modal operators."
type HSign = Sign H_FORMULA HybridSign
type HybridMor = Morphism H_FORMULA HybridSign (DefMorExt HybridSign)
type HybridFORMULA = FORMULA H_FORMULA
instance SignExtension HybridSign where
isSubSignExtension = isSubHybridSign
instance Syntax Hybrid H_BASIC_SPEC Symbol SYMB_ITEMS SYMB_MAP_ITEMS where
parse_basic_spec Hybrid = Just $ basicSpec hybrid_reserved_words
parse_symb_items Hybrid = Just . const $ symbItems hybrid_reserved_words
parse_symb_map_items Hybrid = Just . const $ symbMapItems hybrid_reserved_words
map_H_FORMULA :: MapSen H_FORMULA HybridSign (DefMorExt HybridSign)
map_H_FORMULA mor (BoxOrDiamond b m f ps) =
let newM = case m of
Simple_mod _ -> m
Term_mod t -> let newT = mapTerm map_H_FORMULA mor t
in Term_mod newT
newF = mapSen map_H_FORMULA mor f
in BoxOrDiamond b newM newF ps
map_H_FORMULA mor (At n f ps) = At n (mapSen map_H_FORMULA mor f) ps
map_H_FORMULA mor (Univ n f ps) = Univ n (mapSen map_H_FORMULA mor f) ps
map_H_FORMULA mor (Exist n f ps) = Exist n (mapSen map_H_FORMULA mor f) ps
map_H_FORMULA _ (Here n ps) = Here n ps
instance Sentences Hybrid HybridFORMULA HSign HybridMor Symbol where
map_sen Hybrid h = return . mapSen map_H_FORMULA h
sym_of Hybrid = symOf
symmap_of Hybrid = morphismToSymbMap
sym_name Hybrid = symName
simplify_sen Hybrid = simplifySen minExpForm simHybrid
print_sign Hybrid sig = printSign
(printHybridSign $ simplifySen minExpForm simHybrid sig) sig
print_named Hybrid = printTheoryFormula
-- | Simplify a hybrid formula for output: strip inferred types from
-- modality terms and recursively simplify embedded sentences.
simHybrid :: Sign H_FORMULA HybridSign -> H_FORMULA -> H_FORMULA
simHybrid sign frm = case frm of
    BoxOrDiamond b md form pos ->
        BoxOrDiamond b (simMod md) (simForm form) pos
    At n f ps -> At n (simForm f) ps
    Univ n f ps -> Univ n (simForm f) ps
    Exist n f ps -> Exist n (simForm f) ps
    here@(Here _ _) -> here  -- nominals need no simplification
  where
    -- simplify an embedded CASL sentence
    simForm = simplifySen minExpForm simHybrid sign
    -- strip types from a term modality; simple modalities pass through
    simMod (Term_mod term) = Term_mod $ rmTypesT minExpForm simHybrid sign term
    simMod t = t
-- | Extension formulas carry no types to remove: ignore the first
-- argument and return the formula unchanged.
rmTypesExt :: a -> b -> b
rmTypesExt = const id
-- | Static analysis for Hybrid: basic-spec analysis, symbol-map
-- analysis and all signature operations (union, intersection,
-- inclusion, generated/cogenerated signatures), delegated to the
-- generic CASL implementations instantiated with the hybrid extension.
instance StaticAnalysis Hybrid H_BASIC_SPEC HybridFORMULA
        SYMB_ITEMS SYMB_MAP_ITEMS
        HSign
        HybridMor
        Symbol RawSymbol where
    basic_analysis Hybrid = Just basicHybridAnalysis
    stat_symb_map_items Hybrid = statSymbMapItems
    stat_symb_items Hybrid = statSymbItems
    symbol_to_raw Hybrid = symbolToRaw
    id_to_raw Hybrid = idToRaw
    -- qualified to avoid clashing with the Logic class method
    matches Hybrid = CASL.Morphism.matches
    empty_signature Hybrid = emptySign emptyHybridSign
    signature_union Hybrid s = return . addSig addHybridSign s
    intersection Hybrid s = return . interSig interHybridSign s
    morphism_union Hybrid = plainMorphismUnion addHybridSign
    final_union Hybrid = finalUnion addHybridSign
    is_subsig Hybrid = isSubSig isSubHybridSign
    subsig_inclusion Hybrid = sigInclusion emptyMorExt
    cogenerated_sign Hybrid = cogeneratedSign emptyMorExt
    generated_sign Hybrid = generatedSign emptyMorExt
    induced_from_morphism Hybrid = inducedFromMorphism emptyMorExt
    induced_from_to_morphism Hybrid = inducedFromToMorphism
        emptyMorExt isSubHybridSign diffHybridSign
    theory_to_taxonomy Hybrid = convTaxo
-- | Tie everything together as a Hets logic. Sublogics and proof
-- trees are trivial ('()' placeholders); the logic is experimental.
instance Logic Hybrid ()
        H_BASIC_SPEC HybridFORMULA SYMB_ITEMS SYMB_MAP_ITEMS
        HSign
        HybridMor
        Symbol RawSymbol () where
    stability _ = Experimental
    empty_proof_tree _ = ()
|
c3e63b6bbf49e89fd32080dabddc616a9436ac48f1701fb2705e2975287a8c0e | aeternity/aeternity | aec_parent_chain_cache_tests.erl | %%%-------------------------------------------------------------------
( C ) 2022 , Aeternity Anstalt
%%% @doc
EUnit tests for aec_parent_chain_cache
%%% @end
%%%-------------------------------------------------------------------
-module(aec_parent_chain_cache_tests).

-include_lib("eunit/include/eunit.hrl").

%% The gen_server under test.
-define(TEST_MODULE, aec_parent_chain_cache).
%% Deterministic 32-byte ids used as staker pubkeys in the tests.
-define(ALICE, <<123450:32/unit:8>>).
-define(BOB, <<123451:32/unit:8>>).
-define(CAROL, <<123452:32/unit:8>>).
-define(DAVE, <<123453:32/unit:8>>).
%% Fixed child-chain genesis hash used by the aec_chain mock.
-define(GENESIS, <<42:32/unit:8>>).
%%%===================================================================
%%% Test cases
%%%===================================================================
%% EUnit fixture: cache following the child chain top, no commitments.
%% Each case runs with aec_chain (child top/genesis), aec_conductor,
%% the parent connector and event subscriptions mocked.
follow_child_chain_strategy_test_() ->
    {foreach,
     fun() ->
         meck:new(aec_chain, []),
         meck:expect(aec_chain, top_height, fun() -> 0 end),
         meck:expect(aec_chain, genesis_hash, fun() -> ?GENESIS end),
         meck:new(aec_conductor, []),
         mock_parent_connector(),
         mock_events()
     end,
     fun(_) ->
         unmock_events(),
         meck:unload(aec_chain),
         meck:unload(aec_conductor),
         unmock_parent_connector()
     end,
     [ {"Cache all the blocks above current child height", fun cache_all_above_child_height/0},
       {"Post cachable parent top", fun post_cachable_parent_top/0},
       {"Post non cachable parent top", fun post_non_cachable_parent_top/0},
       {"Post child top in the middle of caching heights", fun post_child_top_in_the_middle_of_cachable_heights/0},
       {"Configurable confirmation height", fun configurable_confirmation_height/0}
     ]}.
%% EUnit fixture: cache started with commitment publishing enabled.
%% Additionally mocks aetx_env (tx env/trees) and the staker modules so
%% the cache can decide which stakers post commitments.
produce_commitments_test_() ->
    {foreach,
     fun() ->
         meck:new(aec_chain, []),
         meck:expect(aec_chain, top_height, fun() -> 0 end),
         meck:expect(aec_chain, genesis_hash, fun() -> height_to_hash(0) end),
         meck:new(aec_conductor, []),
         meck:new(aetx_env, []),
         meck:expect(aetx_env, tx_env_and_trees_from_hash,
                     fun(_, _Hash) -> {tx_env, trees} end),
         mock_parent_connector(),
         mock_stakers(),
         mock_events()
     end,
     fun(_) ->
         unmock_events(),
         unmock_stakers(),
         meck:unload(aec_chain),
         meck:unload(aetx_env),
         meck:unload(aec_conductor),
         unmock_parent_connector()
     end,
     [ {"No commitments before the startheight", fun no_commitments_before_start/0},
       {"Post genesis commitments before start seing blocks on the child chain", fun post_initial_commitments/0},
       {"Post commitments according to child hash", fun post_commitments/0},
       {"No commitments if stopped", fun no_commitments_if_stopped/0},
       {"Stopping and starting block production dictates commitments emmitting", fun block_production_dictates_commitments/0}
     ]}.
%%%===================================================================
%%% Test cases
%%%===================================================================
%% A freshly started cache fetches all parent blocks from the target
%% top (ChildTop + StartHeight) down, keeping at most CacheMaxSize
%% blocks; heights outside that window are not cached.
cache_all_above_child_height() ->
    Test =
        fun(CacheMaxSize, StartHeight, ChildTop0) ->
            meck:expect(aec_chain, top_height, fun() -> ChildTop0 end),
            {ok, _CachePid} = start_cache(StartHeight, CacheMaxSize, _Confirmations = 1),
            timer:sleep(20),  %% let the async block fetches complete
            %% the cache is waiting for a new top, the cache is up to the target top
            ExpectedTopHeight = ChildTop0 + StartHeight,
            {ok, #{ child_start_height := StartHeight,
                    top_height := ExpectedTopHeight,
                    child_top_height := ChildTop0} = Res} = ?TEST_MODULE:get_state(),
            assert_child_cache_consistency(Res),
            %% below the sliding window and above the top: not cached
            {error, not_in_cache} = ?TEST_MODULE:get_block_by_height(ChildTop0
                                                                     + StartHeight
                                                                     - CacheMaxSize - 1),
            {error, not_in_cache} = ?TEST_MODULE:get_block_by_height(ExpectedTopHeight + 1),
            ?TEST_MODULE:stop()
        end,
    Test(20, 200, 0),
    Test(20, 200, 50),
    ok.
%% Posting a parent top that lies inside the cachable window makes the
%% cache extend up to it while staying internally consistent.
post_cachable_parent_top() ->
    Test =
        fun(CacheMaxSize, StartHeight, ChildTop0) ->
            meck:expect(aec_chain, top_height, fun() -> ChildTop0 end),
            {ok, _CachePid} = start_cache(StartHeight, CacheMaxSize, _Confirmations = 1),
            timer:sleep(20),
            %% the cache is waiting for a new top, the cache is up to the target top
            ExpectedTopHeight = ChildTop0 + StartHeight,
            MaxCachableHeight =
                fun(CurrentChildTop) -> CurrentChildTop + StartHeight + CacheMaxSize end,
            {ok, #{ child_start_height := StartHeight,
                    top_height := ExpectedTopHeight,
                    child_top_height := ChildTop0}} = ?TEST_MODULE:get_state(),
            %% post some top in the cache's range
            ParentTop = MaxCachableHeight(ChildTop0) - 2,
            ?TEST_MODULE:post_block(block_by_height(ParentTop)),
            timer:sleep(20),
            {ok, #{ child_start_height := StartHeight,
                    child_top_height := ChildTop0,
                    top_height := ParentTop} = Res} = ?TEST_MODULE:get_state(),
            assert_child_cache_consistency(Res),
            ?TEST_MODULE:stop()
        end,
    Test(20, 200, 0),
    Test(20, 200, 50),
    ok.
%% Posting a parent top beyond the cachable window: the cache follows
%% the new top but must still hold a consistent CacheMaxSize window.
post_non_cachable_parent_top() ->
    Test =
        fun(CacheMaxSize, StartHeight, ChildTop0) ->
            meck:expect(aec_chain, top_height, fun() -> ChildTop0 end),
            {ok, _CachePid} = start_cache(StartHeight, CacheMaxSize, _Confirmations = 1),
            timer:sleep(20),
            %% the cache is waiting for a new top, the cache is up to the target top
            ExpectedTopHeight = ChildTop0 + StartHeight,
            MaxCachableHeight =
                fun(CurrentChildTop) -> CurrentChildTop + StartHeight + CacheMaxSize end,
            {ok, #{ child_start_height := StartHeight,
                    top_height := ExpectedTopHeight,
                    child_top_height := ChildTop0}} = ?TEST_MODULE:get_state(),
            %% post some top beyond the cache's range
            ParentTop = MaxCachableHeight(ChildTop0) + 10,
            ?TEST_MODULE:post_block(block_by_height(ParentTop)),
            timer:sleep(20),
            {ok, #{ child_start_height := StartHeight,
                    child_top_height := ChildTop0,
                    top_height := ParentTop} = Res} = ?TEST_MODULE:get_state(),
            assert_child_cache_consistency(Res),
            ?TEST_MODULE:stop()
        end,
    Test(20, 200, 0),
    Test(20, 200, 50),
    ok.
%% While the parent top is ahead of the cachable range, a child top
%% update (top_changed event) moves the window and keeps it consistent.
post_child_top_in_the_middle_of_cachable_heights() ->
    Test =
        fun(CacheMaxSize, StartHeight, ChildTop0) ->
            meck:expect(aec_chain, top_height, fun() -> ChildTop0 end),
            {ok, CachePid} = start_cache(StartHeight, CacheMaxSize, _Confirmations = 1),
            timer:sleep(20),
            %% the cache is waiting for a new top, the cache is up to the target top
            ExpectedTopHeight = ChildTop0 + StartHeight,
            MaxCachableHeight =
                fun(CurrentChildTop) -> CurrentChildTop + StartHeight + CacheMaxSize end,
            {ok, #{ child_start_height := StartHeight,
                    top_height := ExpectedTopHeight,
                    child_top_height := ChildTop0}} = ?TEST_MODULE:get_state(),
            %% post some top beyond the cache's range
            ParentTop = MaxCachableHeight(ChildTop0) + 10,
            ?TEST_MODULE:post_block(block_by_height(ParentTop)),
            timer:sleep(20),
            {ok, #{ child_start_height := StartHeight,
                    child_top_height := ChildTop0,
                    top_height := ParentTop}} = ?TEST_MODULE:get_state(),
            %% advance the child chain by 10 blocks
            ChildTop1 = ChildTop0 + 10,
            child_new_top(CachePid, ChildTop1),
            timer:sleep(20),
            {ok, #{ child_start_height := StartHeight,
                    child_top_height := ChildTop2,
                    top_height := ParentTop} = Res} = ?TEST_MODULE:get_state(),
            %% the cache must have picked up the new child top
            {ChildTop1, ChildTop1} = {ChildTop1, ChildTop2},
            assert_child_cache_consistency(Res),
            ?TEST_MODULE:stop()
        end,
    Test(20, 200, 0),
    Test(20, 200, 50),
    ok.
%% The number of required parent-chain confirmations is configurable;
%% the cache behaves consistently for different confirmation counts.
configurable_confirmation_height() ->
    Test =
        fun(CacheMaxSize, StartHeight, ChildTop0, Confirmations) ->
            meck:expect(aec_chain, top_height, fun() -> ChildTop0 end),
            {ok, _CachePid} = start_cache(StartHeight, CacheMaxSize, Confirmations),
            timer:sleep(20),
            %% the cache is waiting for a new top, the cache is up to the target top
            ExpectedTopHeight = ChildTop0 + StartHeight,
            {ok, #{ child_start_height := StartHeight,
                    top_height := ExpectedTopHeight,
                    child_top_height := ChildTop0} = Res} = ?TEST_MODULE:get_state(),
            assert_child_cache_consistency(Res),
            {error, not_in_cache} = ?TEST_MODULE:get_block_by_height(ChildTop0 + StartHeight - CacheMaxSize - 1),
            {error, not_in_cache} = ?TEST_MODULE:get_block_by_height(ExpectedTopHeight + 1),
            ?TEST_MODULE:stop()
        end,
    Test(20, 200, 0, 1),
    Test(20, 200, 0, 10),
    ok.
%% While the parent chain is still below the configured start height,
%% no staker posts any commitment, even as parent blocks keep arriving.
no_commitments_before_start() ->
    CacheMaxSize = 20,
    StartHeight = 200,
    Confirmations = 10,
    ChildTop0 = 0,
    Offset = 10,  %% how far below StartHeight the parent top begins
    meck:expect(aec_chain, top_height, fun() -> ChildTop0 end),
    ParentTop = StartHeight - Offset,
    expect_stakers([?ALICE, ?BOB, ?CAROL]),
    expect_keys([?ALICE, ?BOB]),  %% only Alice and Bob hold local keys
    set_parent_chain_top(ParentTop),
    {ok, _CachePid} = start_cache(StartHeight, CacheMaxSize, Confirmations, true),
    %% feed parent blocks up to (but not reaching) the start height
    lists:foreach(
        fun(Idx) ->
            ParentHeight = ParentTop + Idx,
            set_parent_chain_top(ParentHeight),
            Block = block_by_height(ParentHeight),
            ?TEST_MODULE:post_block(Block),
            timer:sleep(10),
            %% ensure that the node is up to date with the parent chain
            {ok, #{ child_start_height := StartHeight,
                    top_height := ParentHeight,
                    child_top_height := ChildTop0} = _Res} = ?TEST_MODULE:get_state(),
            [] = collect_commitments(?ALICE),
            [] = collect_commitments(?BOB),
            [] = collect_commitments(?CAROL),
            [] = collect_commitments(?DAVE),
            meck:reset(aec_parent_connector),
            ok
        end,
        lists:seq(0, Offset - 1)),
    ?TEST_MODULE:stop(),
    ok.
%% Once the start height is reached but no child block has been seen
%% yet, local stakers (Alice, Bob) commit to the child genesis hash;
%% stakers without local keys (Carol) or unknown ones (Dave) post nothing.
post_initial_commitments() ->
    CacheMaxSize = 20,
    StartHeight = 200,
    Confirmations = 10,
    ChildTop0 = 0,
    meck:expect(aec_chain, top_height, fun() -> ChildTop0 end),
    ParentTop = StartHeight,
    expect_stakers([?ALICE, ?BOB, ?CAROL]),
    expect_keys([?ALICE, ?BOB]),
    set_parent_chain_top(ParentTop),
    {ok, _CachePid} = start_cache(StartHeight, CacheMaxSize, Confirmations, true),
    GenesisHash = aeser_api_encoder:encode(key_block_hash, height_to_hash(0)),
    %% populate the cache and start making commitments
    lists:foreach(
        fun(Idx) ->
            ParentHeight = ParentTop + Idx,
            set_parent_chain_top(ParentHeight),
            Block = block_by_height(ParentHeight),
            ?TEST_MODULE:post_block(Block),
            timer:sleep(10),
            %% ensure that the node is up to date with the parent chain
            {ok, #{ child_start_height := StartHeight,
                    top_height := ParentHeight,
                    child_top_height := ChildTop0}} = ?TEST_MODULE:get_state(),
            [GenesisHash] = collect_commitments(?ALICE),
            [GenesisHash] = collect_commitments(?BOB),
            [] = collect_commitments(?CAROL),
            [] = collect_commitments(?DAVE),
            meck:reset(aec_parent_connector),
            ok
        end,
        lists:seq(0, Confirmations - 1)),
    ?TEST_MODULE:stop(),
    ok.
%% As parent and child chains advance in lockstep, local stakers commit
%% to the current child top hash on every new parent block.
post_commitments() ->
    CacheMaxSize = 20,
    StartHeight = 200,
    Confirmations = 10,
    ChildTop = Confirmations,
    meck:expect(aec_chain, top_height, fun() -> ChildTop end),
    ParentTop = StartHeight + Confirmations,
    expect_stakers([?ALICE, ?BOB, ?CAROL]),
    expect_keys([?ALICE, ?BOB]),
    set_parent_chain_top(ParentTop),
    {ok, CachePid} = start_cache(StartHeight, CacheMaxSize, Confirmations, true),
    %% populate the cache and start making commitments
    lists:foreach(
        fun(Idx) ->
            ParentHeight = ParentTop + Idx,
            set_parent_chain_top(ParentHeight),
            ChildTop1 = ChildTop + Idx,
            meck:reset(aec_parent_connector),  %% forget previous commitment calls
            child_new_top(CachePid, ChildTop1),
            Block = block_by_height(ParentHeight),
            ?TEST_MODULE:post_block(Block),
            timer:sleep(10),
            %% ensure that the node is up to date with the parent chain and
            %% the child chain
            {ok, #{ child_start_height := StartHeight,
                    top_height := ParentHeight,
                    child_top_height := ChildTop1}} = ?TEST_MODULE:get_state(),
            Hash = aeser_api_encoder:encode(key_block_hash, height_to_hash(ChildTop1)),
            [Hash] = collect_commitments(?ALICE),
            [Hash] = collect_commitments(?BOB),
            [] = collect_commitments(?CAROL),
            [] = collect_commitments(?DAVE),
            ok
        end,
        lists:seq(0, 20)),
    ?TEST_MODULE:stop(),
    ok.
%% When the cache is started with commitment publishing disabled,
%% nobody posts commitments no matter how the chains advance.
no_commitments_if_stopped() ->
    CacheMaxSize = 20,
    StartHeight = 200,
    Confirmations = 10,
    ChildTop = Confirmations,
    meck:expect(aec_chain, top_height, fun() -> ChildTop end),
    ParentTop = StartHeight + Confirmations,
    expect_stakers([?ALICE, ?BOB, ?CAROL]),
    expect_keys([?ALICE, ?BOB]),
    set_parent_chain_top(ParentTop),
    %% note: publishing disabled (last argument = false)
    {ok, CachePid} = start_cache(StartHeight, CacheMaxSize, Confirmations, false),
    lists:foreach(
        fun(Idx) ->
            ParentHeight = ParentTop + Idx,
            set_parent_chain_top(ParentHeight),
            ChildTop1 = ChildTop + Idx,
            meck:reset(aec_parent_connector),
            child_new_top(CachePid, ChildTop1),
            Block = block_by_height(ParentHeight),
            ?TEST_MODULE:post_block(Block),
            timer:sleep(10),
            %% ensure that the node is up to date with the parent chain and
            %% the child chain
            {ok, #{ child_start_height := StartHeight,
                    top_height := ParentHeight,
                    child_top_height := ChildTop1}} = ?TEST_MODULE:get_state(),
            [] = collect_commitments(?ALICE),
            [] = collect_commitments(?BOB),
            [] = collect_commitments(?CAROL),
            [] = collect_commitments(?DAVE),
            ok
        end,
        lists:seq(0, 20)),
    ?TEST_MODULE:stop(),
    ok.
%% Commitments are emitted while block production is on, stop when a
%% stop_mining event arrives, and resume on start_mining.
block_production_dictates_commitments() ->
    CacheMaxSize = 20,
    StartHeight = 200,
    Confirmations = 10,
    ChildTop = Confirmations,
    meck:expect(aec_chain, top_height, fun() -> ChildTop end),
    ParentTop = StartHeight + Confirmations,
    expect_stakers([?ALICE, ?BOB, ?CAROL]),
    expect_keys([?ALICE, ?BOB]),
    set_parent_chain_top(ParentTop),
    {ok, CachePid} = start_cache(StartHeight, CacheMaxSize, Confirmations, true),
    %% populate the cache and start making commitments
    lists:foreach(
        fun(Idx) ->
            ParentHeight = ParentTop + Idx,
            set_parent_chain_top(ParentHeight),
            ChildTop1 = ChildTop + Idx,
            meck:reset(aec_parent_connector),
            child_new_top(CachePid, ChildTop1),
            Block = block_by_height(ParentHeight),
            ?TEST_MODULE:post_block(Block),
            timer:sleep(10),
            %% ensure that the node is up to date with the parent chain and
            %% the child chain
            {ok, #{ child_start_height := StartHeight,
                    top_height := ParentHeight,
                    child_top_height := ChildTop1}} = ?TEST_MODULE:get_state(),
            Hash = aeser_api_encoder:encode(key_block_hash, height_to_hash(ChildTop1)),
            [Hash] = collect_commitments(?ALICE),
            [Hash] = collect_commitments(?BOB),
            [] = collect_commitments(?CAROL),
            [] = collect_commitments(?DAVE),
            ok
        end,
        lists:seq(0, 20)),
    %% stop block production
    ParentTop1 = ParentTop + 20,
    CachePid ! {gproc_ps_event, stop_mining, unused},
    lists:foreach(
        fun(Idx) ->
            ParentHeight = ParentTop1 + Idx,
            set_parent_chain_top(ParentHeight),
            ChildTop1 = ChildTop + Idx,
            meck:reset(aec_parent_connector),
            child_new_top(CachePid, ChildTop1),
            Block = block_by_height(ParentHeight),
            ?TEST_MODULE:post_block(Block),
            timer:sleep(10),
            %% ensure that the node is up to date with the parent chain and
            %% the child chain
            {ok, #{ child_start_height := StartHeight,
                    top_height := ParentHeight,
                    child_top_height := _ChildTop1}} = ?TEST_MODULE:get_state(),
            %% no commitments from anyone while mining is stopped
            [] = collect_commitments(?ALICE),
            [] = collect_commitments(?BOB),
            [] = collect_commitments(?CAROL),
            [] = collect_commitments(?DAVE),
            ok
        end,
        lists:seq(0, 20)),
    %% start block production
    ParentTop2 = ParentTop1 + 20,
    CachePid ! {gproc_ps_event, start_mining, unused},
    lists:foreach(
        fun(Idx) ->
            ParentHeight = ParentTop2 + Idx,
            set_parent_chain_top(ParentHeight),
            ChildTop1 = ChildTop + Idx,
            meck:reset(aec_parent_connector),
            child_new_top(CachePid, ChildTop1),
            Block = block_by_height(ParentHeight),
            ?TEST_MODULE:post_block(Block),
            timer:sleep(10),
            %% ensure that the node is up to date with the parent chain and
            %% the child chain
            {ok, #{ child_start_height := StartHeight,
                    top_height := ParentHeight,
                    child_top_height := ChildTop1}} = ?TEST_MODULE:get_state(),
            Hash = aeser_api_encoder:encode(key_block_hash, height_to_hash(ChildTop1)),
            [Hash] = collect_commitments(?ALICE),
            [Hash] = collect_commitments(?BOB),
            [] = collect_commitments(?CAROL),
            [] = collect_commitments(?DAVE),
            ok
        end,
        lists:seq(0, 20)),
    ?TEST_MODULE:stop(),
    ok.
%%%===================================================================
%%% Helper functions
%%%===================================================================
%% Start the cache under test with commitment publishing disabled.
start_cache(StartHeight, MaxSize, Confirmations) ->
    start_cache(StartHeight, MaxSize, Confirmations, false).
%% Start the cache gen_server, registered locally under the module name,
%% optionally publishing commitments.
start_cache(StartHeight, MaxSize, Confirmations, IsPublishingCommitments) ->
    gen_server:start({local, ?TEST_MODULE}, ?TEST_MODULE,
                     [StartHeight, MaxSize, Confirmations, IsPublishingCommitments],
                     []).
%% Deterministic fake 32-byte block "hash" derived from a height;
%% negative heights clamp to the genesis hash (height 0).
height_to_hash(Height) when is_integer(Height) ->
    H = max(Height, 0),
    <<H:32/unit:8>>.
%% hash_to_height(Hash) ->
%%     MeaningfulBytes = [B || B <- binary_to_list(Hash), B =/= 0],
%%     {Height, _} =
%%         lists:foldr( %% NB: we go right to left!
%%             fun(B, {AccumHeight, ByteIdx}) ->
%%                 {B * trunc(math:pow(8, ByteIdx)) + AccumHeight, ByteIdx + 1}
%%             end,
%%             {0, 0},
%%             MeaningfulBytes),
%%     Height.
%% Build a fake parent block for Height carrying the given commitments.
block_by_height(Height, Commitments) ->
    aec_parent_chain_block:set_commitments(block_by_height(Height), Commitments).
%% Build a fake parent block whose hash and prev-hash are derived
%% deterministically from the height.
block_by_height(Height) ->
    aec_parent_chain_block:new(height_to_hash(Height),
                               Height,
                               height_to_hash(Height - 1)).
%% block_by_hash(Hash) ->
%% Height = hash_to_height(Hash),
%% block_by_height(Height).
%% Mock the parent connector: block requests are answered asynchronously
%% (via post_block from a spawned process) or synchronously (fetch),
%% always with a deterministic block; commitment posting is a no-op.
mock_parent_connector() ->
    meck:new(aec_parent_connector, []),
    meck:expect(aec_parent_connector, request_block_by_height,
        fun(Height) ->
            %% deliver asynchronously, as the real connector would
            spawn(
                fun() ->
                    Block = block_by_height(Height),
                    ?TEST_MODULE:post_block(Block)
                end)
        end),
    meck:expect(aec_parent_connector, fetch_block_by_height,
        fun(Height) ->
            Block = block_by_height(Height),
            {ok, Block}
        end),
    meck:expect(aec_parent_connector, request_top,
        fun() -> ok end),
    meck:expect(aec_parent_connector, post_commitment,
        fun(_Who, _What) -> ok end),
    ok.
%% Create the staker-related mocks; expectations are installed later
%% via expect_stakers/1 and expect_keys/1.
mock_stakers() ->
    meck:new(aec_consensus_hc, []),
    meck:new(aec_preset_keys, []),
    ok.
%% Unload the staker-related mocks (reverse order of creation).
unmock_stakers() ->
    meck:unload(aec_preset_keys),
    meck:unload(aec_consensus_hc),
    ok.
%% Declare the set of parent-chain validators (stakers) for the tests.
expect_stakers(StakerList) ->
    meck:expect(aec_consensus_hc, parent_chain_validators,
                fun(_TxEnv, _Trees) -> {ok, StakerList} end),
    ok.
%% Declare which staker pubkeys have locally available keys; only those
%% can sign and post commitments.
expect_keys(PubkeyList) ->
    meck:expect(aec_preset_keys, is_key_present,
                fun(Pubkey) -> lists:member(Pubkey, PubkeyList) end),
    ok.
%% Re-mock the parent connector so that requests above TopHeight fail
%% (the block does not exist yet) and requests at or below it succeed.
set_parent_chain_top(TopHeight) ->
    meck:expect(aec_parent_connector, request_block_by_height,
        fun(RequestedHeight) when RequestedHeight > TopHeight ->
                ok;  %% silently no response, as for a not-yet-mined block
           (RequestedHeight) ->
                spawn(
                    fun() ->
                        Block = block_by_height(RequestedHeight),
                        ?TEST_MODULE:post_block(Block)
                    end)
        end),
    meck:expect(aec_parent_connector, fetch_block_by_height,
        fun(RequestedHeight) when RequestedHeight > TopHeight ->
                {error, not_found};
           (RequestedHeight) ->
                Block = block_by_height(RequestedHeight),
                {ok, Block}
        end),
    ok.
%% Unload the parent connector mock.
unmock_parent_connector() ->
    meck:unload(aec_parent_connector).
%% Mock event subscription for the three events the cache subscribes to.
mock_events() ->
    meck:new(aec_events, []),
    meck:expect(aec_events, subscribe,
        fun(top_changed) -> ok;
           (start_mining) -> ok;
           (stop_mining) -> ok
        end),
    ok.
%% Unload the events mock.
unmock_events() ->
    meck:unload(aec_events).
%% Simulate a child-chain top_changed event (as gproc would deliver it)
%% for a key block at the given height.
child_new_top(CachePid, Height) ->
    Hash = height_to_hash(Height),
    CachePid ! {gproc_ps_event, top_changed, #{info => #{block_type => key,
                                                         block_hash => Hash,
                                                         height => Height}}}.
%% Assert that the cache state is internally consistent: it holds
%% exactly CacheMaxSize contiguous blocks ending at the expected window
%% end, and every cached height is retrievable - either as a confirmed
%% block or, when not yet mature, as not_enough_confirmations.
%% Fix: the per-height loop used lists:seq(CacheExpectedEnd,
%% CacheExpectedEnd) and therefore only ever checked the single topmost
%% height; it now walks the whole cached range. The second case clause
%% is also guarded so a mature block reported as unconfirmed fails.
assert_child_cache_consistency(#{ child_start_height := StartHeight,
                                  child_top_height := ChildTop,
                                  blocks := Blocks,
                                  max_size := CacheMaxSize,
                                  pc_confirmations := Confirmations,
                                  top_height := TopHeight}) ->
    ?assertEqual(CacheMaxSize, map_size(Blocks)),
    CacheExpectedStart = min(ChildTop + StartHeight, TopHeight - CacheMaxSize + 1),
    ?assertEqual(CacheExpectedStart, lists:min(maps:keys(Blocks))),
    CacheExpectedEnd = CacheExpectedStart + CacheMaxSize - 1,
    ?assertEqual(CacheExpectedEnd, lists:max(maps:keys(Blocks))),
    lists:foreach(
        fun(Height) ->
            {true, Height} = {maps:is_key(Height, Blocks), Height},
            IsMature = TopHeight - Confirmations >= Height,
            Block = block_by_height(Height),
            case ?TEST_MODULE:get_block_by_height(Height) of
                {ok, Block} when IsMature -> ok;
                {error, {not_enough_confirmations, Block}} when not IsMature -> ok
            end,
            ok
        end,
        lists:seq(CacheExpectedStart, CacheExpectedEnd)),
    ok.
%% Extract, from meck's call history, the hashes this staker has posted
%% as commitments since the last meck:reset.
collect_commitments(Staker) ->
    [Hash || {_Pid, {_M, _F, [Who, Hash]}, _Res}
                 <- filter_meck_events(aec_parent_connector, post_commitment),
             Who =:= Staker].
%% Return the meck history entries for calls to Module:Function.
filter_meck_events(Module, Function) ->
    [Event || {_Pid, {M, F, _Args}, _Res} = Event <- meck:history(Module),
              M =:= Module, F =:= Function].
%% Re-mock block requests to serve plain blocks (no commitments).
%% NOTE(review): the _BlockHashesMap argument is ignored; presumably a
%% per-height commitment map was intended - confirm against callers.
mock_commitments_list(_BlockHashesMap) ->
    meck:expect(aec_parent_connector, request_block_by_height,
        fun(Height) ->
            spawn(
                fun() ->
                    Block = block_by_height(Height),
                    ?TEST_MODULE:post_block(Block)
                end)
        end).
%% Re-mock block requests so that every served block carries the same
%% commitment list L.
mock_commitments_list(all, L) ->
    meck:expect(aec_parent_connector, request_block_by_height,
        fun(Height) ->
            spawn(
                fun() ->
                    Block0 = block_by_height(Height),
                    Block = aec_parent_chain_block:set_commitments(Block0, L),
                    ?TEST_MODULE:post_block(Block)
                end)
        end).
| null | https://raw.githubusercontent.com/aeternity/aeternity/1c819d843c44baf554d97f29dc3f86f2b530c59c/apps/aecore/test/aec_parent_chain_cache_tests.erl | erlang | -------------------------------------------------------------------
@doc
@end
-------------------------------------------------------------------
===================================================================
Test cases
===================================================================
===================================================================
Test cases
===================================================================
the cache is waiting for a new top, the cache is up to the target top
the cache is waiting for a new top, the cache is up to the target top
post some top in the cache's range
the cache is waiting for a new top, the cache is up to the target top
post some top in the cache's range
the cache is waiting for a new top, the cache is up to the target top
post some top in the cache's range
the cache is waiting for a new top, the cache is up to the target top
populate the cache and start making commitments
ensure that the node is up to date with the parent chain
populate the cache and start making commitments
ensure that the node is up to date with the parent chain
populate the cache and start making commitments
ensure that the node is up to date with the parent chain and
the child chain
populate the cache and start making commitments
ensure that the node is up to date with the parent chain and
the child chain
populate the cache and start making commitments
ensure that the node is up to date with the parent chain and
the child chain
stop block production
ensure that the node is up to date with the parent chain and
the child chain
start block production
ensure that the node is up to date with the parent chain and
the child chain
===================================================================
Helper functions
===================================================================
hash_to_height(Hash) ->
MeaningfulBytes = [B || B <- binary_to_list(Hash), B =/= 0],
{Height, _} =
lists:foldr( %% NB: we go right to left!
end,
{0, 0},
Height.
block_by_hash(Hash) ->
Height = hash_to_height(Hash),
block_by_height(Height). | ( C ) 2022 , Aeternity Anstalt
EUnit tests for aec_parent_chain_cache
-module(aec_parent_chain_cache_tests).
-include_lib("eunit/include/eunit.hrl").
-define(TEST_MODULE, aec_parent_chain_cache).
-define(ALICE, <<123450:32/unit:8>>).
-define(BOB, <<123451:32/unit:8>>).
-define(CAROL, <<123452:32/unit:8>>).
-define(DAVE, <<123453:32/unit:8>>).
-define(GENESIS, <<42:32/unit:8>>).
follow_child_chain_strategy_test_() ->
{foreach,
fun() ->
meck:new(aec_chain, []),
meck:expect(aec_chain, top_height, fun() -> 0 end),
meck:expect(aec_chain, genesis_hash, fun() -> ?GENESIS end),
meck:new(aec_conductor, []),
mock_parent_connector(),
mock_events()
end,
fun(_) ->
unmock_events(),
meck:unload(aec_chain),
meck:unload(aec_conductor),
unmock_parent_connector()
end,
[ {"Cache all the blocks above current child height", fun cache_all_above_child_height/0},
{"Post cachable parent top", fun post_cachable_parent_top/0},
{"Post non cachable parent top", fun post_non_cachable_parent_top/0},
{"Post child top in the middle of caching heights", fun post_child_top_in_the_middle_of_cachable_heights/0},
{"Configurable confirmation height", fun configurable_confirmation_height/0}
]}.
produce_commitments_test_() ->
{foreach,
fun() ->
meck:new(aec_chain, []),
meck:expect(aec_chain, top_height, fun() -> 0 end),
meck:expect(aec_chain, genesis_hash, fun() -> height_to_hash(0) end),
meck:new(aec_conductor, []),
meck:new(aetx_env, []),
meck:expect(aetx_env, tx_env_and_trees_from_hash,
fun(_, _Hash) -> {tx_env, trees} end),
mock_parent_connector(),
mock_stakers(),
mock_events()
end,
fun(_) ->
unmock_events(),
unmock_stakers(),
meck:unload(aec_chain),
meck:unload(aetx_env),
meck:unload(aec_conductor),
unmock_parent_connector()
end,
[ {"No commitments before the startheight", fun no_commitments_before_start/0},
{"Post genesis commitments before start seing blocks on the child chain", fun post_initial_commitments/0},
{"Post commitments according to child hash", fun post_commitments/0},
{"No commitments if stopped", fun no_commitments_if_stopped/0},
{"Stopping and starting block production dictates commitments emmitting", fun block_production_dictates_commitments/0}
]}.
cache_all_above_child_height() ->
Test =
fun(CacheMaxSize, StartHeight, ChildTop0) ->
meck:expect(aec_chain, top_height, fun() -> ChildTop0 end),
{ok, _CachePid} = start_cache(StartHeight, CacheMaxSize, _Confirmations = 1),
timer:sleep(20),
ExpectedTopHeight = ChildTop0 + StartHeight,
{ok, #{ child_start_height := StartHeight,
top_height := ExpectedTopHeight,
child_top_height := ChildTop0} = Res} = ?TEST_MODULE:get_state(),
assert_child_cache_consistency(Res),
{error, not_in_cache} = ?TEST_MODULE:get_block_by_height(ChildTop0
+
StartHeight
- CacheMaxSize - 1),
{error, not_in_cache} = ?TEST_MODULE:get_block_by_height(ExpectedTopHeight + 1),
?TEST_MODULE:stop()
end,
Test(20, 200, 0),
Test(20, 200, 50),
ok.
post_cachable_parent_top() ->
Test =
fun(CacheMaxSize, StartHeight, ChildTop0) ->
meck:expect(aec_chain, top_height, fun() -> ChildTop0 end),
{ok, _CachePid} = start_cache(StartHeight, CacheMaxSize, _Confirmations = 1),
timer:sleep(20),
ExpectedTopHeight = ChildTop0 + StartHeight,
MaxCachableHeight =
fun(CurrentChildTop) -> CurrentChildTop + StartHeight + CacheMaxSize end,
{ok, #{ child_start_height := StartHeight,
top_height := ExpectedTopHeight,
child_top_height := ChildTop0}} = ?TEST_MODULE:get_state(),
ParentTop = MaxCachableHeight(ChildTop0) - 2,
?TEST_MODULE:post_block(block_by_height(ParentTop)),
timer:sleep(20),
{ok, #{ child_start_height := StartHeight,
child_top_height := ChildTop0,
top_height := ParentTop} = Res} = ?TEST_MODULE:get_state(),
assert_child_cache_consistency(Res),
?TEST_MODULE:stop()
end,
Test(20, 200, 0),
Test(20, 200, 50),
ok.
post_non_cachable_parent_top() ->
Test =
fun(CacheMaxSize, StartHeight, ChildTop0) ->
meck:expect(aec_chain, top_height, fun() -> ChildTop0 end),
{ok, _CachePid} = start_cache(StartHeight, CacheMaxSize, _Confirmations = 1),
timer:sleep(20),
ExpectedTopHeight = ChildTop0 + StartHeight,
MaxCachableHeight =
fun(CurrentChildTop) -> CurrentChildTop + StartHeight + CacheMaxSize end,
{ok, #{ child_start_height := StartHeight,
top_height := ExpectedTopHeight,
child_top_height := ChildTop0}} = ?TEST_MODULE:get_state(),
ParentTop = MaxCachableHeight(ChildTop0) + 10,
?TEST_MODULE:post_block(block_by_height(ParentTop)),
timer:sleep(20),
{ok, #{ child_start_height := StartHeight,
child_top_height := ChildTop0,
top_height := ParentTop} = Res} = ?TEST_MODULE:get_state(),
assert_child_cache_consistency(Res),
?TEST_MODULE:stop()
end,
Test(20, 200, 0),
Test(20, 200, 50),
ok.
post_child_top_in_the_middle_of_cachable_heights() ->
Test =
fun(CacheMaxSize, StartHeight, ChildTop0) ->
meck:expect(aec_chain, top_height, fun() -> ChildTop0 end),
{ok, CachePid} = start_cache(StartHeight, CacheMaxSize, _Confirmations = 1),
timer:sleep(20),
ExpectedTopHeight = ChildTop0 + StartHeight,
MaxCachableHeight =
fun(CurrentChildTop) -> CurrentChildTop + StartHeight + CacheMaxSize end,
{ok, #{ child_start_height := StartHeight,
top_height := ExpectedTopHeight,
child_top_height := ChildTop0}} = ?TEST_MODULE:get_state(),
ParentTop = MaxCachableHeight(ChildTop0) + 10,
?TEST_MODULE:post_block(block_by_height(ParentTop)),
timer:sleep(20),
{ok, #{ child_start_height := StartHeight,
child_top_height := ChildTop0,
top_height := ParentTop}} = ?TEST_MODULE:get_state(),
ChildTop1 = ChildTop0 + 10,
child_new_top(CachePid, ChildTop1),
timer:sleep(20),
{ok, #{ child_start_height := StartHeight,
child_top_height := ChildTop2,
top_height := ParentTop} = Res} = ?TEST_MODULE:get_state(),
{ChildTop1, ChildTop1} = {ChildTop1, ChildTop2},
assert_child_cache_consistency(Res),
?TEST_MODULE:stop()
end,
Test(20, 200, 0),
Test(20, 200, 50),
ok.
configurable_confirmation_height() ->
Test =
fun(CacheMaxSize, StartHeight, ChildTop0, Confirmations) ->
meck:expect(aec_chain, top_height, fun() -> ChildTop0 end),
{ok, _CachePid} = start_cache(StartHeight, CacheMaxSize, Confirmations),
timer:sleep(20),
ExpectedTopHeight = ChildTop0 + StartHeight,
{ok, #{ child_start_height := StartHeight,
top_height := ExpectedTopHeight,
child_top_height := ChildTop0} = Res} = ?TEST_MODULE:get_state(),
assert_child_cache_consistency(Res),
{error, not_in_cache} = ?TEST_MODULE:get_block_by_height(ChildTop0 + StartHeight - CacheMaxSize - 1),
{error, not_in_cache} = ?TEST_MODULE:get_block_by_height(ExpectedTopHeight + 1),
?TEST_MODULE:stop()
end,
Test(20, 200, 0, 1),
Test(20, 200, 0, 10),
ok.
no_commitments_before_start() ->
CacheMaxSize = 20,
StartHeight = 200,
Confirmations = 10,
ChildTop0 = 0,
Offset = 10,
meck:expect(aec_chain, top_height, fun() -> ChildTop0 end),
ParentTop = StartHeight - Offset,
expect_stakers([?ALICE, ?BOB, ?CAROL]),
expect_keys([?ALICE, ?BOB]),
set_parent_chain_top(ParentTop),
{ok, _CachePid} = start_cache(StartHeight, CacheMaxSize, Confirmations, true),
lists:foreach(
fun(Idx) ->
ParentHeight = ParentTop + Idx,
set_parent_chain_top(ParentHeight),
Block = block_by_height(ParentHeight),
?TEST_MODULE:post_block(Block),
timer:sleep(10),
{ok, #{ child_start_height := StartHeight,
top_height := ParentHeight,
child_top_height := ChildTop0} = _Res} = ?TEST_MODULE:get_state(),
[] = collect_commitments(?ALICE),
[] = collect_commitments(?BOB),
[] = collect_commitments(?CAROL),
[] = collect_commitments(?DAVE),
meck:reset(aec_parent_connector),
ok
end,
lists:seq(0, Offset - 1)),
?TEST_MODULE:stop(),
ok.
post_initial_commitments() ->
CacheMaxSize = 20,
StartHeight = 200,
Confirmations = 10,
ChildTop0 = 0,
meck:expect(aec_chain, top_height, fun() -> ChildTop0 end),
ParentTop = StartHeight,
expect_stakers([?ALICE, ?BOB, ?CAROL]),
expect_keys([?ALICE, ?BOB]),
set_parent_chain_top(ParentTop),
{ok, _CachePid} = start_cache(StartHeight, CacheMaxSize, Confirmations, true),
GenesisHash = aeser_api_encoder:encode(key_block_hash, height_to_hash(0)),
lists:foreach(
fun(Idx) ->
ParentHeight = ParentTop + Idx,
set_parent_chain_top(ParentHeight),
Block = block_by_height(ParentHeight),
?TEST_MODULE:post_block(Block),
timer:sleep(10),
{ok, #{ child_start_height := StartHeight,
top_height := ParentHeight,
child_top_height := ChildTop0}} = ?TEST_MODULE:get_state(),
[GenesisHash] = collect_commitments(?ALICE),
[GenesisHash] = collect_commitments(?BOB),
[] = collect_commitments(?CAROL),
[] = collect_commitments(?DAVE),
meck:reset(aec_parent_connector),
ok
end,
lists:seq(0, Confirmations - 1)),
?TEST_MODULE:stop(),
ok.
post_commitments() ->
CacheMaxSize = 20,
StartHeight = 200,
Confirmations = 10,
ChildTop = Confirmations,
meck:expect(aec_chain, top_height, fun() -> ChildTop end),
ParentTop = StartHeight + Confirmations,
expect_stakers([?ALICE, ?BOB, ?CAROL]),
expect_keys([?ALICE, ?BOB]),
set_parent_chain_top(ParentTop),
{ok, CachePid} = start_cache(StartHeight, CacheMaxSize, Confirmations, true),
lists:foreach(
fun(Idx) ->
ParentHeight = ParentTop + Idx,
set_parent_chain_top(ParentHeight),
ChildTop1 = ChildTop + Idx,
meck:reset(aec_parent_connector),
child_new_top(CachePid, ChildTop1),
Block = block_by_height(ParentHeight),
?TEST_MODULE:post_block(Block),
timer:sleep(10),
{ok, #{ child_start_height := StartHeight,
top_height := ParentHeight,
child_top_height := ChildTop1}} = ?TEST_MODULE:get_state(),
Hash = aeser_api_encoder:encode(key_block_hash, height_to_hash(ChildTop1)),
[Hash] = collect_commitments(?ALICE),
[Hash] = collect_commitments(?BOB),
[] = collect_commitments(?CAROL),
[] = collect_commitments(?DAVE),
ok
end,
lists:seq(0, 20)),
?TEST_MODULE:stop(),
ok.
no_commitments_if_stopped() ->
CacheMaxSize = 20,
StartHeight = 200,
Confirmations = 10,
ChildTop = Confirmations,
meck:expect(aec_chain, top_height, fun() -> ChildTop end),
ParentTop = StartHeight + Confirmations,
expect_stakers([?ALICE, ?BOB, ?CAROL]),
expect_keys([?ALICE, ?BOB]),
set_parent_chain_top(ParentTop),
{ok, CachePid} = start_cache(StartHeight, CacheMaxSize, Confirmations, false),
lists:foreach(
fun(Idx) ->
ParentHeight = ParentTop + Idx,
set_parent_chain_top(ParentHeight),
ChildTop1 = ChildTop + Idx,
meck:reset(aec_parent_connector),
child_new_top(CachePid, ChildTop1),
Block = block_by_height(ParentHeight),
?TEST_MODULE:post_block(Block),
timer:sleep(10),
{ok, #{ child_start_height := StartHeight,
top_height := ParentHeight,
child_top_height := ChildTop1}} = ?TEST_MODULE:get_state(),
[] = collect_commitments(?ALICE),
[] = collect_commitments(?BOB),
[] = collect_commitments(?CAROL),
[] = collect_commitments(?DAVE),
ok
end,
lists:seq(0, 20)),
?TEST_MODULE:stop(),
ok.
block_production_dictates_commitments() ->
CacheMaxSize = 20,
StartHeight = 200,
Confirmations = 10,
ChildTop = Confirmations,
meck:expect(aec_chain, top_height, fun() -> ChildTop end),
ParentTop = StartHeight + Confirmations,
expect_stakers([?ALICE, ?BOB, ?CAROL]),
expect_keys([?ALICE, ?BOB]),
set_parent_chain_top(ParentTop),
{ok, CachePid} = start_cache(StartHeight, CacheMaxSize, Confirmations, true),
lists:foreach(
fun(Idx) ->
ParentHeight = ParentTop + Idx,
set_parent_chain_top(ParentHeight),
ChildTop1 = ChildTop + Idx,
meck:reset(aec_parent_connector),
child_new_top(CachePid, ChildTop1),
Block = block_by_height(ParentHeight),
?TEST_MODULE:post_block(Block),
timer:sleep(10),
{ok, #{ child_start_height := StartHeight,
top_height := ParentHeight,
child_top_height := ChildTop1}} = ?TEST_MODULE:get_state(),
Hash = aeser_api_encoder:encode(key_block_hash, height_to_hash(ChildTop1)),
[Hash] = collect_commitments(?ALICE),
[Hash] = collect_commitments(?BOB),
[] = collect_commitments(?CAROL),
[] = collect_commitments(?DAVE),
ok
end,
lists:seq(0, 20)),
ParentTop1 = ParentTop + 20,
CachePid ! {gproc_ps_event, stop_mining, unused},
lists:foreach(
fun(Idx) ->
ParentHeight = ParentTop1 + Idx,
set_parent_chain_top(ParentHeight),
ChildTop1 = ChildTop + Idx,
meck:reset(aec_parent_connector),
child_new_top(CachePid, ChildTop1),
Block = block_by_height(ParentHeight),
?TEST_MODULE:post_block(Block),
timer:sleep(10),
{ok, #{ child_start_height := StartHeight,
top_height := ParentHeight,
child_top_height := _ChildTop1}} = ?TEST_MODULE:get_state(),
[] = collect_commitments(?ALICE),
[] = collect_commitments(?BOB),
[] = collect_commitments(?CAROL),
[] = collect_commitments(?DAVE),
ok
end,
lists:seq(0, 20)),
ParentTop2 = ParentTop1 + 20,
CachePid ! {gproc_ps_event, start_mining, unused},
lists:foreach(
fun(Idx) ->
ParentHeight = ParentTop2 + Idx,
set_parent_chain_top(ParentHeight),
ChildTop1 = ChildTop + Idx,
meck:reset(aec_parent_connector),
child_new_top(CachePid, ChildTop1),
Block = block_by_height(ParentHeight),
?TEST_MODULE:post_block(Block),
timer:sleep(10),
{ok, #{ child_start_height := StartHeight,
top_height := ParentHeight,
child_top_height := ChildTop1}} = ?TEST_MODULE:get_state(),
Hash = aeser_api_encoder:encode(key_block_hash, height_to_hash(ChildTop1)),
[Hash] = collect_commitments(?ALICE),
[Hash] = collect_commitments(?BOB),
[] = collect_commitments(?CAROL),
[] = collect_commitments(?DAVE),
ok
end,
lists:seq(0, 20)),
?TEST_MODULE:stop(),
ok.
start_cache(StartHeight, MaxSize, Confirmations) ->
start_cache(StartHeight, MaxSize, Confirmations, false).
start_cache(StartHeight, MaxSize, Confirmations, IsPublishingCommitments) ->
Args = [StartHeight, MaxSize, Confirmations, IsPublishingCommitments],
gen_server:start({local, ?TEST_MODULE}, ?TEST_MODULE, Args, []).
height_to_hash(Height) when Height < 0 -> height_to_hash(0);
height_to_hash(Height) when is_integer(Height) -> <<Height:32/unit:8>>.
fun(B , { AccumHeight , ByteIdx } ) - >
{ B * trunc(math : pow(8 , ByteIdx ) ) + AccumHeight , ByteIdx + 1 }
MeaningfulBytes ) ,
block_by_height(Height, Commitments) ->
B0 = block_by_height(Height),
aec_parent_chain_block:set_commitments(B0, Commitments).
block_by_height(Height) ->
Hash = height_to_hash(Height),
PrevHash = height_to_hash(Height - 1),
aec_parent_chain_block:new(Hash, Height, PrevHash).
mock_parent_connector() ->
meck:new(aec_parent_connector, []),
meck:expect(aec_parent_connector, request_block_by_height,
fun(Height) ->
spawn(
fun() ->
Block = block_by_height(Height),
?TEST_MODULE:post_block(Block)
end)
end),
meck:expect(aec_parent_connector, fetch_block_by_height,
fun(Height) ->
Block = block_by_height(Height),
{ok, Block}
end),
meck:expect(aec_parent_connector, request_top,
fun() -> ok end),
meck:expect(aec_parent_connector, post_commitment,
fun(_Who, _What) -> ok end),
ok.
mock_stakers() ->
meck:new(aec_consensus_hc, []),
meck:new(aec_preset_keys, []),
ok.
unmock_stakers() ->
meck:unload(aec_preset_keys),
meck:unload(aec_consensus_hc),
ok.
expect_stakers(StakerList) ->
meck:expect(aec_consensus_hc, parent_chain_validators,
fun(_TxEnv, _Trees) -> {ok, StakerList} end),
ok.
expect_keys(PubkeyList) ->
meck:expect(aec_preset_keys, is_key_present,
fun(Pubkey) -> lists:member(Pubkey, PubkeyList) end),
ok.
set_parent_chain_top(TopHeight) ->
meck:expect(aec_parent_connector, request_block_by_height,
fun(RequestedHeight) when RequestedHeight > TopHeight ->
ok;
(RequestedHeight) ->
spawn(
fun() ->
Block = block_by_height(RequestedHeight),
?TEST_MODULE:post_block(Block)
end)
end),
meck:expect(aec_parent_connector, fetch_block_by_height,
fun(RequestedHeight) when RequestedHeight > TopHeight ->
{error, not_found};
(RequestedHeight) ->
Block = block_by_height(RequestedHeight),
{ok, Block}
end),
ok.
unmock_parent_connector() ->
meck:unload(aec_parent_connector).
mock_events() ->
meck:new(aec_events, []),
meck:expect(aec_events, subscribe,
fun(top_changed) -> ok;
(start_mining) -> ok;
(stop_mining) -> ok
end),
ok.
unmock_events() ->
meck:unload(aec_events).
child_new_top(CachePid, Height) ->
Hash = height_to_hash(Height),
CachePid ! {gproc_ps_event, top_changed, #{info => #{block_type => key,
block_hash => Hash,
height => Height}}}.
assert_child_cache_consistency(#{ child_start_height := StartHeight,
child_top_height := ChildTop,
blocks := Blocks,
max_size := CacheMaxSize,
pc_confirmations := Confirmations,
top_height := TopHeight}) ->
?assertEqual(CacheMaxSize, map_size(Blocks)),
CacheExpectedStart = min(ChildTop + StartHeight, TopHeight - CacheMaxSize + 1),
?assertEqual(CacheExpectedStart, lists:min(maps:keys(Blocks))),
CacheExpectedEnd = CacheExpectedStart + CacheMaxSize - 1,
?assertEqual(CacheExpectedEnd, lists:max(maps:keys(Blocks))),
lists:foreach(
fun(Height) ->
{true, Height} = {maps:is_key(Height, Blocks), Height},
IsMature = TopHeight - Confirmations >= Height,
Block = block_by_height(Height),
case ?TEST_MODULE:get_block_by_height(Height) of
{ok, Block} when IsMature -> ok;
{error, {not_enough_confirmations, Block}} -> ok
end,
ok
end,
lists:seq(CacheExpectedEnd, CacheExpectedEnd)),
ok.
collect_commitments(Staker) ->
AllCommitments =
lists:filter(
fun({_Pid, {_M, _F, [Who, _Hash]}, _Res}) ->
Who =:= Staker
end,
filter_meck_events(aec_parent_connector, post_commitment)),
lists:map(
fun({_Pid, {_M, _F, [_Who, Hash]}, _Res}) -> Hash end,
AllCommitments).
filter_meck_events(Module, Function) ->
lists:filter(
fun({_Pid, {M, F, _Args}, _Res}) ->
M =:= Module andalso F =:= Function
end,
meck:history(Module)).
mock_commitments_list(_BlockHashesMap) ->
meck:expect(aec_parent_connector, request_block_by_height,
fun(Height) ->
spawn(
fun() ->
Block = block_by_height(Height),
?TEST_MODULE:post_block(Block)
end)
end).
mock_commitments_list(all, L) ->
meck:expect(aec_parent_connector, request_block_by_height,
fun(Height) ->
spawn(
fun() ->
Block0 = block_by_height(Height),
Block = aec_parent_chain_block:set_commitments(Block0, L),
?TEST_MODULE:post_block(Block)
end)
end).
|
609e6ab76ab11848a8003820b66c7f8ddf2fb9ea81095d84fc1597aa2d441566 | bpiel/guildsman | cytoscape.cljs | (ns guildsman.cytoscape
(:require [re-frame.core :as rf]
[reagent.core :as r]
[re-com.core :as rc]))
(def c1 (volatile! nil))
(def a1 (atom nil))
(def last-node-click (volatile! [nil 0]))
(defn now-ts [] (.getTime (js/Date.)))
(defn on-click-node
[xc-api evt-js]
(def evt-js1 evt-js)
(let [{target "target"} (js->clj evt-js)
[last-node last-ts] @last-node-click]
(if (= last-node target)
(let [now (now-ts)]
(println (- now last-ts))
(if (< (- now last-ts) 750)
(cond (.isExpandable xc-api target)
(.expand xc-api target)
(.isCollapsible xc-api target)
(.collapse xc-api target))
(println "single same")))
(println "different"))
(vreset! last-node-click
[target (now-ts)])
(rf/dispatch [:node-select (.id target)])))
(defn setup-cyto
[cy]
(println "START setup-cyto")
(let [xc-api (.expandCollapse cy (clj->js {:layoutBy {:name "dagre"
:nodeSep 600
:rankSep 100}
:fisheye false
:animate true
:undoable false
:cueEnabled false}))]
(.collapseAll xc-api)
(.on cy "tap" "node" (partial on-click-node xc-api))
(println "DONE setup-cyto")))
(defn cyto-state->cyto-gen-map
[{:keys [id value]}]
(println "cyto-state->cyto-gen-map")
(println id)
(clj->js (merge value
{:container (.getElementById js/document
id)})))
(defn gen-cyto
[state']
(println "gen-cyto")
(let [c (js/cytoscape (cyto-state->cyto-gen-map state'))]
(vreset! c1 c)
(println "gen-cyto POST vreset")
c))
(defn dist
[x1 y1 x2 y2]
(let [dx (- x2 x1)
dy (- y2 y1)]
(Math/sqrt (+ (* dy dy)
(* dx dx)))))
(defn steeper?
[x1 y1 x2 y2 x3 y3]
(< (* (- x1 x3)
(- y1 y2))
(* (- x1 x2)
(- y1 y3))))
(defn inside-box?
[x1 y1 x2 y2 xp yp]
(and (or (< x1 xp x2)
(> x1 xp x2))
(or (< y1 yp y2)
(> y1 yp y2))))
(defn find-intersection
[x1 y1 x2 y2 x3 y3]
(let [dx (- x2 x1)
dy (- y2 y1)
k (/ (- (* dy (- x3 x1))
(* dx (- y3 y1)))
(+ (* dy dy)
(* dx dx)))
x4 (- x3 (* k dy))
y4 (+ y3 (* k dx))]
[x4 y4]))
(defn rel-coords
[x1 y1 x2 y2 x3 y3]
(let [[x4 y4] (find-intersection x1 y1 x2 y2 x3 y3)
d12 (dist x1 y1 x2 y2)
d14 (dist x1 y1 x4 y4)
d34 (dist x3 y3 x4 y4)
st (if (steeper? x1 y1 x2 y2 x3 y3) 1 -1)]
(when (inside-box? x1 y1 x2 y2 x4 y4)
[(* d34 st) (/ d14 d12)])))
#_(defn perp-coords
[x1 y1 x2 y2 xp yp]
(let [dx (- x2 x1)
dy (- y2 y1)
k (/ (- (* dy (- xp x1))
(* dx (- yp y1)))
(+ (* dy dy)
(* dx dx)))
x4 (- xp (* k dy))
y4 (+ yp (* k dx))
d (Math/sqrt (+ (* (- y2 y1)
(- y2 y1))
(* (- x2 x1)
(- x2 x1))))
ypt (Math/sqrt (+ (* (- y4 y1)
(- y4 y1))
(* (- x4 x1)
(- x4 x1))))
xpt (dist x1 y1 x2 y2 xp yp)]
[xpt (/ ypt d)]))
(defn js->xy
[xy]
((juxt #(get % "x")
#(get % "y"))
(js->clj xy)))
(defn node->xy
[n]
(-> (.position n)
js->xy))
(defn manhattan
[x1 y1 x2 y2]
(+ (Math/abs (- x1 x2))
(Math/abs (- y1 y2))))
(defn p
[x]
(when false
(println x)))
(defn find-nearbys
[x1 y1 x2 y2]
(keep (fn [n]
(let [[xp yp] (node->xy n)]
(p "------")
(when-let [pc (rel-coords x1 y1 x2 y2 xp yp)]
(p pc)
(p "------")
pc)))
(.toArray (.$ @c1 "node"))))
#_(def e1 (-> (.$ @c1 "edge[source = 'loss']")
.first))
(defn near-edge?
[[xp yp]]
(< -50. xp 50.))
(defn mk-ctrl-point
[[x y]]
[(if (<= x 0)
(+ 50 x)
(- x 50))
#_ (if (<= x 0)
100 -100)
#_(if (<= x 0)
(- -50 x)
(- 50 x))
y])
(defn mk-ctrl-styles
[ps]
[(clojure.string/join " " (map str (map first ps)))
(clojure.string/join " " (map str (map second ps)))])
(defn route-edge
[edge]
(let [[sx sy] (js->xy (.sourceEndpoint edge))
[dx dy] (js->xy (.targetEndpoint edge))
[cpd cpw] (mk-ctrl-styles
(sort-by second
(map mk-ctrl-point
(filter near-edge?
(find-nearbys sx sy dx dy)))))]
(p [cpd cpw])
(p "===========")
(-> edge
#_ (.style "curveStyle" "unbundled-bezier")
(.style "controlPointDistances" cpd)
(.style "controlPointWeights" cpw))))
(defn route-all-edges
[]
#_(p "route-all-edges")
(.map (.$ @c1 "edge")
route-edge))
#_(route-all-edges)
#_(def in1 (.setInterval js/window
route-all-edges
100))
#_(.clearInterval js/window in1)
#_(.map (.$ @c1 "edge[source = 'loss']")
route-edge)
#_(route-edge e1)
#_(-> (.$ @c1 "node[")
(.map node->xy)
js->clj)
#_(.fit @c1)
#_(-> (.$ @c1 "edge[source = 'loss']")
(.style "curveStyle" "unbundled-bezier")
(.style "controlPointStepSize" "10")
(.style "controlPointWeight" "0.5"))
#_(def e1
(-> (.$ @c1 "edge[source = 'loss']")
.first))
#_(vreset! c1
(js/cytoscape (clj->js {:container (.getElementById js/document "cyto2")
:layout {:name "preset"}
:style [{:selector "edge"
:style {"curve-style" "unbundled-bezier"
"edge-distances" "node-position"
:control-point-distances [0]
:control-point-weights [0.5]}}]
:elements {:nodes [{:data {:id "a"}
:position {:x 0 :y 0}}
{:data {:id "b"}
:position {:x 100 :y 100}}
{:data {:id "c"}}
{:data {:id "d"}}
{:data {:id "e"}}
{:data {:id "f"}} ]
:edges [{:data {:source "a"
:target "b"}}
{:data {:source "c"
:target "d"}}]}})))
#_(vreset! c1
(js/cytoscape (clj->js {:container (.getElementById js/document "cyto6")
:style [{:selector "edge"
:style {"curve-style" "unbundled-bezier"
"edge-distances" "node-position"
:control-point-distances [0]
:control-point-weights [0.5]}}]
:elements {:nodes [{:data {:id "a"}}
{:data {:id "b"}}]
:edges [{:data {:source "a"
:target "b"}}]}})))
(defn cyto-comp-did-mount
[state this]
(vswap! state assoc
:instance
(gen-cyto @state))
(setup-cyto (:instance @state)))
(defn cyto-reagent-render
[state value]
(let [{:keys [id]} @state]
(println "cyto-reagent-render")
(println id)
[:div {:id id :style {:width "100%" :height "100%"}}]))
(defn cyto-comp-will-update
[state this [_ new-value]]
(vswap! state
assoc :value new-value))
(defn cyto-comp-did-update
[state this [_ {:keys [config data highlighted selected] :as old-val}]]
(let [{:keys [value] :as state'} @state]
(cond #_(not= config (:config state'))
(not= value old-val)
(do (println "generate")
(vswap! state
assoc
:instance
(gen-cyto state'))
(setup-cyto (:instance @state)))
#_ ((not= data (:data state'))
(do (println "load")
(.load instance (clj->js (merge (:data state') {:unload true}))))
(not= [highlighted selected] [(:highlighted state')
(:selected state')])
(do ;(println "flush")
(.flush instance))))))
(defn cytoscape
[value]
(println "cyto/cyto")
(let [state (volatile! {:id (str (gensym "cyto"))
:value value})]
(r/create-class {:component-did-mount (partial cyto-comp-did-mount state)
:component-did-update (partial cyto-comp-did-update state)
:component-will-update (partial cyto-comp-will-update state)
:reagent-render (partial cyto-reagent-render state)})))
| null | https://raw.githubusercontent.com/bpiel/guildsman/59c9a7459de19525cfc54112f02127e0777a00ce/resources/public/js/compiled/out/guildsman/cytoscape.cljs | clojure | (println "flush") | (ns guildsman.cytoscape
(:require [re-frame.core :as rf]
[reagent.core :as r]
[re-com.core :as rc]))
(def c1 (volatile! nil))
(def a1 (atom nil))
(def last-node-click (volatile! [nil 0]))
(defn now-ts [] (.getTime (js/Date.)))
(defn on-click-node
[xc-api evt-js]
(def evt-js1 evt-js)
(let [{target "target"} (js->clj evt-js)
[last-node last-ts] @last-node-click]
(if (= last-node target)
(let [now (now-ts)]
(println (- now last-ts))
(if (< (- now last-ts) 750)
(cond (.isExpandable xc-api target)
(.expand xc-api target)
(.isCollapsible xc-api target)
(.collapse xc-api target))
(println "single same")))
(println "different"))
(vreset! last-node-click
[target (now-ts)])
(rf/dispatch [:node-select (.id target)])))
(defn setup-cyto
[cy]
(println "START setup-cyto")
(let [xc-api (.expandCollapse cy (clj->js {:layoutBy {:name "dagre"
:nodeSep 600
:rankSep 100}
:fisheye false
:animate true
:undoable false
:cueEnabled false}))]
(.collapseAll xc-api)
(.on cy "tap" "node" (partial on-click-node xc-api))
(println "DONE setup-cyto")))
(defn cyto-state->cyto-gen-map
[{:keys [id value]}]
(println "cyto-state->cyto-gen-map")
(println id)
(clj->js (merge value
{:container (.getElementById js/document
id)})))
(defn gen-cyto
[state']
(println "gen-cyto")
(let [c (js/cytoscape (cyto-state->cyto-gen-map state'))]
(vreset! c1 c)
(println "gen-cyto POST vreset")
c))
(defn dist
[x1 y1 x2 y2]
(let [dx (- x2 x1)
dy (- y2 y1)]
(Math/sqrt (+ (* dy dy)
(* dx dx)))))
(defn steeper?
[x1 y1 x2 y2 x3 y3]
(< (* (- x1 x3)
(- y1 y2))
(* (- x1 x2)
(- y1 y3))))
(defn inside-box?
[x1 y1 x2 y2 xp yp]
(and (or (< x1 xp x2)
(> x1 xp x2))
(or (< y1 yp y2)
(> y1 yp y2))))
(defn find-intersection
[x1 y1 x2 y2 x3 y3]
(let [dx (- x2 x1)
dy (- y2 y1)
k (/ (- (* dy (- x3 x1))
(* dx (- y3 y1)))
(+ (* dy dy)
(* dx dx)))
x4 (- x3 (* k dy))
y4 (+ y3 (* k dx))]
[x4 y4]))
(defn rel-coords
[x1 y1 x2 y2 x3 y3]
(let [[x4 y4] (find-intersection x1 y1 x2 y2 x3 y3)
d12 (dist x1 y1 x2 y2)
d14 (dist x1 y1 x4 y4)
d34 (dist x3 y3 x4 y4)
st (if (steeper? x1 y1 x2 y2 x3 y3) 1 -1)]
(when (inside-box? x1 y1 x2 y2 x4 y4)
[(* d34 st) (/ d14 d12)])))
#_(defn perp-coords
[x1 y1 x2 y2 xp yp]
(let [dx (- x2 x1)
dy (- y2 y1)
k (/ (- (* dy (- xp x1))
(* dx (- yp y1)))
(+ (* dy dy)
(* dx dx)))
x4 (- xp (* k dy))
y4 (+ yp (* k dx))
d (Math/sqrt (+ (* (- y2 y1)
(- y2 y1))
(* (- x2 x1)
(- x2 x1))))
ypt (Math/sqrt (+ (* (- y4 y1)
(- y4 y1))
(* (- x4 x1)
(- x4 x1))))
xpt (dist x1 y1 x2 y2 xp yp)]
[xpt (/ ypt d)]))
(defn js->xy
[xy]
((juxt #(get % "x")
#(get % "y"))
(js->clj xy)))
(defn node->xy
[n]
(-> (.position n)
js->xy))
(defn manhattan
[x1 y1 x2 y2]
(+ (Math/abs (- x1 x2))
(Math/abs (- y1 y2))))
(defn p
[x]
(when false
(println x)))
(defn find-nearbys
[x1 y1 x2 y2]
(keep (fn [n]
(let [[xp yp] (node->xy n)]
(p "------")
(when-let [pc (rel-coords x1 y1 x2 y2 xp yp)]
(p pc)
(p "------")
pc)))
(.toArray (.$ @c1 "node"))))
#_(def e1 (-> (.$ @c1 "edge[source = 'loss']")
.first))
(defn near-edge?
[[xp yp]]
(< -50. xp 50.))
(defn mk-ctrl-point
[[x y]]
[(if (<= x 0)
(+ 50 x)
(- x 50))
#_ (if (<= x 0)
100 -100)
#_(if (<= x 0)
(- -50 x)
(- 50 x))
y])
(defn mk-ctrl-styles
[ps]
[(clojure.string/join " " (map str (map first ps)))
(clojure.string/join " " (map str (map second ps)))])
(defn route-edge
[edge]
(let [[sx sy] (js->xy (.sourceEndpoint edge))
[dx dy] (js->xy (.targetEndpoint edge))
[cpd cpw] (mk-ctrl-styles
(sort-by second
(map mk-ctrl-point
(filter near-edge?
(find-nearbys sx sy dx dy)))))]
(p [cpd cpw])
(p "===========")
(-> edge
#_ (.style "curveStyle" "unbundled-bezier")
(.style "controlPointDistances" cpd)
(.style "controlPointWeights" cpw))))
(defn route-all-edges
[]
#_(p "route-all-edges")
(.map (.$ @c1 "edge")
route-edge))
#_(route-all-edges)
#_(def in1 (.setInterval js/window
route-all-edges
100))
#_(.clearInterval js/window in1)
#_(.map (.$ @c1 "edge[source = 'loss']")
route-edge)
#_(route-edge e1)
#_(-> (.$ @c1 "node[")
(.map node->xy)
js->clj)
#_(.fit @c1)
#_(-> (.$ @c1 "edge[source = 'loss']")
(.style "curveStyle" "unbundled-bezier")
(.style "controlPointStepSize" "10")
(.style "controlPointWeight" "0.5"))
#_(def e1
(-> (.$ @c1 "edge[source = 'loss']")
.first))
#_(vreset! c1
(js/cytoscape (clj->js {:container (.getElementById js/document "cyto2")
:layout {:name "preset"}
:style [{:selector "edge"
:style {"curve-style" "unbundled-bezier"
"edge-distances" "node-position"
:control-point-distances [0]
:control-point-weights [0.5]}}]
:elements {:nodes [{:data {:id "a"}
:position {:x 0 :y 0}}
{:data {:id "b"}
:position {:x 100 :y 100}}
{:data {:id "c"}}
{:data {:id "d"}}
{:data {:id "e"}}
{:data {:id "f"}} ]
:edges [{:data {:source "a"
:target "b"}}
{:data {:source "c"
:target "d"}}]}})))
#_(vreset! c1
(js/cytoscape (clj->js {:container (.getElementById js/document "cyto6")
:style [{:selector "edge"
:style {"curve-style" "unbundled-bezier"
"edge-distances" "node-position"
:control-point-distances [0]
:control-point-weights [0.5]}}]
:elements {:nodes [{:data {:id "a"}}
{:data {:id "b"}}]
:edges [{:data {:source "a"
:target "b"}}]}})))
(defn cyto-comp-did-mount
[state this]
(vswap! state assoc
:instance
(gen-cyto @state))
(setup-cyto (:instance @state)))
(defn cyto-reagent-render
[state value]
(let [{:keys [id]} @state]
(println "cyto-reagent-render")
(println id)
[:div {:id id :style {:width "100%" :height "100%"}}]))
(defn cyto-comp-will-update
[state this [_ new-value]]
(vswap! state
assoc :value new-value))
(defn cyto-comp-did-update
[state this [_ {:keys [config data highlighted selected] :as old-val}]]
(let [{:keys [value] :as state'} @state]
(cond #_(not= config (:config state'))
(not= value old-val)
(do (println "generate")
(vswap! state
assoc
:instance
(gen-cyto state'))
(setup-cyto (:instance @state)))
#_ ((not= data (:data state'))
(do (println "load")
(.load instance (clj->js (merge (:data state') {:unload true}))))
(not= [highlighted selected] [(:highlighted state')
(:selected state')])
(.flush instance))))))
(defn cytoscape
[value]
(println "cyto/cyto")
(let [state (volatile! {:id (str (gensym "cyto"))
:value value})]
(r/create-class {:component-did-mount (partial cyto-comp-did-mount state)
:component-did-update (partial cyto-comp-did-update state)
:component-will-update (partial cyto-comp-will-update state)
:reagent-render (partial cyto-reagent-render state)})))
|
567b37a67844a9acf70d4c6380298c404b554b81b31bd12ad911b9127af7a756 | haslab/HAAP | Debugger.hs |
HAAP : Haskell Automated Assessment Platform
This module provides the @Debugger@ plugin that invokes external debugging tools .
HAAP: Haskell Automated Assessment Platform
This module provides the @Debugger@ plugin that invokes external debugging tools.
-}
# LANGUAGE EmptyDataDecls , TypeOperators , ScopedTypeVariables , TypeFamilies , FlexibleInstances , FlexibleContexts , UndecidableInstances , MultiParamTypeClasses , OverloadedStrings #
module HAAP.Code.Debugger where
import HAAP.Web.Diff
import HAAP.Core
import HAAP.IO
import HAAP.Web.Hakyll
import HAAP.Utils
import HAAP.Plugin
import HAAP.Shelly
import HAAP.Pretty
import HAAP.Compiler.GHC
import Data.Default
import Data.List
import qualified Data.Text as T
import Data.Proxy
import Control.Monad.Reader as Reader
import Control . Monad . Except
import System.FilePath
import System.Directory
import Debug.Hoed.Algorithmic
data Debugger
instance HaapPlugin Debugger where
type PluginI Debugger = DebuggerArgs
type PluginO Debugger = ()
type PluginT Debugger = ReaderT DebuggerArgs
type PluginK Debugger t m = ()
usePlugin getArgs m = do
args <- getArgs
x <- mapHaapMonad (flip Reader.runReaderT args . getComposeT) m
return (x,())
instance HaapMonad m => HasPlugin Debugger (ReaderT DebuggerArgs) m where
liftPlugin = id
instance (HaapStack t2 m) => HasPlugin Debugger (ComposeT (ReaderT DebuggerArgs) t2) m where
liftPlugin m = ComposeT $ hoist' lift m
data DebuggerArgs = DebuggerArgs
{ debuggerSandbox :: Sandbox
, debuggerArgs :: [String]
, debuggerGHC :: GHCArgs
, debuggerPath :: FilePath -- path relative to the project whose files are being debugged
, debuggerFiles :: [FilePath] -- debugged files relative to the debugger path
, debuggerInstrumentedPath :: FilePath -- path relative to the project path of already instrumented debug files
, debuggerInstrumentedFiles :: [FilePath] -- debug files relative to the instrumented path
, debuggerModules :: [String] -- a list of modules to import or the source code
, debuggerImports :: String -- a list of imports to prefix the file
, debuggerProgram :: String
, debuggerHtmlPath :: FilePath
}
useAndRunDebugger :: (MonadIO m,HasPlugin Hakyll t m) => DebuggerArgs -> Haap t m (FilePath,FilePath,FilePath)
useAndRunDebugger args = usePlugin_ (return args) $ runDebugger
runDebugger :: (MonadIO m,HasPlugin Hakyll t m,HasPlugin Debugger t m) => Haap t m (FilePath,FilePath,FilePath)
runDebugger = do
h <- liftHaap $ liftPluginProxy (Proxy::Proxy Debugger) $ Reader.ask
hp <- getHakyllP
tmp <- getProjectTmpPath
let htmldatapath::String = dirToRoot (debuggerHtmlPath h) </> "debug"
let extras = debuggerArgs h
let ioargs = (ghcIO $ debuggerGHC h) { ioSandbox = debuggerSandbox h }
let ioArgs = (ghcIO $ debuggerGHC h) { ioSandbox = mapSandboxCfg (dirToRoot (tmp </> debuggerHtmlPath h) </>) (debuggerSandbox h) }
let ghcArgs = (debuggerGHC h) { ghcIO = ioArgs, ghcSafe = False }
let debuggererrorpath = addExtension (debuggerHtmlPath h) "html"
orErrorHakyllPage debuggererrorpath (debuggererrorpath,debuggererrorpath,debuggererrorpath) $ do
--let html = dirToRoot (debuggerPath h) </> tmp </> debuggerHtmlPath h
pperrs <- runBaseSh $ do
pperrs <- forM (debuggerFiles h) $ \file -> do
shMkDir $ takeDirectory (tmp </> debuggerHtmlPath h </> file)
shCommandToFileWith ioargs "debug-pp" [(debuggerPath h </> file)] (tmp </> debuggerHtmlPath h </> file)
forM_ (debuggerInstrumentedFiles h) $ \file -> do
shMkDir $ takeDirectory (tmp </> debuggerHtmlPath h </> file)
shCp (debuggerInstrumentedPath h </> file) (tmp </> debuggerHtmlPath h </> file)
let imports = debuggerImports h ++ concatMap (\modu -> "import " ++ modu ++ "\n") (debuggerModules h)
let mainfile = "{-# LANGUAGE PackageImports #-}" ++ "\n"
++ imports ++ "\n"
++ "import qualified \"Hoed\" Debug.Hoed as Hoed" ++ "\n"
++ "import qualified \"debug\" Debug.Hoed as Debug" ++ "\n"
++ "import qualified \"debug\" Debug.Hoed.Algorithmic as Debug" ++ "\n"
++ "import qualified \"debug\" Debug.Hoed.Graphical as Debug" ++ "\n"
++ "import qualified Prelude" ++ "\n"
++ "main = do" ++ "\n"
++ " let prog = " ++ debuggerProgram h ++ "\n"
++ " h <- Hoed.runO' Hoed.defaultHoedOptions (Prelude.print prog)" ++ "\n"
++ " Debug.debugSaveTrace \"debug.html\" (Debug.convert $ Hoed.hoedCompTree h)" ++ "\n"
++ " Debug.debugGraphicalOutputToFile \"jshood.html\" h" ++ "\n"
++ " Debug.debugAlgorithmicOutput " ++ show htmldatapath ++ " " ++ show "." ++ " h" ++ "\n"
shWriteFile' (tmp </> debuggerHtmlPath h </> "Main.hs") (T.pack mainfile)
return pperrs
let dir = (tmp </> debuggerHtmlPath h)
iores <- addMessageToError (T.unlines $ map prettyText pperrs) $ orIOResult $ runBaseSh $ do
shCd dir
shRm "Main"
shGhcWith (ghcArgs { ghcMake = True }) ["Main.hs"]
addMessageToError (prettyText iores) $ runBaseSh $ do
shCd dir
exec <- shExec "Main"
shCommandWith_ ioArgs exec extras
let debugPath = debuggerHtmlPath h </> "debug.html"
let jshoodPath = debuggerHtmlPath h </> "jshood.html"
let jshoedPath = debuggerHtmlPath h </> "jshoed.html"
hakyllFocus ["debug",tmp </> debuggerHtmlPath h] $ hakyllRules $ do
-- copy the debugger data files
let globdata = (fromGlob $ "debug" </> "img" </> "*.png")
.||. (fromGlob $ "debug" </> "JsHoed.jsexe" </> "*.js")
match globdata $ do
route $ idRoute`composeRoutes` funRoute (hakyllRoute hp)
compile $ copyFileCompiler
match (fromGlob $ tmp </> debuggerHtmlPath h </> "CompTree") $ do
route $ relativeRoute tmp `composeRoutes` funRoute (hakyllRoute hp)
compile $ copyFileCompiler
-- copy the debugger generated documentation
match (fromGlob $ tmp </> debuggerHtmlPath h </> "*.html") $ do
route $ relativeRoute tmp `composeRoutes` funRoute (hakyllRoute hp)
compile $ getResourceString >>= hakyllCompile hp
return (hakyllRoute hp $ debugPath,hakyllRoute hp $ jshoodPath,hakyllRoute hp $ jshoedPath)
copyDebuggerFiles :: (MonadIO m,HaapStack t m) => Configuration -> Haap t m ()
copyDebuggerFiles cfg = do
let outpath = providerDirectory cfg </> "debug"
runBaseIO' $ copyDebugAlgorithmicFiles outpath
--datapath <- runBaseIO' $ debugHtmlDataPath
xs < - runBaseIO ' $ listDirectory datapath
runBaseSh $ forM _ xs $ \x - > shCpRecursive ( datapath < / > x ) ( providerDirectory cfg < / > " debug " < / > x )
| null | https://raw.githubusercontent.com/haslab/HAAP/5acf9efaf0e5f6cba1c2482e51bda703f405a86f/src/HAAP/Code/Debugger.hs | haskell | path relative to the project whose files are being debugged
debugged files relative to the debugger path
path relative to the project path of already instrumented debug files
debug files relative to the instrumented path
a list of modules to import or the source code
a list of imports to prefix the file
let html = dirToRoot (debuggerPath h) </> tmp </> debuggerHtmlPath h
copy the debugger data files
copy the debugger generated documentation
datapath <- runBaseIO' $ debugHtmlDataPath |
HAAP : Haskell Automated Assessment Platform
This module provides the @Debugger@ plugin that invokes external debugging tools .
HAAP: Haskell Automated Assessment Platform
This module provides the @Debugger@ plugin that invokes external debugging tools.
-}
# LANGUAGE EmptyDataDecls , TypeOperators , ScopedTypeVariables , TypeFamilies , FlexibleInstances , FlexibleContexts , UndecidableInstances , MultiParamTypeClasses , OverloadedStrings #
module HAAP.Code.Debugger where
import HAAP.Web.Diff
import HAAP.Core
import HAAP.IO
import HAAP.Web.Hakyll
import HAAP.Utils
import HAAP.Plugin
import HAAP.Shelly
import HAAP.Pretty
import HAAP.Compiler.GHC
import Data.Default
import Data.List
import qualified Data.Text as T
import Data.Proxy
import Control.Monad.Reader as Reader
import Control . Monad . Except
import System.FilePath
import System.Directory
import Debug.Hoed.Algorithmic
data Debugger
instance HaapPlugin Debugger where
type PluginI Debugger = DebuggerArgs
type PluginO Debugger = ()
type PluginT Debugger = ReaderT DebuggerArgs
type PluginK Debugger t m = ()
usePlugin getArgs m = do
args <- getArgs
x <- mapHaapMonad (flip Reader.runReaderT args . getComposeT) m
return (x,())
instance HaapMonad m => HasPlugin Debugger (ReaderT DebuggerArgs) m where
liftPlugin = id
instance (HaapStack t2 m) => HasPlugin Debugger (ComposeT (ReaderT DebuggerArgs) t2) m where
liftPlugin m = ComposeT $ hoist' lift m
data DebuggerArgs = DebuggerArgs
{ debuggerSandbox :: Sandbox
, debuggerArgs :: [String]
, debuggerGHC :: GHCArgs
, debuggerProgram :: String
, debuggerHtmlPath :: FilePath
}
useAndRunDebugger :: (MonadIO m,HasPlugin Hakyll t m) => DebuggerArgs -> Haap t m (FilePath,FilePath,FilePath)
useAndRunDebugger args = usePlugin_ (return args) $ runDebugger
runDebugger :: (MonadIO m,HasPlugin Hakyll t m,HasPlugin Debugger t m) => Haap t m (FilePath,FilePath,FilePath)
runDebugger = do
h <- liftHaap $ liftPluginProxy (Proxy::Proxy Debugger) $ Reader.ask
hp <- getHakyllP
tmp <- getProjectTmpPath
let htmldatapath::String = dirToRoot (debuggerHtmlPath h) </> "debug"
let extras = debuggerArgs h
let ioargs = (ghcIO $ debuggerGHC h) { ioSandbox = debuggerSandbox h }
let ioArgs = (ghcIO $ debuggerGHC h) { ioSandbox = mapSandboxCfg (dirToRoot (tmp </> debuggerHtmlPath h) </>) (debuggerSandbox h) }
let ghcArgs = (debuggerGHC h) { ghcIO = ioArgs, ghcSafe = False }
let debuggererrorpath = addExtension (debuggerHtmlPath h) "html"
orErrorHakyllPage debuggererrorpath (debuggererrorpath,debuggererrorpath,debuggererrorpath) $ do
pperrs <- runBaseSh $ do
pperrs <- forM (debuggerFiles h) $ \file -> do
shMkDir $ takeDirectory (tmp </> debuggerHtmlPath h </> file)
shCommandToFileWith ioargs "debug-pp" [(debuggerPath h </> file)] (tmp </> debuggerHtmlPath h </> file)
forM_ (debuggerInstrumentedFiles h) $ \file -> do
shMkDir $ takeDirectory (tmp </> debuggerHtmlPath h </> file)
shCp (debuggerInstrumentedPath h </> file) (tmp </> debuggerHtmlPath h </> file)
let imports = debuggerImports h ++ concatMap (\modu -> "import " ++ modu ++ "\n") (debuggerModules h)
let mainfile = "{-# LANGUAGE PackageImports #-}" ++ "\n"
++ imports ++ "\n"
++ "import qualified \"Hoed\" Debug.Hoed as Hoed" ++ "\n"
++ "import qualified \"debug\" Debug.Hoed as Debug" ++ "\n"
++ "import qualified \"debug\" Debug.Hoed.Algorithmic as Debug" ++ "\n"
++ "import qualified \"debug\" Debug.Hoed.Graphical as Debug" ++ "\n"
++ "import qualified Prelude" ++ "\n"
++ "main = do" ++ "\n"
++ " let prog = " ++ debuggerProgram h ++ "\n"
++ " h <- Hoed.runO' Hoed.defaultHoedOptions (Prelude.print prog)" ++ "\n"
++ " Debug.debugSaveTrace \"debug.html\" (Debug.convert $ Hoed.hoedCompTree h)" ++ "\n"
++ " Debug.debugGraphicalOutputToFile \"jshood.html\" h" ++ "\n"
++ " Debug.debugAlgorithmicOutput " ++ show htmldatapath ++ " " ++ show "." ++ " h" ++ "\n"
shWriteFile' (tmp </> debuggerHtmlPath h </> "Main.hs") (T.pack mainfile)
return pperrs
let dir = (tmp </> debuggerHtmlPath h)
iores <- addMessageToError (T.unlines $ map prettyText pperrs) $ orIOResult $ runBaseSh $ do
shCd dir
shRm "Main"
shGhcWith (ghcArgs { ghcMake = True }) ["Main.hs"]
addMessageToError (prettyText iores) $ runBaseSh $ do
shCd dir
exec <- shExec "Main"
shCommandWith_ ioArgs exec extras
let debugPath = debuggerHtmlPath h </> "debug.html"
let jshoodPath = debuggerHtmlPath h </> "jshood.html"
let jshoedPath = debuggerHtmlPath h </> "jshoed.html"
hakyllFocus ["debug",tmp </> debuggerHtmlPath h] $ hakyllRules $ do
let globdata = (fromGlob $ "debug" </> "img" </> "*.png")
.||. (fromGlob $ "debug" </> "JsHoed.jsexe" </> "*.js")
match globdata $ do
route $ idRoute`composeRoutes` funRoute (hakyllRoute hp)
compile $ copyFileCompiler
match (fromGlob $ tmp </> debuggerHtmlPath h </> "CompTree") $ do
route $ relativeRoute tmp `composeRoutes` funRoute (hakyllRoute hp)
compile $ copyFileCompiler
match (fromGlob $ tmp </> debuggerHtmlPath h </> "*.html") $ do
route $ relativeRoute tmp `composeRoutes` funRoute (hakyllRoute hp)
compile $ getResourceString >>= hakyllCompile hp
return (hakyllRoute hp $ debugPath,hakyllRoute hp $ jshoodPath,hakyllRoute hp $ jshoedPath)
copyDebuggerFiles :: (MonadIO m,HaapStack t m) => Configuration -> Haap t m ()
copyDebuggerFiles cfg = do
let outpath = providerDirectory cfg </> "debug"
runBaseIO' $ copyDebugAlgorithmicFiles outpath
xs < - runBaseIO ' $ listDirectory datapath
runBaseSh $ forM _ xs $ \x - > shCpRecursive ( datapath < / > x ) ( providerDirectory cfg < / > " debug " < / > x )
|
310556f916ef7cf24e2c9e4ed092bd7067e474ddf647bf9638cf0c430b6d2993 | Liqwid-Labs/liqwid-plutarch-extra | Functor.hs | module Plutarch.Extra.Functor (
-- * Type classes
Plut,
PFunctor (..),
PBifunctor (..),
-- * Functions
(#<$),
(#$>),
(#<$>),
(#<&>),
pvoid,
) where
import Data.Kind (Constraint)
import Plutarch.Api.V1.AssocMap (KeyGuarantees, PMap (PMap))
import Plutarch.Api.V1.Maybe (PMaybeData (PDJust, PDNothing))
import Plutarch.Builtin (ppairDataBuiltin)
import Plutarch.Extra.Boring (PBoring (pboring))
import Plutarch.Extra.Function (pconst, pidentity)
import Plutarch.Extra.TermCont (pletC, pmatchC)
import Plutarch.Lift (PUnsafeLiftDecl)
| Describes the entire category of Plutarch types , with arrows being Plutarch
functions . Since the typical name for the category of types is
@Hask@ , we follow this trend with naming , choosing ' Plut ' .
Use this for ' PSubcategory ' if you want /any/ Plutarch type to be available .
@since 3.1.0
functions. Since the typical name for the category of Haskell types is
@Hask@, we follow this trend with naming, choosing 'Plut'.
Use this for 'PSubcategory' if you want /any/ Plutarch type to be available.
@since 3.1.0
-}
class Plut (a :: S -> Type)
| @since 3.1.0
instance Plut a
| Describes a Plutarch - level covariant endofunctor . However , unlike in
Haskell , the endofunctor is defined over a subcategory of @Plut@ , rather than
all of it .
Put another way , this is the Plutarch equivalent to ' Functor ' , but unlike in
Haskell , instead of requiring full parametricity , we are allowed to constrain
what we are parametric over .
= Laws
Formally , must be an endofunctor on a subcategory of @Plut@ , as described
by the ' PSubcategory ' constraint . This means that the following must hold :
* @'pfmap ' ' # ' ' Plutarch . Extra . Category.pidentity'@ @=@
@'Plutarch . Extra . Category.pidentity'@
* @'pfmap ' ' # ' ( f ' Plutarch . Extra . Category . # > > > ' g)@ @=@ @('pfmap ' ' # ' f )
' Plutarch . Extra . Category . # > > > ' ( ' pfmap ' ' # ' g)@
If @'PSubcategory ' f@ is ' Plut ' ( that is , is defined as an endofunctor on
/all/ of @Plut@ ) , the second law is a free theorem ; however , in any other
case , it may not be .
@since 1.0.0
Haskell, the endofunctor is defined over a subcategory of @Plut@, rather than
all of it.
Put another way, this is the Plutarch equivalent to 'Functor', but unlike in
Haskell, instead of requiring full parametricity, we are allowed to constrain
what we are parametric over.
= Laws
Formally, @f@ must be an endofunctor on a subcategory of @Plut@, as described
by the 'PSubcategory' constraint. This means that the following must hold:
* @'pfmap' '#' 'Plutarch.Extra.Category.pidentity'@ @=@
@'Plutarch.Extra.Category.pidentity'@
* @'pfmap' '#' (f 'Plutarch.Extra.Category.#>>>' g)@ @=@ @('pfmap' '#' f)
'Plutarch.Extra.Category.#>>>' ('pfmap' '#' g)@
If @'PSubcategory' f@ is 'Plut' (that is, @f@ is defined as an endofunctor on
/all/ of @Plut@), the second law is a free theorem; however, in any other
case, it may not be.
@since 1.0.0
-}
class PFunctor (f :: (S -> Type) -> S -> Type) where
{-# MINIMAL pfmap #-}
| Describes the subcategory of @Plut@ that is an endofunctor on . Put
another way , this describes what kind of types is \'parametric
-- over\'.
--
-- Common choices for this are:
--
* ' ' , which means \'parametric over anything of kind @ 'S ' - > ' '
* ' PIsData ' , which means \'parametric over things which are
-- @Data@-encodable\'
* ' PUnsafeLiftDecl ' , which means \'parametric over things that have a
-- Haskell-level equivalent\'
type PSubcategory f :: (S -> Type) -> Constraint
pfmap ::
forall (a :: S -> Type) (b :: S -> Type) (s :: S).
(PSubcategory f a, PSubcategory f b) =>
Term s ((a :--> b) :--> f a :--> f b)
-- | Replace all values to be computed with a fixed value. Defaults to
-- @'pfmap' 'pconst'@, but could be more efficient for some 'PFunctor's.
--
-- @since 1.2.0
# INLINEABLE pfconst #
pfconst ::
forall (a :: S -> Type) (b :: S -> Type) (s :: S).
(PSubcategory f a, PSubcategory f b) =>
Term s (a :--> f b :--> f a)
pfconst = phoistAcyclic $ plam $ \x ys -> pfmap # (pconst # x) # ys
| @since 3.1.0
instance PFunctor PMaybe where
type PSubcategory PMaybe = Plut
pfmap = phoistAcyclic $
plam $ \f t -> unTermCont $ do
t' <- pmatchC t
pure . pcon $ case t' of
PNothing -> PNothing
PJust t'' -> PJust $ f # t''
-- | @since 1.0.0
instance PFunctor PMaybeData where
type PSubcategory PMaybeData = PIsData
pfmap = phoistAcyclic $
plam $ \f t -> unTermCont $ do
t' <- pmatchC t
case t' of
PDNothing _ -> pure . pcon . PDNothing $ pdnil
PDJust t'' -> do
x <- pletC (pfromData $ pfield @"_0" # t'')
res <- pletC (f # x)
pure . pcon . PDJust $ pdcons # pdata res # pdnil
| @since 3.1.0
instance PFunctor PList where
type PSubcategory PList = Plut
pfmap = phoistAcyclic $ plam $ \f t -> pmap # f # t
-- | @since 1.0.0
instance PFunctor PBuiltinList where
type PSubcategory PBuiltinList = PUnsafeLiftDecl
pfmap = phoistAcyclic $ plam $ \f t -> pmap # f # t
-- | @since 1.0.0
instance forall (s :: KeyGuarantees) (k :: S -> Type). (PIsData k) => PFunctor (PMap s k) where
type PSubcategory (PMap s k) = PIsData
pfmap = psecond
| @since 3.1.0
instance PFunctor (PPair a) where
type PSubcategory (PPair a) = Plut
pfmap = psecond
| @since 3.1.0
instance PFunctor (PEither e) where
type PSubcategory (PEither e) = Plut
pfmap = psecond
{- | Infix, 'Term'-lifted version of 'pfconst'.
@since 1.0.0
-}
(#<$) ::
forall (f :: (S -> Type) -> S -> Type) (a :: S -> Type) (b :: S -> Type) (s :: S).
(PFunctor f, PSubcategory f a, PSubcategory f b) =>
Term s a ->
Term s (f b) ->
Term s (f a)
x #<$ f = pfconst # x # f
infixl 4 #<$
{- | Flipped version of '#<$'.
@since 1.0.0
-}
(#$>) ::
forall (f :: (S -> Type) -> S -> Type) (a :: S -> Type) (b :: S -> Type) (s :: S).
(PFunctor f, PSubcategory f a, PSubcategory f b) =>
Term s (f a) ->
Term s b ->
Term s (f b)
(#$>) = flip (#<$)
infixl 4 #$>
{- | Infix, 'Term'-level version of 'pfmap'.
@since 1.0.0
-}
(#<$>) ::
forall (f :: (S -> Type) -> S -> Type) (a :: S -> Type) (b :: S -> Type) (s :: S).
(PFunctor f, PSubcategory f a, PSubcategory f b) =>
Term s (a :--> b) ->
Term s (f a) ->
Term s (f b)
f #<$> t = pfmap # f # t
infixl 4 #<$>
| Flipped version of ' # < $ > ' .
@since 1.0.0
@since 1.0.0
-}
(#<&>) ::
forall (f :: (S -> Type) -> S -> Type) (a :: S -> Type) (b :: S -> Type) (s :: S).
(PFunctor f, PSubcategory f a, PSubcategory f b) =>
Term s (f a) ->
Term s (a :--> b) ->
Term s (f b)
(#<&>) = flip (#<$>)
infixl 1 #<&>
| Erases every location in the input .
@since 1.2.0
@since 1.2.0
-}
pvoid ::
forall (f :: (S -> Type) -> S -> Type) (a :: S -> Type) (b :: S -> Type) (s :: S).
(PFunctor f, PSubcategory f a, PSubcategory f b, PBoring b) =>
Term s (f a) ->
Term s (f b)
pvoid t = pfconst # pboring # t
| Similar to ' PFunctor ' , but is covariant in /two/ parameters instead of one .
This means that types like ' PEither ' do n't need to be partially applied , as
is the case with ' PFunctor ' . Formally , this represents a Plutarch - level
covariant bifunctor ; just as with ' PFunctor ' however , it is defined over a
subcategory of ' Plut ' .
Similarly to ' PFunctor ' , this is the Plutarch equivalent of .
= Laws
Formally , must be a bifunctor on a subcategory of @Plut@ , as described by
' PSubcategoryLeft ' ( for the first parameter ) and ' PSubcategoryRight ' ( for the
second ) . For ' pbimap ' , this means the following must hold :
* ' ' # ' ' Plutarch.Extra.Category.pidentity ' ' # '
' Plutarch . Extra . Category.pidentity'@ @=@
@'Plutarch . Extra . Category.pidentity'@
* @'pbimap ' ' # ' ( f1 ' Plutarch . Extra . Category . # > > > ' f2 ) ' # ' ( g1
' Plutarch . Extra . Category . # > > > ' g2)@ @=@ @('pbimap ' ' # ' f1 ' # ' g1 )
' Plutarch . Extra . Category . # > > > ' ( ' pbimap ' ' # ' f2 ' # ' Furthermore , @'PSubcategoryLeft f ' ~ ' PSubcategoryRight ' f@ should hold ; this
may be required in the future . If both @'PSubcategoryLeft ' f@ and
@'PSubcategoryRight ' f@ are ' Plut ' , the second law is a free theorem ; however ,
this does not hold in general .
If you define ' pfirst ' and ' psecond ' , the following must also hold :
* @'pfirst ' ' # ' ' Plutarch . Extra . Category.pidentity'@ @=@ @'psecond ' ' # '
' Plutarch . Extra . Category.pidentity'@ @=@
@'Plutarch . Extra . Category.pidentity'@
* @'pfirst ' ' # ' f@ @=@ @'pbimap ' ' # ' f ' # '
' Plutarch . Extra . Category.pidentity'@
* @'psecond ' ' # ' f@ @=@ @'pbimap ' ' # ' ' Plutarch.Extra.Category.pidentity ' ' # '
f@
* @'pfirst ' ' # ' ( f ' Plutarch . Extra . Category . # > > > ' g)@ @=@ @('pfirst ' ' # ' f )
' Plutarch . Extra . Category . # > > > ' ( ' pfirst ' ' # ' g)@
* @'psecond ' ' # ' ( f ' Plutarch . Extra . Category . # > > > ' g)@ @=@ @('psecond ' ' # ' f )
' Plutarch . Extra . Category . # > > > ' ( ' psecond ' ' # ' g)@
If you define ' pfirst ' and ' psecond ' /instead/ of ' pbimap ' , the following
must also hold :
* @('pfirst ' ' # ' f ) ' Plutarch . Extra . Category . # > > > ' ( ' psecond ' ' # ' g)@ @=@
@('psecond ' ' # ' g ) ' Plutarch . Extra . Category . # > > > ' ( ' pfirst ' ' # ' f)@ @=@
@'pbimap ' ' # ' f ' # ' g@
= Note
If @f a@ is also an instance of ' PFunctor ' , ' f ~
' PSubcategory ' ( f a)@ should hold , and we should have @'pfmap ' = ' psecond'@ ;
once again , this is not currently enforced , but may be in the future .
@since 1.0.0
This means that types like 'PEither' don't need to be partially applied, as
is the case with 'PFunctor'. Formally, this represents a Plutarch-level
covariant bifunctor; just as with 'PFunctor' however, it is defined over a
subcategory of 'Plut'.
Similarly to 'PFunctor', this is the Plutarch equivalent of 'Bifunctor'.
= Laws
Formally, @f@ must be a bifunctor on a subcategory of @Plut@, as described by
'PSubcategoryLeft' (for the first parameter) and 'PSubcategoryRight' (for the
second). For 'pbimap', this means the following must hold:
* @'pbimap' '#' 'Plutarch.Extra.Category.pidentity' '#'
'Plutarch.Extra.Category.pidentity'@ @=@
@'Plutarch.Extra.Category.pidentity'@
* @'pbimap' '#' (f1 'Plutarch.Extra.Category.#>>>' f2) '#' (g1
'Plutarch.Extra.Category.#>>>' g2)@ @=@ @('pbimap' '#' f1 '#' g1)
'Plutarch.Extra.Category.#>>>' ('pbimap' '#' f2 '#' g2)@
Furthermore, @'PSubcategoryLeft f' ~ 'PSubcategoryRight' f@ should hold; this
may be required in the future. If both @'PSubcategoryLeft' f@ and
@'PSubcategoryRight' f@ are 'Plut', the second law is a free theorem; however,
this does not hold in general.
If you define 'pfirst' and 'psecond', the following must also hold:
* @'pfirst' '#' 'Plutarch.Extra.Category.pidentity'@ @=@ @'psecond' '#'
'Plutarch.Extra.Category.pidentity'@ @=@
@'Plutarch.Extra.Category.pidentity'@
* @'pfirst' '#' f@ @=@ @'pbimap' '#' f '#'
'Plutarch.Extra.Category.pidentity'@
* @'psecond' '#' f@ @=@ @'pbimap' '#' 'Plutarch.Extra.Category.pidentity' '#'
f@
* @'pfirst' '#' (f 'Plutarch.Extra.Category.#>>>' g)@ @=@ @('pfirst' '#' f)
'Plutarch.Extra.Category.#>>>' ('pfirst' '#' g)@
* @'psecond' '#' (f 'Plutarch.Extra.Category.#>>>' g)@ @=@ @('psecond' '#' f)
'Plutarch.Extra.Category.#>>>' ('psecond' '#' g)@
If you define 'pfirst' and 'psecond' /instead/ of 'pbimap', the following
must also hold:
* @('pfirst' '#' f) 'Plutarch.Extra.Category.#>>>' ('psecond' '#' g)@ @=@
@('psecond' '#' g) 'Plutarch.Extra.Category.#>>>' ('pfirst' '#' f)@ @=@
@'pbimap' '#' f '#' g@
= Note
If @f a@ is also an instance of 'PFunctor', @'PSubcategoryRight' f ~
'PSubcategory' (f a)@ should hold, and we should have @'pfmap' = 'psecond'@;
once again, this is not currently enforced, but may be in the future.
@since 1.0.0
-}
class PBifunctor (f :: (S -> Type) -> (S -> Type) -> S -> Type) where
| Similar to ' PSubcategory ' , but for only the first parameter of @f@. See
-- the documentation on 'PSubcategory' for common choices here.
type PSubcategoryLeft f :: (S -> Type) -> Constraint
| Similar to ' PSubcategory ' , but for only the second parameter of @f@.
-- See the documentation on 'PSubcategory' for common choices here.
type PSubcategoryRight f :: (S -> Type) -> Constraint
# MINIMAL pbimap | pfirst , psecond #
pbimap ::
forall (a :: S -> Type) (b :: S -> Type) (c :: S -> Type) (d :: S -> Type) (s :: S).
( PSubcategoryLeft f a
, PSubcategoryLeft f c
, PSubcategoryRight f b
, PSubcategoryRight f d
) =>
Term s ((a :--> c) :--> (b :--> d) :--> f a b :--> f c d)
pbimap = phoistAcyclic $ plam $ \f g t -> pfirst # f # (psecond # g # t)
pfirst ::
forall (a :: S -> Type) (b :: S -> Type) (c :: S -> Type) (s :: S).
( PSubcategoryLeft f a
, PSubcategoryLeft f c
, PSubcategoryRight f b
) =>
Term s ((a :--> c) :--> f a b :--> f c b)
pfirst = phoistAcyclic $ plam $ \f t -> pbimap # f # pidentity # t
psecond ::
forall (a :: S -> Type) (b :: S -> Type) (d :: S -> Type) (s :: S).
( PSubcategoryLeft f a
, PSubcategoryRight f b
, PSubcategoryRight f d
) =>
Term s ((b :--> d) :--> f a b :--> f a d)
psecond = phoistAcyclic $ plam $ \g t -> pbimap # pidentity # g # t
| @since 3.1.0
instance PBifunctor PPair where
type PSubcategoryLeft PPair = Plut
type PSubcategoryRight PPair = Plut
pbimap = phoistAcyclic $
plam $ \f g t -> unTermCont $ do
PPair x y <- pmatchC t
pure . pcon . PPair (f # x) $ g # y
| @since 3.1.0
instance PBifunctor PEither where
type PSubcategoryLeft PEither = Plut
type PSubcategoryRight PEither = Plut
pbimap = phoistAcyclic $
plam $ \f g t -> unTermCont $ do
t' <- pmatchC t
pure . pcon $ case t' of
PLeft x -> PLeft $ f # x
PRight y -> PRight $ g # y
-- | @since 1.0.0
instance forall (keys :: KeyGuarantees). PBifunctor (PMap keys) where
type PSubcategoryLeft (PMap keys) = PIsData
type PSubcategoryRight (PMap keys) = PIsData
pbimap ::
forall (a :: S -> Type) (b :: S -> Type) (c :: S -> Type) (d :: S -> Type) (s :: S).
(PIsData a, PIsData b, PIsData c, PIsData d) =>
> b ) : -- > ( c : -- > d ) : -- > PMap keys a c : -- > PMap keys b d )
pbimap = phoistAcyclic $
plam $ \f g t -> unTermCont $ do
PMap t' <- pmatchC t
pure . pcon . PMap $ pfmap # (go # f # g) # t'
where
go ::
forall (s' :: S).
Term
s'
( (a :--> b)
:--> (c :--> d)
:--> PBuiltinPair (PAsData a) (PAsData c)
:--> PBuiltinPair (PAsData b) (PAsData d)
)
go = phoistAcyclic $
plam $ \f g p -> unTermCont $ do
k <- pletC (pfromData $ pfstBuiltin # p)
v <- pletC (pfromData $ psndBuiltin # p)
k' <- pletC (f # k)
v' <- pletC (g # v)
pure $ ppairDataBuiltin # pdata k' # pdata v'
| null | https://raw.githubusercontent.com/Liqwid-Labs/liqwid-plutarch-extra/8741fde489a0123cab0b90e01d0ac207d306af33/src/Plutarch/Extra/Functor.hs | haskell | * Type classes
* Functions
# MINIMAL pfmap #
over\'.
Common choices for this are:
@Data@-encodable\'
Haskell-level equivalent\'
> b) :--> f a :--> f b)
| Replace all values to be computed with a fixed value. Defaults to
@'pfmap' 'pconst'@, but could be more efficient for some 'PFunctor's.
@since 1.2.0
> f b :--> f a)
| @since 1.0.0
| @since 1.0.0
| @since 1.0.0
| Infix, 'Term'-lifted version of 'pfconst'.
@since 1.0.0
| Flipped version of '#<$'.
@since 1.0.0
| Infix, 'Term'-level version of 'pfmap'.
@since 1.0.0
> b) ->
> b) ->
the documentation on 'PSubcategory' for common choices here.
See the documentation on 'PSubcategory' for common choices here.
> c) :--> (b :--> d) :--> f a b :--> f c d)
> c) :--> f a b :--> f c b)
> d) :--> f a b :--> f a d)
| @since 1.0.0
> ( c : -- > d ) : -- > PMap keys a c : -- > PMap keys b d )
> b)
> (c :--> d)
> PBuiltinPair (PAsData a) (PAsData c)
> PBuiltinPair (PAsData b) (PAsData d) | module Plutarch.Extra.Functor (
Plut,
PFunctor (..),
PBifunctor (..),
(#<$),
(#$>),
(#<$>),
(#<&>),
pvoid,
) where
import Data.Kind (Constraint)
import Plutarch.Api.V1.AssocMap (KeyGuarantees, PMap (PMap))
import Plutarch.Api.V1.Maybe (PMaybeData (PDJust, PDNothing))
import Plutarch.Builtin (ppairDataBuiltin)
import Plutarch.Extra.Boring (PBoring (pboring))
import Plutarch.Extra.Function (pconst, pidentity)
import Plutarch.Extra.TermCont (pletC, pmatchC)
import Plutarch.Lift (PUnsafeLiftDecl)
| Describes the entire category of Plutarch types , with arrows being Plutarch
functions . Since the typical name for the category of types is
@Hask@ , we follow this trend with naming , choosing ' Plut ' .
Use this for ' PSubcategory ' if you want /any/ Plutarch type to be available .
@since 3.1.0
functions. Since the typical name for the category of Haskell types is
@Hask@, we follow this trend with naming, choosing 'Plut'.
Use this for 'PSubcategory' if you want /any/ Plutarch type to be available.
@since 3.1.0
-}
class Plut (a :: S -> Type)
| @since 3.1.0
instance Plut a
| Describes a Plutarch - level covariant endofunctor . However , unlike in
Haskell , the endofunctor is defined over a subcategory of @Plut@ , rather than
all of it .
Put another way , this is the Plutarch equivalent to ' Functor ' , but unlike in
Haskell , instead of requiring full parametricity , we are allowed to constrain
what we are parametric over .
= Laws
Formally , must be an endofunctor on a subcategory of @Plut@ , as described
by the ' PSubcategory ' constraint . This means that the following must hold :
* @'pfmap ' ' # ' ' Plutarch . Extra . Category.pidentity'@ @=@
@'Plutarch . Extra . Category.pidentity'@
* @'pfmap ' ' # ' ( f ' Plutarch . Extra . Category . # > > > ' g)@ @=@ @('pfmap ' ' # ' f )
' Plutarch . Extra . Category . # > > > ' ( ' pfmap ' ' # ' g)@
If @'PSubcategory ' f@ is ' Plut ' ( that is , is defined as an endofunctor on
/all/ of @Plut@ ) , the second law is a free theorem ; however , in any other
case , it may not be .
@since 1.0.0
Haskell, the endofunctor is defined over a subcategory of @Plut@, rather than
all of it.
Put another way, this is the Plutarch equivalent to 'Functor', but unlike in
Haskell, instead of requiring full parametricity, we are allowed to constrain
what we are parametric over.
= Laws
Formally, @f@ must be an endofunctor on a subcategory of @Plut@, as described
by the 'PSubcategory' constraint. This means that the following must hold:
* @'pfmap' '#' 'Plutarch.Extra.Category.pidentity'@ @=@
@'Plutarch.Extra.Category.pidentity'@
* @'pfmap' '#' (f 'Plutarch.Extra.Category.#>>>' g)@ @=@ @('pfmap' '#' f)
'Plutarch.Extra.Category.#>>>' ('pfmap' '#' g)@
If @'PSubcategory' f@ is 'Plut' (that is, @f@ is defined as an endofunctor on
/all/ of @Plut@), the second law is a free theorem; however, in any other
case, it may not be.
@since 1.0.0
-}
class PFunctor (f :: (S -> Type) -> S -> Type) where
| Describes the subcategory of @Plut@ that is an endofunctor on . Put
another way , this describes what kind of types is \'parametric
* ' ' , which means \'parametric over anything of kind @ 'S ' - > ' '
* ' PIsData ' , which means \'parametric over things which are
* ' PUnsafeLiftDecl ' , which means \'parametric over things that have a
type PSubcategory f :: (S -> Type) -> Constraint
pfmap ::
forall (a :: S -> Type) (b :: S -> Type) (s :: S).
(PSubcategory f a, PSubcategory f b) =>
# INLINEABLE pfconst #
pfconst ::
forall (a :: S -> Type) (b :: S -> Type) (s :: S).
(PSubcategory f a, PSubcategory f b) =>
pfconst = phoistAcyclic $ plam $ \x ys -> pfmap # (pconst # x) # ys
| @since 3.1.0
instance PFunctor PMaybe where
type PSubcategory PMaybe = Plut
pfmap = phoistAcyclic $
plam $ \f t -> unTermCont $ do
t' <- pmatchC t
pure . pcon $ case t' of
PNothing -> PNothing
PJust t'' -> PJust $ f # t''
instance PFunctor PMaybeData where
type PSubcategory PMaybeData = PIsData
pfmap = phoistAcyclic $
plam $ \f t -> unTermCont $ do
t' <- pmatchC t
case t' of
PDNothing _ -> pure . pcon . PDNothing $ pdnil
PDJust t'' -> do
x <- pletC (pfromData $ pfield @"_0" # t'')
res <- pletC (f # x)
pure . pcon . PDJust $ pdcons # pdata res # pdnil
| @since 3.1.0
instance PFunctor PList where
type PSubcategory PList = Plut
pfmap = phoistAcyclic $ plam $ \f t -> pmap # f # t
instance PFunctor PBuiltinList where
type PSubcategory PBuiltinList = PUnsafeLiftDecl
pfmap = phoistAcyclic $ plam $ \f t -> pmap # f # t
instance forall (s :: KeyGuarantees) (k :: S -> Type). (PIsData k) => PFunctor (PMap s k) where
type PSubcategory (PMap s k) = PIsData
pfmap = psecond
| @since 3.1.0
instance PFunctor (PPair a) where
type PSubcategory (PPair a) = Plut
pfmap = psecond
| @since 3.1.0
instance PFunctor (PEither e) where
type PSubcategory (PEither e) = Plut
pfmap = psecond
(#<$) ::
forall (f :: (S -> Type) -> S -> Type) (a :: S -> Type) (b :: S -> Type) (s :: S).
(PFunctor f, PSubcategory f a, PSubcategory f b) =>
Term s a ->
Term s (f b) ->
Term s (f a)
x #<$ f = pfconst # x # f
infixl 4 #<$
(#$>) ::
forall (f :: (S -> Type) -> S -> Type) (a :: S -> Type) (b :: S -> Type) (s :: S).
(PFunctor f, PSubcategory f a, PSubcategory f b) =>
Term s (f a) ->
Term s b ->
Term s (f b)
(#$>) = flip (#<$)
infixl 4 #$>
(#<$>) ::
forall (f :: (S -> Type) -> S -> Type) (a :: S -> Type) (b :: S -> Type) (s :: S).
(PFunctor f, PSubcategory f a, PSubcategory f b) =>
Term s (f a) ->
Term s (f b)
f #<$> t = pfmap # f # t
infixl 4 #<$>
| Flipped version of ' # < $ > ' .
@since 1.0.0
@since 1.0.0
-}
(#<&>) ::
forall (f :: (S -> Type) -> S -> Type) (a :: S -> Type) (b :: S -> Type) (s :: S).
(PFunctor f, PSubcategory f a, PSubcategory f b) =>
Term s (f a) ->
Term s (f b)
(#<&>) = flip (#<$>)
infixl 1 #<&>
| Erases every location in the input .
@since 1.2.0
@since 1.2.0
-}
pvoid ::
forall (f :: (S -> Type) -> S -> Type) (a :: S -> Type) (b :: S -> Type) (s :: S).
(PFunctor f, PSubcategory f a, PSubcategory f b, PBoring b) =>
Term s (f a) ->
Term s (f b)
pvoid t = pfconst # pboring # t
| Similar to ' PFunctor ' , but is covariant in /two/ parameters instead of one .
This means that types like ' PEither ' do n't need to be partially applied , as
is the case with ' PFunctor ' . Formally , this represents a Plutarch - level
covariant bifunctor ; just as with ' PFunctor ' however , it is defined over a
subcategory of ' Plut ' .
Similarly to ' PFunctor ' , this is the Plutarch equivalent of .
= Laws
Formally , must be a bifunctor on a subcategory of @Plut@ , as described by
' PSubcategoryLeft ' ( for the first parameter ) and ' PSubcategoryRight ' ( for the
second ) . For ' pbimap ' , this means the following must hold :
* ' ' # ' ' Plutarch.Extra.Category.pidentity ' ' # '
' Plutarch . Extra . Category.pidentity'@ @=@
@'Plutarch . Extra . Category.pidentity'@
* @'pbimap ' ' # ' ( f1 ' Plutarch . Extra . Category . # > > > ' f2 ) ' # ' ( g1
' Plutarch . Extra . Category . # > > > ' g2)@ @=@ @('pbimap ' ' # ' f1 ' # ' g1 )
' Plutarch . Extra . Category . # > > > ' ( ' pbimap ' ' # ' f2 ' # ' Furthermore , @'PSubcategoryLeft f ' ~ ' PSubcategoryRight ' f@ should hold ; this
may be required in the future . If both @'PSubcategoryLeft ' f@ and
@'PSubcategoryRight ' f@ are ' Plut ' , the second law is a free theorem ; however ,
this does not hold in general .
If you define ' pfirst ' and ' psecond ' , the following must also hold :
* @'pfirst ' ' # ' ' Plutarch . Extra . Category.pidentity'@ @=@ @'psecond ' ' # '
' Plutarch . Extra . Category.pidentity'@ @=@
@'Plutarch . Extra . Category.pidentity'@
* @'pfirst ' ' # ' f@ @=@ @'pbimap ' ' # ' f ' # '
' Plutarch . Extra . Category.pidentity'@
* @'psecond ' ' # ' f@ @=@ @'pbimap ' ' # ' ' Plutarch.Extra.Category.pidentity ' ' # '
f@
* @'pfirst ' ' # ' ( f ' Plutarch . Extra . Category . # > > > ' g)@ @=@ @('pfirst ' ' # ' f )
' Plutarch . Extra . Category . # > > > ' ( ' pfirst ' ' # ' g)@
* @'psecond ' ' # ' ( f ' Plutarch . Extra . Category . # > > > ' g)@ @=@ @('psecond ' ' # ' f )
' Plutarch . Extra . Category . # > > > ' ( ' psecond ' ' # ' g)@
If you define ' pfirst ' and ' psecond ' /instead/ of ' pbimap ' , the following
must also hold :
* @('pfirst ' ' # ' f ) ' Plutarch . Extra . Category . # > > > ' ( ' psecond ' ' # ' g)@ @=@
@('psecond ' ' # ' g ) ' Plutarch . Extra . Category . # > > > ' ( ' pfirst ' ' # ' f)@ @=@
@'pbimap ' ' # ' f ' # ' g@
= Note
If @f a@ is also an instance of ' PFunctor ' , ' f ~
' PSubcategory ' ( f a)@ should hold , and we should have @'pfmap ' = ' psecond'@ ;
once again , this is not currently enforced , but may be in the future .
@since 1.0.0
This means that types like 'PEither' don't need to be partially applied, as
is the case with 'PFunctor'. Formally, this represents a Plutarch-level
covariant bifunctor; just as with 'PFunctor' however, it is defined over a
subcategory of 'Plut'.
Similarly to 'PFunctor', this is the Plutarch equivalent of 'Bifunctor'.
= Laws
Formally, @f@ must be a bifunctor on a subcategory of @Plut@, as described by
'PSubcategoryLeft' (for the first parameter) and 'PSubcategoryRight' (for the
second). For 'pbimap', this means the following must hold:
* @'pbimap' '#' 'Plutarch.Extra.Category.pidentity' '#'
'Plutarch.Extra.Category.pidentity'@ @=@
@'Plutarch.Extra.Category.pidentity'@
* @'pbimap' '#' (f1 'Plutarch.Extra.Category.#>>>' f2) '#' (g1
'Plutarch.Extra.Category.#>>>' g2)@ @=@ @('pbimap' '#' f1 '#' g1)
'Plutarch.Extra.Category.#>>>' ('pbimap' '#' f2 '#' g2)@
Furthermore, @'PSubcategoryLeft f' ~ 'PSubcategoryRight' f@ should hold; this
may be required in the future. If both @'PSubcategoryLeft' f@ and
@'PSubcategoryRight' f@ are 'Plut', the second law is a free theorem; however,
this does not hold in general.
If you define 'pfirst' and 'psecond', the following must also hold:
* @'pfirst' '#' 'Plutarch.Extra.Category.pidentity'@ @=@ @'psecond' '#'
'Plutarch.Extra.Category.pidentity'@ @=@
@'Plutarch.Extra.Category.pidentity'@
* @'pfirst' '#' f@ @=@ @'pbimap' '#' f '#'
'Plutarch.Extra.Category.pidentity'@
* @'psecond' '#' f@ @=@ @'pbimap' '#' 'Plutarch.Extra.Category.pidentity' '#'
f@
* @'pfirst' '#' (f 'Plutarch.Extra.Category.#>>>' g)@ @=@ @('pfirst' '#' f)
'Plutarch.Extra.Category.#>>>' ('pfirst' '#' g)@
* @'psecond' '#' (f 'Plutarch.Extra.Category.#>>>' g)@ @=@ @('psecond' '#' f)
'Plutarch.Extra.Category.#>>>' ('psecond' '#' g)@
If you define 'pfirst' and 'psecond' /instead/ of 'pbimap', the following
must also hold:
* @('pfirst' '#' f) 'Plutarch.Extra.Category.#>>>' ('psecond' '#' g)@ @=@
@('psecond' '#' g) 'Plutarch.Extra.Category.#>>>' ('pfirst' '#' f)@ @=@
@'pbimap' '#' f '#' g@
= Note
If @f a@ is also an instance of 'PFunctor', @'PSubcategoryRight' f ~
'PSubcategory' (f a)@ should hold, and we should have @'pfmap' = 'psecond'@;
once again, this is not currently enforced, but may be in the future.
@since 1.0.0
-}
class PBifunctor (f :: (S -> Type) -> (S -> Type) -> S -> Type) where
| Similar to ' PSubcategory ' , but for only the first parameter of @f@. See
type PSubcategoryLeft f :: (S -> Type) -> Constraint
| Similar to ' PSubcategory ' , but for only the second parameter of @f@.
type PSubcategoryRight f :: (S -> Type) -> Constraint
# MINIMAL pbimap | pfirst , psecond #
pbimap ::
forall (a :: S -> Type) (b :: S -> Type) (c :: S -> Type) (d :: S -> Type) (s :: S).
( PSubcategoryLeft f a
, PSubcategoryLeft f c
, PSubcategoryRight f b
, PSubcategoryRight f d
) =>
pbimap = phoistAcyclic $ plam $ \f g t -> pfirst # f # (psecond # g # t)
pfirst ::
forall (a :: S -> Type) (b :: S -> Type) (c :: S -> Type) (s :: S).
( PSubcategoryLeft f a
, PSubcategoryLeft f c
, PSubcategoryRight f b
) =>
pfirst = phoistAcyclic $ plam $ \f t -> pbimap # f # pidentity # t
psecond ::
forall (a :: S -> Type) (b :: S -> Type) (d :: S -> Type) (s :: S).
( PSubcategoryLeft f a
, PSubcategoryRight f b
, PSubcategoryRight f d
) =>
psecond = phoistAcyclic $ plam $ \g t -> pbimap # pidentity # g # t
| @since 3.1.0
instance PBifunctor PPair where
type PSubcategoryLeft PPair = Plut
type PSubcategoryRight PPair = Plut
pbimap = phoistAcyclic $
plam $ \f g t -> unTermCont $ do
PPair x y <- pmatchC t
pure . pcon . PPair (f # x) $ g # y
| @since 3.1.0
instance PBifunctor PEither where
type PSubcategoryLeft PEither = Plut
type PSubcategoryRight PEither = Plut
pbimap = phoistAcyclic $
plam $ \f g t -> unTermCont $ do
t' <- pmatchC t
pure . pcon $ case t' of
PLeft x -> PLeft $ f # x
PRight y -> PRight $ g # y
instance forall (keys :: KeyGuarantees). PBifunctor (PMap keys) where
type PSubcategoryLeft (PMap keys) = PIsData
type PSubcategoryRight (PMap keys) = PIsData
pbimap ::
forall (a :: S -> Type) (b :: S -> Type) (c :: S -> Type) (d :: S -> Type) (s :: S).
(PIsData a, PIsData b, PIsData c, PIsData d) =>
pbimap = phoistAcyclic $
plam $ \f g t -> unTermCont $ do
PMap t' <- pmatchC t
pure . pcon . PMap $ pfmap # (go # f # g) # t'
where
go ::
forall (s' :: S).
Term
s'
)
go = phoistAcyclic $
plam $ \f g p -> unTermCont $ do
k <- pletC (pfromData $ pfstBuiltin # p)
v <- pletC (pfromData $ psndBuiltin # p)
k' <- pletC (f # k)
v' <- pletC (g # v)
pure $ ppairDataBuiltin # pdata k' # pdata v'
|
7c60e42c6ff7574eb7585d481448401ec4d8aba423954665c01614c71cdc57d3 | heroku/lein-heroku | middleware.clj | (ns happy-path.middleware
(:require [happy-path.layout :refer [*app-context* error-page]]
[taoensso.timbre :as timbre]
[environ.core :refer [env]]
[selmer.middleware :refer [wrap-error-page]]
[prone.middleware :refer [wrap-exceptions]]
[ring-ttl-session.core :refer [ttl-memory-store]]
[ring.middleware.reload :as reload]
[ring.middleware.webjars :refer [wrap-webjars]]
[ring.middleware.defaults :refer [site-defaults wrap-defaults]]
[ring.middleware.anti-forgery :refer [wrap-anti-forgery]]
[ring.middleware.format :refer [wrap-restful-format]]))
(defn wrap-context [handler]
(fn [request]
(binding [*app-context*
(if-let [context (:servlet-context request)]
;; If we're not inside a servlet environment
;; (for example when using mock requests), then
;; .getContextPath might not exist
(try (.getContextPath context)
(catch IllegalArgumentException _ context))
;; if the context is not specified in the request
;; we check if one has been specified in the environment
;; instead
(:app-context env))]
(handler request))))
(defn wrap-internal-error [handler]
(fn [req]
(try
(handler req)
(catch Throwable t
(timbre/error t)
(error-page {:status 500
:title "Something very bad has happened!"
:message "We've dispatched a team of highly trained gnomes to take care of the problem."})))))
(defn wrap-dev [handler]
(if (env :dev)
(-> handler
reload/wrap-reload
wrap-error-page
wrap-exceptions)
handler))
(defn wrap-csrf [handler]
(wrap-anti-forgery
handler
{:error-response
(error-page
{:status 403
:title "Invalid anti-forgery token"})}))
(defn wrap-formats [handler]
(wrap-restful-format handler {:formats [:json-kw :transit-json :transit-msgpack]}))
(defn wrap-base [handler]
(-> handler
wrap-dev
wrap-formats
wrap-webjars
(wrap-defaults
(-> site-defaults
(assoc-in [:security :anti-forgery] false)
(assoc-in [:session :store] (ttl-memory-store (* 60 30)))))
wrap-context
wrap-internal-error))
| null | https://raw.githubusercontent.com/heroku/lein-heroku/337a56787b42b7291e519090fa9bb7d96470667c/it/happy-path/src/happy_path/middleware.clj | clojure | If we're not inside a servlet environment
(for example when using mock requests), then
.getContextPath might not exist
if the context is not specified in the request
we check if one has been specified in the environment
instead | (ns happy-path.middleware
(:require [happy-path.layout :refer [*app-context* error-page]]
[taoensso.timbre :as timbre]
[environ.core :refer [env]]
[selmer.middleware :refer [wrap-error-page]]
[prone.middleware :refer [wrap-exceptions]]
[ring-ttl-session.core :refer [ttl-memory-store]]
[ring.middleware.reload :as reload]
[ring.middleware.webjars :refer [wrap-webjars]]
[ring.middleware.defaults :refer [site-defaults wrap-defaults]]
[ring.middleware.anti-forgery :refer [wrap-anti-forgery]]
[ring.middleware.format :refer [wrap-restful-format]]))
(defn wrap-context [handler]
(fn [request]
(binding [*app-context*
(if-let [context (:servlet-context request)]
(try (.getContextPath context)
(catch IllegalArgumentException _ context))
(:app-context env))]
(handler request))))
(defn wrap-internal-error [handler]
(fn [req]
(try
(handler req)
(catch Throwable t
(timbre/error t)
(error-page {:status 500
:title "Something very bad has happened!"
:message "We've dispatched a team of highly trained gnomes to take care of the problem."})))))
(defn wrap-dev [handler]
(if (env :dev)
(-> handler
reload/wrap-reload
wrap-error-page
wrap-exceptions)
handler))
(defn wrap-csrf [handler]
(wrap-anti-forgery
handler
{:error-response
(error-page
{:status 403
:title "Invalid anti-forgery token"})}))
(defn wrap-formats [handler]
(wrap-restful-format handler {:formats [:json-kw :transit-json :transit-msgpack]}))
(defn wrap-base [handler]
(-> handler
wrap-dev
wrap-formats
wrap-webjars
(wrap-defaults
(-> site-defaults
(assoc-in [:security :anti-forgery] false)
(assoc-in [:session :store] (ttl-memory-store (* 60 30)))))
wrap-context
wrap-internal-error))
|
5cd24019cea41a1e43f9f3cc31d7786a103667532b6ac0132c1095bd0fbb4bed | xapi-project/xen-api-libs | base64.mli |
* Copyright ( C ) 2006 - 2009 Citrix Systems Inc.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation ; version 2.1 only . with the special
* exception on linking described in file LICENSE .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
* Copyright (C) 2006-2009 Citrix Systems Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation; version 2.1 only. with the special
* exception on linking described in file LICENSE.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*)
* decode a string encoded in base64 . Will leave trailing NULLs on the string
padding it out to a multiple of 3 characters
padding it out to a multiple of 3 characters *)
val decode: string -> string
(** encode a string into base64 *)
val encode: string -> string
| null | https://raw.githubusercontent.com/xapi-project/xen-api-libs/d603ee2b8456bc2aac99b0a4955f083e22f4f314/stdext/base64.mli | ocaml | * encode a string into base64 |
* Copyright ( C ) 2006 - 2009 Citrix Systems Inc.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation ; version 2.1 only . with the special
* exception on linking described in file LICENSE .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
* Copyright (C) 2006-2009 Citrix Systems Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation; version 2.1 only. with the special
* exception on linking described in file LICENSE.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*)
* decode a string encoded in base64 . Will leave trailing NULLs on the string
padding it out to a multiple of 3 characters
padding it out to a multiple of 3 characters *)
val decode: string -> string
val encode: string -> string
|
d001abf08324ba5410393c702f8a0feb9307e2c31eb00d2ee2b41abcc43c20c7 | tommaisey/aeon | offset-out.help.scm | ;; (offset-out bufferindex inputs)
;; output signal to a bus, the sample offset within the bus is kept
exactly . This is used where sample accurate output is needed .
(audition
(mrg2 (offset-out 0 (impulse ar 5 0))
(out 0 (mul (sin-osc ar 60 0) 0.1))))
(audition
(mrg2 (out 0 (impulse ar 5 0))
(out 0 (mul (sin-osc ar 60 0) 0.1))))
| null | https://raw.githubusercontent.com/tommaisey/aeon/80744a7235425c47a061ec8324d923c53ebedf15/libs/third-party/sc3/rsc3/help/ugen/io/offset-out.help.scm | scheme | (offset-out bufferindex inputs)
output signal to a bus, the sample offset within the bus is kept |
exactly . This is used where sample accurate output is needed .
(audition
(mrg2 (offset-out 0 (impulse ar 5 0))
(out 0 (mul (sin-osc ar 60 0) 0.1))))
(audition
(mrg2 (out 0 (impulse ar 5 0))
(out 0 (mul (sin-osc ar 60 0) 0.1))))
|
fbc99de9260c934f06fd5ecb72ae59e43e3466c2ea714c89b299a3b2c17165b4 | monadplus/twitch-game | Lib.hs | module Twitch.Lib (module M) where
import Twitch.Internal.Client as M
import Twitch.Internal.Parser as M
import Twitch.Internal.Constants as M
import Twitch.Internal.Types as M
| null | https://raw.githubusercontent.com/monadplus/twitch-game/39c73aa76003e778e32e88f1b2b9add68f90799f/src/Twitch/Lib.hs | haskell | module Twitch.Lib (module M) where
import Twitch.Internal.Client as M
import Twitch.Internal.Parser as M
import Twitch.Internal.Constants as M
import Twitch.Internal.Types as M
| |
2f56317ec5fad0fc308c99bf4864aa00ea2004c70f1114929678ddfcd08b5af4 | basho/riak_test | ts_cluster_keys_SUITE.erl | %% -------------------------------------------------------------------
%%
Copyright ( c ) 2016 Basho Technologies , Inc.
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%s
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% Tests for the different combinations of keys supported by
Riak Time Series .
%%
%% -------------------------------------------------------------------
-module(ts_cluster_keys_SUITE).
-compile(export_all).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
%%--------------------------------------------------------------------
%% COMMON TEST CALLBACK FUNCTIONS
%%--------------------------------------------------------------------
suite() ->
[{timetrap,{minutes,10}}].
init_per_suite(Config) ->
[Node|_] = Cluster = ts_setup:start_cluster(3),
Pid = rt:pbc(Node),
% create tables and populate them with data
create_data_def_1(Pid),
create_data_def_2(Pid),
create_data_def_3(Pid),
create_data_def_4(Pid),
create_data_def_5(Pid),
create_data_def_8(Pid),
all_booleans_create_data(Pid),
all_timestamps_create_data(Pid),
all_types_create_data(Pid),
create_data_def_desc_on_quantum_table(Pid),
create_data_def_desc_on_varchar_table(Pid),
table_def_desc_on_additional_local_key_field_create_data(Pid),
[{cluster, Cluster} | Config].
end_per_suite(_Config) ->
ok.
init_per_group(_GroupName, Config) ->
Config.
end_per_group(_GroupName, _Config) ->
ok.
init_per_testcase(_TestCase, Config) ->
Config.
end_per_testcase(_TestCase, _Config) ->
ok.
groups() ->
[].
all() ->
rt:grep_test_functions(?MODULE).
client_pid(Ctx) ->
[Node|_] = proplists:get_value(cluster, Ctx),
rt:pbc(Node).
run_query(Ctx, Query) ->
riakc_ts:query(client_pid(Ctx), Query).
%%%
TABLE 1
%%%
create_data_def_1(Pid) ->
ts_data:assert_row_sets({ok, {[],[]}},riakc_ts:query(Pid, table_def_1())),
ok = riakc_ts:put(Pid, <<"table1">>, [{1,1,N,1} || N <- lists:seq(1,6000)]).
column_names_def_1() ->
[<<"a">>, <<"b">>, <<"c">>, <<"d">>].
table_def_1() ->
"CREATE TABLE table1 ("
"a SINT64 NOT NULL, "
"b SINT64 NOT NULL, "
"c TIMESTAMP NOT NULL, "
"d SINT64 NOT NULL, "
"PRIMARY KEY ((a,b,quantum(c, 1, 's')), a,b,c,d))".
select_exclusive_def_1_test(Ctx) ->
Query =
"SELECT * FROM table1 WHERE a = 1 AND b = 1 AND c > 0 AND c < 11",
Results =
[{1,1,N,1} || N <- lists:seq(1,10)],
ts_data:assert_row_sets(
{ok, {column_names_def_1(), Results}},
run_query(Ctx, Query)
).
select_exclusive_def_1_2_test(Ctx) ->
Query =
"SELECT * FROM table1 WHERE a = 1 AND b = 1 AND c > 44 AND c < 54",
Results =
[{1,1,N,1} || N <- lists:seq(45,53)],
ts_data:assert_row_sets(
{ok, {column_names_def_1(), Results}},
run_query(Ctx, Query)
).
select_exclusive_def_1_across_quanta_1_test(Ctx) ->
Query =
"SELECT * FROM table1 WHERE a = 1 AND b = 1 AND c > 500 AND c < 1500",
Results =
[{1,1,N,1} || N <- lists:seq(501,1499)],
ts_data:assert_row_sets(
{ok, {column_names_def_1(), Results}},
run_query(Ctx, Query)
).
%% Across more quanta
select_exclusive_def_1_across_quanta_2_test(Ctx) ->
Query =
"SELECT * FROM table1 WHERE a = 1 AND b = 1 AND c > 500 AND c < 4500",
Results =
[{1,1,N,1} || N <- lists:seq(501,4499)],
ts_data:assert_row_sets(
{ok, {column_names_def_1(), Results}},
run_query(Ctx, Query)
).
select_inclusive_def_1_test(Ctx) ->
Query =
"SELECT * FROM table1 WHERE a = 1 AND b = 1 AND c >= 11 AND c <= 20",
Results =
[{1,1,N,1} || N <- lists:seq(11,20)],
ts_data:assert_row_sets(
{ok, {column_names_def_1(), Results}},
run_query(Ctx, Query)
).
%% Missing an a
where_clause_must_cover_the_partition_key_missing_a_test(Ctx) ->
Query =
"SELECT * FROM table1 WHERE b = 1 AND c > 0 AND c < 11",
?assertMatch(
{error, {1001,<<_/binary>>}},
run_query(Ctx, Query)
).
%% Missing a b
where_clause_must_cover_the_partition_key_missing_b_test(Ctx) ->
Query =
"SELECT * FROM table1 WHERE a = 1 AND c > 0 AND c < 11",
?assertMatch(
{error, {1001,<<_/binary>>}},
run_query(Ctx, Query)
).
%% Missing an c, the the quantum
where_clause_must_cover_the_partition_key_missing_c_test(Ctx) ->
Query =
"SELECT * FROM table1 WHERE b = 1",
?assertMatch(
{error, {1001,<<_/binary>>}},
run_query(Ctx, Query)
).
%%%
TABLE 2 ( same columns as table 1 )
%%%
create_data_def_2(Pid) ->
ts_data:assert_row_sets({ok, {[],[]}}, riakc_ts:query(Pid, table_def_2())),
ok = riakc_ts:put(Pid, <<"table2">>, [{N,1,1,1} || N <- lists:seq(1,200)]).
table_def_2() ->
"CREATE TABLE table2 ("
"a TIMESTAMP NOT NULL, "
"b SINT64 NOT NULL, "
"c SINT64 NOT NULL, "
"d SINT64 NOT NULL, "
"PRIMARY KEY ((quantum(a, 1, 's')), a,b,c,d))".
select_exclusive_def_2_test(Ctx) ->
Query =
"SELECT * FROM table2 WHERE a > 0 AND a < 11",
Results =
[{N,1,1,1} || N <- lists:seq(1,10)],
ts_data:assert_row_sets(
{ok, {column_names_def_1(), Results}},
run_query(Ctx, Query)
).
select_inclusive_def_2_test(Ctx) ->
Query =
"SELECT * FROM table2 WHERE a >= 11 AND a <= 20",
Results =
[{N,1,1,1} || N <- lists:seq(11,20)],
ts_data:assert_row_sets(
{ok, {column_names_def_1(), Results}},
run_query(Ctx, Query)
).
%%%
TABLE 3 , small key where partition and local are the same
%%%
create_data_def_3(Pid) ->
ts_data:assert_row_sets({ok, {[],[]}}, riakc_ts:query(Pid, table_def_3())),
ok = riakc_ts:put(Pid, <<"table3">>, [{1,N} || N <- lists:seq(1,200)]).
column_names_def_3() ->
[<<"a">>, <<"b">>].
table_def_3() ->
"CREATE TABLE table3 ("
"a SINT64 NOT NULL, "
"b TIMESTAMP NOT NULL, "
"PRIMARY KEY ((a,quantum(b, 1, 's')),a,b))".
select_exclusive_def_3_test(Ctx) ->
Query =
"SELECT * FROM table3 WHERE b > 0 AND b < 11 AND a = 1",
Results =
[{1,N} || N <- lists:seq(1,10)],
ts_data:assert_row_sets(
{ok, {column_names_def_3(), Results}},
run_query(Ctx, Query)
).
select_inclusive_def_3_test(Ctx) ->
Query =
"SELECT * FROM table3 WHERE b >= 11 AND b <= 20 AND a = 1",
Results =
[{1,N} || N <- lists:seq(11,20)],
ts_data:assert_row_sets(
{ok, {column_names_def_3(), Results}},
run_query(Ctx, Query)
).
%%%
%%% TABLE 4, small key where partition and local are the same
%%%
create_data_def_4(Pid) ->
ts_data:assert_row_sets({ok, {[],[]}}, riakc_ts:query(Pid, table_def_4())),
ok = riakc_ts:put(Pid, <<"table4">>, [{1,1,N} || N <- lists:seq(1,200)]).
column_names_def_4() ->
[<<"a">>, <<"b">>, <<"c">>].
table_def_4() ->
"CREATE TABLE table4 ("
"a SINT64 NOT NULL, "
"b SINT64 NOT NULL, "
"c TIMESTAMP NOT NULL, "
"PRIMARY KEY ((a,b,quantum(c, 1, 's')),a,b,c))".
select_exclusive_def_4_test(Ctx) ->
Query =
"SELECT * FROM table4 WHERE a = 1 AND b = 1 AND c > 0 AND c < 11",
Results =
[{1,1,N} || N <- lists:seq(1,10)],
ts_data:assert_row_sets(
{ok, {column_names_def_4(), Results}},
run_query(Ctx, Query)
).
select_inclusive_def_4_test(Ctx) ->
Query =
"SELECT * FROM table4 WHERE a = 1 AND b = 1 AND c >= 11 AND c <= 20",
Results =
[{1,1,N} || N <- lists:seq(11,20)],
ts_data:assert_row_sets(
{ok, {column_names_def_4(), Results}},
run_query(Ctx, Query)
).
%%%
TABLE 5 no quanta
%%%
column_names_def_5() ->
[<<"a">>, <<"b">>, <<"c">>].
table_def_5() ->
"CREATE TABLE table5 ("
"a SINT64 NOT NULL, "
"b SINT64 NOT NULL, "
"c TIMESTAMP NOT NULL, "
"PRIMARY KEY ((a,b,c),a,b,c))".
create_data_def_5(Pid) ->
ts_data:assert_row_sets({ok, {[],[]}}, riakc_ts:query(Pid, table_def_5())),
ok = riakc_ts:put(Pid, <<"table5">>, [{1,1,N} || N <- lists:seq(1,200)]).
select_def_5_test(Ctx) ->
Query =
"SELECT * FROM table5 WHERE a = 1 AND b = 1 AND c = 20",
ts_data:assert_row_sets(
{ok, {column_names_def_5(), [{1,1,20}]}},
run_query(Ctx, Query)
).
%%%
%%% Tests for where clause filters on additional fields in the local key.
%%%
create_data_def_8(Pid) ->
ts_data:assert_row_sets(
{ok, {[],[]}},
riakc_ts:query(Pid,
"CREATE TABLE table8 ("
"a SINT64 NOT NULL, "
"b SINT64 NOT NULL, "
"c TIMESTAMP NOT NULL, "
"d SINT64 NOT NULL, "
"PRIMARY KEY ((a,b,quantum(c, 1, 's')), a,b,c,d))"
)),
ok = riakc_ts:put(Pid, <<"table8">>, [{1,1,N,N} || N <- lists:seq(1,6000)]).
d_equal_than_filter_test(Ctx) ->
Query =
"SELECT * FROM table8 "
"WHERE a = 1 AND b = 1 AND c >= 2500 AND c <= 4500 AND d = 3000",
ts_data:assert_row_sets(
{rt_ignore_columns, [{1,1,3000,3000}]},
run_query(Ctx, Query)
).
d_greater_than_filter_test(Ctx) ->
Query =
"SELECT * FROM table8 "
"WHERE a = 1 AND b = 1 AND c >= 2500 AND c <= 4500 AND d > 3000",
Results =
[{1,1,N,N} || N <- lists:seq(3001,4500)],
ts_data:assert_row_sets(
{rt_ignore_columns, Results},
run_query(Ctx, Query)
).
d_greater_or_equal_to_filter_test(Ctx) ->
Query =
"SELECT * FROM table8 "
"WHERE a = 1 AND b = 1 AND c >= 2500 AND c <= 4500 AND d >= 3000",
Results =
[{1,1,N,N} || N <- lists:seq(3000,4500)],
ts_data:assert_row_sets(
{rt_ignore_columns, Results},
run_query(Ctx, Query)
).
d_not_filter_test(Ctx) ->
Query =
"SELECT * FROM table8 "
"WHERE a = 1 AND b = 1 AND c >= 2500 AND c <= 4500 AND d != 3000",
Results =
[{1,1,N,N} || N <- lists:seq(2500,4500), N /= 3000],
ts_data:assert_row_sets(
{rt_ignore_columns, Results},
run_query(Ctx, Query)
).
%%%
%%% ERROR CASE TESTS
%%%
nulls_in_additional_local_key_not_allowed_test(Ctx) ->
?assertMatch(
{error, {1020, <<_/binary>>}},
riakc_ts:query(client_pid(Ctx),
"CREATE TABLE table1 ("
"a SINT64 NOT NULL, "
"b SINT64 NOT NULL, "
"c TIMESTAMP NOT NULL, "
"d SINT64, " %% d is in the local key and set as nullable
"PRIMARY KEY ((a,b,quantum(c, 1, 's')), a,b,c,d))"
)
).
duplicate_fields_in_local_key_1_not_allowed_test(Ctx) ->
?assertMatch(
{error, {1020, <<_/binary>>}},
riakc_ts:query(client_pid(Ctx),
"CREATE TABLE table1 ("
"a SINT64 NOT NULL, "
"b SINT64 NOT NULL, "
"c TIMESTAMP NOT NULL, "
"PRIMARY KEY ((a,b,quantum(c, 1, 's')), a,b,c,c))"
)
).
duplicate_fields_in_local_key_2_not_allowed_test(Ctx) ->
?assertMatch(
{error, {1020, <<_/binary>>}},
riakc_ts:query(client_pid(Ctx),
"CREATE TABLE table1 ("
"a SINT64 NOT NULL, "
"b SINT64 NOT NULL, "
"c TIMESTAMP NOT NULL, "
"d SINT64 NOT NULL, "
"PRIMARY KEY ((a,b,quantum(c, 1, 's')), a,b,c,d,d))"
)
).
duplicate_fields_in_partition_key_1_not_allowed_test(Ctx) ->
?assertMatch(
{error, {1020, <<_/binary>>}},
riakc_ts:query(client_pid(Ctx),
"CREATE TABLE table1 ("
"a SINT64 NOT NULL, "
"b SINT64 NOT NULL, "
"c TIMESTAMP NOT NULL, "
"d SINT64, "
"PRIMARY KEY ((a,a,quantum(c, 1, 's')), a,a,c))"
)
).
multiple_quantum_functions_in_partition_key_not_allowed(Ctx) ->
?assertMatch(
{error, {1020, <<_/binary>>}},
riakc_ts:query(client_pid(Ctx),
"CREATE TABLE table1 ("
"a SINT64 NOT NULL, "
"b TIMESTAMP NOT NULL, "
"c TIMESTAMP NOT NULL, "
"PRIMARY KEY ((a,quantum(b, 1, 's'),quantum(c, 1, 's')), a,b,c))"
)
).
%%%
%%% Keys with different types
%%%
double_pk_double_boolean_lk_test(Ctx) ->
?assertEqual(
{ok, {[],[]}},
riakc_ts:query(client_pid(Ctx),
"CREATE TABLE double_pk_double_boolean_lk_test ("
"a DOUBLE NOT NULL, "
"b BOOLEAN NOT NULL, "
"PRIMARY KEY ((a), a,b))"
)),
Doubles = [N * 0.1 || N <- lists:seq(1,100)],
ok = riakc_ts:put(client_pid(Ctx), <<"double_pk_double_boolean_lk_test">>,
[{F,B} || F <- Doubles, B <- [true,false]]),
Query =
"SELECT * FROM double_pk_double_boolean_lk_test "
"WHERE a = 0.5 AND b = true",
ts_data:assert_row_sets(
{rt_ignore_columns, [{0.5,true}]},
run_query(Ctx, Query)
).
boolean_pk_boolean_double_lk_test(Ctx) ->
?assertEqual(
{ok, {[],[]}},
riakc_ts:query(client_pid(Ctx),
"CREATE TABLE boolean_pk_boolean_double_lk_test ("
"a BOOLEAN NOT NULL, "
"b DOUBLE NOT NULL, "
"PRIMARY KEY ((a), a,b))"
)),
Doubles = [N * 0.1 || N <- lists:seq(1,100)],
ok = riakc_ts:put(client_pid(Ctx), <<"boolean_pk_boolean_double_lk_test">>,
[{B,F} || F <- Doubles, B <- [true,false]]),
Query =
"SELECT * FROM boolean_pk_boolean_double_lk_test "
"WHERE a = false AND b = 0.5",
ts_data:assert_row_sets(
{rt_ignore_columns, [{false,0.5}]},
run_query(Ctx, Query)
).
all_types_create_data(Pid) ->
?assertEqual(
{ok, {[],[]}},
riakc_ts:query(Pid,
"CREATE TABLE all_types ("
"a VARCHAR NOT NULL, "
"b TIMESTAMP NOT NULL, "
"c SINT64 NOT NULL, "
"d BOOLEAN NOT NULL, "
"e DOUBLE NOT NULL, "
"f VARCHAR NOT NULL, "
"g TIMESTAMP NOT NULL, "
"h SINT64 NOT NULL, "
"i BOOLEAN NOT NULL, "
"j DOUBLE NOT NULL, "
"k VARCHAR NOT NULL, "
"l TIMESTAMP NOT NULL, "
"PRIMARY KEY ((a,b,c,d,e,f,g), a,b,c,d,e,f,g,h,i,j,k,l))"
)),
%% increasing `Num' increases the result set massivey
Num = 3,
Varchars = [<<"a">>,<<"b">>,<<"c">>],
Timestamps = lists:seq(1,Num),
Sint64s = lists:seq(1,Num),
Booleans = ts_booleans(),
Doubles = [N * 0.1 || N <- lists:seq(1,2)],
%% hard code some of the local key values to reduce the result set
H = 1,
K = <<"k">>,
L = 1,
ok = riakc_ts:put(Pid, <<"all_types">>,
[{A,B,C,D,E,F,G,H,I,J,K,L} || A <- Varchars, B <- Timestamps,
C <- Sint64s, D <- Booleans,
E <- Doubles, F <- Varchars,
G <- Timestamps,
I <- Booleans, J <- Doubles]).
all_types_1_test(Ctx) ->
H = 1,
K = <<"k">>,
L = 1,
Doubles = [N * 0.1 || N <- lists:seq(1,2)],
Query =
"SELECT * FROM all_types "
"WHERE a = 'b' AND b = 1 AND c = 3 AND d = true AND e = 0.1 "
"AND f = 'a' AND g = 2",
Results =
[{<<"b">>,1,3,true,0.1,<<"a">>,2,H,I,J,K,L} ||
I <- ts_booleans()
,J <- Doubles
],
ts_data:assert_row_sets(
{rt_ignore_columns, Results},
run_query(Ctx, Query)
).
all_types_or_filter_test(Ctx) ->
H = 1,
K = <<"k">>,
L = 1,
Doubles = [N * 0.1 || N <- lists:seq(1,2)],
Query =
"SELECT * FROM all_types "
"WHERE a = 'b' AND b = 1 AND c = 3 AND d = true AND e = 0.1 "
"AND f = 'a' AND g = 2 AND (i = true OR j = 0.2)",
Results =
[{<<"b">>,1,3,true,0.1,<<"a">>,2,H,I,J,K,L} ||
I <- ts_booleans(),
J <- Doubles,
I == true orelse J == 0.2
],
ts_data:assert_row_sets(
{rt_ignore_columns, Results},
run_query(Ctx, Query)
).
%%%
%%% Boolean Keys
%%%
all_booleans_create_data(Pid) ->
?assertEqual(
{ok, {[],[]}},
riakc_ts:query(Pid,
"CREATE TABLE all_booleans ("
"a BOOLEAN NOT NULL, "
"b BOOLEAN NOT NULL, "
"c BOOLEAN NOT NULL, "
"d BOOLEAN NOT NULL, "
"e BOOLEAN NOT NULL, "
"f BOOLEAN NOT NULL, "
"g BOOLEAN NOT NULL, "
"PRIMARY KEY ((a,b,c), a,b,c,d,e,f,g))"
)),
ok = riakc_ts:put(Pid, <<"all_booleans">>,
[{Ba,Bb,Bc,Bd,Be,Bf,Bg} || Ba <- ts_booleans(),
Bb <- ts_booleans(), Bc <- ts_booleans(),
Bd <- ts_booleans(), Be <- ts_booleans(),
Bf <- ts_booleans(), Bg <- ts_booleans()]).
ts_booleans() ->
[false,true]. %% false > true
all_booleans_test(Ctx) ->
Query =
"SELECT * FROM all_booleans "
"WHERE a = true AND b = true AND c = true",
Results =
[{true,true,true,Bd,Be,Bf,Bg} || Bd <- ts_booleans(), Be <- ts_booleans(),
Bf <- ts_booleans(), Bg <- ts_booleans()],
ts_data:assert_row_sets(
{rt_ignore_columns,Results},
run_query(Ctx, Query)
).
all_booleans_filter_on_g_test(Ctx) ->
Query =
"SELECT * FROM all_booleans "
"WHERE a = true AND b = true AND c = true AND g = false",
Results =
[{true,true,true,Bd,Be,Bf,false} || Bd <- ts_booleans(), Be <- ts_booleans(),
Bf <- ts_booleans()],
ts_data:assert_row_sets(
{rt_ignore_columns,Results},
run_query(Ctx, Query)
).
all_booleans_filter_on_d_and_f_test(Ctx) ->
Query =
"SELECT * FROM all_booleans "
"WHERE a = true AND b = true AND c = true AND d = false AND f = true",
Results =
[{true,true,true,false,Be,true,Bg} || Be <- ts_booleans(), Bg <- ts_booleans()],
ts_data:assert_row_sets(
{rt_ignore_columns,Results},
run_query(Ctx, Query)
).
%%%
%%% Time Stamp Keys
%%%
all_timestamps_create_data(Pid) ->
?assertEqual(
{ok, {[],[]}},
riakc_ts:query(Pid,
"CREATE TABLE all_timestamps ("
"a TIMESTAMP NOT NULL, "
"b TIMESTAMP NOT NULL, "
"c TIMESTAMP NOT NULL, "
"d TIMESTAMP NOT NULL, "
"e TIMESTAMP NOT NULL, "
"PRIMARY KEY ((a,c,quantum(b,15,s)), a,c,b,d,e))"
)),
ok = riakc_ts:put(Pid, <<"all_timestamps">>,
[{A,B,3,4,5} || A <- [1,2,3], B <- lists:seq(100, 10000, 100)]).
all_timestamps_across_quanta_test(Ctx) ->
Query =
"SELECT * FROM all_timestamps "
"WHERE a = 2 AND b > 200 AND b < 3000 AND c = 3",
Results =
[{2,B,3,4,5} || B <- lists:seq(300, 2900, 100)],
ts_data:assert_row_sets(
{rt_ignore_columns,Results},
run_query(Ctx, Query)
).
all_timestamps_single_quanta_test(Ctx) ->
Query =
"SELECT * FROM all_timestamps "
"WHERE a = 2 AND b > 200 AND b <= 900 AND c = 3",
Results =
[{2,B,3,4,5} || B <- lists:seq(300, 900, 100)],
ts_data:assert_row_sets(
{rt_ignore_columns,Results},
run_query(Ctx, Query)
).
%%%
%%% DESCENDING KEYS
%%%
table_def_desc_on_quantum_table() ->
"CREATE TABLE desc_on_quantum_table ("
"a SINT64 NOT NULL, "
"b SINT64 NOT NULL, "
"c TIMESTAMP NOT NULL, "
"PRIMARY KEY ((a,b,quantum(c, 1, 's')), a,b,c DESC))".
create_data_def_desc_on_quantum_table(Pid) ->
?assertEqual({ok, {[],[]}}, riakc_ts:query(Pid, table_def_desc_on_quantum_table())),
ok = riakc_ts:put(Pid, <<"desc_on_quantum_table">>, [{1,1,N} || N <- lists:seq(200,200*100,100)]).
desc_on_quantum___one_quantum_not_against_bounds_inclusive_test(Ctx) ->
Query =
"SELECT * FROM desc_on_quantum_table WHERE a = 1 AND b = 1 AND c >= 3100 AND c <= 4800",
ts_data:assert_row_sets(
{rt_ignore_columns,[{1,1,N} || N <- lists:seq(4800,3100,-100)]},
riakc_ts:query(client_pid(Ctx), Query)
).
desc_on_quantum___one_quanta_not_against_bounds_exclusive_test(Ctx) ->
Query =
"SELECT * FROM desc_on_quantum_table WHERE a = 1 AND b = 1 AND c > 3100 AND c < 4800",
ts_data:assert_row_sets(
{rt_ignore_columns,[{1,1,N} || N <- lists:seq(4700,3200,-100)]},
riakc_ts:query(client_pid(Ctx), Query)
).
desc_on_quantum___one_quanta_at_bounds_inclusive_test(Ctx) ->
Query =
"SELECT * FROM desc_on_quantum_table WHERE a = 1 AND b = 1 AND c >= 3000 AND c <= 5000",
ts_data:assert_row_sets(
{rt_ignore_columns,[{1,1,N} || N <- lists:seq(5000,3000,-100)]},
riakc_ts:query(client_pid(Ctx), Query)
).
desc_on_quantum___one_quanta_at_bounds_exclusive_test(Ctx) ->
Query =
"SELECT * FROM desc_on_quantum_table WHERE a = 1 AND b = 1 AND c > 3000 AND c < 5000",
ts_data:assert_row_sets(
{rt_ignore_columns,[{1,1,N} || N <- lists:seq(4900,3100,-100)]},
riakc_ts:query(client_pid(Ctx), Query)
).
desc_on_quantum_one_subquery_test(Ctx) ->
Query =
"SELECT * FROM desc_on_quantum_table WHERE a = 1 AND b = 1 AND c >= 4200 AND c <= 4800",
ts_data:assert_row_sets(
{rt_ignore_columns,[{1,1,N} || N <- lists:seq(4800,4200,-100)]},
riakc_ts:query(client_pid(Ctx), Query)
).
desc_on_quantum_one_subquery_exclusive_test(Ctx) ->
Query =
"SELECT * FROM desc_on_quantum_table WHERE a = 1 AND b = 1 AND c > 4200 AND c < 4800",
ts_data:assert_row_sets(
{rt_ignore_columns,[{1,1,N} || N <- lists:seq(4700,4300,-100)]},
riakc_ts:query(client_pid(Ctx), Query)
).
desc_on_quantum_one_subquery_inclusive_quanta_boundaries_test(Ctx) ->
Query =
"SELECT * FROM desc_on_quantum_table WHERE a = 1 AND b = 1 AND c >= 4000 AND c <= 5000",
ts_data:assert_row_sets(
{rt_ignore_columns,[{1,1,N} || N <- lists:seq(5000,4000,-100)]},
riakc_ts:query(client_pid(Ctx), Query)
).
desc_on_quantum_one_subquery_inclusive_across_quanta_boundaries_test(Ctx) ->
Query =
"SELECT * FROM desc_on_quantum_table WHERE a = 1 AND b = 1 AND c >= 3500 AND c <= 5500",
ts_data:assert_row_sets(
{rt_ignore_columns,[{1,1,N} || N <- lists:seq(5500,3500,-100)]},
riakc_ts:query(client_pid(Ctx), Query)
).
%%%
%%%
%%%
table_def_desc_on_additional_local_key_field_create_data(Pid) ->
?assertEqual(
{ok, {[],[]}},
riakc_ts:query(Pid,
"CREATE TABLE lk_desc_1 ("
"a SINT64 NOT NULL, "
"b SINT64 NOT NULL, "
"c TIMESTAMP NOT NULL, "
"d SINT64 NOT NULL, "
"PRIMARY KEY ((a,b,quantum(c, 1, 's')), a,b,c,d DESC))")),
ok = riakc_ts:put(Pid, <<"lk_desc_1">>,
[{1,1,1,N} || N <- lists:seq(200,200*100,100)]).
table_def_desc_on_additional_local_key_field_test(Ctx) ->
Query =
"SELECT * FROM lk_desc_1 WHERE a = 1 AND b = 1 AND c >= 1 AND c <= 1 AND d >= 3500 AND d <= 5500",
ts_data:assert_row_sets(
{rt_ignore_columns,[{1,1,1,N} || N <- lists:seq(5500,3500,-100)]},
riakc_ts:query(client_pid(Ctx), Query)
).
%%%
%%%
%%%
create_data_def_desc_on_varchar_table(Pid) ->
?assertEqual({ok, {[],[]}}, riakc_ts:query(Pid,
"CREATE TABLE desc_on_varchar_table ("
"a SINT64 NOT NULL, "
"b SINT64 NOT NULL, "
"c TIMESTAMP NOT NULL, "
"d VARCHAR NOT NULL, "
"PRIMARY KEY ((a,b), a,b,d DESC))")),
ok = riakc_ts:put(Pid, <<"desc_on_varchar_table">>, [{1,1,1,<<N>>} || N <- "abcdefghijklmnopqrstuvwxyz"]).
sdfsfsf_test(Ctx) ->
Query =
"SELECT * FROM desc_on_varchar_table WHERE a = 1 AND b = 1",
ts_data:assert_row_sets(
{rt_ignore_columns,[{1,1,1,<<N>>} || N <- lists:reverse("abcdefghijklmnopqrstuvwxyz")]},
riakc_ts:query(client_pid(Ctx), Query)
).
| null | https://raw.githubusercontent.com/basho/riak_test/8170137b283061ba94bc85bf42575021e26c929d/tests/ts_cluster_keys_SUITE.erl | erlang | -------------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
s
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
Tests for the different combinations of keys supported by
-------------------------------------------------------------------
--------------------------------------------------------------------
COMMON TEST CALLBACK FUNCTIONS
--------------------------------------------------------------------
create tables and populate them with data
Across more quanta
Missing an a
Missing a b
Missing an c, the the quantum
TABLE 4, small key where partition and local are the same
Tests for where clause filters on additional fields in the local key.
ERROR CASE TESTS
d is in the local key and set as nullable
Keys with different types
increasing `Num' increases the result set massivey
hard code some of the local key values to reduce the result set
Boolean Keys
false > true
Time Stamp Keys
DESCENDING KEYS
| Copyright ( c ) 2016 Basho Technologies , Inc.
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
Riak Time Series .
-module(ts_cluster_keys_SUITE).
-compile(export_all).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
suite() ->
[{timetrap,{minutes,10}}].
init_per_suite(Config) ->
[Node|_] = Cluster = ts_setup:start_cluster(3),
Pid = rt:pbc(Node),
create_data_def_1(Pid),
create_data_def_2(Pid),
create_data_def_3(Pid),
create_data_def_4(Pid),
create_data_def_5(Pid),
create_data_def_8(Pid),
all_booleans_create_data(Pid),
all_timestamps_create_data(Pid),
all_types_create_data(Pid),
create_data_def_desc_on_quantum_table(Pid),
create_data_def_desc_on_varchar_table(Pid),
table_def_desc_on_additional_local_key_field_create_data(Pid),
[{cluster, Cluster} | Config].
end_per_suite(_Config) ->
ok.
init_per_group(_GroupName, Config) ->
Config.
end_per_group(_GroupName, _Config) ->
ok.
init_per_testcase(_TestCase, Config) ->
Config.
end_per_testcase(_TestCase, _Config) ->
ok.
groups() ->
[].
all() ->
rt:grep_test_functions(?MODULE).
client_pid(Ctx) ->
[Node|_] = proplists:get_value(cluster, Ctx),
rt:pbc(Node).
run_query(Ctx, Query) ->
riakc_ts:query(client_pid(Ctx), Query).
%% TABLE 1
create_data_def_1(Pid) ->
ts_data:assert_row_sets({ok, {[],[]}},riakc_ts:query(Pid, table_def_1())),
ok = riakc_ts:put(Pid, <<"table1">>, [{1,1,N,1} || N <- lists:seq(1,6000)]).
column_names_def_1() ->
[<<"a">>, <<"b">>, <<"c">>, <<"d">>].
table_def_1() ->
"CREATE TABLE table1 ("
"a SINT64 NOT NULL, "
"b SINT64 NOT NULL, "
"c TIMESTAMP NOT NULL, "
"d SINT64 NOT NULL, "
"PRIMARY KEY ((a,b,quantum(c, 1, 's')), a,b,c,d))".
select_exclusive_def_1_test(Ctx) ->
Query =
"SELECT * FROM table1 WHERE a = 1 AND b = 1 AND c > 0 AND c < 11",
Results =
[{1,1,N,1} || N <- lists:seq(1,10)],
ts_data:assert_row_sets(
{ok, {column_names_def_1(), Results}},
run_query(Ctx, Query)
).
select_exclusive_def_1_2_test(Ctx) ->
Query =
"SELECT * FROM table1 WHERE a = 1 AND b = 1 AND c > 44 AND c < 54",
Results =
[{1,1,N,1} || N <- lists:seq(45,53)],
ts_data:assert_row_sets(
{ok, {column_names_def_1(), Results}},
run_query(Ctx, Query)
).
select_exclusive_def_1_across_quanta_1_test(Ctx) ->
Query =
"SELECT * FROM table1 WHERE a = 1 AND b = 1 AND c > 500 AND c < 1500",
Results =
[{1,1,N,1} || N <- lists:seq(501,1499)],
ts_data:assert_row_sets(
{ok, {column_names_def_1(), Results}},
run_query(Ctx, Query)
).
select_exclusive_def_1_across_quanta_2_test(Ctx) ->
Query =
"SELECT * FROM table1 WHERE a = 1 AND b = 1 AND c > 500 AND c < 4500",
Results =
[{1,1,N,1} || N <- lists:seq(501,4499)],
ts_data:assert_row_sets(
{ok, {column_names_def_1(), Results}},
run_query(Ctx, Query)
).
select_inclusive_def_1_test(Ctx) ->
Query =
"SELECT * FROM table1 WHERE a = 1 AND b = 1 AND c >= 11 AND c <= 20",
Results =
[{1,1,N,1} || N <- lists:seq(11,20)],
ts_data:assert_row_sets(
{ok, {column_names_def_1(), Results}},
run_query(Ctx, Query)
).
where_clause_must_cover_the_partition_key_missing_a_test(Ctx) ->
Query =
"SELECT * FROM table1 WHERE b = 1 AND c > 0 AND c < 11",
?assertMatch(
{error, {1001,<<_/binary>>}},
run_query(Ctx, Query)
).
where_clause_must_cover_the_partition_key_missing_b_test(Ctx) ->
Query =
"SELECT * FROM table1 WHERE a = 1 AND c > 0 AND c < 11",
?assertMatch(
{error, {1001,<<_/binary>>}},
run_query(Ctx, Query)
).
where_clause_must_cover_the_partition_key_missing_c_test(Ctx) ->
Query =
"SELECT * FROM table1 WHERE b = 1",
?assertMatch(
{error, {1001,<<_/binary>>}},
run_query(Ctx, Query)
).
%% TABLE 2 (same columns as table 1)
create_data_def_2(Pid) ->
ts_data:assert_row_sets({ok, {[],[]}}, riakc_ts:query(Pid, table_def_2())),
ok = riakc_ts:put(Pid, <<"table2">>, [{N,1,1,1} || N <- lists:seq(1,200)]).
table_def_2() ->
"CREATE TABLE table2 ("
"a TIMESTAMP NOT NULL, "
"b SINT64 NOT NULL, "
"c SINT64 NOT NULL, "
"d SINT64 NOT NULL, "
"PRIMARY KEY ((quantum(a, 1, 's')), a,b,c,d))".
select_exclusive_def_2_test(Ctx) ->
Query =
"SELECT * FROM table2 WHERE a > 0 AND a < 11",
Results =
[{N,1,1,1} || N <- lists:seq(1,10)],
ts_data:assert_row_sets(
{ok, {column_names_def_1(), Results}},
run_query(Ctx, Query)
).
select_inclusive_def_2_test(Ctx) ->
Query =
"SELECT * FROM table2 WHERE a >= 11 AND a <= 20",
Results =
[{N,1,1,1} || N <- lists:seq(11,20)],
ts_data:assert_row_sets(
{ok, {column_names_def_1(), Results}},
run_query(Ctx, Query)
).
%% TABLE 3, small key where partition and local are the same
create_data_def_3(Pid) ->
ts_data:assert_row_sets({ok, {[],[]}}, riakc_ts:query(Pid, table_def_3())),
ok = riakc_ts:put(Pid, <<"table3">>, [{1,N} || N <- lists:seq(1,200)]).
column_names_def_3() ->
[<<"a">>, <<"b">>].
table_def_3() ->
"CREATE TABLE table3 ("
"a SINT64 NOT NULL, "
"b TIMESTAMP NOT NULL, "
"PRIMARY KEY ((a,quantum(b, 1, 's')),a,b))".
select_exclusive_def_3_test(Ctx) ->
Query =
"SELECT * FROM table3 WHERE b > 0 AND b < 11 AND a = 1",
Results =
[{1,N} || N <- lists:seq(1,10)],
ts_data:assert_row_sets(
{ok, {column_names_def_3(), Results}},
run_query(Ctx, Query)
).
select_inclusive_def_3_test(Ctx) ->
Query =
"SELECT * FROM table3 WHERE b >= 11 AND b <= 20 AND a = 1",
Results =
[{1,N} || N <- lists:seq(11,20)],
ts_data:assert_row_sets(
{ok, {column_names_def_3(), Results}},
run_query(Ctx, Query)
).
create_data_def_4(Pid) ->
ts_data:assert_row_sets({ok, {[],[]}}, riakc_ts:query(Pid, table_def_4())),
ok = riakc_ts:put(Pid, <<"table4">>, [{1,1,N} || N <- lists:seq(1,200)]).
column_names_def_4() ->
[<<"a">>, <<"b">>, <<"c">>].
table_def_4() ->
"CREATE TABLE table4 ("
"a SINT64 NOT NULL, "
"b SINT64 NOT NULL, "
"c TIMESTAMP NOT NULL, "
"PRIMARY KEY ((a,b,quantum(c, 1, 's')),a,b,c))".
select_exclusive_def_4_test(Ctx) ->
Query =
"SELECT * FROM table4 WHERE a = 1 AND b = 1 AND c > 0 AND c < 11",
Results =
[{1,1,N} || N <- lists:seq(1,10)],
ts_data:assert_row_sets(
{ok, {column_names_def_4(), Results}},
run_query(Ctx, Query)
).
select_inclusive_def_4_test(Ctx) ->
Query =
"SELECT * FROM table4 WHERE a = 1 AND b = 1 AND c >= 11 AND c <= 20",
Results =
[{1,1,N} || N <- lists:seq(11,20)],
ts_data:assert_row_sets(
{ok, {column_names_def_4(), Results}},
run_query(Ctx, Query)
).
%% TABLE 5 no quanta
column_names_def_5() ->
[<<"a">>, <<"b">>, <<"c">>].
table_def_5() ->
"CREATE TABLE table5 ("
"a SINT64 NOT NULL, "
"b SINT64 NOT NULL, "
"c TIMESTAMP NOT NULL, "
"PRIMARY KEY ((a,b,c),a,b,c))".
create_data_def_5(Pid) ->
ts_data:assert_row_sets({ok, {[],[]}}, riakc_ts:query(Pid, table_def_5())),
ok = riakc_ts:put(Pid, <<"table5">>, [{1,1,N} || N <- lists:seq(1,200)]).
select_def_5_test(Ctx) ->
Query =
"SELECT * FROM table5 WHERE a = 1 AND b = 1 AND c = 20",
ts_data:assert_row_sets(
{ok, {column_names_def_5(), [{1,1,20}]}},
run_query(Ctx, Query)
).
create_data_def_8(Pid) ->
ts_data:assert_row_sets(
{ok, {[],[]}},
riakc_ts:query(Pid,
"CREATE TABLE table8 ("
"a SINT64 NOT NULL, "
"b SINT64 NOT NULL, "
"c TIMESTAMP NOT NULL, "
"d SINT64 NOT NULL, "
"PRIMARY KEY ((a,b,quantum(c, 1, 's')), a,b,c,d))"
)),
ok = riakc_ts:put(Pid, <<"table8">>, [{1,1,N,N} || N <- lists:seq(1,6000)]).
d_equal_than_filter_test(Ctx) ->
Query =
"SELECT * FROM table8 "
"WHERE a = 1 AND b = 1 AND c >= 2500 AND c <= 4500 AND d = 3000",
ts_data:assert_row_sets(
{rt_ignore_columns, [{1,1,3000,3000}]},
run_query(Ctx, Query)
).
d_greater_than_filter_test(Ctx) ->
Query =
"SELECT * FROM table8 "
"WHERE a = 1 AND b = 1 AND c >= 2500 AND c <= 4500 AND d > 3000",
Results =
[{1,1,N,N} || N <- lists:seq(3001,4500)],
ts_data:assert_row_sets(
{rt_ignore_columns, Results},
run_query(Ctx, Query)
).
d_greater_or_equal_to_filter_test(Ctx) ->
Query =
"SELECT * FROM table8 "
"WHERE a = 1 AND b = 1 AND c >= 2500 AND c <= 4500 AND d >= 3000",
Results =
[{1,1,N,N} || N <- lists:seq(3000,4500)],
ts_data:assert_row_sets(
{rt_ignore_columns, Results},
run_query(Ctx, Query)
).
d_not_filter_test(Ctx) ->
Query =
"SELECT * FROM table8 "
"WHERE a = 1 AND b = 1 AND c >= 2500 AND c <= 4500 AND d != 3000",
Results =
[{1,1,N,N} || N <- lists:seq(2500,4500), N /= 3000],
ts_data:assert_row_sets(
{rt_ignore_columns, Results},
run_query(Ctx, Query)
).
nulls_in_additional_local_key_not_allowed_test(Ctx) ->
?assertMatch(
{error, {1020, <<_/binary>>}},
riakc_ts:query(client_pid(Ctx),
"CREATE TABLE table1 ("
"a SINT64 NOT NULL, "
"b SINT64 NOT NULL, "
"c TIMESTAMP NOT NULL, "
"PRIMARY KEY ((a,b,quantum(c, 1, 's')), a,b,c,d))"
)
).
duplicate_fields_in_local_key_1_not_allowed_test(Ctx) ->
?assertMatch(
{error, {1020, <<_/binary>>}},
riakc_ts:query(client_pid(Ctx),
"CREATE TABLE table1 ("
"a SINT64 NOT NULL, "
"b SINT64 NOT NULL, "
"c TIMESTAMP NOT NULL, "
"PRIMARY KEY ((a,b,quantum(c, 1, 's')), a,b,c,c))"
)
).
duplicate_fields_in_local_key_2_not_allowed_test(Ctx) ->
?assertMatch(
{error, {1020, <<_/binary>>}},
riakc_ts:query(client_pid(Ctx),
"CREATE TABLE table1 ("
"a SINT64 NOT NULL, "
"b SINT64 NOT NULL, "
"c TIMESTAMP NOT NULL, "
"d SINT64 NOT NULL, "
"PRIMARY KEY ((a,b,quantum(c, 1, 's')), a,b,c,d,d))"
)
).
duplicate_fields_in_partition_key_1_not_allowed_test(Ctx) ->
?assertMatch(
{error, {1020, <<_/binary>>}},
riakc_ts:query(client_pid(Ctx),
"CREATE TABLE table1 ("
"a SINT64 NOT NULL, "
"b SINT64 NOT NULL, "
"c TIMESTAMP NOT NULL, "
"d SINT64, "
"PRIMARY KEY ((a,a,quantum(c, 1, 's')), a,a,c))"
)
).
multiple_quantum_functions_in_partition_key_not_allowed(Ctx) ->
?assertMatch(
{error, {1020, <<_/binary>>}},
riakc_ts:query(client_pid(Ctx),
"CREATE TABLE table1 ("
"a SINT64 NOT NULL, "
"b TIMESTAMP NOT NULL, "
"c TIMESTAMP NOT NULL, "
"PRIMARY KEY ((a,quantum(b, 1, 's'),quantum(c, 1, 's')), a,b,c))"
)
).
double_pk_double_boolean_lk_test(Ctx) ->
?assertEqual(
{ok, {[],[]}},
riakc_ts:query(client_pid(Ctx),
"CREATE TABLE double_pk_double_boolean_lk_test ("
"a DOUBLE NOT NULL, "
"b BOOLEAN NOT NULL, "
"PRIMARY KEY ((a), a,b))"
)),
Doubles = [N * 0.1 || N <- lists:seq(1,100)],
ok = riakc_ts:put(client_pid(Ctx), <<"double_pk_double_boolean_lk_test">>,
[{F,B} || F <- Doubles, B <- [true,false]]),
Query =
"SELECT * FROM double_pk_double_boolean_lk_test "
"WHERE a = 0.5 AND b = true",
ts_data:assert_row_sets(
{rt_ignore_columns, [{0.5,true}]},
run_query(Ctx, Query)
).
boolean_pk_boolean_double_lk_test(Ctx) ->
?assertEqual(
{ok, {[],[]}},
riakc_ts:query(client_pid(Ctx),
"CREATE TABLE boolean_pk_boolean_double_lk_test ("
"a BOOLEAN NOT NULL, "
"b DOUBLE NOT NULL, "
"PRIMARY KEY ((a), a,b))"
)),
Doubles = [N * 0.1 || N <- lists:seq(1,100)],
ok = riakc_ts:put(client_pid(Ctx), <<"boolean_pk_boolean_double_lk_test">>,
[{B,F} || F <- Doubles, B <- [true,false]]),
Query =
"SELECT * FROM boolean_pk_boolean_double_lk_test "
"WHERE a = false AND b = 0.5",
ts_data:assert_row_sets(
{rt_ignore_columns, [{false,0.5}]},
run_query(Ctx, Query)
).
all_types_create_data(Pid) ->
?assertEqual(
{ok, {[],[]}},
riakc_ts:query(Pid,
"CREATE TABLE all_types ("
"a VARCHAR NOT NULL, "
"b TIMESTAMP NOT NULL, "
"c SINT64 NOT NULL, "
"d BOOLEAN NOT NULL, "
"e DOUBLE NOT NULL, "
"f VARCHAR NOT NULL, "
"g TIMESTAMP NOT NULL, "
"h SINT64 NOT NULL, "
"i BOOLEAN NOT NULL, "
"j DOUBLE NOT NULL, "
"k VARCHAR NOT NULL, "
"l TIMESTAMP NOT NULL, "
"PRIMARY KEY ((a,b,c,d,e,f,g), a,b,c,d,e,f,g,h,i,j,k,l))"
)),
Num = 3,
Varchars = [<<"a">>,<<"b">>,<<"c">>],
Timestamps = lists:seq(1,Num),
Sint64s = lists:seq(1,Num),
Booleans = ts_booleans(),
Doubles = [N * 0.1 || N <- lists:seq(1,2)],
H = 1,
K = <<"k">>,
L = 1,
ok = riakc_ts:put(Pid, <<"all_types">>,
[{A,B,C,D,E,F,G,H,I,J,K,L} || A <- Varchars, B <- Timestamps,
C <- Sint64s, D <- Booleans,
E <- Doubles, F <- Varchars,
G <- Timestamps,
I <- Booleans, J <- Doubles]).
all_types_1_test(Ctx) ->
H = 1,
K = <<"k">>,
L = 1,
Doubles = [N * 0.1 || N <- lists:seq(1,2)],
Query =
"SELECT * FROM all_types "
"WHERE a = 'b' AND b = 1 AND c = 3 AND d = true AND e = 0.1 "
"AND f = 'a' AND g = 2",
Results =
[{<<"b">>,1,3,true,0.1,<<"a">>,2,H,I,J,K,L} ||
I <- ts_booleans()
,J <- Doubles
],
ts_data:assert_row_sets(
{rt_ignore_columns, Results},
run_query(Ctx, Query)
).
all_types_or_filter_test(Ctx) ->
H = 1,
K = <<"k">>,
L = 1,
Doubles = [N * 0.1 || N <- lists:seq(1,2)],
Query =
"SELECT * FROM all_types "
"WHERE a = 'b' AND b = 1 AND c = 3 AND d = true AND e = 0.1 "
"AND f = 'a' AND g = 2 AND (i = true OR j = 0.2)",
Results =
[{<<"b">>,1,3,true,0.1,<<"a">>,2,H,I,J,K,L} ||
I <- ts_booleans(),
J <- Doubles,
I == true orelse J == 0.2
],
ts_data:assert_row_sets(
{rt_ignore_columns, Results},
run_query(Ctx, Query)
).
all_booleans_create_data(Pid) ->
?assertEqual(
{ok, {[],[]}},
riakc_ts:query(Pid,
"CREATE TABLE all_booleans ("
"a BOOLEAN NOT NULL, "
"b BOOLEAN NOT NULL, "
"c BOOLEAN NOT NULL, "
"d BOOLEAN NOT NULL, "
"e BOOLEAN NOT NULL, "
"f BOOLEAN NOT NULL, "
"g BOOLEAN NOT NULL, "
"PRIMARY KEY ((a,b,c), a,b,c,d,e,f,g))"
)),
ok = riakc_ts:put(Pid, <<"all_booleans">>,
[{Ba,Bb,Bc,Bd,Be,Bf,Bg} || Ba <- ts_booleans(),
Bb <- ts_booleans(), Bc <- ts_booleans(),
Bd <- ts_booleans(), Be <- ts_booleans(),
Bf <- ts_booleans(), Bg <- ts_booleans()]).
%% The two boolean values used when generating and asserting test data.
%% NOTE: the function body was missing in this file (the head was
%% immediately followed by the next function), which is a syntax error;
%% restored from how every caller and expected result set uses it.
ts_booleans() ->
[true,false].
all_booleans_test(Ctx) ->
Query =
"SELECT * FROM all_booleans "
"WHERE a = true AND b = true AND c = true",
Results =
[{true,true,true,Bd,Be,Bf,Bg} || Bd <- ts_booleans(), Be <- ts_booleans(),
Bf <- ts_booleans(), Bg <- ts_booleans()],
ts_data:assert_row_sets(
{rt_ignore_columns,Results},
run_query(Ctx, Query)
).
all_booleans_filter_on_g_test(Ctx) ->
Query =
"SELECT * FROM all_booleans "
"WHERE a = true AND b = true AND c = true AND g = false",
Results =
[{true,true,true,Bd,Be,Bf,false} || Bd <- ts_booleans(), Be <- ts_booleans(),
Bf <- ts_booleans()],
ts_data:assert_row_sets(
{rt_ignore_columns,Results},
run_query(Ctx, Query)
).
all_booleans_filter_on_d_and_f_test(Ctx) ->
Query =
"SELECT * FROM all_booleans "
"WHERE a = true AND b = true AND c = true AND d = false AND f = true",
Results =
[{true,true,true,false,Be,true,Bg} || Be <- ts_booleans(), Bg <- ts_booleans()],
ts_data:assert_row_sets(
{rt_ignore_columns,Results},
run_query(Ctx, Query)
).
all_timestamps_create_data(Pid) ->
?assertEqual(
{ok, {[],[]}},
riakc_ts:query(Pid,
"CREATE TABLE all_timestamps ("
"a TIMESTAMP NOT NULL, "
"b TIMESTAMP NOT NULL, "
"c TIMESTAMP NOT NULL, "
"d TIMESTAMP NOT NULL, "
"e TIMESTAMP NOT NULL, "
"PRIMARY KEY ((a,c,quantum(b,15,s)), a,c,b,d,e))"
)),
ok = riakc_ts:put(Pid, <<"all_timestamps">>,
[{A,B,3,4,5} || A <- [1,2,3], B <- lists:seq(100, 10000, 100)]).
all_timestamps_across_quanta_test(Ctx) ->
Query =
"SELECT * FROM all_timestamps "
"WHERE a = 2 AND b > 200 AND b < 3000 AND c = 3",
Results =
[{2,B,3,4,5} || B <- lists:seq(300, 2900, 100)],
ts_data:assert_row_sets(
{rt_ignore_columns,Results},
run_query(Ctx, Query)
).
all_timestamps_single_quanta_test(Ctx) ->
Query =
"SELECT * FROM all_timestamps "
"WHERE a = 2 AND b > 200 AND b <= 900 AND c = 3",
Results =
[{2,B,3,4,5} || B <- lists:seq(300, 900, 100)],
ts_data:assert_row_sets(
{rt_ignore_columns,Results},
run_query(Ctx, Query)
).
table_def_desc_on_quantum_table() ->
"CREATE TABLE desc_on_quantum_table ("
"a SINT64 NOT NULL, "
"b SINT64 NOT NULL, "
"c TIMESTAMP NOT NULL, "
"PRIMARY KEY ((a,b,quantum(c, 1, 's')), a,b,c DESC))".
create_data_def_desc_on_quantum_table(Pid) ->
?assertEqual({ok, {[],[]}}, riakc_ts:query(Pid, table_def_desc_on_quantum_table())),
ok = riakc_ts:put(Pid, <<"desc_on_quantum_table">>, [{1,1,N} || N <- lists:seq(200,200*100,100)]).
desc_on_quantum___one_quantum_not_against_bounds_inclusive_test(Ctx) ->
Query =
"SELECT * FROM desc_on_quantum_table WHERE a = 1 AND b = 1 AND c >= 3100 AND c <= 4800",
ts_data:assert_row_sets(
{rt_ignore_columns,[{1,1,N} || N <- lists:seq(4800,3100,-100)]},
riakc_ts:query(client_pid(Ctx), Query)
).
desc_on_quantum___one_quanta_not_against_bounds_exclusive_test(Ctx) ->
Query =
"SELECT * FROM desc_on_quantum_table WHERE a = 1 AND b = 1 AND c > 3100 AND c < 4800",
ts_data:assert_row_sets(
{rt_ignore_columns,[{1,1,N} || N <- lists:seq(4700,3200,-100)]},
riakc_ts:query(client_pid(Ctx), Query)
).
desc_on_quantum___one_quanta_at_bounds_inclusive_test(Ctx) ->
Query =
"SELECT * FROM desc_on_quantum_table WHERE a = 1 AND b = 1 AND c >= 3000 AND c <= 5000",
ts_data:assert_row_sets(
{rt_ignore_columns,[{1,1,N} || N <- lists:seq(5000,3000,-100)]},
riakc_ts:query(client_pid(Ctx), Query)
).
desc_on_quantum___one_quanta_at_bounds_exclusive_test(Ctx) ->
Query =
"SELECT * FROM desc_on_quantum_table WHERE a = 1 AND b = 1 AND c > 3000 AND c < 5000",
ts_data:assert_row_sets(
{rt_ignore_columns,[{1,1,N} || N <- lists:seq(4900,3100,-100)]},
riakc_ts:query(client_pid(Ctx), Query)
).
desc_on_quantum_one_subquery_test(Ctx) ->
Query =
"SELECT * FROM desc_on_quantum_table WHERE a = 1 AND b = 1 AND c >= 4200 AND c <= 4800",
ts_data:assert_row_sets(
{rt_ignore_columns,[{1,1,N} || N <- lists:seq(4800,4200,-100)]},
riakc_ts:query(client_pid(Ctx), Query)
).
desc_on_quantum_one_subquery_exclusive_test(Ctx) ->
Query =
"SELECT * FROM desc_on_quantum_table WHERE a = 1 AND b = 1 AND c > 4200 AND c < 4800",
ts_data:assert_row_sets(
{rt_ignore_columns,[{1,1,N} || N <- lists:seq(4700,4300,-100)]},
riakc_ts:query(client_pid(Ctx), Query)
).
desc_on_quantum_one_subquery_inclusive_quanta_boundaries_test(Ctx) ->
Query =
"SELECT * FROM desc_on_quantum_table WHERE a = 1 AND b = 1 AND c >= 4000 AND c <= 5000",
ts_data:assert_row_sets(
{rt_ignore_columns,[{1,1,N} || N <- lists:seq(5000,4000,-100)]},
riakc_ts:query(client_pid(Ctx), Query)
).
desc_on_quantum_one_subquery_inclusive_across_quanta_boundaries_test(Ctx) ->
Query =
"SELECT * FROM desc_on_quantum_table WHERE a = 1 AND b = 1 AND c >= 3500 AND c <= 5500",
ts_data:assert_row_sets(
{rt_ignore_columns,[{1,1,N} || N <- lists:seq(5500,3500,-100)]},
riakc_ts:query(client_pid(Ctx), Query)
).
table_def_desc_on_additional_local_key_field_create_data(Pid) ->
?assertEqual(
{ok, {[],[]}},
riakc_ts:query(Pid,
"CREATE TABLE lk_desc_1 ("
"a SINT64 NOT NULL, "
"b SINT64 NOT NULL, "
"c TIMESTAMP NOT NULL, "
"d SINT64 NOT NULL, "
"PRIMARY KEY ((a,b,quantum(c, 1, 's')), a,b,c,d DESC))")),
ok = riakc_ts:put(Pid, <<"lk_desc_1">>,
[{1,1,1,N} || N <- lists:seq(200,200*100,100)]).
table_def_desc_on_additional_local_key_field_test(Ctx) ->
Query =
"SELECT * FROM lk_desc_1 WHERE a = 1 AND b = 1 AND c >= 1 AND c <= 1 AND d >= 3500 AND d <= 5500",
ts_data:assert_row_sets(
{rt_ignore_columns,[{1,1,1,N} || N <- lists:seq(5500,3500,-100)]},
riakc_ts:query(client_pid(Ctx), Query)
).
create_data_def_desc_on_varchar_table(Pid) ->
?assertEqual({ok, {[],[]}}, riakc_ts:query(Pid,
"CREATE TABLE desc_on_varchar_table ("
"a SINT64 NOT NULL, "
"b SINT64 NOT NULL, "
"c TIMESTAMP NOT NULL, "
"d VARCHAR NOT NULL, "
"PRIMARY KEY ((a,b), a,b,d DESC))")),
ok = riakc_ts:put(Pid, <<"desc_on_varchar_table">>, [{1,1,1,<<N>>} || N <- "abcdefghijklmnopqrstuvwxyz"]).
sdfsfsf_test(Ctx) ->
Query =
"SELECT * FROM desc_on_varchar_table WHERE a = 1 AND b = 1",
ts_data:assert_row_sets(
{rt_ignore_columns,[{1,1,1,<<N>>} || N <- lists:reverse("abcdefghijklmnopqrstuvwxyz")]},
riakc_ts:query(client_pid(Ctx), Query)
).
;; f688673517575bb4ce8f56238211a45dba01318f57e44fd960dc0971104c8515 | jarvinet/scheme | listtest.scm
(define q (make-deque))
(define a (cons 'a 1))
(define b (cons 'b 1))
(define c (cons 'c 1))
(define d (cons 'd 1))
(front-insert-deque! q a)
(front-insert-deque! q b)
(front-insert-deque! q c)
(front-insert-deque! q d)
(print-deque q)
(define (f value)
(lambda (v)
(eq? (car v) value)))
(define n (find-deque q (f 'b)))
(delete-deque! q n)
(print-deque q)
(define n (find-deque q (f 'd)))
(delete-deque! q n)
(print-deque q)
(define n (find-deque q (f 'a)))
(delete-deque! q n)
(print-deque q)
(define n (find-deque q (f 'c)))
(delete-deque! q n)
(print-deque q)
(define n (find-deque q (f 'c)))
(delete-deque! q n)
(print-deque q)
;(define ht (make-hashtable))
;(display "foo") (newline)
;(lookup-hashtable ht 'a #t 1)
;(lookup-hashtable ht 'b #t 2)
;(lookup-hashtable ht 'c #t 3)
;(lookup-hashtable ht 'd #t 4)
;(lookup-hashtable ht 'e #t 5)
;(display "bar") (newline)
;(define z (lookup-hashtable ht 'c #f 0))
;(display "baz") (newline)
;(remove-hashtable ht 'c)
;(display "foobar") (newline)
| null | https://raw.githubusercontent.com/jarvinet/scheme/47633d7fc4d82d739a62ceec75c111f6549b1650/bin/test/listtest.scm | scheme | (define ht (make-hashtable))
(display "foo") (newline)
(lookup-hashtable ht 'a #t 1)
(lookup-hashtable ht 'e #t 5)
(display "bar") (newline)
(define z (lookup-hashtable ht 'c #f 0))
(display "baz") (newline)
(remove-hashtable ht 'c) | (define q (make-deque))
(define a (cons 'a 1))
(define b (cons 'b 1))
(define c (cons 'c 1))
(define d (cons 'd 1))
(front-insert-deque! q a)
(front-insert-deque! q b)
(front-insert-deque! q c)
(front-insert-deque! q d)
(print-deque q)
(define (f value)
(lambda (v)
(eq? (car v) value)))
(define n (find-deque q (f 'b)))
(delete-deque! q n)
(print-deque q)
(define n (find-deque q (f 'd)))
(delete-deque! q n)
(print-deque q)
(define n (find-deque q (f 'a)))
(delete-deque! q n)
(print-deque q)
(define n (find-deque q (f 'c)))
(delete-deque! q n)
(print-deque q)
(define n (find-deque q (f 'c)))
(delete-deque! q n)
(print-deque q)
( lookup - hashtable ht ' b # t 2 )
( lookup - hashtable ht ' c # t 3 )
( lookup - hashtable ht 'd # t 4 )
( display " foobar " ) ( newline )
|
;; fd1783d6ea662d551cc21f88a241bd767501e50682a739aa7749982adaf435fa | spurious/sagittarius-scheme-mirror | consumer.scm
;;; -*- Scheme -*-
;;;
token.scm - OAuth 1.0 library .
;;;
Copyright ( c ) 2010 - 2013 < >
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
;;;
;;; 1. Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;;
;;; 2. Redistributions in binary form must reproduce the above copyright
;;; notice, this list of conditions and the following disclaimer in the
;;; documentation and/or other materials provided with the distribution.
;;;
;;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
;;; LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
;;; A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
;;; OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED
;;; TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
;;; PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
;;; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
;;; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
;;;
;; based on cl-oauth
#!read-macro=sagittarius/regex
(library (net oauth consumer)
(export obtain-access-token
authorize-request-token
;; authorize-request-token-from-request
make-authorization-uri
obtain-request-token
access-protected-resource
oauth-uri-encode
oauth-compose-query)
(import (rnrs)
(sagittarius)
(sagittarius control)
(sagittarius regex)
(net oauth misc)
(net oauth token)
(net oauth signature)
(net oauth parameters)
(net oauth query-string)
(srfi :1 lists)
(srfi :19 time)
(rfc :5322)
(rfc uri)
(rfc http))
;;;;;;;;;;;;
;;; consumer functions
;; Message translator
;; if something wrong happens within oauth process, http response has
;; the error message, however we don't know which format it uses.
;; so let user handle it.
(define (default-message-translator status header body) body)
;; helper
(define (oauth-http-request uri
:key (auth-location :header)
(method 'GET)
(sender (http-null-sender))
(receiver (http-string-receiver))
(auth-parameters '())
(parameters '())
(additional-headers '()))
(receive (scheme user-info host port path query frag) (uri-parse uri)
(let ((q (oauth-compose-query (if (eq? auth-location :parameters)
(append parameters auth-parameters)
parameters)))
(headers (if (eq? auth-location :header)
(cons `("Authorization"
,(build-auth-string auth-parameters))
additional-headers)
additional-headers)))
(http-request method host (string-append path "?" q)
:secure (string=? scheme "https")
:sender sender
:receiver receiver
:extra-headers headers))))
(define (generate-auth-parameters consumer signature-method timestamp version
:optional (token #f))
(let ((parameters `(("oauth_consumer_key" ,(token-key consumer))
("oauth_signature_method" ,(string-upcase
(format "~a"
signature-method)))
("oauth_timestamp" ,(number->string timestamp))
("oauth_nonce" ,(number->string
(random-source (greatest-fixnum))))
("oauth_version" ,(format "~a" version)))))
(if token
(cons `("oauth_token" ,(uri-decode-string (token-key token)
:cgi-decode #t))
parameters)
parameters)))
;; Additional parameters will be stored in the user-data slot of the token.
(define (obtain-request-token uri consumer-token :key
(version :1.0)
(user-parameters '())
(timestamp (time-second (current-time)))
(auth-location :header)
(request-method 'POST)
(callback-uri #f)
(additional-headers '())
(signature-method :hmac-sha1)
(error-translator default-message-translator))
(let* ((callback-uri (or callback-uri "oob"))
(auth-parameters (cons `("oauth_callback" ,callback-uri)
(generate-auth-parameters consumer-token
signature-method
timestamp
version)))
(sbs (signature-base-string :uri uri :request-method request-method
:parameters (sort-parameters
(append user-parameters
auth-parameters))))
(signature (oauth-signature signature-method sbs
(token-secret consumer-token)))
(signed-parameters (cons `("oauth_signature" ,signature)
auth-parameters)))
(receive (status header body)
(oauth-http-request uri
:method request-method
:auth-location auth-location
:auth-parameters signed-parameters
:parameters user-parameters
:additional-headers additional-headers)
(unless (string=? status "200")
(assertion-violation 'obtain-request-token
(error-translator status header body)))
(let* ((response (query-string->alist body))
(key (cond ((assoc "oauth_token" response) => cadr)
(else #f)))
(secret (cond ((assoc "oauth_token_secret" response) => cadr)
(else #f)))
(user-data (lset-difference
(lambda (e1 e2) (equal? (car e1) e2))
response '("oauth_token" "oauth_token_secret"))))
(make-request-token :consumer consumer-token :key key
:secret secret :callback-uri callback-uri
:user-data user-data)))))
Return the service provider 's authorization URI . [ 6.2.1 ] in 1.0
(define (make-authorization-uri uri request-token
:key (version :1.0) (callback-uri #f)
(user-parameters '()))
(when (and request-token (request-token-authorized? request-token))
(assertion-violation 'make-authorization-uri
"given request token is already ahtuorised"
request-token))
(let* ((parameters (append user-parameters
(if request-token
`(("oauth_token" ,(token-key request-token)))
'())
(if callback-uri
`(("oauth_callback" ,callback-uri))
'()))))
(if (null? parameters)
uri
(string-append uri "?" (alist->query-string parameters)))))
;; Authorize a request token explicitly. Returns the authorized token.
;; Mark REQUEST-TOKEN as authorized when a verification code string is
;; supplied, recording the verifier on the token.  The token itself is
;; returned in either case.
(define (authorize-request-token request-token verification-code)
(if (and verification-code (string? verification-code))
(begin
(request-token-verification-code request-token verification-code)
(request-token-authorized-set! request-token #t)
request-token)
request-token))
;; Additional parameters will be stored in the user-data slot of the
;; token. POST is recommended as request method. [6.3.1]
(define (obtain-access-token uri token :key
(consumer-token
(token-consumer token))
(request-method 'POST)
(auth-location :header)
(version :1.0)
(timestamp (time-second (current-time)))
(signature-method :hmac-sha1)
(error-translator default-message-translator))
(let1 refresh? (access-token? token)
(unless refresh?
(or (request-token-authorized? token)
(assertion-violation 'obtain-access-token
"request token is not authorised.")))
(let* ((parameters (append
(generate-auth-parameters consumer-token
signature-method
timestamp
version
token)
(cond (refresh?
`(("oauth_session_handle"
,(access-token-session-handle token))))
((request-token-verification-code token)
=> (lambda (it) `(("oauth_verifier" ,it))))
(else '()))))
(sbs (signature-base-string :uri uri :request-method request-method
:parameters (sort-parameters
parameters)))
(signature (oauth-signature signature-method sbs
(token-secret consumer-token)
(uri-decode-string (token-secret token)
:cgi-decode #t)))
(signed-parameters (cons `("oauth_signature" ,signature)
parameters)))
(receive (status header body)
(oauth-http-request uri
:method request-method
:auth-location auth-location
:auth-parameters signed-parameters)
(define (field name response)
(cond ((assoc name response) => cadr)
(else #f)))
(unless (string=? status "200")
(assertion-violation 'obtain-access-token
(error-translator status header body)))
(let* ((response (query-string->alist body))
(key (field "oauth_token" response))
(secret (field "oauth_token_secret" response))
(session-handle (field "oauth_session_handle" response))
(expires (and-let* ((r (field "oauth_expires_in" response)))
(add-duration! (current-time)
(make-time 'time-duration 0
(string->number r)))))
(authorization-expires
(and-let* ((r (field "oauth_authorization_expires_in"
response)))
(add-duration! (current-time)
(make-time 'time-duration 0
(string->number r)))))
(user-data (remove-oauth-parameters response)))
(unless (and key secret)
(assertion-violation
'obtain-access-token
"oauth_token or/and oauth_token_secret field(s) are not returned"))
(make-access-token :consumer consumer-token
:key (uri-decode-string key :cgi-decode #t)
:secret (uri-decode-string secret :cgi-decode #t)
:session-handle session-handle
:expires expires
:authorization-expires authorization-expires
:origin-uri uri
:user-data user-data))))))
(define (refresh-access-token access-token)
(obtain-access-token (access-token-origin-uri access-token) access-token))
(define (maybe-refresh-access-token access-token :optional (on-refresh #f))
(if (access-token-expired? access-token)
(let ((new-token (refresh-access-token access-token)))
(when on-refresh
(on-refresh new-token))
new-token)
access-token))
(define (get-problem-report-from-headers headers)
(or (and-let* ((auth-header (rfc5322-header-ref headers "www-authenticate"))
(len (string-length auth-header))
( (>= len 5) )
(type (substring auth-header 0 5))
( (string=? type "OAuth") )
( (> len 5)))
(map (lambda (token)
(string-split token "="))
(string-split (substring auth-header 6 len)
#/\s/)))
'()))
(define (get-problem-report headers body)
(let ((from-headers (get-problem-report-from-headers headers)))
from-headers))
Access the protected resource at URI using ACCESS - TOKEN .
;; If the token contains OAuth Session information it will be checked for
;; validity before the request is made. Should the server notify us that
;; it has prematurely expired the token will be refresh as well and the
;; request sent again using the new token. ON-REFRESH will be called
;; whenever the access token is renewed.
(define (access-protected-resource uri access-token :rest kwargs :key
(consumer-token
(token-consumer access-token))
(on-refresh #f)
(timestamp (time-second (current-time)))
(user-parameters '())
;; fxxking Twitter API...
(use-user-parameters-for-auth #t)
(additional-headers '())
(version :1.0)
(auth-location :header)
(request-method 'GET)
(signature-method :hmac-sha1)
(body #f)
(receiver (http-string-receiver))
(error-translator
default-message-translator))
(set! access-token (maybe-refresh-access-token access-token on-refresh))
(receive (normalized-uri query-string-parameters) (normalize-uri uri #t)
(let* ((auth-parameters (generate-auth-parameters consumer-token
signature-method
timestamp
version
access-token))
(sbs (signature-base-string :uri normalized-uri
:request-method request-method
:parameters
(sort-parameters
`(,@query-string-parameters
,@(if use-user-parameters-for-auth
user-parameters
'())
,@auth-parameters))
:post-data (and (not (list? body))
body)))
(signature (oauth-signature signature-method sbs
(token-secret consumer-token)
(token-secret access-token)))
(signed-parameters (cons `("oauth_signature" ,signature)
auth-parameters)))
(receive (status header body)
(oauth-http-request uri
:method request-method
:auth-location auth-location
:auth-parameters signed-parameters
:parameters user-parameters
:sender (cond ((list? body)
(http-multipart-sender body))
(body (http-blob-sender body))
(else (http-null-sender)))
:receiver receiver
:additional-headers additional-headers)
(if (string=? status "200")
(values body header #f #f)
(let* ((problem-report (get-problem-report header body))
(problem-hint (and-let* ((r (assoc "oauth_problem"
problem-report)))
(cdr r)))
(problem-advice (and-let* ((r (assoc "oauth_problem_advice"
problem-report)))
(cdr r))))
(cond ((and (string=? status "401")
(equal? problem-hint "token_expired"))
(let ((new-token (refresh-access-token access-token)))
(when on-refresh
(on-refresh new-token))
(apply access-protected-resource uri new-token
kwargs)))
(else
(values (error-translator status header body)
header
problem-hint problem-advice)))))))))
)
| null | https://raw.githubusercontent.com/spurious/sagittarius-scheme-mirror/53f104188934109227c01b1e9a9af5312f9ce997/sitelib/net/oauth/consumer.scm | scheme | -*- Scheme -*-
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
based on cl-oauth
authorize-request-token-from-request
consumer functions
Message translator
if something wrong happens within oauth process, http response has
the error message, however we don't know which format it uses.
so let user handle it.
helper
Additional parameters will be stored in the user-data slot of the token.
Additional parameters will be stored in the user-data slot of the
token. POST is recommended as request method. [6.3.1]
If the token contains OAuth Session information it will be checked for
validity before the request is made. Should the server notify us that
it has prematurely expired the token will be refresh as well and the
request sent again using the new token. ON-REFRESH will be called
whenever the access token is renewed.
fxxking Twitter API... | token.scm - OAuth 1.0 library .
Copyright ( c ) 2010 - 2013 < >
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED
LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
#!read-macro=sagittarius/regex
(library (net oauth consumer)
(export obtain-access-token
authorize-request-token
make-authorization-uri
obtain-request-token
access-protected-resource
oauth-uri-encode
oauth-compose-query)
(import (rnrs)
(sagittarius)
(sagittarius control)
(sagittarius regex)
(net oauth misc)
(net oauth token)
(net oauth signature)
(net oauth parameters)
(net oauth query-string)
(srfi :1 lists)
(srfi :19 time)
(rfc :5322)
(rfc uri)
(rfc http))
(define (default-message-translator status header body) body)
(define (oauth-http-request uri
:key (auth-location :header)
(method 'GET)
(sender (http-null-sender))
(receiver (http-string-receiver))
(auth-parameters '())
(parameters '())
(additional-headers '()))
(receive (scheme user-info host port path query frag) (uri-parse uri)
(let ((q (oauth-compose-query (if (eq? auth-location :parameters)
(append parameters auth-parameters)
parameters)))
(headers (if (eq? auth-location :header)
(cons `("Authorization"
,(build-auth-string auth-parameters))
additional-headers)
additional-headers)))
(http-request method host (string-append path "?" q)
:secure (string=? scheme "https")
:sender sender
:receiver receiver
:extra-headers headers))))
(define (generate-auth-parameters consumer signature-method timestamp version
:optional (token #f))
(let ((parameters `(("oauth_consumer_key" ,(token-key consumer))
("oauth_signature_method" ,(string-upcase
(format "~a"
signature-method)))
("oauth_timestamp" ,(number->string timestamp))
("oauth_nonce" ,(number->string
(random-source (greatest-fixnum))))
("oauth_version" ,(format "~a" version)))))
(if token
(cons `("oauth_token" ,(uri-decode-string (token-key token)
:cgi-decode #t))
parameters)
parameters)))
(define (obtain-request-token uri consumer-token :key
(version :1.0)
(user-parameters '())
(timestamp (time-second (current-time)))
(auth-location :header)
(request-method 'POST)
(callback-uri #f)
(additional-headers '())
(signature-method :hmac-sha1)
(error-translator default-message-translator))
(let* ((callback-uri (or callback-uri "oob"))
(auth-parameters (cons `("oauth_callback" ,callback-uri)
(generate-auth-parameters consumer-token
signature-method
timestamp
version)))
(sbs (signature-base-string :uri uri :request-method request-method
:parameters (sort-parameters
(append user-parameters
auth-parameters))))
(signature (oauth-signature signature-method sbs
(token-secret consumer-token)))
(signed-parameters (cons `("oauth_signature" ,signature)
auth-parameters)))
(receive (status header body)
(oauth-http-request uri
:method request-method
:auth-location auth-location
:auth-parameters signed-parameters
:parameters user-parameters
:additional-headers additional-headers)
(unless (string=? status "200")
(assertion-violation 'obtain-request-token
(error-translator status header body)))
(let* ((response (query-string->alist body))
(key (cond ((assoc "oauth_token" response) => cadr)
(else #f)))
(secret (cond ((assoc "oauth_token_secret" response) => cadr)
(else #f)))
(user-data (lset-difference
(lambda (e1 e2) (equal? (car e1) e2))
response '("oauth_token" "oauth_token_secret"))))
(make-request-token :consumer consumer-token :key key
:secret secret :callback-uri callback-uri
:user-data user-data)))))
Return the service provider 's authorization URI . [ 6.2.1 ] in 1.0
(define (make-authorization-uri uri request-token
:key (version :1.0) (callback-uri #f)
(user-parameters '()))
(when (and request-token (request-token-authorized? request-token))
(assertion-violation 'make-authorization-uri
"given request token is already ahtuorised"
request-token))
(let* ((parameters (append user-parameters
(if request-token
`(("oauth_token" ,(token-key request-token)))
'())
(if callback-uri
`(("oauth_callback" ,callback-uri))
'()))))
(if (null? parameters)
uri
(string-append uri "?" (alist->query-string parameters)))))
Authorize a request token explicitly . Returns the authorized token .
(define (authorize-request-token request-token verificateion-code)
(when (and verificateion-code (string? verificateion-code))
(request-token-verification-code request-token verificateion-code)
(request-token-authorized-set! request-token #t))
request-token)
(define (obtain-access-token uri token :key
(consumer-token
(token-consumer token))
(request-method 'POST)
(auth-location :header)
(version :1.0)
(timestamp (time-second (current-time)))
(signature-method :hmac-sha1)
(error-translator default-message-translator))
(let1 refresh? (access-token? token)
(unless refresh?
(or (request-token-authorized? token)
(assertion-violation 'obtain-access-token
"request token is not authorised.")))
(let* ((parameters (append
(generate-auth-parameters consumer-token
signature-method
timestamp
version
token)
(cond (refresh?
`(("oauth_session_handle"
,(access-token-session-handle token))))
((request-token-verification-code token)
=> (lambda (it) `(("oauth_verifier" ,it))))
(else '()))))
(sbs (signature-base-string :uri uri :request-method request-method
:parameters (sort-parameters
parameters)))
(signature (oauth-signature signature-method sbs
(token-secret consumer-token)
(uri-decode-string (token-secret token)
:cgi-decode #t)))
(signed-parameters (cons `("oauth_signature" ,signature)
parameters)))
(receive (status header body)
(oauth-http-request uri
:method request-method
:auth-location auth-location
:auth-parameters signed-parameters)
(define (field name response)
(cond ((assoc name response) => cadr)
(else #f)))
(unless (string=? status "200")
(assertion-violation 'obtain-access-token
(error-translator status header body)))
(let* ((response (query-string->alist body))
(key (field "oauth_token" response))
(secret (field "oauth_token_secret" response))
(session-handle (field "oauth_session_handle" response))
(expires (and-let* ((r (field "oauth_expires_in" response)))
(add-duration! (current-time)
(make-time 'time-duration 0
(string->number r)))))
(authorization-expires
(and-let* ((r (field "oauth_authorization_expires_in"
response)))
(add-duration! (current-time)
(make-time 'time-duration 0
(string->number r)))))
(user-data (remove-oauth-parameters response)))
(unless (and key secret)
(assertion-violation
'obtain-access-token
"oauth_token or/and oauth_token_secret field(s) are not returned"))
(make-access-token :consumer consumer-token
:key (uri-decode-string key :cgi-decode #t)
:secret (uri-decode-string secret :cgi-decode #t)
:session-handle session-handle
:expires expires
:authorization-expires authorization-expires
:origin-uri uri
:user-data user-data))))))
(define (refresh-access-token access-token)
(obtain-access-token (access-token-origin-uri access-token) access-token))
(define (maybe-refresh-access-token access-token :optional (on-refresh #f))
(if (access-token-expired? access-token)
(let ((new-token (refresh-access-token access-token)))
(when on-refresh
(on-refresh new-token))
new-token)
access-token))
(define (get-problem-report-from-headers headers)
(or (and-let* ((auth-header (rfc5322-header-ref headers "www-authenticate"))
(len (string-length auth-header))
( (>= len 5) )
(type (substring auth-header 0 5))
( (string=? type "OAuth") )
( (> len 5)))
(map (lambda (token)
(string-split token "="))
(string-split (substring auth-header 6 len)
#/\s/)))
'()))
(define (get-problem-report headers body)
(let ((from-headers (get-problem-report-from-headers headers)))
from-headers))
Access the protected resource at URI using ACCESS - TOKEN .
(define (access-protected-resource uri access-token :rest kwargs :key
(consumer-token
(token-consumer access-token))
(on-refresh #f)
(timestamp (time-second (current-time)))
(user-parameters '())
(use-user-parameters-for-auth #t)
(additional-headers '())
(version :1.0)
(auth-location :header)
(request-method 'GET)
(signature-method :hmac-sha1)
(body #f)
(receiver (http-string-receiver))
(error-translator
default-message-translator))
(set! access-token (maybe-refresh-access-token access-token on-refresh))
(receive (normalized-uri query-string-parameters) (normalize-uri uri #t)
(let* ((auth-parameters (generate-auth-parameters consumer-token
signature-method
timestamp
version
access-token))
(sbs (signature-base-string :uri normalized-uri
:request-method request-method
:parameters
(sort-parameters
`(,@query-string-parameters
,@(if use-user-parameters-for-auth
user-parameters
'())
,@auth-parameters))
:post-data (and (not (list? body))
body)))
(signature (oauth-signature signature-method sbs
(token-secret consumer-token)
(token-secret access-token)))
(signed-parameters (cons `("oauth_signature" ,signature)
auth-parameters)))
(receive (status header body)
(oauth-http-request uri
:method request-method
:auth-location auth-location
:auth-parameters signed-parameters
:parameters user-parameters
:sender (cond ((list? body)
(http-multipart-sender body))
(body (http-blob-sender body))
(else (http-null-sender)))
:receiver receiver
:additional-headers additional-headers)
(if (string=? status "200")
(values body header #f #f)
(let* ((problem-report (get-problem-report header body))
(problem-hint (and-let* ((r (assoc "oauth_problem"
problem-report)))
(cdr r)))
(problem-advice (and-let* ((r (assoc "oauth_problem_advice"
problem-report)))
(cdr r))))
(cond ((and (string=? status "401")
(equal? problem-hint "token_expired"))
(let ((new-token (refresh-access-token access-token)))
(when on-refresh
(on-refresh new-token))
(apply access-protected-resource uri new-token
kwargs)))
(else
(values (error-translator status header body)
header
problem-hint problem-advice)))))))))
)
|
6fa3938dc728e837bab0d379eb39471534f349bbcda669da41fb833c05900706 | ertugrulcetin/ClojureNews | login.cljs | (ns view.login)
(defn component
[log-in sign-up]
[:table
[:tbody
[:tr
[:td "username:"]
[:td
[:input {:id "loginUsernameId" :name "username" :type "text"}]]]
[:tr
[:td "password:"]
[:td
[:input {:id "loginPasswordId" :name "password" :type "password" :on-key-down (fn [e]
(if (= 13 (.-keyCode e))
(log-in ["loginUsernameId" "loginPasswordId"])))}]]]
[:tr
[:td
[:button {:id "loginButtonId" :on-click (fn [_]
(log-in ["loginUsernameId" "loginPasswordId"]))} "login"]]]
[:tr
[:td]
[:td
[:a {:href "/#!/forgotpassword" :style {:font-size 12 :text-decoration "underline"}} "Forgot Your Password?"]]]
[:tr
[:td
[:br]]]
[:tr
[:td "username:"]
[:td
[:input {:id "signUpUsernameId" :name "username" :type "text"}]]]
[:tr
[:td "password:"]
[:td
[:input {:id "signUpPasswordId" :name "password" :type "password" :on-key-down (fn [e]
(if (= 13 (.-keyCode e))
(sign-up ["signUpUsernameId" "signUpPasswordId"])))}]]]
[:tr
[:td
[:button {:id "signUpButtonId" :on-click (fn [_]
(sign-up ["signUpUsernameId" "signUpPasswordId"]))} "create account"]]]]]) | null | https://raw.githubusercontent.com/ertugrulcetin/ClojureNews/28002f6b620fa4977d561b0cfca0c7f6a635057b/src/cljs/view/login.cljs | clojure | (ns view.login)
(defn component
[log-in sign-up]
[:table
[:tbody
[:tr
[:td "username:"]
[:td
[:input {:id "loginUsernameId" :name "username" :type "text"}]]]
[:tr
[:td "password:"]
[:td
[:input {:id "loginPasswordId" :name "password" :type "password" :on-key-down (fn [e]
(if (= 13 (.-keyCode e))
(log-in ["loginUsernameId" "loginPasswordId"])))}]]]
[:tr
[:td
[:button {:id "loginButtonId" :on-click (fn [_]
(log-in ["loginUsernameId" "loginPasswordId"]))} "login"]]]
[:tr
[:td]
[:td
[:a {:href "/#!/forgotpassword" :style {:font-size 12 :text-decoration "underline"}} "Forgot Your Password?"]]]
[:tr
[:td
[:br]]]
[:tr
[:td "username:"]
[:td
[:input {:id "signUpUsernameId" :name "username" :type "text"}]]]
[:tr
[:td "password:"]
[:td
[:input {:id "signUpPasswordId" :name "password" :type "password" :on-key-down (fn [e]
(if (= 13 (.-keyCode e))
(sign-up ["signUpUsernameId" "signUpPasswordId"])))}]]]
[:tr
[:td
[:button {:id "signUpButtonId" :on-click (fn [_]
(sign-up ["signUpUsernameId" "signUpPasswordId"]))} "create account"]]]]]) | |
6fb7d8c7e691af636f66e1f097ab1684119a0e0cd245fdf926cb11b0410fb973 | Clojure2D/clojure2d-examples | randomwalktrail.clj | (ns examples.NOC.introduction.randomwalktrail
(:require [clojure2d.core :refer :all]
[fastmath.core :as m]
[fastmath.random :as r]
[fastmath.vector :as v])
(:import [fastmath.vector Vec2]))
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
no deftype approach
(defn draw
""
[canvas window ^long framecount state]
(let [[position history] (or state [(v/vec2 (* 0.5 (width canvas)) (* 0.5 (height canvas))) ;; position
clojure.lang.PersistentQueue/EMPTY]) ;; history
velocity (v/generate-vec2 (partial r/drand -2.0 2.0))
^Vec2 nposition (v/add position velocity)
^Vec2 nposition (v/vec2 (m/constrain (.x nposition) 8 (- (width window) 9))
(m/constrain (.y nposition) 8 (- (height window) 9)))
nhistory (conj history nposition)
nhistory (if (== (count nhistory) 1001) (pop nhistory) nhistory)]
(-> canvas
(set-background :white)
(set-color 175 175 175)
(crect (.x nposition) (.y nposition) 16 16)
(set-color 0 0 0)
(crect (.x nposition) (.y nposition) 16 16 true)
(path nhistory))
[nposition nhistory]))
(def window (show-window (canvas 400 400) "Random walk - trail" 30 draw))
| null | https://raw.githubusercontent.com/Clojure2D/clojure2d-examples/9de82f5ac0737b7e78e07a17cf03ac577d973817/src/NOC/introduction/randomwalktrail.clj | clojure | position
history | (ns examples.NOC.introduction.randomwalktrail
(:require [clojure2d.core :refer :all]
[fastmath.core :as m]
[fastmath.random :as r]
[fastmath.vector :as v])
(:import [fastmath.vector Vec2]))
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
no deftype approach
(defn draw
""
[canvas window ^long framecount state]
velocity (v/generate-vec2 (partial r/drand -2.0 2.0))
^Vec2 nposition (v/add position velocity)
^Vec2 nposition (v/vec2 (m/constrain (.x nposition) 8 (- (width window) 9))
(m/constrain (.y nposition) 8 (- (height window) 9)))
nhistory (conj history nposition)
nhistory (if (== (count nhistory) 1001) (pop nhistory) nhistory)]
(-> canvas
(set-background :white)
(set-color 175 175 175)
(crect (.x nposition) (.y nposition) 16 16)
(set-color 0 0 0)
(crect (.x nposition) (.y nposition) 16 16 true)
(path nhistory))
[nposition nhistory]))
(def window (show-window (canvas 400 400) "Random walk - trail" 30 draw))
|
15261a6f2d33355a1427608239b7d607ae9fc44d9f9a6a1c346f7fbe2c80c600 | PyroclastIO/metamorphic | util.cljc | (ns metamorphic.util
#?(:clj (:import [java.util UUID])))
(def final-state-name "__metamorphic-final__")
(defn conjv [xs x]
((fnil conj []) xs x))
(defn index-by [k xs]
(zipmap (map k xs) xs))
(defn kw->fn [kw]
#?(:clj
(let [user-ns (symbol (namespace kw))
user-fn (symbol (name kw))]
(or (ns-resolve user-ns user-fn)
(throw (Exception.))))
:cljs
(js/eval
(str (munge-str (str (namespace kw)))
"."
(munge-str (str (name kw)))))))
(defn resolve-fn [f]
(cond (keyword? f) (kw->fn f)
(fn? f) f
:else (throw (ex-info "Unsupported function type." {}))))
(defn invert-map-coll [m]
(reduce-kv
(fn [all k v]
(update all v conjv k))
{}
m))
(defn select-keys-by [m f]
(reduce-kv
(fn [all k v]
(if (f v)
(assoc all k v)
all))
{}
m))
(defn random-uuid []
#?(:clj (UUID/randomUUID)
:cljs (cljs.core/random-uuid)))
| null | https://raw.githubusercontent.com/PyroclastIO/metamorphic/27dc43ad89423873ca6bcc012cd91fbe2d39271d/src/metamorphic/util.cljc | clojure | (ns metamorphic.util
#?(:clj (:import [java.util UUID])))
(def final-state-name "__metamorphic-final__")
(defn conjv [xs x]
((fnil conj []) xs x))
(defn index-by [k xs]
(zipmap (map k xs) xs))
(defn kw->fn [kw]
#?(:clj
(let [user-ns (symbol (namespace kw))
user-fn (symbol (name kw))]
(or (ns-resolve user-ns user-fn)
(throw (Exception.))))
:cljs
(js/eval
(str (munge-str (str (namespace kw)))
"."
(munge-str (str (name kw)))))))
(defn resolve-fn [f]
(cond (keyword? f) (kw->fn f)
(fn? f) f
:else (throw (ex-info "Unsupported function type." {}))))
(defn invert-map-coll [m]
(reduce-kv
(fn [all k v]
(update all v conjv k))
{}
m))
(defn select-keys-by [m f]
(reduce-kv
(fn [all k v]
(if (f v)
(assoc all k v)
all))
{}
m))
(defn random-uuid []
#?(:clj (UUID/randomUUID)
:cljs (cljs.core/random-uuid)))
| |
7dbe617ffb12774bdeeed1953ab727f17a594bf88018ba4dfda5233470a81821 | kaizhang/bioinformatics-toolkit | Alignment.hs | {-# LANGUAGE Rank2Types #-}
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE FlexibleContexts #
{-# LANGUAGE BangPatterns #-}
module Bio.Motif.Alignment
( alignment
, alignmentBy
, linPenal
, quadPenal
, cubPenal
, expPenal
, l1
, l2
, l3
, lInf
, AlignFn
, CombineFn
) where
import qualified Data.Vector.Generic as G
import qualified Data.Vector.Unboxed as U
import qualified Data.Matrix.Unboxed as M
import Statistics.Sample (mean)
import Bio.Motif
import Bio.Utils.Functions
-- | penalty function takes the number of gaps and matched positions as input,
-- return penalty value
type PenalFn = Int -> Int -> Double
type DistanceFn = forall v. (G.Vector v Double, G.Vector v (Double, Double))
=> v Double -> v Double -> Double
type AlignFn = PWM
-> PWM
-> (Double, (Bool, Int)) -- ^ (distance, (on same direction,
position w.r.t . the first pwm ) )
-- | combine distances from different positions of alignment
type CombineFn = U.Vector Double -> Double
alignment :: AlignFn
alignment = alignmentBy jsd (expPenal 0.05) l1
-- | linear penalty
linPenal :: Double -> PenalFn
linPenal x nGap nMatch = fromIntegral nGap * x / fromIntegral nMatch
# INLINE linPenal #
-- | quadratic penalty
quadPenal :: Double -> PenalFn
quadPenal x nGap nMatch = fromIntegral (nGap ^ (2 :: Int)) * x / fromIntegral nMatch
# INLINE quadPenal #
-- | cubic penalty
cubPenal :: Double -> PenalFn
cubPenal x nGap nMatch = fromIntegral (nGap ^ (3 :: Int)) * x / fromIntegral nMatch
# INLINE cubPenal #
-- | exponentail penalty
expPenal :: Double -> PenalFn
expPenal x nGap nMatch = fromIntegral (2^nGap - 1 :: Int) * x / fromIntegral nMatch
# INLINE expPenal #
l1 :: CombineFn
l1 = mean
# INLINE l1 #
l2 :: CombineFn
l2 = sqrt . mean . U.map (**2)
# INLINE l2 #
l3 :: CombineFn
l3 = (**(1/3)) . mean . U.map (**3)
# INLINE l3 #
lInf :: CombineFn
lInf = U.maximum
# INLINE lInf #
-- internal gaps are not allowed, larger score means larger distance, so the smaller the better
^ compute the distance between two aligned pwms
-> PenalFn -- ^ gap penalty
-> CombineFn
-> AlignFn
alignmentBy fn pFn combFn m1 m2
| fst forwardAlign <= fst reverseAlign =
(fst forwardAlign, (True, snd forwardAlign))
| otherwise = (fst reverseAlign, (False, snd reverseAlign))
where
forwardAlign | d1 < d2 = (d1,i1)
| otherwise = (d2,-i2)
where
(d1,i1) = loop opti2 (1/0,-1) s2 s1 0
(d2,i2) = loop opti1 (1/0,-1) s1 s2 0
reverseAlign | d1 < d2 = (d1,i1)
| otherwise = (d2,-i2)
where
(d1,i1) = loop opti2 (1/0,-1) s2' s1 0
(d2,i2) = loop opti1 (1/0,-1) s1 s2' 0
loop opti (min',i') a b@(_:xs) !i
| opti U.! i >= min' = (min',i')
| d < min' = loop opti (d,i) a xs (i+1)
| otherwise = loop opti (min',i') a xs (i+1)
where
d = combFn sc + pFn nGap nMatch
sc = U.fromList $ zipWith fn a b
nMatch = U.length sc
nGap = n1 + n2 - 2 * nMatch
loop _ acc _ _ _ = acc
opti1 = optimalSc n1 n2
opti2 = optimalSc n2 n1
optimalSc x y = U.fromList $ scanr1 f $ go 0
where
f v min' = min v min'
go i | nM == 0 = []
| otherwise = pFn nG nM : go (i+1)
where
nM = min x $ y - i
nG = i + abs (x - (y-i))
s1 = M.toRows . _mat $ m1
s2 = M.toRows . _mat $ m2
s2' = M.toRows . _mat $ m2'
m2' = rcPWM m2
n1 = length s1
n2 = length s2
# INLINE alignmentBy #
| null | https://raw.githubusercontent.com/kaizhang/bioinformatics-toolkit/89b8d1cdf4fa7b075794c37c070cab8ac27acfaf/bioinformatics-toolkit/src/Bio/Motif/Alignment.hs | haskell | # LANGUAGE Rank2Types #
# LANGUAGE OverloadedStrings #
# LANGUAGE BangPatterns #
| penalty function takes the number of gaps and matched positions as input,
return penalty value
^ (distance, (on same direction,
| combine distances from different positions of alignment
| linear penalty
| quadratic penalty
| cubic penalty
| exponentail penalty
internal gaps are not allowed, larger score means larger distance, so the smaller the better
^ gap penalty | # LANGUAGE FlexibleContexts #
module Bio.Motif.Alignment
( alignment
, alignmentBy
, linPenal
, quadPenal
, cubPenal
, expPenal
, l1
, l2
, l3
, lInf
, AlignFn
, CombineFn
) where
import qualified Data.Vector.Generic as G
import qualified Data.Vector.Unboxed as U
import qualified Data.Matrix.Unboxed as M
import Statistics.Sample (mean)
import Bio.Motif
import Bio.Utils.Functions
type PenalFn = Int -> Int -> Double
type DistanceFn = forall v. (G.Vector v Double, G.Vector v (Double, Double))
=> v Double -> v Double -> Double
type AlignFn = PWM
-> PWM
position w.r.t . the first pwm ) )
type CombineFn = U.Vector Double -> Double
alignment :: AlignFn
alignment = alignmentBy jsd (expPenal 0.05) l1
linPenal :: Double -> PenalFn
linPenal x nGap nMatch = fromIntegral nGap * x / fromIntegral nMatch
# INLINE linPenal #
quadPenal :: Double -> PenalFn
quadPenal x nGap nMatch = fromIntegral (nGap ^ (2 :: Int)) * x / fromIntegral nMatch
# INLINE quadPenal #
cubPenal :: Double -> PenalFn
cubPenal x nGap nMatch = fromIntegral (nGap ^ (3 :: Int)) * x / fromIntegral nMatch
# INLINE cubPenal #
expPenal :: Double -> PenalFn
expPenal x nGap nMatch = fromIntegral (2^nGap - 1 :: Int) * x / fromIntegral nMatch
# INLINE expPenal #
l1 :: CombineFn
l1 = mean
# INLINE l1 #
l2 :: CombineFn
l2 = sqrt . mean . U.map (**2)
# INLINE l2 #
l3 :: CombineFn
l3 = (**(1/3)) . mean . U.map (**3)
# INLINE l3 #
lInf :: CombineFn
lInf = U.maximum
# INLINE lInf #
^ compute the distance between two aligned pwms
-> CombineFn
-> AlignFn
alignmentBy fn pFn combFn m1 m2
| fst forwardAlign <= fst reverseAlign =
(fst forwardAlign, (True, snd forwardAlign))
| otherwise = (fst reverseAlign, (False, snd reverseAlign))
where
forwardAlign | d1 < d2 = (d1,i1)
| otherwise = (d2,-i2)
where
(d1,i1) = loop opti2 (1/0,-1) s2 s1 0
(d2,i2) = loop opti1 (1/0,-1) s1 s2 0
reverseAlign | d1 < d2 = (d1,i1)
| otherwise = (d2,-i2)
where
(d1,i1) = loop opti2 (1/0,-1) s2' s1 0
(d2,i2) = loop opti1 (1/0,-1) s1 s2' 0
loop opti (min',i') a b@(_:xs) !i
| opti U.! i >= min' = (min',i')
| d < min' = loop opti (d,i) a xs (i+1)
| otherwise = loop opti (min',i') a xs (i+1)
where
d = combFn sc + pFn nGap nMatch
sc = U.fromList $ zipWith fn a b
nMatch = U.length sc
nGap = n1 + n2 - 2 * nMatch
loop _ acc _ _ _ = acc
opti1 = optimalSc n1 n2
opti2 = optimalSc n2 n1
optimalSc x y = U.fromList $ scanr1 f $ go 0
where
f v min' = min v min'
go i | nM == 0 = []
| otherwise = pFn nG nM : go (i+1)
where
nM = min x $ y - i
nG = i + abs (x - (y-i))
s1 = M.toRows . _mat $ m1
s2 = M.toRows . _mat $ m2
s2' = M.toRows . _mat $ m2'
m2' = rcPWM m2
n1 = length s1
n2 = length s2
# INLINE alignmentBy #
|
002244be22f0b5b0d101bb65a1973fc5522cc762fcd4359800e403e020cb79c7 | gafiatulin/codewars | Lucas.hs | -- Lucas numbers
-- /
module Codewars.Exercise.Lucas where
lucasnum :: Int -> Integer
lucasnum n = (if (n<0) then (-1)^n' else 1) * (fibS n' + 2 * fibS (n'-1))
where n' = abs n
fib = (map fibS [0 ..] !!)
fibS 0 = 0
fibS 1 = 1
fibS k | k > 0 = fib (k-2) + fib (k-1)
| otherwise = (-1)^(k+1) * fibS (abs k)
| null | https://raw.githubusercontent.com/gafiatulin/codewars/535db608333e854be93ecfc165686a2162264fef/src/6%20kyu/Lucas.hs | haskell | Lucas numbers
/ |
module Codewars.Exercise.Lucas where
lucasnum :: Int -> Integer
lucasnum n = (if (n<0) then (-1)^n' else 1) * (fibS n' + 2 * fibS (n'-1))
where n' = abs n
fib = (map fibS [0 ..] !!)
fibS 0 = 0
fibS 1 = 1
fibS k | k > 0 = fib (k-2) + fib (k-1)
| otherwise = (-1)^(k+1) * fibS (abs k)
|
87eaff77ba3ae53664364e9fb1a2a052246ed6fbf722700dea67abe344490ec9 | rizo/snowflake-os | asmlibrarian.ml | (***********************************************************************)
(* *)
(* Objective Caml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
(* *)
(***********************************************************************)
$ Id$
(* Build libraries of .cmx files *)
open Misc
open Config
open Compilenv
type error =
File_not_found of string
| Archiver_error of string
exception Error of error
let read_info name =
let filename =
try
find_in_path !load_path name
with Not_found ->
raise(Error(File_not_found name)) in
let (info, crc) = Compilenv.read_unit_info filename in
info.ui_force_link <- !Clflags.link_everything;
(* There is no need to keep the approximation in the .cmxa file,
since the compiler will go looking directly for .cmx files.
The linker, which is the only one that reads .cmxa files, does not
need the approximation. *)
info.ui_approx <- Clambda.Value_unknown;
(Filename.chop_suffix filename ".cmx" ^ ext_obj, (info, crc))
let create_archive file_list lib_name =
let archive_name = chop_extension_if_any lib_name ^ ext_lib in
let outchan = open_out_bin lib_name in
try
output_string outchan cmxa_magic_number;
let (objfile_list, descr_list) =
List.split (List.map read_info file_list) in
List.iter2
(fun file_name (unit, crc) ->
Asmlink.check_consistency file_name unit crc)
file_list descr_list;
let infos =
{ lib_units = descr_list;
lib_ccobjs = !Clflags.ccobjs;
lib_ccopts = !Clflags.ccopts } in
output_value outchan infos;
if Ccomp.create_archive archive_name objfile_list <> 0
then raise(Error(Archiver_error archive_name));
close_out outchan
with x ->
close_out outchan;
remove_file lib_name;
remove_file archive_name;
raise x
open Format
let report_error ppf = function
| File_not_found name ->
fprintf ppf "Cannot find file %s" name
| Archiver_error name ->
fprintf ppf "Error while creating the library %s" name
| null | https://raw.githubusercontent.com/rizo/snowflake-os/51df43d9ba715532d325e8880d3b8b2c589cd075/plugins/ocamlopt.opt/asmcomp/asmlibrarian.ml | ocaml | *********************************************************************
Objective Caml
*********************************************************************
Build libraries of .cmx files
There is no need to keep the approximation in the .cmxa file,
since the compiler will go looking directly for .cmx files.
The linker, which is the only one that reads .cmxa files, does not
need the approximation. | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
$ Id$
open Misc
open Config
open Compilenv
type error =
File_not_found of string
| Archiver_error of string
exception Error of error
let read_info name =
let filename =
try
find_in_path !load_path name
with Not_found ->
raise(Error(File_not_found name)) in
let (info, crc) = Compilenv.read_unit_info filename in
info.ui_force_link <- !Clflags.link_everything;
info.ui_approx <- Clambda.Value_unknown;
(Filename.chop_suffix filename ".cmx" ^ ext_obj, (info, crc))
let create_archive file_list lib_name =
let archive_name = chop_extension_if_any lib_name ^ ext_lib in
let outchan = open_out_bin lib_name in
try
output_string outchan cmxa_magic_number;
let (objfile_list, descr_list) =
List.split (List.map read_info file_list) in
List.iter2
(fun file_name (unit, crc) ->
Asmlink.check_consistency file_name unit crc)
file_list descr_list;
let infos =
{ lib_units = descr_list;
lib_ccobjs = !Clflags.ccobjs;
lib_ccopts = !Clflags.ccopts } in
output_value outchan infos;
if Ccomp.create_archive archive_name objfile_list <> 0
then raise(Error(Archiver_error archive_name));
close_out outchan
with x ->
close_out outchan;
remove_file lib_name;
remove_file archive_name;
raise x
open Format
let report_error ppf = function
| File_not_found name ->
fprintf ppf "Cannot find file %s" name
| Archiver_error name ->
fprintf ppf "Error while creating the library %s" name
|
ed0d1b2a1bc8912e5b17d1d9ab666ca9ad0e9f201b26c32d4b6ad81946ffb518 | Copilot-Language/copilot-language | Interpret.hs | Copyright ( c ) 2011 National Institute of Aerospace / Galois , Inc.
| This module implements two interpreters , which may be used to simulated or
-- executed Copilot specifications on a computer to understand their behavior
-- to debug possible errors.
--
-- The interpreters included vary in how the present the results to the user.
One of them uses a format ( csv ) that may be more machine - readable , while the
-- other uses a format that may be easier for humans to read.
{-# LANGUAGE Safe #-}
{-# LANGUAGE GADTs, FlexibleInstances #-}
module Copilot.Language.Interpret
( --Input
csv
, interpret
-- , var
-- , array
-- , func
) where
import Copilot . Core . Type ( Typed , )
import Copilot . Core . Interpret ( ExtEnv ( .. ) )
import Copilot . Core . Type . Dynamic ( toDynF )
import qualified Copilot.Core.Interpret as I
import Copilot.Language.Spec (Spec)
import Copilot.Language.Reify
import Data . List ( foldl ' )
--------------------------------------------------------------------------------
-- data Input where
-- -- External variables.
: : Typed a = > String - > [ a ] - > Input
-- -- External arrays (list of lists).
-- Arr :: Typed a => String -> [[a]] -> Input
-- -- -- External functions (streams).
-- -- Func :: Typed a => String -> Stream a -> Input
-- var :: Typed a => String -> [a] -> Input
-- var = Var
-- array :: Typed a => String -> [[a]] -> Input
-- array = Arr
-- func :: Typed a => String -> Stream a -> Input
-- func = Func
--------------------------------------------------------------------------------
-- | Simulate a number of steps of a given specification, printing the results
-- in a table in comma-separated value (CSV) format.
csv :: Integer -> Spec -> IO ()
csv i spec = do
putStrLn "Note: CSV format does not output observers."
interpret' I.CSV i spec
--------------------------------------------------------------------------------
-- | Simulate a number of steps of a given specification, printing the results
-- in a table in readable format.
--
-- Compared to 'csv', this function is slower but the output may be more
-- readable.
interpret :: Integer -> Spec -> IO ()
interpret = interpret' I.Table
-- | Simulate a number of steps of a given specification, printing the results
-- in the format specified.
interpret' :: I.Format -> Integer -> Spec -> IO ()
interpret' format i spec = do
coreSpec <- reify spec
-- fexts <- funcExts
putStrLn $ I.interpret format (fromIntegral i) coreSpec
-- where
: : ExtEnv
= ExtEnv { varEnv = varEnv varArrExts
, arrEnv = arrEnv varArrExts
-- -- , funcEnv = fexts
-- }
-- We do the two folds below over the data type separately , since one
-- -- component is monadic.
funcExts : : IO [ ( Name , ) ]
-- funcExts =
-- let (names, specs) = unzip $ foldl' envf [] inputs in
-- do ss <- sequence specs
-- return $ zip names ss
-- where
envf : : [ ( Name , ) ] - > Input - > [ ( Name , ) ]
envf acc ( Func name ) =
( name , reify $ observer name ) : acc
envf acc _ = acc
varArrExts : : ExtEnv
varArrExts = foldl ' env ( ExtEnv [ ] [ ] ) inputs
-- where
env : : ExtEnv - > Input - > ExtEnv
env acc ( Var name xs ) =
-- acc { varEnv = (name, toDynF typeOf xs) : varEnv acc }
-- env acc (Arr name xs) =
-- acc { arrEnv = (name, map (toDynF typeOf) xs) : arrEnv acc }
env acc _ = acc
--------------------------------------------------------------------------------
| null | https://raw.githubusercontent.com/Copilot-Language/copilot-language/ea3bb24bc02079d2b96be9e55c73cae7430fca2f/src/Copilot/Language/Interpret.hs | haskell | executed Copilot specifications on a computer to understand their behavior
to debug possible errors.
The interpreters included vary in how the present the results to the user.
other uses a format that may be easier for humans to read.
# LANGUAGE Safe #
# LANGUAGE GADTs, FlexibleInstances #
Input
, var
, array
, func
------------------------------------------------------------------------------
data Input where
-- External variables.
-- External arrays (list of lists).
Arr :: Typed a => String -> [[a]] -> Input
-- -- External functions (streams).
-- Func :: Typed a => String -> Stream a -> Input
var :: Typed a => String -> [a] -> Input
var = Var
array :: Typed a => String -> [[a]] -> Input
array = Arr
func :: Typed a => String -> Stream a -> Input
func = Func
------------------------------------------------------------------------------
| Simulate a number of steps of a given specification, printing the results
in a table in comma-separated value (CSV) format.
------------------------------------------------------------------------------
| Simulate a number of steps of a given specification, printing the results
in a table in readable format.
Compared to 'csv', this function is slower but the output may be more
readable.
| Simulate a number of steps of a given specification, printing the results
in the format specified.
fexts <- funcExts
where
-- , funcEnv = fexts
}
We do the two folds below over the data type separately , since one
-- component is monadic.
funcExts =
let (names, specs) = unzip $ foldl' envf [] inputs in
do ss <- sequence specs
return $ zip names ss
where
where
acc { varEnv = (name, toDynF typeOf xs) : varEnv acc }
env acc (Arr name xs) =
acc { arrEnv = (name, map (toDynF typeOf) xs) : arrEnv acc }
------------------------------------------------------------------------------ | Copyright ( c ) 2011 National Institute of Aerospace / Galois , Inc.
| This module implements two interpreters , which may be used to simulated or
One of them uses a format ( csv ) that may be more machine - readable , while the
module Copilot.Language.Interpret
csv
, interpret
) where
import Copilot . Core . Type ( Typed , )
import Copilot . Core . Interpret ( ExtEnv ( .. ) )
import Copilot . Core . Type . Dynamic ( toDynF )
import qualified Copilot.Core.Interpret as I
import Copilot.Language.Spec (Spec)
import Copilot.Language.Reify
import Data . List ( foldl ' )
: : Typed a = > String - > [ a ] - > Input
csv :: Integer -> Spec -> IO ()
csv i spec = do
putStrLn "Note: CSV format does not output observers."
interpret' I.CSV i spec
interpret :: Integer -> Spec -> IO ()
interpret = interpret' I.Table
interpret' :: I.Format -> Integer -> Spec -> IO ()
interpret' format i spec = do
coreSpec <- reify spec
putStrLn $ I.interpret format (fromIntegral i) coreSpec
: : ExtEnv
= ExtEnv { varEnv = varEnv varArrExts
, arrEnv = arrEnv varArrExts
funcExts : : IO [ ( Name , ) ]
envf : : [ ( Name , ) ] - > Input - > [ ( Name , ) ]
envf acc ( Func name ) =
( name , reify $ observer name ) : acc
envf acc _ = acc
varArrExts : : ExtEnv
varArrExts = foldl ' env ( ExtEnv [ ] [ ] ) inputs
env : : ExtEnv - > Input - > ExtEnv
env acc ( Var name xs ) =
env acc _ = acc
|
a113fd4b07a0f301781330a8af6413769fcd9881302791bc34ff7b8aeaddf485 | pingles/googlecloud | project.clj | (defproject googlecloud "0.3.4"
:description "Google Cloud service clients for Clojure"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:plugins [[lein-sub "0.3.0"]]
:sub ["core" "bigquery" "cloudstorage"]
:eval-in-leiningen true)
| null | https://raw.githubusercontent.com/pingles/googlecloud/8d31afb1c627d40f7293f85c479cbfa98317b056/project.clj | clojure | (defproject googlecloud "0.3.4"
:description "Google Cloud service clients for Clojure"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:plugins [[lein-sub "0.3.0"]]
:sub ["core" "bigquery" "cloudstorage"]
:eval-in-leiningen true)
| |
c6f51aeac3c5c3cb72986fdbe62536448babd7e565b10afffe3c8e510a47e261 | eglaysher/rldev | kfnTypes.ml |
: KFN type definitions
Copyright ( C ) 2006 Haeleth
This program is free software ; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation ; either version 2 of the License , or ( at your option ) any later
version .
This program is distributed in the hope that it will be useful , but WITHOUT
ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE . See the GNU General Public License for more
details .
You should have received a copy of the GNU General Public License along with
this program ; if not , write to the Free Software Foundation , Inc. , 59 Temple
Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
RLdev: KFN type definitions
Copyright (C) 2006 Haeleth
This program is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
details.
You should have received a copy of the GNU General Public License along with
this program; if not, write to the Free Software Foundation, Inc., 59 Temple
Place - Suite 330, Boston, MA 02111-1307, USA.
*)
type parameter = param_type * param_flag list
and param_type = Any | Int | IntC | IntV | Str | StrC | StrV | ResStr
| Special of (int * special_t * special_flag list) list | Complex of parameter list
and special_t = Named of string * parameter list | AsComplex of parameter list
and param_flag = Optional | Return | Uncount | Fake | TextObject | Tagged of string | Argc
and special_flag = NoParens
and flag = PushStore | IsJump | IsGoto | IsCond | IsTextout | NoBraces | IsLbr
and version_t = Class of string | Compare of (int * int * int * int -> bool)
let which_ident : (string * string -> string) ref = ref fst
let handle_module
: (int -> string -> unit) ref
= ref (fun num name -> failwith "not initialised")
let handle_opcode
: (version_t list -> string -> string -> flag list -> int -> int -> int -> parameter list option list -> unit) ref
= ref (fun verlimit ident ccstr flags op_type op_module op_function prototypes -> failwith "not initialised")
| null | https://raw.githubusercontent.com/eglaysher/rldev/e59103b165e1c20bd940942405b2eee767933c96/src/common/kfnTypes.ml | ocaml |
: KFN type definitions
Copyright ( C ) 2006 Haeleth
This program is free software ; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation ; either version 2 of the License , or ( at your option ) any later
version .
This program is distributed in the hope that it will be useful , but WITHOUT
ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE . See the GNU General Public License for more
details .
You should have received a copy of the GNU General Public License along with
this program ; if not , write to the Free Software Foundation , Inc. , 59 Temple
Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
RLdev: KFN type definitions
Copyright (C) 2006 Haeleth
This program is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
details.
You should have received a copy of the GNU General Public License along with
this program; if not, write to the Free Software Foundation, Inc., 59 Temple
Place - Suite 330, Boston, MA 02111-1307, USA.
*)
type parameter = param_type * param_flag list
and param_type = Any | Int | IntC | IntV | Str | StrC | StrV | ResStr
| Special of (int * special_t * special_flag list) list | Complex of parameter list
and special_t = Named of string * parameter list | AsComplex of parameter list
and param_flag = Optional | Return | Uncount | Fake | TextObject | Tagged of string | Argc
and special_flag = NoParens
and flag = PushStore | IsJump | IsGoto | IsCond | IsTextout | NoBraces | IsLbr
and version_t = Class of string | Compare of (int * int * int * int -> bool)
let which_ident : (string * string -> string) ref = ref fst
let handle_module
: (int -> string -> unit) ref
= ref (fun num name -> failwith "not initialised")
let handle_opcode
: (version_t list -> string -> string -> flag list -> int -> int -> int -> parameter list option list -> unit) ref
= ref (fun verlimit ident ccstr flags op_type op_module op_function prototypes -> failwith "not initialised")
| |
3f36bb71206d260dbc6f3073774335520b596e4db3057d72b12d7236bc7ff15f | satori-com/mzbench | mzb_dummycloud_plugin.erl | -module(mzb_dummycloud_plugin).
-export([
start/2,
create_cluster/3,
destroy_cluster/1
]).
%%%===================================================================
%%% API
%%%===================================================================
start(Name, Opts) -> {Name, Opts}.
create_cluster({_Name, _Opts}, _N, _Config) ->
{ok, _Ref = erlang:make_ref(), _User = undefined, ["127.0.0.1"]}.
destroy_cluster(_Ref) ->
ok.
| null | https://raw.githubusercontent.com/satori-com/mzbench/02be2684655cde94d537c322bb0611e258ae9718/server/src/mzb_dummycloud_plugin.erl | erlang | ===================================================================
API
=================================================================== | -module(mzb_dummycloud_plugin).
-export([
start/2,
create_cluster/3,
destroy_cluster/1
]).
start(Name, Opts) -> {Name, Opts}.
create_cluster({_Name, _Opts}, _N, _Config) ->
{ok, _Ref = erlang:make_ref(), _User = undefined, ["127.0.0.1"]}.
destroy_cluster(_Ref) ->
ok.
|
082460665abbf9ad2ee909c292132eb311000420b2ab368adf7dde0c4d12c153 | atdixon/thurber | stateful_team_score_test.clj | (ns demo.stateful-team-score-test
(:require [clojure.test :refer :all]
[game.stateful-team-score]
[test-support]
[thurber :as th])
(:import (org.joda.time Duration Instant)
(org.apache.beam.sdk.values TimestampedValue)
(org.apache.beam.sdk.testing TestStream PAssert TestStream$Builder)
(org.apache.beam.sdk.transforms.windowing IntervalWindow GlobalWindow Window WindowFn GlobalWindows FixedWindows)))
(def ^:private ^Instant base-time (Instant. 0))
(defn- ^Duration secs [n] (Duration/standardSeconds n))
(defn- ^Duration mins [n] (Duration/standardMinutes n))
(defn- ^TimestampedValue ->event* [event ^Duration offset]
(let [ts (.plus base-time offset)]
(TimestampedValue/of (assoc event :timestamp (.getMillis ts)) ts)))
(declare create-scenario*)
(deftest test-score-updates-one-team
(let [scenario (create-scenario* (GlobalWindows.)
(->event* {:user "burgundy" :team "red" :score (int 99)} (secs 10))
(->event* {:user "scarlet" :team "red" :score (int 1)} (secs 20))
(->event* {:user "scarlet" :team "red" :score (int 0)} (secs 30))
(->event* {:user "burgundy" :team "red" :score (int 100)} (secs 40))
(->event* {:user "burgundy" :team "red" :score (int 201)} (secs 50)))]
(-> (PAssert/that scenario)
(.inWindow GlobalWindow/INSTANCE)
(.containsInAnyOrder ^Iterable [["red" 100] ["red" 200] ["red" 401]]))
(test-support/run-test-pipeline! scenario)))
(deftest test-score-updates-per-team
(let [scenario (create-scenario* (GlobalWindows.)
(->event* {:user "burgundy" :team "red" :score (int 50)} (secs 10))
(->event* {:user "scarlet" :team "red" :score (int 50)} (secs 20))
(->event* {:user "navy" :team "blue" :score (int 70)} (secs 30))
(->event* {:user "sky" :team "blue" :score (int 80)} (secs 40))
(->event* {:user "sky" :team "blue" :score (int 50)} (secs 50)))]
(-> (PAssert/that scenario)
(.inWindow GlobalWindow/INSTANCE)
(.containsInAnyOrder ^Iterable
[["red" 100] ["blue" 150] ["blue" 200]]))
(test-support/run-test-pipeline! scenario)))
(deftest team-score-updates-per-window
(let [team-window-duration (mins 5)
window-1 (IntervalWindow. base-time team-window-duration)
window-2 (IntervalWindow. (.end window-1) team-window-duration)
scenario (create-scenario* (FixedWindows/of team-window-duration)
(->event* {:user "burgundy" :team "red" :score (int 50)} (mins 1))
(->event* {:user "scarlet" :team "red" :score (int 50)} (mins 2))
(->event* {:user "burgundy" :team "red" :score (int 50)} (mins 3))
(->event* {:user "burgundy" :team "red" :score (int 60)} (mins 6))
(->event* {:user "scarlet" :team "red" :score (int 60)} (mins 7)))]
(-> (PAssert/that scenario)
(.inWindow window-1)
(.containsInAnyOrder ^Iterable [["red" 100]]))
(-> (PAssert/that scenario)
(.inWindow window-2)
(.containsInAnyOrder ^Iterable [["red" 120]]))
(test-support/run-test-pipeline! scenario)))
(defn- create-scenario* [^WindowFn window & events]
(as-> (TestStream/create th/nippy)
^TestStream$Builder test-stream
(.advanceWatermarkTo test-stream base-time)
(apply test-support/add-elements! test-stream events)
(.advanceWatermarkToInfinity test-stream)
(th/apply! (test-support/create-test-pipeline)
test-stream
(Window/into window)
(th/partial #'th/->kv :team)
(th/partial
#'game.stateful-team-score/update-team-score 100)
#'th/kv->clj))) | null | https://raw.githubusercontent.com/atdixon/thurber/38b86a683e446f2deed55fad157476ae05940d50/test/demo/stateful_team_score_test.clj | clojure | (ns demo.stateful-team-score-test
(:require [clojure.test :refer :all]
[game.stateful-team-score]
[test-support]
[thurber :as th])
(:import (org.joda.time Duration Instant)
(org.apache.beam.sdk.values TimestampedValue)
(org.apache.beam.sdk.testing TestStream PAssert TestStream$Builder)
(org.apache.beam.sdk.transforms.windowing IntervalWindow GlobalWindow Window WindowFn GlobalWindows FixedWindows)))
(def ^:private ^Instant base-time (Instant. 0))
(defn- ^Duration secs [n] (Duration/standardSeconds n))
(defn- ^Duration mins [n] (Duration/standardMinutes n))
(defn- ^TimestampedValue ->event* [event ^Duration offset]
(let [ts (.plus base-time offset)]
(TimestampedValue/of (assoc event :timestamp (.getMillis ts)) ts)))
(declare create-scenario*)
(deftest test-score-updates-one-team
(let [scenario (create-scenario* (GlobalWindows.)
(->event* {:user "burgundy" :team "red" :score (int 99)} (secs 10))
(->event* {:user "scarlet" :team "red" :score (int 1)} (secs 20))
(->event* {:user "scarlet" :team "red" :score (int 0)} (secs 30))
(->event* {:user "burgundy" :team "red" :score (int 100)} (secs 40))
(->event* {:user "burgundy" :team "red" :score (int 201)} (secs 50)))]
(-> (PAssert/that scenario)
(.inWindow GlobalWindow/INSTANCE)
(.containsInAnyOrder ^Iterable [["red" 100] ["red" 200] ["red" 401]]))
(test-support/run-test-pipeline! scenario)))
(deftest test-score-updates-per-team
(let [scenario (create-scenario* (GlobalWindows.)
(->event* {:user "burgundy" :team "red" :score (int 50)} (secs 10))
(->event* {:user "scarlet" :team "red" :score (int 50)} (secs 20))
(->event* {:user "navy" :team "blue" :score (int 70)} (secs 30))
(->event* {:user "sky" :team "blue" :score (int 80)} (secs 40))
(->event* {:user "sky" :team "blue" :score (int 50)} (secs 50)))]
(-> (PAssert/that scenario)
(.inWindow GlobalWindow/INSTANCE)
(.containsInAnyOrder ^Iterable
[["red" 100] ["blue" 150] ["blue" 200]]))
(test-support/run-test-pipeline! scenario)))
(deftest team-score-updates-per-window
(let [team-window-duration (mins 5)
window-1 (IntervalWindow. base-time team-window-duration)
window-2 (IntervalWindow. (.end window-1) team-window-duration)
scenario (create-scenario* (FixedWindows/of team-window-duration)
(->event* {:user "burgundy" :team "red" :score (int 50)} (mins 1))
(->event* {:user "scarlet" :team "red" :score (int 50)} (mins 2))
(->event* {:user "burgundy" :team "red" :score (int 50)} (mins 3))
(->event* {:user "burgundy" :team "red" :score (int 60)} (mins 6))
(->event* {:user "scarlet" :team "red" :score (int 60)} (mins 7)))]
(-> (PAssert/that scenario)
(.inWindow window-1)
(.containsInAnyOrder ^Iterable [["red" 100]]))
(-> (PAssert/that scenario)
(.inWindow window-2)
(.containsInAnyOrder ^Iterable [["red" 120]]))
(test-support/run-test-pipeline! scenario)))
(defn- create-scenario* [^WindowFn window & events]
(as-> (TestStream/create th/nippy)
^TestStream$Builder test-stream
(.advanceWatermarkTo test-stream base-time)
(apply test-support/add-elements! test-stream events)
(.advanceWatermarkToInfinity test-stream)
(th/apply! (test-support/create-test-pipeline)
test-stream
(Window/into window)
(th/partial #'th/->kv :team)
(th/partial
#'game.stateful-team-score/update-team-score 100)
#'th/kv->clj))) | |
1de41723c94bd057311061d91493bb4740d66100e158c3e3e7f1cd9a0d3fcc9a | sweirich/trellys | Rewriting.hs | # LANGUAGE TemplateHaskell , DeriveDataTypeable , ScopedTypeVariables ,
FlexibleInstances , MultiParamTypeClasses , FlexibleContexts ,
UndecidableInstances , TypeSynonymInstances #
FlexibleInstances, MultiParamTypeClasses, FlexibleContexts,
UndecidableInstances, TypeSynonymInstances #-}
module Language.SepCore.Rewriting where
import Language.SepCore.Erasure
import Language.SepCore.Syntax
import Language.SepCore.PrettyPrint
import Language.SepCore.Monad
import Language.SepCore.Error
import Generics.RepLib hiding (Con(..))
import Control.Monad.Reader hiding (join)
import Unbound.LocallyNameless hiding (Con(..),Equal,Refl)
import Control.Monad.Trans
import Control.Applicative
import Control.Monad
import Control.Monad.Error
import Data.List
import Text.PrettyPrint
import qualified Data.Map as M
type Trace = StateT [ ETerm ] ( FreshMT ( ErrorT TypeError IO ) )
-- | val t judgement
isValue :: ETerm -> TCMonad Bool
isValue (ETermVar x) = do
v <- getValue (ArgNameTerm (translate x))
case v of
Value ->
return True
NonValue -> return False
isValue (EType i) = return True
isValue (EPi binding s) = do
((x,Embed t'), t) <- unbind binding
isValue t'
isValue (ELambda b) = return True
isValue (ERec b) = return True
isValue (ETCast t) = return True
isValue ( EApp ( ETermVar x ) t ' ) = do
-- v1 <- isValue (ETermVar x)
-- v2 <- isValue t'
-- return (v1 && v2)
isValue (EApp t t') = do
v1 <- isValue t
if v1 then case t of
EApp t1 t2 -> do
v2 <- isValue t'
return (v1 && v2)
ETermVar x -> do
v2 <- isValue t'
return (v1 && v2)
ERec b -> return False
ELambda b -> return False
_ -> return True else return False
isValue _ = return False
type Trace = [ETerm]
reduce : : Integer - > EExpr - > ( Integer - > EExpr - > TCMonad EExpr ) - > TCMonad EExpr
-- | instantiate variable from the definition context.
inst :: ETerm -> TCMonad ETerm
inst (ETermVar x) = do
env <- ask
case M.lookup (ArgNameTerm (translate x)) (snd env) of
Just a -> eraseArg a
Nothing -> return (ETermVar x)
| one step reduction
rewrite :: ETerm -> TCMonad ETerm
rewrite (ETermVar x) = do
inst (ETermVar x)
-- v <-isValue (ETermVar x)
-- if v then return (ETermVar x) else inst (ETermVar x)
-- | beta-v reduction
rewrite (EApp t1 t2) = do
case t1 of
ELambda b -> do
v <- isValue t2
if v then do
(n, t) <- unbind b
return (subst n t2 t) else do t2'<- rewrite t2
return (EApp (ELambda b) t2')
ERec b -> do
v <- isValue t2
if v then do
((x, f),t) <- unbind b
return (subst f (ERec b) (subst x t2 t)) else do t2' <- rewrite t2
return (EApp (ERec b) t2')
t -> do v <- isValue t
if v then do
t2' <- rewrite t2
return (EApp t1 t2') else do
t' <- rewrite t
return (EApp t' t2)
rewrite (ELet b t) = do
v <- isValue t
if v then do
(x,t') <- unbind b
return (subst x t t') else do t1 <- rewrite t
return (ELet b t1)
rewrite (ETCast t) = do
v <- isValue t
if v then return t else do t' <- rewrite t
return (ETCast t')
rewrite (ECase t b) = do
v <- isValue t
if v then
let a = fun t in
case a of
(ETermVar x) -> do
case lookup (name2String x) b of
Just branch -> do
(ls, t1) <- unbind branch
let args = (arg t)
let lenarg = length ls
let lenact = length args
let n = zip ls args
if lenarg == lenact then
return (substs n t1) else typeError $ disp ("The arguments of the term doesn't match with constructor") <+> disp (x)
Nothing -> typeError $ disp ("Can't find data constructors from the branches")
_ -> typeError $ disp ("not a correct form") else do t' <- rewrite t
return $ ECase t' b
rewrite t = return t
reduce :: ETerm -> Integer -> TCMonad [ETerm]
reduce t 0 = return [t]
reduce t i = do t' <- rewrite t
if aeq t t' then return [t'] else
do
cs <- reduce t' (i-1)
return (t':cs)
type LETerm = [ETerm]
instance Disp LETerm where
disp cs = (vcat [braces (disp c') | c' <- cs])
joinable :: ETerm -> Integer -> ETerm -> Integer -> TCMonad Bool
joinable t1 i t2 j = do trace1 <- reduce t1 i
trace2 <- reduce t2 j
-- typeError $ disp trace1 <+> text "$$"<+>disp trace2 <+> text "end."
let r = intersectBy aeq trace1 trace2
if null r then return False else return True
-- need to think more about this.
fun (EApp t1 t2) = fun t1
fun t = t
flat ( EApp t1 t2 ) = flat t1 + + flat t2
-- flat t = t
arg (EApp t1 t2) = arg t1 ++ [t2]
arg t = []
-- instantiate t [] = []
-- instantiate t (h:[]) = [t]
instantiate ( EApp t t ' ) ( h : cs ) = t : ( instantiate t ' cs )
| null | https://raw.githubusercontent.com/sweirich/trellys/63ea89d8fa09929c23504665c55a3d909fe047c5/lib/sep-core/Language/SepCore/Rewriting.hs | haskell | | val t judgement
v1 <- isValue (ETermVar x)
v2 <- isValue t'
return (v1 && v2)
| instantiate variable from the definition context.
v <-isValue (ETermVar x)
if v then return (ETermVar x) else inst (ETermVar x)
| beta-v reduction
typeError $ disp trace1 <+> text "$$"<+>disp trace2 <+> text "end."
need to think more about this.
flat t = t
instantiate t [] = []
instantiate t (h:[]) = [t] | # LANGUAGE TemplateHaskell , DeriveDataTypeable , ScopedTypeVariables ,
FlexibleInstances , MultiParamTypeClasses , FlexibleContexts ,
UndecidableInstances , TypeSynonymInstances #
FlexibleInstances, MultiParamTypeClasses, FlexibleContexts,
UndecidableInstances, TypeSynonymInstances #-}
module Language.SepCore.Rewriting where
import Language.SepCore.Erasure
import Language.SepCore.Syntax
import Language.SepCore.PrettyPrint
import Language.SepCore.Monad
import Language.SepCore.Error
import Generics.RepLib hiding (Con(..))
import Control.Monad.Reader hiding (join)
import Unbound.LocallyNameless hiding (Con(..),Equal,Refl)
import Control.Monad.Trans
import Control.Applicative
import Control.Monad
import Control.Monad.Error
import Data.List
import Text.PrettyPrint
import qualified Data.Map as M
type Trace = StateT [ ETerm ] ( FreshMT ( ErrorT TypeError IO ) )
isValue :: ETerm -> TCMonad Bool
isValue (ETermVar x) = do
v <- getValue (ArgNameTerm (translate x))
case v of
Value ->
return True
NonValue -> return False
isValue (EType i) = return True
isValue (EPi binding s) = do
((x,Embed t'), t) <- unbind binding
isValue t'
isValue (ELambda b) = return True
isValue (ERec b) = return True
isValue (ETCast t) = return True
isValue ( EApp ( ETermVar x ) t ' ) = do
isValue (EApp t t') = do
v1 <- isValue t
if v1 then case t of
EApp t1 t2 -> do
v2 <- isValue t'
return (v1 && v2)
ETermVar x -> do
v2 <- isValue t'
return (v1 && v2)
ERec b -> return False
ELambda b -> return False
_ -> return True else return False
isValue _ = return False
type Trace = [ETerm]
reduce : : Integer - > EExpr - > ( Integer - > EExpr - > TCMonad EExpr ) - > TCMonad EExpr
inst :: ETerm -> TCMonad ETerm
inst (ETermVar x) = do
env <- ask
case M.lookup (ArgNameTerm (translate x)) (snd env) of
Just a -> eraseArg a
Nothing -> return (ETermVar x)
| one step reduction
rewrite :: ETerm -> TCMonad ETerm
rewrite (ETermVar x) = do
inst (ETermVar x)
rewrite (EApp t1 t2) = do
case t1 of
ELambda b -> do
v <- isValue t2
if v then do
(n, t) <- unbind b
return (subst n t2 t) else do t2'<- rewrite t2
return (EApp (ELambda b) t2')
ERec b -> do
v <- isValue t2
if v then do
((x, f),t) <- unbind b
return (subst f (ERec b) (subst x t2 t)) else do t2' <- rewrite t2
return (EApp (ERec b) t2')
t -> do v <- isValue t
if v then do
t2' <- rewrite t2
return (EApp t1 t2') else do
t' <- rewrite t
return (EApp t' t2)
rewrite (ELet b t) = do
v <- isValue t
if v then do
(x,t') <- unbind b
return (subst x t t') else do t1 <- rewrite t
return (ELet b t1)
rewrite (ETCast t) = do
v <- isValue t
if v then return t else do t' <- rewrite t
return (ETCast t')
rewrite (ECase t b) = do
v <- isValue t
if v then
let a = fun t in
case a of
(ETermVar x) -> do
case lookup (name2String x) b of
Just branch -> do
(ls, t1) <- unbind branch
let args = (arg t)
let lenarg = length ls
let lenact = length args
let n = zip ls args
if lenarg == lenact then
return (substs n t1) else typeError $ disp ("The arguments of the term doesn't match with constructor") <+> disp (x)
Nothing -> typeError $ disp ("Can't find data constructors from the branches")
_ -> typeError $ disp ("not a correct form") else do t' <- rewrite t
return $ ECase t' b
rewrite t = return t
reduce :: ETerm -> Integer -> TCMonad [ETerm]
reduce t 0 = return [t]
reduce t i = do t' <- rewrite t
if aeq t t' then return [t'] else
do
cs <- reduce t' (i-1)
return (t':cs)
type LETerm = [ETerm]
instance Disp LETerm where
disp cs = (vcat [braces (disp c') | c' <- cs])
joinable :: ETerm -> Integer -> ETerm -> Integer -> TCMonad Bool
joinable t1 i t2 j = do trace1 <- reduce t1 i
trace2 <- reduce t2 j
let r = intersectBy aeq trace1 trace2
if null r then return False else return True
fun (EApp t1 t2) = fun t1
fun t = t
flat ( EApp t1 t2 ) = flat t1 + + flat t2
arg (EApp t1 t2) = arg t1 ++ [t2]
arg t = []
instantiate ( EApp t t ' ) ( h : cs ) = t : ( instantiate t ' cs )
|
13c2a53c273672c40f5ebd8bf7740b5a8826afee6649ff5111b2de15c95f828f | SamueleGiraudo/Bud-Music-Box | DegreeMonoid.ml | Author :
* Creation : apr . 2021
* Modifications : apr . 2021 , aug . 2022
* Creation: apr. 2021
* Modifications: apr. 2021, aug. 2022
*)
(* A degree monoid is a monoid structure on degrees. Operads on (multi-)patterns are
* parametrized by degree monoids. *)
type degree_monoid = {
is_element: int -> bool;
product: int -> int -> int;
unity: int
}
Returns a function testing if its argument is an element of the degree monoid dm .
let is_element dm =
dm.is_element
(* Returns the element which is the product of x and x' in the degree monoid dm. *)
let product dm x x' =
assert (dm.is_element x);
assert (dm.is_element x');
dm.product x x'
(* Returns the unity of the degree monoid dm. *)
let unity dm =
dm.unity
(* Returns the additive degree monoid. *)
let add_int =
{is_element = Fun.const true; product = (+); unity = 0}
(* Returns the cyclic degree monoid of order k. *)
let cyclic k =
assert (k >= 1);
{is_element = (fun x -> 0 <= x && x < k);
product = (fun x x' -> (x + x') mod k);
unity = 0}
(* Returns the max degree monoid with z as minimal element. *)
let max z =
{is_element = (fun x -> z <= x); product = max; unity = z}
| null | https://raw.githubusercontent.com/SamueleGiraudo/Bud-Music-Box/45eae635fcbd85555f74d864b31ab25ee50e6bde/Sources/DegreeMonoid.ml | ocaml | A degree monoid is a monoid structure on degrees. Operads on (multi-)patterns are
* parametrized by degree monoids.
Returns the element which is the product of x and x' in the degree monoid dm.
Returns the unity of the degree monoid dm.
Returns the additive degree monoid.
Returns the cyclic degree monoid of order k.
Returns the max degree monoid with z as minimal element. | Author :
* Creation : apr . 2021
* Modifications : apr . 2021 , aug . 2022
* Creation: apr. 2021
* Modifications: apr. 2021, aug. 2022
*)
type degree_monoid = {
is_element: int -> bool;
product: int -> int -> int;
unity: int
}
Returns a function testing if its argument is an element of the degree monoid dm .
let is_element dm =
dm.is_element
let product dm x x' =
assert (dm.is_element x);
assert (dm.is_element x');
dm.product x x'
let unity dm =
dm.unity
let add_int =
{is_element = Fun.const true; product = (+); unity = 0}
let cyclic k =
assert (k >= 1);
{is_element = (fun x -> 0 <= x && x < k);
product = (fun x x' -> (x + x') mod k);
unity = 0}
(* The max degree monoid over the integers >= z, with the maximum as product
   and z itself as unity. The product is written Stdlib.max explicitly: this
   very binding shadows [max], and the qualification keeps the resolution
   correct even if the binding is later made recursive. *)
let max z =
    {is_element = (fun x -> z <= x); product = Stdlib.max; unity = z}
|
fee8db8536fd5b8b0a78bde1c0269fd19f075ed5e13c6b0c1aff5d8361921105 | huangjs/cl | ddr-exs-tests.lisp | (in-package :ddr-tests)
;;; Test cases for the exercises in ddr-exs.html
;;; MEMBER
(define-test member
  ;; Deductive membership over CONS-encoded lists.
  (assert-false (ask '(member a nil)))           ; nothing is in the empty list
  (assert-true (ask '(member a (cons a nil))))   ; found at the head
  (assert-true (ask '(member b (cons a (cons b (cons c nil))))))   ; found later
  (assert-false (ask '(member d (cons a (cons b (cons c nil))))))) ; absent
;;; ALL-DIFFERENT
(define-test all-different
  ;; Before any TELL, no DIFFERENT fact is derivable.
  (assert-false (ask '(different a b)))
  ;; Asserting ALL-DIFFERENT on the empty list adds nothing.
  (tell '(all-different nil))
  (assert-false (ask '(different a b)))
  ;; Asserting ALL-DIFFERENT on (a b c) makes every ordered pair of
  ;; distinct elements DIFFERENT ...
  (tell '(all-different (cons a (cons b (cons c nil)))))
  (assert-true (ask '(different a b)))
  (assert-true (ask '(different a c)))
  (assert-true (ask '(different b a)))
  (assert-true (ask '(different b c)))
  (assert-true (ask '(different c a)))
  (assert-true (ask '(different c b)))
  ;; ... but never an element with itself.
  (assert-false (ask '(different a a))))
;;; MAP COLORING
(define-test color-map1
  ;; Fully constrained query: exactly one coloring remains.
  (assert-equal '((colors-for map1 red blue green yellow))
                (ask '(colors-for map1 red blue green ?d)))
  ;; Partially constrained queries enumerate the remaining choices.
  (assert-equal 2 (length (ask '(colors-for map1 red blue ?c ?d))))
  (assert-equal 24 (length (ask '(colors-for map1 ?a ?b ?c ?d))))
  ;; An inconsistent assignment yields no answers.
  (assert-equal nil (ask '(colors-for map1 red blue green red))))
(define-test color-map2
  ;; Fully constrained query: exactly one coloring remains.
  (assert-equal '((colors-for map2 red blue green blue yellow))
                (ask '(colors-for map2 red blue green ?d ?e)))
  ;; Partially constrained queries enumerate the remaining choices.
  (assert-equal 2 (length (ask '(colors-for map2 red blue ?c ?d ?e))))
  (assert-equal 24 (length (ask '(colors-for map2 ?a ?b ?c ?d ?e))))
  ;; An inconsistent assignment yields no answers.
  (assert-equal nil (ask '(colors-for map2 red blue green yellow ?e))))
(define-test color-map3
  ;; Fully constrained query: exactly one coloring remains.
  (assert-equal '((colors-for map3 red blue green yellow green blue))
                (ask '(colors-for map3 red blue green yellow ?e ?f)))
  ;; Partially constrained queries enumerate the remaining choices.
  (assert-equal 1 (length (ask '(colors-for map3 red blue green ?d ?e ?f))))
  (assert-equal 24 (length (ask '(colors-for map3 ?a ?b ?c ?d ?e ?f))))
  ;; An inconsistent assignment yields no answers.
  (assert-equal nil (ask '(colors-for map3 red blue green blue ?e ?f))))
;;; SHAKEY 1.0
;;; Test cases for 1 box, no locks.
;;;
;;; The goal state is always (v1-state ?rloc room1), meaning
;;; the box has to end up in room1, and it doesn't matter where
;;; the robot ends up.
(define-test shakey-1
  ;; Box already in room1: the empty plan suffices.
  (assert-equal '(nil)
                (ask '(plan-for (v1-state room1 room1)
                                (v1-state ?1 room1)
                                ?actions)
                     '?actions))
  ;; Robot and box both in the hall: a single push.
  (assert-equal '((cons (push-box hall room1) nil))
                (ask '(plan-for (v1-state hall hall)
                                (v1-state ?1 room1)
                                ?actions)
                     '?actions))
  ;; Robot and box in room2: push to the hall, then into room1.
  (assert-equal '((cons (push-box room2 hall)
                        (cons (push-box hall room1) nil)))
                (ask '(plan-for (v1-state room2 room2)
                                (v1-state ?1 room1)
                                ?actions)
                     '?actions))
  ;; Robot must first walk to the box before pushing it home.
  (assert-equal '((cons (move-to hall)
                        (cons (move-to room2)
                              (cons (push-box room2 hall)
                                    (cons (push-box hall room1) nil)))))
                (ask '(plan-for (v1-state room1 room2)
                                (v1-state ?1 room1)
                                ?actions)
                     '?actions)))
;;; SHAKEY 2.0
;;; Test cases for 1 box with locks.
;;;
;;; The goal state is always
;;;
;;;    (v2-state ?rloc room1 ?unlocked)
;;;
;;; meaning the box has to end up in room1, and we don't care
;;; where the robot is or what rooms are unlocked.
(define-test shakey-2
  ;; With the relevant rooms unlocked, plans match SHAKEY 1.0.
  (assert-equal '(nil)
                (ask '(plan-for (v2-state room1 room1 nil)
                                (v2-state ?rloc room1 ?unlocked)
                                ?actions)
                     '?actions))
  (assert-equal '((cons (push-box hall room1) nil))
                (ask '(plan-for (v2-state hall hall (cons room1 nil))
                                (v2-state ?rloc room1 ?unlocked)
                                ?actions)
                     '?actions))
  (assert-equal '((cons (push-box room2 hall) (cons (push-box hall room1) nil)))
                (ask '(plan-for (v2-state room2 room2 (cons room1 (cons room2 nil)))
                                (v2-state ?rloc room1 ?unlocked)
                                ?actions)
                     '?actions))
  (assert-equal '((cons (move-to hall)
                        (cons (move-to room2)
                              (cons (push-box room2 hall)
                                    (cons (push-box hall room1) nil)))))
                (ask '(plan-for (v2-state room1 room2 (cons room1 (cons room2 nil)))
                                (v2-state ?rloc room1 ?unlocked)
                                ?actions)
                     '?actions))
  ;; The room holding the box is locked: an UNLOCK must precede entry.
  (assert-equal '((cons (move-to hall)
                        (cons (unlock room2)
                              (cons (move-to room2)
                                    (cons (push-box room2 hall)
                                          (cons (push-box hall room1) nil))))))
                (ask '(plan-for (v2-state room1 room2 (cons room1 nil))
                                (v2-state ?rloc room1 ?unlocked)
                                ?actions)
                     '?actions))
  ;; The goal room is locked; robot and box start in the hall.
  (assert-equal '((cons (unlock room1)
                        (cons (push-box hall room1) nil)))
                (ask '(plan-for (v2-state hall hall nil)
                                (v2-state ?rloc room1 ?unlocked)
                                ?actions)
                     '?actions))
  ;; The goal room is locked; robot in the hall, box in room2.
  (assert-equal '((cons (move-to room2)
                        (cons (push-box room2 hall)
                              (cons (unlock room1)
                                    (cons (push-box hall room1) nil)))))
                (ask '(plan-for (v2-state hall room2 (cons room2 nil))
                                (v2-state ?rloc room1 ?unlocked)
                                ?actions)
                     '?actions))
  ;; Both rooms locked, robot in the hall: two UNLOCKs are needed.
  (assert-equal '((cons (unlock room2)
                        (cons (move-to room2)
                              (cons (push-box room2 hall)
                                    (cons (unlock room1)
                                          (cons (push-box hall room1) nil))))))
                (ask '(plan-for (v2-state hall room2 nil)
                                (v2-state ?rloc room1 ?unlocked)
                                ?actions)
                     '?actions))
  ;; A robot shut inside a locked room can never reach the box: no plan.
  (assert-equal nil
                (ask '(plan-for (v2-state room1 room2 nil)
                                (v2-state ?rloc room1 ?unlocked)
                                ?actions)
                     '?actions)))
;;; SHAKEY 3.0
;;; Test cases for N boxes with locks, going to the same room.
;;;
;;; The goal state that stops the recursion is that the list
;;; of box locations is nil:
;;;
;;; (v3-state ?rloc nil ?gloc ?unlocked)
(define-test shakey-3
  ;; No boxes left to move: the empty plan.
  (assert-equal '(nil)
                (ask '(plan-for (v3-state ? nil ? ?)
                                ?actions)
                     '?actions))
  ;; One box, every room locked, robot in the hall.
  (assert-equal '((cons (unlock room2)
                        (cons (move-to room2)
                              (cons (push-box room2 hall)
                                    (cons (unlock room1)
                                          (cons (push-box hall room1) nil))))))
                (ask '(plan-for (v3-state hall (cons room2 nil) room1 nil)
                                ?actions)
                     '?actions))
  ;; Two boxes, every room locked, robot in the hall: the two per-box
  ;; plans are chained, returning through the hall in between.
  (assert-equal '((cons (unlock room2)
                        (cons (move-to room2)
                              (cons (push-box room2 hall)
                                    (cons (unlock room1)
                                          (cons (push-box hall room1)
                                                (cons (move-to hall)
                                                      (cons (unlock room3)
                                                            (cons (move-to room3)
                                                                  (cons (push-box room3 hall)
                                                                        (cons (push-box hall room1)
                                                                              nil)))))))))))
                (ask '(plan-for (v3-state hall (cons room2 (cons room3 nil)) room1 nil)
                                ?actions)
                     '?actions)))
| null | https://raw.githubusercontent.com/huangjs/cl/96158b3f82f82a6b7d53ef04b3b29c5c8de2dbf7/lib/other-code/cs325/www.cs.northwestern.edu/academics/courses/325/programs/ddr-exs-tests.lisp | lisp | MEMBER
ALL-DIFFERENT
MAP COLORING
SHAKEY 1.0
the robot ends up.
SHAKEY 2.0
The goal state is always
where the robot is or what rooms are unlocked.
Test with rooms unlocked
Test with the room with the box locked
Test with the goal room locked, robot and box in hall
Test with both rooms locked and the robot in the hall
Test with robot in locked room
SHAKEY 3.0
Test cases for N boxes with locks, going to the same room.
The goal state that stops the recursion is that the list
of box locations is nil:
(v3-state ?rloc nil ?gloc ?unlocked)
Test already done case | (in-package :ddr-tests)
;;; Test cases for the exercises in ddr-exs.html
(define-test member
  ;; Deductive membership over CONS-encoded lists.
  (assert-false (ask '(member a nil)))           ; nothing is in the empty list
  (assert-true (ask '(member a (cons a nil))))   ; found at the head
  (assert-true (ask '(member b (cons a (cons b (cons c nil))))))   ; found later
  (assert-false (ask '(member d (cons a (cons b (cons c nil))))))) ; absent
(define-test all-different
  ;; Before any TELL, no DIFFERENT fact is derivable.
  (assert-false (ask '(different a b)))
  ;; Asserting ALL-DIFFERENT on the empty list adds nothing.
  (tell '(all-different nil))
  (assert-false (ask '(different a b)))
  ;; Asserting ALL-DIFFERENT on (a b c) makes every ordered pair of
  ;; distinct elements DIFFERENT ...
  (tell '(all-different (cons a (cons b (cons c nil)))))
  (assert-true (ask '(different a b)))
  (assert-true (ask '(different a c)))
  (assert-true (ask '(different b a)))
  (assert-true (ask '(different b c)))
  (assert-true (ask '(different c a)))
  (assert-true (ask '(different c b)))
  ;; ... but never an element with itself.
  (assert-false (ask '(different a a))))
(define-test color-map1
  ;; Fully constrained query: exactly one coloring remains.
  (assert-equal '((colors-for map1 red blue green yellow))
                (ask '(colors-for map1 red blue green ?d)))
  ;; Partially constrained queries enumerate the remaining choices.
  (assert-equal 2 (length (ask '(colors-for map1 red blue ?c ?d))))
  (assert-equal 24 (length (ask '(colors-for map1 ?a ?b ?c ?d))))
  ;; An inconsistent assignment yields no answers.
  (assert-equal nil (ask '(colors-for map1 red blue green red))))
(define-test color-map2
  ;; Fully constrained query: exactly one coloring remains.
  (assert-equal '((colors-for map2 red blue green blue yellow))
                (ask '(colors-for map2 red blue green ?d ?e)))
  ;; Partially constrained queries enumerate the remaining choices.
  (assert-equal 2 (length (ask '(colors-for map2 red blue ?c ?d ?e))))
  (assert-equal 24 (length (ask '(colors-for map2 ?a ?b ?c ?d ?e))))
  ;; An inconsistent assignment yields no answers.
  (assert-equal nil (ask '(colors-for map2 red blue green yellow ?e))))
(define-test color-map3
  ;; Fully constrained query: exactly one coloring remains.
  (assert-equal '((colors-for map3 red blue green yellow green blue))
                (ask '(colors-for map3 red blue green yellow ?e ?f)))
  ;; Partially constrained queries enumerate the remaining choices.
  (assert-equal 1 (length (ask '(colors-for map3 red blue green ?d ?e ?f))))
  (assert-equal 24 (length (ask '(colors-for map3 ?a ?b ?c ?d ?e ?f))))
  ;; An inconsistent assignment yields no answers.
  (assert-equal nil (ask '(colors-for map3 red blue green blue ?e ?f))))
;;; Test cases for 1 box, no locks.
;;; The goal state is always (v1-state ?rloc room1), meaning the box has
;;; to end up in room1, and it doesn't matter where the robot ends up.
(define-test shakey-1
  ;; Box already in room1: the empty plan suffices.
  (assert-equal '(nil)
                (ask '(plan-for (v1-state room1 room1)
                                (v1-state ?1 room1)
                                ?actions)
                     '?actions))
  ;; Robot and box both in the hall: a single push.
  (assert-equal '((cons (push-box hall room1) nil))
                (ask '(plan-for (v1-state hall hall)
                                (v1-state ?1 room1)
                                ?actions)
                     '?actions))
  ;; Robot and box in room2: push to the hall, then into room1.
  (assert-equal '((cons (push-box room2 hall)
                        (cons (push-box hall room1) nil)))
                (ask '(plan-for (v1-state room2 room2)
                                (v1-state ?1 room1)
                                ?actions)
                     '?actions))
  ;; Robot must first walk to the box before pushing it home.
  (assert-equal '((cons (move-to hall)
                        (cons (move-to room2)
                              (cons (push-box room2 hall)
                                    (cons (push-box hall room1) nil)))))
                (ask '(plan-for (v1-state room1 room2)
                                (v1-state ?1 room1)
                                ?actions)
                     '?actions)))
;;; Test cases for 1 box with locks. The goal state is always
;;;    (v2-state ?rloc room1 ?unlocked)
;;; meaning the box has to end up in room1, and we don't care where the robot is.
(define-test shakey-2
  ;; With the relevant rooms unlocked, plans match SHAKEY 1.0.
  (assert-equal '(nil)
                (ask '(plan-for (v2-state room1 room1 nil)
                                (v2-state ?rloc room1 ?unlocked)
                                ?actions)
                     '?actions))
  (assert-equal '((cons (push-box hall room1) nil))
                (ask '(plan-for (v2-state hall hall (cons room1 nil))
                                (v2-state ?rloc room1 ?unlocked)
                                ?actions)
                     '?actions))
  (assert-equal '((cons (push-box room2 hall) (cons (push-box hall room1) nil)))
                (ask '(plan-for (v2-state room2 room2 (cons room1 (cons room2 nil)))
                                (v2-state ?rloc room1 ?unlocked)
                                ?actions)
                     '?actions))
  (assert-equal '((cons (move-to hall)
                        (cons (move-to room2)
                              (cons (push-box room2 hall)
                                    (cons (push-box hall room1) nil)))))
                (ask '(plan-for (v2-state room1 room2 (cons room1 (cons room2 nil)))
                                (v2-state ?rloc room1 ?unlocked)
                                ?actions)
                     '?actions))
  ;; The room holding the box is locked: an UNLOCK must precede entry.
  (assert-equal '((cons (move-to hall)
                        (cons (unlock room2)
                              (cons (move-to room2)
                                    (cons (push-box room2 hall)
                                          (cons (push-box hall room1) nil))))))
                (ask '(plan-for (v2-state room1 room2 (cons room1 nil))
                                (v2-state ?rloc room1 ?unlocked)
                                ?actions)
                     '?actions))
  ;; The goal room is locked; robot and box start in the hall.
  (assert-equal '((cons (unlock room1)
                        (cons (push-box hall room1) nil)))
                (ask '(plan-for (v2-state hall hall nil)
                                (v2-state ?rloc room1 ?unlocked)
                                ?actions)
                     '?actions))
  ;; The goal room is locked; robot in the hall, box in room2.
  (assert-equal '((cons (move-to room2)
                        (cons (push-box room2 hall)
                              (cons (unlock room1)
                                    (cons (push-box hall room1) nil)))))
                (ask '(plan-for (v2-state hall room2 (cons room2 nil))
                                (v2-state ?rloc room1 ?unlocked)
                                ?actions)
                     '?actions))
  ;; Both rooms locked, robot in the hall: two UNLOCKs are needed.
  (assert-equal '((cons (unlock room2)
                        (cons (move-to room2)
                              (cons (push-box room2 hall)
                                    (cons (unlock room1)
                                          (cons (push-box hall room1) nil))))))
                (ask '(plan-for (v2-state hall room2 nil)
                                (v2-state ?rloc room1 ?unlocked)
                                ?actions)
                     '?actions))
  ;; A robot shut inside a locked room can never reach the box: no plan.
  (assert-equal nil
                (ask '(plan-for (v2-state room1 room2 nil)
                                (v2-state ?rloc room1 ?unlocked)
                                ?actions)
                     '?actions)))
(define-test shakey-3
  ;; No boxes left to move: the empty plan.
  (assert-equal '(nil)
                (ask '(plan-for (v3-state ? nil ? ?)
                                ?actions)
                     '?actions))
  ;; One box, every room locked, robot in the hall.
  (assert-equal '((cons (unlock room2)
                        (cons (move-to room2)
                              (cons (push-box room2 hall)
                                    (cons (unlock room1)
                                          (cons (push-box hall room1) nil))))))
                (ask '(plan-for (v3-state hall (cons room2 nil) room1 nil)
                                ?actions)
                     '?actions))
  ;; Two boxes, every room locked, robot in the hall: the two per-box
  ;; plans are chained, returning through the hall in between.
  (assert-equal '((cons (unlock room2)
                        (cons (move-to room2)
                              (cons (push-box room2 hall)
                                    (cons (unlock room1)
                                          (cons (push-box hall room1)
                                                (cons (move-to hall)
                                                      (cons (unlock room3)
                                                            (cons (move-to room3)
                                                                  (cons (push-box room3 hall)
                                                                        (cons (push-box hall room1)
                                                                              nil)))))))))))
                (ask '(plan-for (v3-state hall (cons room2 (cons room3 nil)) room1 nil)
                                ?actions)
                     '?actions)))
|
8d667d7745b4e49358aa7483a4d7a76ff9422277ea28715cc1f4cf9e457cb0f9 | w7cook/AoPL | Stateful.hs | module Stateful where
import Prelude hiding (LT, GT, EQ, id)
import Base
import Data.Maybe
import Operators
--BEGIN:Addr11
-- | Runtime values: base integers and booleans, function closures, and
-- (new in this chapter) addresses of mutable cells in 'Memory'.
data Value
  = IntV Int
  | BoolV Bool
  | ClosureV String Exp Env -- parameter name, body, captured environment
  | AddressV Int            -- index of a cell in 'Memory' (new)
  deriving (Eq, Show)
--END:Addr11
--BEGIN:Memo4
-- | The mutable store: a list of cells, addressed by position.
type Memory = [Value]
--END:Memo4
--BEGIN:Acce3
-- | Read the cell at address @addr@; errors if the address is out of range.
access addr mem = mem !! addr
--END:Acce3
--BEGIN:Upda4
-- | Overwrite the cell at address @addr@ with @val@, leaving every other
-- cell unchanged. Invalid addresses fail when the result is demanded
-- (the lazy pair pattern below does not match).
update :: Int -> Value -> Memory -> Memory
update addr val mem =
  let (prefix, _ : suffix) = splitAt addr mem
  in prefix ++ val : suffix
--END:Upda4
--BEGIN:Stat8
-- | A state-threading computation: given the current memory, produce a
-- result together with the (possibly updated) memory.
type Stateful t = Memory -> (t, Memory)
--END:Stat8
--BEGIN:Summ7
-- | Abstract syntax, extended with three mutable-state forms.
data Exp
  = Literal Value
  | Unary UnaryOp Exp
  | Binary BinaryOp Exp Exp
  | If Exp Exp Exp
  | Variable String
  | Declare String Exp Exp
  | Function String Exp
  | Call Exp Exp
  | Seq Exp Exp
  | Mutable Exp     -- allocate a fresh cell (new)
  | Access Exp      -- read a cell through its address (new)
  | Assign Exp Exp  -- overwrite a cell through its address (new)
  deriving (Eq, Show)
-- | Variable environment: names bound to values; closures capture one.
type Env = [(String, Value)]
--END:Summ7
--BEGIN:Summ9
--BEGIN:Stat11
-- | Evaluate an expression under an environment, threading the memory
-- through every sub-evaluation (explicit state-passing style).
evaluate :: Exp -> Env -> Stateful Value
--END:Stat11
evaluate (Literal val) env store = (val, store)
evaluate (Unary op arg) env store1 =
  let (argVal, store2) = evaluate arg env store1 in
    (unary op argVal, store2)
--BEGIN:Sema27
evaluate (Binary op lhs rhs) env store1 =
  let (lhsVal, store2) = evaluate lhs env store1 in
  let (rhsVal, store3) = evaluate rhs env store2 in
    (binary op lhsVal rhsVal, store3)
--END:Sema27
evaluate (If cond thn els) env store1 =
  let (BoolV flag, store2) = evaluate cond env store1 in
    evaluate (if flag then thn else els) env store2
evaluate (Variable name) env store = (fromJust (lookup name env), store)
evaluate (Declare name rhs body) env store1 =
  let (rhsVal, store2) = evaluate rhs env store1
      bodyEnv = (name, rhsVal) : env
  in
    evaluate body bodyEnv store2
evaluate (Function param body) env store = (ClosureV param body env, store)
evaluate (Call fun arg) env store1 =
  let (ClosureV param body closeEnv, store2) = evaluate fun env store1
      (argVal, store3) = evaluate arg env store2
      callEnv = (param, argVal) : closeEnv
  in
    evaluate body callEnv store3
evaluate (Seq e1 e2) env store1 =
  let (_, store2) = evaluate e1 env store1 in
    evaluate e2 env store2
--END:Summ9
--BEGIN:Summ11
--BEGIN:Sema20
-- Allocation: the new cell's address is the current end of memory.
evaluate (Mutable e) env store1 =
  let (cellVal, store2) = evaluate e env store1 in
    (AddressV (length store2), store2 ++ [cellVal])
--END:Sema20
--BEGIN:Sema23
evaluate (Access addrExp) env store1 =
  let (AddressV addr, store2) = evaluate addrExp env store1 in
    (access addr store2, store2)
--END:Sema23
--BEGIN:Sema25
-- Assignment evaluates the address first, then the new value.
evaluate (Assign addrExp rhs) env store1 =
  let (AddressV addr, store2) = evaluate addrExp env store1 in
  let (rhsVal, store3) = evaluate rhs env store2 in
    (rhsVal, update addr rhsVal store3)
--END:Sema25
--END:Summ11
-- same as in IntBool.hs
-- | Apply a unary operator to an already-evaluated value.
-- A catch-all error clause is added for consistency with 'binary':
-- an ill-typed operand now reports the operator and value instead of
-- failing with a bare non-exhaustive-pattern error.
unary Not (BoolV b) = BoolV (not b)
unary Neg (IntV i) = IntV (-i)
unary op v = error ("Invalid unary "
  ++ show op ++ " operation: " ++ show v)
-- | Apply a binary operator to two already-evaluated values. 'EQ' works
-- on any pair of values; the other operators require the operand shapes
-- shown, otherwise evaluation falls through to the error clause.
binary Add (IntV x) (IntV y) = IntV (x + y)
binary Sub (IntV x) (IntV y) = IntV (x - y)
binary Mul (IntV x) (IntV y) = IntV (x * y)
binary Div (IntV x) (IntV y) = IntV (x `div` y)
binary And (BoolV x) (BoolV y) = BoolV (x && y)
binary Or (BoolV x) (BoolV y) = BoolV (x || y)
binary LT (IntV x) (IntV y) = BoolV (x < y)
binary LE (IntV x) (IntV y) = BoolV (x <= y)
binary GE (IntV x) (IntV y) = BoolV (x >= y)
binary GT (IntV x) (IntV y) = BoolV (x > y)
binary EQ x y = BoolV (x == y)
binary op x y =
  error ("Invalid binary " ++ show op ++ " operation: "
         ++ show x ++ ", " ++ show y)
| null | https://raw.githubusercontent.com/w7cook/AoPL/af2f9d31ec658e9d175735335ad27101cca3e247/src/Stateful.hs | haskell | BEGIN:Addr11
new
BEGIN:Memo4
END:Memo4
BEGIN:Acce3
END:Acce3
BEGIN:Upda4
new
new
new
BEGIN:Summ9
BEGIN:Stat11
END:Stat11
BEGIN:Sema27
END:Sema27
END:Summ9 BEGIN:Summ11 BEGIN:Sema20
END:Sema20
BEGIN:Sema23
END:Sema23
BEGIN:Sema25
END:Sema25
END:Summ11 | module Stateful where
import Prelude hiding (LT, GT, EQ, id)
import Base
import Data.Maybe
import Operators
-- | Runtime values. The 'AddressV' constructor was dropped from this copy
-- of the declaration but is constructed and matched by the Mutable /
-- Access / Assign evaluation clauses below; it is restored here.
data Value = IntV Int
  | BoolV Bool
  | ClosureV String Exp Env
  | AddressV Int -- address of a cell in Memory
  deriving (Eq, Show)
--END:Addr11
-- | The mutable store: a list of cells, addressed by position.
type Memory = [Value]
-- | Read the cell at address @addr@; errors if the address is out of range.
access addr mem = mem !! addr
-- | Overwrite the cell at address @addr@ with @val@, leaving every other
-- cell unchanged.
update :: Int -> Value -> Memory -> Memory
update addr val mem =
  let (prefix, _ : suffix) = splitAt addr mem
  in prefix ++ val : suffix
--END:Upda4
--BEGIN:Stat8
-- | A state-threading computation: given the current memory, produce a
-- result together with the (possibly updated) memory.
type Stateful t = Memory -> (t, Memory)
--END:Stat8
--BEGIN:Summ7
-- | Abstract syntax. The Mutable / Access / Assign constructors were
-- dropped from this copy of the declaration but are matched by the
-- corresponding 'evaluate' clauses below; they are restored here.
data Exp = Literal Value
  | Unary UnaryOp Exp
  | Binary BinaryOp Exp Exp
  | If Exp Exp Exp
  | Variable String
  | Declare String Exp Exp
  | Function String Exp
  | Call Exp Exp
  | Seq Exp Exp
  | Mutable Exp     -- allocate a fresh cell
  | Access Exp      -- read a cell through its address
  | Assign Exp Exp  -- overwrite a cell through its address
  deriving (Eq, Show)
-- | Variable environment: names bound to values; closures capture one.
type Env = [(String, Value)]
--END:Summ7
-- | Evaluate an expression under an environment, threading the memory
-- through every sub-evaluation (explicit state-passing style).
evaluate :: Exp -> Env -> Stateful Value
evaluate (Literal val) env store = (val, store)
evaluate (Unary op arg) env store1 =
  let (argVal, store2) = evaluate arg env store1 in
    (unary op argVal, store2)
evaluate (Binary op lhs rhs) env store1 =
  let (lhsVal, store2) = evaluate lhs env store1 in
  let (rhsVal, store3) = evaluate rhs env store2 in
    (binary op lhsVal rhsVal, store3)
evaluate (If cond thn els) env store1 =
  let (BoolV flag, store2) = evaluate cond env store1 in
    evaluate (if flag then thn else els) env store2
evaluate (Variable name) env store = (fromJust (lookup name env), store)
evaluate (Declare name rhs body) env store1 =
  let (rhsVal, store2) = evaluate rhs env store1
      bodyEnv = (name, rhsVal) : env
  in
    evaluate body bodyEnv store2
evaluate (Function param body) env store = (ClosureV param body env, store)
evaluate (Call fun arg) env store1 =
  let (ClosureV param body closeEnv, store2) = evaluate fun env store1
      (argVal, store3) = evaluate arg env store2
      callEnv = (param, argVal) : closeEnv
  in
    evaluate body callEnv store3
evaluate (Seq e1 e2) env store1 =
  let (_, store2) = evaluate e1 env store1 in
    evaluate e2 env store2
-- Allocation: the new cell's address is the current end of memory.
evaluate (Mutable e) env store1 =
  let (cellVal, store2) = evaluate e env store1 in
    (AddressV (length store2), store2 ++ [cellVal])
evaluate (Access addrExp) env store1 =
  let (AddressV addr, store2) = evaluate addrExp env store1 in
    (access addr store2, store2)
-- Assignment evaluates the address first, then the new value.
evaluate (Assign addrExp rhs) env store1 =
  let (AddressV addr, store2) = evaluate addrExp env store1 in
  let (rhsVal, store3) = evaluate rhs env store2 in
    (rhsVal, update addr rhsVal store3)
-- same as in IntBool.hs
-- | Apply a unary operator to an already-evaluated value.
-- A catch-all error clause is added for consistency with 'binary'.
unary Not (BoolV b) = BoolV (not b)
unary Neg (IntV i) = IntV (-i)
unary op v = error ("Invalid unary "
  ++ show op ++ " operation: " ++ show v)
-- | Apply a binary operator to two already-evaluated values. 'EQ' works
-- on any pair of values; the other operators require the operand shapes
-- shown, otherwise evaluation falls through to the error clause.
binary Add (IntV x) (IntV y) = IntV (x + y)
binary Sub (IntV x) (IntV y) = IntV (x - y)
binary Mul (IntV x) (IntV y) = IntV (x * y)
binary Div (IntV x) (IntV y) = IntV (x `div` y)
binary And (BoolV x) (BoolV y) = BoolV (x && y)
binary Or (BoolV x) (BoolV y) = BoolV (x || y)
binary LT (IntV x) (IntV y) = BoolV (x < y)
binary LE (IntV x) (IntV y) = BoolV (x <= y)
binary GE (IntV x) (IntV y) = BoolV (x >= y)
binary GT (IntV x) (IntV y) = BoolV (x > y)
binary EQ x y = BoolV (x == y)
binary op x y =
  error ("Invalid binary " ++ show op ++ " operation: "
         ++ show x ++ ", " ++ show y)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.