_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
43a4d5c88ef49cb40110247b6e5471eefeaf142dba9dfeb5eb2f1a5bd5e0aa36 | ethereum/act | Syntax.hs | {-# LANGUAGE GADTs #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE TypeApplications #-}
{-|
Module : Syntax
Description : Functions for manipulating and collapsing all our different ASTs.
-}
module Syntax where
import Prelude hiding (LT, GT)
import Data.List
import Data.Map (Map,empty,insertWith,unionsWith)
import Data.Singletons
import Syntax.TimeAgnostic as Agnostic
import qualified Syntax.Annotated as Annotated
import Syntax.Untyped hiding (Constant,Rewrite)
import qualified Syntax.Untyped as Untyped
-----------------------------------------
-- * Extract from fully refined ASTs * --
-----------------------------------------
-- | Invariant predicates can always be expressed as a single expression.
invExp :: Annotated.InvariantPred -> Annotated.Exp Bool
-- An invariant predicate is a pair of boolean expressions; glue the two
-- halves together with the Semigroup on 'Exp Bool'.
invExp (prdA, prdB) = prdA <> prdB
-- | All storage locations referenced anywhere in a behaviour: its
-- preconditions, postconditions, rewrites, and (if present) its return
-- expression.  Duplicates are removed.
locsFromBehaviour :: Annotated.Behaviour -> [Annotated.StorageLocation]
locsFromBehaviour (Behaviour _ _ _ _ preconds postconds rewrites returns) = nub $
  concatMap locsFromExp preconds
  <> concatMap locsFromExp postconds
  <> concatMap locsFromRewrite rewrites
  <> maybe [] locsFromTypedExp returns
-- | All storage locations referenced anywhere in a constructor: its
-- preconditions, postconditions, rewrites, and initial storage
-- assignments.  Duplicates are removed.
locsFromConstructor :: Annotated.Constructor -> [Annotated.StorageLocation]
locsFromConstructor (Constructor _ _ _ pre post initialStorage rewrites) =
  nub . mconcat $
    [ concatMap locsFromExp pre
    , concatMap locsFromExp post
    , concatMap locsFromRewrite rewrites
      -- initial assignments are plain updates; view them as rewrites so
      -- the same traversal applies
    , concatMap (locsFromRewrite . Rewrite) initialStorage
    ]
------------------------------------
-- * Extract from any typed AST * --
------------------------------------
-- | Keep only the behaviours from a list of claims, discarding every
-- other kind of claim.
behvsFromClaims :: [Claim t] -> [Behaviour t]
behvsFromClaims claims = [b | B b <- claims]
-- | Storage locations touched by a rewrite.  A constant rewrite touches
-- only its own location; an update additionally touches every location
-- referenced by the written item and by the right hand side expression.
locsFromRewrite :: Rewrite t -> [StorageLocation t]
locsFromRewrite = nub . onRewrite pure fromUpdate
  where
    -- locations read via the item (e.g. mapping keys) plus the rhs
    fromUpdate (Update _ item e) = locsFromItem item <> locsFromExp e
-- | The single location named by a rewrite, ignoring any locations
-- referenced by an update's right hand side.
locFromRewrite :: Rewrite t -> StorageLocation t
locFromRewrite = onRewrite id locFromUpdate
-- | Forget the value written by an update, keeping only its location.
locFromUpdate :: StorageUpdate t -> StorageLocation t
locFromUpdate (Update _ item _) = _Loc item
-- | The location denoted by a storage item, together with all locations
-- referenced by its indices (mapping keys may themselves read storage).
locsFromItem :: TStorageItem a t -> [StorageLocation t]
locsFromItem item = _Loc item : concatMap locsFromTypedExp (ixsFromItem item)
-- | Storage locations referenced by an expression whose type is hidden
-- behind the 'TExp' wrapper.
locsFromTypedExp :: TypedExp t -> [StorageLocation t]
locsFromTypedExp (TExp _ e) = locsFromExp e
-- | All storage locations read by an expression, deduplicated.
locsFromExp :: Exp a t -> [StorageLocation t]
locsFromExp = nub . go
  where
    -- Structural recursion: only 'TEntry' nodes contribute locations;
    -- every other constructor merely collects from its subexpressions,
    -- and leaves (literals, environment reads, calldata vars) are empty.
    go :: Exp a t -> [StorageLocation t]
    go e = case e of
      And _ a b -> go a <> go b
      Or _ a b -> go a <> go b
      Impl _ a b -> go a <> go b
      Eq _ a b -> go a <> go b
      LT _ a b -> go a <> go b
      LEQ _ a b -> go a <> go b
      GT _ a b -> go a <> go b
      GEQ _ a b -> go a <> go b
      NEq _ a b -> go a <> go b
      Neg _ a -> go a
      Add _ a b -> go a <> go b
      Sub _ a b -> go a <> go b
      Mul _ a b -> go a <> go b
      Div _ a b -> go a <> go b
      Mod _ a b -> go a <> go b
      Exp _ a b -> go a <> go b
      Cat _ a b -> go a <> go b
      Slice _ a b c -> go a <> go b <> go c
      ByStr {} -> []
      ByLit {} -> []
      LitInt {} -> []
      IntMin {} -> []
      IntMax {} -> []
      UIntMin {} -> []
      UIntMax {} -> []
      LitBool {} -> []
      IntEnv {} -> []
      ByEnv {} -> []
      ITE _ x y z -> go x <> go y <> go z
      -- a storage read: collect the item's own location and its indices
      TEntry _ _ a -> locsFromItem a
      Var {} -> []
-- | All environment values (e.g. CALLER, CALLVALUE) read anywhere in a
-- behaviour, deduplicated.
ethEnvFromBehaviour :: Behaviour t -> [EthEnv]
ethEnvFromBehaviour (Behaviour _ _ _ _ preconds postconds rewrites returns) = nub $
  concatMap ethEnvFromExp preconds
  <> concatMap ethEnvFromExp postconds
  <> concatMap ethEnvFromRewrite rewrites
  <> maybe [] ethEnvFromTypedExp returns
-- | All environment values read anywhere in a constructor, including its
-- initial storage assignments, deduplicated.
ethEnvFromConstructor :: Constructor t -> [EthEnv]
ethEnvFromConstructor (Constructor _ _ _ pre post initialStorage rewrites) = nub $
  concatMap ethEnvFromExp pre
  <> concatMap ethEnvFromExp post
  <> concatMap ethEnvFromRewrite rewrites
  <> concatMap ethEnvFromRewrite (Rewrite <$> initialStorage)
-- | Environment values read by a rewrite: those reachable through the
-- item's indices, plus (for updates) those in the written expression.
ethEnvFromRewrite :: Rewrite t -> [EthEnv]
ethEnvFromRewrite (Constant (Loc _ item)) = ethEnvFromItem item
ethEnvFromRewrite (Rewrite (Update _ item e)) = nub $ ethEnvFromItem item <> ethEnvFromExp e
-- | Environment values read by a storage item's indices, deduplicated.
ethEnvFromItem :: TStorageItem a t -> [EthEnv]
ethEnvFromItem = nub . concatMap ethEnvFromTypedExp . ixsFromItem
-- | Environment values read by an expression of hidden type.
ethEnvFromTypedExp :: TypedExp t -> [EthEnv]
ethEnvFromTypedExp (TExp _ e) = ethEnvFromExp e
-- | All environment values read by an expression, deduplicated.
ethEnvFromExp :: Exp a t -> [EthEnv]
ethEnvFromExp = nub . go
  where
    -- Structural recursion: 'IntEnv' / 'ByEnv' leaves yield their
    -- environment value, storage reads recurse through the item's
    -- indices, everything else collects from its subexpressions.
    go :: Exp a t -> [EthEnv]
    go e = case e of
      And _ a b -> go a <> go b
      Or _ a b -> go a <> go b
      Impl _ a b -> go a <> go b
      Eq _ a b -> go a <> go b
      LT _ a b -> go a <> go b
      LEQ _ a b -> go a <> go b
      GT _ a b -> go a <> go b
      GEQ _ a b -> go a <> go b
      NEq _ a b -> go a <> go b
      Neg _ a -> go a
      Add _ a b -> go a <> go b
      Sub _ a b -> go a <> go b
      Mul _ a b -> go a <> go b
      Div _ a b -> go a <> go b
      Mod _ a b -> go a <> go b
      Exp _ a b -> go a <> go b
      Cat _ a b -> go a <> go b
      Slice _ a b c -> go a <> go b <> go c
      ITE _ a b c -> go a <> go b <> go c
      ByStr {} -> []
      ByLit {} -> []
      LitInt {} -> []
      LitBool {} -> []
      IntMin {} -> []
      IntMax {} -> []
      UIntMin {} -> []
      UIntMax {} -> []
      IntEnv _ a -> [a]
      ByEnv _ a -> [a]
      TEntry _ _ a -> ethEnvFromItem a
      Var {} -> []
-- | The name of the storage slot targeted by a rewrite.
idFromRewrite :: Rewrite t -> Id
idFromRewrite = onRewrite idFromLocation idFromUpdate
-- | The name of the storage slot an item refers to.
idFromItem :: TStorageItem a t -> Id
idFromItem (Item _ _ name _) = name
-- | The name of the storage slot written by an update.
idFromUpdate :: StorageUpdate t -> Id
idFromUpdate (Update _ item _) = idFromItem item
-- | The name of the storage slot a location refers to.
idFromLocation :: StorageLocation t -> Id
idFromLocation (Loc _ item) = idFromItem item
-- | The contract whose storage is targeted by a rewrite.
contractFromRewrite :: Rewrite t -> Id
contractFromRewrite = onRewrite contractFromLoc contractFromUpdate
-- | The contract a storage item belongs to.
contractFromItem :: TStorageItem a t -> Id
contractFromItem (Item _ c _ _) = c
-- | The index expressions of a storage item (empty for non-mappings).
ixsFromItem :: TStorageItem a t -> [TypedExp t]
ixsFromItem (Item _ _ _ ixs) = ixs
-- | The contracts whose storage is rewritten by a behaviour.  May
-- contain duplicates; one entry per state update.
contractsInvolved :: Behaviour t -> [Id]
contractsInvolved behv = map contractFromRewrite (_stateUpdates behv)
-- | The contract a storage location belongs to.
contractFromLoc :: StorageLocation t -> Id
contractFromLoc (Loc _ item) = contractFromItem item
-- | The contract whose storage is written by an update.
contractFromUpdate :: StorageUpdate t -> Id
contractFromUpdate (Update _ item _) = contractFromItem item
-- | The index expressions of the item at a storage location.
ixsFromLocation :: StorageLocation t -> [TypedExp t]
ixsFromLocation (Loc _ item) = ixsFromItem item
-- | The index expressions of the item written by an update.
ixsFromUpdate :: StorageUpdate t -> [TypedExp t]
ixsFromUpdate (Update _ item _) = ixsFromItem item
-- | The index expressions of the item targeted by a rewrite.
ixsFromRewrite :: Rewrite t -> [TypedExp t]
ixsFromRewrite = onRewrite ixsFromLocation ixsFromUpdate
-- | The type of the values stored at an item, as an existentially
-- wrapped singleton.
itemType :: TStorageItem a t -> ActType
itemType (Item t _ _ _) = SomeSing t
-- | Whether a location refers to a mapping entry, i.e. whether it is
-- addressed through at least one index expression.
isMapping :: StorageLocation t -> Bool
isMapping loc = not (null (ixsFromLocation loc))
-- | Case analysis for 'Rewrite': apply the first function to constant
-- rewrites and the second to updates.
onRewrite :: (StorageLocation t -> a) -> (StorageUpdate t -> a) -> Rewrite t -> a
onRewrite f _ (Constant a) = f a
onRewrite _ g (Rewrite a) = g a
-- | Keep only the updates from a list of rewrites, preserving order.
updatesFromRewrites :: [Rewrite t] -> [StorageUpdate t]
updatesFromRewrites = foldr keep []
  where
    keep (Rewrite u) acc = u : acc
    keep (Constant _) acc = acc
-- | Keep only the constant (unchanged) locations from a list of
-- rewrites, preserving order.
locsFromRewrites :: [Rewrite t] -> [StorageLocation t]
locsFromRewrites rs = [l | Constant l <- rs]
--------------------------------------
-- * Extraction from untyped ASTs * --
--------------------------------------
-- | The name of the storage slot referenced by an untyped storage
-- declaration.  Only 'PEntry' patterns carry a name; any other shape
-- indicates a bug in the caller, hence the 'error'.
nameFromStorage :: Untyped.Storage -> Id
nameFromStorage (Untyped.Rewrite (PEntry _ x _) _) = x
nameFromStorage (Untyped.Constant (PEntry _ x _)) = x
nameFromStorage store = error $ "Internal error: cannot extract name from " ++ show store
-- | The source position of an untyped expression.  Every constructor
-- carries its own position except 'ListConst', whose position is taken
-- from its element.
getPosn :: Expr -> Pn
getPosn expr = case expr of
  EAnd pn _ _ -> pn
  EOr pn _ _ -> pn
  ENot pn _ -> pn
  EImpl pn _ _ -> pn
  EEq pn _ _ -> pn
  ENeq pn _ _ -> pn
  ELEQ pn _ _ -> pn
  ELT pn _ _ -> pn
  EGEQ pn _ _ -> pn
  EGT pn _ _ -> pn
  EAdd pn _ _ -> pn
  ESub pn _ _ -> pn
  EITE pn _ _ _ -> pn
  EMul pn _ _ -> pn
  EDiv pn _ _ -> pn
  EMod pn _ _ -> pn
  EExp pn _ _ -> pn
  Zoom pn _ _ -> pn
  EUTEntry pn _ _ -> pn
  EPreEntry pn _ _ -> pn
  EPostEntry pn _ _ -> pn
  Func pn _ _ -> pn
  -- no position of its own; borrow the element's
  ListConst e -> getPosn e
  ECat pn _ _ -> pn
  ESlice pn _ _ _ -> pn
  ENewaddr pn _ _ -> pn
  ENewaddr2 pn _ _ _ -> pn
  BYHash pn _ -> pn
  BYAbiE pn _ -> pn
  StringLit pn _ -> pn
  WildExp pn -> pn
  EnvExp pn _ -> pn
  IntLit pn _ -> pn
  BoolLit pn _ -> pn
-- | The source position of a definition, taken from its expression.
posFromDef :: Defn -> Pn
posFromDef (Defn e _) = getPosn e
-- | Returns all the identifiers used in an expression,
-- as well as all of the positions they're used in.
idFromRewrites :: Expr -> Map Id [Pn]
idFromRewrites e = case e of
  EAnd _ a b -> idFromRewrites' [a,b]
  EOr _ a b -> idFromRewrites' [a,b]
  ENot _ a -> idFromRewrites a
  EImpl _ a b -> idFromRewrites' [a,b]
  EEq _ a b -> idFromRewrites' [a,b]
  ENeq _ a b -> idFromRewrites' [a,b]
  ELEQ _ a b -> idFromRewrites' [a,b]
  ELT _ a b -> idFromRewrites' [a,b]
  EGEQ _ a b -> idFromRewrites' [a,b]
  EGT _ a b -> idFromRewrites' [a,b]
  EAdd _ a b -> idFromRewrites' [a,b]
  ESub _ a b -> idFromRewrites' [a,b]
  EITE _ a b c -> idFromRewrites' [a,b,c]
  EMul _ a b -> idFromRewrites' [a,b]
  EDiv _ a b -> idFromRewrites' [a,b]
  EMod _ a b -> idFromRewrites' [a,b]
  EExp _ a b -> idFromRewrites' [a,b]
  Zoom _ a b -> idFromRewrites' [a,b]
  -- entry nodes are the only constructors that bind an identifier:
  -- record this use site and keep collecting from the index expressions
  EUTEntry p x es -> insertWith (<>) x [p] $ idFromRewrites' es
  EPreEntry p x es -> insertWith (<>) x [p] $ idFromRewrites' es
  EPostEntry p x es -> insertWith (<>) x [p] $ idFromRewrites' es
  Func _ _ es -> idFromRewrites' es
  ListConst a -> idFromRewrites a
  ECat _ a b -> idFromRewrites' [a,b]
  ESlice _ a b c -> idFromRewrites' [a,b,c]
  ENewaddr _ a b -> idFromRewrites' [a,b]
  ENewaddr2 _ a b c -> idFromRewrites' [a,b,c]
  BYHash _ a -> idFromRewrites a
  BYAbiE _ a -> idFromRewrites a
  StringLit {} -> empty
  WildExp {} -> empty
  EnvExp {} -> empty
  IntLit {} -> empty
  BoolLit {} -> empty
  where
    -- union the maps of several subexpressions, concatenating the
    -- position lists of identifiers that occur more than once
    idFromRewrites' = unionsWith (<>) . fmap idFromRewrites
-- | True iff the case is a wildcard.
isWild :: Case -> Bool
isWild c =
  case c of
    Case _ (WildExp _) _ -> True
    _ -> False
| null | https://raw.githubusercontent.com/ethereum/act/36b65b71eea22ebe4287f066190ecf47f4394fa3/src/Syntax.hs | haskell | # LANGUAGE GADTs #
|
Module : Syntax
Description : Functions for manipulating and collapsing all our different ASTs.
---------------------------------------
* Extract from fully refined ASTs * --
---------------------------------------
| Invariant predicates can always be expressed as a single expression.
----------------------------------
* Extract from any typed AST * --
----------------------------------
------------------------------------
* Extraction from untyped ASTs * --
------------------------------------
| Returns all the identifiers used in an expression,
as well all of the positions they're used in.
| True iff the case is a wildcard. | # LANGUAGE PatternSynonyms #
# LANGUAGE TypeApplications #
module Syntax where
import Prelude hiding (LT, GT)
import Data.List
import Data.Map (Map,empty,insertWith,unionsWith)
import Data.Singletons
import Syntax.TimeAgnostic as Agnostic
import qualified Syntax.Annotated as Annotated
import Syntax.Untyped hiding (Constant,Rewrite)
import qualified Syntax.Untyped as Untyped
invExp :: Annotated.InvariantPred -> Annotated.Exp Bool
invExp = uncurry (<>)
locsFromBehaviour :: Annotated.Behaviour -> [Annotated.StorageLocation]
locsFromBehaviour (Behaviour _ _ _ _ preconds postconds rewrites returns) = nub $
concatMap locsFromExp preconds
<> concatMap locsFromExp postconds
<> concatMap locsFromRewrite rewrites
<> maybe [] locsFromTypedExp returns
locsFromConstructor :: Annotated.Constructor -> [Annotated.StorageLocation]
locsFromConstructor (Constructor _ _ _ pre post initialStorage rewrites) = nub $
concatMap locsFromExp pre
<> concatMap locsFromExp post
<> concatMap locsFromRewrite rewrites
<> concatMap locsFromRewrite (Rewrite <$> initialStorage)
behvsFromClaims :: [Claim t] -> [Behaviour t]
behvsFromClaims claims = [b | B b <- claims]
locsFromRewrite :: Rewrite t -> [StorageLocation t]
locsFromRewrite update = nub $ case update of
Constant loc -> [loc]
Rewrite (Update _ item e) -> locsFromItem item <> locsFromExp e
locFromRewrite :: Rewrite t -> StorageLocation t
locFromRewrite = onRewrite id locFromUpdate
locFromUpdate :: StorageUpdate t -> StorageLocation t
locFromUpdate (Update _ item _) = _Loc item
locsFromItem :: TStorageItem a t -> [StorageLocation t]
locsFromItem item = _Loc item : concatMap locsFromTypedExp (ixsFromItem item)
locsFromTypedExp :: TypedExp t -> [StorageLocation t]
locsFromTypedExp (TExp _ e) = locsFromExp e
locsFromExp :: Exp a t -> [StorageLocation t]
locsFromExp = nub . go
where
go :: Exp a t -> [StorageLocation t]
go e = case e of
And _ a b -> go a <> go b
Or _ a b -> go a <> go b
Impl _ a b -> go a <> go b
Eq _ a b -> go a <> go b
LT _ a b -> go a <> go b
LEQ _ a b -> go a <> go b
GT _ a b -> go a <> go b
GEQ _ a b -> go a <> go b
NEq _ a b -> go a <> go b
Neg _ a -> go a
Add _ a b -> go a <> go b
Sub _ a b -> go a <> go b
Mul _ a b -> go a <> go b
Div _ a b -> go a <> go b
Mod _ a b -> go a <> go b
Exp _ a b -> go a <> go b
Cat _ a b -> go a <> go b
Slice _ a b c -> go a <> go b <> go c
ByStr {} -> []
ByLit {} -> []
LitInt {} -> []
IntMin {} -> []
IntMax {} -> []
UIntMin {} -> []
UIntMax {} -> []
LitBool {} -> []
IntEnv {} -> []
ByEnv {} -> []
ITE _ x y z -> go x <> go y <> go z
TEntry _ _ a -> locsFromItem a
Var {} -> []
ethEnvFromBehaviour :: Behaviour t -> [EthEnv]
ethEnvFromBehaviour (Behaviour _ _ _ _ preconds postconds rewrites returns) = nub $
concatMap ethEnvFromExp preconds
<> concatMap ethEnvFromExp postconds
<> concatMap ethEnvFromRewrite rewrites
<> maybe [] ethEnvFromTypedExp returns
ethEnvFromConstructor :: Constructor t -> [EthEnv]
ethEnvFromConstructor (Constructor _ _ _ pre post initialStorage rewrites) = nub $
concatMap ethEnvFromExp pre
<> concatMap ethEnvFromExp post
<> concatMap ethEnvFromRewrite rewrites
<> concatMap ethEnvFromRewrite (Rewrite <$> initialStorage)
ethEnvFromRewrite :: Rewrite t -> [EthEnv]
ethEnvFromRewrite rewrite = case rewrite of
Constant (Loc _ item) -> ethEnvFromItem item
Rewrite (Update _ item e) -> nub $ ethEnvFromItem item <> ethEnvFromExp e
ethEnvFromItem :: TStorageItem a t -> [EthEnv]
ethEnvFromItem = nub . concatMap ethEnvFromTypedExp . ixsFromItem
ethEnvFromTypedExp :: TypedExp t -> [EthEnv]
ethEnvFromTypedExp (TExp _ e) = ethEnvFromExp e
ethEnvFromExp :: Exp a t -> [EthEnv]
ethEnvFromExp = nub . go
where
go :: Exp a t -> [EthEnv]
go e = case e of
And _ a b -> go a <> go b
Or _ a b -> go a <> go b
Impl _ a b -> go a <> go b
Eq _ a b -> go a <> go b
LT _ a b -> go a <> go b
LEQ _ a b -> go a <> go b
GT _ a b -> go a <> go b
GEQ _ a b -> go a <> go b
NEq _ a b -> go a <> go b
Neg _ a -> go a
Add _ a b -> go a <> go b
Sub _ a b -> go a <> go b
Mul _ a b -> go a <> go b
Div _ a b -> go a <> go b
Mod _ a b -> go a <> go b
Exp _ a b -> go a <> go b
Cat _ a b -> go a <> go b
Slice _ a b c -> go a <> go b <> go c
ITE _ a b c -> go a <> go b <> go c
ByStr {} -> []
ByLit {} -> []
LitInt {} -> []
LitBool {} -> []
IntMin {} -> []
IntMax {} -> []
UIntMin {} -> []
UIntMax {} -> []
IntEnv _ a -> [a]
ByEnv _ a -> [a]
TEntry _ _ a -> ethEnvFromItem a
Var {} -> []
idFromRewrite :: Rewrite t -> Id
idFromRewrite = onRewrite idFromLocation idFromUpdate
idFromItem :: TStorageItem a t -> Id
idFromItem (Item _ _ name _) = name
idFromUpdate :: StorageUpdate t -> Id
idFromUpdate (Update _ item _) = idFromItem item
idFromLocation :: StorageLocation t -> Id
idFromLocation (Loc _ item) = idFromItem item
contractFromRewrite :: Rewrite t -> Id
contractFromRewrite = onRewrite contractFromLoc contractFromUpdate
contractFromItem :: TStorageItem a t -> Id
contractFromItem (Item _ c _ _) = c
ixsFromItem :: TStorageItem a t -> [TypedExp t]
ixsFromItem (Item _ _ _ ixs) = ixs
contractsInvolved :: Behaviour t -> [Id]
contractsInvolved = fmap contractFromRewrite . _stateUpdates
contractFromLoc :: StorageLocation t -> Id
contractFromLoc (Loc _ item) = contractFromItem item
contractFromUpdate :: StorageUpdate t -> Id
contractFromUpdate (Update _ item _) = contractFromItem item
ixsFromLocation :: StorageLocation t -> [TypedExp t]
ixsFromLocation (Loc _ item) = ixsFromItem item
ixsFromUpdate :: StorageUpdate t -> [TypedExp t]
ixsFromUpdate (Update _ item _) = ixsFromItem item
ixsFromRewrite :: Rewrite t -> [TypedExp t]
ixsFromRewrite = onRewrite ixsFromLocation ixsFromUpdate
itemType :: TStorageItem a t -> ActType
itemType (Item t _ _ _) = SomeSing t
isMapping :: StorageLocation t -> Bool
isMapping = not . null . ixsFromLocation
onRewrite :: (StorageLocation t -> a) -> (StorageUpdate t -> a) -> Rewrite t -> a
onRewrite f _ (Constant a) = f a
onRewrite _ g (Rewrite a) = g a
updatesFromRewrites :: [Rewrite t] -> [StorageUpdate t]
updatesFromRewrites rs = [u | Rewrite u <- rs]
locsFromRewrites :: [Rewrite t] -> [StorageLocation t]
locsFromRewrites rs = [l | Constant l <- rs]
nameFromStorage :: Untyped.Storage -> Id
nameFromStorage (Untyped.Rewrite (PEntry _ x _) _) = x
nameFromStorage (Untyped.Constant (PEntry _ x _)) = x
nameFromStorage store = error $ "Internal error: cannot extract name from " ++ show store
getPosn :: Expr -> Pn
getPosn expr = case expr of
EAnd pn _ _ -> pn
EOr pn _ _ -> pn
ENot pn _ -> pn
EImpl pn _ _ -> pn
EEq pn _ _ -> pn
ENeq pn _ _ -> pn
ELEQ pn _ _ -> pn
ELT pn _ _ -> pn
EGEQ pn _ _ -> pn
EGT pn _ _ -> pn
EAdd pn _ _ -> pn
ESub pn _ _ -> pn
EITE pn _ _ _ -> pn
EMul pn _ _ -> pn
EDiv pn _ _ -> pn
EMod pn _ _ -> pn
EExp pn _ _ -> pn
Zoom pn _ _ -> pn
EUTEntry pn _ _ -> pn
EPreEntry pn _ _ -> pn
EPostEntry pn _ _ -> pn
Func pn _ _ -> pn
ListConst e -> getPosn e
ECat pn _ _ -> pn
ESlice pn _ _ _ -> pn
ENewaddr pn _ _ -> pn
ENewaddr2 pn _ _ _ -> pn
BYHash pn _ -> pn
BYAbiE pn _ -> pn
StringLit pn _ -> pn
WildExp pn -> pn
EnvExp pn _ -> pn
IntLit pn _ -> pn
BoolLit pn _ -> pn
posFromDef :: Defn -> Pn
posFromDef (Defn e _) = getPosn e
idFromRewrites :: Expr -> Map Id [Pn]
idFromRewrites e = case e of
EAnd _ a b -> idFromRewrites' [a,b]
EOr _ a b -> idFromRewrites' [a,b]
ENot _ a -> idFromRewrites a
EImpl _ a b -> idFromRewrites' [a,b]
EEq _ a b -> idFromRewrites' [a,b]
ENeq _ a b -> idFromRewrites' [a,b]
ELEQ _ a b -> idFromRewrites' [a,b]
ELT _ a b -> idFromRewrites' [a,b]
EGEQ _ a b -> idFromRewrites' [a,b]
EGT _ a b -> idFromRewrites' [a,b]
EAdd _ a b -> idFromRewrites' [a,b]
ESub _ a b -> idFromRewrites' [a,b]
EITE _ a b c -> idFromRewrites' [a,b,c]
EMul _ a b -> idFromRewrites' [a,b]
EDiv _ a b -> idFromRewrites' [a,b]
EMod _ a b -> idFromRewrites' [a,b]
EExp _ a b -> idFromRewrites' [a,b]
Zoom _ a b -> idFromRewrites' [a,b]
EUTEntry p x es -> insertWith (<>) x [p] $ idFromRewrites' es
EPreEntry p x es -> insertWith (<>) x [p] $ idFromRewrites' es
EPostEntry p x es -> insertWith (<>) x [p] $ idFromRewrites' es
Func _ _ es -> idFromRewrites' es
ListConst a -> idFromRewrites a
ECat _ a b -> idFromRewrites' [a,b]
ESlice _ a b c -> idFromRewrites' [a,b,c]
ENewaddr _ a b -> idFromRewrites' [a,b]
ENewaddr2 _ a b c -> idFromRewrites' [a,b,c]
BYHash _ a -> idFromRewrites a
BYAbiE _ a -> idFromRewrites a
StringLit {} -> empty
WildExp {} -> empty
EnvExp {} -> empty
IntLit {} -> empty
BoolLit {} -> empty
where
idFromRewrites' = unionsWith (<>) . fmap idFromRewrites
isWild :: Case -> Bool
isWild (Case _ (WildExp _) _) = True
isWild _ = False
|
83b1fab2c0980bc7a3cd44ec7e006930d24d9431b238a145d6e2e1db19595249 | cardmagic/lucash | arch.scm | ; -*- Mode: Scheme; Syntax: Scheme; Package: Scheme; -*-
Copyright ( c ) 1993 - 1999 by and . See file COPYING .
; This is file arch.scm.
;;;; Architecture description
(define architecture-version "Vanilla 20")
; Things that the VM and the runtime system both need to know.
(define bits-used-per-byte 8)
(define byte-limit (expt 2 bits-used-per-byte))
; Bytecodes: for compiler and interpreter
; Instruction specification is
; (op . args)
; OP may be a name or a list of names
; ARGS are
- a byte
; byte - a byte
; junk - a byte that is ignored (e.g. when a peephole optimization merges
two instructions into a single , shorter one )
two - bytes - two bytes
index - a two byte index into the current template
small - index - a one byte index into the current template
; offset - two bytes giving an offset into the current instruction stream
; stob - a byte specifying a type for a stored object
0 1 2 ... - the number of non - instruction - stream arguments ( some
instructions take a variable number of arguments ; the first
; number is the argument count implemented by the VM)
; + - any number of additional arguments are allowed
(define-syntax define-instruction-set
(lambda (form rename compare)
(let ((data (do ((data (reverse (cdr form)) (cdr data))
(new '() (let ((next (car data)))
(if (pair? (car next))
(append (map (lambda (op)
(cons op (cdr next)))
(car next))
new)
(cons next new)))))
((null? data) new))))
`(begin (define-enumeration op
,(map car data))
(define opcode-arg-specs
'#(,@(map cdr data)))))))
; Instructions marked *EXP* are experimental and are not normally used by
; byte-code compiler.
(define-instruction-set
first opcode in a procedure , never actually
; executed
(make-env two-bytes) ; cons an environment
value to * val * , two - byte index
value to * val * , one - byte index
(local byte byte) ; back and over
((local0 local1 local2)
byte) ; back encoded into op-code for efficiency
same , but counts are two bytes
back over value , counts are two bytes
(global index) ; value to *val*
(set-global! index 1)
(closure index byte) ; byte = 0 -> use environment in *env*
byte = 1 - > use environment in * val *
(make-flat-env env-data) ; make new environment from env-data
(push 1) ; push *val* onto stack
((local0-push push-local0) ; common combination
byte junk 1)
(pop) ; pop top of stack into *val*
(stack-ref byte) ; index'th element of stack into *val*
* val * to index'th element of stack
(make-cont offset byte) ; save state in *cont*
save state in * cont * , two - byte size
copy * cont * to * val * , use WITH -
; to use copied continuation
(get-cont-from-heap) ; copy next continuation from heap (this
; op-code is used internally by the VM)
;; different ways to call procedures
(call nargs 1 +) ; last argument is the procedure to call
ditto , is two bytes
same , move args to just above * cont * first
; (*EXP*, and no two-byte version)
last argument is procedure to call , second to
; last is a list of additional arguments, next
two bytes are the number of stack arguments
(closed-apply 2 +) ; arguments are as for Scheme's APPLY, with
; the number of non-list arguments pushed on
; the top of the stack
first arg is cont , second is procedure
(call-with-values +) ; values are on stack, consumer is in the
; continuation pointed to by *cont*
Three different ways to return from calls and one way to ignore any
;; returned values
(return 1) ; return to continuation in *cont*
values are on stack , count is next two bytes
(closed-values +) ; values are on stack, count is pushed on stack
(ignore-values +) ; ignore (and dispose of) returned values
;; Different ways to jump
(goto-template index) ; jump to another template (*EXP*)
; does not poll for interrupts
(call-template index nargs) ; call a template instead of a procedure
; nargs is needed for interrupt handling
Current VM only handles the zero - arg case .
(jump-if-false offset 1) ; boolean in *val*
(jump offset)
jump using delta specified by * *
; defaults to instruction after deltas (*EXP*)
;; For the closed-compiled definitions of n-ary arithmetic functions.
;; The opcode sequences used are:
;; binary-reduce1 binary-op binary-reduce2 return
;; and
;; binary-reduce1 binary-op binary-comparison-reduce2 return
((binary-reduce1 binary-reduce2 binary-comparison-reduce2))
Scalar primitives
(eq? 2)
((number? integer? rational? real? complex? exact?) 1)
((exact->inexact inexact->exact) 1)
((+ *) 2 0 1 +)
((- /) 2 1)
((= < > <= >=) 2 +)
((quotient remainder) 2)
((floor numerator denominator
real-part imag-part
exp log sin cos tan asin acos sqrt
angle magnitude)
1)
(atan 2)
((make-polar make-rectangular) 2)
(bitwise-not 1)
((bitwise-and bitwise-ior bitwise-xor) 2)
(arithmetic-shift 2)
(char? 1)
((char=? char<?) 2)
((char->ascii ascii->char) 1)
(eof-object? 1)
;; Data manipulation
(stored-object-has-type? stob 1)
(stored-object-length stob 1)
(make-stored-object byte stob)
(closed-make-stored-object stob) ; size pushed on stack
(stored-object-ref stob byte 1) ; byte is the offset
(stored-object-set! stob byte 2)
(make-vector-object stob 2) ; size + init
(stored-object-indexed-ref stob 2) ; vector + offset
(stored-object-indexed-set! stob 3) ; vector + offset + value
(make-byte-vector 2)
(byte-vector-length 1)
(byte-vector-ref 2)
(byte-vector-set! 3)
(make-string 2)
(string-length 1)
(string-ref 2)
(string-set! 3)
(intern 1)
(location-defined? 1)
(set-location-defined?! 2)
((immutable? make-immutable!) 1)
;; channels (unbuffered, non-blocking I/O)
(open-channel 2)
(close-channel 1)
(channel-maybe-read 5)
(channel-maybe-write 4)
(add-pending-channel 2)
(channel-ready? 1)
(channel-abort 1) ; stop channel operation
(open-channels-list) ; return a list of the open channels
;; Misc
((unassigned unspecific))
(trap 1) ; raise exception
(false) ; return #f (for bootstrapping)
(eof-object) ; hard to get otherwise
(write-image 3)
(collect)
(string-hash 1) ; used by the static linker for the initial table
(add-finalizer! 2)
(memory-status 2)
(find-all 1) ; makes a vector of all objects of a given type
(find-all-records 1) ; makes a vector of all records of a given type
(current-thread)
(set-current-thread! 1)
(session-data) ; session specific data
(set-session-data! 1)
(set-exception-handlers! 1)
(return-from-exception 1)
(set-interrupt-handlers! 1)
(set-enabled-interrupts! 1)
(return-from-interrupt)
(schedule-interrupt 1)
(wait 2) ; do nothing until something happens
(call-external-value 1 +)
(lookup-shared-binding 2)
(define-shared-binding 3)
(undefine-shared-binding 2)
(time 2)
(vm-extension 2) ; access to extensions of the virtual machine
(return-from-callback 2) ; return from an callback
;; Unnecessary primitives
(string=? 2)
(reverse-list->string 2)
(assq 2)
(checked-record-ref 3)
(checked-record-set! 4)
(copy-bytes! 5)
;; ports (buffered I/O) - these are all unnecessary
;; byte = 0 -> port is supplied
= 1 - > get port from dynamic environment
((read-char peek-char) byte 1 0)
(write-char byte 2 1)
;; For writing informative messages when debugging
(message 1)
)
(define-enumeration interrupt
order matters - higher priority first
keyboard
post-gc ; handler is passed a list of finalizers
i/o-read-completion ; handler is passed channel and status
i/o-write-completion ; handler is passed channel and status
os-signal
))
; Possible problems
(define-enumeration exception
(unassigned-local
undefined-global
unbound-global
bad-procedure
wrong-number-of-arguments
wrong-type-argument
arithmetic-overflow
index-out-of-range
heap-overflow
out-of-memory
cannot-open-channel
channel-os-index-already-in-use
closed-channel
pending-channel-i/o
buffer-full/empty
unimplemented-instruction
trap
proceeding-after-exception
bad-option
unbound-external-name
too-many-arguments-to-external-procedure
too-many-arguments-in-callback
callback-return-uncovered
extension-exception
extension-return-error
os-error
unresumable-records-in-image
gc-protection-mismatch
))
; Used by (READ-CHAR) and (WRITE-CHAR) to get the appropriate ports from
; the fluid environment.
(define-enumeration current-port-marker
(current-input-port
current-output-port))
;----------------
; Encoding for template protocols:
0 ... MAX - STACK - ARGS = that number of arguments , no rest list
TWO - BYTE - NARGS = ( 2*MAX - STACK - ARGS)+1 = next two bytes are the fixed argument
; count
TWO - BYTE - NARGS+LIST = TWO - BYTE - NARGS + 1 = next two bytes are the fixed
; argument count, plus a rest list
(define maximum-stack-args 63)
(define *last-protocol* maximum-stack-args)
(define (next-protocol)
(set! *last-protocol* (+ *last-protocol* 1))
*last-protocol*)
(define two-byte-nargs-protocol (next-protocol))
; Used for all n-ary procedures.
(define two-byte-nargs+list-protocol (next-protocol))
; Real protocol is at the end of the code vector, along with the required
; stack size:
... real - protocol stack - size0 stack - size1
; This stuff has to be at the end of the code vector because the necessary stack
; size is not determined until after the code vector has been assembled.
(define big-stack-protocol (next-protocol))
; The rest are used only for the definitions of various Scheme primitives.
For VECTOR , RECORD , VALUES , EXTERNAL - CALL , APPLY
Next byte is the minimum number of arguments ( 1 for EXT - CALL , 2 for APPLY ,
; 0 for the rest).
Stack = arg0 arg1 ... argN rest - list N+1 total - arg - count
The first two arguments are always on the stack .
(define args+nargs-protocol (next-protocol))
Followed by four bytes : the offsets of code for the 0 , 1 , 2 , and 3 + arg cases .
; A zero indicatest that the primitive doesn't accept that many arguments.
If there are fewer than three arguments they are all on the stack . In the
3 + case this is the same as args+nargs above .
(define nary-dispatch-protocol (next-protocol))
The maximum number of arguments that can be passed to EXTERNAL - CALL .
; This is determined by the C procedure `external_call()'.
(define maximum-external-call-args 12)
;----------------
; The number of stack slots available to each procedure by default.
Procedures that need more than this must use one of the two - byte - nargs
; protocols. All of these are given in terms of descriptors.
(define default-stack-space 64)
(define environment-stack-size 2) ; header + superior environment
(define continuation-stack-size 5) ; header + continuation + template +
; pc + env
(define available-stack-space 8000) ; how much stack space is available for
any one procedure
;----------------
Options for op / time
(define-enumeration time-option
(run-time
real-time
cheap-time ; cheap (no system call) access to the polling clock
;current-time
))
Options for op / memory - status
(define-enumeration memory-status-option
(available
heap-size
stack-size
gc-count
expand-heap!
pointer-hash
))
The two types of special channels can not be used for normal I / O.
(define-enumeration channel-status-option
(closed
input
output
special-input ; socket accept, ???
special-output ; ???
))
Indicies into a port 's status word
(define-enumeration port-status-options
(input
output
open-for-input
open-for-output
))
(define-enumeration stob
D - vector types ( traced by GC )
pair
symbol
vector
closure
location
cell
channel
port
ratnum
record
continuation
extended-number
template
weak-pointer
shared-binding
unused-d-header1
unused-d-header2
B - vector types ( not traced by GC )
string ; = least b-vector type
byte-vector
double ; double precision floating point
bignum
))
; This is here to try to ensure that it is changed when STOB changes.
(define least-b-vector-type (enum stob string))
; (stob predicate constructor . (accessor modifier)*)
; If nothing else, the run-time system and the VM need to agree on
; which slot of a pair is the car and which is the cdr.
(define stob-data
'((pair pair? cons
(car set-car!) (cdr set-cdr!))
RTS calls op / string->symbol
(symbol->string))
(location location? make-location
(location-id set-location-id!)
(contents set-contents!))
(cell cell? make-cell
(cell-ref cell-set!))
(closure closure? make-closure
(closure-template) (closure-env))
(weak-pointer weak-pointer? make-weak-pointer
(weak-pointer-ref))
(shared-binding shared-binding? make-shared-binding
(shared-binding-name)
(shared-binding-is-import?)
(shared-binding-ref shared-binding-set!))
(port port? make-port
(port-handler set-port-handler!)
(port-status set-port-status!)
(port-lock set-port-lock!)
(port-locked? set-port-locked?!)
(port-data set-port-data!)
(port-buffer set-port-buffer!)
(port-index set-port-index!)
(port-limit set-port-limit!)
(port-pending-eof? set-port-pending-eof?!))
(channel channel? #f
(channel-status)
(channel-id)
(channel-os-index))
))
| null | https://raw.githubusercontent.com/cardmagic/lucash/0452d410430d12140c14948f7f583624f819cdad/reference/scsh-0.6.6/scheme/vm/arch.scm | scheme | -*- Mode: Scheme; Syntax: Scheme; Package: Scheme; -*-
This is file arch.scm.
Architecture description
Things that the VM and the runtime system both need to know.
Bytecodes: for compiler and interpreter
Instruction specification is
(op . args)
OP may be a name or a list of names
ARGS are
byte - a byte
junk - a byte that is ignored (e.g. when a peephole optimization merges
offset - two bytes giving an offset into the current instruction stream
stob - a byte specifying a type for a stored object
the first
number is the argument count implemented by the VM)
+ - any number of additional arguments are allowed
Instructions marked *EXP* are experimental and are not normally used by
byte-code compiler.
executed
cons an environment
back and over
back encoded into op-code for efficiency
value to *val*
byte = 0 -> use environment in *env*
make new environment from env-data
push *val* onto stack
common combination
pop top of stack into *val*
index'th element of stack into *val*
save state in *cont*
to use copied continuation
copy next continuation from heap (this
op-code is used internally by the VM)
different ways to call procedures
last argument is the procedure to call
(*EXP*, and no two-byte version)
last is a list of additional arguments, next
arguments are as for Scheme's APPLY, with
the number of non-list arguments pushed on
the top of the stack
values are on stack, consumer is in the
continuation pointed to by *cont*
returned values
return to continuation in *cont*
values are on stack, count is pushed on stack
ignore (and dispose of) returned values
Different ways to jump
jump to another template (*EXP*)
does not poll for interrupts
call a template instead of a procedure
nargs is needed for interrupt handling
boolean in *val*
defaults to instruction after deltas (*EXP*)
For the closed-compiled definitions of n-ary arithmetic functions.
The opcode sequences used are:
binary-reduce1 binary-op binary-reduce2 return
and
binary-reduce1 binary-op binary-comparison-reduce2 return
Data manipulation
size pushed on stack
byte is the offset
size + init
vector + offset
vector + offset + value
channels (unbuffered, non-blocking I/O)
stop channel operation
return a list of the open channels
Misc
raise exception
return #f (for bootstrapping)
hard to get otherwise
used by the static linker for the initial table
makes a vector of all objects of a given type
makes a vector of all records of a given type
session specific data
do nothing until something happens
access to extensions of the virtual machine
return from an callback
Unnecessary primitives
ports (buffered I/O) - these are all unnecessary
byte = 0 -> port is supplied
For writing informative messages when debugging
handler is passed a list of finalizers
handler is passed channel and status
handler is passed channel and status
Possible problems
Used by (READ-CHAR) and (WRITE-CHAR) to get the appropriate ports from
the fluid environment.
----------------
Encoding for template protocols:
count
argument count, plus a rest list
Used for all n-ary procedures.
Real protocol is at the end of the code vector, along with the required
stack size:
This stuff has to be at the end of the code vector because the necessary stack
size is not determined until after the code vector has been assembled.
The rest are used only for the definitions of various Scheme primitives.
0 for the rest).
A zero indicatest that the primitive doesn't accept that many arguments.
This is determined by the C procedure `external_call()'.
----------------
The number of stack slots available to each procedure by default.
protocols. All of these are given in terms of descriptors.
header + superior environment
header + continuation + template +
pc + env
how much stack space is available for
----------------
cheap (no system call) access to the polling clock
current-time
socket accept, ???
???
= least b-vector type
double precision floating point
This is here to try to ensure that it is changed when STOB changes.
(stob predicate constructor . (accessor modifier)*)
If nothing else, the run-time system and the VM need to agree on
which slot of a pair is the car and which is the cdr. | Copyright ( c ) 1993 - 1999 by and . See file COPYING .
(define architecture-version "Vanilla 20")
(define bits-used-per-byte 8)
(define byte-limit (expt 2 bits-used-per-byte))
- a byte
two instructions into a single , shorter one )
two - bytes - two bytes
index - a two byte index into the current template
small - index - a one byte index into the current template
0 1 2 ... - the number of non - instruction - stream arguments ( some
(define-syntax define-instruction-set
(lambda (form rename compare)
(let ((data (do ((data (reverse (cdr form)) (cdr data))
(new '() (let ((next (car data)))
(if (pair? (car next))
(append (map (lambda (op)
(cons op (cdr next)))
(car next))
new)
(cons next new)))))
((null? data) new))))
`(begin (define-enumeration op
,(map car data))
(define opcode-arg-specs
'#(,@(map cdr data)))))))
(define-instruction-set
first opcode in a procedure , never actually
value to * val * , two - byte index
value to * val * , one - byte index
((local0 local1 local2)
same , but counts are two bytes
back over value , counts are two bytes
(set-global! index 1)
byte = 1 - > use environment in * val *
byte junk 1)
* val * to index'th element of stack
save state in * cont * , two - byte size
copy * cont * to * val * , use WITH -
ditto , is two bytes
same , move args to just above * cont * first
last argument is procedure to call , second to
two bytes are the number of stack arguments
first arg is cont , second is procedure
Three different ways to return from calls and one way to ignore any
values are on stack , count is next two bytes
Current VM only handles the zero - arg case .
(jump offset)
jump using delta specified by * *
((binary-reduce1 binary-reduce2 binary-comparison-reduce2))
Scalar primitives
(eq? 2)
((number? integer? rational? real? complex? exact?) 1)
((exact->inexact inexact->exact) 1)
((+ *) 2 0 1 +)
((- /) 2 1)
((= < > <= >=) 2 +)
((quotient remainder) 2)
((floor numerator denominator
real-part imag-part
exp log sin cos tan asin acos sqrt
angle magnitude)
1)
(atan 2)
((make-polar make-rectangular) 2)
(bitwise-not 1)
((bitwise-and bitwise-ior bitwise-xor) 2)
(arithmetic-shift 2)
(char? 1)
((char=? char<?) 2)
((char->ascii ascii->char) 1)
(eof-object? 1)
(stored-object-has-type? stob 1)
(stored-object-length stob 1)
(make-stored-object byte stob)
(stored-object-set! stob byte 2)
(make-byte-vector 2)
(byte-vector-length 1)
(byte-vector-ref 2)
(byte-vector-set! 3)
(make-string 2)
(string-length 1)
(string-ref 2)
(string-set! 3)
(intern 1)
(location-defined? 1)
(set-location-defined?! 2)
((immutable? make-immutable!) 1)
(open-channel 2)
(close-channel 1)
(channel-maybe-read 5)
(channel-maybe-write 4)
(add-pending-channel 2)
(channel-ready? 1)
((unassigned unspecific))
(write-image 3)
(collect)
(add-finalizer! 2)
(memory-status 2)
(current-thread)
(set-current-thread! 1)
(set-session-data! 1)
(set-exception-handlers! 1)
(return-from-exception 1)
(set-interrupt-handlers! 1)
(set-enabled-interrupts! 1)
(return-from-interrupt)
(schedule-interrupt 1)
(call-external-value 1 +)
(lookup-shared-binding 2)
(define-shared-binding 3)
(undefine-shared-binding 2)
(time 2)
(string=? 2)
(reverse-list->string 2)
(assq 2)
(checked-record-ref 3)
(checked-record-set! 4)
(copy-bytes! 5)
= 1 - > get port from dynamic environment
((read-char peek-char) byte 1 0)
(write-char byte 2 1)
(message 1)
)
(define-enumeration interrupt
order matters - higher priority first
keyboard
os-signal
))
(define-enumeration exception
(unassigned-local
undefined-global
unbound-global
bad-procedure
wrong-number-of-arguments
wrong-type-argument
arithmetic-overflow
index-out-of-range
heap-overflow
out-of-memory
cannot-open-channel
channel-os-index-already-in-use
closed-channel
pending-channel-i/o
buffer-full/empty
unimplemented-instruction
trap
proceeding-after-exception
bad-option
unbound-external-name
too-many-arguments-to-external-procedure
too-many-arguments-in-callback
callback-return-uncovered
extension-exception
extension-return-error
os-error
unresumable-records-in-image
gc-protection-mismatch
))
(define-enumeration current-port-marker
(current-input-port
current-output-port))
0 ... MAX - STACK - ARGS = that number of arguments , no rest list
TWO - BYTE - NARGS = ( 2*MAX - STACK - ARGS)+1 = next two bytes are the fixed argument
TWO - BYTE - NARGS+LIST = TWO - BYTE - NARGS + 1 = next two bytes are the fixed
(define maximum-stack-args 63)
(define *last-protocol* maximum-stack-args)
(define (next-protocol)
(set! *last-protocol* (+ *last-protocol* 1))
*last-protocol*)
(define two-byte-nargs-protocol (next-protocol))
(define two-byte-nargs+list-protocol (next-protocol))
... real - protocol stack - size0 stack - size1
(define big-stack-protocol (next-protocol))
For VECTOR , RECORD , VALUES , EXTERNAL - CALL , APPLY
Next byte is the minimum number of arguments ( 1 for EXT - CALL , 2 for APPLY ,
Stack = arg0 arg1 ... argN rest - list N+1 total - arg - count
The first two arguments are always on the stack .
(define args+nargs-protocol (next-protocol))
Followed by four bytes : the offsets of code for the 0 , 1 , 2 , and 3 + arg cases .
If there are fewer than three arguments they are all on the stack . In the
3 + case this is the same as args+nargs above .
(define nary-dispatch-protocol (next-protocol))
The maximum number of arguments that can be passed to EXTERNAL - CALL .
(define maximum-external-call-args 12)
Procedures that need more than this must use one of the two - byte - nargs
(define default-stack-space 64)
any one procedure
Options for op / time
(define-enumeration time-option
(run-time
real-time
))
Options for op / memory - status
(define-enumeration memory-status-option
(available
heap-size
stack-size
gc-count
expand-heap!
pointer-hash
))
The two types of special channels can not be used for normal I / O.
(define-enumeration channel-status-option
(closed
input
output
))
Indicies into a port 's status word
(define-enumeration port-status-options
(input
output
open-for-input
open-for-output
))
(define-enumeration stob
D - vector types ( traced by GC )
pair
symbol
vector
closure
location
cell
channel
port
ratnum
record
continuation
extended-number
template
weak-pointer
shared-binding
unused-d-header1
unused-d-header2
B - vector types ( not traced by GC )
byte-vector
bignum
))
(define least-b-vector-type (enum stob string))
(define stob-data
'((pair pair? cons
(car set-car!) (cdr set-cdr!))
RTS calls op / string->symbol
(symbol->string))
(location location? make-location
(location-id set-location-id!)
(contents set-contents!))
(cell cell? make-cell
(cell-ref cell-set!))
(closure closure? make-closure
(closure-template) (closure-env))
(weak-pointer weak-pointer? make-weak-pointer
(weak-pointer-ref))
(shared-binding shared-binding? make-shared-binding
(shared-binding-name)
(shared-binding-is-import?)
(shared-binding-ref shared-binding-set!))
(port port? make-port
(port-handler set-port-handler!)
(port-status set-port-status!)
(port-lock set-port-lock!)
(port-locked? set-port-locked?!)
(port-data set-port-data!)
(port-buffer set-port-buffer!)
(port-index set-port-index!)
(port-limit set-port-limit!)
(port-pending-eof? set-port-pending-eof?!))
(channel channel? #f
(channel-status)
(channel-id)
(channel-os-index))
))
|
aafee23904aaaf3f6651c9329290676ebfeb1da60ba4a830913a809cce79cb46 | nathanmarz/cascalog | predmacro.clj | (ns cascalog.logic.predmacro
"This namespace contains functions that help to define predicate
macro instances, and compile predicate macro instances out into
sequences of RawPredicate instances."
(:require [clojure.set :refer (intersection)]
[clojure.walk :refer (postwalk)]
[cascalog.logic.predicate :as p]
[cascalog.logic.vars :as v]
[jackknife.core :as u]
[jackknife.seq :as s])
(:import [jcascalog PredicateMacro PredicateMacroTemplate]))
# # Predicate Macro Building Functions
;; "expand" is called from "normalize" in cascalog.parse. The parsing
;; code takes care of the recursive expansion needed on the results
;; of a call to "expand".
(defprotocol IPredMacro
(expand [_ input output]
"Returns a sequence of vectors suitable to feed into
cascalog.parse/normalize."))
(defn predmacro? [o]
(satisfies? IPredMacro (if (var? o) @o o)))
(extend-protocol p/ICouldFilter
cascalog.logic.predmacro.IPredMacro
(filter? [_] true))
(extend-protocol IPredMacro
;; Predicate macro templates really should just extend this protocol
;; directly. getCompiledPredMacro calls into build-predmacro below
and returns a reified instance of IPredMacro .
PredicateMacroTemplate
(expand [p input output]
(expand (.getCompiledPredMacro p) input output))
clojure.lang.Var
(expand [v input output]
(if (predmacro? v)
(expand @v input output)
(u/throw-runtime)))
TODO : jCascalog shold just use these interfaces directly . If this
;; were the case, we wouldn't have to extend the protocol here.
PredicateMacro
(expand [p input output]
(letfn [(to-fields [fields]
(jcascalog.Fields. (or fields [])))]
(-> p (.getPredicates (to-fields input)
(to-fields output))))))
;; kind of a hack, simulate using pred macros like filters
(defn use-as-filter?
"If a predicate macro had a single output variable defined and you
try to use it with no output variables, the predicate macro acts as
a filter."
[output-decl outvars]
(and (empty? outvars)
(sequential? output-decl)
(= 1 (count output-decl))))
(defn predmacro*
"Functional version of predmacro. See predmacro for details."
[fun]
(reify IPredMacro
(expand [_ invars outvars]
(fun invars outvars))))
(defmacro predmacro
"A more general but more verbose way to create predicate macros.
Creates a function that takes in [invars outvars] and returns a
list of predicates. When making predicate macros this way, you must
create intermediate variables with gen-nullable-var(s). This is
because unlike the (<- [?a :> ?b] ...) way of doing pred macros,
Cascalog doesn't have a declaration for the inputs/outputs.
See -macros
"
[& body]
`(predmacro* (fn ~@body)))
(defn validate-declarations!
"Assert that the same variables aren't used on input and output when
defining a predicate macro."
[input-decl output-decl]
(when (seq (intersection (set input-decl)
(set output-decl)))
;; TODO: ignore destructuring characters and check that no
;; constants are present.
(u/throw-runtime (format
(str "Cannot declare the same var as "
"an input and output to predicate macro: %s %s")
input-decl output-decl))))
(defn build-predmacro
"Build a predicate macro via input and output declarations. This
function takes a sequence of declared inputs, a seq of declared
outputs and a sequence of raw predicates. Upon use, any variable
name not in the input or output declarations will be replaced with a
random Cascalog variable (uniqued by appending a suffix, so nullable
vs non-nullable will be maintained)."
[input-decl output-decl raw-predicates]
(validate-declarations! (when input-decl
(s/collectify input-decl))
(when output-decl
(s/collectify output-decl)))
(reify IPredMacro
(expand [_ invars outvars]
(let [outvars (if (use-as-filter? output-decl outvars)
[true]
outvars)
replacement-m (s/mk-destructured-seq-map input-decl invars
output-decl outvars)
update (memoize (fn [v]
(if (v/cascalog-var? v)
(replacement-m (str v) (v/uniquify-var v))
v)))]
(->> raw-predicates
(mapcat (fn [pred]
(map (fn [{:keys [input output] :as p}]
(-> p
(assoc :input (postwalk update input))
(assoc :output (postwalk update output))))
(p/normalize pred)))))))))
| null | https://raw.githubusercontent.com/nathanmarz/cascalog/deaad977aa98985f68f3d1cc3e081d345184c0c8/cascalog-core/src/clj/cascalog/logic/predmacro.clj | clojure | "expand" is called from "normalize" in cascalog.parse. The parsing
code takes care of the recursive expansion needed on the results
of a call to "expand".
Predicate macro templates really should just extend this protocol
directly. getCompiledPredMacro calls into build-predmacro below
were the case, we wouldn't have to extend the protocol here.
kind of a hack, simulate using pred macros like filters
TODO: ignore destructuring characters and check that no
constants are present. | (ns cascalog.logic.predmacro
"This namespace contains functions that help to define predicate
macro instances, and compile predicate macro instances out into
sequences of RawPredicate instances."
(:require [clojure.set :refer (intersection)]
[clojure.walk :refer (postwalk)]
[cascalog.logic.predicate :as p]
[cascalog.logic.vars :as v]
[jackknife.core :as u]
[jackknife.seq :as s])
(:import [jcascalog PredicateMacro PredicateMacroTemplate]))
# # Predicate Macro Building Functions
(defprotocol IPredMacro
(expand [_ input output]
"Returns a sequence of vectors suitable to feed into
cascalog.parse/normalize."))
(defn predmacro? [o]
(satisfies? IPredMacro (if (var? o) @o o)))
(extend-protocol p/ICouldFilter
cascalog.logic.predmacro.IPredMacro
(filter? [_] true))
(extend-protocol IPredMacro
and returns a reified instance of IPredMacro .
PredicateMacroTemplate
(expand [p input output]
(expand (.getCompiledPredMacro p) input output))
clojure.lang.Var
(expand [v input output]
(if (predmacro? v)
(expand @v input output)
(u/throw-runtime)))
TODO : jCascalog shold just use these interfaces directly . If this
PredicateMacro
(expand [p input output]
(letfn [(to-fields [fields]
(jcascalog.Fields. (or fields [])))]
(-> p (.getPredicates (to-fields input)
(to-fields output))))))
(defn use-as-filter?
"If a predicate macro had a single output variable defined and you
try to use it with no output variables, the predicate macro acts as
a filter."
[output-decl outvars]
(and (empty? outvars)
(sequential? output-decl)
(= 1 (count output-decl))))
(defn predmacro*
"Functional version of predmacro. See predmacro for details."
[fun]
(reify IPredMacro
(expand [_ invars outvars]
(fun invars outvars))))
(defmacro predmacro
"A more general but more verbose way to create predicate macros.
Creates a function that takes in [invars outvars] and returns a
list of predicates. When making predicate macros this way, you must
create intermediate variables with gen-nullable-var(s). This is
because unlike the (<- [?a :> ?b] ...) way of doing pred macros,
Cascalog doesn't have a declaration for the inputs/outputs.
See -macros
"
[& body]
`(predmacro* (fn ~@body)))
(defn validate-declarations!
"Assert that the same variables aren't used on input and output when
defining a predicate macro."
[input-decl output-decl]
(when (seq (intersection (set input-decl)
(set output-decl)))
(u/throw-runtime (format
(str "Cannot declare the same var as "
"an input and output to predicate macro: %s %s")
input-decl output-decl))))
(defn build-predmacro
"Build a predicate macro via input and output declarations. This
function takes a sequence of declared inputs, a seq of declared
outputs and a sequence of raw predicates. Upon use, any variable
name not in the input or output declarations will be replaced with a
random Cascalog variable (uniqued by appending a suffix, so nullable
vs non-nullable will be maintained)."
[input-decl output-decl raw-predicates]
(validate-declarations! (when input-decl
(s/collectify input-decl))
(when output-decl
(s/collectify output-decl)))
(reify IPredMacro
(expand [_ invars outvars]
(let [outvars (if (use-as-filter? output-decl outvars)
[true]
outvars)
replacement-m (s/mk-destructured-seq-map input-decl invars
output-decl outvars)
update (memoize (fn [v]
(if (v/cascalog-var? v)
(replacement-m (str v) (v/uniquify-var v))
v)))]
(->> raw-predicates
(mapcat (fn [pred]
(map (fn [{:keys [input output] :as p}]
(-> p
(assoc :input (postwalk update input))
(assoc :output (postwalk update output))))
(p/normalize pred)))))))))
|
9b7f417d5c33c4699ec494fbe65adc271559ef2c194aa5bbcf41a87edeb9b1f1 | RedPRL/algaeff | Fun.ml | module Deep =
struct
let finally k f =
match f () with
| x -> Effect.Deep.continue k x
| exception e -> Effect.Deep.discontinue k e
end
module Shallow =
struct
let finally_with k f h =
match f () with
| x -> Effect.Shallow.continue_with k x h
| exception e -> Effect.Shallow.discontinue_with k e h
end
| null | https://raw.githubusercontent.com/RedPRL/algaeff/6867d391f2aafb5129bd8e792de5a8f7bad14a76/src/Fun.ml | ocaml | module Deep =
struct
let finally k f =
match f () with
| x -> Effect.Deep.continue k x
| exception e -> Effect.Deep.discontinue k e
end
module Shallow =
struct
let finally_with k f h =
match f () with
| x -> Effect.Shallow.continue_with k x h
| exception e -> Effect.Shallow.discontinue_with k e h
end
| |
4a95cc6535f0865ec3445127c448104a68b58f06688e777213e8786a3abcbe2a | dgtized/shimmers | imperfect_curves.cljs | (ns shimmers.sketches.imperfect-curves
(:require
[shimmers.common.svg :as csvg]
[shimmers.common.ui.controls :as ctrl]
[shimmers.math.deterministic-random :as dr]
[shimmers.sketch :as sketch :include-macros true]
[shimmers.view.sketch :as view-sketch]
[thi.ng.geom.line :as gl]
[thi.ng.geom.vector :as gv]
[thi.ng.math.core :as tm]))
original plan was a perspective drawing of two building faces joined by a curve
(def width 800)
(def height 600)
(defn rv [x y]
(gv/vec2 (* width x) (* height y)))
(defn upper-ellipse [p rx ry]
(for [t (range tm/PI tm/TWO_PI 0.1)]
(tm/+ (gv/vec2 (* rx (Math/cos t))
(* ry (Math/sin t)))
p)))
(defn shapes []
(for [t (dr/var-range 10)]
(gl/linestrip2 (upper-ellipse (rv 0.5 0.5) (* t 0.3 width) (* t 0.4 height)))))
(defn scene []
(csvg/svg {:width width
:height height
:stroke "black"
:fill "white"
:stroke-width 0.5}
(shapes)))
(sketch/definition imperfect-curves
{:created-at "2022-02-08"
:type :svg
:tags #{}}
(ctrl/mount (view-sketch/page-for scene :imperfect-curves)
"sketch-host"))
| null | https://raw.githubusercontent.com/dgtized/shimmers/f096c20d7ebcb9796c7830efcd7e3f24767a46db/src/shimmers/sketches/imperfect_curves.cljs | clojure | (ns shimmers.sketches.imperfect-curves
(:require
[shimmers.common.svg :as csvg]
[shimmers.common.ui.controls :as ctrl]
[shimmers.math.deterministic-random :as dr]
[shimmers.sketch :as sketch :include-macros true]
[shimmers.view.sketch :as view-sketch]
[thi.ng.geom.line :as gl]
[thi.ng.geom.vector :as gv]
[thi.ng.math.core :as tm]))
original plan was a perspective drawing of two building faces joined by a curve
(def width 800)
(def height 600)
(defn rv [x y]
(gv/vec2 (* width x) (* height y)))
(defn upper-ellipse [p rx ry]
(for [t (range tm/PI tm/TWO_PI 0.1)]
(tm/+ (gv/vec2 (* rx (Math/cos t))
(* ry (Math/sin t)))
p)))
(defn shapes []
(for [t (dr/var-range 10)]
(gl/linestrip2 (upper-ellipse (rv 0.5 0.5) (* t 0.3 width) (* t 0.4 height)))))
(defn scene []
(csvg/svg {:width width
:height height
:stroke "black"
:fill "white"
:stroke-width 0.5}
(shapes)))
(sketch/definition imperfect-curves
{:created-at "2022-02-08"
:type :svg
:tags #{}}
(ctrl/mount (view-sketch/page-for scene :imperfect-curves)
"sketch-host"))
| |
13b9dfbdc207614e8326c2e0d10a16341eb309a148413ede220c8b4e4ddf7290 | mrkkrp/megaparsec | PosSpec.hs | module Text.Megaparsec.PosSpec (spec) where
import Control.Exception (evaluate)
import Data.Function (on)
import Data.List (isInfixOf)
import Test.Hspec
import Test.Hspec.Megaparsec.AdHoc ()
import Test.QuickCheck
import Text.Megaparsec.Pos
spec :: Spec
spec = do
describe "mkPos" $ do
context "when the argument is a non-positive number" $
it "throws InvalidPosException" $
property $ \n ->
n <= 0 ==>
evaluate (mkPos n) `shouldThrow` (== InvalidPosException n)
context "when the argument is not 0" $
it "returns Pos with the given value" $
property $ \n ->
(n > 0) ==> (unPos (mkPos n) `shouldBe` n)
describe "Read and Show instances of Pos" $
it "printed representation of Pos is isomorphic to its value" $
property $ \x ->
read (show x) === (x :: Pos)
describe "Ord instance of Pos" $
it "works just like Ord instance of underlying Word" $
property $ \x y ->
compare x y === (compare `on` unPos) x y
describe "Semigroup instance of Pos" $
it "works like addition" $
property $ \x y ->
x <> y === mkPos (unPos x + unPos y)
.&&. unPos (x <> y) === unPos x + unPos y
describe "initialPos" $
it "constructs initial position correctly" $
property $ \path ->
let x = initialPos path
in sourceName x === path
.&&. sourceLine x === mkPos 1
.&&. sourceColumn x === mkPos 1
describe "Read and Show instances of SourcePos" $
it "printed representation of SourcePos in isomorphic to its value" $
property $ \x ->
read (show x) === (x :: SourcePos)
describe "sourcePosPretty" $ do
it "displays file name" $
property $ \x ->
sourceName x `isInfixOf` sourcePosPretty x
it "displays line number" $
property $ \x ->
(show . unPos . sourceLine) x `isInfixOf` sourcePosPretty x
it "displays column number" $
property $ \x ->
(show . unPos . sourceColumn) x `isInfixOf` sourcePosPretty x
| null | https://raw.githubusercontent.com/mrkkrp/megaparsec/7d6b82642d6c42f449d445b368f4286da16136e0/megaparsec-tests/tests/Text/Megaparsec/PosSpec.hs | haskell | module Text.Megaparsec.PosSpec (spec) where
import Control.Exception (evaluate)
import Data.Function (on)
import Data.List (isInfixOf)
import Test.Hspec
import Test.Hspec.Megaparsec.AdHoc ()
import Test.QuickCheck
import Text.Megaparsec.Pos
spec :: Spec
spec = do
describe "mkPos" $ do
context "when the argument is a non-positive number" $
it "throws InvalidPosException" $
property $ \n ->
n <= 0 ==>
evaluate (mkPos n) `shouldThrow` (== InvalidPosException n)
context "when the argument is not 0" $
it "returns Pos with the given value" $
property $ \n ->
(n > 0) ==> (unPos (mkPos n) `shouldBe` n)
describe "Read and Show instances of Pos" $
it "printed representation of Pos is isomorphic to its value" $
property $ \x ->
read (show x) === (x :: Pos)
describe "Ord instance of Pos" $
it "works just like Ord instance of underlying Word" $
property $ \x y ->
compare x y === (compare `on` unPos) x y
describe "Semigroup instance of Pos" $
it "works like addition" $
property $ \x y ->
x <> y === mkPos (unPos x + unPos y)
.&&. unPos (x <> y) === unPos x + unPos y
describe "initialPos" $
it "constructs initial position correctly" $
property $ \path ->
let x = initialPos path
in sourceName x === path
.&&. sourceLine x === mkPos 1
.&&. sourceColumn x === mkPos 1
describe "Read and Show instances of SourcePos" $
it "printed representation of SourcePos in isomorphic to its value" $
property $ \x ->
read (show x) === (x :: SourcePos)
describe "sourcePosPretty" $ do
it "displays file name" $
property $ \x ->
sourceName x `isInfixOf` sourcePosPretty x
it "displays line number" $
property $ \x ->
(show . unPos . sourceLine) x `isInfixOf` sourcePosPretty x
it "displays column number" $
property $ \x ->
(show . unPos . sourceColumn) x `isInfixOf` sourcePosPretty x
| |
43c04ca2f93fc5bb55358c5a6aafe01f1e5e282f6739b1b27bae208c3a2832e5 | kelamg/HtDP2e-workthrough | ex242.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-intermediate-reader.ss" "lang")((modname ex242) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
A [ Maybe X ] is one of :
; – #false
; – X
;; [Maybe String] is either #false or a string
;; [Maybe [List-of-string]] is either #false or a list of strings
;; [List-of [Maybe String]] is a list of either #false or a string
; String [List-of String] -> [Maybe [List-of String]]
returns the remainder of los starting with s
; #false otherwise
;
The signature means that the function accepts two arguments :
; a String and a list of strings
; and returns:
; either #false or a list of strings
(check-expect (occurs "a" (list "b" "a" "d" "e"))
(list "d" "e"))
(check-expect (occurs "a" (list "b" "c" "d")) #f)
(define (occurs s los)
(cond
[(empty? los) #f]
[(string=? s (first los)) (rest los)]
[else (occurs s (rest los))])) | null | https://raw.githubusercontent.com/kelamg/HtDP2e-workthrough/ec05818d8b667a3c119bea8d1d22e31e72e0a958/HtDP/Abstraction/ex242.rkt | racket | about the language level of this file in a form that our tools can easily process.
– #false
– X
[Maybe String] is either #false or a string
[Maybe [List-of-string]] is either #false or a list of strings
[List-of [Maybe String]] is a list of either #false or a string
String [List-of String] -> [Maybe [List-of String]]
#false otherwise
a String and a list of strings
and returns:
either #false or a list of strings | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-intermediate-reader.ss" "lang")((modname ex242) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
A [ Maybe X ] is one of :
returns the remainder of los starting with s
The signature means that the function accepts two arguments :
(check-expect (occurs "a" (list "b" "a" "d" "e"))
(list "d" "e"))
(check-expect (occurs "a" (list "b" "c" "d")) #f)
(define (occurs s los)
(cond
[(empty? los) #f]
[(string=? s (first los)) (rest los)]
[else (occurs s (rest los))])) |
607137cf4df15ed2fc23a988ca5e46bb3f701247e536c121c676497df5e5bb54 | noteed/language-glsl | Parser.hs | module Language.GLSL.Parser where
import Prelude hiding (break, exponent)
import Text.ParserCombinators.Parsec hiding (State, parse)
import Text.ParserCombinators.Parsec.Expr
import Language.GLSL.Syntax
----------------------------------------------------------------------
Parser state , hold a symbol table .
----------------------------------------------------------------------
data S = S
type P a = GenParser Char S a
----------------------------------------------------------------------
-- Reserved words
----------------------------------------------------------------------
-- List of keywords.
keywords :: [String]
keywords = concat $ map words $
[ "attribute const uniform varying"
, "layout"
, "centroid flat smooth noperspective"
, "break continue do for while switch case default"
, "if else"
, "in out inout"
, "float int void bool true false"
, "invariant"
, "discard return"
, "mat2 mat3 mat4"
, "mat2x2 mat2x3 mat2x4"
, "mat3x2 mat3x3 mat3x4"
, "mat4x2 mat4x3 mat4x4"
, "vec2 vec3 vec4 ivec2 ivec3 ivec4 bvec2 bvec3 bvec4"
, "uint uvec2 uvec3 uvec4"
, "lowp mediump highp precision"
, "sampler1D sampler2D sampler3D samplerCube"
, "sampler1DShadow sampler2DShadow samplerCubeShadow"
, "sampler1DArray sampler2DArray"
, "sampler1DArrayShadow sampler2DArrayShadow"
, "isampler1D isampler2D isampler3D isamplerCube"
, "isampler1DArray isampler2DArray"
, "usampler1D usampler2D usampler3D usamplerCube"
, "usampler1DArray usampler2DArray"
, "sampler2DRect sampler2DRectShadow isampler2DRect usampler2DRect"
, "samplerBuffer isamplerBuffer usamplerBuffer"
, "sampler2DMS isampler2DMS usampler2DMS"
, "sampler2DMSArray isampler2DMSArray usampler2DMSArray"
, "struct"
]
-- List of keywords reserved for future use.
reservedWords :: [String]
reservedWords = concat $ map words $
[ "common partition active"
, "asm"
, "class union enum typedef template this packed"
, "goto"
, "inline noinline volatile public static extern external interface"
, "long short double half fixed unsigned superp"
, "input output"
, "hvec2 hvec3 hvec4 dvec2 dvec3 dvec4 fvec2 fvec3 fvec4"
, "sampler3DRect"
, "filter"
, "image1D image2D image3D imageCube"
, "iimage1D iimage2D iimage3D iimageCube"
, "uimage1D uimage2D uimage3D uimageCube"
, "image1DArray image2DArray"
, "iimage1DArray iimage2DArray uimage1DArray uimage2DArray"
, "image1DShadow image2DShadow"
, "image1DArrayShadow image2DArrayShadow"
, "imageBuffer iimageBuffer uimageBuffer"
, "sizeof cast"
, "namespace using"
, "row_major"
]
----------------------------------------------------------------------
-- Convenience parsers
----------------------------------------------------------------------
comment :: P ()
comment = do
_ <- char '/'
_ <- choice
[ do _ <- char '*'
manyTill anyChar (try $ string "*/")
, do _ <- char '/'
manyTill anyChar ((newline >> return ()) <|> eof)
]
return ()
blank :: P ()
blank = try comment <|> (space >> return ())
-- Acts like p and discards any following space character.
lexeme :: P a -> P a
lexeme p = do
x <- p
skipMany blank
return x
parse :: [Char] -> Either ParseError TranslationUnit
parse =
runParser (do {skipMany blank ; r <- translationUnit ; eof ; return r})
S "GLSL"
----------------------------------------------------------------------
-- Lexical elements (tokens)
----------------------------------------------------------------------
semicolon :: P ()
semicolon = lexeme $ char ';' >> return ()
comma :: P ()
comma = lexeme $ char ',' >> return ()
colon :: P ()
colon = lexeme $ char ':' >> return ()
lbrace :: P ()
lbrace = lexeme $ char '{' >> return ()
rbrace :: P ()
rbrace = lexeme $ char '}' >> return ()
lbracket :: P ()
lbracket = lexeme $ char '[' >> return ()
rbracket :: P ()
rbracket = lexeme $ char ']' >> return ()
lparen :: P ()
lparen = lexeme $ char '(' >> return ()
rparen :: P ()
rparen = lexeme $ char ')' >> return ()
-- Try to parse a given string, making sure it is not a
-- prefix of an identifier.
keyword :: String -> P ()
keyword w = lexeme $ try (string w >> notFollowedBy identifierTail)
-- Parses and returns an identifier.
TODO an identifier ca n't start with " gl _ " unless
-- it is to redeclare a predeclared "gl_" identifier.
identifier :: P String
identifier = lexeme $ do
h <- identifierHead
t <- many identifierTail
check (h:t)
where check i | i `elem` reservedWords = fail $
i ++ " is reserved"
| i `elem` keywords = fail $
i ++ " is a keyword"
| otherwise = checkUnderscore i i
checkUnderscore i ('_':'_':_) = fail $
i ++ " is reserved (two consecutive underscores)"
checkUnderscore i (_:cs) = checkUnderscore i cs
checkUnderscore i [] = return i
TODO the size of the int should fit its type .
intConstant :: P Expr
intConstant = choice
[ hexadecimal
, octal
, badOctal >> fail "Invalid octal number"
, decimal
]
floatingConstant :: P Expr
floatingConstant = choice
[ floatExponent
, floatPoint
, pointFloat
]
-- Try to parse a given string, and allow identifier characters
-- (or anything else) to directly follow.
operator :: String -> P String
operator = lexeme . try . string
----------------------------------------------------------------------
-- Lexical elements helpers
----------------------------------------------------------------------
identifierHead :: P Char
identifierHead = letter <|> char '_'
identifierTail :: P Char
identifierTail = alphaNum <|> char '_'
hexadecimal :: P Expr
hexadecimal = lexeme $ try $ do
_ <- char '0'
_ <- oneOf "Xx"
d <- many1 hexDigit
TODO
return $ IntConstant Hexadecimal $ read ("0x" ++ d)
octal :: P Expr
octal = lexeme $ try $ do
_ <- char '0'
d <- many1 octDigit
TODO
return $ IntConstant Octal $ read ("0o" ++ d)
badOctal :: P ()
badOctal = lexeme $ try $ char '0' >> many1 hexDigit >> return ()
decimal :: P Expr
decimal = lexeme $ try $ do
d <- many1 digit
notFollowedBy (char '.' <|> (exponent >> return ' '))
TODO
return $ IntConstant Decimal $ read d
floatExponent :: P Expr
floatExponent = lexeme $ try $ do
d <- many1 digit
e <- exponent
TODO
return $ FloatConstant $ read $ d ++ e
floatPoint :: P Expr
floatPoint = lexeme $ try $ do
d <- many1 digit
_ <- char '.'
d' <- many digit
let d'' = if null d' then "0" else d'
e <- optionMaybe exponent
TODO
return $ FloatConstant $ read $ d ++ "." ++ d'' ++ maybe "" id e
pointFloat :: P Expr
pointFloat = lexeme $ try $ do
_ <- char '.'
d <- many1 digit
e <- optionMaybe exponent
m <- optionMaybe $ oneOf "Ff"
return $ FloatConstant $ read $ "0." ++ d ++ maybe "" id e
exponent :: P String
exponent = lexeme $ try $ do
_ <- oneOf "Ee"
s <- optionMaybe (oneOf "+-")
d <- many1 digit
return $ "e" ++ maybe "" (:[]) s ++ d
----------------------------------------------------------------------
-- Tables for buildExpressionParser
----------------------------------------------------------------------
infixLeft :: String -> (a -> a -> a) -> Operator Char S a
infixLeft s r = Infix (lexeme (try $ string s) >> return r) AssocLeft
infixLeft' :: String -> (a -> a -> a) -> Operator Char S a
infixLeft' s r = Infix (lexeme (try $ string s >> notFollowedBy (char '=')) >> return r) AssocLeft
infixLeft'' :: Char -> (a -> a -> a) -> Operator Char S a
infixLeft'' c r = Infix (lexeme (try $ char c >> notFollowedBy (oneOf (c:"="))) >> return r) AssocLeft
infixRight :: String -> (a -> a -> a) -> Operator Char S a
infixRight s r = Infix (lexeme (try $ string s) >> return r) AssocRight
conditionalTable :: [[Operator Char S Expr]]
conditionalTable =
[ [infixLeft' "*" Mul, infixLeft' "/" Div, infixLeft' "%" Mod]
, [infixLeft' "+" Add, infixLeft' "-" Sub]
, [infixLeft' "<<" LeftShift, infixLeft' ">>" RightShift]
, [infixLeft' "<" Lt, infixLeft' ">" Gt
,infixLeft "<=" Lte, infixLeft ">=" Gte]
, [infixLeft "==" Equ, infixLeft "!=" Neq]
, [infixLeft'' '&' BitAnd]
, [infixLeft' "^" BitXor]
, [infixLeft'' '|' BitOr]
, [infixLeft "&&" And]
, [infixLeft "||" Or]
]
assignmentTable :: [[Operator Char S Expr]]
assignmentTable =
[ [infixRight "=" Equal]
, [infixRight "+=" AddAssign]
, [infixRight "-=" SubAssign]
, [infixRight "*=" MulAssign]
, [infixRight "/=" DivAssign]
, [infixRight "%=" ModAssign]
, [infixRight "<<=" LeftAssign]
, [infixRight ">>=" RightAssign]
, [infixRight "&=" AndAssign]
, [infixRight "^=" XorAssign]
, [infixRight "|=" OrAssign]
]
expressionTable :: [[Operator Char S Expr]]
expressionTable =
[ [infixLeft "," Sequence]
]
----------------------------------------------------------------------
-- Grammar
----------------------------------------------------------------------
primaryExpression :: P Expr
primaryExpression = choice
[ Variable `fmap` try identifier
-- int constant
, intConstant
-- uint constant
-- float constant
, floatingConstant
-- bool constant
, keyword "true" >> return (BoolConstant True)
, keyword "false" >> return (BoolConstant False)
-- expression within parentheses
, between lparen rparen expression
]
postfixExpression :: P Expr
postfixExpression = do
e <- try (functionCallGeneric >>= \(i,p) -> return (FunctionCall i p))
<|> primaryExpression
p <- many $ choice
[ between lbracket rbracket integerExpression >>= return . flip Bracket
, dotFunctionCallGeneric
, dotFieldSelection
, operator "++" >> return PostInc
, operator "--" >> return PostDec
]
return $ foldl (flip ($)) e p
dotFunctionCallGeneric :: P (Expr -> Expr)
dotFunctionCallGeneric =
lexeme (try $ string "." >> functionCallGeneric) >>= \(i,p) -> return (\e -> MethodCall e i p)
dotFieldSelection :: P (Expr -> Expr)
dotFieldSelection =
lexeme (try $ string "." >> identifier) >>= return . flip FieldSelection
integerExpression :: P Expr
integerExpression = expression
-- Those productions are pushed inside postfixExpression.
-- functionCall = functionCallOrMethod
functionCallOrMethod = functionCallGeneric < | > postfixExpression DOT functionCallGeneric
functionCallGeneric :: P (FunctionIdentifier, Parameters)
functionCallGeneric = do
i <- functionCallHeader
p <- choice
[ keyword "void" >> return ParamVoid
, assignmentExpression `sepBy` comma >>= return . Params
]
rparen
return (i, p)
Those productions are pushed inside functionCallGeneric .
-- functionCallHeaderNoParameters = undefined
-- functionCallHeaderWithParameters = undefined
functionCallHeader :: P FunctionIdentifier
functionCallHeader = do
i <- functionIdentifier
lparen
return i
functionIdentifier :: P FunctionIdentifier
functionIdentifier = choice
[ try identifier >>= return . FuncId
, typeSpecifier >>= return . FuncIdTypeSpec -- TODO if the 'identifier' is declared as a type, should be this case
-- no need for fieldSelection
]
unaryExpression :: P Expr
unaryExpression = do
p <- many $ choice
[ operator "++" >> return PreInc
, operator "--" >> return PreDec
, operator "+" >> return UnaryPlus
, operator "-" >> return UnaryNegate
, operator "!" >> return UnaryNot
, operator "~" >> return UnaryOneComplement
]
e <- postfixExpression
return $ foldr ($) e p
-- inside unaryExpression
-- unaryOperator = choice
-- implemented throught buildExpressionParser
-- multiplicativeExpression = undefined
-- additiveExpression = undefined
-- shiftExpression = undefined
-- relationalExpression = undefined
-- equalityExpression = undefined
-- andExpression = undefined
-- exclusiveOrExpression = undefined
-- inclusiveOrExpression = undefined
-- logicalAndExpression = undefined
-- logicalXorExpression = undefined
-- logicalOrExpression = undefined
conditionalExpression :: P Expr
conditionalExpression = do
loe <- buildExpressionParser conditionalTable unaryExpression
ter <- optionMaybe $ do
_ <- lexeme (string "?")
e <- expression
_ <- lexeme (string ":")
a <- assignmentExpression
return (e, a)
case ter of
Nothing -> return loe
Just (e, a) -> return $ Selection loe e a
assignmentExpression :: P Expr
assignmentExpression = buildExpressionParser assignmentTable conditionalExpression
expression :: P Expr
expression = buildExpressionParser expressionTable assignmentExpression
constantExpression :: P Expr
constantExpression = conditionalExpression
The grammar include here function definition but we do n't
do this here because they should occur only at top level ( page 28 ) .
-- Function definitions are handled in externalDefinition instead.
declaration :: P Declaration
declaration = choice
[ try $ do
t <- fullySpecifiedType
l <- idecl `sepBy` comma
semicolon
return $ InitDeclaration (TypeDeclarator t) l
, do keyword "invariant"
i <- idecl `sepBy` comma
semicolon
return $ InitDeclaration InvariantDeclarator i
, do keyword "precision"
q <- precisionQualifier
s <- typeSpecifierNoPrecision
semicolon
return $ Precision q s
, do q <- typeQualifier
choice
[ semicolon >> return (TQ q)
, do i <- identifier
lbrace
s <- structDeclarationList
rbrace
m <- optionMaybe $ do
j <- identifier
n <- optionMaybe $ between lbracket rbracket $ optionMaybe constantExpression
return (j,n)
semicolon
return $ Block q i s m
]
]
where idecl = do
i <- identifier
m <- optionMaybe $ between lbracket rbracket $
optionMaybe constantExpression
j <- optionMaybe $ lexeme (string "=") >> initializer
return $ InitDecl i m j
functionPrototype :: P FunctionPrototype
functionPrototype = do
(t, i, p) <- functionDeclarator
rparen
return $ FuncProt t i p
functionDeclarator :: P (FullType, String, [ParameterDeclaration])
functionDeclarator = do
(t, i) <- functionHeader
p <- parameterDeclaration `sepBy` comma
return (t, i, p)
-- inside functionDeclarator
-- functionHeaderWithParameters = undefined
functionHeader :: P (FullType, String)
functionHeader = do
t <- fullySpecifiedType
i <- identifier
lparen
return (t, i)
-- inside parameterDeclaration
-- parameterDeclarator = undefined
-- expanding parameterDeclarator and parameterTypeSpecifier, the rule is:
-- parameterDeclaration:
parameterTypeQualifier [ parameterQualifier ] identifier[[e ] ]
[ parameterQualifier ] identifier[[e ] ]
parameterTypeQualifier [ parameterQualifier ]
[ parameterQualifier ]
-- which is simply
[ parameterTypeQualifier ] [ parameterQualifier ] [ identifier[[e ] ] ]
parameterDeclaration :: P ParameterDeclaration
parameterDeclaration = do
tq <- optionMaybe parameterTypeQualifier
q <- optionMaybe parameterQualifier
s <- typeSpecifier
m <- optionMaybe $ do
i <- identifier
FIXME ca n't the bracket be empty , i.e. a [ ] ?
return (i,b)
return $ ParameterDeclaration tq q s m
parameterQualifier :: P ParameterQualifier
parameterQualifier = choice
-- "empty" case handled in the caller
[ (try . lexeme . string) "inout" >> return InOutParameter
, (try . lexeme . string) "in" >> return InParameter
, (try . lexeme . string) "out" >> return OutParameter
]
-- inside parameterDeclaration
parameterTypeSpecifier =
-- FIXME not correct w.r.t. the specs.
-- The specs allow
-- int
-- int, foo
-- invariant foo, bar[]
-- and disallow
-- invariant bar[]
-- It is not used, it is inside declaration.
-- initDeclaratorList = undefined
-- inside initDeclaratorList
-- singleDeclaration = undefined
fullySpecifiedType :: P FullType
fullySpecifiedType = choice
[ try typeSpecifier >>= return . FullType Nothing
, do q <- typeQualifier
s <- typeSpecifier
return $ FullType (Just q) s
]
invariantQualifier :: P InvariantQualifier
invariantQualifier = keyword "invariant" >> return Invariant
interpolationQualifier :: P InterpolationQualifier
interpolationQualifier = choice
[ keyword "smooth" >> return Smooth
, keyword "flat" >> return Flat
, keyword "noperspective" >> return NoPerspective
]
layoutQualifier :: P LayoutQualifier
layoutQualifier = do
keyword "layout"
lparen
q <- layoutQualifierId `sepBy` comma
rparen
return $ Layout q
-- implemented directly in layoutQualifier
-- layoutQualifierIdList = undefined
layoutQualifierId :: P LayoutQualifierId
layoutQualifierId = do
i <- identifier
c <- optionMaybe $ lexeme (string "=") >> intConstant
return $ LayoutQualId i c
parameterTypeQualifier :: P ParameterTypeQualifier
parameterTypeQualifier = keyword "const" >> return ConstParameter
-- sto
-- lay [sto]
-- int [sto]
-- inv [sto]
-- inv int sto
typeQualifier :: P TypeQualifier
typeQualifier = choice
[ do s <- storageQualifier
return $ TypeQualSto s
, do l <- layoutQualifier
s <- optionMaybe storageQualifier
return $ TypeQualLay l s
, do i <- interpolationQualifier
s <- optionMaybe storageQualifier
return $ TypeQualInt i s
, do i <- invariantQualifier
choice
[ do j <- interpolationQualifier
s <- storageQualifier
return $ TypeQualInv3 i j s
, do s <- optionMaybe storageQualifier
return $ TypeQualInv i s
]
]
TODO see 4.3 for restrictions
storageQualifier :: P StorageQualifier
storageQualifier = choice
[ keyword "const" >> return Const
TODO vertex only , is deprecated
, keyword "varying" >> return Varying -- deprecated
, keyword "in" >> return In
, keyword "out" >> return Out
, keyword "centroid" >> (choice
[ keyword "varying" >> return CentroidVarying -- deprecated
, keyword "in" >> return CentroidIn
, keyword "out" >> return CentroidOut
])
, keyword "uniform" >> return Uniform
]
typeSpecifier :: P TypeSpecifier
typeSpecifier = choice
[ do q <- try precisionQualifier
s <- typeSpecifierNoPrecision
return $ TypeSpec (Just q) s
, typeSpecifierNoPrecision >>= return . TypeSpec Nothing
]
typeSpecifierNoPrecision :: P TypeSpecifierNoPrecision
typeSpecifierNoPrecision = do
s <- typeSpecifierNonArray
choice
[ try (lbracket >> rbracket) >> return (TypeSpecNoPrecision s (Just Nothing))
, lbracket >> constantExpression >>= \c -> rbracket >> return (TypeSpecNoPrecision s (Just $ Just c))
, return $ TypeSpecNoPrecision s Nothing
]
-- Basic types, structs, and user-defined types.
typeSpecifierNonArray :: P TypeSpecifierNonArray
typeSpecifierNonArray = choice
[ keyword "void" >> return Void
, keyword "float" >> return Float
, keyword "int" >> return Int
, keyword "uint" >> return UInt
, keyword "bool" >> return Bool
, keyword "vec2" >> return Vec2
, keyword "vec3" >> return Vec3
, keyword "vec4" >> return Vec4
, keyword "bvec2" >> return BVec2
, keyword "bvec3" >> return BVec3
, keyword "bvec4" >> return BVec4
, keyword "ivec2" >> return IVec2
, keyword "ivec3" >> return IVec3
, keyword "ivec4" >> return IVec4
, keyword "uvec2" >> return UVec2
, keyword "uvec3" >> return UVec3
, keyword "uvec4" >> return UVec4
, keyword "mat2" >> return Mat2
, keyword "mat3" >> return Mat3
, keyword "mat4" >> return Mat4
, keyword "mat2x2" >> return Mat2x2
, keyword "mat2x3" >> return Mat2x3
, keyword "mat2x4" >> return Mat2x4
, keyword "mat3x2" >> return Mat3x2
, keyword "mat3x3" >> return Mat3x3
, keyword "mat3x4" >> return Mat3x4
, keyword "mat4x2" >> return Mat4x2
, keyword "mat4x3" >> return Mat4x3
, keyword "mat4x4" >> return Mat4x4
, keyword "sampler1D" >> return Sampler1D
, keyword "sampler2D" >> return Sampler2D
, keyword "sampler3D" >> return Sampler3D
, keyword "samplerCube" >> return SamplerCube
, keyword "sampler1DShadow" >> return Sampler1DShadow
, keyword "sampler2DShadow" >> return Sampler2DShadow
, keyword "samplerCubeShadow" >> return SamplerCubeShadow
, keyword "sampler1DArray" >> return Sampler1DArray
, keyword "sampler2DArray" >> return Sampler2DArray
, keyword "sampler1DArrayShadow" >> return Sampler1DArrayShadow
, keyword "sampler2DArrayShadow" >> return Sampler2DArrayShadow
, keyword "isampler1D" >> return ISampler1D
, keyword "isampler2D" >> return ISampler2D
, keyword "isampler3D" >> return ISampler3D
, keyword "isamplerCube" >> return ISamplerCube
, keyword "isampler1DArray" >> return ISampler1DArray
, keyword "isampler2DArray" >> return ISampler2DArray
, keyword "usampler1D" >> return USampler1D
, keyword "usampler2D" >> return USampler2D
, keyword "usampler3D" >> return USampler3D
, keyword "usamplerCube" >> return USamplerCube
, keyword "usampler1DArray" >> return USampler1DArray
, keyword "usampler2DArray" >> return USampler2DArray
, keyword "sampler2DRect" >> return Sampler2DRect
, keyword "sampler2DRectShadow" >> return Sampler2DRectShadow
, keyword "isampler2DRect" >> return ISampler2DRect
, keyword "usampler2DRect" >> return USampler2DRect
, keyword "samplerBuffer" >> return SamplerBuffer
, keyword "isamplerBuffer" >> return ISamplerBuffer
, keyword "usamplerBuffer" >> return USamplerBuffer
, keyword "sampler2DMS" >> return Sampler2DMS
, keyword "isampler2DMS" >> return ISampler2DMS
, keyword "usampler2DMS" >> return USampler2DMS
, keyword "sampler2DMSArray" >> return Sampler2DMSArray
, keyword "isampler2DMSArray" >> return ISampler2DMSArray
, keyword "usampler2DMSArray" >> return USampler2DMSArray
, structSpecifier
, identifier >>= return . TypeName -- verify if it is declared
]
precisionQualifier :: P PrecisionQualifier
precisionQualifier = choice
[ keyword "highp" >> return HighP
, keyword "mediump" >> return MediumP
, keyword "lowp" >> return LowP
]
structSpecifier :: P TypeSpecifierNonArray
structSpecifier = do
keyword "struct"
i <- optionMaybe identifier
lbrace
d <- structDeclarationList
rbrace
return $ StructSpecifier i d
structDeclarationList :: P [Field]
structDeclarationList = many1 structDeclaration
structDeclaration :: P Field
structDeclaration = do
q <- optionMaybe typeQualifier
s <- typeSpecifier
l <- structDeclaratorList
semicolon
return $ Field q s l
structDeclaratorList :: P [StructDeclarator]
structDeclaratorList = structDeclarator `sepBy` comma
structDeclarator :: P StructDeclarator
structDeclarator = do
i <- identifier
choice
[ do lbracket
e <- optionMaybe constantExpression
rbracket
return $ StructDeclarator i (Just e)
, return $ StructDeclarator i Nothing
]
initializer :: P Expr
initializer = assignmentExpression
declarationStatement :: P Declaration
declarationStatement = declaration
statement :: P Statement
statement = CompoundStatement `fmap` compoundStatement
<|> simpleStatement
simpleStatement :: P Statement
simpleStatement = choice
[ declarationStatement >>= return . DeclarationStatement
, expressionStatement >>= return . ExpressionStatement
, selectionStatement
, switchStatement
, caseLabel >>= return . CaseLabel
, iterationStatement
, jumpStatement
]
compoundStatement :: P Compound
compoundStatement = choice
[ try (lbrace >> rbrace) >> return (Compound [])
, between lbrace rbrace statementList >>= return . Compound
]
statementNoNewScope :: P Statement
statementNoNewScope = CompoundStatement `fmap` compoundStatementNoNewScope
<|> simpleStatement
compoundStatementNoNewScope :: P Compound
compoundStatementNoNewScope = compoundStatement
statementList :: P [Statement]
statementList = many1 statement
expressionStatement :: P (Maybe Expr)
expressionStatement = choice
[ semicolon >> return Nothing
, expression >>= \e -> semicolon >> return (Just e)
]
selectionStatement :: P Statement
selectionStatement = do
keyword "if"
lparen
c <- expression
rparen
t <- statement
f <- optionMaybe (keyword "else" >> statement)
return $ SelectionStatement c t f
-- inside selectionStatement
-- selectionRestStatement = undefined
condition :: P Condition
condition = choice
[ expression >>= return . Condition
, do t <- fullySpecifiedType
i <- identifier
_ <- lexeme (string "=")
j <- initializer
return $ InitializedCondition t i j
]
switchStatement :: P Statement
switchStatement = do
keyword "switch"
lparen
e <- expression
rparen
lbrace
l <- switchStatementList
rbrace
return $ SwitchStatement e l
switchStatementList :: P [Statement]
switchStatementList = many statement
caseLabel :: P CaseLabel
caseLabel = choice
[ keyword "case" >> expression >>= \e -> colon >> return (Case e)
, keyword "default" >> colon >> return Default
]
iterationStatement :: P Statement
iterationStatement = choice
[ do keyword "while"
lparen
c <- condition
rparen
s <- statementNoNewScope
return $ While c s
, do keyword "do"
s <- statement
keyword "while"
lparen
e <- expression
rparen
semicolon
return $ DoWhile s e
, do keyword "for"
lparen
i <- forInitStatement
c <- optionMaybe condition
semicolon
e <- optionMaybe expression
rparen
s <- statementNoNewScope
return $ For i c e s
]
forInitStatement :: P (Either (Maybe Expr) Declaration)
forInitStatement = (expressionStatement >>= return . Left)
<|> (declarationStatement >>= return . Right)
-- inside iterationStatement
-- conditionOp = undefined
-- inside iterationStatement
-- forRestStatement = undefined
jumpStatement :: P Statement
jumpStatement = choice
[ keyword "continue" >> semicolon >> return Continue
, keyword "break" >> semicolon >> return Break
, try (keyword "return" >> semicolon) >> return (Return Nothing)
, keyword "return" >> expression >>= \e -> semicolon >> return (Return $ Just e)
, keyword "discard" >> semicolon >> return Discard
]
translationUnit :: P TranslationUnit
translationUnit = TranslationUnit `fmap` many1 externalDeclaration
externalDeclaration :: P ExternalDeclaration
externalDeclaration = choice
[ do p <- try functionPrototype
choice
[ semicolon >> return (FunctionDeclaration p)
, compoundStatementNoNewScope >>= return . FunctionDefinition p
]
, Declaration `fmap` declaration
]
-- inside externalDeclaration, used only in tests
functionDefinition :: P ExternalDeclaration
functionDefinition = do
fp <- functionPrototype
cs <- compoundStatementNoNewScope
return $ FunctionDefinition fp cs
| null | https://raw.githubusercontent.com/noteed/language-glsl/103ed6dc5abe0c548ce345b264a9af554ee80c52/Language/GLSL/Parser.hs | haskell | --------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
Reserved words
--------------------------------------------------------------------
List of keywords.
List of keywords reserved for future use.
--------------------------------------------------------------------
Convenience parsers
--------------------------------------------------------------------
Acts like p and discards any following space character.
--------------------------------------------------------------------
Lexical elements (tokens)
--------------------------------------------------------------------
Try to parse a given string, making sure it is not a
prefix of an identifier.
Parses and returns an identifier.
it is to redeclare a predeclared "gl_" identifier.
Try to parse a given string, and allow identifier characters
(or anything else) to directly follow.
--------------------------------------------------------------------
Lexical elements helpers
--------------------------------------------------------------------
--------------------------------------------------------------------
Tables for buildExpressionParser
--------------------------------------------------------------------
--------------------------------------------------------------------
Grammar
--------------------------------------------------------------------
int constant
uint constant
float constant
bool constant
expression within parentheses
Those productions are pushed inside postfixExpression.
functionCall = functionCallOrMethod
functionCallHeaderNoParameters = undefined
functionCallHeaderWithParameters = undefined
TODO if the 'identifier' is declared as a type, should be this case
no need for fieldSelection
inside unaryExpression
unaryOperator = choice
implemented throught buildExpressionParser
multiplicativeExpression = undefined
additiveExpression = undefined
shiftExpression = undefined
relationalExpression = undefined
equalityExpression = undefined
andExpression = undefined
exclusiveOrExpression = undefined
inclusiveOrExpression = undefined
logicalAndExpression = undefined
logicalXorExpression = undefined
logicalOrExpression = undefined
Function definitions are handled in externalDefinition instead.
inside functionDeclarator
functionHeaderWithParameters = undefined
inside parameterDeclaration
parameterDeclarator = undefined
expanding parameterDeclarator and parameterTypeSpecifier, the rule is:
parameterDeclaration:
which is simply
"empty" case handled in the caller
inside parameterDeclaration
FIXME not correct w.r.t. the specs.
The specs allow
int
int, foo
invariant foo, bar[]
and disallow
invariant bar[]
It is not used, it is inside declaration.
initDeclaratorList = undefined
inside initDeclaratorList
singleDeclaration = undefined
implemented directly in layoutQualifier
layoutQualifierIdList = undefined
sto
lay [sto]
int [sto]
inv [sto]
inv int sto
deprecated
deprecated
Basic types, structs, and user-defined types.
verify if it is declared
inside selectionStatement
selectionRestStatement = undefined
inside iterationStatement
conditionOp = undefined
inside iterationStatement
forRestStatement = undefined
inside externalDeclaration, used only in tests | module Language.GLSL.Parser where
import Prelude hiding (break, exponent)
import Text.ParserCombinators.Parsec hiding (State, parse)
import Text.ParserCombinators.Parsec.Expr
import Language.GLSL.Syntax
Parser state , hold a symbol table .
data S = S
type P a = GenParser Char S a
keywords :: [String]
keywords = concat $ map words $
[ "attribute const uniform varying"
, "layout"
, "centroid flat smooth noperspective"
, "break continue do for while switch case default"
, "if else"
, "in out inout"
, "float int void bool true false"
, "invariant"
, "discard return"
, "mat2 mat3 mat4"
, "mat2x2 mat2x3 mat2x4"
, "mat3x2 mat3x3 mat3x4"
, "mat4x2 mat4x3 mat4x4"
, "vec2 vec3 vec4 ivec2 ivec3 ivec4 bvec2 bvec3 bvec4"
, "uint uvec2 uvec3 uvec4"
, "lowp mediump highp precision"
, "sampler1D sampler2D sampler3D samplerCube"
, "sampler1DShadow sampler2DShadow samplerCubeShadow"
, "sampler1DArray sampler2DArray"
, "sampler1DArrayShadow sampler2DArrayShadow"
, "isampler1D isampler2D isampler3D isamplerCube"
, "isampler1DArray isampler2DArray"
, "usampler1D usampler2D usampler3D usamplerCube"
, "usampler1DArray usampler2DArray"
, "sampler2DRect sampler2DRectShadow isampler2DRect usampler2DRect"
, "samplerBuffer isamplerBuffer usamplerBuffer"
, "sampler2DMS isampler2DMS usampler2DMS"
, "sampler2DMSArray isampler2DMSArray usampler2DMSArray"
, "struct"
]
reservedWords :: [String]
reservedWords = concat $ map words $
[ "common partition active"
, "asm"
, "class union enum typedef template this packed"
, "goto"
, "inline noinline volatile public static extern external interface"
, "long short double half fixed unsigned superp"
, "input output"
, "hvec2 hvec3 hvec4 dvec2 dvec3 dvec4 fvec2 fvec3 fvec4"
, "sampler3DRect"
, "filter"
, "image1D image2D image3D imageCube"
, "iimage1D iimage2D iimage3D iimageCube"
, "uimage1D uimage2D uimage3D uimageCube"
, "image1DArray image2DArray"
, "iimage1DArray iimage2DArray uimage1DArray uimage2DArray"
, "image1DShadow image2DShadow"
, "image1DArrayShadow image2DArrayShadow"
, "imageBuffer iimageBuffer uimageBuffer"
, "sizeof cast"
, "namespace using"
, "row_major"
]
comment :: P ()
comment = do
_ <- char '/'
_ <- choice
[ do _ <- char '*'
manyTill anyChar (try $ string "*/")
, do _ <- char '/'
manyTill anyChar ((newline >> return ()) <|> eof)
]
return ()
blank :: P ()
blank = try comment <|> (space >> return ())
lexeme :: P a -> P a
lexeme p = do
x <- p
skipMany blank
return x
parse :: [Char] -> Either ParseError TranslationUnit
parse =
runParser (do {skipMany blank ; r <- translationUnit ; eof ; return r})
S "GLSL"
semicolon :: P ()
semicolon = lexeme $ char ';' >> return ()
comma :: P ()
comma = lexeme $ char ',' >> return ()
colon :: P ()
colon = lexeme $ char ':' >> return ()
lbrace :: P ()
lbrace = lexeme $ char '{' >> return ()
rbrace :: P ()
rbrace = lexeme $ char '}' >> return ()
lbracket :: P ()
lbracket = lexeme $ char '[' >> return ()
rbracket :: P ()
rbracket = lexeme $ char ']' >> return ()
lparen :: P ()
lparen = lexeme $ char '(' >> return ()
rparen :: P ()
rparen = lexeme $ char ')' >> return ()
keyword :: String -> P ()
keyword w = lexeme $ try (string w >> notFollowedBy identifierTail)
TODO an identifier ca n't start with " gl _ " unless
identifier :: P String
identifier = lexeme $ do
h <- identifierHead
t <- many identifierTail
check (h:t)
where check i | i `elem` reservedWords = fail $
i ++ " is reserved"
| i `elem` keywords = fail $
i ++ " is a keyword"
| otherwise = checkUnderscore i i
checkUnderscore i ('_':'_':_) = fail $
i ++ " is reserved (two consecutive underscores)"
checkUnderscore i (_:cs) = checkUnderscore i cs
checkUnderscore i [] = return i
TODO the size of the int should fit its type .
intConstant :: P Expr
intConstant = choice
[ hexadecimal
, octal
, badOctal >> fail "Invalid octal number"
, decimal
]
floatingConstant :: P Expr
floatingConstant = choice
[ floatExponent
, floatPoint
, pointFloat
]
operator :: String -> P String
operator = lexeme . try . string
identifierHead :: P Char
identifierHead = letter <|> char '_'
identifierTail :: P Char
identifierTail = alphaNum <|> char '_'
hexadecimal :: P Expr
hexadecimal = lexeme $ try $ do
_ <- char '0'
_ <- oneOf "Xx"
d <- many1 hexDigit
TODO
return $ IntConstant Hexadecimal $ read ("0x" ++ d)
octal :: P Expr
octal = lexeme $ try $ do
_ <- char '0'
d <- many1 octDigit
TODO
return $ IntConstant Octal $ read ("0o" ++ d)
badOctal :: P ()
badOctal = lexeme $ try $ char '0' >> many1 hexDigit >> return ()
decimal :: P Expr
decimal = lexeme $ try $ do
d <- many1 digit
notFollowedBy (char '.' <|> (exponent >> return ' '))
TODO
return $ IntConstant Decimal $ read d
floatExponent :: P Expr
floatExponent = lexeme $ try $ do
d <- many1 digit
e <- exponent
TODO
return $ FloatConstant $ read $ d ++ e
-- | Float of the form "1." or "1.5", with an optional exponent.
floatPoint :: P Expr
floatPoint = lexeme $ try $ do
  d <- many1 digit
  _ <- char '.'
  d' <- many digit
  -- "1." must still 'read' cleanly, so supply a fractional "0".
  let d'' = if null d' then "0" else d'
  e <- optionMaybe exponent
  -- TODO the resulting value should fit a Float
  return $ FloatConstant $ read $ d ++ "." ++ d'' ++ maybe "" id e
-- | Float of the form ".5", with an optional exponent and an optional
-- F/f suffix.  The suffix is consumed but carries no semantic
-- information, so it is bound to _ (the original bound it to an unused
-- name, triggering a warning).
pointFloat :: P Expr
pointFloat = lexeme $ try $ do
  _ <- char '.'
  d <- many1 digit
  e <- optionMaybe exponent
  _ <- optionMaybe $ oneOf "Ff"
  return $ FloatConstant $ read $ "0." ++ d ++ maybe "" id e
-- | Exponent part of a float ("e"/"E", optional sign, digits),
-- returned normalised to start with a lowercase 'e' so the result can
-- be fed straight to 'read'.  NOTE: shadows 'Prelude.exponent'.
exponent :: P String
exponent = lexeme $ try $ do
  _ <- oneOf "Ee"
  s <- optionMaybe (oneOf "+-")
  d <- many1 digit
  return $ "e" ++ maybe "" (:[]) s ++ d
-- | Left-associative infix operator given by an exact string.
infixLeft :: String -> (a -> a -> a) -> Operator Char S a
infixLeft s r = Infix (lexeme (try $ string s) >> return r) AssocLeft
-- | Like 'infixLeft' but refuses a trailing '=', so e.g. "+" does not
-- match the first half of "+=".
infixLeft' :: String -> (a -> a -> a) -> Operator Char S a
infixLeft' s r = Infix (lexeme (try $ string s >> notFollowedBy (char '=')) >> return r) AssocLeft
-- | Single-character operator that must not be doubled nor followed by
-- '=' (distinguishes '&' from "&&" and "&=").
infixLeft'' :: Char -> (a -> a -> a) -> Operator Char S a
infixLeft'' c r = Infix (lexeme (try $ char c >> notFollowedBy (oneOf (c:"="))) >> return r) AssocLeft
-- | Right-associative infix operator given by an exact string.
infixRight :: String -> (a -> a -> a) -> Operator Char S a
infixRight s r = Infix (lexeme (try $ string s) >> return r) AssocRight
-- | Precedence table for conditional expressions, tightest binding
-- first (multiplicative ... logical-or), mirroring the GLSL grammar.
conditionalTable :: [[Operator Char S Expr]]
conditionalTable =
  [ [infixLeft' "*" Mul, infixLeft' "/" Div, infixLeft' "%" Mod]
  , [infixLeft' "+" Add, infixLeft' "-" Sub]
  , [infixLeft' "<<" LeftShift, infixLeft' ">>" RightShift]
  , [infixLeft' "<" Lt, infixLeft' ">" Gt
    ,infixLeft "<=" Lte, infixLeft ">=" Gte]
  , [infixLeft "==" Equ, infixLeft "!=" Neq]
  , [infixLeft'' '&' BitAnd]
  , [infixLeft' "^" BitXor]
  , [infixLeft'' '|' BitOr]
  , [infixLeft "&&" And]
  , [infixLeft "||" Or]
  ]
-- | Assignment operators; all right-associative, as in C.
assignmentTable :: [[Operator Char S Expr]]
assignmentTable =
  [ [infixRight "=" Equal]
  , [infixRight "+=" AddAssign]
  , [infixRight "-=" SubAssign]
  , [infixRight "*=" MulAssign]
  , [infixRight "/=" DivAssign]
  , [infixRight "%=" ModAssign]
  , [infixRight "<<=" LeftAssign]
  , [infixRight ">>=" RightAssign]
  , [infixRight "&=" AndAssign]
  , [infixRight "^=" XorAssign]
  , [infixRight "|=" OrAssign]
  ]
-- | Top-level expression table: only the comma (sequence) operator,
-- which binds loosest of all.
expressionTable :: [[Operator Char S Expr]]
expressionTable =
  [ [infixLeft "," Sequence]
  ]
-- | Atomic expressions: variables, literals, booleans, and
-- parenthesised expressions.
primaryExpression :: P Expr
primaryExpression = choice
  [ Variable `fmap` try identifier
  , intConstant
  , floatingConstant
  , keyword "true" >> return (BoolConstant True)
  , keyword "false" >> return (BoolConstant False)
  , between lparen rparen expression
  ]
-- | A primary expression (or function call) followed by any number of
-- postfix forms: indexing, method calls, field selection, ++/--.
-- The collected postfix operators are applied left-to-right.
postfixExpression :: P Expr
postfixExpression = do
  e <- try (functionCallGeneric >>= \(i,p) -> return (FunctionCall i p))
       <|> primaryExpression
  p <- many $ choice
    [ between lbracket rbracket integerExpression >>= return . flip Bracket
    , dotFunctionCallGeneric
    , dotFieldSelection
    , operator "++" >> return PostInc
    , operator "--" >> return PostDec
    ]
  return $ foldl (flip ($)) e p
-- | ".f(args)" applied as a method call on the receiver expression.
dotFunctionCallGeneric :: P (Expr -> Expr)
dotFunctionCallGeneric =
  lexeme (try $ string "." >> functionCallGeneric) >>= \(i,p) -> return (\e -> MethodCall e i p)
-- | ".field" selection on the receiver expression.
dotFieldSelection :: P (Expr -> Expr)
dotFieldSelection =
  lexeme (try $ string "." >> identifier) >>= return . flip FieldSelection
-- | The grammar restricts this to integer-valued expressions; that
-- restriction is not enforced syntactically here.
integerExpression :: P Expr
integerExpression = expression
-- functionCallOrMethod = functionCallGeneric <|> postfixExpression DOT functionCallGeneric
-- | A function call: the header (identifier + opening paren), then
-- either the keyword "void" or a comma-separated argument list, then
-- the closing paren.
functionCallGeneric :: P (FunctionIdentifier, Parameters)
functionCallGeneric = do
  i <- functionCallHeader
  p <- choice
    [ keyword "void" >> return ParamVoid
    , assignmentExpression `sepBy` comma >>= return . Params
    ]
  rparen
  return (i, p)
-- Those productions are pushed inside functionCallGeneric.
-- | Function identifier up to and including the opening parenthesis.
functionCallHeader :: P FunctionIdentifier
functionCallHeader = do
  i <- functionIdentifier
  lparen
  return i
functionIdentifier :: P FunctionIdentifier
functionIdentifier = choice
  [ try identifier >>= return . FuncId
  ]
-- | Any number of prefix operators around a postfix expression; the
-- collected prefix operators are applied right-to-left (innermost is
-- the one written closest to the operand).
unaryExpression :: P Expr
unaryExpression = do
  p <- many $ choice
    [ operator "++" >> return PreInc
    , operator "--" >> return PreDec
    , operator "+" >> return UnaryPlus
    , operator "-" >> return UnaryNegate
    , operator "!" >> return UnaryNot
    , operator "~" >> return UnaryOneComplement
    ]
  e <- postfixExpression
  return $ foldr ($) e p
-- | Binary-operator expression with an optional "? e : a" ternary tail.
conditionalExpression :: P Expr
conditionalExpression = do
  loe <- buildExpressionParser conditionalTable unaryExpression
  ter <- optionMaybe $ do
    _ <- lexeme (string "?")
    e <- expression
    _ <- lexeme (string ":")
    a <- assignmentExpression
    return (e, a)
  case ter of
    Nothing -> return loe
    Just (e, a) -> return $ Selection loe e a
assignmentExpression :: P Expr
assignmentExpression = buildExpressionParser assignmentTable conditionalExpression
expression :: P Expr
expression = buildExpressionParser expressionTable assignmentExpression
-- | Constant expressions are conditional expressions in the grammar.
constantExpression :: P Expr
constantExpression = conditionalExpression
-- The grammar includes function definitions here, but we don't
-- do this here because they should occur only at top level (page 28).
-- | Declarations: init-declarator lists, "invariant" declarations,
-- "precision" statements, and qualifier-introduced (uniform) blocks.
declaration :: P Declaration
declaration = choice
  [ try $ do
    t <- fullySpecifiedType
    l <- idecl `sepBy` comma
    semicolon
    return $ InitDeclaration (TypeDeclarator t) l
  , do keyword "invariant"
       i <- idecl `sepBy` comma
       semicolon
       return $ InitDeclaration InvariantDeclarator i
  , do keyword "precision"
       q <- precisionQualifier
       s <- typeSpecifierNoPrecision
       semicolon
       return $ Precision q s
  , do q <- typeQualifier
       choice
         [ semicolon >> return (TQ q)
         , do i <- identifier
              lbrace
              s <- structDeclarationList
              rbrace
              -- optional instance name, itself with an optional
              -- (possibly unsized) array suffix
              m <- optionMaybe $ do
                j <- identifier
                n <- optionMaybe $ between lbracket rbracket $ optionMaybe constantExpression
                return (j,n)
              semicolon
              return $ Block q i s m
         ]
  ]
  -- single declarator: name, optional array suffix, optional initializer
  where idecl = do
          i <- identifier
          m <- optionMaybe $ between lbracket rbracket $
               optionMaybe constantExpression
          j <- optionMaybe $ lexeme (string "=") >> initializer
          return $ InitDecl i m j
-- | Declarator followed by the closing parenthesis.
functionPrototype :: P FunctionPrototype
functionPrototype = do
  (t, i, p) <- functionDeclarator
  rparen
  return $ FuncProt t i p
functionDeclarator :: P (FullType, String, [ParameterDeclaration])
functionDeclarator = do
  (t, i) <- functionHeader
  p <- parameterDeclaration `sepBy` comma
  return (t, i, p)
-- | Return type and name, up to and including the opening parenthesis.
functionHeader :: P (FullType, String)
functionHeader = do
  t <- fullySpecifiedType
  i <- identifier
  lparen
  return (t, i)
-- parameterTypeQualifier [parameterQualifier] identifier[[e]]
-- [parameterQualifier] identifier[[e]]
-- parameterTypeQualifier [parameterQualifier]
-- [parameterQualifier]
-- i.e. [parameterTypeQualifier] [parameterQualifier] [identifier[[e]]]
-- | Parameter declaration: optional const qualifier, optional
-- in/out/inout qualifier, a type, and an optional name with an
-- optional array-size suffix.  The array-size parse was missing in
-- this copy ('b' was used without being bound); restored here.
parameterDeclaration :: P ParameterDeclaration
parameterDeclaration = do
  tq <- optionMaybe parameterTypeQualifier
  q <- optionMaybe parameterQualifier
  s <- typeSpecifier
  m <- optionMaybe $ do
    i <- identifier
    -- FIXME can't the bracket be empty, i.e. a []?
    b <- optionMaybe $ between lbracket rbracket constantExpression
    return (i,b)
  return $ ParameterDeclaration tq q s m
-- | in/out/inout qualifier; "inout" must be tried before "in" because
-- they share a prefix.
-- NOTE(review): plain 'string' (not 'keyword') is used, so "in" would
-- also match the prefix of a longer identifier — confirm callers rely
-- on the subsequent type specifier to disambiguate.
parameterQualifier :: P ParameterQualifier
parameterQualifier = choice
  [ (try . lexeme . string) "inout" >> return InOutParameter
  , (try . lexeme . string) "in" >> return InParameter
  , (try . lexeme . string) "out" >> return OutParameter
  ]
-- parameterTypeSpecifier = typeSpecifier
-- | A type, optionally preceded by a type qualifier.
fullySpecifiedType :: P FullType
fullySpecifiedType = choice
  [ try typeSpecifier >>= return . FullType Nothing
  , do q <- typeQualifier
       s <- typeSpecifier
       return $ FullType (Just q) s
  ]
invariantQualifier :: P InvariantQualifier
invariantQualifier = keyword "invariant" >> return Invariant
interpolationQualifier :: P InterpolationQualifier
interpolationQualifier = choice
  [ keyword "smooth" >> return Smooth
  , keyword "flat" >> return Flat
  , keyword "noperspective" >> return NoPerspective
  ]
-- | "layout ( id [= int], ... )"
layoutQualifier :: P LayoutQualifier
layoutQualifier = do
  keyword "layout"
  lparen
  q <- layoutQualifierId `sepBy` comma
  rparen
  return $ Layout q
layoutQualifierId :: P LayoutQualifierId
layoutQualifierId = do
  i <- identifier
  c <- optionMaybe $ lexeme (string "=") >> intConstant
  return $ LayoutQualId i c
parameterTypeQualifier :: P ParameterTypeQualifier
parameterTypeQualifier = keyword "const" >> return ConstParameter
-- | The legal combinations of storage / layout / interpolation /
-- invariant qualifiers, mapped onto the TypeQual* constructors.
typeQualifier :: P TypeQualifier
typeQualifier = choice
  [ do s <- storageQualifier
       return $ TypeQualSto s
  , do l <- layoutQualifier
       s <- optionMaybe storageQualifier
       return $ TypeQualLay l s
  , do i <- interpolationQualifier
       s <- optionMaybe storageQualifier
       return $ TypeQualInt i s
  , do i <- invariantQualifier
       choice
         [ do j <- interpolationQualifier
              s <- storageQualifier
              return $ TypeQualInv3 i j s
         , do s <- optionMaybe storageQualifier
              return $ TypeQualInv i s
         ]
  ]
-- TODO see 4.3 for restrictions
-- | Storage qualifiers.  This copy had a malformed 'choice'
-- application after "centroid" (missing list bracket) and a garbled
-- comment line; both repaired here.
storageQualifier :: P StorageQualifier
storageQualifier = choice
  [ keyword "const" >> return Const
  -- TODO "attribute"/"varying" are vertex only and deprecated
  , keyword "in" >> return In
  , keyword "out" >> return Out
  , keyword "centroid" >> choice
    [ keyword "in" >> return CentroidIn
    , keyword "out" >> return CentroidOut
    ]
  , keyword "uniform" >> return Uniform
  ]
-- | Type specifier with an optional leading precision qualifier.
typeSpecifier :: P TypeSpecifier
typeSpecifier = choice
  [ do q <- try precisionQualifier
       s <- typeSpecifierNoPrecision
       return $ TypeSpec (Just q) s
  , typeSpecifierNoPrecision >>= return . TypeSpec Nothing
  ]
-- | Non-array type followed by an optional array suffix:
-- "[]" => Just Nothing, "[e]" => Just (Just e), none => Nothing.
typeSpecifierNoPrecision :: P TypeSpecifierNoPrecision
typeSpecifierNoPrecision = do
  s <- typeSpecifierNonArray
  choice
    [ try (lbracket >> rbracket) >> return (TypeSpecNoPrecision s (Just Nothing))
    , lbracket >> constantExpression >>= \c -> rbracket >> return (TypeSpecNoPrecision s (Just $ Just c))
    , return $ TypeSpecNoPrecision s Nothing
    ]
-- | All the built-in non-array type names, plus struct specifiers.
-- Each alternative uses 'keyword' (which backtracks via 'try'), so the
-- order of entries does not matter for correctness.
typeSpecifierNonArray :: P TypeSpecifierNonArray
typeSpecifierNonArray = choice
  [ keyword "void" >> return Void
  , keyword "float" >> return Float
  , keyword "int" >> return Int
  , keyword "uint" >> return UInt
  , keyword "bool" >> return Bool
  , keyword "vec2" >> return Vec2
  , keyword "vec3" >> return Vec3
  , keyword "vec4" >> return Vec4
  , keyword "bvec2" >> return BVec2
  , keyword "bvec3" >> return BVec3
  , keyword "bvec4" >> return BVec4
  , keyword "ivec2" >> return IVec2
  , keyword "ivec3" >> return IVec3
  , keyword "ivec4" >> return IVec4
  , keyword "uvec2" >> return UVec2
  , keyword "uvec3" >> return UVec3
  , keyword "uvec4" >> return UVec4
  , keyword "mat2" >> return Mat2
  , keyword "mat3" >> return Mat3
  , keyword "mat4" >> return Mat4
  , keyword "mat2x2" >> return Mat2x2
  , keyword "mat2x3" >> return Mat2x3
  , keyword "mat2x4" >> return Mat2x4
  , keyword "mat3x2" >> return Mat3x2
  , keyword "mat3x3" >> return Mat3x3
  , keyword "mat3x4" >> return Mat3x4
  , keyword "mat4x2" >> return Mat4x2
  , keyword "mat4x3" >> return Mat4x3
  , keyword "mat4x4" >> return Mat4x4
  , keyword "sampler1D" >> return Sampler1D
  , keyword "sampler2D" >> return Sampler2D
  , keyword "sampler3D" >> return Sampler3D
  , keyword "samplerCube" >> return SamplerCube
  , keyword "sampler1DShadow" >> return Sampler1DShadow
  , keyword "sampler2DShadow" >> return Sampler2DShadow
  , keyword "samplerCubeShadow" >> return SamplerCubeShadow
  , keyword "sampler1DArray" >> return Sampler1DArray
  , keyword "sampler2DArray" >> return Sampler2DArray
  , keyword "sampler1DArrayShadow" >> return Sampler1DArrayShadow
  , keyword "sampler2DArrayShadow" >> return Sampler2DArrayShadow
  , keyword "isampler1D" >> return ISampler1D
  , keyword "isampler2D" >> return ISampler2D
  , keyword "isampler3D" >> return ISampler3D
  , keyword "isamplerCube" >> return ISamplerCube
  , keyword "isampler1DArray" >> return ISampler1DArray
  , keyword "isampler2DArray" >> return ISampler2DArray
  , keyword "usampler1D" >> return USampler1D
  , keyword "usampler2D" >> return USampler2D
  , keyword "usampler3D" >> return USampler3D
  , keyword "usamplerCube" >> return USamplerCube
  , keyword "usampler1DArray" >> return USampler1DArray
  , keyword "usampler2DArray" >> return USampler2DArray
  , keyword "sampler2DRect" >> return Sampler2DRect
  , keyword "sampler2DRectShadow" >> return Sampler2DRectShadow
  , keyword "isampler2DRect" >> return ISampler2DRect
  , keyword "usampler2DRect" >> return USampler2DRect
  , keyword "samplerBuffer" >> return SamplerBuffer
  , keyword "isamplerBuffer" >> return ISamplerBuffer
  , keyword "usamplerBuffer" >> return USamplerBuffer
  , keyword "sampler2DMS" >> return Sampler2DMS
  , keyword "isampler2DMS" >> return ISampler2DMS
  , keyword "usampler2DMS" >> return USampler2DMS
  , keyword "sampler2DMSArray" >> return Sampler2DMSArray
  , keyword "isampler2DMSArray" >> return ISampler2DMSArray
  , keyword "usampler2DMSArray" >> return USampler2DMSArray
  , structSpecifier
  ]
precisionQualifier :: P PrecisionQualifier
precisionQualifier = choice
  [ keyword "highp" >> return HighP
  , keyword "mediump" >> return MediumP
  , keyword "lowp" >> return LowP
  ]
-- | "struct [name] { fields }" — the name is optional.
structSpecifier :: P TypeSpecifierNonArray
structSpecifier = do
  keyword "struct"
  i <- optionMaybe identifier
  lbrace
  d <- structDeclarationList
  rbrace
  return $ StructSpecifier i d
structDeclarationList :: P [Field]
structDeclarationList = many1 structDeclaration
-- | One field line: optional qualifier, type, declarators, semicolon.
structDeclaration :: P Field
structDeclaration = do
  q <- optionMaybe typeQualifier
  s <- typeSpecifier
  l <- structDeclaratorList
  semicolon
  return $ Field q s l
structDeclaratorList :: P [StructDeclarator]
structDeclaratorList = structDeclarator `sepBy` comma
-- | Field name with an optional (possibly unsized) array suffix.
structDeclarator :: P StructDeclarator
structDeclarator = do
  i <- identifier
  choice
    [ do lbracket
         e <- optionMaybe constantExpression
         rbracket
         return $ StructDeclarator i (Just e)
    , return $ StructDeclarator i Nothing
    ]
initializer :: P Expr
initializer = assignmentExpression
declarationStatement :: P Declaration
declarationStatement = declaration
-- | A statement is either a new-scope compound or a simple statement.
statement :: P Statement
statement = CompoundStatement `fmap` compoundStatement
        <|> simpleStatement
simpleStatement :: P Statement
simpleStatement = choice
  [ declarationStatement >>= return . DeclarationStatement
  , expressionStatement >>= return . ExpressionStatement
  , selectionStatement
  , switchStatement
  , caseLabel >>= return . CaseLabel
  , iterationStatement
  , jumpStatement
  ]
-- | "{ statements }"; the empty block is tried first so that
-- 'statementList' (many1) is only attempted on non-empty bodies.
compoundStatement :: P Compound
compoundStatement = choice
  [ try (lbrace >> rbrace) >> return (Compound [])
  , between lbrace rbrace statementList >>= return . Compound
  ]
statementNoNewScope :: P Statement
statementNoNewScope = CompoundStatement `fmap` compoundStatementNoNewScope
                  <|> simpleStatement
-- | Scoping is not tracked by this parser, so this is just
-- 'compoundStatement'.
compoundStatementNoNewScope :: P Compound
compoundStatementNoNewScope = compoundStatement
statementList :: P [Statement]
statementList = many1 statement
-- | ";" (empty) or "expr ;".
expressionStatement :: P (Maybe Expr)
expressionStatement = choice
  [ semicolon >> return Nothing
  , expression >>= \e -> semicolon >> return (Just e)
  ]
-- | "if (c) t [else f]".
selectionStatement :: P Statement
selectionStatement = do
  keyword "if"
  lparen
  c <- expression
  rparen
  t <- statement
  f <- optionMaybe (keyword "else" >> statement)
  return $ SelectionStatement c t f
-- | A plain expression or a "type name = init" declaration condition.
condition :: P Condition
condition = choice
  [ expression >>= return . Condition
  , do t <- fullySpecifiedType
       i <- identifier
       _ <- lexeme (string "=")
       j <- initializer
       return $ InitializedCondition t i j
  ]
-- | "switch (e) { statements }".
switchStatement :: P Statement
switchStatement = do
  keyword "switch"
  lparen
  e <- expression
  rparen
  lbrace
  l <- switchStatementList
  rbrace
  return $ SwitchStatement e l
switchStatementList :: P [Statement]
switchStatementList = many statement
-- | "case e:" or "default:".
caseLabel :: P CaseLabel
caseLabel = choice
  [ keyword "case" >> expression >>= \e -> colon >> return (Case e)
  , keyword "default" >> colon >> return Default
  ]
-- | while / do-while / for loops.
iterationStatement :: P Statement
iterationStatement = choice
  [ do keyword "while"
       lparen
       c <- condition
       rparen
       s <- statementNoNewScope
       return $ While c s
  , do keyword "do"
       s <- statement
       keyword "while"
       lparen
       e <- expression
       rparen
       semicolon
       return $ DoWhile s e
  , do keyword "for"
       lparen
       i <- forInitStatement
       c <- optionMaybe condition
       semicolon
       e <- optionMaybe expression
       rparen
       s <- statementNoNewScope
       return $ For i c e s
  ]
-- | The first clause of a for loop: an expression statement (which may
-- be just ";") or a declaration.
forInitStatement :: P (Either (Maybe Expr) Declaration)
forInitStatement = (expressionStatement >>= return . Left)
               <|> (declarationStatement >>= return . Right)
-- | continue / break / return [e] / discard.  The bare "return ;" is
-- tried first (with backtracking) before "return e ;".
jumpStatement :: P Statement
jumpStatement = choice
  [ keyword "continue" >> semicolon >> return Continue
  , keyword "break" >> semicolon >> return Break
  , try (keyword "return" >> semicolon) >> return (Return Nothing)
  , keyword "return" >> expression >>= \e -> semicolon >> return (Return $ Just e)
  , keyword "discard" >> semicolon >> return Discard
  ]
translationUnit :: P TranslationUnit
translationUnit = TranslationUnit `fmap` many1 externalDeclaration
-- | Top-level: a prototype followed by ";" (declaration) or a body
-- (definition), or any other declaration.
externalDeclaration :: P ExternalDeclaration
externalDeclaration = choice
  [ do p <- try functionPrototype
       choice
         [ semicolon >> return (FunctionDeclaration p)
         , compoundStatementNoNewScope >>= return . FunctionDefinition p
         ]
  , Declaration `fmap` declaration
  ]
-- | Standalone definition parser; 'externalDeclaration' inlines this
-- logic, so this is kept for direct use.
functionDefinition :: P ExternalDeclaration
functionDefinition = do
  fp <- functionPrototype
  cs <- compoundStatementNoNewScope
  return $ FunctionDefinition fp cs
|
8ed1817b289507aa74a5c06a486c1ade7cdac524b8aac129d21db0e488baec08 | vincenthz/hs-git | FileReader.hs | -- |
-- Module      : Data.Git.Storage.FileReader
-- License : BSD-style
-- Maintainer  : Vincent Hanquez <vincent@snarc.org>
-- Stability : experimental
-- Portability : unix
--
{-# LANGUAGE DeriveDataTypeable #-}
module Data.Git.Storage.FileReader
( FileReader
, fileReaderNew
, fileReaderClose
, withFileReader
, withFileReaderDecompress
, fileReaderGetPos
, fileReaderGet
, fileReaderGetLBS
, fileReaderGetBS
, fileReaderGetRef
, fileReaderGetVLF
, fileReaderSeek
, fileReaderParse
, fileReaderInflateToSize
) where
import Control.Exception (bracket, throwIO)
import Data.ByteString (ByteString)
import Data.ByteString.Unsafe
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as L
import Data.ByteString.Lazy.Internal (defaultChunkSize)
import Data.IORef
import Data.Git.Imports
import Data.Git.OS
import Data.Git.Ref
import qualified Data.Git.Parser as P
import Data.Data
import Data.Word
import Codec.Zlib
import Codec.Zlib.Lowlevel
import Crypto.Hash
import Foreign.ForeignPtr
import qualified Control.Exception as E
import System.IO (hSeek, SeekMode(..))
-- | A buffered reader over a 'Handle', optionally zlib-inflating the
-- bytes as they are read.
data FileReader = FileReader
        { fbHandle     :: Handle                   -- ^ underlying handle
        , fbUseInflate :: Bool                     -- ^ inflate bytes on read
        , fbInflate    :: Inflate                  -- ^ zlib inflate state
        , fbRemaining  :: IORef (Maybe ByteString) -- ^ produced-but-unconsumed bytes
        , fbPos        :: IORef Word64             -- ^ bytes produced so far
        }
-- | Raised when inflation fails: target size, bytes still wanted, and
-- the underlying error rendered as a string.
data InflateException = InflateException Word64 Word64 String
    deriving (Show,Eq,Typeable)
instance E.Exception InflateException
-- | Create a reader over @handle@; when @decompress@ is set, bytes are
-- run through zlib inflate as they are read.
fileReaderNew :: Bool -> Handle -> IO FileReader
fileReaderNew decompress handle = do
        ref <- newIORef (Just B.empty)
        pos <- newIORef 0
        inflate <- initInflate defaultWindowBits
        return $ FileReader handle decompress inflate ref pos
fileReaderClose :: FileReader -> IO ()
fileReaderClose = hClose . fbHandle
-- | Open @path@ and run @f@ with a raw (non-inflating) reader; the
-- handle is closed on exit, including on exception.
withFileReader :: LocalPath -> (FileReader -> IO a) -> IO a
withFileReader path f =
        bracket (openFile path ReadMode) (hClose) $ \handle ->
        bracket (fileReaderNew False handle) (\_ -> return ()) f
-- | Like 'withFileReader' but with zlib inflation enabled.
withFileReaderDecompress :: LocalPath -> (FileReader -> IO a) -> IO a
withFileReaderDecompress path f =
        bracket (openFile path ReadMode) (hClose) $ \handle ->
        bracket (fileReaderNew True handle) (\_ -> return ()) f
-- | Produce the next chunk from the handle (inflating when configured),
-- bumping the produced-bytes counter; Nothing at end of input.
fileReaderGetNext :: FileReader -> IO (Maybe ByteString)
fileReaderGetNext fb = do
        bs <- if fbUseInflate fb then inflateTillPop else B.hGet (fbHandle fb) 8192
        modifyIORef (fbPos fb) (\pos -> pos + (fromIntegral $ B.length bs))
        return $ nothingOnNull bs
  where
        -- feed 4k raw chunks into zlib until it pops some output, or
        -- finish the stream on EOF
        inflateTillPop = do
                b <- B.hGet (fbHandle fb) 4096
                if B.null b
                        then finishInflate (fbInflate fb)
                        else (>>= maybe inflateTillPop return) =<< feedInflate (fbInflate fb) b
        nothingOnNull b
                | B.null b  = Nothing
                | otherwise = Just b
-- | Logical position: bytes produced minus bytes still buffered.
fileReaderGetPos :: FileReader -> IO Word64
fileReaderGetPos fr = do
        storeLeft <- maybe 0 B.length <$> readIORef (fbRemaining fr)
        pos       <- readIORef (fbPos fr)
        return (pos - fromIntegral storeLeft)
-- | Refill the buffered-bytes ref from the next chunk.
fileReaderFill :: FileReader -> IO ()
fileReaderFill fb = fileReaderGetNext fb >>= writeIORef (fbRemaining fb)
-- | Read exactly @size@ bytes as a list of chunks, refilling the
-- buffer as needed.
fileReaderGet :: Int -> FileReader -> IO [ByteString]
fileReaderGet size fb@(FileReader { fbRemaining = ref }) = loop size
  where
        loop left = do
                b <- maybe B.empty id <$> readIORef ref
                if B.length b >= left
                        then do
                                let (b1, b2) = B.splitAt left b
                                writeIORef ref (Just b2)
                                return [b1]
                        else do
                                let nleft = left - B.length b
                                fileReaderFill fb
                                liftM (b :) (loop nleft)
fileReaderGetLBS :: Int -> FileReader -> IO L.ByteString
fileReaderGetLBS size fb = L.fromChunks <$> fileReaderGet size fb
fileReaderGetBS :: Int -> FileReader -> IO ByteString
fileReaderGetBS size fb = B.concat <$> fileReaderGet size fb
-- | Read a binary ref of the digest size of @alg@.
fileReaderGetRef :: HashAlgorithm hash => hash -> FileReader -> IO (Ref hash)
fileReaderGetRef alg fr = fromBinary <$> fileReaderGetBS (hashDigestSize alg) fr
-- | seek in a handle, and reset the remaining buffer to empty.
fileReaderSeek :: FileReader -> Word64 -> IO ()
fileReaderSeek (FileReader { fbHandle = handle, fbRemaining = ref, fbPos = pos }) absPos = do
        writeIORef ref (Just B.empty) >> writeIORef pos absPos >> hSeek handle AbsoluteSeek (fromIntegral absPos)
-- | parse from a filebuffer
fileReaderParse :: FileReader -> P.Parser a -> IO a
fileReaderParse fr@(FileReader { fbRemaining = ref }) parseF = do
        initBS <- maybe B.empty id <$> readIORef ref
        result <- P.parseFeed (fileReaderGetNext fr) parseF initBS
        case result of
                P.ParseOK remaining a -> writeIORef ref (Just remaining) >> return a
                P.ParseMore _    -> error "parsing failed: partial with a handle, reached EOF ?"
                P.ParseFail err  -> error ("parsing failed: " ++ err)
-- | get a Variable Length Field. get bytes as long as the MSB is set, and one byte after
-- Delegates to the parser's variable-length-field combinator.
fileReaderGetVLF :: FileReader -> IO [Word8]
fileReaderGetVLF fr = fileReaderParse fr P.vlf
-- | Inflate bytes from the reader until exactly @outputSize@
-- decompressed bytes have been produced, returned lazily.  Any zlib
-- failure is re-raised as 'InflateException' carrying the target size
-- and the bytes still wanted.  (The garbled commented-out debug lines
-- in this copy have been restored as proper comments.)
fileReaderInflateToSize :: FileReader -> Word64 -> IO L.ByteString
fileReaderInflateToSize fb@(FileReader { fbRemaining = ref }) outputSize = do
        --pos <- fileReaderGetPos fb
        --putStrLn ("inflate to size " ++ show outputSize ++ " " ++ show pos)
        inflate <- inflateNew
        l <- loop inflate outputSize
        --posend <- fileReaderGetPos fb
        --putStrLn ("inflated input " ++ show posend)
        return $ L.fromChunks l
  where loop inflate left = do
                rbs <- readIORef ref
                -- inflate at most 16k per round
                let maxToInflate = min left (16 * 1024)
                let lastBlock = if left == maxToInflate then True else False
                (dbs,remaining) <- inflateToSize inflate (fromIntegral maxToInflate) lastBlock (maybe B.empty id rbs) (maybe B.empty id <$> fileReaderGetNext fb)
                                   `E.catch` augmentAndRaise left
                writeIORef ref (Just remaining)
                let nleft = left - fromIntegral (B.length dbs)
                if nleft > 0
                        then liftM (dbs:) (loop inflate nleft)
                        else return [dbs]
        augmentAndRaise :: Word64 -> E.SomeException -> IO a
        augmentAndRaise left exn = throwIO $ InflateException outputSize left (show exn)
-- lowlevel helpers to inflate only to a specific size.
-- | Fresh zlib stream with a finalizer attached.
inflateNew :: IO (ForeignPtr ZStreamStruct)
inflateNew = do
        zstr <- zstreamNew
        inflateInit2 zstr defaultWindowBits
        newForeignPtr c_free_z_stream_inflate zstr
-- | Inflate up to @sz@ bytes from @ibs@ (pulling more input with
-- @nextBs@ as needed); returns the decompressed bytes and the leftover
-- input.  @isLastBlock@ switches the stop condition to stream-end.
-- NOTE(review): the output is packed from the whole @boundSz@ buffer —
-- confirm callers always request exactly the bytes produced.
inflateToSize :: ForeignPtr ZStreamStruct -> Int -> Bool -> ByteString -> IO ByteString -> IO (ByteString, ByteString)
inflateToSize inflate sz isLastBlock ibs nextBs = withForeignPtr inflate $ \zstr -> do
        let boundSz = min defaultChunkSize sz
        -- create an output buffer
        fbuff <- mallocForeignPtrBytes boundSz
        withForeignPtr fbuff $ \buff -> do
                c_set_avail_out zstr buff (fromIntegral boundSz)
                rbs <- loop zstr ibs
                bs  <- B.packCStringLen (buff, boundSz)
                return (bs, rbs)
  where
        loop zstr nbs = do
                (ai, streamEnd) <- inflateOneInput zstr nbs
                ao <- c_get_avail_out zstr
                if (isLastBlock && streamEnd) || (not isLastBlock && ao == 0)
                        then return $ bsTakeLast ai nbs
                        else do
                                --when (ai /= 0) $ error ("input not consumed completely: ai" ++ show ai)
                                (if ai == 0
                                        then nextBs
                                        else return (bsTakeLast ai nbs)) >>= loop zstr
        -- feed one input buffer; -5 (Z_BUF_ERROR) is tolerated here
        inflateOneInput zstr bs = unsafeUseAsCStringLen bs $ \(istr, ilen) -> do
                c_set_avail_in zstr istr $ fromIntegral ilen
                r <- c_call_inflate_noflush zstr
                when (r < 0 && r /= (-5)) $ do
                        throwIO $ ZlibException $ fromIntegral r
                ai <- c_get_avail_in zstr
                return (ai, r == 1)
        bsTakeLast len bs = B.drop (B.length bs - fromIntegral len) bs
| null | https://raw.githubusercontent.com/vincenthz/hs-git/77e4f2f6446733c9b3eec9f916ed0fb0d4fd5a04/Data/Git/Storage/FileReader.hs | haskell | |
License : BSD-style
Stability : experimental
Portability : unix
# LANGUAGE DeriveDataTypeable #
| seek in a handle, and reset the remaining buffer to empty.
| parse from a filebuffer
pos <- fileReaderGetPos fb
posend <- fileReaderGetPos fb
lowlevel helpers to inflate only to a specific size.
create an output buffer
when (ai /= 0) $ error ("input not consumed completly: ai" ++ show ai) | Module : Data . Git . Storage . FileReader
Maintainer : < >
module Data.Git.Storage.FileReader
( FileReader
, fileReaderNew
, fileReaderClose
, withFileReader
, withFileReaderDecompress
, fileReaderGetPos
, fileReaderGet
, fileReaderGetLBS
, fileReaderGetBS
, fileReaderGetRef
, fileReaderGetVLF
, fileReaderSeek
, fileReaderParse
, fileReaderInflateToSize
) where
import Control.Exception (bracket, throwIO)
import Data.ByteString (ByteString)
import Data.ByteString.Unsafe
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as L
import Data.ByteString.Lazy.Internal (defaultChunkSize)
import Data.IORef
import Data.Git.Imports
import Data.Git.OS
import Data.Git.Ref
import qualified Data.Git.Parser as P
import Data.Data
import Data.Word
import Codec.Zlib
import Codec.Zlib.Lowlevel
import Crypto.Hash
import Foreign.ForeignPtr
import qualified Control.Exception as E
import System.IO (hSeek, SeekMode(..))
data FileReader = FileReader
{ fbHandle :: Handle
, fbUseInflate :: Bool
, fbInflate :: Inflate
, fbRemaining :: IORef (Maybe ByteString)
, fbPos :: IORef Word64
}
data InflateException = InflateException Word64 Word64 String
deriving (Show,Eq,Typeable)
instance E.Exception InflateException
fileReaderNew :: Bool -> Handle -> IO FileReader
fileReaderNew decompress handle = do
ref <- newIORef (Just B.empty)
pos <- newIORef 0
inflate <- initInflate defaultWindowBits
return $ FileReader handle decompress inflate ref pos
fileReaderClose :: FileReader -> IO ()
fileReaderClose = hClose . fbHandle
withFileReader :: LocalPath -> (FileReader -> IO a) -> IO a
withFileReader path f =
bracket (openFile path ReadMode) (hClose) $ \handle ->
bracket (fileReaderNew False handle) (\_ -> return ()) f
withFileReaderDecompress :: LocalPath -> (FileReader -> IO a) -> IO a
withFileReaderDecompress path f =
bracket (openFile path ReadMode) (hClose) $ \handle ->
bracket (fileReaderNew True handle) (\_ -> return ()) f
fileReaderGetNext :: FileReader -> IO (Maybe ByteString)
fileReaderGetNext fb = do
bs <- if fbUseInflate fb then inflateTillPop else B.hGet (fbHandle fb) 8192
modifyIORef (fbPos fb) (\pos -> pos + (fromIntegral $ B.length bs))
return $ nothingOnNull bs
where
inflateTillPop = do
b <- B.hGet (fbHandle fb) 4096
if B.null b
then finishInflate (fbInflate fb)
else (>>= maybe inflateTillPop return) =<< feedInflate (fbInflate fb) b
nothingOnNull b
| B.null b = Nothing
| otherwise = Just b
fileReaderGetPos :: FileReader -> IO Word64
fileReaderGetPos fr = do
storeLeft <- maybe 0 B.length <$> readIORef (fbRemaining fr)
pos <- readIORef (fbPos fr)
return (pos - fromIntegral storeLeft)
fileReaderFill :: FileReader -> IO ()
fileReaderFill fb = fileReaderGetNext fb >>= writeIORef (fbRemaining fb)
fileReaderGet :: Int -> FileReader -> IO [ByteString]
fileReaderGet size fb@(FileReader { fbRemaining = ref }) = loop size
where
loop left = do
b <- maybe B.empty id <$> readIORef ref
if B.length b >= left
then do
let (b1, b2) = B.splitAt left b
writeIORef ref (Just b2)
return [b1]
else do
let nleft = left - B.length b
fileReaderFill fb
liftM (b :) (loop nleft)
fileReaderGetLBS :: Int -> FileReader -> IO L.ByteString
fileReaderGetLBS size fb = L.fromChunks <$> fileReaderGet size fb
fileReaderGetBS :: Int -> FileReader -> IO ByteString
fileReaderGetBS size fb = B.concat <$> fileReaderGet size fb
fileReaderGetRef :: HashAlgorithm hash => hash -> FileReader -> IO (Ref hash)
fileReaderGetRef alg fr = fromBinary <$> fileReaderGetBS (hashDigestSize alg) fr
fileReaderSeek :: FileReader -> Word64 -> IO ()
fileReaderSeek (FileReader { fbHandle = handle, fbRemaining = ref, fbPos = pos }) absPos = do
writeIORef ref (Just B.empty) >> writeIORef pos absPos >> hSeek handle AbsoluteSeek (fromIntegral absPos)
fileReaderParse :: FileReader -> P.Parser a -> IO a
fileReaderParse fr@(FileReader { fbRemaining = ref }) parseF = do
initBS <- maybe B.empty id <$> readIORef ref
result <- P.parseFeed (fileReaderGetNext fr) parseF initBS
case result of
P.ParseOK remaining a -> writeIORef ref (Just remaining) >> return a
P.ParseMore _ -> error "parsing failed: partial with a handle, reached EOF ?"
P.ParseFail err -> error ("parsing failed: " ++ err)
| get a Variable Length Field . get byte as long as MSB is set , and one byte after
fileReaderGetVLF :: FileReader -> IO [Word8]
fileReaderGetVLF fr = fileReaderParse fr P.vlf
fileReaderInflateToSize :: FileReader -> Word64 -> IO L.ByteString
fileReaderInflateToSize fb@(FileReader { fbRemaining = ref }) outputSize = do
( " inflate to size " + + show outputSize + + " " + + show pos )
inflate <- inflateNew
l <- loop inflate outputSize
( " inflated input " + + show posend )
return $ L.fromChunks l
where loop inflate left = do
rbs <- readIORef ref
let maxToInflate = min left (16 * 1024)
let lastBlock = if left == maxToInflate then True else False
(dbs,remaining) <- inflateToSize inflate (fromIntegral maxToInflate) lastBlock (maybe B.empty id rbs) (maybe B.empty id <$> fileReaderGetNext fb)
`E.catch` augmentAndRaise left
writeIORef ref (Just remaining)
let nleft = left - fromIntegral (B.length dbs)
if nleft > 0
then liftM (dbs:) (loop inflate nleft)
else return [dbs]
augmentAndRaise :: Word64 -> E.SomeException -> IO a
augmentAndRaise left exn = throwIO $ InflateException outputSize left (show exn)
inflateNew :: IO (ForeignPtr ZStreamStruct)
inflateNew = do
zstr <- zstreamNew
inflateInit2 zstr defaultWindowBits
newForeignPtr c_free_z_stream_inflate zstr
inflateToSize :: ForeignPtr ZStreamStruct -> Int -> Bool -> ByteString -> IO ByteString -> IO (ByteString, ByteString)
inflateToSize inflate sz isLastBlock ibs nextBs = withForeignPtr inflate $ \zstr -> do
let boundSz = min defaultChunkSize sz
fbuff <- mallocForeignPtrBytes boundSz
withForeignPtr fbuff $ \buff -> do
c_set_avail_out zstr buff (fromIntegral boundSz)
rbs <- loop zstr ibs
bs <- B.packCStringLen (buff, boundSz)
return (bs, rbs)
where
loop zstr nbs = do
(ai, streamEnd) <- inflateOneInput zstr nbs
ao <- c_get_avail_out zstr
if (isLastBlock && streamEnd) || (not isLastBlock && ao == 0)
then return $ bsTakeLast ai nbs
else do
(if ai == 0
then nextBs
else return (bsTakeLast ai nbs)) >>= loop zstr
inflateOneInput zstr bs = unsafeUseAsCStringLen bs $ \(istr, ilen) -> do
c_set_avail_in zstr istr $ fromIntegral ilen
r <- c_call_inflate_noflush zstr
when (r < 0 && r /= (-5)) $ do
throwIO $ ZlibException $ fromIntegral r
ai <- c_get_avail_in zstr
return (ai, r == 1)
bsTakeLast len bs = B.drop (B.length bs - fromIntegral len) bs
|
cb90df203de1a792c763e8c116e53ae6c4c11a85d2c2277e6976a9de67f60443 | soarlab/FPTaylor | opt_nlopt.mli | (* ========================================================================== *)
: A Tool for Rigorous Estimation of Round - off Errors
(* *)
Author : , University of Utah
(* *)
This file is distributed under the terms of the MIT license
(* ========================================================================== *)
(* -------------------------------------------------------------------------- *)
Optimization with the library
(* -------------------------------------------------------------------------- *)
open Opt_common
val min_max_expr : opt_pars -> Expr.constraints -> Expr.expr -> float * float
| null | https://raw.githubusercontent.com/soarlab/FPTaylor/efbbc83970fe3c9f4cb33fafbbe1050dd18749cd/opt_nlopt.mli | ocaml | ==========================================================================
==========================================================================
--------------------------------------------------------------------------
-------------------------------------------------------------------------- | : A Tool for Rigorous Estimation of Round - off Errors
Author : , University of Utah
This file is distributed under the terms of the MIT license
Optimization with the library
open Opt_common
val min_max_expr : opt_pars -> Expr.constraints -> Expr.expr -> float * float
|
581a950f8d762df65bbdb726443b9617ea1682ffd555e2a851048f0548035ff5 | mmontone/cl-gradual | inference.lisp | (in-package :gradual)
(defun infer-type (form)
(%infer-type form
(make-typing-environment)))
(defgeneric %infer-type (form typing-environment)
)
(defmethod %infer-type ((form constant-form) typing-environment)
(type-of (value-of form)))
(defmethod %infer-type ((form free-application-form) typing-environment)
(let ((function-type (fun-type (operator-of form)))
(args-types (mapcar #'infer-type (arguments-of form))))
(return-type function-type)))
(defmethod %infer-type ((form let-form) typing-environment)
(let ((fresh-typing-environment typing-environment))
(loop for binding in (bindings-of form)
do
(setf fresh-typing-environment
(set-env-var-type fresh-typing-environment
(name-of binding)
(if (not (cl-walker::type-spec binding))
(%infer-type (value-of binding) typing-environment)
(cl-walker::type-spec binding)))))
(%infer-type (car (last (body-of form))) fresh-typing-environment)))
(defmethod %infer-type ((form walked-lexical-variable-reference-form) typing-environment)
(env-var-type typing-environment (name-of form)))
(defmethod %infer-type ((form the-form) typing-environment)
(cl-walker::type-of form))
(defmethod %infer-type ((form lambda-function-form) typing-environment)
(let* ((args-type-declarations (remove-if-not (lambda (declare)
(typep declare 'cl-walker::var-type-declaration-form))
(declares-of form)))
(arg-types (mapcar (lambda (arg)
(cons (name-of arg)
(let ((declared-type (aand
(find (name-of arg)
args-type-declarations
:key #'name-of)
(cl-walker::type-of it)))
(lambda-list-type (cl-walker::type-spec arg)))
(when (and (and declared-type lambda-list-type)
(not (equalp declared-type lambda-list-type)))
(error "Duplicate type declaration for ~A" (name-of arg)))
(or declared-type lambda-list-type
(and (typep arg 'cl-walker::optional-function-argument-form)
(aand
(default-value-of arg)
(%infer-type it typing-environment)))
t))))
(arguments-of form)))
(return-type (let ((return-type-declaration
(remove-if-not (lambda (declare)
(typep declare 'cl-walker::return-type-declaration-form))
(declares-of form))))
(if return-type-declaration
(cl-walker::type-of return-type-declaration)
; else
(let ((fresh-typing-environment (copy-typing-environment typing-environment)))
(loop for (arg . type) in arg-types
do (setf fresh-typing-environment (set-env-var-type fresh-typing-environment arg type)))
(%infer-type (car (last (body-of form)))
fresh-typing-environment))))))
(make-function-type :required-args-types (mapcar #'cdr arg-types)
:return-type return-type)))
| null | https://raw.githubusercontent.com/mmontone/cl-gradual/f8c0d992e2886a3fa37e49d9de772391961f234c/inference.lisp | lisp | else | (in-package :gradual)
(defun infer-type (form)
(%infer-type form
(make-typing-environment)))
(defgeneric %infer-type (form typing-environment)
)
(defmethod %infer-type ((form constant-form) typing-environment)
(type-of (value-of form)))
(defmethod %infer-type ((form free-application-form) typing-environment)
(let ((function-type (fun-type (operator-of form)))
(args-types (mapcar #'infer-type (arguments-of form))))
(return-type function-type)))
(defmethod %infer-type ((form let-form) typing-environment)
(let ((fresh-typing-environment typing-environment))
(loop for binding in (bindings-of form)
do
(setf fresh-typing-environment
(set-env-var-type fresh-typing-environment
(name-of binding)
(if (not (cl-walker::type-spec binding))
(%infer-type (value-of binding) typing-environment)
(cl-walker::type-spec binding)))))
(%infer-type (car (last (body-of form))) fresh-typing-environment)))
(defmethod %infer-type ((form walked-lexical-variable-reference-form) typing-environment)
(env-var-type typing-environment (name-of form)))
(defmethod %infer-type ((form the-form) typing-environment)
(cl-walker::type-of form))
(defmethod %infer-type ((form lambda-function-form) typing-environment)
(let* ((args-type-declarations (remove-if-not (lambda (declare)
(typep declare 'cl-walker::var-type-declaration-form))
(declares-of form)))
(arg-types (mapcar (lambda (arg)
(cons (name-of arg)
(let ((declared-type (aand
(find (name-of arg)
args-type-declarations
:key #'name-of)
(cl-walker::type-of it)))
(lambda-list-type (cl-walker::type-spec arg)))
(when (and (and declared-type lambda-list-type)
(not (equalp declared-type lambda-list-type)))
(error "Duplicate type declaration for ~A" (name-of arg)))
(or declared-type lambda-list-type
(and (typep arg 'cl-walker::optional-function-argument-form)
(aand
(default-value-of arg)
(%infer-type it typing-environment)))
t))))
(arguments-of form)))
(return-type (let ((return-type-declaration
(remove-if-not (lambda (declare)
(typep declare 'cl-walker::return-type-declaration-form))
(declares-of form))))
(if return-type-declaration
(cl-walker::type-of return-type-declaration)
(let ((fresh-typing-environment (copy-typing-environment typing-environment)))
(loop for (arg . type) in arg-types
do (setf fresh-typing-environment (set-env-var-type fresh-typing-environment arg type)))
(%infer-type (car (last (body-of form)))
fresh-typing-environment))))))
(make-function-type :required-args-types (mapcar #'cdr arg-types)
:return-type return-type)))
|
0a1f8def85336d6d26efc3d71de0eaf742ae2ef76ebc72e10e00f322bbb9d073 | cnuernber/depsviz | depsviz.clj | (ns cnuernber.depsviz
(:require [cnuernber.depsviz.graph :as deps-graph]
[cnuernber.depsviz.tools-deps :as tools-deps]
[cnuernber.depsviz.leiningen :as leiningen]
[clojure.set :as c-set]
[dorothy.core :as dorothy-core]
[dorothy.jvm :as dorothy-jvm]
[clojure.java.browse :refer [browse-url]]
[clojure.string :as str]
[clojure.tools.cli :as cli]
[clojure.java.io :as io])
(:gen-class))
(defn selected?
[node]
(= (:select node)
(:location node)))
(defn do-prune
"Prune all items that are a conflict or are dependent something that has a conflict."
[graph]
(->> (:edges graph)
(filter #(> (count %) 2))
(map second)
(deps-graph/keep-only graph)))
(defn do-focus
[graph node-names]
(->> node-names
(mapcat (partial deps-graph/find-nodes graph))
(deps-graph/keep-only graph)))
(defn do-highlight
[graph node-names]
(let [child->parent-map (deps-graph/child->parent-map graph)]
(->> node-names
(mapcat (partial deps-graph/find-nodes graph))
(mapcat (partial deps-graph/path-to-root child->parent-map))
distinct
(reduce #(update-in %1 [:nodes %2] assoc :highlight? true)
graph))))
(defn do-remove
[graph node-names]
(let [cur-roots (set (deps-graph/roots graph))
graph (->> node-names
(mapcat (partial deps-graph/find-nodes graph))
(deps-graph/remove-nodes graph))
new-roots (set (deps-graph/roots graph))
orphaned-nodes (c-set/difference cur-roots new-roots)]
(deps-graph/remove-nodes graph orphaned-nodes)))
(defn process-graph-options
[graph options]
(let [{:keys [prune focus highlight remove]} options]
(cond-> graph
prune
do-prune
focus
(do-focus focus)
highlight
(do-highlight highlight)
remove
(do-remove remove))))
(defn node-name
[node-id]
(format "%s\n%s" (first node-id) (second node-id)))
(defn graph->dot
[deps-graph options]
(let [deps-graph (process-graph-options deps-graph options)
node-list (->> (deps-graph/dfs-seq deps-graph)
(map (partial deps-graph/get-node deps-graph)))
root (first node-list)
node-list (rest node-list)
root-name (node-name (:id root))
dot-seq (->> node-list
(map (fn [{:keys [select location] :as node}]
(let [item-name (node-name (:id node))]
[(:dot-node-id node) (merge {:label item-name}
(when-not (selected? node)
{:color :red})
(when (:highlight? node)
{:color :blue}))])))
(concat [{:attrs {:rankdir (if (:vertical options)
:TB
:LR)}
:type ::dorothy-core/graph-attrs}
[(:dot-node-id root) {:label root-name :shape :doubleoctagon}]]))]
(->> (:edges deps-graph)
(map (fn [[lhs rhs conflict-info]]
(let [parent-node (deps-graph/get-node deps-graph lhs)
child-node (deps-graph/get-node deps-graph rhs)]
[(:dot-node-id parent-node) (:dot-node-id child-node)
(merge
{}
(when (:highlight? child-node)
{:pendwidth 2
:weight 500
:color :blue})
(when conflict-info
{:color :red
:label (str (:location conflict-info))
:penwidth 2
:weight 500}))])))
(concat dot-seq)
dorothy-core/digraph
dorothy-core/dot)))
(defn- extension
[^String item]
(let [last-idx (.lastIndexOf item ".")]
(if (> last-idx 0)
(.substring item (+ last-idx 1))
"")))
(defn build-dot
[fname options]
(-> (case (extension fname)
"edn" (tools-deps/load-graph fname options)
"clj" (leiningen/lein->graph fname options))
(deps-graph/merge-nodes-by :name selected?)
(graph->dot options)))
(def cli-help ["-h" "--help" "This usage summary."])
(def cli-save-dot ["-s" "--save-dot" "Save the generated GraphViz DOT file well as the output file."])
(def cli-no-view
["-n" "--no-view" "If given, the image will not be opened after creation."
:default false])
(defn ^:private allowed-extension
[path]
(let [x (str/last-index-of path ".")
ext (subs path (inc x))]
(#{"png" "pdf"} ext)))
(defn cli-output-file
[default-path]
["-o" "--output-file FILE" "Output file path. Extension chooses format: pdf or png."
:id :output-path
:default default-path
:validate [allowed-extension "Supported output formats are 'pdf' and 'png'."]])
(def cli-vertical
["-v" "--vertical" "Use a vertical, not horizontal, layout."])
(defn conj-option
"Used as :assoc-fn for an option to conj'es the values together."
[m k v]
(update m k conj v))
(defn ^:private usage
[command summary errors]
(->> [(str "Usage: depsviz [options]")
""
"Options:"
summary]
(str/join \newline)
println)
(when errors
(println "\nErrors:")
(doseq [e errors] (println " " e)))
nil)
(def vizdeps-cli-options
[["-f" "--focus ARTIFACT" "Excludes artifacts whose names do not match a supplied value. Repeatable."
:assoc-fn conj-option]
["-H" "--highlight ARTIFACT" "Highlight the artifact, and any dependencies to it, in blue. Repeatable."
:assoc-fn conj-option]
["-i" "--input FNAME" "File to draw dependencies from. Defaults to (first-that-exists [\"deps.edn\" \"project.clj\"])."
:id :input]
["-w" "--with-profiles PROFILE" "List of leiningen profiles (defaults to user). Additive only. Repeatable."
:assoc-fn conj-option]
["-r" "--remove ARTIFACT" "Excludes artifaces whose names match supplied value (defaults to org.clojure). Repeatable."
:assoc-fn conj-option]
cli-no-view
(cli-output-file "dependencies.pdf")
["-p" "--prune" "Exclude artifacts and dependencies that do not involve version conflicts."]
cli-save-dot
cli-vertical
cli-help])
(defn parse-cli-options
"Parses the CLI options; handles --help and errors (returning nil) or just
returns the parsed options."
[command cli-options args]
(let [{:keys [options errors summary]} (cli/parse-opts args cli-options)]
(if (or (:help options) errors)
(usage command summary errors)
options)))
(defn parse-options
[args]
(when-let [options (parse-cli-options "depsviz" vizdeps-cli-options args)]
(update options :with-profiles (fn [profile-list]
(if-not profile-list
[:user]
(mapv keyword profile-list))))))
(defn doit
[args]
(when-let [options (parse-options args)]
(let [out-format (-> (:output-path options)
extension
keyword)
input-file (or (:input options)
(->> ["deps.edn" "project.clj"]
(filter #(.exists (io/file %)))
first))]
(when-not (.exists (io/file input-file))
(throw (ex-info "Input file does not exist:" {:input input-file})))
(let [dot-data (build-dot input-file options)
output-path (:output-path options)
output-format (-> (extension output-path)
keyword)]
(dorothy-jvm/save! dot-data output-path {:format output-format})
(when (:save-dot options)
(let [x (str/last-index-of output-path ".")
dot-path (str (subs output-path 0 x) ".dot")
^File dot-file (io/file dot-path)]
(spit dot-file dot-data)))
(when-not (:no-view options)
(browse-url output-path))))))
(defn -main
[& args]
(doit args)
(shutdown-agents))
| null | https://raw.githubusercontent.com/cnuernber/depsviz/3b9f4314d828b55929c486bdd701f5dbb1a9a380/src/cnuernber/depsviz.clj | clojure | handles --help and errors (returning nil) or just | (ns cnuernber.depsviz
(:require [cnuernber.depsviz.graph :as deps-graph]
[cnuernber.depsviz.tools-deps :as tools-deps]
[cnuernber.depsviz.leiningen :as leiningen]
[clojure.set :as c-set]
[dorothy.core :as dorothy-core]
[dorothy.jvm :as dorothy-jvm]
[clojure.java.browse :refer [browse-url]]
[clojure.string :as str]
[clojure.tools.cli :as cli]
[clojure.java.io :as io])
(:gen-class))
(defn selected?
[node]
(= (:select node)
(:location node)))
(defn do-prune
"Prune all items that are a conflict or are dependent something that has a conflict."
[graph]
(->> (:edges graph)
(filter #(> (count %) 2))
(map second)
(deps-graph/keep-only graph)))
(defn do-focus
[graph node-names]
(->> node-names
(mapcat (partial deps-graph/find-nodes graph))
(deps-graph/keep-only graph)))
(defn do-highlight
[graph node-names]
(let [child->parent-map (deps-graph/child->parent-map graph)]
(->> node-names
(mapcat (partial deps-graph/find-nodes graph))
(mapcat (partial deps-graph/path-to-root child->parent-map))
distinct
(reduce #(update-in %1 [:nodes %2] assoc :highlight? true)
graph))))
(defn do-remove
[graph node-names]
(let [cur-roots (set (deps-graph/roots graph))
graph (->> node-names
(mapcat (partial deps-graph/find-nodes graph))
(deps-graph/remove-nodes graph))
new-roots (set (deps-graph/roots graph))
orphaned-nodes (c-set/difference cur-roots new-roots)]
(deps-graph/remove-nodes graph orphaned-nodes)))
(defn process-graph-options
[graph options]
(let [{:keys [prune focus highlight remove]} options]
(cond-> graph
prune
do-prune
focus
(do-focus focus)
highlight
(do-highlight highlight)
remove
(do-remove remove))))
(defn node-name
[node-id]
(format "%s\n%s" (first node-id) (second node-id)))
(defn graph->dot
[deps-graph options]
(let [deps-graph (process-graph-options deps-graph options)
node-list (->> (deps-graph/dfs-seq deps-graph)
(map (partial deps-graph/get-node deps-graph)))
root (first node-list)
node-list (rest node-list)
root-name (node-name (:id root))
dot-seq (->> node-list
(map (fn [{:keys [select location] :as node}]
(let [item-name (node-name (:id node))]
[(:dot-node-id node) (merge {:label item-name}
(when-not (selected? node)
{:color :red})
(when (:highlight? node)
{:color :blue}))])))
(concat [{:attrs {:rankdir (if (:vertical options)
:TB
:LR)}
:type ::dorothy-core/graph-attrs}
[(:dot-node-id root) {:label root-name :shape :doubleoctagon}]]))]
(->> (:edges deps-graph)
(map (fn [[lhs rhs conflict-info]]
(let [parent-node (deps-graph/get-node deps-graph lhs)
child-node (deps-graph/get-node deps-graph rhs)]
[(:dot-node-id parent-node) (:dot-node-id child-node)
(merge
{}
(when (:highlight? child-node)
{:pendwidth 2
:weight 500
:color :blue})
(when conflict-info
{:color :red
:label (str (:location conflict-info))
:penwidth 2
:weight 500}))])))
(concat dot-seq)
dorothy-core/digraph
dorothy-core/dot)))
(defn- extension
[^String item]
(let [last-idx (.lastIndexOf item ".")]
(if (> last-idx 0)
(.substring item (+ last-idx 1))
"")))
(defn build-dot
[fname options]
(-> (case (extension fname)
"edn" (tools-deps/load-graph fname options)
"clj" (leiningen/lein->graph fname options))
(deps-graph/merge-nodes-by :name selected?)
(graph->dot options)))
(def cli-help ["-h" "--help" "This usage summary."])
(def cli-save-dot ["-s" "--save-dot" "Save the generated GraphViz DOT file well as the output file."])
(def cli-no-view
["-n" "--no-view" "If given, the image will not be opened after creation."
:default false])
(defn ^:private allowed-extension
[path]
(let [x (str/last-index-of path ".")
ext (subs path (inc x))]
(#{"png" "pdf"} ext)))
(defn cli-output-file
[default-path]
["-o" "--output-file FILE" "Output file path. Extension chooses format: pdf or png."
:id :output-path
:default default-path
:validate [allowed-extension "Supported output formats are 'pdf' and 'png'."]])
(def cli-vertical
["-v" "--vertical" "Use a vertical, not horizontal, layout."])
(defn conj-option
"Used as :assoc-fn for an option to conj'es the values together."
[m k v]
(update m k conj v))
(defn ^:private usage
[command summary errors]
(->> [(str "Usage: depsviz [options]")
""
"Options:"
summary]
(str/join \newline)
println)
(when errors
(println "\nErrors:")
(doseq [e errors] (println " " e)))
nil)
(def vizdeps-cli-options
[["-f" "--focus ARTIFACT" "Excludes artifacts whose names do not match a supplied value. Repeatable."
:assoc-fn conj-option]
["-H" "--highlight ARTIFACT" "Highlight the artifact, and any dependencies to it, in blue. Repeatable."
:assoc-fn conj-option]
["-i" "--input FNAME" "File to draw dependencies from. Defaults to (first-that-exists [\"deps.edn\" \"project.clj\"])."
:id :input]
["-w" "--with-profiles PROFILE" "List of leiningen profiles (defaults to user). Additive only. Repeatable."
:assoc-fn conj-option]
["-r" "--remove ARTIFACT" "Excludes artifaces whose names match supplied value (defaults to org.clojure). Repeatable."
:assoc-fn conj-option]
cli-no-view
(cli-output-file "dependencies.pdf")
["-p" "--prune" "Exclude artifacts and dependencies that do not involve version conflicts."]
cli-save-dot
cli-vertical
cli-help])
(defn parse-cli-options
returns the parsed options."
[command cli-options args]
(let [{:keys [options errors summary]} (cli/parse-opts args cli-options)]
(if (or (:help options) errors)
(usage command summary errors)
options)))
(defn parse-options
[args]
(when-let [options (parse-cli-options "depsviz" vizdeps-cli-options args)]
(update options :with-profiles (fn [profile-list]
(if-not profile-list
[:user]
(mapv keyword profile-list))))))
(defn doit
[args]
(when-let [options (parse-options args)]
(let [out-format (-> (:output-path options)
extension
keyword)
input-file (or (:input options)
(->> ["deps.edn" "project.clj"]
(filter #(.exists (io/file %)))
first))]
(when-not (.exists (io/file input-file))
(throw (ex-info "Input file does not exist:" {:input input-file})))
(let [dot-data (build-dot input-file options)
output-path (:output-path options)
output-format (-> (extension output-path)
keyword)]
(dorothy-jvm/save! dot-data output-path {:format output-format})
(when (:save-dot options)
(let [x (str/last-index-of output-path ".")
dot-path (str (subs output-path 0 x) ".dot")
^File dot-file (io/file dot-path)]
(spit dot-file dot-data)))
(when-not (:no-view options)
(browse-url output-path))))))
(defn -main
[& args]
(doit args)
(shutdown-agents))
|
10ac75394d545acaa3be64b1ac630e7ab25511f3e2cdebbe53c5f5600bac291f | osimon8/CombinatorC | combinator.ml | type symbol = string
type id = int
type value = int32
type data = symbol * value
type signal = data list
type size = int * int
type arithemtic_op =
| Add
| Sub
| Mul
| Div
| Mod
| Exp
| Lshift
| Rshift
| AND
| OR
| XOR
type decider_op =
| Gt
| Lt
| Gte
| Lte
| Eq
| Neq
type decider_output_type =
| One
| InpCount
type aop =
| Symbol of string
| Const of value
| Each
type dop =
| Symbol of string
| Const of value
| Each
| Anything
| Everything
type op = Aop of aop | Dop of dop
(* left input * operation * right input * output *)
type arithemtic_config = aop * arithemtic_op * aop * aop
(* left input * operation * right input * output *)
type decider_config = dop * decider_op * dop * dop * decider_output_type
type constant_config = signal
type lamp_config = dop * decider_op * dop
type cfg =
| A of arithemtic_config
| D of decider_config
| C of constant_config
| L of lamp_config
type arithmetic_combinator = id * arithemtic_config
type decider_combinator = id * decider_config
type pole_type =
| Small
| Medium
| Big
| Substation
type combinator =
| Arithmetic of arithmetic_combinator
| Decider of decider_combinator
| Constant of id * constant_config
| Lamp of id * lamp_config
| Pole of id * pole_type
let size_of_combinator (comb:combinator) : size =
begin match comb with
| Arithmetic _
| Decider _ -> (1, 2)
| Constant _ -> (1, 1)
| Lamp _ -> (1, 1)
| Pole (_, t) -> begin match t with
| Small -> (1, 1)
| Medium -> (1, 1)
| Big -> (2, 2)
| Substation -> (2, 2)
end
end
let id_of_combinator (comb:combinator) : id =
begin match comb with
| Arithmetic (id, _) -> id
| Decider (id, _) -> id
| Constant (id, _) -> id
| Lamp (id, _) -> id
| Pole (id, _) -> id
end
(* let input_signals_of_combinator (comb:combinator) : op list =
begin match comb with
| Arithmetic (_, ((o1, _, o2, _))) -> [o1; o2]
| _ -> []
end *)
let uses_signal (comb:combinator) (s:symbol) : bool =
let aop_uses (aop:aop) =
begin match aop with
| Symbol s1 -> s1 = s
| Const _ -> false
| Each -> true
end in
let dop_uses dop inn t =
begin match dop with
| Symbol s1 -> (inn || (match t with | InpCount -> true | One -> false)) && s1 = s
| Const _ -> false
| Anything
| Everything
| Each -> true
end in
begin match comb with
| Arithmetic (_, (op1, _, op2, _)) -> aop_uses op1 || aop_uses op2
| Decider (_, (op1, _, op2, op3, t)) -> dop_uses op1 true t || dop_uses op2 true t || dop_uses op3 false t
| Constant (_, sigs) -> List.mem s (List.map fst sigs)
| Lamp (_, (op1, _, op2)) -> dop_uses op1 true One || dop_uses op2 true One
| Pole _ -> false
end
let uses_signal_in_input (comb:combinator) (s:symbol) : bool =
let aop_uses (aop:aop) =
begin match aop with
| Symbol s1 -> s1 = s
| Const _ -> false
| Each -> true
end in
let dop_uses dop =
begin match dop with
| Symbol s1 -> s1 = s
| Const _ -> false
| Anything
| Everything
| Each -> true
end in
begin match comb with
| Arithmetic (id, (op1, _, op2, _)) -> aop_uses op1 || aop_uses op2
| Decider (id, (op1, _, op2, op3, t)) -> dop_uses op1 || dop_uses op2
| Constant (_, sigs) -> List.mem s (List.map fst sigs)
| Lamp (_, (op1, _, op2)) -> dop_uses op1 || dop_uses op2
| Pole _ -> false
end
let uses_wildcard (comb:combinator) : bool =
let aop_uses (aop:aop) =
begin match aop with
| Each -> true
| _ -> false
end in
let dop_uses dop =
begin match dop with
| Anything
| Everything
| Each -> true
| _ -> false
end in
begin match comb with
| Arithmetic (id, (op1, _, op2, op3)) -> aop_uses op1 || aop_uses op2 || aop_uses op3
| Decider (id, (op1, _, op2, op3, t)) -> dop_uses op1 || dop_uses op2 || dop_uses op3
| Constant (_, sigs) -> false
| Lamp (_, (op1, _, op2)) -> dop_uses op1 || dop_uses op2
| Pole _ -> false
end
let replace_signal_A (comb:arithmetic_combinator) (s:symbol) (v:value) : arithmetic_combinator =
let r2 (comb:arithmetic_combinator) s v : arithmetic_combinator =
let id, ((o1, op, o2, out)) = comb in
begin match o2 with
| Symbol sy -> if sy = s then (id, (o1, op, Const v, out)) else comb
| _ -> comb
end
in
let id, ((o1, op, o2, out)) = comb in
begin match o1 with
| Symbol sy -> if sy = s then r2 (id, (Const v, op, o2, out)) s v else r2 comb s v
| _ -> r2 comb s v
end
let replace_signal_D (comb:decider_combinator) (s:symbol) (v:value) : decider_combinator =
let r2 (comb:decider_combinator) s v : decider_combinator =
let id, ((o1, op, o2, out, t)) = comb in
begin match o2 with
| Symbol sy -> if sy = s then (id, (o1, op, Const v, out, t)) else comb
| _ -> comb
end
in
let id, ((o1, op, o2, out, t)) = comb in
begin match o1 with
| Symbol sy -> if sy = s then r2 (id, (Const v, op, o2, out, t)) s v else r2 comb s v
| _ -> r2 comb s v
end
let string_of_arithmetic_op (op:arithemtic_op) : string =
begin match op with
| Add -> "+"
| Sub -> "-"
| Mul -> "*"
| Div -> "/"
| Mod -> "%"
| Exp -> "^"
| Lshift -> "<<"
| Rshift -> ">>"
| AND -> "AND"
| OR -> "OR"
| XOR -> "XOR"
end
let string_of_decider_op (op:decider_op) : string =
begin match op with
| Gt -> ">"
| Lt -> "<"
| Gte -> "≥"
| Lte -> "≤"
| Eq -> "="
| Neq -> "≠"
end
let string_of_combinator (comb:combinator) : string =
begin match comb with
| Arithmetic (id, _) -> "Arithmetic: " ^ string_of_int id
| Decider (id, _) -> "Decider: " ^ string_of_int id
| Constant (id, _) -> "Constant: " ^ string_of_int id
| Lamp (id, _) -> "Lamp: " ^ string_of_int id
| Pole (id, t) ->
begin match t with
| Small -> "Small Electric Pole: "
| Medium -> "Medium Electric Pole: "
| Big -> "Big Electric Pole: "
| Substation -> "Substation: "
end
^ string_of_int id
end | null | https://raw.githubusercontent.com/osimon8/CombinatorC/0bdbbc893ee458ec75eab7a48712d07a62e190aa/src/ast/combinator.ml | ocaml | left input * operation * right input * output
left input * operation * right input * output
let input_signals_of_combinator (comb:combinator) : op list =
begin match comb with
| Arithmetic (_, ((o1, _, o2, _))) -> [o1; o2]
| _ -> []
end | type symbol = string
type id = int
type value = int32
type data = symbol * value
type signal = data list
type size = int * int
type arithemtic_op =
| Add
| Sub
| Mul
| Div
| Mod
| Exp
| Lshift
| Rshift
| AND
| OR
| XOR
type decider_op =
| Gt
| Lt
| Gte
| Lte
| Eq
| Neq
type decider_output_type =
| One
| InpCount
type aop =
| Symbol of string
| Const of value
| Each
type dop =
| Symbol of string
| Const of value
| Each
| Anything
| Everything
type op = Aop of aop | Dop of dop
type arithemtic_config = aop * arithemtic_op * aop * aop
type decider_config = dop * decider_op * dop * dop * decider_output_type
type constant_config = signal
type lamp_config = dop * decider_op * dop
type cfg =
| A of arithemtic_config
| D of decider_config
| C of constant_config
| L of lamp_config
type arithmetic_combinator = id * arithemtic_config
type decider_combinator = id * decider_config
type pole_type =
| Small
| Medium
| Big
| Substation
type combinator =
| Arithmetic of arithmetic_combinator
| Decider of decider_combinator
| Constant of id * constant_config
| Lamp of id * lamp_config
| Pole of id * pole_type
let size_of_combinator (comb:combinator) : size =
begin match comb with
| Arithmetic _
| Decider _ -> (1, 2)
| Constant _ -> (1, 1)
| Lamp _ -> (1, 1)
| Pole (_, t) -> begin match t with
| Small -> (1, 1)
| Medium -> (1, 1)
| Big -> (2, 2)
| Substation -> (2, 2)
end
end
let id_of_combinator (comb:combinator) : id =
begin match comb with
| Arithmetic (id, _) -> id
| Decider (id, _) -> id
| Constant (id, _) -> id
| Lamp (id, _) -> id
| Pole (id, _) -> id
end
let uses_signal (comb:combinator) (s:symbol) : bool =
let aop_uses (aop:aop) =
begin match aop with
| Symbol s1 -> s1 = s
| Const _ -> false
| Each -> true
end in
let dop_uses dop inn t =
begin match dop with
| Symbol s1 -> (inn || (match t with | InpCount -> true | One -> false)) && s1 = s
| Const _ -> false
| Anything
| Everything
| Each -> true
end in
begin match comb with
| Arithmetic (_, (op1, _, op2, _)) -> aop_uses op1 || aop_uses op2
| Decider (_, (op1, _, op2, op3, t)) -> dop_uses op1 true t || dop_uses op2 true t || dop_uses op3 false t
| Constant (_, sigs) -> List.mem s (List.map fst sigs)
| Lamp (_, (op1, _, op2)) -> dop_uses op1 true One || dop_uses op2 true One
| Pole _ -> false
end
let uses_signal_in_input (comb:combinator) (s:symbol) : bool =
let aop_uses (aop:aop) =
begin match aop with
| Symbol s1 -> s1 = s
| Const _ -> false
| Each -> true
end in
let dop_uses dop =
begin match dop with
| Symbol s1 -> s1 = s
| Const _ -> false
| Anything
| Everything
| Each -> true
end in
begin match comb with
| Arithmetic (id, (op1, _, op2, _)) -> aop_uses op1 || aop_uses op2
| Decider (id, (op1, _, op2, op3, t)) -> dop_uses op1 || dop_uses op2
| Constant (_, sigs) -> List.mem s (List.map fst sigs)
| Lamp (_, (op1, _, op2)) -> dop_uses op1 || dop_uses op2
| Pole _ -> false
end
let uses_wildcard (comb:combinator) : bool =
let aop_uses (aop:aop) =
begin match aop with
| Each -> true
| _ -> false
end in
let dop_uses dop =
begin match dop with
| Anything
| Everything
| Each -> true
| _ -> false
end in
begin match comb with
| Arithmetic (id, (op1, _, op2, op3)) -> aop_uses op1 || aop_uses op2 || aop_uses op3
| Decider (id, (op1, _, op2, op3, t)) -> dop_uses op1 || dop_uses op2 || dop_uses op3
| Constant (_, sigs) -> false
| Lamp (_, (op1, _, op2)) -> dop_uses op1 || dop_uses op2
| Pole _ -> false
end
let replace_signal_A (comb:arithmetic_combinator) (s:symbol) (v:value) : arithmetic_combinator =
let r2 (comb:arithmetic_combinator) s v : arithmetic_combinator =
let id, ((o1, op, o2, out)) = comb in
begin match o2 with
| Symbol sy -> if sy = s then (id, (o1, op, Const v, out)) else comb
| _ -> comb
end
in
let id, ((o1, op, o2, out)) = comb in
begin match o1 with
| Symbol sy -> if sy = s then r2 (id, (Const v, op, o2, out)) s v else r2 comb s v
| _ -> r2 comb s v
end
let replace_signal_D (comb:decider_combinator) (s:symbol) (v:value) : decider_combinator =
let r2 (comb:decider_combinator) s v : decider_combinator =
let id, ((o1, op, o2, out, t)) = comb in
begin match o2 with
| Symbol sy -> if sy = s then (id, (o1, op, Const v, out, t)) else comb
| _ -> comb
end
in
let id, ((o1, op, o2, out, t)) = comb in
begin match o1 with
| Symbol sy -> if sy = s then r2 (id, (Const v, op, o2, out, t)) s v else r2 comb s v
| _ -> r2 comb s v
end
let string_of_arithmetic_op (op:arithemtic_op) : string =
begin match op with
| Add -> "+"
| Sub -> "-"
| Mul -> "*"
| Div -> "/"
| Mod -> "%"
| Exp -> "^"
| Lshift -> "<<"
| Rshift -> ">>"
| AND -> "AND"
| OR -> "OR"
| XOR -> "XOR"
end
let string_of_decider_op (op:decider_op) : string =
begin match op with
| Gt -> ">"
| Lt -> "<"
| Gte -> "≥"
| Lte -> "≤"
| Eq -> "="
| Neq -> "≠"
end
let string_of_combinator (comb:combinator) : string =
begin match comb with
| Arithmetic (id, _) -> "Arithmetic: " ^ string_of_int id
| Decider (id, _) -> "Decider: " ^ string_of_int id
| Constant (id, _) -> "Constant: " ^ string_of_int id
| Lamp (id, _) -> "Lamp: " ^ string_of_int id
| Pole (id, t) ->
begin match t with
| Small -> "Small Electric Pole: "
| Medium -> "Medium Electric Pole: "
| Big -> "Big Electric Pole: "
| Substation -> "Substation: "
end
^ string_of_int id
end |
de2ffd48229c17bd7a436393b05adcf676a9462e297a535678cbd73f1fb517b1 | DSiSc/why3 | jsmain.ml |
module D = Dom_html
let d = D.document
(* Grid Layout *)
let make_board () =
let make_input () =
let input =
D.createInput ~_type:(Js.string "text") ~name:(Js.string "input") d
in
input##size <- 1;
input##maxLength <- 1;
input##align <- Js.string "center";
let style = input##style in
style##border <- Js.string "none";
style##fontFamily <- Js.string "monospace";
style##fontSize <- Js.string "20px";
style##fontWeight <- Js.string "bold";
style##paddingBottom <- Js.string "5px";
style##paddingTop <- Js.string "5px";
style##paddingLeft <- Js.string "10px";
style##paddingRight <- Js.string "10px";
let enforce_digit _ =
begin
match Js.to_string input##value with
| "1" | "2" | "3" | "4" | "5"
| "6" | "7" | "8" | "9" -> ()
| _ -> input##value <- Js.string ""
end;
Js._false
in
input##onchange <- Dom_html.handler enforce_digit;
input
in
let make_td i j input =
let td = D.createTd d in
td##align <- Js.string "center";
let style = td##style in
style##borderStyle <- Js.string "solid";
style##borderColor <- Js.string "#000000";
let widths = function
| 0 -> 3, 0 | 2 -> 1, 1 | 3 -> 1, 0
| 5 -> 1, 1 | 6 -> 1, 0 | 8 -> 1, 3
| _ -> 1, 0 in
let (top, bottom) = widths i in
let (left, right) = widths j in
let px k = Js.string (string_of_int k ^ "px") in
style##borderTopWidth <- px top;
style##borderBottomWidth <- px bottom;
style##borderLeftWidth <- px left;
style##borderRightWidth <- px right;
Dom.appendChild td input;
td
in
let rows = Array.init 9 (fun i -> Array.init 9 (fun j -> make_input ())) in
let table = D.createTable d in
table##cellPadding <- Js.string "0px";
table##cellSpacing <- Js.string "0px";
let tbody = D.createTbody d in
Dom.appendChild table tbody;
ArrayLabels.iteri rows ~f:(fun i row ->
let tr = D.createTr d in
ArrayLabels.iteri row ~f:(fun j cell ->
let td = make_td i j cell in
ignore (Dom.appendChild tr td));
ignore (Dom.appendChild tbody tr));
(rows, table)
Solver
open Why3extract
let display_sol rows a =
for i=0 to 8 do
for j=0 to 8 do
let cell = rows.(i).(j) in
cell##value <- Js.string (Why3__BigInt.to_string a.(9*i+j));
cell##style##backgroundColor <- Js.string "#ffffff"
done
done
let no_sol rows =
for i=0 to 8 do
for j=0 to 8 do
let cell = rows.(i).(j) in
cell##style##backgroundColor <- Js.string "#ff0000"
done
done
let solve_board rows _ =
let sudoku = Sudoku__TheClassicalSudokuGrid.classical_sudoku () in
let input_grid = Array.make 81 Why3__BigInt.zero in
for i=0 to 8 do
for j=0 to 8 do
let cell = rows.(i).(j) in
let v =
match Js.to_string cell##value with
| "" -> 0
| s -> Char.code s.[0] - Char.code '0'
in
input_grid.(9*i+j) <- Why3__BigInt.of_int v
done
done;
begin
try
let a = Sudoku__Solver.check_then_solve sudoku input_grid in
display_sol rows a
with Sudoku__Solver.NoSolution -> no_sol rows
end;
Js._false
(* reset board to empty cells *)
let reset_board rows _ =
for i=0 to 8 do
for j=0 to 8 do
let cell = rows.(i).(j) in
cell##value <- Js.string "";
cell##style##backgroundColor <- Js.string "#ffffff";
done
done;
Js._false
(* load examples *)
let load_board rows test _ =
for i=0 to 8 do
for j=0 to 8 do
let cell = rows.(i).(j) in
let v = test.(9*i+j) in
let v = if v = 0 then "" else string_of_int v in
cell##value <- Js.string v;
cell##style##backgroundColor <- Js.string "#ffffff";
done
done;
Js._false
let test1 =
[| 2;0;9;0;0;0;0;1;0;
0;0;0;0;6;0;0;0;0;
0;5;3;8;0;2;7;0;0;
3;0;0;0;0;0;0;0;0;
0;0;0;0;7;5;0;0;3;
0;4;1;2;0;8;9;0;0;
0;0;4;0;9;0;0;2;0;
8;0;0;0;0;1;0;0;5;
0;0;0;0;0;0;0;7;6 |]
let test2 =
[| 7;0;0;0;0;0;0;0;8;
0;9;0;7;0;6;0;3;0;
0;0;1;0;0;0;9;0;0;
0;7;0;1;0;4;0;5;0;
0;0;0;0;6;0;0;0;0;
0;5;0;3;0;7;0;1;0;
0;0;2;0;0;0;1;0;0;
0;1;0;9;0;8;0;7;0;
8;0;0;0;0;0;0;0;6 |]
let test3 =
[| 0;0;0;0;0;0;0;0;0;
0;0;0;0;0;3;0;8;5;
0;0;1;0;2;0;0;0;0;
0;0;0;5;0;7;0;0;0;
0;0;4;0;0;0;1;0;0;
0;9;0;0;0;0;0;0;0;
5;0;0;0;0;0;0;7;3;
0;0;2;0;1;0;0;0;0;
0;0;0;0;4;0;0;0;9 |]
let onload (_event : #Dom_html.event Js.t) : bool Js.t =
let (rows, table) = make_board () in
let solve = Js.Opt.get (d##getElementById (Js.string "solve"))
(fun () -> assert false) in
solve##onclick <- Dom_html.handler (solve_board rows);
let reset = Js.Opt.get (d##getElementById (Js.string "reset"))
(fun () -> assert false) in
reset##onclick <- Dom_html.handler (reset_board rows);
let sample1 = Js.Opt.get (d##getElementById (Js.string "sample1"))
(fun () -> assert false) in
sample1##onclick <- Dom_html.handler (load_board rows test1);
let sample2 = Js.Opt.get (d##getElementById (Js.string "sample2"))
(fun () -> assert false) in
sample2##onclick <- Dom_html.handler (load_board rows test2);
let sample3= Js.Opt.get (d##getElementById (Js.string "sample3"))
(fun () -> assert false) in
sample3##onclick <- Dom_html.handler (load_board rows test3);
let board = Js.Opt.get (d##getElementById (Js.string "board"))
(fun () -> assert false) in
Dom.appendChild board table;
board##style##padding <- Js.string "40px";
Js._false
let _ = Dom_html.window##onload <- Dom_html.handler onload
| null | https://raw.githubusercontent.com/DSiSc/why3/8ba9c2287224b53075adc51544bc377bc8ea5c75/examples/sudoku/jsmain.ml | ocaml | Grid Layout
reset board to empty cells
load examples |
module D = Dom_html
let d = D.document
let make_board () =
let make_input () =
let input =
D.createInput ~_type:(Js.string "text") ~name:(Js.string "input") d
in
input##size <- 1;
input##maxLength <- 1;
input##align <- Js.string "center";
let style = input##style in
style##border <- Js.string "none";
style##fontFamily <- Js.string "monospace";
style##fontSize <- Js.string "20px";
style##fontWeight <- Js.string "bold";
style##paddingBottom <- Js.string "5px";
style##paddingTop <- Js.string "5px";
style##paddingLeft <- Js.string "10px";
style##paddingRight <- Js.string "10px";
let enforce_digit _ =
begin
match Js.to_string input##value with
| "1" | "2" | "3" | "4" | "5"
| "6" | "7" | "8" | "9" -> ()
| _ -> input##value <- Js.string ""
end;
Js._false
in
input##onchange <- Dom_html.handler enforce_digit;
input
in
let make_td i j input =
let td = D.createTd d in
td##align <- Js.string "center";
let style = td##style in
style##borderStyle <- Js.string "solid";
style##borderColor <- Js.string "#000000";
let widths = function
| 0 -> 3, 0 | 2 -> 1, 1 | 3 -> 1, 0
| 5 -> 1, 1 | 6 -> 1, 0 | 8 -> 1, 3
| _ -> 1, 0 in
let (top, bottom) = widths i in
let (left, right) = widths j in
let px k = Js.string (string_of_int k ^ "px") in
style##borderTopWidth <- px top;
style##borderBottomWidth <- px bottom;
style##borderLeftWidth <- px left;
style##borderRightWidth <- px right;
Dom.appendChild td input;
td
in
let rows = Array.init 9 (fun i -> Array.init 9 (fun j -> make_input ())) in
let table = D.createTable d in
table##cellPadding <- Js.string "0px";
table##cellSpacing <- Js.string "0px";
let tbody = D.createTbody d in
Dom.appendChild table tbody;
ArrayLabels.iteri rows ~f:(fun i row ->
let tr = D.createTr d in
ArrayLabels.iteri row ~f:(fun j cell ->
let td = make_td i j cell in
ignore (Dom.appendChild tr td));
ignore (Dom.appendChild tbody tr));
(rows, table)
Solver
open Why3extract
let display_sol rows a =
for i=0 to 8 do
for j=0 to 8 do
let cell = rows.(i).(j) in
cell##value <- Js.string (Why3__BigInt.to_string a.(9*i+j));
cell##style##backgroundColor <- Js.string "#ffffff"
done
done
let no_sol rows =
for i=0 to 8 do
for j=0 to 8 do
let cell = rows.(i).(j) in
cell##style##backgroundColor <- Js.string "#ff0000"
done
done
let solve_board rows _ =
let sudoku = Sudoku__TheClassicalSudokuGrid.classical_sudoku () in
let input_grid = Array.make 81 Why3__BigInt.zero in
for i=0 to 8 do
for j=0 to 8 do
let cell = rows.(i).(j) in
let v =
match Js.to_string cell##value with
| "" -> 0
| s -> Char.code s.[0] - Char.code '0'
in
input_grid.(9*i+j) <- Why3__BigInt.of_int v
done
done;
begin
try
let a = Sudoku__Solver.check_then_solve sudoku input_grid in
display_sol rows a
with Sudoku__Solver.NoSolution -> no_sol rows
end;
Js._false
let reset_board rows _ =
for i=0 to 8 do
for j=0 to 8 do
let cell = rows.(i).(j) in
cell##value <- Js.string "";
cell##style##backgroundColor <- Js.string "#ffffff";
done
done;
Js._false
let load_board rows test _ =
for i=0 to 8 do
for j=0 to 8 do
let cell = rows.(i).(j) in
let v = test.(9*i+j) in
let v = if v = 0 then "" else string_of_int v in
cell##value <- Js.string v;
cell##style##backgroundColor <- Js.string "#ffffff";
done
done;
Js._false
let test1 =
[| 2;0;9;0;0;0;0;1;0;
0;0;0;0;6;0;0;0;0;
0;5;3;8;0;2;7;0;0;
3;0;0;0;0;0;0;0;0;
0;0;0;0;7;5;0;0;3;
0;4;1;2;0;8;9;0;0;
0;0;4;0;9;0;0;2;0;
8;0;0;0;0;1;0;0;5;
0;0;0;0;0;0;0;7;6 |]
let test2 =
[| 7;0;0;0;0;0;0;0;8;
0;9;0;7;0;6;0;3;0;
0;0;1;0;0;0;9;0;0;
0;7;0;1;0;4;0;5;0;
0;0;0;0;6;0;0;0;0;
0;5;0;3;0;7;0;1;0;
0;0;2;0;0;0;1;0;0;
0;1;0;9;0;8;0;7;0;
8;0;0;0;0;0;0;0;6 |]
let test3 =
[| 0;0;0;0;0;0;0;0;0;
0;0;0;0;0;3;0;8;5;
0;0;1;0;2;0;0;0;0;
0;0;0;5;0;7;0;0;0;
0;0;4;0;0;0;1;0;0;
0;9;0;0;0;0;0;0;0;
5;0;0;0;0;0;0;7;3;
0;0;2;0;1;0;0;0;0;
0;0;0;0;4;0;0;0;9 |]
let onload (_event : #Dom_html.event Js.t) : bool Js.t =
let (rows, table) = make_board () in
let solve = Js.Opt.get (d##getElementById (Js.string "solve"))
(fun () -> assert false) in
solve##onclick <- Dom_html.handler (solve_board rows);
let reset = Js.Opt.get (d##getElementById (Js.string "reset"))
(fun () -> assert false) in
reset##onclick <- Dom_html.handler (reset_board rows);
let sample1 = Js.Opt.get (d##getElementById (Js.string "sample1"))
(fun () -> assert false) in
sample1##onclick <- Dom_html.handler (load_board rows test1);
let sample2 = Js.Opt.get (d##getElementById (Js.string "sample2"))
(fun () -> assert false) in
sample2##onclick <- Dom_html.handler (load_board rows test2);
let sample3= Js.Opt.get (d##getElementById (Js.string "sample3"))
(fun () -> assert false) in
sample3##onclick <- Dom_html.handler (load_board rows test3);
let board = Js.Opt.get (d##getElementById (Js.string "board"))
(fun () -> assert false) in
Dom.appendChild board table;
board##style##padding <- Js.string "40px";
Js._false
let _ = Dom_html.window##onload <- Dom_html.handler onload
|
0dba921cb4b4058efc405b7e5474e45ba73bff0a2530bb2dbb53c8ebcdc88101 | tarcieri/reia | sleep_benchmark.erl | -module (sleep_benchmark).
-export ([run/0]).
run() ->
timer:sleep(1000).
| null | https://raw.githubusercontent.com/tarcieri/reia/77b8b5603ae5d89a0d8fc0b3e179ef052260b5bf/benchmarks/sleep_benchmark.erl | erlang | -module (sleep_benchmark).
-export ([run/0]).
run() ->
timer:sleep(1000).
| |
b76e8e54264987fa848fc70832b0d07cc028b57fbd9a0eb101962097cea4adb8 | freckle/yesod-auth-oauth2 | DispatchError.hs | {-# LANGUAGE DeriveAnyClass #-}
# LANGUAGE DerivingStrategies #
# LANGUAGE FlexibleContexts #
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
module Yesod.Auth.OAuth2.DispatchError
( DispatchError(..)
, handleDispatchError
, onDispatchError
) where
import Control.Monad.Except
import Data.Text (Text, pack)
import Network.OAuth.OAuth2.Compat (Errors)
import UnliftIO.Except ()
import UnliftIO.Exception
import Yesod.Auth hiding (ServerError)
import Yesod.Auth.OAuth2.ErrorResponse
import Yesod.Auth.OAuth2.Exception
import Yesod.Auth.OAuth2.Random
import Yesod.Core hiding (ErrorResponse)
data DispatchError
= MissingParameter Text
| InvalidStateToken (Maybe Text) Text
| InvalidCallbackUri Text
| OAuth2HandshakeError ErrorResponse
| OAuth2ResultError Errors
| FetchCredsIOException IOException
| FetchCredsYesodOAuth2Exception YesodOAuth2Exception
| OtherDispatchError Text
deriving stock Show
deriving anyclass Exception
| User - friendly message for any given ' DispatchError '
--
-- Most of these are opaque to the user. The exception details are present for
-- the server logs.
--
dispatchErrorMessage :: DispatchError -> Text
dispatchErrorMessage = \case
MissingParameter name ->
"Parameter '" <> name <> "' is required, but not present in the URL"
InvalidStateToken{} -> "State token is invalid, please try again"
InvalidCallbackUri{} ->
"Callback URI was not valid, this server may be misconfigured (no approot)"
OAuth2HandshakeError er -> "OAuth2 handshake failure: " <> erUserMessage er
OAuth2ResultError{} -> "Login failed, please try again"
FetchCredsIOException{} -> "Login failed, please try again"
FetchCredsYesodOAuth2Exception{} -> "Login failed, please try again"
OtherDispatchError{} -> "Login failed, please try again"
handleDispatchError
:: MonadAuthHandler site m
=> ExceptT DispatchError m TypedContent
-> m TypedContent
handleDispatchError f = do
result <- runExceptT f
either onDispatchError pure result
onDispatchError :: MonadAuthHandler site m => DispatchError -> m TypedContent
onDispatchError err = do
errorId <- liftIO $ randomText 16
let suffix = " [errorId=" <> errorId <> "]"
$(logError) $ pack (displayException err) <> suffix
let message = dispatchErrorMessage err <> suffix
messageValue =
object ["error" .= object ["id" .= errorId, "message" .= message]]
loginR <- ($ LoginR) <$> getRouteToParent
selectRep $ do
provideRep @_ @Html $ onErrorHtml loginR message
provideRep @_ @Value $ pure messageValue
| null | https://raw.githubusercontent.com/freckle/yesod-auth-oauth2/3c15ecd871a5b10f98123059b97e973221ff7395/src/Yesod/Auth/OAuth2/DispatchError.hs | haskell | # LANGUAGE DeriveAnyClass #
# LANGUAGE OverloadedStrings #
Most of these are opaque to the user. The exception details are present for
the server logs.
| # LANGUAGE DerivingStrategies #
# LANGUAGE FlexibleContexts #
# LANGUAGE LambdaCase #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
module Yesod.Auth.OAuth2.DispatchError
( DispatchError(..)
, handleDispatchError
, onDispatchError
) where
import Control.Monad.Except
import Data.Text (Text, pack)
import Network.OAuth.OAuth2.Compat (Errors)
import UnliftIO.Except ()
import UnliftIO.Exception
import Yesod.Auth hiding (ServerError)
import Yesod.Auth.OAuth2.ErrorResponse
import Yesod.Auth.OAuth2.Exception
import Yesod.Auth.OAuth2.Random
import Yesod.Core hiding (ErrorResponse)
data DispatchError
= MissingParameter Text
| InvalidStateToken (Maybe Text) Text
| InvalidCallbackUri Text
| OAuth2HandshakeError ErrorResponse
| OAuth2ResultError Errors
| FetchCredsIOException IOException
| FetchCredsYesodOAuth2Exception YesodOAuth2Exception
| OtherDispatchError Text
deriving stock Show
deriving anyclass Exception
| User - friendly message for any given ' DispatchError '
dispatchErrorMessage :: DispatchError -> Text
dispatchErrorMessage = \case
MissingParameter name ->
"Parameter '" <> name <> "' is required, but not present in the URL"
InvalidStateToken{} -> "State token is invalid, please try again"
InvalidCallbackUri{} ->
"Callback URI was not valid, this server may be misconfigured (no approot)"
OAuth2HandshakeError er -> "OAuth2 handshake failure: " <> erUserMessage er
OAuth2ResultError{} -> "Login failed, please try again"
FetchCredsIOException{} -> "Login failed, please try again"
FetchCredsYesodOAuth2Exception{} -> "Login failed, please try again"
OtherDispatchError{} -> "Login failed, please try again"
handleDispatchError
:: MonadAuthHandler site m
=> ExceptT DispatchError m TypedContent
-> m TypedContent
handleDispatchError f = do
result <- runExceptT f
either onDispatchError pure result
onDispatchError :: MonadAuthHandler site m => DispatchError -> m TypedContent
onDispatchError err = do
errorId <- liftIO $ randomText 16
let suffix = " [errorId=" <> errorId <> "]"
$(logError) $ pack (displayException err) <> suffix
let message = dispatchErrorMessage err <> suffix
messageValue =
object ["error" .= object ["id" .= errorId, "message" .= message]]
loginR <- ($ LoginR) <$> getRouteToParent
selectRep $ do
provideRep @_ @Html $ onErrorHtml loginR message
provideRep @_ @Value $ pure messageValue
|
4c7334e485f7493cb88310759623d4da36b74e18390f5703ef2c3aa11391f2cd | usocket/usocket | test-datagram.lisp | -*- Mode : LISP ; Base : 10 ; Syntax : ANSI - Common - lisp ; Package : USOCKET - TEST -*-
;;;; See LICENSE for licensing information.
(in-package :usocket-test)
(defvar *echo-server* nil)
(defvar *echo-server-port* nil)
(defun start-server ()
(multiple-value-bind (thread socket)
(socket-server "127.0.0.1" 0 #'identity nil
:in-new-thread t
:protocol :datagram)
(setq *echo-server* thread
*echo-server-port* (get-local-port socket))))
(defparameter *max-buffer-size* 32)
(defvar *send-buffer*
(make-array *max-buffer-size* :element-type '(unsigned-byte 8) :initial-element 0))
(defvar *receive-buffer*
(make-array *max-buffer-size* :element-type '(unsigned-byte 8) :initial-element 0))
(defun clean-buffers ()
(fill *send-buffer* 0)
(fill *receive-buffer* 0))
UDP Send Test # 1 : connected socket
(deftest udp-send.1
(progn
(unless (and *echo-server* *echo-server-port*)
(start-server))
(let ((s (socket-connect "127.0.0.1" *echo-server-port* :protocol :datagram)))
(clean-buffers)
(replace *send-buffer* #(1 2 3 4 5))
(socket-send s *send-buffer* 5)
(wait-for-input s :timeout 3)
(multiple-value-bind (buffer size host port)
(socket-receive s *receive-buffer* *max-buffer-size*)
(declare (ignore buffer size host port))
(reduce #'+ *receive-buffer* :start 0 :end 5))))
15)
UDP Send Test # 2 : unconnected socket
(deftest udp-send.2
(progn
(unless (and *echo-server* *echo-server-port*)
(start-server))
(let ((s (socket-connect nil nil :protocol :datagram)))
(clean-buffers)
(replace *send-buffer* #(1 2 3 4 5))
(socket-send s *send-buffer* 5 :host "127.0.0.1" :port *echo-server-port*)
(wait-for-input s :timeout 3)
(multiple-value-bind (buffer size host port)
(socket-receive s *receive-buffer* *max-buffer-size*)
(declare (ignore buffer size host port))
(reduce #'+ *receive-buffer* :start 0 :end 5))))
15)
remarkable UDP test code
(let* ((host "localhost")
(port 1111)
(server-sock
(socket-connect nil nil :protocol ':datagram :local-host host :local-port port))
(client-sock
(socket-connect host port :protocol ':datagram))
(octet-vector
(make-array 2 :element-type '(unsigned-byte 8) :initial-contents `(,(char-code #\O) ,(char-code #\K))))
(recv-octet-vector
(make-array 2 :element-type '(unsigned-byte 8))))
(socket-send client-sock octet-vector 2)
(socket-receive server-sock recv-octet-vector 2)
(prog1 (and (equalp octet-vector recv-octet-vector)
recv-octet-vector)
(socket-close server-sock)
(socket-close client-sock)))
#(79 75))
test code for LispWorks / UDP
(with-caught-conditions (#+win32 CONNECTION-RESET-ERROR
#-win32 CONNECTION-REFUSED-ERROR
nil)
(let ((sock (socket-connect "localhost" 1234
:protocol ':datagram :element-type '(unsigned-byte 8))))
(unwind-protect
(progn
(socket-send sock (make-array 16 :element-type '(unsigned-byte 8) :initial-element 0) 16)
(let ((buffer (make-array 16 :element-type '(unsigned-byte 8) :initial-element 0)))
(socket-receive sock buffer 16)))
(socket-close sock))))
nil)
(defun frank-wfi-test ()
(let ((s (socket-connect nil nil :protocol :datagram
:element-type '(unsigned-byte 8)
:local-port 8001)))
(unwind-protect
(do ((i 0 (1+ i))
(buffer (make-array 1024 :element-type '(unsigned-byte 8)
:initial-element 0))
(now (get-universal-time))
(done nil))
((or done (= i 4))
nil)
(format t "~Ds ~D Waiting state ~S~%" (- (get-universal-time) now) i (usocket::state s))
(when (wait-for-input s :ready-only t :timeout 5)
(format t "~D state ~S~%" i (usocket::state s))
(handler-bind
((error (lambda (c)
(format t "socket-receive error: ~A~%" c)
(break)
nil)))
(multiple-value-bind (buffer count remote-host remote-port)
(socket-receive s buffer 1024)
(handler-bind
((error (lambda (c)
(format t "socket-send error: ~A~%" c)
(break))))
(when buffer
(socket-send s (subseq buffer 0 count) count
:host remote-host
:port remote-port)))))))
(socket-close s))))
| null | https://raw.githubusercontent.com/usocket/usocket/d1d18eacd848fd4eb39a971bfa11c17fcf2611f0/tests/test-datagram.lisp | lisp | Base : 10 ; Syntax : ANSI - Common - lisp ; Package : USOCKET - TEST -*-
See LICENSE for licensing information. |
(in-package :usocket-test)
(defvar *echo-server* nil)
(defvar *echo-server-port* nil)
(defun start-server ()
(multiple-value-bind (thread socket)
(socket-server "127.0.0.1" 0 #'identity nil
:in-new-thread t
:protocol :datagram)
(setq *echo-server* thread
*echo-server-port* (get-local-port socket))))
(defparameter *max-buffer-size* 32)
(defvar *send-buffer*
(make-array *max-buffer-size* :element-type '(unsigned-byte 8) :initial-element 0))
(defvar *receive-buffer*
(make-array *max-buffer-size* :element-type '(unsigned-byte 8) :initial-element 0))
(defun clean-buffers ()
(fill *send-buffer* 0)
(fill *receive-buffer* 0))
UDP Send Test # 1 : connected socket
(deftest udp-send.1
(progn
(unless (and *echo-server* *echo-server-port*)
(start-server))
(let ((s (socket-connect "127.0.0.1" *echo-server-port* :protocol :datagram)))
(clean-buffers)
(replace *send-buffer* #(1 2 3 4 5))
(socket-send s *send-buffer* 5)
(wait-for-input s :timeout 3)
(multiple-value-bind (buffer size host port)
(socket-receive s *receive-buffer* *max-buffer-size*)
(declare (ignore buffer size host port))
(reduce #'+ *receive-buffer* :start 0 :end 5))))
15)
UDP Send Test # 2 : unconnected socket
(deftest udp-send.2
(progn
(unless (and *echo-server* *echo-server-port*)
(start-server))
(let ((s (socket-connect nil nil :protocol :datagram)))
(clean-buffers)
(replace *send-buffer* #(1 2 3 4 5))
(socket-send s *send-buffer* 5 :host "127.0.0.1" :port *echo-server-port*)
(wait-for-input s :timeout 3)
(multiple-value-bind (buffer size host port)
(socket-receive s *receive-buffer* *max-buffer-size*)
(declare (ignore buffer size host port))
(reduce #'+ *receive-buffer* :start 0 :end 5))))
15)
remarkable UDP test code
(let* ((host "localhost")
(port 1111)
(server-sock
(socket-connect nil nil :protocol ':datagram :local-host host :local-port port))
(client-sock
(socket-connect host port :protocol ':datagram))
(octet-vector
(make-array 2 :element-type '(unsigned-byte 8) :initial-contents `(,(char-code #\O) ,(char-code #\K))))
(recv-octet-vector
(make-array 2 :element-type '(unsigned-byte 8))))
(socket-send client-sock octet-vector 2)
(socket-receive server-sock recv-octet-vector 2)
(prog1 (and (equalp octet-vector recv-octet-vector)
recv-octet-vector)
(socket-close server-sock)
(socket-close client-sock)))
#(79 75))
test code for LispWorks / UDP
(with-caught-conditions (#+win32 CONNECTION-RESET-ERROR
#-win32 CONNECTION-REFUSED-ERROR
nil)
(let ((sock (socket-connect "localhost" 1234
:protocol ':datagram :element-type '(unsigned-byte 8))))
(unwind-protect
(progn
(socket-send sock (make-array 16 :element-type '(unsigned-byte 8) :initial-element 0) 16)
(let ((buffer (make-array 16 :element-type '(unsigned-byte 8) :initial-element 0)))
(socket-receive sock buffer 16)))
(socket-close sock))))
nil)
(defun frank-wfi-test ()
(let ((s (socket-connect nil nil :protocol :datagram
:element-type '(unsigned-byte 8)
:local-port 8001)))
(unwind-protect
(do ((i 0 (1+ i))
(buffer (make-array 1024 :element-type '(unsigned-byte 8)
:initial-element 0))
(now (get-universal-time))
(done nil))
((or done (= i 4))
nil)
(format t "~Ds ~D Waiting state ~S~%" (- (get-universal-time) now) i (usocket::state s))
(when (wait-for-input s :ready-only t :timeout 5)
(format t "~D state ~S~%" i (usocket::state s))
(handler-bind
((error (lambda (c)
(format t "socket-receive error: ~A~%" c)
(break)
nil)))
(multiple-value-bind (buffer count remote-host remote-port)
(socket-receive s buffer 1024)
(handler-bind
((error (lambda (c)
(format t "socket-send error: ~A~%" c)
(break))))
(when buffer
(socket-send s (subseq buffer 0 count) count
:host remote-host
:port remote-port)))))))
(socket-close s))))
|
29213ff01e068f7311ef9e741e234e6ac6fcbddcde2b743df20a7fe04bea8bba | apache/couchdb-couch-mrview | couch_mrview_red_views_tests.erl | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% -2.0
%
% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(couch_mrview_red_views_tests).
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("couch/include/couch_db.hrl").
-define(TIMEOUT, 1000).
setup() ->
{ok, Db} = couch_mrview_test_util:init_db(?tempdb(), red),
Db.
teardown(Db) ->
couch_db:close(Db),
couch_server:delete(Db#db.name, [?ADMIN_CTX]),
ok.
reduce_views_test_() ->
{
"Reduce views",
{
setup,
fun test_util:start_couch/0, fun test_util:stop_couch/1,
{
foreach,
fun setup/0, fun teardown/1,
[
fun should_reduce_basic/1,
fun should_reduce_key_range/1,
fun should_reduce_with_group_level/1,
fun should_reduce_with_group_exact/1
]
}
}
}.
should_reduce_basic(Db) ->
Result = run_query(Db, []),
Expect = {ok, [
{meta, []},
{row, [{key, null}, {value, 55}]}
]},
?_assertEqual(Expect, Result).
should_reduce_key_range(Db) ->
Result = run_query(Db, [{start_key, [0, 2]}, {end_key, [0, 4]}]),
Expect = {ok, [
{meta, []},
{row, [{key, null}, {value, 6}]}
]},
?_assertEqual(Expect, Result).
should_reduce_with_group_level(Db) ->
Result = run_query(Db, [{group_level, 1}]),
Expect = {ok, [
{meta, []},
{row, [{key, [0]}, {value, 30}]},
{row, [{key, [1]}, {value, 25}]}
]},
?_assertEqual(Expect, Result).
should_reduce_with_group_exact(Db) ->
Result = run_query(Db, [{group_level, exact}]),
Expect = {ok, [
{meta, []},
{row, [{key, [0, 2]}, {value, 2}]},
{row, [{key, [0, 4]}, {value, 4}]},
{row, [{key, [0, 6]}, {value, 6}]},
{row, [{key, [0, 8]}, {value, 8}]},
{row, [{key, [0, 10]}, {value, 10}]},
{row, [{key, [1, 1]}, {value, 1}]},
{row, [{key, [1, 3]}, {value, 3}]},
{row, [{key, [1, 5]}, {value, 5}]},
{row, [{key, [1, 7]}, {value, 7}]},
{row, [{key, [1, 9]}, {value, 9}]}
]},
?_assertEqual(Expect, Result).
run_query(Db, Opts) ->
couch_mrview:query_view(Db, <<"_design/red">>, <<"baz">>, Opts).
| null | https://raw.githubusercontent.com/apache/couchdb-couch-mrview/f08c26a098a46366cfaf0e14b940af1f11d84577/test/couch_mrview_red_views_tests.erl | erlang | use this file except in compliance with the License. You may obtain a copy of
the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License. | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
-module(couch_mrview_red_views_tests).
-include_lib("couch/include/couch_eunit.hrl").
-include_lib("couch/include/couch_db.hrl").
-define(TIMEOUT, 1000).
setup() ->
{ok, Db} = couch_mrview_test_util:init_db(?tempdb(), red),
Db.
teardown(Db) ->
couch_db:close(Db),
couch_server:delete(Db#db.name, [?ADMIN_CTX]),
ok.
reduce_views_test_() ->
{
"Reduce views",
{
setup,
fun test_util:start_couch/0, fun test_util:stop_couch/1,
{
foreach,
fun setup/0, fun teardown/1,
[
fun should_reduce_basic/1,
fun should_reduce_key_range/1,
fun should_reduce_with_group_level/1,
fun should_reduce_with_group_exact/1
]
}
}
}.
should_reduce_basic(Db) ->
Result = run_query(Db, []),
Expect = {ok, [
{meta, []},
{row, [{key, null}, {value, 55}]}
]},
?_assertEqual(Expect, Result).
should_reduce_key_range(Db) ->
Result = run_query(Db, [{start_key, [0, 2]}, {end_key, [0, 4]}]),
Expect = {ok, [
{meta, []},
{row, [{key, null}, {value, 6}]}
]},
?_assertEqual(Expect, Result).
should_reduce_with_group_level(Db) ->
Result = run_query(Db, [{group_level, 1}]),
Expect = {ok, [
{meta, []},
{row, [{key, [0]}, {value, 30}]},
{row, [{key, [1]}, {value, 25}]}
]},
?_assertEqual(Expect, Result).
should_reduce_with_group_exact(Db) ->
Result = run_query(Db, [{group_level, exact}]),
Expect = {ok, [
{meta, []},
{row, [{key, [0, 2]}, {value, 2}]},
{row, [{key, [0, 4]}, {value, 4}]},
{row, [{key, [0, 6]}, {value, 6}]},
{row, [{key, [0, 8]}, {value, 8}]},
{row, [{key, [0, 10]}, {value, 10}]},
{row, [{key, [1, 1]}, {value, 1}]},
{row, [{key, [1, 3]}, {value, 3}]},
{row, [{key, [1, 5]}, {value, 5}]},
{row, [{key, [1, 7]}, {value, 7}]},
{row, [{key, [1, 9]}, {value, 9}]}
]},
?_assertEqual(Expect, Result).
run_query(Db, Opts) ->
couch_mrview:query_view(Db, <<"_design/red">>, <<"baz">>, Opts).
|
d54500d3ddc580d08eda97e8d7c71060a025676c8b781fdc514b9fdd24817ee9 | ekmett/unpacked-containers | BitQueue.hs | # LANGUAGE CPP #
{-# LANGUAGE BangPatterns #-}
#include "containers.h"
-----------------------------------------------------------------------------
-- |
Module : Internal . BitQueue
Copyright : ( c ) 2016
-- License : BSD-style
-- Maintainer :
-- Portability : portable
--
-- = WARNING
--
-- This module is considered __internal__.
--
The Package Versioning Policy _ _ does not apply _ _ .
--
-- This contents of this module may change __in any way whatsoever__
-- and __without any warning__ between minor versions of this package.
--
-- Authors importing this module are expected to track development
-- closely.
--
-- = Description
--
-- An extremely light-weight, fast, and limited representation of a string of
up to ( 2*WORDSIZE - 2 ) bits . In fact , there are two representations ,
-- misleadingly named bit queue builder and bit queue. The builder supports
-- only `emptyQB`, creating an empty builder, and `snocQB`, enqueueing a bit.
-- The bit queue builder is then turned into a bit queue using `buildQ`, after
which bits can be removed one by one using ` unconsQ ` . If the size limit is
-- exceeded, further operations will silently produce nonsense.
-----------------------------------------------------------------------------
module Internal.BitQueue
( BitQueue
, BitQueueB
, emptyQB
, snocQB
, buildQ
, unconsQ
, toListQ
) where
#if !MIN_VERSION_base(4,8,0)
import Data.Word (Word)
#endif
import Internal.BitUtil (shiftLL, shiftRL, wordSize)
import Data.Bits ((.|.), (.&.), testBit)
#if MIN_VERSION_base(4,8,0)
import Data.Bits (countTrailingZeros)
#elif MIN_VERSION_base(4,5,0)
import Data.Bits (popCount)
#endif
#if !MIN_VERSION_base(4,5,0)
-- We could almost certainly improve this fall-back (copied straight from the
default definition in Data . Bits ) , but it hardly seems worth the trouble
to speed things up on GHC 7.4 and below .
countTrailingZeros :: Word -> Int
countTrailingZeros x = go 0
where
go i | i >= wordSize = i
| testBit x i = i
| otherwise = go (i+1)
#elif !MIN_VERSION_base(4,8,0)
countTrailingZeros :: Word -> Int
countTrailingZeros x = popCount ((x .&. (-x)) - 1)
# INLINE countTrailingZeros #
#endif
A bit queue builder . We represent a double word using two words
-- because we don't currently have access to proper double words.
data BitQueueB = BQB {-# UNPACK #-} !Word
{-# UNPACK #-} !Word
newtype BitQueue = BQ BitQueueB deriving Show
-- Intended for debugging.
instance Show BitQueueB where
show (BQB hi lo) = "BQ"++
show (map (testBit hi) [(wordSize - 1),(wordSize - 2)..0]
++ map (testBit lo) [(wordSize - 1),(wordSize - 2)..0])
-- | Create an empty bit queue builder. This is represented as a single guard
-- bit in the most significant position.
emptyQB :: BitQueueB
emptyQB = BQB (1 `shiftLL` (wordSize - 1)) 0
# INLINE emptyQB #
Shift the double word to the right by one bit .
shiftQBR1 :: BitQueueB -> BitQueueB
shiftQBR1 (BQB hi lo) = BQB hi' lo' where
lo' = (lo `shiftRL` 1) .|. (hi `shiftLL` (wordSize - 1))
hi' = hi `shiftRL` 1
# INLINE shiftQBR1 #
| Enqueue a bit . This works by shifting the queue right one bit ,
-- then setting the most significant bit as requested.
# INLINE snocQB #
snocQB :: BitQueueB -> Bool -> BitQueueB
snocQB bq b = case shiftQBR1 bq of
BQB hi lo -> BQB (hi .|. (fromIntegral (fromEnum b) `shiftLL` (wordSize - 1))) lo
-- | Convert a bit queue builder to a bit queue. This shifts in a new
-- guard bit on the left, and shifts right until the old guard bit falls
-- off.
# INLINE buildQ #
buildQ :: BitQueueB -> BitQueue
buildQ (BQB hi 0) = BQ (BQB 0 lo') where
zeros = countTrailingZeros hi
lo' = ((hi `shiftRL` 1) .|. (1 `shiftLL` (wordSize - 1))) `shiftRL` zeros
buildQ (BQB hi lo) = BQ (BQB hi' lo') where
zeros = countTrailingZeros lo
lo1 = (lo `shiftRL` 1) .|. (hi `shiftLL` (wordSize - 1))
hi1 = (hi `shiftRL` 1) .|. (1 `shiftLL` (wordSize - 1))
lo' = (lo1 `shiftRL` zeros) .|. (hi1 `shiftLL` (wordSize - zeros))
hi' = hi1 `shiftRL` zeros
-- Test if the queue is empty, which occurs when theres
-- nothing left but a guard bit in the least significant
-- place.
nullQ :: BitQueue -> Bool
nullQ (BQ (BQB 0 1)) = True
nullQ _ = False
# INLINE nullQ #
-- | Dequeue an element, or discover the queue is empty.
unconsQ :: BitQueue -> Maybe (Bool, BitQueue)
unconsQ q | nullQ q = Nothing
unconsQ (BQ bq@(BQB _ lo)) = Just (hd, BQ tl)
where
!hd = (lo .&. 1) /= 0
!tl = shiftQBR1 bq
# INLINE unconsQ #
-- | Convert a bit queue to a list of bits by unconsing.
-- This is used to test that the queue functions properly.
toListQ :: BitQueue -> [Bool]
toListQ bq = case unconsQ bq of
Nothing -> []
Just (hd, tl) -> hd : toListQ tl
| null | https://raw.githubusercontent.com/ekmett/unpacked-containers/7dc56993a57511b58257b5d389473e638a7082d2/unpacked-containers/utils/Internal/BitQueue.hs | haskell | # LANGUAGE BangPatterns #
---------------------------------------------------------------------------
|
License : BSD-style
Maintainer :
Portability : portable
= WARNING
This module is considered __internal__.
This contents of this module may change __in any way whatsoever__
and __without any warning__ between minor versions of this package.
Authors importing this module are expected to track development
closely.
= Description
An extremely light-weight, fast, and limited representation of a string of
misleadingly named bit queue builder and bit queue. The builder supports
only `emptyQB`, creating an empty builder, and `snocQB`, enqueueing a bit.
The bit queue builder is then turned into a bit queue using `buildQ`, after
exceeded, further operations will silently produce nonsense.
---------------------------------------------------------------------------
We could almost certainly improve this fall-back (copied straight from the
because we don't currently have access to proper double words.
# UNPACK #
# UNPACK #
Intended for debugging.
| Create an empty bit queue builder. This is represented as a single guard
bit in the most significant position.
then setting the most significant bit as requested.
| Convert a bit queue builder to a bit queue. This shifts in a new
guard bit on the left, and shifts right until the old guard bit falls
off.
Test if the queue is empty, which occurs when theres
nothing left but a guard bit in the least significant
place.
| Dequeue an element, or discover the queue is empty.
| Convert a bit queue to a list of bits by unconsing.
This is used to test that the queue functions properly. | # LANGUAGE CPP #
#include "containers.h"
Module : Internal . BitQueue
Copyright : ( c ) 2016
The Package Versioning Policy _ _ does not apply _ _ .
up to ( 2*WORDSIZE - 2 ) bits . In fact , there are two representations ,
which bits can be removed one by one using ` unconsQ ` . If the size limit is
module Internal.BitQueue
( BitQueue
, BitQueueB
, emptyQB
, snocQB
, buildQ
, unconsQ
, toListQ
) where
#if !MIN_VERSION_base(4,8,0)
import Data.Word (Word)
#endif
import Internal.BitUtil (shiftLL, shiftRL, wordSize)
import Data.Bits ((.|.), (.&.), testBit)
#if MIN_VERSION_base(4,8,0)
import Data.Bits (countTrailingZeros)
#elif MIN_VERSION_base(4,5,0)
import Data.Bits (popCount)
#endif
#if !MIN_VERSION_base(4,5,0)
default definition in Data . Bits ) , but it hardly seems worth the trouble
to speed things up on GHC 7.4 and below .
countTrailingZeros :: Word -> Int
countTrailingZeros x = go 0
where
go i | i >= wordSize = i
| testBit x i = i
| otherwise = go (i+1)
#elif !MIN_VERSION_base(4,8,0)
countTrailingZeros :: Word -> Int
countTrailingZeros x = popCount ((x .&. (-x)) - 1)
# INLINE countTrailingZeros #
#endif
A bit queue builder . We represent a double word using two words
newtype BitQueue = BQ BitQueueB deriving Show
instance Show BitQueueB where
show (BQB hi lo) = "BQ"++
show (map (testBit hi) [(wordSize - 1),(wordSize - 2)..0]
++ map (testBit lo) [(wordSize - 1),(wordSize - 2)..0])
emptyQB :: BitQueueB
emptyQB = BQB (1 `shiftLL` (wordSize - 1)) 0
# INLINE emptyQB #
Shift the double word to the right by one bit .
shiftQBR1 :: BitQueueB -> BitQueueB
shiftQBR1 (BQB hi lo) = BQB hi' lo' where
lo' = (lo `shiftRL` 1) .|. (hi `shiftLL` (wordSize - 1))
hi' = hi `shiftRL` 1
# INLINE shiftQBR1 #
| Enqueue a bit . This works by shifting the queue right one bit ,
# INLINE snocQB #
snocQB :: BitQueueB -> Bool -> BitQueueB
snocQB bq b = case shiftQBR1 bq of
BQB hi lo -> BQB (hi .|. (fromIntegral (fromEnum b) `shiftLL` (wordSize - 1))) lo
# INLINE buildQ #
buildQ :: BitQueueB -> BitQueue
buildQ (BQB hi 0) = BQ (BQB 0 lo') where
zeros = countTrailingZeros hi
lo' = ((hi `shiftRL` 1) .|. (1 `shiftLL` (wordSize - 1))) `shiftRL` zeros
buildQ (BQB hi lo) = BQ (BQB hi' lo') where
zeros = countTrailingZeros lo
lo1 = (lo `shiftRL` 1) .|. (hi `shiftLL` (wordSize - 1))
hi1 = (hi `shiftRL` 1) .|. (1 `shiftLL` (wordSize - 1))
lo' = (lo1 `shiftRL` zeros) .|. (hi1 `shiftLL` (wordSize - zeros))
hi' = hi1 `shiftRL` zeros
nullQ :: BitQueue -> Bool
nullQ (BQ (BQB 0 1)) = True
nullQ _ = False
# INLINE nullQ #
unconsQ :: BitQueue -> Maybe (Bool, BitQueue)
unconsQ q | nullQ q = Nothing
unconsQ (BQ bq@(BQB _ lo)) = Just (hd, BQ tl)
where
!hd = (lo .&. 1) /= 0
!tl = shiftQBR1 bq
# INLINE unconsQ #
toListQ :: BitQueue -> [Bool]
toListQ bq = case unconsQ bq of
Nothing -> []
Just (hd, tl) -> hd : toListQ tl
|
5b20d4ca3f815eeb32f7fb72138e01e66a0d891fcc648805ebac1747c9a9d87a | simmone/racket-simple-xlsx | set-styles.rkt | #lang racket
(require "../lib/dimension.rkt")
(require "../lib/sheet-lib.rkt")
(require "lib.rkt")
(require "style.rkt")
(require "border-style.rkt")
(require "font-style.rkt")
(require "alignment-style.rkt")
(require "number-style.rkt")
(require "fill-style.rkt")
(provide (contract-out
[set-col-range-width (-> string? natural? void?)]
[set-row-range-height (-> string? natural? void?)]
[set-freeze-row-col-range (-> natural? natural? void?)]
[set-merge-cell-range (-> cell-range? void?)]
[set-cell-range-border-style (-> string? border-direction? rgb? border-mode? void?)]
[border-direction? (-> string? boolean?)]
[set-cell-range-font-style (-> string? natural? string? rgb? void?)]
[set-row-range-font-style (-> string? natural? string? rgb? void?)]
[set-col-range-font-style (-> string? natural? string? rgb? void?)]
[set-cell-range-alignment-style (-> string? horizontal_mode? vertical_mode? void?)]
[set-row-range-alignment-style (-> string? horizontal_mode? vertical_mode? void?)]
[set-col-range-alignment-style (-> string? horizontal_mode? vertical_mode? void?)]
[set-cell-range-number-style (-> string? string? void?)]
[set-row-range-number-style (-> string? string? void?)]
[set-col-range-number-style (-> string? string? void?)]
[set-cell-range-date-style (-> string? string? void?)]
[set-row-range-date-style (-> string? string? void?)]
[set-col-range-date-style (-> string? string? void?)]
[set-cell-range-fill-style (-> string? rgb? fill-pattern? void?)]
[set-row-range-fill-style (-> string? rgb? fill-pattern? void?)]
[set-col-range-fill-style (-> string? rgb? fill-pattern? void?)]
[update-style (-> STYLE? (or/c STYLE? BORDER-STYLE? FONT-STYLE? ALIGNMENT-STYLE? NUMBER-STYLE? FILL-STYLE?) STYLE?)]
))
(define (set-col-range-width col_range width)
(let ([_col_range (to-col-range col_range)])
(let loop ([col_index (car _col_range)])
(when (<= col_index (cdr _col_range))
(hash-set! (SHEET-STYLE-col->width_map (*CURRENT_SHEET_STYLE*)) col_index width)
(loop (add1 col_index))))))
(define (set-row-range-height row_range height)
(let ([_row_range (to-row-range row_range)])
(let loop ([row_index (car _row_range)])
(when (<= row_index (cdr _row_range))
(hash-set! (SHEET-STYLE-row->height_map (*CURRENT_SHEET_STYLE*)) row_index height)
(loop (add1 row_index))))))
(define (set-freeze-row-col-range rows cols)
(set-SHEET-STYLE-freeze_range! (*CURRENT_SHEET_STYLE*) (cons rows cols)))
(define (set-merge-cell-range cell_range)
(hash-set! (SHEET-STYLE-cell_range_merge_map (*CURRENT_SHEET_STYLE*)) cell_range #t))
(define (border-direction? direction)
(ormap (lambda (_direction) (string=? _direction direction)) '("all" "side" "top" "bottom" "left" "right")))
(define (set-cell-range-border-style cell_range border_direction border_color border_mode)
(cond
[(string=? border_direction "side")
(let-values ([(top_cells bottom_cells left_cells right_cells) (get-cell-range-four-sides-cells cell_range)])
(add-cells-style top_cells (BORDER-STYLE "" border_color border_mode "" "" "" "" "" ""))
(add-cells-style bottom_cells (BORDER-STYLE "" "" "" border_color border_mode "" "" "" ""))
(add-cells-style left_cells (BORDER-STYLE "" "" "" "" "" border_color border_mode "" ""))
(add-cells-style right_cells (BORDER-STYLE "" "" "" "" "" "" "" border_color border_mode))
)]
[(string=? border_direction "all")
(add-cell-range-style cell_range (BORDER-STYLE "" border_color border_mode border_color border_mode border_color border_mode border_color border_mode))]
[(string=? border_direction "top")
(add-cell-range-style cell_range (BORDER-STYLE "" border_color border_mode "" "" "" "" "" ""))]
[(string=? border_direction "bottom")
(add-cell-range-style cell_range (BORDER-STYLE "" "" "" border_color border_mode "" "" "" ""))]
[(string=? border_direction "left")
(add-cell-range-style cell_range (BORDER-STYLE "" "" "" "" "" border_color border_mode "" ""))]
[(string=? border_direction "right")
(add-cell-range-style cell_range (BORDER-STYLE "" "" "" "" "" "" "" border_color border_mode))]))
(define (set-cell-range-font-style cell_range font_size font_name font_color)
(add-cell-range-style cell_range (FONT-STYLE "" font_size font_name font_color)))
(define (set-row-range-font-style row_range font_size font_name font_color)
(add-row-range-style row_range (FONT-STYLE "" font_size font_name font_color)))
(define (set-col-range-font-style col_range font_size font_name font_color)
(add-col-range-style col_range (FONT-STYLE "" font_size font_name font_color)))
(define (set-cell-range-alignment-style cell_range horizontal_placement vertical_placement)
(add-cell-range-style cell_range (ALIGNMENT-STYLE "" horizontal_placement vertical_placement)))
(define (set-row-range-alignment-style row_range horizontal_placement vertical_placement)
(add-row-range-style row_range (ALIGNMENT-STYLE "" horizontal_placement vertical_placement)))
(define (set-col-range-alignment-style col_range horizontal_placement vertical_placement)
(add-col-range-style col_range (ALIGNMENT-STYLE "" horizontal_placement vertical_placement)))
(define (set-cell-range-number-style cell_range format)
(add-cell-range-style cell_range (NUMBER-STYLE "" format)))
(define (set-row-range-number-style row_range format)
(add-row-range-style row_range (NUMBER-STYLE "" format)))
(define (set-col-range-number-style col_range format)
(add-col-range-style col_range (NUMBER-STYLE "" format)))
(define (set-cell-range-date-style cell_range format)
(set-cell-range-number-style cell_range format))
(define (set-row-range-date-style row_range format)
(set-row-range-number-style row_range format))
(define (set-col-range-date-style col_range format)
(set-col-range-number-style col_range format))
(define (set-cell-range-fill-style cell_range color pattern)
(add-cell-range-style cell_range (FILL-STYLE ""color pattern)))
(define (set-row-range-fill-style row_range color pattern)
(add-row-range-style row_range (FILL-STYLE ""color pattern)))
(define (set-col-range-fill-style col_range color pattern)
(add-col-range-style col_range (FILL-STYLE ""color pattern)))
(define (add-cell-range-style cell_range new_style)
(add-cells-style (cell_range->cell_list cell_range) new_style))
(define (add-cells-style cells new_style)
(let loop ([_cells cells])
(when (not (null? _cells))
(let* ([old_cell_style (hash-ref (SHEET-STYLE-cell->style_map (*CURRENT_SHEET_STYLE*)) (car _cells) (new-style))]
[updated_cell_style (update-style old_cell_style new_style)])
(hash-set! (SHEET-STYLE-cell->style_map (*CURRENT_SHEET_STYLE*)) (car _cells) updated_cell_style)
)
(loop (cdr _cells)))))
(define (add-row-range-style row_range new_style)
(let* ([row_range (to-row-range row_range)]
[start_row_index (car row_range)]
[end_row_index (cdr row_range)]
[cells (hash-keys (SHEET-STYLE-cell->style_map (*CURRENT_SHEET_STYLE*)))])
(let loop ([loop_row_index start_row_index])
(when (<= loop_row_index end_row_index)
(let* ([old_row_style (hash-ref (SHEET-STYLE-row->style_map (*CURRENT_SHEET_STYLE*)) loop_row_index (new-style))]
[updated_row_style (update-style old_row_style new_style)])
(hash-set! (SHEET-STYLE-row->style_map (*CURRENT_SHEET_STYLE*)) loop_row_index updated_row_style)
(add-cells-style (get-row-cells loop_row_index) updated_row_style))
(loop (add1 loop_row_index))))))
(define (add-col-range-style col_range new_style)
(let* ([col_range (to-col-range col_range)]
[start_col_index (car col_range)]
[end_col_index (cdr col_range)]
[cells (hash-keys (SHEET-STYLE-cell->style_map (*CURRENT_SHEET_STYLE*)))])
(let loop ([loop_col_index start_col_index])
(when (<= loop_col_index end_col_index)
(let* ([old_col_style (hash-ref (SHEET-STYLE-col->style_map (*CURRENT_SHEET_STYLE*)) loop_col_index (new-style))]
[updated_col_style (update-style old_col_style new_style)])
(hash-set! (SHEET-STYLE-col->style_map (*CURRENT_SHEET_STYLE*)) loop_col_index updated_col_style)
(add-cells-style (get-col-cells loop_col_index) updated_col_style))
(loop (add1 loop_col_index))))))
(define (update-style _style new_style)
(cond
[(BORDER-STYLE? new_style)
(let ([_border_style (STYLE-border_style _style)])
(if _border_style
(begin
(when (not (string=? (BORDER-STYLE-top_color new_style) ""))
(set! _border_style (struct-copy BORDER-STYLE _border_style [top_color (BORDER-STYLE-top_color new_style)])))
(when (not (string=? (BORDER-STYLE-top_mode new_style) ""))
(set! _border_style (struct-copy BORDER-STYLE _border_style [top_mode (BORDER-STYLE-top_mode new_style)])))
(when (not (string=? (BORDER-STYLE-bottom_color new_style) ""))
(set! _border_style (struct-copy BORDER-STYLE _border_style [bottom_color (BORDER-STYLE-bottom_color new_style)])))
(when (not (string=? (BORDER-STYLE-bottom_mode new_style) ""))
(set! _border_style (struct-copy BORDER-STYLE _border_style [bottom_mode (BORDER-STYLE-bottom_mode new_style)])))
(when (not (string=? (BORDER-STYLE-left_color new_style) ""))
(set! _border_style (struct-copy BORDER-STYLE _border_style [left_color (BORDER-STYLE-left_color new_style)])))
(when (not (string=? (BORDER-STYLE-left_mode new_style) ""))
(set! _border_style (struct-copy BORDER-STYLE _border_style [left_mode (BORDER-STYLE-left_mode new_style)])))
(when (not (string=? (BORDER-STYLE-right_color new_style) ""))
(set! _border_style (struct-copy BORDER-STYLE _border_style [right_color (BORDER-STYLE-right_color new_style)])))
(when (not (string=? (BORDER-STYLE-right_mode new_style) ""))
(set! _border_style (struct-copy BORDER-STYLE _border_style [right_mode (BORDER-STYLE-right_mode new_style)])))
(struct-copy STYLE _style [border_style _border_style]))
(struct-copy STYLE _style [border_style new_style])))]
[(FONT-STYLE? new_style)
(struct-copy STYLE _style [font_style new_style])]
[(ALIGNMENT-STYLE? new_style)
(struct-copy STYLE _style [alignment_style new_style])]
[(NUMBER-STYLE? new_style)
(struct-copy STYLE _style [number_style new_style])]
[(FILL-STYLE? new_style)
(struct-copy STYLE _style [fill_style new_style])]
[(STYLE? new_style)
(let ([updated_new_style _style])
(when (STYLE-border_style new_style)
(set! updated_new_style (update-style updated_new_style (STYLE-border_style new_style))))
(when (STYLE-font_style new_style)
(set! updated_new_style (update-style updated_new_style (STYLE-font_style new_style))))
(when (STYLE-alignment_style new_style)
(set! updated_new_style (update-style updated_new_style (STYLE-alignment_style new_style))))
(when (STYLE-number_style new_style)
(set! updated_new_style (update-style updated_new_style (STYLE-number_style new_style))))
(when (STYLE-fill_style new_style)
(set! updated_new_style (update-style updated_new_style (STYLE-fill_style new_style))))
updated_new_style)]
))
| null | https://raw.githubusercontent.com/simmone/racket-simple-xlsx/e0ac3190b6700b0ee1dd80ed91a8f4318533d012/simple-xlsx/style/set-styles.rkt | racket | #lang racket
(require "../lib/dimension.rkt")
(require "../lib/sheet-lib.rkt")
(require "lib.rkt")
(require "style.rkt")
(require "border-style.rkt")
(require "font-style.rkt")
(require "alignment-style.rkt")
(require "number-style.rkt")
(require "fill-style.rkt")
(provide (contract-out
[set-col-range-width (-> string? natural? void?)]
[set-row-range-height (-> string? natural? void?)]
[set-freeze-row-col-range (-> natural? natural? void?)]
[set-merge-cell-range (-> cell-range? void?)]
[set-cell-range-border-style (-> string? border-direction? rgb? border-mode? void?)]
[border-direction? (-> string? boolean?)]
[set-cell-range-font-style (-> string? natural? string? rgb? void?)]
[set-row-range-font-style (-> string? natural? string? rgb? void?)]
[set-col-range-font-style (-> string? natural? string? rgb? void?)]
[set-cell-range-alignment-style (-> string? horizontal_mode? vertical_mode? void?)]
[set-row-range-alignment-style (-> string? horizontal_mode? vertical_mode? void?)]
[set-col-range-alignment-style (-> string? horizontal_mode? vertical_mode? void?)]
[set-cell-range-number-style (-> string? string? void?)]
[set-row-range-number-style (-> string? string? void?)]
[set-col-range-number-style (-> string? string? void?)]
[set-cell-range-date-style (-> string? string? void?)]
[set-row-range-date-style (-> string? string? void?)]
[set-col-range-date-style (-> string? string? void?)]
[set-cell-range-fill-style (-> string? rgb? fill-pattern? void?)]
[set-row-range-fill-style (-> string? rgb? fill-pattern? void?)]
[set-col-range-fill-style (-> string? rgb? fill-pattern? void?)]
[update-style (-> STYLE? (or/c STYLE? BORDER-STYLE? FONT-STYLE? ALIGNMENT-STYLE? NUMBER-STYLE? FILL-STYLE?) STYLE?)]
))
(define (set-col-range-width col_range width)
(let ([_col_range (to-col-range col_range)])
(let loop ([col_index (car _col_range)])
(when (<= col_index (cdr _col_range))
(hash-set! (SHEET-STYLE-col->width_map (*CURRENT_SHEET_STYLE*)) col_index width)
(loop (add1 col_index))))))
(define (set-row-range-height row_range height)
(let ([_row_range (to-row-range row_range)])
(let loop ([row_index (car _row_range)])
(when (<= row_index (cdr _row_range))
(hash-set! (SHEET-STYLE-row->height_map (*CURRENT_SHEET_STYLE*)) row_index height)
(loop (add1 row_index))))))
(define (set-freeze-row-col-range rows cols)
(set-SHEET-STYLE-freeze_range! (*CURRENT_SHEET_STYLE*) (cons rows cols)))
(define (set-merge-cell-range cell_range)
(hash-set! (SHEET-STYLE-cell_range_merge_map (*CURRENT_SHEET_STYLE*)) cell_range #t))
(define (border-direction? direction)
(ormap (lambda (_direction) (string=? _direction direction)) '("all" "side" "top" "bottom" "left" "right")))
(define (set-cell-range-border-style cell_range border_direction border_color border_mode)
(cond
[(string=? border_direction "side")
(let-values ([(top_cells bottom_cells left_cells right_cells) (get-cell-range-four-sides-cells cell_range)])
(add-cells-style top_cells (BORDER-STYLE "" border_color border_mode "" "" "" "" "" ""))
(add-cells-style bottom_cells (BORDER-STYLE "" "" "" border_color border_mode "" "" "" ""))
(add-cells-style left_cells (BORDER-STYLE "" "" "" "" "" border_color border_mode "" ""))
(add-cells-style right_cells (BORDER-STYLE "" "" "" "" "" "" "" border_color border_mode))
)]
[(string=? border_direction "all")
(add-cell-range-style cell_range (BORDER-STYLE "" border_color border_mode border_color border_mode border_color border_mode border_color border_mode))]
[(string=? border_direction "top")
(add-cell-range-style cell_range (BORDER-STYLE "" border_color border_mode "" "" "" "" "" ""))]
[(string=? border_direction "bottom")
(add-cell-range-style cell_range (BORDER-STYLE "" "" "" border_color border_mode "" "" "" ""))]
[(string=? border_direction "left")
(add-cell-range-style cell_range (BORDER-STYLE "" "" "" "" "" border_color border_mode "" ""))]
[(string=? border_direction "right")
(add-cell-range-style cell_range (BORDER-STYLE "" "" "" "" "" "" "" border_color border_mode))]))
(define (set-cell-range-font-style cell_range font_size font_name font_color)
(add-cell-range-style cell_range (FONT-STYLE "" font_size font_name font_color)))
(define (set-row-range-font-style row_range font_size font_name font_color)
(add-row-range-style row_range (FONT-STYLE "" font_size font_name font_color)))
(define (set-col-range-font-style col_range font_size font_name font_color)
(add-col-range-style col_range (FONT-STYLE "" font_size font_name font_color)))
(define (set-cell-range-alignment-style cell_range horizontal_placement vertical_placement)
(add-cell-range-style cell_range (ALIGNMENT-STYLE "" horizontal_placement vertical_placement)))
(define (set-row-range-alignment-style row_range horizontal_placement vertical_placement)
(add-row-range-style row_range (ALIGNMENT-STYLE "" horizontal_placement vertical_placement)))
(define (set-col-range-alignment-style col_range horizontal_placement vertical_placement)
(add-col-range-style col_range (ALIGNMENT-STYLE "" horizontal_placement vertical_placement)))
(define (set-cell-range-number-style cell_range format)
(add-cell-range-style cell_range (NUMBER-STYLE "" format)))
(define (set-row-range-number-style row_range format)
(add-row-range-style row_range (NUMBER-STYLE "" format)))
(define (set-col-range-number-style col_range format)
(add-col-range-style col_range (NUMBER-STYLE "" format)))
(define (set-cell-range-date-style cell_range format)
(set-cell-range-number-style cell_range format))
(define (set-row-range-date-style row_range format)
(set-row-range-number-style row_range format))
(define (set-col-range-date-style col_range format)
(set-col-range-number-style col_range format))
(define (set-cell-range-fill-style cell_range color pattern)
(add-cell-range-style cell_range (FILL-STYLE ""color pattern)))
(define (set-row-range-fill-style row_range color pattern)
(add-row-range-style row_range (FILL-STYLE ""color pattern)))
(define (set-col-range-fill-style col_range color pattern)
(add-col-range-style col_range (FILL-STYLE ""color pattern)))
(define (add-cell-range-style cell_range new_style)
(add-cells-style (cell_range->cell_list cell_range) new_style))
(define (add-cells-style cells new_style)
(let loop ([_cells cells])
(when (not (null? _cells))
(let* ([old_cell_style (hash-ref (SHEET-STYLE-cell->style_map (*CURRENT_SHEET_STYLE*)) (car _cells) (new-style))]
[updated_cell_style (update-style old_cell_style new_style)])
(hash-set! (SHEET-STYLE-cell->style_map (*CURRENT_SHEET_STYLE*)) (car _cells) updated_cell_style)
)
(loop (cdr _cells)))))
(define (add-row-range-style row_range new_style)
(let* ([row_range (to-row-range row_range)]
[start_row_index (car row_range)]
[end_row_index (cdr row_range)]
[cells (hash-keys (SHEET-STYLE-cell->style_map (*CURRENT_SHEET_STYLE*)))])
(let loop ([loop_row_index start_row_index])
(when (<= loop_row_index end_row_index)
(let* ([old_row_style (hash-ref (SHEET-STYLE-row->style_map (*CURRENT_SHEET_STYLE*)) loop_row_index (new-style))]
[updated_row_style (update-style old_row_style new_style)])
(hash-set! (SHEET-STYLE-row->style_map (*CURRENT_SHEET_STYLE*)) loop_row_index updated_row_style)
(add-cells-style (get-row-cells loop_row_index) updated_row_style))
(loop (add1 loop_row_index))))))
(define (add-col-range-style col_range new_style)
(let* ([col_range (to-col-range col_range)]
[start_col_index (car col_range)]
[end_col_index (cdr col_range)]
[cells (hash-keys (SHEET-STYLE-cell->style_map (*CURRENT_SHEET_STYLE*)))])
(let loop ([loop_col_index start_col_index])
(when (<= loop_col_index end_col_index)
(let* ([old_col_style (hash-ref (SHEET-STYLE-col->style_map (*CURRENT_SHEET_STYLE*)) loop_col_index (new-style))]
[updated_col_style (update-style old_col_style new_style)])
(hash-set! (SHEET-STYLE-col->style_map (*CURRENT_SHEET_STYLE*)) loop_col_index updated_col_style)
(add-cells-style (get-col-cells loop_col_index) updated_col_style))
(loop (add1 loop_col_index))))))
(define (update-style _style new_style)
(cond
[(BORDER-STYLE? new_style)
(let ([_border_style (STYLE-border_style _style)])
(if _border_style
(begin
(when (not (string=? (BORDER-STYLE-top_color new_style) ""))
(set! _border_style (struct-copy BORDER-STYLE _border_style [top_color (BORDER-STYLE-top_color new_style)])))
(when (not (string=? (BORDER-STYLE-top_mode new_style) ""))
(set! _border_style (struct-copy BORDER-STYLE _border_style [top_mode (BORDER-STYLE-top_mode new_style)])))
(when (not (string=? (BORDER-STYLE-bottom_color new_style) ""))
(set! _border_style (struct-copy BORDER-STYLE _border_style [bottom_color (BORDER-STYLE-bottom_color new_style)])))
(when (not (string=? (BORDER-STYLE-bottom_mode new_style) ""))
(set! _border_style (struct-copy BORDER-STYLE _border_style [bottom_mode (BORDER-STYLE-bottom_mode new_style)])))
(when (not (string=? (BORDER-STYLE-left_color new_style) ""))
(set! _border_style (struct-copy BORDER-STYLE _border_style [left_color (BORDER-STYLE-left_color new_style)])))
(when (not (string=? (BORDER-STYLE-left_mode new_style) ""))
(set! _border_style (struct-copy BORDER-STYLE _border_style [left_mode (BORDER-STYLE-left_mode new_style)])))
(when (not (string=? (BORDER-STYLE-right_color new_style) ""))
(set! _border_style (struct-copy BORDER-STYLE _border_style [right_color (BORDER-STYLE-right_color new_style)])))
(when (not (string=? (BORDER-STYLE-right_mode new_style) ""))
(set! _border_style (struct-copy BORDER-STYLE _border_style [right_mode (BORDER-STYLE-right_mode new_style)])))
(struct-copy STYLE _style [border_style _border_style]))
(struct-copy STYLE _style [border_style new_style])))]
[(FONT-STYLE? new_style)
(struct-copy STYLE _style [font_style new_style])]
[(ALIGNMENT-STYLE? new_style)
(struct-copy STYLE _style [alignment_style new_style])]
[(NUMBER-STYLE? new_style)
(struct-copy STYLE _style [number_style new_style])]
[(FILL-STYLE? new_style)
(struct-copy STYLE _style [fill_style new_style])]
[(STYLE? new_style)
(let ([updated_new_style _style])
(when (STYLE-border_style new_style)
(set! updated_new_style (update-style updated_new_style (STYLE-border_style new_style))))
(when (STYLE-font_style new_style)
(set! updated_new_style (update-style updated_new_style (STYLE-font_style new_style))))
(when (STYLE-alignment_style new_style)
(set! updated_new_style (update-style updated_new_style (STYLE-alignment_style new_style))))
(when (STYLE-number_style new_style)
(set! updated_new_style (update-style updated_new_style (STYLE-number_style new_style))))
(when (STYLE-fill_style new_style)
(set! updated_new_style (update-style updated_new_style (STYLE-fill_style new_style))))
updated_new_style)]
))
| |
7b28264e87b67caa0a3e1bfe5f04a627079f8b7dc13eda37d788115e3f9d8779 | shentufoundation/deepsea | BinNat.ml | open BinNums
open BinPos
open Datatypes
module N =
struct
(** val succ_double : coq_N -> coq_N **)
let succ_double = function
| N0 -> Npos Coq_xH
| Npos p -> Npos (Coq_xI p)
(** val double : coq_N -> coq_N **)
let double = function
| N0 -> N0
| Npos p -> Npos (Coq_xO p)
* succ_pos : coq_N - > positive *
let succ_pos = function
| N0 -> Coq_xH
| Npos p -> Pos.succ p
(** val sub : coq_N -> coq_N -> coq_N **)
let sub n m =
match n with
| N0 -> N0
| Npos n' ->
(match m with
| N0 -> n
| Npos m' ->
(match Pos.sub_mask n' m' with
| Pos.IsPos p -> Npos p
| _ -> N0))
(** val compare : coq_N -> coq_N -> comparison **)
let compare n m =
match n with
| N0 -> (match m with
| N0 -> Eq
| Npos _ -> Lt)
| Npos n' -> (match m with
| N0 -> Gt
| Npos m' -> Pos.compare n' m')
* leb : coq_N - > coq_N - > bool *
let leb x y =
match compare x y with
| Gt -> Coq_false
| _ -> Coq_true
* val pos_div_eucl : positive - > coq_N - > ( coq_N , coq_N ) prod *
let rec pos_div_eucl a b =
match a with
| Coq_xI a' ->
let Coq_pair (q, r) = pos_div_eucl a' b in
let r' = succ_double r in
(match leb b r' with
| Coq_true -> Coq_pair ((succ_double q), (sub r' b))
| Coq_false -> Coq_pair ((double q), r'))
| Coq_xO a' ->
let Coq_pair (q, r) = pos_div_eucl a' b in
let r' = double r in
(match leb b r' with
| Coq_true -> Coq_pair ((succ_double q), (sub r' b))
| Coq_false -> Coq_pair ((double q), r'))
| Coq_xH ->
(match b with
| N0 -> Coq_pair (N0, (Npos Coq_xH))
| Npos p ->
(match p with
| Coq_xH -> Coq_pair ((Npos Coq_xH), N0)
| _ -> Coq_pair (N0, (Npos Coq_xH))))
(** val coq_lor : coq_N -> coq_N -> coq_N **)
let coq_lor n m =
match n with
| N0 -> m
| Npos p -> (match m with
| N0 -> n
| Npos q -> Npos (Pos.coq_lor p q))
* coq_land : coq_N - > coq_N - > coq_N *
let coq_land n m =
match n with
| N0 -> N0
| Npos p -> (match m with
| N0 -> N0
| Npos q -> Pos.coq_land p q)
* : coq_N - > coq_N - > coq_N *
let rec ldiff n m =
match n with
| N0 -> N0
| Npos p -> (match m with
| N0 -> n
| Npos q -> Pos.ldiff p q)
* coq_lxor : coq_N - > coq_N - > coq_N *
let coq_lxor n m =
match n with
| N0 -> m
| Npos p -> (match m with
| N0 -> n
| Npos q -> Pos.coq_lxor p q)
* : coq_N - > coq_N - > bool *
let testbit a n =
match a with
| N0 -> Coq_false
| Npos p -> Pos.testbit p n
end
| null | https://raw.githubusercontent.com/shentufoundation/deepsea/970576a97c8992655ed2f173f576502d73b827e1/src/backend/extraction/BinNat.ml | ocaml | * val succ_double : coq_N -> coq_N *
* val double : coq_N -> coq_N *
* val sub : coq_N -> coq_N -> coq_N *
* val compare : coq_N -> coq_N -> comparison *
* val coq_lor : coq_N -> coq_N -> coq_N * | open BinNums
open BinPos
open Datatypes
module N =
struct
let succ_double = function
| N0 -> Npos Coq_xH
| Npos p -> Npos (Coq_xI p)
let double = function
| N0 -> N0
| Npos p -> Npos (Coq_xO p)
* succ_pos : coq_N - > positive *
let succ_pos = function
| N0 -> Coq_xH
| Npos p -> Pos.succ p
let sub n m =
match n with
| N0 -> N0
| Npos n' ->
(match m with
| N0 -> n
| Npos m' ->
(match Pos.sub_mask n' m' with
| Pos.IsPos p -> Npos p
| _ -> N0))
let compare n m =
match n with
| N0 -> (match m with
| N0 -> Eq
| Npos _ -> Lt)
| Npos n' -> (match m with
| N0 -> Gt
| Npos m' -> Pos.compare n' m')
* leb : coq_N - > coq_N - > bool *
let leb x y =
match compare x y with
| Gt -> Coq_false
| _ -> Coq_true
* val pos_div_eucl : positive - > coq_N - > ( coq_N , coq_N ) prod *
let rec pos_div_eucl a b =
match a with
| Coq_xI a' ->
let Coq_pair (q, r) = pos_div_eucl a' b in
let r' = succ_double r in
(match leb b r' with
| Coq_true -> Coq_pair ((succ_double q), (sub r' b))
| Coq_false -> Coq_pair ((double q), r'))
| Coq_xO a' ->
let Coq_pair (q, r) = pos_div_eucl a' b in
let r' = double r in
(match leb b r' with
| Coq_true -> Coq_pair ((succ_double q), (sub r' b))
| Coq_false -> Coq_pair ((double q), r'))
| Coq_xH ->
(match b with
| N0 -> Coq_pair (N0, (Npos Coq_xH))
| Npos p ->
(match p with
| Coq_xH -> Coq_pair ((Npos Coq_xH), N0)
| _ -> Coq_pair (N0, (Npos Coq_xH))))
let coq_lor n m =
match n with
| N0 -> m
| Npos p -> (match m with
| N0 -> n
| Npos q -> Npos (Pos.coq_lor p q))
* coq_land : coq_N - > coq_N - > coq_N *
let coq_land n m =
match n with
| N0 -> N0
| Npos p -> (match m with
| N0 -> N0
| Npos q -> Pos.coq_land p q)
* : coq_N - > coq_N - > coq_N *
let rec ldiff n m =
match n with
| N0 -> N0
| Npos p -> (match m with
| N0 -> n
| Npos q -> Pos.ldiff p q)
* coq_lxor : coq_N - > coq_N - > coq_N *
let coq_lxor n m =
match n with
| N0 -> m
| Npos p -> (match m with
| N0 -> n
| Npos q -> Pos.coq_lxor p q)
* : coq_N - > coq_N - > bool *
let testbit a n =
match a with
| N0 -> Coq_false
| Npos p -> Pos.testbit p n
end
|
0adbb22b01ca59a10d5ed9a7b56dfe2d5a9116bd507bf0235d631c2e7871aab8 | wireless-net/erlang-nommu | ex_treeCtrl.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 2009 - 2013 . All Rights Reserved .
%%
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at /.
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
-module(ex_treeCtrl).
-behaviour(wx_object).
%% Client API
-export([start/1]).
%% wx_object callbacks
-export([init/1, terminate/2, code_change/3,
handle_info/2, handle_call/3, handle_cast/2, handle_event/2]).
-include_lib("wx/include/wx.hrl").
-record(state,
{
parent,
config
}).
start(Config) ->
wx_object:start_link(?MODULE, Config, []).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
init(Config) ->
wx:batch(fun() -> do_init(Config) end).
do_init(Config) ->
Parent = proplists:get_value(parent, Config),
Panel = wxPanel:new(Parent, []),
%% Setup sizers
MainSizer = wxBoxSizer:new(?wxVERTICAL),
Sizer = wxStaticBoxSizer:new(?wxVERTICAL, Panel,
[{label, "wxTreeCtrl"}]),
%% Setup treeCtrl
TreeCtrl = wxTreeCtrl:new(Panel, []),
RootId = wxTreeCtrl:addRoot(TreeCtrl, "Root"),
Name the first items
Items = ["item "++integer_to_list(Int)||
Int <- lists:seq(1,10)],
Create the first items in the treeCtrl
SubItems = [{wxTreeCtrl:appendItem(TreeCtrl, RootId, Item), Item}||
Item <- Items],
%% Create sub items
[wxTreeCtrl:appendItem(TreeCtrl, ItemId, Item++" sub item "++integer_to_list(Int))||
{ItemId, Item} <- SubItems, Int <- lists:seq(1,10)],
wxTreeCtrl:expand(TreeCtrl, RootId),
%% Add to sizers
Options = [{flag, ?wxEXPAND}, {proportion, 1}],
wxSizer:add(Sizer, TreeCtrl, Options),
wxSizer:add(MainSizer, Sizer, Options),
wxTreeCtrl:connect(TreeCtrl, command_tree_item_collapsed),
wxTreeCtrl:connect(TreeCtrl, command_tree_item_expanded),
wxTreeCtrl:connect(TreeCtrl, command_tree_sel_changed),
wxPanel:setSizer(Panel, MainSizer),
{Panel, #state{parent=Panel, config=Config}}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Async Events are handled in handle_event as in handle_info
handle_event(#wx{event = #wxTree{type = command_tree_item_collapsed,
item = Item},
obj = TreeCtrl},
State = #state{}) ->
ItemText = wxTreeCtrl:getItemText(TreeCtrl, Item),
demo:format(State#state.config, "You have collapsed ~p.\n", [ItemText]),
{noreply, State};
handle_event(#wx{event = #wxTree{type = command_tree_item_expanded,
item = Item},
obj = TreeCtrl},
State = #state{}) ->
ItemText = wxTreeCtrl:getItemText(TreeCtrl, Item),
demo:format(State#state.config, "You have expanded ~p.\n", [ItemText]),
{noreply, State};
handle_event(#wx{event = #wxTree{type = command_tree_sel_changed,
item = Item},
obj = TreeCtrl},
State = #state{}) ->
ItemText = wxTreeCtrl:getItemText(TreeCtrl, Item),
demo:format(State#state.config, "You have selected ~p.\n", [ItemText]),
{noreply, State}.
%% Callbacks handled as normal gen_server callbacks
handle_info(Msg, State) ->
demo:format(State#state.config, "Got Info ~p\n", [Msg]),
{noreply, State}.
handle_call(shutdown, _From, State=#state{parent=Panel}) ->
wxPanel:destroy(Panel),
{stop, normal, ok, State};
handle_call(Msg, _From, State) ->
demo:format(State#state.config, "Got Call ~p\n", [Msg]),
{reply,{error, nyi}, State}.
handle_cast(Msg, State) ->
io:format("Got cast ~p~n",[Msg]),
{noreply,State}.
code_change(_, _, State) ->
{stop, ignore, State}.
terminate(_Reason, _State) ->
ok.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Local functions
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
| null | https://raw.githubusercontent.com/wireless-net/erlang-nommu/79f32f81418e022d8ad8e0e447deaea407289926/lib/wx/examples/demo/ex_treeCtrl.erl | erlang |
%CopyrightBegin%
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
%CopyrightEnd%
Client API
wx_object callbacks
Setup sizers
Setup treeCtrl
Create sub items
Add to sizers
Async Events are handled in handle_event as in handle_info
Callbacks handled as normal gen_server callbacks
Local functions
| Copyright Ericsson AB 2009 - 2013 . All Rights Reserved .
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
-module(ex_treeCtrl).
-behaviour(wx_object).
-export([start/1]).
-export([init/1, terminate/2, code_change/3,
handle_info/2, handle_call/3, handle_cast/2, handle_event/2]).
-include_lib("wx/include/wx.hrl").
-record(state,
{
parent,
config
}).
start(Config) ->
wx_object:start_link(?MODULE, Config, []).
init(Config) ->
wx:batch(fun() -> do_init(Config) end).
do_init(Config) ->
Parent = proplists:get_value(parent, Config),
Panel = wxPanel:new(Parent, []),
MainSizer = wxBoxSizer:new(?wxVERTICAL),
Sizer = wxStaticBoxSizer:new(?wxVERTICAL, Panel,
[{label, "wxTreeCtrl"}]),
TreeCtrl = wxTreeCtrl:new(Panel, []),
RootId = wxTreeCtrl:addRoot(TreeCtrl, "Root"),
Name the first items
Items = ["item "++integer_to_list(Int)||
Int <- lists:seq(1,10)],
Create the first items in the treeCtrl
SubItems = [{wxTreeCtrl:appendItem(TreeCtrl, RootId, Item), Item}||
Item <- Items],
[wxTreeCtrl:appendItem(TreeCtrl, ItemId, Item++" sub item "++integer_to_list(Int))||
{ItemId, Item} <- SubItems, Int <- lists:seq(1,10)],
wxTreeCtrl:expand(TreeCtrl, RootId),
Options = [{flag, ?wxEXPAND}, {proportion, 1}],
wxSizer:add(Sizer, TreeCtrl, Options),
wxSizer:add(MainSizer, Sizer, Options),
wxTreeCtrl:connect(TreeCtrl, command_tree_item_collapsed),
wxTreeCtrl:connect(TreeCtrl, command_tree_item_expanded),
wxTreeCtrl:connect(TreeCtrl, command_tree_sel_changed),
wxPanel:setSizer(Panel, MainSizer),
{Panel, #state{parent=Panel, config=Config}}.
handle_event(#wx{event = #wxTree{type = command_tree_item_collapsed,
item = Item},
obj = TreeCtrl},
State = #state{}) ->
ItemText = wxTreeCtrl:getItemText(TreeCtrl, Item),
demo:format(State#state.config, "You have collapsed ~p.\n", [ItemText]),
{noreply, State};
handle_event(#wx{event = #wxTree{type = command_tree_item_expanded,
item = Item},
obj = TreeCtrl},
State = #state{}) ->
ItemText = wxTreeCtrl:getItemText(TreeCtrl, Item),
demo:format(State#state.config, "You have expanded ~p.\n", [ItemText]),
{noreply, State};
handle_event(#wx{event = #wxTree{type = command_tree_sel_changed,
item = Item},
obj = TreeCtrl},
State = #state{}) ->
ItemText = wxTreeCtrl:getItemText(TreeCtrl, Item),
demo:format(State#state.config, "You have selected ~p.\n", [ItemText]),
{noreply, State}.
handle_info(Msg, State) ->
demo:format(State#state.config, "Got Info ~p\n", [Msg]),
{noreply, State}.
handle_call(shutdown, _From, State=#state{parent=Panel}) ->
wxPanel:destroy(Panel),
{stop, normal, ok, State};
handle_call(Msg, _From, State) ->
demo:format(State#state.config, "Got Call ~p\n", [Msg]),
{reply,{error, nyi}, State}.
handle_cast(Msg, State) ->
io:format("Got cast ~p~n",[Msg]),
{noreply,State}.
code_change(_, _, State) ->
{stop, ignore, State}.
terminate(_Reason, _State) ->
ok.
|
3bbfe33754181cbd8d7edc48131b0b8c9107aa3b1e7116af6779cb6d8350c32b | kevinlynx/ext-blog | defpackage.lisp |
(defpackage #:ext-blog.theme.test
(:use common-lisp))
| null | https://raw.githubusercontent.com/kevinlynx/ext-blog/4f6a6f0ab64f9384d53d41d1208ebaa7b9575534/theme/test/defpackage.lisp | lisp |
(defpackage #:ext-blog.theme.test
(:use common-lisp))
| |
e8fe1ceee8fb9756c4909a9409795f57d5d02ea6f910484c1ead8f656063e2b2 | darkleaf/publicator | register.clj | (ns publicator.web.responders.user.register
(:require
[publicator.use-cases.interactors.user.register :as interactor]
[publicator.web.responders.base :as responders.base]
[publicator.web.responses :as responses]
[publicator.web.presenters.explain-data :as explain-data]
[publicator.web.forms.user.register :as form]
[publicator.web.routing :as routing]))
(defmethod responders.base/result->resp ::interactor/initial-params [[_ params]]
(let [form (form/build params)]
(responses/render-form form)))
(defmethod responders.base/result->resp ::interactor/already-registered [_]
(-> (form/already-registered-error)
responses/render-errors))
(derive ::interactor/processed ::responders.base/redirect-to-root)
(derive ::interactor/invalid-params ::responders.base/invalid-params)
(derive ::interactor/already-logged-in ::responders.base/forbidden)
| null | https://raw.githubusercontent.com/darkleaf/publicator/e07eee93d8f3d9c07a15d574619d5ea59c00f87d/web/src/publicator/web/responders/user/register.clj | clojure | (ns publicator.web.responders.user.register
(:require
[publicator.use-cases.interactors.user.register :as interactor]
[publicator.web.responders.base :as responders.base]
[publicator.web.responses :as responses]
[publicator.web.presenters.explain-data :as explain-data]
[publicator.web.forms.user.register :as form]
[publicator.web.routing :as routing]))
(defmethod responders.base/result->resp ::interactor/initial-params [[_ params]]
(let [form (form/build params)]
(responses/render-form form)))
(defmethod responders.base/result->resp ::interactor/already-registered [_]
(-> (form/already-registered-error)
responses/render-errors))
(derive ::interactor/processed ::responders.base/redirect-to-root)
(derive ::interactor/invalid-params ::responders.base/invalid-params)
(derive ::interactor/already-logged-in ::responders.base/forbidden)
| |
298afcaed8b1a620e000873a04dde51f1b7d8fc84f6421836627fbb929ede4f1 | haskell-distributed/distributed-process | TH.hs | -- | Template Haskell support
# LANGUAGE TemplateHaskell , CPP #
module Control.Distributed.Process.Internal.Closure.TH
( -- * User-level API
remotable
, remotableDecl
, mkStatic
, functionSDict
, functionTDict
, mkClosure
, mkStaticClosure
) where
import Prelude hiding (succ, any)
import Control.Applicative ((<$>))
import Language.Haskell.TH
( -- Q monad and operations
Q
, reify
, Loc(loc_module)
, location
-- Names
, Name
, mkName
, nameBase
Algebraic data types
, Dec(SigD)
, Exp
, Type(AppT, ForallT, VarT, ArrowT)
, Info(VarI)
#if MIN_VERSION_template_haskell(2,17,0)
, Specificity
#endif
, TyVarBndr(PlainTV, KindedTV)
, Pred
#if MIN_VERSION_template_haskell(2,10,0)
, conT
, appT
#else
, classP
#endif
, varT
-- Lifted constructors
-- .. Literals
, stringL
-- .. Patterns
, normalB
, clause
-- .. Expressions
, varE
, litE
-- .. Top-level declarations
, funD
, sigD
)
import Data.Maybe (catMaybes)
import Data.Binary (encode)
import Data.Generics (everywhereM, mkM, gmapM)
import Data.Rank1Dynamic (toDynamic)
import Data.Rank1Typeable
( Zero
, Succ
, TypVar
)
import Control.Distributed.Static
( RemoteTable
, registerStatic
, Static
, staticLabel
, closure
, staticCompose
, staticClosure
)
import Control.Distributed.Process.Internal.Types (Process)
import Control.Distributed.Process.Serializable
( SerializableDict(SerializableDict)
)
import Control.Distributed.Process.Internal.Closure.BuiltIn (staticDecode)
--------------------------------------------------------------------------------
-- User-level API --
--------------------------------------------------------------------------------
-- | Create the closure, decoder, and metadata definitions for the given list
-- of functions
remotable :: [Name] -> Q [Dec]
remotable ns = do
types <- mapM getType ns
(closures, inserts) <- unzip <$> mapM generateDefs types
rtable <- createMetaData (mkName "__remoteTable") (concat inserts)
return $ concat closures ++ rtable
-- | Like 'remotable', but parameterized by the declaration of a function
-- instead of the function name. So where for 'remotable' you'd do
--
-- > f :: T1 -> T2
-- > f = ...
-- >
-- > remotable ['f]
--
-- with 'remotableDecl' you would instead do
--
-- > remotableDecl [
-- > [d| f :: T1 -> T2 ;
-- > f = ...
-- > |]
-- > ]
--
-- 'remotableDecl' creates the function specified as well as the various
-- dictionaries and static versions that 'remotable' also creates.
-- 'remotableDecl' is sometimes necessary when you want to refer to, say,
@$(mkClosure ' f)@ within the definition of @f@ itself .
--
-- NOTE: 'remotableDecl' creates @__remoteTableDecl@ instead of @__remoteTable@
-- so that you can use both 'remotable' and 'remotableDecl' within the same
-- module.
remotableDecl :: [Q [Dec]] -> Q [Dec]
remotableDecl qDecs = do
decs <- concat <$> sequence qDecs
let types = catMaybes (map typeOf decs)
(closures, inserts) <- unzip <$> mapM generateDefs types
rtable <- createMetaData (mkName "__remoteTableDecl") (concat inserts)
return $ decs ++ concat closures ++ rtable
where
typeOf :: Dec -> Maybe (Name, Type)
typeOf (SigD name typ) = Just (name, typ)
typeOf _ = Nothing
-- | Construct a static value.
--
-- If @f : forall a1 .. an. T@
-- then @$(mkStatic 'f) :: forall a1 .. an. Static T@.
-- Be sure to pass 'f' to 'remotable'.
mkStatic :: Name -> Q Exp
mkStatic = varE . staticName
-- | If @f : T1 -> T2@ is a monomorphic function
-- then @$(functionSDict 'f) :: Static (SerializableDict T1)@.
--
-- Be sure to pass 'f' to 'remotable'.
functionSDict :: Name -> Q Exp
functionSDict = varE . sdictName
-- | If @f : T1 -> Process T2@ is a monomorphic function
-- then @$(functionTDict 'f) :: Static (SerializableDict T2)@.
--
-- Be sure to pass 'f' to 'remotable'.
functionTDict :: Name -> Q Exp
functionTDict = varE . tdictName
| If @f : T1 - > T2@ then @$(mkClosure ' f ) : : T1 - > Closure
--
-- TODO: The current version of mkClosure is too polymorphic
-- (@forall a. Binary a => a -> Closure T2).
mkClosure :: Name -> Q Exp
mkClosure n =
[| closure ($(mkStatic n) `staticCompose` staticDecode $(functionSDict n))
. encode
|]
-- | Make a 'Closure' from a static function. This is useful for
-- making a closure for a top-level @Process ()@ function, because
using ' mkClosure ' would require adding a dummy @()@ argument .
--
mkStaticClosure :: Name -> Q Exp
mkStaticClosure n = [| staticClosure $( mkStatic n ) |]
--------------------------------------------------------------------------------
-- Internal (Template Haskell) --
--------------------------------------------------------------------------------
-- | Generate the code to add the metadata to the CH runtime
createMetaData :: Name -> [Q Exp] -> Q [Dec]
createMetaData name is =
sequence [ sigD name [t| RemoteTable -> RemoteTable |]
, sfnD name (compose is)
]
generateDefs :: (Name, Type) -> Q ([Dec], [Q Exp])
generateDefs (origName, fullType) = do
proc <- [t| Process |]
let (typVars, typ') = case fullType of ForallT vars [] mono -> (vars, mono)
_ -> ([], fullType)
-- The main "static" entry
(static, register) <- makeStatic typVars typ'
-- If n :: T1 -> T2, static serializable dictionary for T1
-- TODO: we should check if arg is an instance of Serializable, but we cannot
--
(sdict, registerSDict) <- case (typVars, typ') of
([], ArrowT `AppT` arg `AppT` _res) ->
makeDict (sdictName origName) arg
_ ->
return ([], [])
-- If n :: T1 -> Process T2, static serializable dictionary for T2
-- TODO: check if T2 is serializable (same as above)
(tdict, registerTDict) <- case (typVars, typ') of
([], ArrowT `AppT` _arg `AppT` (proc' `AppT` res)) | proc' == proc ->
makeDict (tdictName origName) res
_ ->
return ([], [])
return ( concat [static, sdict, tdict]
, concat [register, registerSDict, registerTDict]
)
where
#if MIN_VERSION_template_haskell(2,17,0)
makeStatic :: [TyVarBndr Specificity] -> Type -> Q ([Dec], [Q Exp])
#else
makeStatic :: [TyVarBndr] -> Type -> Q ([Dec], [Q Exp])
#endif
makeStatic typVars typ = do
static <- generateStatic origName typVars typ
let dyn = case typVars of
[] -> [| toDynamic $(varE origName) |]
_ -> [| toDynamic ($(varE origName) :: $(monomorphize typVars typ)) |]
return ( static
, [ [| registerStatic $(showFQN origName) $dyn |] ]
)
makeDict :: Name -> Type -> Q ([Dec], [Q Exp])
makeDict dictName typ = do
sdict <- generateDict dictName typ
let dyn = [| toDynamic (SerializableDict :: SerializableDict $(return typ)) |]
return ( sdict
, [ [| registerStatic $(showFQN dictName) $dyn |] ]
)
-- | Turn a polymorphic type into a monomorphic type using ANY and co
#if MIN_VERSION_template_haskell(2,17,0)
monomorphize :: [TyVarBndr Specificity] -> Type -> Q Type
#else
monomorphize :: [TyVarBndr] -> Type -> Q Type
#endif
monomorphize tvs =
let subst = zip (map tyVarBndrName tvs) anys
in everywhereM (mkM (applySubst subst))
where
anys :: [Q Type]
anys = map typVar (iterate succ zero)
typVar :: Q Type -> Q Type
typVar t = [t| TypVar $t |]
zero :: Q Type
zero = [t| Zero |]
succ :: Q Type -> Q Type
succ t = [t| Succ $t |]
applySubst :: [(Name, Q Type)] -> Type -> Q Type
applySubst s (VarT n) =
case lookup n s of
Nothing -> return (VarT n)
Just t -> t
applySubst s t = gmapM (mkM (applySubst s)) t
-- | Generate a static value
#if MIN_VERSION_template_haskell(2,17,0)
generateStatic :: Name -> [TyVarBndr Specificity] -> Type -> Q [Dec]
#else
generateStatic :: Name -> [TyVarBndr] -> Type -> Q [Dec]
#endif
generateStatic n xs typ = do
staticTyp <- [t| Static |]
sequence
[ sigD (staticName n) $ do
txs <- sequence $ map typeable xs
return (ForallT xs
txs
(staticTyp `AppT` typ))
, sfnD (staticName n) [| staticLabel $(showFQN n) |]
]
where
#if MIN_VERSION_template_haskell(2,17,0)
typeable :: TyVarBndr Specificity -> Q Pred
#else
typeable :: TyVarBndr -> Q Pred
#endif
typeable tv =
#if MIN_VERSION_template_haskell(2,10,0)
conT (mkName "Typeable") `appT` varT (tyVarBndrName tv)
#else
classP (mkName "Typeable") [varT (tyVarBndrName tv)]
#endif
-- | Generate a serialization dictionary with name 'n' for type 'typ'
generateDict :: Name -> Type -> Q [Dec]
generateDict n typ = do
sequence
[ sigD n $ [t| Static (SerializableDict $(return typ)) |]
, sfnD n [| staticLabel $(showFQN n) |]
]
staticName :: Name -> Name
staticName n = mkName $ nameBase n ++ "__static"
sdictName :: Name -> Name
sdictName n = mkName $ nameBase n ++ "__sdict"
tdictName :: Name -> Name
tdictName n = mkName $ nameBase n ++ "__tdict"
--------------------------------------------------------------------------------
Generic Template Haskell auxiliary functions --
--------------------------------------------------------------------------------
-- | Compose a set of expressions
compose :: [Q Exp] -> Q Exp
compose [] = [| id |]
compose [e] = e
compose (e:es) = [| $e . $(compose es) |]
-- | Literal string as an expression
stringE :: String -> Q Exp
stringE = litE . stringL
-- | Look up the "original name" (module:name) and type of a top-level function
getType :: Name -> Q (Name, Type)
getType name = do
info <- reify name
case info of
#if MIN_VERSION_template_haskell(2,11,0)
VarI origName typ _ -> return (origName, typ)
#else
VarI origName typ _ _ -> return (origName, typ)
#endif
_ -> fail $ show name ++ " not found"
-- | Variation on 'funD' which takes a single expression to define the function
sfnD :: Name -> Q Exp -> Q Dec
sfnD n e = funD n [clause [] (normalB e) []]
-- | The name of a type variable binding occurrence
#if MIN_VERSION_template_haskell(2,17,0)
tyVarBndrName :: TyVarBndr Specificity -> Name
tyVarBndrName (PlainTV n _) = n
tyVarBndrName (KindedTV n _ _) = n
#else
tyVarBndrName :: TyVarBndr -> Name
tyVarBndrName (PlainTV n) = n
tyVarBndrName (KindedTV n _) = n
#endif
-- | Fully qualified name; that is, the name and the _current_ module
--
-- We ignore the module part of the Name argument (which may or may not exist)
-- because we construct various names (`staticName`, `sdictName`, `tdictName`)
-- and those names certainly won't have Module components.
showFQN :: Name -> Q Exp
showFQN n = do
loc <- location
stringE (loc_module loc ++ "." ++ nameBase n)
| null | https://raw.githubusercontent.com/haskell-distributed/distributed-process/7b8cbf59cc7f291a524a5c405d514e48e6544182/src/Control/Distributed/Process/Internal/Closure/TH.hs | haskell | | Template Haskell support
* User-level API
Q monad and operations
Names
Lifted constructors
.. Literals
.. Patterns
.. Expressions
.. Top-level declarations
------------------------------------------------------------------------------
User-level API --
------------------------------------------------------------------------------
| Create the closure, decoder, and metadata definitions for the given list
of functions
| Like 'remotable', but parameterized by the declaration of a function
instead of the function name. So where for 'remotable' you'd do
> f :: T1 -> T2
> f = ...
>
> remotable ['f]
with 'remotableDecl' you would instead do
> remotableDecl [
> [d| f :: T1 -> T2 ;
> f = ...
> |]
> ]
'remotableDecl' creates the function specified as well as the various
dictionaries and static versions that 'remotable' also creates.
'remotableDecl' is sometimes necessary when you want to refer to, say,
NOTE: 'remotableDecl' creates @__remoteTableDecl@ instead of @__remoteTable@
so that you can use both 'remotable' and 'remotableDecl' within the same
module.
| Construct a static value.
If @f : forall a1 .. an. T@
then @$(mkStatic 'f) :: forall a1 .. an. Static T@.
Be sure to pass 'f' to 'remotable'.
| If @f : T1 -> T2@ is a monomorphic function
then @$(functionSDict 'f) :: Static (SerializableDict T1)@.
Be sure to pass 'f' to 'remotable'.
| If @f : T1 -> Process T2@ is a monomorphic function
then @$(functionTDict 'f) :: Static (SerializableDict T2)@.
Be sure to pass 'f' to 'remotable'.
TODO: The current version of mkClosure is too polymorphic
(@forall a. Binary a => a -> Closure T2).
| Make a 'Closure' from a static function. This is useful for
making a closure for a top-level @Process ()@ function, because
------------------------------------------------------------------------------
Internal (Template Haskell) --
------------------------------------------------------------------------------
| Generate the code to add the metadata to the CH runtime
The main "static" entry
If n :: T1 -> T2, static serializable dictionary for T1
TODO: we should check if arg is an instance of Serializable, but we cannot
If n :: T1 -> Process T2, static serializable dictionary for T2
TODO: check if T2 is serializable (same as above)
| Turn a polymorphic type into a monomorphic type using ANY and co
| Generate a static value
| Generate a serialization dictionary with name 'n' for type 'typ'
------------------------------------------------------------------------------
------------------------------------------------------------------------------
| Compose a set of expressions
| Literal string as an expression
| Look up the "original name" (module:name) and type of a top-level function
| Variation on 'funD' which takes a single expression to define the function
| The name of a type variable binding occurrence
| Fully qualified name; that is, the name and the _current_ module
We ignore the module part of the Name argument (which may or may not exist)
because we construct various names (`staticName`, `sdictName`, `tdictName`)
and those names certainly won't have Module components. | # LANGUAGE TemplateHaskell , CPP #
module Control.Distributed.Process.Internal.Closure.TH
remotable
, remotableDecl
, mkStatic
, functionSDict
, functionTDict
, mkClosure
, mkStaticClosure
) where
import Prelude hiding (succ, any)
import Control.Applicative ((<$>))
import Language.Haskell.TH
Q
, reify
, Loc(loc_module)
, location
, Name
, mkName
, nameBase
Algebraic data types
, Dec(SigD)
, Exp
, Type(AppT, ForallT, VarT, ArrowT)
, Info(VarI)
#if MIN_VERSION_template_haskell(2,17,0)
, Specificity
#endif
, TyVarBndr(PlainTV, KindedTV)
, Pred
#if MIN_VERSION_template_haskell(2,10,0)
, conT
, appT
#else
, classP
#endif
, varT
, stringL
, normalB
, clause
, varE
, litE
, funD
, sigD
)
import Data.Maybe (catMaybes)
import Data.Binary (encode)
import Data.Generics (everywhereM, mkM, gmapM)
import Data.Rank1Dynamic (toDynamic)
import Data.Rank1Typeable
( Zero
, Succ
, TypVar
)
import Control.Distributed.Static
( RemoteTable
, registerStatic
, Static
, staticLabel
, closure
, staticCompose
, staticClosure
)
import Control.Distributed.Process.Internal.Types (Process)
import Control.Distributed.Process.Serializable
( SerializableDict(SerializableDict)
)
import Control.Distributed.Process.Internal.Closure.BuiltIn (staticDecode)
remotable :: [Name] -> Q [Dec]
remotable ns = do
types <- mapM getType ns
(closures, inserts) <- unzip <$> mapM generateDefs types
rtable <- createMetaData (mkName "__remoteTable") (concat inserts)
return $ concat closures ++ rtable
@$(mkClosure ' f)@ within the definition of @f@ itself .
remotableDecl :: [Q [Dec]] -> Q [Dec]
remotableDecl qDecs = do
decs <- concat <$> sequence qDecs
let types = catMaybes (map typeOf decs)
(closures, inserts) <- unzip <$> mapM generateDefs types
rtable <- createMetaData (mkName "__remoteTableDecl") (concat inserts)
return $ decs ++ concat closures ++ rtable
where
typeOf :: Dec -> Maybe (Name, Type)
typeOf (SigD name typ) = Just (name, typ)
typeOf _ = Nothing
mkStatic :: Name -> Q Exp
mkStatic = varE . staticName
functionSDict :: Name -> Q Exp
functionSDict = varE . sdictName
functionTDict :: Name -> Q Exp
functionTDict = varE . tdictName
| If @f : T1 - > T2@ then @$(mkClosure ' f ) : : T1 - > Closure
mkClosure :: Name -> Q Exp
mkClosure n =
[| closure ($(mkStatic n) `staticCompose` staticDecode $(functionSDict n))
. encode
|]
using ' mkClosure ' would require adding a dummy @()@ argument .
mkStaticClosure :: Name -> Q Exp
mkStaticClosure n = [| staticClosure $( mkStatic n ) |]
createMetaData :: Name -> [Q Exp] -> Q [Dec]
createMetaData name is =
sequence [ sigD name [t| RemoteTable -> RemoteTable |]
, sfnD name (compose is)
]
generateDefs :: (Name, Type) -> Q ([Dec], [Q Exp])
generateDefs (origName, fullType) = do
proc <- [t| Process |]
let (typVars, typ') = case fullType of ForallT vars [] mono -> (vars, mono)
_ -> ([], fullType)
(static, register) <- makeStatic typVars typ'
(sdict, registerSDict) <- case (typVars, typ') of
([], ArrowT `AppT` arg `AppT` _res) ->
makeDict (sdictName origName) arg
_ ->
return ([], [])
(tdict, registerTDict) <- case (typVars, typ') of
([], ArrowT `AppT` _arg `AppT` (proc' `AppT` res)) | proc' == proc ->
makeDict (tdictName origName) res
_ ->
return ([], [])
return ( concat [static, sdict, tdict]
, concat [register, registerSDict, registerTDict]
)
where
#if MIN_VERSION_template_haskell(2,17,0)
makeStatic :: [TyVarBndr Specificity] -> Type -> Q ([Dec], [Q Exp])
#else
makeStatic :: [TyVarBndr] -> Type -> Q ([Dec], [Q Exp])
#endif
makeStatic typVars typ = do
static <- generateStatic origName typVars typ
let dyn = case typVars of
[] -> [| toDynamic $(varE origName) |]
_ -> [| toDynamic ($(varE origName) :: $(monomorphize typVars typ)) |]
return ( static
, [ [| registerStatic $(showFQN origName) $dyn |] ]
)
makeDict :: Name -> Type -> Q ([Dec], [Q Exp])
makeDict dictName typ = do
sdict <- generateDict dictName typ
let dyn = [| toDynamic (SerializableDict :: SerializableDict $(return typ)) |]
return ( sdict
, [ [| registerStatic $(showFQN dictName) $dyn |] ]
)
#if MIN_VERSION_template_haskell(2,17,0)
monomorphize :: [TyVarBndr Specificity] -> Type -> Q Type
#else
monomorphize :: [TyVarBndr] -> Type -> Q Type
#endif
monomorphize tvs =
let subst = zip (map tyVarBndrName tvs) anys
in everywhereM (mkM (applySubst subst))
where
anys :: [Q Type]
anys = map typVar (iterate succ zero)
typVar :: Q Type -> Q Type
typVar t = [t| TypVar $t |]
zero :: Q Type
zero = [t| Zero |]
succ :: Q Type -> Q Type
succ t = [t| Succ $t |]
applySubst :: [(Name, Q Type)] -> Type -> Q Type
applySubst s (VarT n) =
case lookup n s of
Nothing -> return (VarT n)
Just t -> t
applySubst s t = gmapM (mkM (applySubst s)) t
#if MIN_VERSION_template_haskell(2,17,0)
generateStatic :: Name -> [TyVarBndr Specificity] -> Type -> Q [Dec]
#else
generateStatic :: Name -> [TyVarBndr] -> Type -> Q [Dec]
#endif
generateStatic n xs typ = do
staticTyp <- [t| Static |]
sequence
[ sigD (staticName n) $ do
txs <- sequence $ map typeable xs
return (ForallT xs
txs
(staticTyp `AppT` typ))
, sfnD (staticName n) [| staticLabel $(showFQN n) |]
]
where
#if MIN_VERSION_template_haskell(2,17,0)
typeable :: TyVarBndr Specificity -> Q Pred
#else
typeable :: TyVarBndr -> Q Pred
#endif
typeable tv =
#if MIN_VERSION_template_haskell(2,10,0)
conT (mkName "Typeable") `appT` varT (tyVarBndrName tv)
#else
classP (mkName "Typeable") [varT (tyVarBndrName tv)]
#endif
generateDict :: Name -> Type -> Q [Dec]
generateDict n typ = do
sequence
[ sigD n $ [t| Static (SerializableDict $(return typ)) |]
, sfnD n [| staticLabel $(showFQN n) |]
]
staticName :: Name -> Name
staticName n = mkName $ nameBase n ++ "__static"
sdictName :: Name -> Name
sdictName n = mkName $ nameBase n ++ "__sdict"
tdictName :: Name -> Name
tdictName n = mkName $ nameBase n ++ "__tdict"
compose :: [Q Exp] -> Q Exp
compose [] = [| id |]
compose [e] = e
compose (e:es) = [| $e . $(compose es) |]
stringE :: String -> Q Exp
stringE = litE . stringL
getType :: Name -> Q (Name, Type)
getType name = do
info <- reify name
case info of
#if MIN_VERSION_template_haskell(2,11,0)
VarI origName typ _ -> return (origName, typ)
#else
VarI origName typ _ _ -> return (origName, typ)
#endif
_ -> fail $ show name ++ " not found"
sfnD :: Name -> Q Exp -> Q Dec
sfnD n e = funD n [clause [] (normalB e) []]
#if MIN_VERSION_template_haskell(2,17,0)
tyVarBndrName :: TyVarBndr Specificity -> Name
tyVarBndrName (PlainTV n _) = n
tyVarBndrName (KindedTV n _ _) = n
#else
tyVarBndrName :: TyVarBndr -> Name
tyVarBndrName (PlainTV n) = n
tyVarBndrName (KindedTV n _) = n
#endif
showFQN :: Name -> Q Exp
showFQN n = do
loc <- location
stringE (loc_module loc ++ "." ++ nameBase n)
|
d02ae5ead7d4a5622e7fc7d0a1627243962fbbb55ccdedcc60c005d434fc9a93 | kupl/FixML | sub13.ml | type formula =
| True
| False
| Not of formula
| AndAlso of formula * formula
| OrElse of formula * formula
| Imply of formula * formula
| Equal of exp * exp
and exp =
| Num of int
| Plus of exp * exp
| Minus of exp * exp
(* Evaluate a propositional formula to a boolean.
   The original compared sub-formulas *structurally* against the True/False
   constructors (e.g. [Not b -> if b = True then ...]), which is wrong for any
   nested formula such as [Not (Not True)] or [AndAlso (Not True, True)], and
   [Equal] compared expression trees syntactically instead of by value.
   Evaluate recursively instead; [eval_exp] reduces arithmetic to an int. *)
let rec eval : formula -> bool
= fun f ->
  match f with
  | True -> true
  | False -> false
  | Not b -> not (eval b)
  | AndAlso (a, b) -> eval a && eval b
  | OrElse (a, b) -> eval a || eval b
  | Imply (a, b) -> not (eval a) || eval b   (* a -> b  ==  ~a \/ b *)
  | Equal (a, b) -> eval_exp a = eval_exp b
(* evaluate an arithmetic expression to its integer value *)
and eval_exp : exp -> int
= fun e ->
  match e with
  | Num n -> n
  | Plus (a, b) -> eval_exp a + eval_exp b
  | Minus (a, b) -> eval_exp a - eval_exp b ;;
| null | https://raw.githubusercontent.com/kupl/FixML/0a032a733d68cd8ccc8b1034d2908cd43b241fce/benchmarks/formula/formula1/submissions/sub13.ml | ocaml | type formula =
| True
| False
| Not of formula
| AndAlso of formula * formula
| OrElse of formula * formula
| Imply of formula * formula
| Equal of exp * exp
and exp =
| Num of int
| Plus of exp * exp
| Minus of exp * exp
let rec eval : formula -> bool
= fun f ->
match f with
|True -> true
|False -> false
|Not(b)->if b = True then eval True else eval False
|AndAlso(a,b) ->
if a = False then eval False
else if b = False then eval False else eval True
|OrElse (a,b) -> if a = True then eval True
else if b = True then eval True else eval False
|Imply(a,b) -> if a = False then eval True
else if b = True then eval True else eval False
|Equal(a,b) -> if a = b then eval True else eval False ;;
| |
2e1f570f93c89cc69d5e0d2df6cb4a62936b6fda3539ef449b72d300fd21deb0 | jdreaver/eventful | CommandHandler.hs | # LANGUAGE RecordWildCards #
# LANGUAGE TemplateHaskell #
module Bank.Models.Account.CommandHandler
( accountCommandHandler
, AccountCommand (..)
) where
import Control.Lens
import Data.Maybe (isNothing)
import SumTypes.TH
import Eventful
import Bank.Models.Account.Commands
import Bank.Models.Account.Events
import Bank.Models.Account.Projection
constructSumType "AccountCommand" (defaultSumTypeOptions { sumTypeOptionsTagOptions = AppendTypeNameToTags }) accountCommands
-- | Pure decision function of the account aggregate: given the current
-- projected 'Account' state and a command, emit the resulting events.
-- Rejections are themselves events (no exceptions, no IO).
handleAccountCommand :: Account -> AccountCommand -> [AccountEvent]
-- Open: rejected when an owner is already set or the initial funding is
-- negative; otherwise record the opening with owner and funding.
handleAccountCommand account (OpenAccountAccountCommand OpenAccount{..}) =
  case account ^. accountOwner of
    Just _ -> [AccountOpenRejectedAccountEvent $ AccountOpenRejected "Account already open"]
    Nothing ->
      if openAccountInitialFunding < 0
      then [AccountOpenRejectedAccountEvent $ AccountOpenRejected "Invalid initial deposit"]
      else
        [ AccountOpenedAccountEvent
          AccountOpened
          { accountOpenedOwner = openAccountOwner
          , accountOpenedInitialFunding = openAccountInitialFunding
          }
        ]
-- Credit: always accepted; the current state is not consulted.
handleAccountCommand _ (CreditAccountAccountCommand CreditAccount{..}) =
  [ AccountCreditedAccountEvent
    AccountCredited
    { accountCreditedAmount = creditAccountAmount
    , accountCreditedReason = creditAccountReason
    }
  ]
-- Debit: rejected when it would drive the available balance below zero;
-- the rejection event carries the current available balance.
handleAccountCommand account (DebitAccountAccountCommand DebitAccount{..}) =
  if accountAvailableBalance account - debitAccountAmount < 0
  then [AccountDebitRejectedAccountEvent $ AccountDebitRejected $ accountAvailableBalance account]
  else
    [ AccountDebitedAccountEvent
      AccountDebited
      { accountDebitedAmount = debitAccountAmount
      , accountDebitedReason = debitAccountReason
      }
    ]
-- Transfer out: rejected for a non-existent account (no owner) or
-- insufficient funds; otherwise only *start* the transfer (the saga /
-- process manager completing it lives elsewhere).
handleAccountCommand account (TransferToAccountAccountCommand TransferToAccount{..})
  | isNothing (account ^. accountOwner) =
      [AccountTransferRejectedAccountEvent $ AccountTransferRejected transferToAccountTransferId "Account doesn't exist"]
  | accountAvailableBalance account - transferToAccountAmount < 0 =
      [AccountTransferRejectedAccountEvent $ AccountTransferRejected transferToAccountTransferId "Not enough funds"]
  | otherwise =
      [ AccountTransferStartedAccountEvent
        AccountTransferStarted
        { accountTransferStartedTransferId = transferToAccountTransferId
        , accountTransferStartedAmount = transferToAccountAmount
        , accountTransferStartedTargetAccount = transferToAccountTargetAccount
        }
      ]
-- Incoming transfer: always credited on the receiving side.
handleAccountCommand _ (AcceptTransferAccountCommand AcceptTransfer{..}) =
  [ AccountCreditedFromTransferAccountEvent
    AccountCreditedFromTransfer
    { accountCreditedFromTransferTransferId = acceptTransferTransferId
    , accountCreditedFromTransferSourceAccount = acceptTransferSourceAccount
    , accountCreditedFromTransferAmount = acceptTransferAmount
    }
  ]
-- | Wire the pure decision function to the account projection so that
-- commands are handled against the folded event stream.
accountCommandHandler :: CommandHandler Account AccountEvent AccountCommand
accountCommandHandler = CommandHandler handleAccountCommand accountProjection
| null | https://raw.githubusercontent.com/jdreaver/eventful/3f0c604e5bb2dcf5bacf0a2e01edf6a5e9c5e22e/examples/bank/src/Bank/Models/Account/CommandHandler.hs | haskell | # LANGUAGE RecordWildCards #
# LANGUAGE TemplateHaskell #
module Bank.Models.Account.CommandHandler
( accountCommandHandler
, AccountCommand (..)
) where
import Control.Lens
import Data.Maybe (isNothing)
import SumTypes.TH
import Eventful
import Bank.Models.Account.Commands
import Bank.Models.Account.Events
import Bank.Models.Account.Projection
constructSumType "AccountCommand" (defaultSumTypeOptions { sumTypeOptionsTagOptions = AppendTypeNameToTags }) accountCommands
handleAccountCommand :: Account -> AccountCommand -> [AccountEvent]
handleAccountCommand account (OpenAccountAccountCommand OpenAccount{..}) =
case account ^. accountOwner of
Just _ -> [AccountOpenRejectedAccountEvent $ AccountOpenRejected "Account already open"]
Nothing ->
if openAccountInitialFunding < 0
then [AccountOpenRejectedAccountEvent $ AccountOpenRejected "Invalid initial deposit"]
else
[ AccountOpenedAccountEvent
AccountOpened
{ accountOpenedOwner = openAccountOwner
, accountOpenedInitialFunding = openAccountInitialFunding
}
]
handleAccountCommand _ (CreditAccountAccountCommand CreditAccount{..}) =
[ AccountCreditedAccountEvent
AccountCredited
{ accountCreditedAmount = creditAccountAmount
, accountCreditedReason = creditAccountReason
}
]
handleAccountCommand account (DebitAccountAccountCommand DebitAccount{..}) =
if accountAvailableBalance account - debitAccountAmount < 0
then [AccountDebitRejectedAccountEvent $ AccountDebitRejected $ accountAvailableBalance account]
else
[ AccountDebitedAccountEvent
AccountDebited
{ accountDebitedAmount = debitAccountAmount
, accountDebitedReason = debitAccountReason
}
]
handleAccountCommand account (TransferToAccountAccountCommand TransferToAccount{..})
| isNothing (account ^. accountOwner) =
[AccountTransferRejectedAccountEvent $ AccountTransferRejected transferToAccountTransferId "Account doesn't exist"]
| accountAvailableBalance account - transferToAccountAmount < 0 =
[AccountTransferRejectedAccountEvent $ AccountTransferRejected transferToAccountTransferId "Not enough funds"]
| otherwise =
[ AccountTransferStartedAccountEvent
AccountTransferStarted
{ accountTransferStartedTransferId = transferToAccountTransferId
, accountTransferStartedAmount = transferToAccountAmount
, accountTransferStartedTargetAccount = transferToAccountTargetAccount
}
]
handleAccountCommand _ (AcceptTransferAccountCommand AcceptTransfer{..}) =
[ AccountCreditedFromTransferAccountEvent
AccountCreditedFromTransfer
{ accountCreditedFromTransferTransferId = acceptTransferTransferId
, accountCreditedFromTransferSourceAccount = acceptTransferSourceAccount
, accountCreditedFromTransferAmount = acceptTransferAmount
}
]
accountCommandHandler :: CommandHandler Account AccountEvent AccountCommand
accountCommandHandler = CommandHandler handleAccountCommand accountProjection
| |
34a2580c2def7736a760ed1e57ab0d7a9e85e70940d16b8c5dbc85c324713495 | inhabitedtype/ocaml-aws | getHealthCheck.ml | open Types
open Aws
type input = GetHealthCheckRequest.t
type output = GetHealthCheckResponse.t
type error = Errors_internal.t
let service = "route53"
let signature_version = Request.V4
(* Render [req] as an HTTP GET against the Route53 endpoint for [region].
   The query combines the fixed API version/action with the request's own
   rendered parameters (empty values dropped). Returns (method, uri, headers);
   this operation sends no headers. *)
let to_http service region req =
  let uri =
    Uri.add_query_params
      (Uri.of_string (Aws.Util.of_option_exn (Endpoints.url_of service region)))
      (List.append
         [ "Version", [ "2013-04-01" ]; "Action", [ "GetHealthCheck" ] ]
         (Util.drop_empty
            (Uri.query_of_encoded (Query.render (GetHealthCheckRequest.to_query req)))))
  in
  `GET, uri, []
(* Parse the XML response [body] into a GetHealthCheckResponse.
   Failure modes, each mapped to a [BadResponse] error carrying the body:
   - [Failure] from the XML parser (malformed XML),
   - no well-formed GetHealthCheckResponse element,
   - [Xml.RequiredFieldMissing] raised while parsing the element. *)
let of_http body =
  try
    let xml = Ezxmlm.from_string body in
    let resp = Xml.member "GetHealthCheckResponse" (snd xml) in
    try
      Util.or_error
        (Util.option_bind resp GetHealthCheckResponse.parse)
        (let open Error in
        BadResponse
          { body; message = "Could not find well formed GetHealthCheckResponse." })
    with Xml.RequiredFieldMissing msg ->
      let open Error in
      `Error
        (BadResponse
           { body
           ; message =
               "Error parsing GetHealthCheckResponse - missing field in body or \
                children: "
               ^ msg
           })
  with Failure msg ->
    `Error
      (let open Error in
      BadResponse { body; message = "Error parsing xml: " ^ msg })
(* Map a raw error string (plus the HTTP status [code]) to a known error.
   Returns [Some e] only when the string names a recognized error from the
   common set AND its expected HTTP code (when one is defined) matches. *)
let parse_error code err =
  let known_errors = [] @ Errors_internal.common in
  match Errors_internal.of_string err with
  | None -> None
  | Some err_code ->
    let http_code_matches =
      match Errors_internal.to_http_code err_code with
      | Some expected -> expected = code
      | None -> true
    in
    if List.mem err_code known_errors && http_code_matches
    then Some err_code
    else None
| null | https://raw.githubusercontent.com/inhabitedtype/ocaml-aws/b6d5554c5d201202b5de8d0b0253871f7b66dab6/libraries/route53/lib/getHealthCheck.ml | ocaml | open Types
open Aws
type input = GetHealthCheckRequest.t
type output = GetHealthCheckResponse.t
type error = Errors_internal.t
let service = "route53"
let signature_version = Request.V4
let to_http service region req =
let uri =
Uri.add_query_params
(Uri.of_string (Aws.Util.of_option_exn (Endpoints.url_of service region)))
(List.append
[ "Version", [ "2013-04-01" ]; "Action", [ "GetHealthCheck" ] ]
(Util.drop_empty
(Uri.query_of_encoded (Query.render (GetHealthCheckRequest.to_query req)))))
in
`GET, uri, []
let of_http body =
try
let xml = Ezxmlm.from_string body in
let resp = Xml.member "GetHealthCheckResponse" (snd xml) in
try
Util.or_error
(Util.option_bind resp GetHealthCheckResponse.parse)
(let open Error in
BadResponse
{ body; message = "Could not find well formed GetHealthCheckResponse." })
with Xml.RequiredFieldMissing msg ->
let open Error in
`Error
(BadResponse
{ body
; message =
"Error parsing GetHealthCheckResponse - missing field in body or \
children: "
^ msg
})
with Failure msg ->
`Error
(let open Error in
BadResponse { body; message = "Error parsing xml: " ^ msg })
let parse_error code err =
let errors = [] @ Errors_internal.common in
match Errors_internal.of_string err with
| Some var ->
if List.mem var errors
&&
match Errors_internal.to_http_code var with
| Some var -> var = code
| None -> true
then Some var
else None
| None -> None
| |
0ea483e41a4981e48f7ac89d47f7a8400c288aaaef2a3f4fd07ff1ff7dff078e | zachsully/dl | Translation.hs | module DL.Flat.Backend.JavaScript.Translation
(jsCompile) where
import Data.List (foldl')
import DL.Backend
import qualified DL.General.Top as Top
import DL.Flat.Syntax
import DL.Flat.Backend.JavaScript.Syntax
import DL.General.Variable
import DL.Utils.StdMonad
-- | JavaScript backend entry point: wraps 'trans' as a 'Backend'.
jsCompile :: Backend FlatTerm
jsCompile = Backend trans
-- | Compile a program: translate its main term to JavaScript, then
-- a-normalize the result for sharing.
trans :: Top.Program FlatTerm -> Program
trans dpgm = Pgm (aNorm . transTerm . Top.pgmTerm $ dpgm)
-- | Because 'aNorm' and 'transTerm' freshen variables and thus return type
-- @Std JSTerm@, we must extract the term from the 'Std' monad.
-- (The original line above had lost its comment marker, which made the file
-- syntactically invalid.) Translation is not expected to fail, so a 'Left'
-- is reported as an internal error with an identifying message instead of
-- the previous empty @error ""@.
transTerm :: FlatTerm -> JSTerm
transTerm f =
  case runStd (transTerm' f) of
    Left _ -> error "transTerm: internal error: translation failed in Std"
    Right x -> x
-- We a-Normalize for sharing; 'aNorm'' freshens variables in the 'Std'
-- monad, so run it and extract the result. A 'Left' indicates an internal
-- error, so report it with an identifying message (was: @error ""@).
aNorm :: JSTerm -> JSTerm
aNorm f =
  case runStd (aNorm' f) of
    Left _ -> error "aNorm: internal error: a-normalization failed in Std"
    Right x -> x
-- | A-normalize a term so that intermediate results are named and shared.
-- Only 'JSApp' introduces a fresh let-binding (for its argument); every
-- other case recurses structurally without changing shape.
aNorm' :: JSTerm -> Std JSTerm
aNorm' (JSLet vs ts a) = do ts' <- mapM aNorm' ts
                            a' <- aNorm' a
                            return $ JSLet vs ts' a'
aNorm' (JSRec v t) = do t' <- aNorm' t
                        return $ JSRec v t'
aNorm' (JSLit i) = return $ JSLit i
aNorm' (JSAdd a b) = do a' <- aNorm' a
                        b' <- aNorm' b
                        return $ JSAdd a' b'
aNorm' (JSVar v) = return $ JSVar v
aNorm' (JSFun v t) = do t' <- aNorm' t
                        return $ JSFun v t'
-- the interesting case: bind the argument to a fresh name so it is
-- evaluated once and shared
aNorm' (JSApp a b) = do x <- freshen (Variable "a")
                        a' <- aNorm' a
                        b' <- aNorm' b
                        return $ JSLet [x] [b'] (JSApp a' (JSVar x) )
aNorm' (JSObj (v,t) d) = do t' <- aNorm' t
                            d' <- aNorm' d
                            return $ JSObj (v,t') d'
aNorm' (JSMethod v t) = do t' <- aNorm' t
                           return $ JSMethod v t'
aNorm' (JSFail) = return $ JSFail
-- | Translate a single flat term into a JavaScript term.
-- Data is encoded with case objects: a constructor application becomes a
-- function that receives an object @c@ and invokes the matching method on
-- it with the (let-bound) arguments, and a case expression applies the
-- scrutinee to such an object. Unsupported control operators
-- (shift/cut/coiter) are rejected with 'error'.
transTerm' :: FlatTerm -> Std JSTerm
transTerm' (FLet v a b) = do a' <- transTerm' a
                             b' <- transTerm' b
                             return $ JSLet [v] [a'] b'
transTerm' (FVar v) = return $ JSVar v
transTerm' (FFix v a) = do a' <- transTerm' a
                           return $ JSRec v a'
transTerm' (FLit i) = return $ JSLit i
transTerm' (FAdd a b) = do a' <- transTerm' a
                           b' <- transTerm' b
                           return $ JSAdd a' b'
-- constructor application: bind each argument to a fresh variable, then
-- build \c -> c.v(x1)(x2)... over those variables
transTerm' (FConsApp v xs) = do vars <- mapM (\_ -> freshen (Variable "x")) xs
                                xs' <- mapM transTerm' xs
                                let body = JSFun (Variable "c") (foldl' JSApp (JSMethod v (JSVar (Variable "c"))) (map JSVar vars))
                                return $ JSLet vars xs' body
transTerm' (FCase t ((FlatPatVar p),a) (v,u)) = do t' <- transTerm' t
                                                   a' <- transTerm' a
                                                   u' <- transTerm' u
                                                   -- pass identity function into default case
                                                   let def = JSApp (JSFun (v) u') (JSFun (Variable "x") (JSVar (Variable "x")))
                                                   return $ JSApp t' (JSObj (p,a') def)
transTerm' (FCase t ((FlatPatCons p ps),a) (v,u)) = do t' <- transTerm' t
                                                       a' <- transTerm' a
                                                       u' <- transTerm' u
                                                       -- pass identity function into default case
                                                       let def = JSApp (JSFun (v) u') (JSFun (Variable "x") (JSVar (Variable "x")))
                                                       return $ JSApp t' (JSObj (p,foldr JSFun a' ps) def)
transTerm' (FCaseEmpty t) = transTerm' t
transTerm' (FFun v t) = do t' <- transTerm' t
                           return $ JSFun v t'
transTerm' (FCoalt (v, t) a) = do t' <- transTerm' t
                                  a' <- transTerm' a
                                  return $ JSObj (v, t') a'
transTerm' (FEmpty) = return $ JSFail
transTerm' (FAnn t _) = transTerm' t
transTerm' (FShift _ _) = error "transTerm'{shift}"
transTerm' (FPrompt t) = transTerm' t
transTerm' (FObsApp a b) =
  do { a' <- transTerm' a
     ; b' <- transTerm' b
     ; return $ JSApp b' a' }
transTerm' (FObsDest v t) = JSMethod v <$> transTerm' t
transTerm' (FObsCut _ _) = error "transTerm'{cut}"
transTerm' (FStreamCoiter _ _ _) = error "transTerm'{FStreamCoiter}"
| null | https://raw.githubusercontent.com/zachsully/dl/383bcc9d34c5e1f9787dede440a84503e5a2fd28/haskell/DL/Flat/Backend/JavaScript/Translation.hs | haskell | We a-Normalize for sharing
pass identity function into default case
pass identity function into default case | module DL.Flat.Backend.JavaScript.Translation
(jsCompile) where
import Data.List (foldl')
import DL.Backend
import qualified DL.General.Top as Top
import DL.Flat.Syntax
import DL.Flat.Backend.JavaScript.Syntax
import DL.General.Variable
import DL.Utils.StdMonad
jsCompile :: Backend FlatTerm
jsCompile = Backend trans
trans :: Top.Program FlatTerm -> Program
trans dpgm = Pgm (aNorm . transTerm . Top.pgmTerm $ dpgm)
Because aNorm and transTerm freshen variables and thus return type Std JSTerm , we must extract the term from the Std Monad
transTerm :: FlatTerm -> JSTerm
transTerm f =
case runStd (transTerm' f) of
Left _ -> error ""
Right x -> x
aNorm :: JSTerm -> JSTerm
aNorm f =
case runStd (aNorm' f) of
Left _ -> error ""
Right x -> x
aNorm' :: JSTerm -> Std JSTerm
aNorm' (JSLet vs ts a) = do ts' <- mapM aNorm' ts
a' <- aNorm' a
return $ JSLet vs ts' a'
aNorm' (JSRec v t) = do t' <- aNorm' t
return $ JSRec v t'
aNorm' (JSLit i) = return $ JSLit i
aNorm' (JSAdd a b) = do a' <- aNorm' a
b' <- aNorm' b
return $ JSAdd a' b'
aNorm' (JSVar v) = return $ JSVar v
aNorm' (JSFun v t) = do t' <- aNorm' t
return $ JSFun v t'
aNorm' (JSApp a b) = do x <- freshen (Variable "a")
a' <- aNorm' a
b' <- aNorm' b
return $ JSLet [x] [b'] (JSApp a' (JSVar x) )
aNorm' (JSObj (v,t) d) = do t' <- aNorm' t
d' <- aNorm' d
return $ JSObj (v,t') d'
aNorm' (JSMethod v t) = do t' <- aNorm' t
return $ JSMethod v t'
aNorm' (JSFail) = return $ JSFail
transTerm' :: FlatTerm -> Std JSTerm
transTerm' (FLet v a b) = do a' <- transTerm' a
b' <- transTerm' b
return $ JSLet [v] [a'] b'
transTerm' (FVar v) = return $ JSVar v
transTerm' (FFix v a) = do a' <- transTerm' a
return $ JSRec v a'
transTerm' (FLit i) = return $ JSLit i
transTerm' (FAdd a b) = do a' <- transTerm' a
b' <- transTerm' b
return $ JSAdd a' b'
transTerm' (FConsApp v xs) = do vars <- mapM (\_ -> freshen (Variable "x")) xs
xs' <- mapM transTerm' xs
let body = JSFun (Variable "c") (foldl' JSApp (JSMethod v (JSVar (Variable "c"))) (map JSVar vars))
return $ JSLet vars xs' body
transTerm' (FCase t ((FlatPatVar p),a) (v,u)) = do t' <- transTerm' t
a' <- transTerm' a
u' <- transTerm' u
let def = JSApp (JSFun (v) u') (JSFun (Variable "x") (JSVar (Variable "x")))
return $ JSApp t' (JSObj (p,a') def)
transTerm' (FCase t ((FlatPatCons p ps),a) (v,u)) = do t' <- transTerm' t
a' <- transTerm' a
u' <- transTerm' u
let def = JSApp (JSFun (v) u') (JSFun (Variable "x") (JSVar (Variable "x")))
return $ JSApp t' (JSObj (p,foldr JSFun a' ps) def)
transTerm' (FCaseEmpty t) = transTerm' t
transTerm' (FFun v t) = do t' <- transTerm' t
return $ JSFun v t'
transTerm' (FCoalt (v, t) a) = do t' <- transTerm' t
a' <- transTerm' a
return $ JSObj (v, t') a'
transTerm' (FEmpty) = return $ JSFail
transTerm' (FAnn t _) = transTerm' t
transTerm' (FShift _ _) = error "transTerm'{shift}"
transTerm' (FPrompt t) = transTerm' t
transTerm' (FObsApp a b) =
do { a' <- transTerm' a
; b' <- transTerm' b
; return $ JSApp b' a' }
transTerm' (FObsDest v t) = JSMethod v <$> transTerm' t
transTerm' (FObsCut _ _) = error "transTerm'{cut}"
transTerm' (FStreamCoiter _ _ _) = error "transTerm'{FStreamCoiter}"
|
fae3f1baf5d402b971105ed2a26fc53d5c050f2a6bdc60631fa7dc42f492af83 | sph-mn/sph-lib | shtml.scm | (define-module (sph web shtml))
(use-modules (rnrs io ports) (sph) (sph list) (sxml simple) ((sph string) #:select (any->string)))
(export shtml->html shtml-alist->options
shtml-heading shtml-hyperlink
shtml-include-css shtml-include-javascript
shtml-indent shtml-indent-create shtml-list->list shtml-list->table shtml-section shtml-text->sxml)
(define sph-web-shtml-description "helpers to create html via sxml")
(define html-headings #(h1 h2 h3 h4 h5 h6))
(define (shtml-heading depth . content)
  "integer sxml -> sxml
   create a html heading element, for example <h1>, with the given content.
   depth is zero-based (0 -> h1) and is clamped into the valid h1-h6 range,
   so out-of-range depths (including negative ones, which previously made
   vector-ref raise an out-of-range error) degrade gracefully"
  (pair (vector-ref html-headings (max 0 (min 5 depth))) content))
(define (shtml-section depth title content . attributes)
  "integer sxml sxml (string/symbol string/symbol) ... -> sxml
   create the sxml for an html <section> tag with attributes, heading and content in a single html tag.
   content is put in a <div> unless it already is contained in single tag or if it is empty.
   the single tag is ensured to make accessors for the content area (everything not first heading) simpler"
  ;; fix: the non-list, non-empty content branch previously produced
  ;; (list (q div) content), splicing the bare symbol div and the content as
  ;; two sibling children of <section> instead of one wrapped <div> element.
  ;; wrap it as a single element, matching the list branch and the docstring.
  (pair (q section)
    (append (if (null? attributes) attributes (list (pair (q @) attributes)))
      (pair (shtml-heading depth title)
        (if (list? content)
          (if (null? content) (list)
            (if (symbol? (first content)) (list content) (list (pair (q div) content))))
          (if (and (string? content) (string-null? content)) (list)
            (list (list (q div) content))))))))
(define shtml-indent (q ((*ENTITY* "#160") (*ENTITY* "#160"))))
(define (shtml-indent-create depth)
  "integer -> sxml
   build indentation of the given depth from non-breaking-space entities,
   which viewers do not collapse like ordinary whitespace"
  (apply append (map (lambda (n) shtml-indent) (iota depth))))
;; result is a flat list: text segments interleaved with (br) elements
(define (shtml-text->sxml a)
  "string -> sxml
   replace newlines with (br)"
  (interleave (string-split a #\newline) (q (br))))
;; the async attribute is emitted only when is-async is true; the trailing ""
;; child presumably forces a separate closing </script> tag on serialization
;; rather than a self-closing tag - TODO confirm against sxml->xml behavior
(define* (shtml-include-javascript path #:optional is-async)
  "string boolean -> sxml
   create the shtml for including a javascript file"
  (qq
    (script
      (@ (src (unquote path)) (unquote-splicing (if is-async (list (q (async async))) (list)))) "")))
;; emits a standard <link rel=stylesheet> element for the given href
(define (shtml-include-css path)
  "string -> sxml
   create the shtml for including a stylesheet file"
  (qq (link (@ (rel "stylesheet") (type "text/css") (href (unquote path))))))
;; when title is false the target itself is used as the link text
(define* (shtml-hyperlink target title #:optional (attributes (list)))
  "string string -> sxml
   sxml for an html <a>"
  (qq (a (@ (href (unquote target)) (unquote-splicing attributes)) (unquote (or title target)))))
(define (shtml-alist->options a)
  "((content . string:value/false)/string ...) -> sxml:((option _ ...) ...)
   create the shtml for multiple <option> elements.
   pair entries carry an explicit value attribute; other entries become
   plain <option> elements with the entry as content"
  (define (entry->option entry)
    (if (pair? entry)
      (qq (option (@ (value (unquote (tail entry)))) (unquote (first entry))))
      (list (q option) entry)))
  (map entry->option a))
;; NOTE(review): the ordered? flag is not propagated to recursive calls,
;; so nested sub-lists are always rendered as <ul> - confirm this is intended
(define* (shtml-list->list a #:optional ordered?)
  "(sxml/list:sub-list) boolean -> sxml
   create the shtml for an unordered or ordered list structure, <ul> or <ol>, with elements.
   input list elements that are lists are recursively created as shtml sublists"
  (pair (if ordered? (q ol) (q ul))
    (map (l (a) (list (q li) (if (list? a) (shtml-list->list a) a))) a)))
;; each inner list becomes a <tr>, each of its elements a <td>
(define (shtml-list->table a)
  "((sxml:cell ...) ...) -> sxml
   create the shtml for a <table> with content"
  (pair (q table) (map (l (a) (pair (q tr) (map (l (a) (list (q td) a)) a))) a)))
;; serializes via sxml->xml after emitting the html5 doctype preamble
(define (shtml->html shtml port)
  "write html from shtml to port, adding a <!doctype html> declaration at the beginning"
  (put-string port "<!doctype html>") (sxml->xml shtml port))
| null | https://raw.githubusercontent.com/sph-mn/sph-lib/c7daf74f42d6bd1304f49c2fef89dcd6dd94fdc9/modules/sph/web/shtml.scm | scheme | (define-module (sph web shtml))
(use-modules (rnrs io ports) (sph) (sph list) (sxml simple) ((sph string) #:select (any->string)))
(export shtml->html shtml-alist->options
shtml-heading shtml-hyperlink
shtml-include-css shtml-include-javascript
shtml-indent shtml-indent-create shtml-list->list shtml-list->table shtml-section shtml-text->sxml)
(define sph-web-shtml-description "helpers to create html via sxml")
(define html-headings #(h1 h2 h3 h4 h5 h6))
(define (shtml-heading depth . content)
"integer sxml -> sxml
create a html heading element, for example <h1>, with the given content"
(pair (vector-ref html-headings (min 5 depth)) content))
(define (shtml-section depth title content . attributes)
"integer sxml sxml (string/symbol string/symbol) ... -> sxml
create the sxml for an html <section> tag with attributes, heading and content in a single html tag.
content is put in a <div> unless it already is contained in single tag or if it is empty.
the single tag is ensured to make accessors for the content area (everything not first heading) simpler"
(pair (q section)
(append (if (null? attributes) attributes (list (pair (q @) attributes)))
(pair (shtml-heading depth title)
(if (list? content)
(if (null? content) (list)
(if (symbol? (first content)) (list content) (list (pair (q div) content))))
(if (and (string? content) (string-null? content)) (list) (list (q div) content)))))))
(define shtml-indent (q ((*ENTITY* "#160") (*ENTITY* "#160"))))
(define (shtml-indent-create depth)
"integer -> sxml
creates indent with the html entity for the space character so it does not get compressed by the viewer"
(apply append (make-list depth shtml-indent)))
(define (shtml-text->sxml a)
"string -> sxml
replace newlines with (br)"
(interleave (string-split a #\newline) (q (br))))
(define* (shtml-include-javascript path #:optional is-async)
"string boolean -> sxml
create the shtml for including a javascript file"
(qq
(script
(@ (src (unquote path)) (unquote-splicing (if is-async (list (q (async async))) (list)))) "")))
(define (shtml-include-css path)
"string -> sxml
create the shtml for including a stylesheet file"
(qq (link (@ (rel "stylesheet") (type "text/css") (href (unquote path))))))
(define* (shtml-hyperlink target title #:optional (attributes (list)))
"string string -> sxml
sxml for an html <a>"
(qq (a (@ (href (unquote target)) (unquote-splicing attributes)) (unquote (or title target)))))
(define (shtml-alist->options a)
"((content . string:value/false)/string ...) -> sxml:((option _ ...) ...)
create the shtml for multiple <option> elements"
(map
(l (a)
(if (pair? a) (qq (option (@ (value (unquote (tail a)))) (unquote (first a))))
(list (q option) a)))
a))
(define* (shtml-list->list a #:optional ordered?)
"(sxml/list:sub-list) boolean -> sxml
create the shtml for an unordered or ordered list structure, <ul> or <ol>, with elements.
input list elements that are lists are recursively created as shtml sublists"
(pair (if ordered? (q ol) (q ul))
(map (l (a) (list (q li) (if (list? a) (shtml-list->list a) a))) a)))
(define (shtml-list->table a)
"((sxml:cell ...) ...) -> sxml
create the shtml for a <table> with content"
(pair (q table) (map (l (a) (pair (q tr) (map (l (a) (list (q td) a)) a))) a)))
(define (shtml->html shtml port)
"write html from shtml to port, adding a <!doctype html> declaration at the beginning"
(put-string port "<!doctype html>") (sxml->xml shtml port))
| |
a86811a26e41efe416ee1b45f5393860a39b586c3cfcf2a2d3226260e2181fe8 | Naproche-SAD/Naproche-SAD | Base.hs |
Authors : ( 2001 - 2008 ) , ( 2017 - 2018 )
Construct prover database .
Authors: Andrei Paskevich (2001 - 2008), Steffen Frerix (2017 - 2018)
Construct prover database.
-}
module SAD.Export.Base (Prover(..),Format(..),readProverDatabase) where
import qualified Data.Char as Char
import System.IO
import System.IO.Error
import Control.Exception
import qualified SAD.Core.Message as Message
import SAD.Core.SourcePos
import qualified Isabelle.File as File
-- | External prover configuration, one record per entry in @provers.dat@.
-- Fields correspond to the tag letters parsed by 'readProvers':
-- P = name, L = label, C = path and arguments, F = format,
-- Y / N / U = success / failure / unknown response lines.
data Prover = Prover {
  name :: String,
  label :: String,
  path :: String,
  arguments :: [String],
  format :: Format,
  successMessage :: [String],
  failureMessage :: [String],
  unknownMessage :: [String] }
-- | Input syntax a prover accepts (selected by the F tag).
data Format = TPTP | DFG
initPrv l = Prover l "Prover" "" [] TPTP [] [] []
-- Database reader
{- parse the prover database in provers.dat -}
-- Reads the file, trims leading/trailing whitespace from every line and
-- hands the lines to 'readProvers'. Both I/O errors and parse errors are
-- reported through 'Message.errorExport' at a file-only source position.
readProverDatabase :: String -> IO [Prover]
readProverDatabase file = do
  input <- catch (File.read file) $ err . ioeGetErrorString
  let dropWS = dropWhile Char.isSpace
      trimWS = reverse . dropWS . reverse . dropWS
      ls = map trimWS $ lines input
  case readProvers 1 Nothing ls of
    Left e -> err e
    Right d -> return d
  where
    err = Message.errorExport (fileOnlyPos file)
-- | Line-by-line parser for the prover database. @n@ is the current line
-- number (used only in error messages); the @Maybe Prover@ is the entry
-- currently being accumulated ('Nothing' before the first P tag). A new
-- P tag validates and emits the previous entry. Note that the Y/N/U
-- response lines are accumulated in reverse order of appearance.
readProvers :: Int -> Maybe Prover -> [String] -> Either String [Prover]
-- blank lines and comments are skipped
readProvers n mbp ([]:ls) = readProvers (succ n) mbp ls
readProvers n mbp (('#':_):ls) = readProvers (succ n) mbp ls
-- a lone tag character has no value
readProvers n _ ([_]:_) = Left $ show n ++ ": empty value"
readProvers n Nothing (('P':l):ls)
  = readProvers (succ n) (Just $ initPrv l) ls
readProvers n (Just pr) (('P':l):ls)
  = fmap2 (:) (validate pr) $ readProvers (succ n) (Just $ initPrv l) ls
readProvers n (Just pr) (('L':l):ls)
  = readProvers (succ n) (Just pr { label = l }) ls
readProvers n (Just pr) (('Y':l):ls)
  = readProvers (succ n) (Just pr { successMessage = l : successMessage pr }) ls
readProvers n (Just pr) (('N':l):ls)
  = readProvers (succ n) (Just pr { failureMessage = l : failureMessage pr }) ls
readProvers n (Just pr) (('U':l):ls)
  = readProvers (succ n) (Just pr { unknownMessage = l : unknownMessage pr }) ls
-- C tag: first word is the executable path, the rest its arguments.
-- assumes the value contains at least one word when non-null; lines are
-- trimmed by the caller, but a standalone caller should confirm this
readProvers n (Just pr) (('C':l):ls)
  = let (p:a) = if null l then ("":[]) else words l
    in readProvers (succ n) (Just pr { path = p, arguments = a }) ls
readProvers n (Just pr) (('F':l):ls)
  = case l of
      "tptp" -> readProvers (succ n) (Just pr { format = TPTP }) ls
      "dfg" -> readProvers (succ n) (Just pr { format = DFG }) ls
      _ -> Left $ show n ++ ": unknown format: " ++ l
readProvers n (Just _) ((c:_):_) = Left $ show n ++ ": invalid tag: " ++ [c]
readProvers n Nothing ((c:_):_) = Left $ show n ++ ": misplaced tag: " ++ [c]
-- end of input: emit the entry still being accumulated, if any
readProvers _ (Just pr) [] = fmap1 (:[]) $ validate pr
readProvers _ Nothing [] = Right []
-- | Reject a parsed prover entry that is missing a command line, has no
-- success responses, or has neither failure nor unknown responses.
validate :: Prover -> Either String Prover
validate Prover { name = n, path = "" }
  = Left $ " prover '" ++ n ++ "' has no command line"
validate Prover { name = n, successMessage = [] }
  = Left $ " prover '" ++ n ++ "' has no success responses"
validate Prover { name = n, failureMessage = [], unknownMessage = [] }
  = Left $ " prover '" ++ n ++ "' has no failure responses"
validate r = Right r
-- Service stuff
-- | Map over the 'Right' branch of an 'Either'; a 'Left' passes through
-- unchanged. (Equivalent to 'fmap' for 'Either', kept local.)
fmap1 :: (a -> b) -> Either e a -> Either e b
fmap1 f = either Left (Right . f)
-- | Combine two 'Either's with a binary function. The first 'Left'
-- encountered wins, checking the left operand first - exactly the
-- 'Applicative' behavior of 'Either'.
fmap2 :: (a -> b -> c) -> Either e a -> Either e b -> Either e c
fmap2 f ea eb = f <$> ea <*> eb
| null | https://raw.githubusercontent.com/Naproche-SAD/Naproche-SAD/da131a6eaf65d4e02e82082a50a4febb6d42db3d/src/SAD/Export/Base.hs | haskell | Database reader
parse the prover database in provers.dat
Service stuff |
Authors : ( 2001 - 2008 ) , ( 2017 - 2018 )
Construct prover database .
Authors: Andrei Paskevich (2001 - 2008), Steffen Frerix (2017 - 2018)
Construct prover database.
-}
module SAD.Export.Base (Prover(..),Format(..),readProverDatabase) where
import qualified Data.Char as Char
import System.IO
import System.IO.Error
import Control.Exception
import qualified SAD.Core.Message as Message
import SAD.Core.SourcePos
import qualified Isabelle.File as File
data Prover = Prover {
name :: String,
label :: String,
path :: String,
arguments :: [String],
format :: Format,
successMessage :: [String],
failureMessage :: [String],
unknownMessage :: [String] }
data Format = TPTP | DFG
initPrv l = Prover l "Prover" "" [] TPTP [] [] []
-- | Read and parse the prover database file.  Every line is trimmed of
-- surrounding whitespace before being handed to 'readProvers', so tag
-- dispatch can rely on the first character.  I/O errors and parse
-- errors are both reported through 'Message.errorExport'.
readProverDatabase :: String -> IO [Prover]
readProverDatabase file = do
  input <- catch (File.read file) $ err . ioeGetErrorString
  let dropWS = dropWhile Char.isSpace
      trimWS = reverse . dropWS . reverse . dropWS
      ls = map trimWS $ lines input
  case readProvers 1 Nothing ls of
    Left e -> err e
    Right d -> return d
  where
    err = Message.errorExport (fileOnlyPos file)
-- | Line-by-line parser for the prover database.  @n@ is the current
-- line number (used only in error messages); the @Maybe Prover@ is the
-- entry currently being assembled (@Nothing@ until the first @P@ line).
-- Lines arrive already trimmed by 'readProverDatabase', so the first
-- character of a non-empty line is the tag and the rest is its value.
readProvers :: Int -> Maybe Prover -> [String] -> Either String [Prover]
-- Blank lines and '#' comment lines are skipped.
readProvers n mbp ([]:ls) = readProvers (succ n) mbp ls
readProvers n mbp (('#':_):ls) = readProvers (succ n) mbp ls
-- A lone tag character carries no value.
readProvers n _ ([_]:_) = Left $ show n ++ ": empty value"
-- 'P' opens a new prover entry; a previously open entry is validated
-- and prepended to the remaining results via 'fmap2'.
readProvers n Nothing (('P':l):ls)
  = readProvers (succ n) (Just $ initPrv l) ls
readProvers n (Just pr) (('P':l):ls)
  = fmap2 (:) (validate pr) $ readProvers (succ n) (Just $ initPrv l) ls
-- Attribute tags for the currently open entry; Y/N/U lines accumulate.
readProvers n (Just pr) (('L':l):ls)
  = readProvers (succ n) (Just pr { label = l }) ls
readProvers n (Just pr) (('Y':l):ls)
  = readProvers (succ n) (Just pr { successMessage = l : successMessage pr }) ls
readProvers n (Just pr) (('N':l):ls)
  = readProvers (succ n) (Just pr { failureMessage = l : failureMessage pr }) ls
readProvers n (Just pr) (('U':l):ls)
  = readProvers (succ n) (Just pr { unknownMessage = l : unknownMessage pr }) ls
-- 'C': first word is the executable path, the rest its arguments.
readProvers n (Just pr) (('C':l):ls)
  = let (p:a) = if null l then ("":[]) else words l
    in readProvers (succ n) (Just pr { path = p, arguments = a }) ls
-- 'F': prover input format; only "tptp" and "dfg" are recognised.
readProvers n (Just pr) (('F':l):ls)
  = case l of
      "tptp" -> readProvers (succ n) (Just pr { format = TPTP }) ls
      "dfg" -> readProvers (succ n) (Just pr { format = DFG }) ls
      _ -> Left $ show n ++ ": unknown format: " ++ l
-- Unknown tags, and any tag seen before the first 'P' line.
readProvers n (Just _) ((c:_):_) = Left $ show n ++ ": invalid tag: " ++ [c]
readProvers n Nothing ((c:_):_) = Left $ show n ++ ": misplaced tag: " ++ [c]
-- End of input: validate and emit the last open entry, if any.
readProvers _ (Just pr) [] = fmap1 (:[]) $ validate pr
readProvers _ Nothing [] = Right []
-- | Sanity-check a parsed prover entry: it must have a command line,
-- at least one success response, and at least one failure or unknown
-- response; otherwise it is rejected with a description of the defect.
validate :: Prover -> Either String Prover
validate pr
  | null (path pr) =
      Left $ " prover '" ++ name pr ++ "' has no command line"
  | null (successMessage pr) =
      Left $ " prover '" ++ name pr ++ "' has no success responses"
  | null (failureMessage pr) && null (unknownMessage pr) =
      Left $ " prover '" ++ name pr ++ "' has no failure responses"
  | otherwise = Right pr
-- | Apply a function inside a 'Right', pass a 'Left' through unchanged.
-- @Either e@ is already a 'Functor', so delegate to 'fmap' instead of
-- re-implementing the case analysis by hand.
fmap1 :: (a -> b) -> Either e a -> Either e b
fmap1 = fmap
-- | Combine two 'Either' values with a binary function, short-circuiting
-- on the first 'Left' (the left argument's error wins when both fail).
-- This is exactly the 'Applicative' behaviour of @Either e@.
fmap2 :: (a -> b -> c) -> Either e a -> Either e b -> Either e c
fmap2 f ea eb = f <$> ea <*> eb
|
929ac4570fd40cfc106b42e15239f73764542e658dd4d0f180367b24293f6ed0 | issuu/broen | broen_core.erl | %%% ---------------------------------------------------------------------------------
%%% @doc
%%% broen_core turns HTTP requests/responses into AMQP RPC messaging.
Given a HTTP Request , this module will first authenticate it using the provided
%%% authentication plugin and the publish the message serialized with the serializer
plug over . Upon receiving a response , the module will respond back over HTTP .
%%% @end
%%% ---------------------------------------------------------------------------------
-module(broen_core).
-export([handle/3]).
-export([register_metrics/0]).
-define(CLIENT_REQUEST_BODY_LIMIT, 65536).
-define(ACAO_HEADER, <<"access-control-allow-origin">>).
-type content_type() :: unicode:unicode_binary().
%% The MIME content type
-type broen_string() :: unicode:unicode_binary().
%% A binary string
-type broen_nullable_string() :: unicode:unicode_binary() | null.
%% A binary string that can be null
-type broen_object() :: #{broen_string() => broen_string()}.
%% An generic sub-object that is a map mapping a string to a string. Used for e.g. HTTP headers
-type cookie_name() :: broen_string().
%% The name of a cookie
-type cookie_value() :: #{
value := broen_string(),
domain => broen_string(),
path => broen_string(),
http_only => boolean(),
secure => boolean(),
expires => broen_string()}.
%% The cookie properties. Each cookie must define a value and may optionally define the domain it applies to and the expiration date
-type broen_cookies() :: #{cookie_name() => cookie_value()}.
%% The cookies object maps cookie names to the properties.
-type broen_request() :: #{
appmoddata := broen_string(),
protocol := http | https,
cookies := broen_object(),
http_headers := broen_object(),
request := broen_string(),
method := broen_string(),
referer := broen_nullable_string(),
fullpath := broen_string(),
useragent := broen_nullable_string(),
client_data := binary() | null,
client_ip := broen_string(),
routing_key := broen_string(),
queryobj := broen_object(),
auth_data := term(),
querydata => broen_string(),
postobj => broen_object(),
multipartobj => term()}.
%
%
% }.
The format of a request that is sent to the serializer plugin . < br/ >
%% <b>cookies</b> - Cookies attached to the HTTP request <br/>
%% <b>http_headers</b> - HTTP request headers <br/>
%% <b>request</b> - The HTTP method <br/>
%% <b>method</b> - Same as above <br/>
< b information < br/ >
%% <b>fullpath</b> - Full path of the request as provided by Yaws <br/>
%% <b>appmoddata</b> - The URL that is turned into the routing key (i.e. what follows /call) <br/>
%% <b>referer</b> - The referer URL <br/>
%% <b>useragent</b> - User agent data <br/>
%% <b>client_ip</b> - IP of the client <br/>
%% <b>routing_key</b> - The routing key the request will be sent to <br/>
%% <b>queryobj</b> - The query object containing the query parameters <br/>
%% <b>auth_data</b> - Data returned by the authentication module <br/>
%% <b>querydata</b> - Same as queryobj, but in a string format <br/>
%% <b>postobj</b>- Data attached to a POST request <br/>
%% <b>multipartobj</b> - Data for the multipart request <br/>
-type broen_response() :: #{
payload := term(),
status_code => integer(),
media_type => content_type(),
cookies => broen_cookies(),
cookie_path => broen_string(),
headers => broen_object()}
| #{redirect := unicode:unicode_binary()}.
The format of a response that should be returned by the serializer plugin < br/ >
%% <b>payload</b> - The payload of the response<br/>
%% <b>status_code</b> - Status code of the response<br/>
%% <b>media_type</b> - The MIME content type of the payload<br/>
< b > cookies</b > - Additional cookies to be sent to user < br/ >
%% <b>cookie_path</b> - The cookie path<br/>
%% <b>headers</b> - Additional headers for the HTTP response<br/>
%% Alternatively the response can also be a redirect.
-export_type([content_type/0, broen_request/0, broen_response/0]).
%% @doc Registers metrics with folsom
%% @doc Registers all metrics with folsom up front: per-group spirals
%% and latency histograms for every configured metric group, plus the
%% global counters notified by handle/3.
%% Fix: 'broen_core.failure.403' is notified by handle/3 on failed
%% origin checks but was missing from the registration list; it is now
%% registered alongside the other failure counters.
-spec register_metrics() -> ok.
register_metrics() ->
  Groups = application:get_env(broen, metric_groups, []),
  [begin
     Key = iolist_to_binary(["broen_core.query.", G]),
     KeyA = iolist_to_binary(["broen_core.query.", G, ".gone"]),
     KeyT = iolist_to_binary(["broen_core.query.", G, ".timeout"]),
     KeyL = iolist_to_binary(["broen_core.query.", G, ".latency"]),
     folsom_metrics:new_spiral(binary_to_atom(Key, utf8)),
     folsom_metrics:new_spiral(binary_to_atom(KeyA, utf8)),
     folsom_metrics:new_spiral(binary_to_atom(KeyT, utf8)),
     folsom_metrics:new_histogram(binary_to_atom(KeyL, utf8), slide_uniform)
   end || G <- Groups],
  [folsom_metrics:new_spiral(C)
   || C <- ['broen_core.success',
            'broen_core.query.unknown',
            'broen_core.query.unknown.timeout',
            'broen_core.failure.crash',
            'broen_core.failure.403',
            'broen_core.failure.500',
            'broen_core.failure.503',
            'broen_core.failure.404',
            'broen_auth.failure']],
  [folsom_metrics:new_histogram(H, slide_uniform)
   || H <- ['broen_core.query.unknown.latency']],
  ok.
%% @doc Entry point for one HTTP request: derive the AMQP routing key,
%% check the request origin, authenticate and forward over AMQP RPC
%% (amqp_call/3), then translate the outcome into an HTTP reply.
%% Known failures map to specific status codes; unexpected crashes
%% become a 500 carrying an opaque token that is also logged.
handle(
  Req0,
  #{
    serializer_mod := SerializerMod,
    keep_dots_in_routing_keys := KeepDotsRK
  } = Conf,
  CookiePath) ->
  try
    RoutingKey = routing_key(Req0, KeepDotsRK),
    case broen_request:check_http_origin(Req0, RoutingKey) of
      {_, unknown_origin} ->
        %% Origin check failed: reject without ever touching AMQP.
        folsom_metrics:notify({'broen_core.failure.403', 1}),
        cowboy_req:reply(403,
                         #{<<"content-type">> => <<"text/plain">>},
                         <<"Forbidden">>,
                         Req0);
      {Origin, OriginMode} ->
        {AmqpRes, ExtraCookies} = amqp_call(Req0, RoutingKey, Conf),
        %% Cookies produced by authentication are attached even when
        %% the AMQP call itself failed.
        ReqWithCookies = lists:foldl(fun(Cookie, Req) -> set_cookie(Cookie, <<"/">>, 0, Req) end, Req0, ExtraCookies),
        case AmqpRes of
          {ok, PackedResponse, ContentType} ->
            case SerializerMod:deserialize(PackedResponse, ContentType) of
              {ok, Response} ->
                folsom_metrics:notify({'broen_core.success', 1}),
                build_response(ReqWithCookies, Response, CookiePath, OriginMode, Origin);
              {error, invalid_content_type} ->
                %% Backend replied with a media type the serializer
                %% cannot handle.
                folsom_metrics:notify({'broen_core.failure.500', 1}),
                cowboy_req:reply(500,
                                 #{<<"content-type">> => <<"text/plain">>},
                                 iolist_to_binary([io_lib:format("Got wrong type of Media Type in response: ~ts",
                                                                 [ContentType])]),
                                 ReqWithCookies)
            end;
          {error, timeout} ->
            cowboy_req:reply(504,
                             #{<<"content-type">> => <<"text/plain">>},
                             <<"API Broen timeout">>,
                             ReqWithCookies);
          %% AMQP reply code 312 (no consumers) is treated as "no such
          %% endpoint".
          {error, {reply_code, 312}} ->
            folsom_metrics:notify({'broen_core.failure.404', 1}),
            cowboy_req:reply(404,
                             #{<<"content-type">> => <<"text/plain">>},
                             <<"Not found">>,
                             ReqWithCookies);
          {error, no_route} ->
            folsom_metrics:notify({'broen_core.failure.503', 1}),
            cowboy_req:reply(503,
                             #{<<"content-type">> => <<"text/plain">>},
                             <<"Service unavailable (no_route)">>,
                             ReqWithCookies);
          {error, csrf_verification_failed} ->
            cowboy_req:reply(403,
                             #{<<"content-type">> => <<"text/plain">>},
                             <<"Forbidden">>,
                             ReqWithCookies);
          %% Any other error becomes a 500 exposing the raw reason.
          {error, Reason} ->
            folsom_metrics:notify({'broen_core.failure.500', 1}),
            cowboy_req:reply(500,
                             #{<<"content-type">> => <<"text/plain">>},
                             iolist_to_binary([io_lib:format("~p~n", [Reason])]),
                             ReqWithCookies)
        end
    end
  catch
    throw: body_too_large ->
      cowboy_req:reply(400,
                       #{<<"content-type">> => <<"text/plain">>},
                       <<"Body too large">>,
                       Req0);
    _: {request_error, _, _} = Error: StackTrace ->
      lager:warning("Bad request: ~p Error: ~p StackTrace: ~p", [Req0, Error, StackTrace]),
      cowboy_req:reply(400,
                       #{<<"content-type">> => <<"text/plain">>},
                       <<"Bad request">>,
                       Req0);
    _: Error: StackTrace ->
      %% Unexpected crash: log the full details under an opaque token
      %% and show only that token to the client.
      Now = erlang:timestamp(),
      Token = base64:encode(crypto:hash(sha256, term_to_binary(Now))),
      lager:error("Crash: ~p Error: ~p Request ~p StackTrace: ~p", [Token, Error, Req0, StackTrace]),
      folsom_metrics:notify({'broen_core.failure.crash', 1}),
      cowboy_req:reply(500,
                       #{<<"content-type">> => <<"text/plain">>},
                       iolist_to_binary([io_lib:format("Internal error ~p~n", [Token])]),
                       Req0)
  end.
Internal functions
%% ---------------------------------------------------------------------------------
%% Authenticate the request and forward it over AMQP RPC.
%% Returns {Result, ExtraCookies}: authentication may produce cookies
%% that must be set on the HTTP response even when the call fails.
amqp_call(_Req, invalid_route, _Conf) ->
  {{error, no_route}, []};
amqp_call(Req, RoutingKey, #{
  exchange := Exchange,
  serializer_mod := SerializerMod,
  auth_mod := AuthMod,
  partial_post_size := PartialPostSize,
  timeout := Timeout
}) ->
  TimeZero = os:timestamp(),
  case AuthMod:authenticate(Req) of
    %% CSRF failures abort the call (optionally still carrying cookies).
    {error, csrf_verification_failed} -> {{error, csrf_verification_failed}, []};
    {error, {csrf_verification_failed, Cookies}} ->
      {{error, csrf_verification_failed}, Cookies};
    %% Any other auth error: forward the request anyway, with empty
    %% auth data.
    {error, _} ->
      {handle_http(SerializerMod, PartialPostSize, TimeZero, [], Req, Exchange, RoutingKey, Timeout), []};
    {ok, AuthData, Cookies} ->
      {handle_http(SerializerMod, PartialPostSize, TimeZero, AuthData, Req, Exchange, RoutingKey, Timeout), Cookies}
  end.
%% Serialize the request, perform the blocking AMQP RPC call, and keep
%% the per-metric-group counters (calls, latency, timeouts, gone) up to
%% date.  Returns the raw ad_client reply for handle/3 to translate.
handle_http(SerializerMod, PartialPostSize, TimeZero, AuthData, Arg, Exch, RoutingKey, Timeout) ->
  Request = broen_request:build_request(Arg, PartialPostSize, RoutingKey, AuthData),
  MetricGroup = metric_group_from_routing_key(RoutingKey),
  GroupCalledNotified = notify_group_called(MetricGroup),
  {Packed, ContentType} = SerializerMod:serialize(Request),
  Reply = ad_client:call_timeout(amqp_rpc,
                                 Exch,
                                 RoutingKey,
                                 Packed,
                                 ContentType,
                                 [{timeout, Timeout}]),
  TimeAfter = os:timestamp(),
  %% A successful delivery may register a previously unseen group.
  maybe_register_group(Reply, MetricGroup),
  case GroupCalledNotified of
    true -> ok;
    %% if we did not notify before,
    %% perhaps we now registered the metric,
    %% so try again
    false -> notify_group_called(MetricGroup)
  end,
  notify_group_latency(MetricGroup, TimeZero, TimeAfter),
  case Reply of
    {error, timeout} ->
      lager:warning("broen_core:amqp_call timeout ~s ~p", [RoutingKey, Request]),
      notify_group_timeout(MetricGroup);
    {error, no_route} ->
      notify_group_gone(MetricGroup);
    _ -> ok
  end,
  Reply.
%% Derive the AMQP routing key from the request's path segments.
%% Returns `invalid_route' for unusable paths (see valid_route/1).
%% A trailing slash contributes an empty final segment, so "/a/b/" and
%% "/a/b" yield different routing keys.
routing_key(Req, KeepDotsRK) ->
  Segments = cowboy_req:path_info(Req),
  EndsInSlash = binary:last(cowboy_req:path(Req)) == $/,
  case valid_route(Segments) of
    false ->
      invalid_route;
    true when EndsInSlash ->
      route(KeepDotsRK, Segments ++ [<<>>]);
    true ->
      route(KeepDotsRK, Segments)
  end.
%% A route is usable when it has at least one segment and the segments'
%% combined byte size stays within the 255-byte limit.
%% NOTE(review): the separator dots later inserted by route/2 are not
%% counted towards the limit — confirm whether that is intended.
valid_route([]) ->
  false;
valid_route(Segments) ->
  lists:sum([byte_size(S) || S <- Segments]) =< 255.
%% '.' is converted to '_' iff the keep_dots_in_routing_key is false,
%% otherwise it is left as a '.'
%% Join path segments with '.' into a routing key.  When the first
%% argument is false, '.' inside each segment is first rewritten to '_'
%% before joining; when true, segments are joined as-is.
route(false, Segments) ->
  Sanitized = [binary:replace(S, <<".">>, <<"_">>, [global]) || S <- Segments],
  route(true, Sanitized);
route(true, [First | Rest]) ->
  Join = fun(Segment, Acc) -> <<Acc/binary, ".", Segment/binary>> end,
  lists:foldl(Join, First, Rest).
%% Decoders of various responses
%% ---------------------------------------------------------------------------------
%% Build the final HTTP reply from a deserialized broen response.
%% A `redirect' response becomes a plain 302 with a Location header.
build_response(Req, #{redirect := URL}, _, _, _) ->
  cowboy_req:reply(
    302,
    #{<<"location">> => URL},
    <<>>,
    Req
  );
%% Otherwise: status/payload/media type default to 200 / <<>> / <<>>;
%% response cookies and headers (plus CORS, via headers/3) are attached.
build_response(Req, Response, CookiePath, OriginMode, Origin) ->
  StatusCode = maps:get(status_code, Response, 200),
  Content = maps:get(payload, Response, <<>>),
  MediaType = maps:get(media_type, Response, <<>>),
  RespwithCookies = cookies(Req, Response, CookiePath),
  cowboy_req:reply(
    StatusCode,
    maps:from_list(headers(Response, OriginMode, Origin) ++ [{<<"content-type">>, MediaType}]),
    Content,
    RespwithCookies
  ).
%% Flatten the response's header map into {Name, Value} pairs, adding
%% the CORS header when the origin mode requires it.
%% NOTE(review): names and values are converted to lists (strings) here
%% and then placed into the map handed to cowboy_req:reply/4, whose
%% header names are binaries elsewhere in this module — confirm this
%% conversion is still wanted.
headers(Response, OriginMode, Origin) ->
  Headers = maps:to_list(maps:get(headers, Response, #{})),
  [{binary_to_list(N), binary_to_list(V)} || {N, V} <- append_cors(Headers, Origin, OriginMode)].
%% Prepend the Access-Control-Allow-Origin header in allow_origin mode,
%% unless the response already carries one; same_origin mode leaves the
%% headers untouched.
append_cors(Headers, _Origin, same_origin) ->
  Headers;
append_cors(Headers, Origin, allow_origin) ->
  case lists:keymember(?ACAO_HEADER, 1, Headers) of
    true -> Headers;
    false -> [{?ACAO_HEADER, Origin} | Headers]
  end.
%% Apply the response's cookies to the reply being built.  The cookie
%% path falls back to DefaultCookiePath and the expiry to a far-future
%% date (2038-01-17) when the response does not specify them.
cookies(InitialReq, Response, DefaultCookiePath) ->
  Cookies = maps:to_list(maps:get(cookies, Response, #{})),
  CookiePath = maps:get(cookie_path, Response, DefaultCookiePath),
  DefaultExpires = iso8601:format({{2038, 1, 17}, {12, 34, 56}}),
  lists:foldl(fun(Cookie, Req) -> set_cookie(Cookie, CookiePath, DefaultExpires, Req) end, InitialReq, Cookies).
%% Set a single response cookie on Req.  The `expires' value (integer
%% seconds, ISO 8601 or HTTP date — see parse_expiry/1) is converted
%% into a relative max_age; missing attributes fall back to the given
%% defaults.
%% NOTE(review): `domain' may end up as `undefined' in the cowboy
%% options — confirm cowboy emits no Domain attribute in that case.
set_cookie({CookieName, CookieValue}, DefaultCookiePath, DefaultExpires, Req) ->
  Expiry = parse_expiry(maps:get(expires, CookieValue, DefaultExpires)),
  CookiePath = maps:get(path, CookieValue, DefaultCookiePath),
  Domain = maps:get(domain, CookieValue, undefined),
  Secure = maps:get(secure, CookieValue, false),
  HttpOnly = maps:get(http_only, CookieValue, false),
  Value = maps:get(value, CookieValue),
  cowboy_req:set_resp_cookie(CookieName, Value,
                             Req,
                             #{
                               domain => Domain,
                               path => CookiePath,
                               secure => Secure,
                               http_only => HttpOnly,
                               max_age => Expiry
                             }).
%% Convert a cookie expiry into a max_age in seconds from now.
%% Integers are taken as-is; anything else is parsed as a date and the
%% remaining lifetime is clamped at zero when it lies in the past.
parse_expiry(Seconds) when is_integer(Seconds) -> Seconds;
parse_expiry(Date) ->
  NowSecs = calendar:datetime_to_gregorian_seconds(calendar:universal_time()),
  ExpirySecs = calendar:datetime_to_gregorian_seconds(parse_date(Date)),
  max(0, ExpirySecs - NowSecs).
%% Parse a cookie expiry date: try ISO 8601 first, then fall back to
%% the HTTP date formats handled by cow_date.  Lists are converted to
%% binaries before parsing.
parse_date(Date) when is_list(Date) ->
  parse_date(list_to_binary(Date));
parse_date(Date) ->
  try
    iso8601:parse(Date)
  catch
    _:badarg ->
      cow_date:parse_date(Date)
  end.
%% Other
%% ---------------------------------------------------------------------------------
%% The list of known metric groups, kept in the application environment.
metric_groups() -> application:get_env(broen, metric_groups, []).
%% True when MetricGroup has already been registered.
metric_group_exists(MetricGroup) -> lists:any(fun (Item) -> Item == MetricGroup end, metric_groups()).
%% Folsom metric names derived from a group name.
metric_group_key_count(MetricGroup) -> binary_to_atom(iolist_to_binary(["broen_core.query.", MetricGroup]), utf8).
metric_group_key_gone(MetricGroup) -> binary_to_atom(iolist_to_binary(["broen_core.query.", MetricGroup, ".gone"]), utf8).
metric_group_key_timeout(MetricGroup) -> binary_to_atom(iolist_to_binary(["broen_core.query.", MetricGroup, ".timeout"]), utf8).
metric_group_key_latency(MetricGroup) -> binary_to_atom(iolist_to_binary(["broen_core.query.", MetricGroup, ".latency"]), utf8).
%% register metric group if we did not see it before -
%% but only if the reply is not "immediate delivery failed"
error , as this is most probably a 404 and we do n't want
%% to register random metric groups.
%% Only register the group when the call was actually deliverable, so
%% that unroutable requests (probable 404s) cannot create unbounded
%% metric groups.
maybe_register_group({error, {reply_code, 312}}, _) -> ok;
maybe_register_group({error, no_route}, _) -> ok;
maybe_register_group(_, MetricGroup) -> register_metric_group(MetricGroup).
%% Create the folsom metrics for a newly seen group and remember it in
%% the application environment; no-op if the group is already known.
%% NOTE(review): the read-modify-write of `metric_groups' is not atomic,
%% so concurrent first calls may race — confirm this is acceptable.
register_metric_group(MetricGroup) ->
  case metric_group_exists(MetricGroup) of
    true -> ok;
    false ->
      lager:info("Register metric group: ~s", [MetricGroup]),
      Key = metric_group_key_count(MetricGroup),
      KeyA = metric_group_key_gone(MetricGroup),
      KeyT = metric_group_key_timeout(MetricGroup),
      KeyL = metric_group_key_latency(MetricGroup),
      folsom_metrics:new_spiral(Key),
      folsom_metrics:new_spiral(KeyA),
      folsom_metrics:new_spiral(KeyT),
      folsom_metrics:new_histogram(KeyL, slide_uniform),
      application:set_env(broen, metric_groups, [MetricGroup | metric_groups()]),
      ok
  end.
%% @doc First segment of the routing key, used as the metric-group name.
%% Fix: binary:split/2 always returns a non-empty list, so the previous
%% catch-all clause mapping to <<"unknown">> was unreachable; an empty
%% routing key now yields the <<"unknown">> group explicitly instead of
%% the empty binary.
-spec metric_group_from_routing_key(binary()) -> binary().
metric_group_from_routing_key(<<>>) ->
  <<"unknown">>;
metric_group_from_routing_key(RK) when is_binary(RK) ->
  [Group | _] = binary:split(RK, <<".">>),
  Group.
%% Bump the per-group call counter; returns whether the group was
%% already registered (handle_http/8 retries after registration if not).
-spec notify_group_called(binary()) -> boolean().
notify_group_called(MetricGroup) ->
  case metric_group_exists(MetricGroup) of
    true -> folsom_metrics:notify({metric_group_key_count(MetricGroup), 1}), true;
    false -> false
  end.
%% Count a failed delivery for a known group; unknown groups are ignored.
notify_group_gone(MetricGroup) ->
  case metric_group_exists(MetricGroup) of
    false -> ok;
    true ->
      % metric group exists, but message could not be delivered,
      % meaning that a subsystem is now gone
      lager:warning("broen_core metric_group_gone ~s", [MetricGroup]),
      folsom_metrics:notify({metric_group_key_gone(MetricGroup), 1})
  end.
%% Count a timed-out call for a known group; no-op for unknown groups.
notify_group_timeout(MetricGroup) ->
  case metric_group_exists(MetricGroup) of
    true -> folsom_metrics:notify({metric_group_key_timeout(MetricGroup), 1});
    false -> ok
  end.
%% Record the call latency (in milliseconds) for a known group.
notify_group_latency(MetricGroup, TimeZero, TimeAfter) ->
  case metric_group_exists(MetricGroup) of
    true -> histogram_notify(metric_group_key_latency(MetricGroup),
                             timer:now_diff(TimeAfter, TimeZero) div 1000);
    false -> ok
  end.
%% Notify a histogram, lazily creating it on first use.
%% NOTE(review): the create-and-retry can race with a concurrent first
%% notification of the same histogram — confirm folsom tolerates a
%% duplicate new_histogram call.
histogram_notify(Name, Diff) ->
  case folsom_metrics:notify(Name, Diff) of
    {error, Name, nonexistent_metric} ->
      folsom_metrics:new_histogram(Name, slide_uniform),
      folsom_metrics:notify(Name, Diff);
    Res ->
      Res
  end.
| null | https://raw.githubusercontent.com/issuu/broen/7d0e1ad9017b9e9907d924b54c3c63dd1d741c9c/src/broen_core.erl | erlang | ---------------------------------------------------------------------------------
@doc
broen_core turns HTTP requests/responses into AMQP RPC messaging.
authentication plugin and the publish the message serialized with the serializer
@end
---------------------------------------------------------------------------------
The MIME content type
A binary string
A binary string that can be null
An generic sub-object that is a map mapping a string to a string. Used for e.g. HTTP headers
The name of a cookie
The cookie properties. Each cookie must define a value and may optionally define the domain it applies to and the expiration date
The cookies object maps cookie names to the properties.
}.
<b>cookies</b> - Cookies attached to the HTTP request <br/>
<b>http_headers</b> - HTTP request headers <br/>
<b>request</b> - The HTTP method <br/>
<b>method</b> - Same as above <br/>
<b>fullpath</b> - Full path of the request as provided by Yaws <br/>
<b>appmoddata</b> - The URL that is turned into the routing key (i.e. what follows /call) <br/>
<b>referer</b> - The referer URL <br/>
<b>useragent</b> - User agent data <br/>
<b>client_ip</b> - IP of the client <br/>
<b>routing_key</b> - The routing key the request will be sent to <br/>
<b>queryobj</b> - The query object containing the query parameters <br/>
<b>auth_data</b> - Data returned by the authentication module <br/>
<b>querydata</b> - Same as queryobj, but in a string format <br/>
<b>postobj</b>- Data attached to a POST request <br/>
<b>multipartobj</b> - Data for the multipart request <br/>
<b>payload</b> - The payload of the response<br/>
<b>status_code</b> - Status code of the response<br/>
<b>media_type</b> - The MIME content type of the payload<br/>
<b>cookie_path</b> - The cookie path<br/>
<b>headers</b> - Additional headers for the HTTP response<br/>
Alternatively the response can also be a redirect.
@doc Registers metrics with folsom
---------------------------------------------------------------------------------
if we did not notify before,
perhaps we now registered the metric,
so try again
'.' is converted to '_' iff the keep_dots_in_routing_key is false,
otherwise it is left as a '.'
Decoders of various responses
---------------------------------------------------------------------------------
Other
---------------------------------------------------------------------------------
register metric group if we did not see it before -
but only if the reply is not "immediate delivery failed"
to register random metric groups.
metric group exists, but message could not be delivered,
meaning that a subsystem is now gone | Given a HTTP Request , this module will first authenticate it using the provided
plug over . Upon receiving a response , the module will respond back over HTTP .
-module(broen_core).
-export([handle/3]).
-export([register_metrics/0]).
-define(CLIENT_REQUEST_BODY_LIMIT, 65536).
-define(ACAO_HEADER, <<"access-control-allow-origin">>).
-type content_type() :: unicode:unicode_binary().
-type broen_string() :: unicode:unicode_binary().
-type broen_nullable_string() :: unicode:unicode_binary() | null.
-type broen_object() :: #{broen_string() => broen_string()}.
-type cookie_name() :: broen_string().
-type cookie_value() :: #{
value := broen_string(),
domain => broen_string(),
path => broen_string(),
http_only => boolean(),
secure => boolean(),
expires => broen_string()}.
-type broen_cookies() :: #{cookie_name() => cookie_value()}.
-type broen_request() :: #{
appmoddata := broen_string(),
protocol := http | https,
cookies := broen_object(),
http_headers := broen_object(),
request := broen_string(),
method := broen_string(),
referer := broen_nullable_string(),
fullpath := broen_string(),
useragent := broen_nullable_string(),
client_data := binary() | null,
client_ip := broen_string(),
routing_key := broen_string(),
queryobj := broen_object(),
auth_data := term(),
querydata => broen_string(),
postobj => broen_object(),
multipartobj => term()}.
The format of a request that is sent to the serializer plugin . < br/ >
< b information < br/ >
-type broen_response() :: #{
payload := term(),
status_code => integer(),
media_type => content_type(),
cookies => broen_cookies(),
cookie_path => broen_string(),
headers => broen_object()}
| #{redirect := unicode:unicode_binary()}.
The format of a response that should be returned by the serializer plugin < br/ >
< b > cookies</b > - Additional cookies to be sent to user < br/ >
-export_type([content_type/0, broen_request/0, broen_response/0]).
-spec register_metrics() -> ok.
register_metrics() ->
Groups = application:get_env(broen, metric_groups, []),
[begin
Key = iolist_to_binary(["broen_core.query.", G]),
KeyA = iolist_to_binary(["broen_core.query.", G, ".gone"]),
KeyT = iolist_to_binary(["broen_core.query.", G, ".timeout"]),
KeyL = iolist_to_binary(["broen_core.query.", G, ".latency"]),
folsom_metrics:new_spiral(binary_to_atom(Key, utf8)),
folsom_metrics:new_spiral(binary_to_atom(KeyA, utf8)),
folsom_metrics:new_spiral(binary_to_atom(KeyT, utf8)),
folsom_metrics:new_histogram(binary_to_atom(KeyL, utf8), slide_uniform)
end || G <- Groups],
[folsom_metrics:new_spiral(C)
|| C <- ['broen_core.success',
'broen_core.query.unknown',
'broen_core.query.unknown.timeout',
'broen_core.failure.crash',
'broen_core.failure.500',
'broen_core.failure.503',
'broen_core.failure.404',
'broen_auth.failure']],
[folsom_metrics:new_histogram(H, slide_uniform)
|| H <- ['broen_core.query.unknown.latency']],
ok.
handle(
Req0,
#{
serializer_mod := SerializerMod,
keep_dots_in_routing_keys := KeepDotsRK
} = Conf,
CookiePath) ->
try
RoutingKey = routing_key(Req0, KeepDotsRK),
case broen_request:check_http_origin(Req0, RoutingKey) of
{_, unknown_origin} ->
folsom_metrics:notify({'broen_core.failure.403', 1}),
cowboy_req:reply(403,
#{<<"content-type">> => <<"text/plain">>},
<<"Forbidden">>,
Req0);
{Origin, OriginMode} ->
{AmqpRes, ExtraCookies} = amqp_call(Req0, RoutingKey, Conf),
ReqWithCookies = lists:foldl(fun(Cookie, Req) -> set_cookie(Cookie, <<"/">>, 0, Req) end, Req0, ExtraCookies),
case AmqpRes of
{ok, PackedResponse, ContentType} ->
case SerializerMod:deserialize(PackedResponse, ContentType) of
{ok, Response} ->
folsom_metrics:notify({'broen_core.success', 1}),
build_response(ReqWithCookies, Response, CookiePath, OriginMode, Origin);
{error, invalid_content_type} ->
folsom_metrics:notify({'broen_core.failure.500', 1}),
cowboy_req:reply(500,
#{<<"content-type">> => <<"text/plain">>},
iolist_to_binary([io_lib:format("Got wrong type of Media Type in response: ~ts",
[ContentType])]),
ReqWithCookies)
end;
{error, timeout} ->
cowboy_req:reply(504,
#{<<"content-type">> => <<"text/plain">>},
<<"API Broen timeout">>,
ReqWithCookies);
{error, {reply_code, 312}} ->
folsom_metrics:notify({'broen_core.failure.404', 1}),
cowboy_req:reply(404,
#{<<"content-type">> => <<"text/plain">>},
<<"Not found">>,
ReqWithCookies);
{error, no_route} ->
folsom_metrics:notify({'broen_core.failure.503', 1}),
cowboy_req:reply(503,
#{<<"content-type">> => <<"text/plain">>},
<<"Service unavailable (no_route)">>,
ReqWithCookies);
{error, csrf_verification_failed} ->
cowboy_req:reply(403,
#{<<"content-type">> => <<"text/plain">>},
<<"Forbidden">>,
ReqWithCookies);
{error, Reason} ->
folsom_metrics:notify({'broen_core.failure.500', 1}),
cowboy_req:reply(500,
#{<<"content-type">> => <<"text/plain">>},
iolist_to_binary([io_lib:format("~p~n", [Reason])]),
ReqWithCookies)
end
end
catch
throw: body_too_large ->
cowboy_req:reply(400,
#{<<"content-type">> => <<"text/plain">>},
<<"Body too large">>,
Req0);
_: {request_error, _, _} = Error: StackTrace ->
lager:warning("Bad request: ~p Error: ~p StackTrace: ~p", [Req0, Error, StackTrace]),
cowboy_req:reply(400,
#{<<"content-type">> => <<"text/plain">>},
<<"Bad request">>,
Req0);
_: Error: StackTrace ->
Now = erlang:timestamp(),
Token = base64:encode(crypto:hash(sha256, term_to_binary(Now))),
lager:error("Crash: ~p Error: ~p Request ~p StackTrace: ~p", [Token, Error, Req0, StackTrace]),
folsom_metrics:notify({'broen_core.failure.crash', 1}),
cowboy_req:reply(500,
#{<<"content-type">> => <<"text/plain">>},
iolist_to_binary([io_lib:format("Internal error ~p~n", [Token])]),
Req0)
end.
Internal functions
amqp_call(_Req, invalid_route, _Conf) ->
{{error, no_route}, []};
amqp_call(Req, RoutingKey, #{
exchange := Exchange,
serializer_mod := SerializerMod,
auth_mod := AuthMod,
partial_post_size := PartialPostSize,
timeout := Timeout
}) ->
TimeZero = os:timestamp(),
case AuthMod:authenticate(Req) of
{error, csrf_verification_failed} -> {{error, csrf_verification_failed}, []};
{error, {csrf_verification_failed, Cookies}} ->
{{error, csrf_verification_failed}, Cookies};
{error, _} ->
{handle_http(SerializerMod, PartialPostSize, TimeZero, [], Req, Exchange, RoutingKey, Timeout), []};
{ok, AuthData, Cookies} ->
{handle_http(SerializerMod, PartialPostSize, TimeZero, AuthData, Req, Exchange, RoutingKey, Timeout), Cookies}
end.
handle_http(SerializerMod, PartialPostSize, TimeZero, AuthData, Arg, Exch, RoutingKey, Timeout) ->
Request = broen_request:build_request(Arg, PartialPostSize, RoutingKey, AuthData),
MetricGroup = metric_group_from_routing_key(RoutingKey),
GroupCalledNotified = notify_group_called(MetricGroup),
{Packed, ContentType} = SerializerMod:serialize(Request),
Reply = ad_client:call_timeout(amqp_rpc,
Exch,
RoutingKey,
Packed,
ContentType,
[{timeout, Timeout}]),
TimeAfter = os:timestamp(),
maybe_register_group(Reply, MetricGroup),
case GroupCalledNotified of
true -> ok;
false -> notify_group_called(MetricGroup)
end,
notify_group_latency(MetricGroup, TimeZero, TimeAfter),
case Reply of
{error, timeout} ->
lager:warning("broen_core:amqp_call timeout ~s ~p", [RoutingKey, Request]),
notify_group_timeout(MetricGroup);
{error, no_route} ->
notify_group_gone(MetricGroup);
_ -> ok
end,
Reply.
routing_key(Req, KeepDotsRK) ->
Path = cowboy_req:path_info(Req),
TrailingSlash = binary:last(cowboy_req:path(Req)) == $/,
case valid_route(Path) of
false -> invalid_route;
true when TrailingSlash ->
route(KeepDotsRK, Path ++ [<<>>]);
true ->
route(KeepDotsRK, Path)
end.
valid_route([]) ->
false;
valid_route(Paths) ->
Sum = lists:foldl(fun(El, Sum) -> Sum + byte_size(El) end, 0, Paths),
Sum =< 255.
route(false, Route) ->
Mapped = lists:map(fun(El) -> binary:replace(El, <<".">>, <<"_">>, [global]) end, Route),
route(true, Mapped);
route(true, [First | Rest]) ->
lists:foldl(fun(El, SoFar) -> <<SoFar/binary, ".", El/binary>> end, First, Rest).
build_response(Req, #{redirect := URL}, _, _, _) ->
cowboy_req:reply(
302,
#{<<"location">> => URL},
<<>>,
Req
);
build_response(Req, Response, CookiePath, OriginMode, Origin) ->
StatusCode = maps:get(status_code, Response, 200),
Content = maps:get(payload, Response, <<>>),
MediaType = maps:get(media_type, Response, <<>>),
RespwithCookies = cookies(Req, Response, CookiePath),
cowboy_req:reply(
StatusCode,
maps:from_list(headers(Response, OriginMode, Origin) ++ [{<<"content-type">>, MediaType}]),
Content,
RespwithCookies
).
headers(Response, OriginMode, Origin) ->
Headers = maps:to_list(maps:get(headers, Response, #{})),
[{binary_to_list(N), binary_to_list(V)} || {N, V} <- append_cors(Headers, Origin, OriginMode)].
append_cors(Headers, _, same_origin) -> Headers;
append_cors(Headers, Origin, allow_origin) ->
case lists:keysearch(?ACAO_HEADER, 1, Headers) of
false -> [{?ACAO_HEADER, Origin} | Headers];
_ -> Headers
end.
cookies(InitialReq, Response, DefaultCookiePath) ->
Cookies = maps:to_list(maps:get(cookies, Response, #{})),
CookiePath = maps:get(cookie_path, Response, DefaultCookiePath),
DefaultExpires = iso8601:format({{2038, 1, 17}, {12, 34, 56}}),
lists:foldl(fun(Cookie, Req) -> set_cookie(Cookie, CookiePath, DefaultExpires, Req) end, InitialReq, Cookies).
set_cookie({CookieName, CookieValue}, DefaultCookiePath, DefaultExpires, Req) ->
Expiry = parse_expiry(maps:get(expires, CookieValue, DefaultExpires)),
CookiePath = maps:get(path, CookieValue, DefaultCookiePath),
Domain = maps:get(domain, CookieValue, undefined),
Secure = maps:get(secure, CookieValue, false),
HttpOnly = maps:get(http_only, CookieValue, false),
Value = maps:get(value, CookieValue),
cowboy_req:set_resp_cookie(CookieName, Value,
Req,
#{
domain => Domain,
path => CookiePath,
secure => Secure,
http_only => HttpOnly,
max_age => Expiry
}).
parse_expiry(Date) when is_integer(Date) -> Date;
parse_expiry(Date) ->
ParsedDate = parse_date(Date),
UTC = calendar:universal_time(),
Secs = calendar:datetime_to_gregorian_seconds(UTC),
Expires = calendar:datetime_to_gregorian_seconds(ParsedDate),
if
Expires - Secs > 0 -> Expires - Secs;
true -> 0
end.
parse_date(Date) when is_list(Date) ->
parse_date(list_to_binary(Date));
parse_date(Date) ->
try
iso8601:parse(Date)
catch
_:badarg ->
cow_date:parse_date(Date)
end.
metric_groups() -> application:get_env(broen, metric_groups, []).
metric_group_exists(MetricGroup) -> lists:any(fun (Item) -> Item == MetricGroup end, metric_groups()).
metric_group_key_count(MetricGroup) -> binary_to_atom(iolist_to_binary(["broen_core.query.", MetricGroup]), utf8).
metric_group_key_gone(MetricGroup) -> binary_to_atom(iolist_to_binary(["broen_core.query.", MetricGroup, ".gone"]), utf8).
metric_group_key_timeout(MetricGroup) -> binary_to_atom(iolist_to_binary(["broen_core.query.", MetricGroup, ".timeout"]), utf8).
metric_group_key_latency(MetricGroup) -> binary_to_atom(iolist_to_binary(["broen_core.query.", MetricGroup, ".latency"]), utf8).
error , as this is most probably a 404 and we do n't want
maybe_register_group({error, {reply_code, 312}}, _) -> ok;
maybe_register_group({error, no_route}, _) -> ok;
maybe_register_group(_, MetricGroup) -> register_metric_group(MetricGroup).
register_metric_group(MetricGroup) ->
case metric_group_exists(MetricGroup) of
true -> ok;
false ->
lager:info("Register metric group: ~s", [MetricGroup]),
Key = metric_group_key_count(MetricGroup),
KeyA = metric_group_key_gone(MetricGroup),
KeyT = metric_group_key_timeout(MetricGroup),
KeyL = metric_group_key_latency(MetricGroup),
folsom_metrics:new_spiral(Key),
folsom_metrics:new_spiral(KeyA),
folsom_metrics:new_spiral(KeyT),
folsom_metrics:new_histogram(KeyL, slide_uniform),
application:set_env(broen, metric_groups, [MetricGroup | metric_groups()]),
ok
end.
-spec metric_group_from_routing_key(binary()) -> binary().
metric_group_from_routing_key(RK) when is_binary(RK) ->
case binary:split(RK, <<".">>) of
[SS | _] -> SS;
_ -> <<"unknown">>
end.
-spec notify_group_called(binary()) -> boolean().
notify_group_called(MetricGroup) ->
case metric_group_exists(MetricGroup) of
true -> folsom_metrics:notify({metric_group_key_count(MetricGroup), 1}), true;
false -> false
end.
notify_group_gone(MetricGroup) ->
case metric_group_exists(MetricGroup) of
false -> ok;
true ->
lager:warning("broen_core metric_group_gone ~s", [MetricGroup]),
folsom_metrics:notify({metric_group_key_gone(MetricGroup), 1})
end.
notify_group_timeout(MetricGroup) ->
case metric_group_exists(MetricGroup) of
true -> folsom_metrics:notify({metric_group_key_timeout(MetricGroup), 1});
false -> ok
end.
notify_group_latency(MetricGroup, TimeZero, TimeAfter) ->
case metric_group_exists(MetricGroup) of
true -> histogram_notify(metric_group_key_latency(MetricGroup),
timer:now_diff(TimeAfter, TimeZero) div 1000);
false -> ok
end.
histogram_notify(Name, Diff) ->
case folsom_metrics:notify(Name, Diff) of
{error, Name, nonexistent_metric} ->
folsom_metrics:new_histogram(Name, slide_uniform),
folsom_metrics:notify(Name, Diff);
Res ->
Res
end.
|
a4e01b58f84f129dcd0eba00b3e14c9cdf5e6e52c69f6cbec4c2ceeb70dedfb8 | nathanmarz/kafka-deploy | zookeeper.clj | (ns kafka.deploy.crate.zookeeper
(:require
[pallet.action.directory :as directory]
[pallet.action.file :as file]
[pallet.action.remote-directory :as remote-directory]
[pallet.action.remote-file :as remote-file]
[pallet.action.service :as service]
[pallet.action.user :as user]
[pallet.argument :as argument]
[pallet.compute :as compute]
[pallet.parameter :as parameter]
[pallet.session :as session]
[pallet.stevedore :as stevedore]
[clojure.string :as string]
[pallet.resource.package :as package]
[pallet.resource.exec-script :as exec-script]
[pallet.crate.crontab :as crontab]
)
(:use
pallet.thread-expr))
(def install-path "/usr/local/zookeeper")
(def log-path "/var/log/zookeeper")
(def tx-log-path "/mnt/zookeeper")
(def config-path "/etc/zookeeper")
(def data-path "/var/zookeeper")
(def zookeeper-home install-path)
(def zookeeper-user "zookeeper")
(def zookeeper-group "zookeeper")
(def default-config
{:dataDir data-path
:tickTime 2000
:clientPort 2181
:initLimit 10
:syncLimit 5
:dataLogDir tx-log-path})
(defn url "Download url"
[version]
(format
"-%s/zookeeper-%s.tar.gz"
version version))
(defn install
"Install Zookeeper"
[session & {:keys [user group version home]
:or {user zookeeper-user
group zookeeper-group
version "3.3.3"}
:as options}]
(let [url (url version)
home (or home (format "%s-%s" install-path version))]
(->
session
(package/package "daemontools")
(parameter/assoc-for
[:zookeeper :home] home
[:zookeeper :owner] user
[:zookeeper :group] group)
(user/group group :system true)
(user/user user :system true :group group)
(remote-directory/remote-directory
home
:url url :md5-url (str url ".md5")
:unpack :tar :tar-options "xz"
:owner user :group group)
(directory/directory log-path :owner user :group group :mode "0755")
(directory/directory tx-log-path :owner user :group group :mode "0755")
(directory/directory config-path :owner user :group group :mode "0755")
(directory/directory data-path :owner user :group group :mode "0755")
(directory/directory (format "/home/%s" user) :owner user :group group :mode "0755")
(directory/directory (format "%s/supervise" home) :owner user :group group :mode "0755")
(remote-file/remote-file
(format "%s/purge" home)
:content
(format
"#!/bin/bash
cd %s && export ZOOBINDIR=\"bin\" && . bin/zkEnv.sh && echo $CLASSPATH && java -cp $CLASSPATH org.apache.zookeeper.server.PurgeTxnLog %s %s -n 3
"
home
tx-log-path
data-path
)
:overwrite-changes true
:literal true
:mode 755)
(remote-file/remote-file
(format "%s/run" home)
:content
"#!/bin/bash
export ZOOBINDIR=\".\"
if [ \"x$JMXLOCALONLY\" = \"x\" ]
then
JMXLOCALONLY=false
fi
if [ \"x$JMXDISABLE\" = \"x\" ]
then
echo \"JMX enabled by default\"
# for some reason these two options are necessary on jdk6 on Ubuntu
# accord to the docs they are not necessary, but otw jconsole cannot
# do a local attach
ZOOMAIN=\"-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.local.only=$JMXLOCALONLY org.apache.zookeeper.server.quorum.QuorumPeerMain\"
else
echo \"JMX disabled by user request\"
ZOOMAIN=\"org.apache.zookeeper.server.quorum.QuorumPeerMain\"
fi
if [ \"x$2\" != \"x\" ]
then
ZOOCFG=\"$ZOOCFGDIR/$2\"
fi
cd bin && . ./zkEnv.sh && java \"-Dzookeeper.log.dir=${ZOO_LOG_DIR}\" \"-Dzookeeper.root.logger=${ZOO_LOG4J_PROP}\" -cp \"$CLASSPATH\" $JVMFLAGS $ZOOMAIN \"$ZOOCFG\"
"
:overwrite-changes true
:literal true
:mode 755)
(remote-file/remote-file
(format "%s/log4j.properties" config-path)
:remote-file (format "%s/conf/log4j.properties" home)
:owner user :group group :mode "0644")
(file/sed
(format "%s/bin/zkServer.sh" home)
{"# chkconfig:.*" ""
"# description:.*" ""
"# by default we allow local JMX connections"
"# by default we allow local JMX connections\\n# chkconfig: 2345 20 80\\n# description: zookeeper"})
(file/sed
(format "%s/log4j.properties" config-path)
{"log4j.rootLogger=INFO, CONSOLE"
"log4j.rootLogger=INFO, ROLLINGFILE"
"log4j.appender.ROLLINGFILE.File=zookeeper.log"
(format "log4j.appender.ROLLINGFILE.File=%s/zookeeper.log" log-path)}
:seperator "|")
)))
(defn init [session]
(-> session
(exec-script/exec-script
(cd ~(parameter/get-for session [:zookeeper :home]))
"sudo -u " ~(parameter/get-for session [:zookeeper :owner]) " nohup supervise . &")
(crontab/crontab "root"
:content (format "@daily sh %s/purge" (parameter/get-for session [:zookeeper :home])))
))
(defn config-files
"Create a zookeeper configuration file. We sort by name to preserve sequence
across invocations."
[session]
(let [target-name (session/target-name session)
target-ip (session/target-ip session)
nodes (sort-by compute/hostname (session/nodes-in-group session))
configs (parameter/get-for
session
[:zookeper (keyword (session/group-name session))])
config (configs (keyword target-name))
owner (parameter/get-for session [:zookeeper :owner])
group (parameter/get-for session [:zookeeper :group])]
(->
session
(remote-file/remote-file
(format "%s/zoo.cfg" config-path)
:content (str (string/join
\newline
(map #(format "%s=%s" (name (first %)) (second %))
(merge
default-config
(dissoc config :electionPort :quorumPort))))
\newline
(when (> (count nodes) 1)
(string/join
\newline
(map #(let [config (configs
(keyword (compute/hostname %1)))]
(format "server.%s=%s:%s:%s"
%2
(compute/private-ip %1)
(:quorumPort config 2888)
(:electionPort config 3888)))
nodes
(range 1 (inc (count nodes)))))))
:owner owner :group group :mode "0644")
(remote-file/remote-file
(format "%s/myid" data-path)
:content (str (some #(and (= target-ip (second %)) (first %))
(map #(vector %1 (compute/primary-ip %2))
(range 1 (inc (count nodes)))
nodes)))
:owner owner :group group :mode "0644"))))
(defn store-configuration
"Capture zookeeper configuration"
[session options]
(parameter/update-for
session
[:zookeper (keyword (session/group-name session))]
(fn [m]
(assoc m (session/target-name session) options))))
(defn configure
"Configure zookeeper instance"
[session & {:keys [dataDir tickTime clientPort initLimit syncLimit dataLogDir
electionPort quorumPort]
:or {client-port 2181 quorumPort 2888 electionPort 3888}
:as options}]
(->
session
(store-configuration
(assoc options :quorumPort quorumPort :electionPort electionPort))
(config-files)))
#_
(pallet.core/defnode zk
{}
:bootstrap (pallet.action/phase
(pallet.crate.automated-admin-user/automated-admin-user))
:configure (pallet.action/phase
(pallet.crate.java/java :openjdk :jdk)
(pallet.crate.zookeeper/install)
(pallet.crate.zookeeper/configure)
(pallet.crate.zookeeper/init))
:restart-zookeeper (pallet.action/phase
(pallet.action.service/service
"zookeeper" :action :restart))) | null | https://raw.githubusercontent.com/nathanmarz/kafka-deploy/76983b2877a3ebf62c740803f2a55916a42f1420/src/clj/kafka/deploy/crate/zookeeper.clj | clojure | (ns kafka.deploy.crate.zookeeper
(:require
[pallet.action.directory :as directory]
[pallet.action.file :as file]
[pallet.action.remote-directory :as remote-directory]
[pallet.action.remote-file :as remote-file]
[pallet.action.service :as service]
[pallet.action.user :as user]
[pallet.argument :as argument]
[pallet.compute :as compute]
[pallet.parameter :as parameter]
[pallet.session :as session]
[pallet.stevedore :as stevedore]
[clojure.string :as string]
[pallet.resource.package :as package]
[pallet.resource.exec-script :as exec-script]
[pallet.crate.crontab :as crontab]
)
(:use
pallet.thread-expr))
(def install-path "/usr/local/zookeeper")
(def log-path "/var/log/zookeeper")
(def tx-log-path "/mnt/zookeeper")
(def config-path "/etc/zookeeper")
(def data-path "/var/zookeeper")
(def zookeeper-home install-path)
(def zookeeper-user "zookeeper")
(def zookeeper-group "zookeeper")
(def default-config
{:dataDir data-path
:tickTime 2000
:clientPort 2181
:initLimit 10
:syncLimit 5
:dataLogDir tx-log-path})
(defn url "Download url"
[version]
(format
"-%s/zookeeper-%s.tar.gz"
version version))
(defn install
"Install Zookeeper"
[session & {:keys [user group version home]
:or {user zookeeper-user
group zookeeper-group
version "3.3.3"}
:as options}]
(let [url (url version)
home (or home (format "%s-%s" install-path version))]
(->
session
(package/package "daemontools")
(parameter/assoc-for
[:zookeeper :home] home
[:zookeeper :owner] user
[:zookeeper :group] group)
(user/group group :system true)
(user/user user :system true :group group)
(remote-directory/remote-directory
home
:url url :md5-url (str url ".md5")
:unpack :tar :tar-options "xz"
:owner user :group group)
(directory/directory log-path :owner user :group group :mode "0755")
(directory/directory tx-log-path :owner user :group group :mode "0755")
(directory/directory config-path :owner user :group group :mode "0755")
(directory/directory data-path :owner user :group group :mode "0755")
(directory/directory (format "/home/%s" user) :owner user :group group :mode "0755")
(directory/directory (format "%s/supervise" home) :owner user :group group :mode "0755")
(remote-file/remote-file
(format "%s/purge" home)
:content
(format
"#!/bin/bash
cd %s && export ZOOBINDIR=\"bin\" && . bin/zkEnv.sh && echo $CLASSPATH && java -cp $CLASSPATH org.apache.zookeeper.server.PurgeTxnLog %s %s -n 3
"
home
tx-log-path
data-path
)
:overwrite-changes true
:literal true
:mode 755)
(remote-file/remote-file
(format "%s/run" home)
:content
"#!/bin/bash
export ZOOBINDIR=\".\"
if [ \"x$JMXLOCALONLY\" = \"x\" ]
then
JMXLOCALONLY=false
fi
if [ \"x$JMXDISABLE\" = \"x\" ]
then
echo \"JMX enabled by default\"
# for some reason these two options are necessary on jdk6 on Ubuntu
# accord to the docs they are not necessary, but otw jconsole cannot
# do a local attach
ZOOMAIN=\"-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.local.only=$JMXLOCALONLY org.apache.zookeeper.server.quorum.QuorumPeerMain\"
else
echo \"JMX disabled by user request\"
ZOOMAIN=\"org.apache.zookeeper.server.quorum.QuorumPeerMain\"
fi
if [ \"x$2\" != \"x\" ]
then
ZOOCFG=\"$ZOOCFGDIR/$2\"
fi
cd bin && . ./zkEnv.sh && java \"-Dzookeeper.log.dir=${ZOO_LOG_DIR}\" \"-Dzookeeper.root.logger=${ZOO_LOG4J_PROP}\" -cp \"$CLASSPATH\" $JVMFLAGS $ZOOMAIN \"$ZOOCFG\"
"
:overwrite-changes true
:literal true
:mode 755)
(remote-file/remote-file
(format "%s/log4j.properties" config-path)
:remote-file (format "%s/conf/log4j.properties" home)
:owner user :group group :mode "0644")
(file/sed
(format "%s/bin/zkServer.sh" home)
{"# chkconfig:.*" ""
"# description:.*" ""
"# by default we allow local JMX connections"
"# by default we allow local JMX connections\\n# chkconfig: 2345 20 80\\n# description: zookeeper"})
(file/sed
(format "%s/log4j.properties" config-path)
{"log4j.rootLogger=INFO, CONSOLE"
"log4j.rootLogger=INFO, ROLLINGFILE"
"log4j.appender.ROLLINGFILE.File=zookeeper.log"
(format "log4j.appender.ROLLINGFILE.File=%s/zookeeper.log" log-path)}
:seperator "|")
)))
(defn init [session]
(-> session
(exec-script/exec-script
(cd ~(parameter/get-for session [:zookeeper :home]))
"sudo -u " ~(parameter/get-for session [:zookeeper :owner]) " nohup supervise . &")
(crontab/crontab "root"
:content (format "@daily sh %s/purge" (parameter/get-for session [:zookeeper :home])))
))
(defn config-files
"Create a zookeeper configuration file. We sort by name to preserve sequence
across invocations."
[session]
(let [target-name (session/target-name session)
target-ip (session/target-ip session)
nodes (sort-by compute/hostname (session/nodes-in-group session))
configs (parameter/get-for
session
[:zookeper (keyword (session/group-name session))])
config (configs (keyword target-name))
owner (parameter/get-for session [:zookeeper :owner])
group (parameter/get-for session [:zookeeper :group])]
(->
session
(remote-file/remote-file
(format "%s/zoo.cfg" config-path)
:content (str (string/join
\newline
(map #(format "%s=%s" (name (first %)) (second %))
(merge
default-config
(dissoc config :electionPort :quorumPort))))
\newline
(when (> (count nodes) 1)
(string/join
\newline
(map #(let [config (configs
(keyword (compute/hostname %1)))]
(format "server.%s=%s:%s:%s"
%2
(compute/private-ip %1)
(:quorumPort config 2888)
(:electionPort config 3888)))
nodes
(range 1 (inc (count nodes)))))))
:owner owner :group group :mode "0644")
(remote-file/remote-file
(format "%s/myid" data-path)
:content (str (some #(and (= target-ip (second %)) (first %))
(map #(vector %1 (compute/primary-ip %2))
(range 1 (inc (count nodes)))
nodes)))
:owner owner :group group :mode "0644"))))
(defn store-configuration
"Capture zookeeper configuration"
[session options]
(parameter/update-for
session
[:zookeper (keyword (session/group-name session))]
(fn [m]
(assoc m (session/target-name session) options))))
(defn configure
"Configure zookeeper instance"
[session & {:keys [dataDir tickTime clientPort initLimit syncLimit dataLogDir
electionPort quorumPort]
:or {client-port 2181 quorumPort 2888 electionPort 3888}
:as options}]
(->
session
(store-configuration
(assoc options :quorumPort quorumPort :electionPort electionPort))
(config-files)))
#_
(pallet.core/defnode zk
{}
:bootstrap (pallet.action/phase
(pallet.crate.automated-admin-user/automated-admin-user))
:configure (pallet.action/phase
(pallet.crate.java/java :openjdk :jdk)
(pallet.crate.zookeeper/install)
(pallet.crate.zookeeper/configure)
(pallet.crate.zookeeper/init))
:restart-zookeeper (pallet.action/phase
(pallet.action.service/service
"zookeeper" :action :restart))) | |
ddb294d0393eded842352d5ce0790904ef3a590d30e856f3cfa60434ae50cb68 | Kappa-Dev/KappaTools | po_cut.ml | *
* po_cut.ml
*
* Cut concurrent events : a module for * , projet Abstraction , INRIA Paris - Rocquencourt
* , Université Paris - Diderot , CNRS
*
* * , Université Paris Dederot , CNRS
*
* Creation : 16/04/2012
* Last modification : 02/08/2013
* *
* Some parameter references can be tuned thanks to command - line options
* other variables has to be set before compilation
*
* Copyright 2011,2012 Institut National de Recherche en Informatique et
* en Automatique . All rights reserved . This file is distributed
* under the terms of the GNU Library General Public License
* po_cut.ml
*
* Cut concurrent events: a module for KaSim
* Jérôme Feret, projet Abstraction, INRIA Paris-Rocquencourt
* Jean Krivine, Université Paris-Diderot, CNRS
*
* KaSim
* Jean Krivine, Université Paris Dederot, CNRS
*
* Creation: 16/04/2012
* Last modification: 02/08/2013
* *
* Some parameter references can be tuned thanks to command-line options
* other variables has to be set before compilation
*
* Copyright 2011,2012 Institut National de Recherche en Informatique et
* en Automatique. All rights reserved. This file is distributed
* under the terms of the GNU Library General Public License *)
module type Po_cut =
sig
module K:Kappa_instantiation.Cflow_signature
val cut: (Trace.t,(Trace.t * int )) K.H.unary
type on_the_fly_state
val init_cut : on_the_fly_state
val cut_step :
on_the_fly_state -> Trace.step -> on_the_fly_state
val finalize_cut : on_the_fly_state -> Trace.step list * int
val cut_rev_trace:
Trace.step list (*reverse order*) -> Trace.step list (* correct order *) * int
end
module Po_cut =
(struct
module K=Kappa_instantiation.Cflow_linker
type predicate_info =
| Here of K.agent_id
| Bound_site of K.agent_id * Instantiation.site_name
| Internal_state of K.agent_id * Instantiation.site_name
module PSM = SetMap.Make (struct type t = predicate_info
let compare = compare
let print _ _ = () end)
module PS = PSM.Set
let created_predicates_of_action action =
match action with
| Instantiation.Create (ag,interface) ->
let ag_id = K.agent_id_of_agent ag in
List.fold_left
(fun list (s_id,opt) ->
let list = Bound_site(ag_id,s_id) :: list in
match opt
with
| None -> list
| Some _ -> (Internal_state (ag_id,s_id))::list
)
[Here ag_id]
interface
| Instantiation.Bind _ | Instantiation.Bind_to _ | Instantiation.Remove _ | Instantiation.Free _ | Instantiation.Mod_internal _ -> []
let predicates_of_action action =
match action with
| Instantiation.Create (ag,interface) ->
let ag_id = K.agent_id_of_agent ag in
List.fold_left
(fun list (s_id,opt) ->
let list = (Bound_site(ag_id,s_id))::list in
match opt
with
| None -> list
| Some _ -> (Internal_state (ag_id,s_id))::list
)
[Here ag_id]
interface
| Instantiation.Mod_internal (site,_) ->
[Internal_state (K.agent_id_of_site site,K.site_name_of_site site)]
| Instantiation.Bind_to (s1,s2) | Instantiation.Bind (s1,s2) ->
[Bound_site (K.agent_id_of_site s1,K.site_name_of_site s1);Bound_site (K.agent_id_of_site s2,K.site_name_of_site s2)]
| Instantiation.Free s ->
[Bound_site (K.agent_id_of_site s,K.site_name_of_site s)]
| Instantiation.Remove _ -> []
let predicates_of_test test =
match test
with
| Instantiation.Is_Here (agent) ->
[Here (K.agent_id_of_agent agent)]
| Instantiation.Has_Internal(site,_) ->
[Internal_state (K.agent_id_of_site site,K.site_name_of_site site)]
| Instantiation.Is_Free s | Instantiation.Is_Bound s | Instantiation.Has_Binding_type (s,_) ->
[Bound_site (K.agent_id_of_site s,K.site_name_of_site s)]
| Instantiation.Is_Bound_to (s1,s2) ->
[Bound_site (K.agent_id_of_site s1,K.site_name_of_site s1);Bound_site (K.agent_id_of_site s2,K.site_name_of_site s2)]
let predicates_of_side_effects sides =
List.map (fun ((ag_id,_),s_id) -> Bound_site(ag_id,s_id)) sides
type on_the_fly_state = PS.t * Trace.step list * int
let init_cut = (PS.empty, [], 0)
let finalize_cut (_a,b,c) = b,c
let cut_step (seen,kept,n_cut) event =
let rec keep l =
match l
with
| [] -> false
| t0::q0 ->
let rec aux1 l =
match l
with
| [] -> keep q0
| t1::q1 ->
if PS.mem t1 seen
then true
else aux1 q1
in
aux1 (predicates_of_action t0)
in
let rec keep2 l =
match l
with
| [] -> false
| t::q ->
if PS.mem t seen
then
true
else
keep2 q
in
let (action_list,_) = Trace.actions_of_step event in
let seen =
List.fold_left
(fun seen action ->
List.fold_left
(fun seen elt -> PS.remove elt seen)
seen
(created_predicates_of_action action)
)
seen action_list
in
let (actions,_) = Trace.actions_of_step event in
if (Trace.step_is_obs event)
|| (keep actions)
|| (keep2 (predicates_of_side_effects (Trace.side_effects_of_step event)))
then
begin
let kept = event::kept in
let tests = Trace.tests_of_step event in
let tests' =
predicates_of_side_effects (Trace.side_effects_of_step event) in
let seen =
List.fold_left
(fun seen test ->
List.fold_left
(fun seen predicate_info -> PS.add predicate_info seen)
seen
(predicates_of_test test)
)
seen
tests
in
let seen =
List.fold_left
(fun seen predicate_info -> PS.add predicate_info seen)
seen
tests'
in
(seen,kept,n_cut)
end
else
(seen,kept,n_cut+1)
let cut_rev_trace rev_event_list =
let _,event_list,n =
List.fold_left
cut_step
init_cut
rev_event_list
in
(event_list,n)
let cut _parameter _handler info error event_list =
let trace = cut_rev_trace (List.rev event_list) in
error, info, trace
end:Po_cut)
| null | https://raw.githubusercontent.com/Kappa-Dev/KappaTools/eef2337e8688018eda47ccc838aea809cae68de7/core/cflow/po_cut.ml | ocaml | reverse order
correct order | *
* po_cut.ml
*
* Cut concurrent events : a module for * , projet Abstraction , INRIA Paris - Rocquencourt
* , Université Paris - Diderot , CNRS
*
* * , Université Paris Dederot , CNRS
*
* Creation : 16/04/2012
* Last modification : 02/08/2013
* *
* Some parameter references can be tuned thanks to command - line options
* other variables has to be set before compilation
*
* Copyright 2011,2012 Institut National de Recherche en Informatique et
* en Automatique . All rights reserved . This file is distributed
* under the terms of the GNU Library General Public License
* po_cut.ml
*
* Cut concurrent events: a module for KaSim
* Jérôme Feret, projet Abstraction, INRIA Paris-Rocquencourt
* Jean Krivine, Université Paris-Diderot, CNRS
*
* KaSim
* Jean Krivine, Université Paris Dederot, CNRS
*
* Creation: 16/04/2012
* Last modification: 02/08/2013
* *
* Some parameter references can be tuned thanks to command-line options
* other variables has to be set before compilation
*
* Copyright 2011,2012 Institut National de Recherche en Informatique et
* en Automatique. All rights reserved. This file is distributed
* under the terms of the GNU Library General Public License *)
module type Po_cut =
sig
module K:Kappa_instantiation.Cflow_signature
val cut: (Trace.t,(Trace.t * int )) K.H.unary
type on_the_fly_state
val init_cut : on_the_fly_state
val cut_step :
on_the_fly_state -> Trace.step -> on_the_fly_state
val finalize_cut : on_the_fly_state -> Trace.step list * int
val cut_rev_trace:
end
module Po_cut =
(struct
module K=Kappa_instantiation.Cflow_linker
type predicate_info =
| Here of K.agent_id
| Bound_site of K.agent_id * Instantiation.site_name
| Internal_state of K.agent_id * Instantiation.site_name
module PSM = SetMap.Make (struct type t = predicate_info
let compare = compare
let print _ _ = () end)
module PS = PSM.Set
let created_predicates_of_action action =
match action with
| Instantiation.Create (ag,interface) ->
let ag_id = K.agent_id_of_agent ag in
List.fold_left
(fun list (s_id,opt) ->
let list = Bound_site(ag_id,s_id) :: list in
match opt
with
| None -> list
| Some _ -> (Internal_state (ag_id,s_id))::list
)
[Here ag_id]
interface
| Instantiation.Bind _ | Instantiation.Bind_to _ | Instantiation.Remove _ | Instantiation.Free _ | Instantiation.Mod_internal _ -> []
let predicates_of_action action =
match action with
| Instantiation.Create (ag,interface) ->
let ag_id = K.agent_id_of_agent ag in
List.fold_left
(fun list (s_id,opt) ->
let list = (Bound_site(ag_id,s_id))::list in
match opt
with
| None -> list
| Some _ -> (Internal_state (ag_id,s_id))::list
)
[Here ag_id]
interface
| Instantiation.Mod_internal (site,_) ->
[Internal_state (K.agent_id_of_site site,K.site_name_of_site site)]
| Instantiation.Bind_to (s1,s2) | Instantiation.Bind (s1,s2) ->
[Bound_site (K.agent_id_of_site s1,K.site_name_of_site s1);Bound_site (K.agent_id_of_site s2,K.site_name_of_site s2)]
| Instantiation.Free s ->
[Bound_site (K.agent_id_of_site s,K.site_name_of_site s)]
| Instantiation.Remove _ -> []
let predicates_of_test test =
match test
with
| Instantiation.Is_Here (agent) ->
[Here (K.agent_id_of_agent agent)]
| Instantiation.Has_Internal(site,_) ->
[Internal_state (K.agent_id_of_site site,K.site_name_of_site site)]
| Instantiation.Is_Free s | Instantiation.Is_Bound s | Instantiation.Has_Binding_type (s,_) ->
[Bound_site (K.agent_id_of_site s,K.site_name_of_site s)]
| Instantiation.Is_Bound_to (s1,s2) ->
[Bound_site (K.agent_id_of_site s1,K.site_name_of_site s1);Bound_site (K.agent_id_of_site s2,K.site_name_of_site s2)]
let predicates_of_side_effects sides =
List.map (fun ((ag_id,_),s_id) -> Bound_site(ag_id,s_id)) sides
type on_the_fly_state = PS.t * Trace.step list * int
let init_cut = (PS.empty, [], 0)
let finalize_cut (_a,b,c) = b,c
let cut_step (seen,kept,n_cut) event =
let rec keep l =
match l
with
| [] -> false
| t0::q0 ->
let rec aux1 l =
match l
with
| [] -> keep q0
| t1::q1 ->
if PS.mem t1 seen
then true
else aux1 q1
in
aux1 (predicates_of_action t0)
in
let rec keep2 l =
match l
with
| [] -> false
| t::q ->
if PS.mem t seen
then
true
else
keep2 q
in
let (action_list,_) = Trace.actions_of_step event in
let seen =
List.fold_left
(fun seen action ->
List.fold_left
(fun seen elt -> PS.remove elt seen)
seen
(created_predicates_of_action action)
)
seen action_list
in
let (actions,_) = Trace.actions_of_step event in
if (Trace.step_is_obs event)
|| (keep actions)
|| (keep2 (predicates_of_side_effects (Trace.side_effects_of_step event)))
then
begin
let kept = event::kept in
let tests = Trace.tests_of_step event in
let tests' =
predicates_of_side_effects (Trace.side_effects_of_step event) in
let seen =
List.fold_left
(fun seen test ->
List.fold_left
(fun seen predicate_info -> PS.add predicate_info seen)
seen
(predicates_of_test test)
)
seen
tests
in
let seen =
List.fold_left
(fun seen predicate_info -> PS.add predicate_info seen)
seen
tests'
in
(seen,kept,n_cut)
end
else
(seen,kept,n_cut+1)
let cut_rev_trace rev_event_list =
let _,event_list,n =
List.fold_left
cut_step
init_cut
rev_event_list
in
(event_list,n)
let cut _parameter _handler info error event_list =
let trace = cut_rev_trace (List.rev event_list) in
error, info, trace
end:Po_cut)
|
42e3fbbb85035dcbd6e374361ed736045e791c972737e6ec6f39994c2f321ed0 | svenpanne/EOPL3 | exercise-1-22.rkt | #lang eopl
; ------------------------------------------------------------------------------
Exercise 1.22
(define filter-in
(lambda (pred lst)
(cond ((null? lst) '())
((pred (car lst)) (cons (car lst) (filter-in pred (cdr lst))))
(else (filter-in pred (cdr lst))))))
| null | https://raw.githubusercontent.com/svenpanne/EOPL3/3fc14c4dbb1c53a37bd67399eba34cea8f8234cc/chapter1/exercise-1-22.rkt | racket | ------------------------------------------------------------------------------ | #lang eopl
Exercise 1.22
(define filter-in
(lambda (pred lst)
(cond ((null? lst) '())
((pred (car lst)) (cons (car lst) (filter-in pred (cdr lst))))
(else (filter-in pred (cdr lst))))))
|
f41cce6597805bd8ad933673d0bf733d15028b4931a5a27d5c22fe94bd68babd | jstolarek/dep-typed-wbl-heaps-hs | Basics.hs | -----------------------------------------------------------------------
Copyright : 2014 , , Politechnika Łódzka --
-- --
-- License: See LICENSE file in root of the repo --
Repo address : -typed-wbl-heaps-hs --
-- --
-- All the Basics/* modules are used to reinvent the wheel: booleans,--
-- natural numbers, ordering opeartors and primitives for reasoning. --
-- This module re-exports all Basics/* modules for convenience. It --
also defines two type synonyms that will be helpful when working --
-- on heaps: Rank and Priority. --
-----------------------------------------------------------------------
module Basics (
module Basics.Bool
, module Basics.Nat
, module Basics.Ordering
, module Basics.Reasoning
, module Basics.Sing
, module Basics.Unreachable
, Rank, Priority
, undefined
) where
import Basics.Bool
import Basics.Nat
import Basics.Ordering
import Basics.Reasoning
import Basics.Sing
import Basics.Unreachable
import Prelude (undefined)
-- Rank of a weight biased leftist heap is defined as number of nodes
-- in a heap. In other words it is size of a tree used to represent a
-- heap.
type Rank = Nat
Priority assigned to elements stored in a Heap .
--
-- CONVENTION: Lower number means higher Priority. Therefore the
highest Priority is zero . It will sometimes be more convenient not
-- to use this inversed terminology. I will then use terms "smaller"
-- and "greater" (in contrast to "lower" and "higher"). Example:
Priority 3 is higher than 5 , but 3 is smaller than 5 .
type Priority = Nat
Unfortunately in Haskell these synonyms are not as useful as they
are in Agda . The reason is that they can only be used as type
-- synonyms, but not as kind synonyms. So it is invalid to write:
--
data : : Rank - > * where
--
although it is legal to do something like that in Agda .
-- See #9632 and #7961
| null | https://raw.githubusercontent.com/jstolarek/dep-typed-wbl-heaps-hs/0d6e354cbb71056a3eb9df9ebdc788182e137d1d/src/Basics.hs | haskell | ---------------------------------------------------------------------
--
License: See LICENSE file in root of the repo --
--
All the Basics/* modules are used to reinvent the wheel: booleans,--
natural numbers, ordering opeartors and primitives for reasoning. --
This module re-exports all Basics/* modules for convenience. It --
on heaps: Rank and Priority. --
---------------------------------------------------------------------
Rank of a weight biased leftist heap is defined as number of nodes
in a heap. In other words it is size of a tree used to represent a
heap.
CONVENTION: Lower number means higher Priority. Therefore the
to use this inversed terminology. I will then use terms "smaller"
and "greater" (in contrast to "lower" and "higher"). Example:
synonyms, but not as kind synonyms. So it is invalid to write:
See #9632 and #7961 |
module Basics (
module Basics.Bool
, module Basics.Nat
, module Basics.Ordering
, module Basics.Reasoning
, module Basics.Sing
, module Basics.Unreachable
, Rank, Priority
, undefined
) where
import Basics.Bool
import Basics.Nat
import Basics.Ordering
import Basics.Reasoning
import Basics.Sing
import Basics.Unreachable
import Prelude (undefined)
type Rank = Nat
Priority assigned to elements stored in a Heap .
highest Priority is zero . It will sometimes be more convenient not
Priority 3 is higher than 5 , but 3 is smaller than 5 .
type Priority = Nat
Unfortunately in Haskell these synonyms are not as useful as they
are in Agda . The reason is that they can only be used as type
data : : Rank - > * where
although it is legal to do something like that in Agda .
|
c8f70e4058aef8871862eb354ed544ca7466db5a5085759dc00749ae863fe95c | incoherentsoftware/defect-process | Run.hs | module Enemy.All.Lanky.AI.Run
( runBehaviorInstr
) where
import Collision.Hitbox
import Configs.All.Enemy
import Configs.All.Enemy.Lanky
import Constants
import Enemy as E
import Enemy.All.Lanky.AttackDescriptions
import Enemy.All.Lanky.AttackType
import Enemy.All.Lanky.Behavior
import Enemy.All.Lanky.Data
import Enemy.All.Lanky.Projectile
import Msg
import Util
import Window.Graphics
-- | Interpret one behavior instruction from the enemy think step, producing
-- the messages that carry out that instruction.
--
-- When @aiEnabled@ is False, movement and attack instructions are replaced by
-- a transition to idle (unless already idling); all other instructions fall
-- through to the normal handling.
runBehaviorInstr :: Bool -> LankyEnemyBehaviorInstr -> Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
runBehaviorInstr aiEnabled cmd enemy
    | aiEnabled = aiEnabledMsgs
    | otherwise = aiDisabledMsgs
    where
        -- One handler per instruction constructor.
        aiEnabledMsgs = case cmd of
            StartIdleInstr                    -> startIdleBehavior enemy
            UpdateIdleInstr idleTtl           -> updateIdleBehavior idleTtl enemy
            StartWalkInstr                    -> startWalkBehavior enemy
            UpdateWalkInstr walkTtl           -> updateWalkBehavior walkTtl enemy
            StartRetreatInstr                 -> startRetreatBehavior enemy
            UpdateRetreatInstr retreatTtl     -> updateRetreatBehavior retreatTtl enemy
            FacePlayerInstr                   -> facePlayerMessages enemy
            StartAttackInstr atkType          -> startAttackBehavior atkType enemy
            CreateAttackPillarInstr           -> createAttackPillarMessages enemy
            SetSummonAtkCooldownInstr         -> setSummonAtkCooldownMessages enemy
            SetBeamAtkCooldownInstr           -> setBeamAtkCooldownMessages enemy
            UpdateHurtInstr hurtTtl hurtType  -> updateHurtBehavior hurtTtl hurtType enemy
            StartLaunchedInstr hangtimeTtl    -> startLaunchedBehavior hangtimeTtl enemy
            LaunchedHangtimeInstr hangtimeTtl -> launchedHangtimeBehavior hangtimeTtl enemy
            UpdateKneelingInstr kneelingTtl   -> updateKneelingBehavior kneelingTtl enemy
            StartGetUpInstr                   -> startGetUpBehavior enemy
            StartWallSplatInstr wallSplatTtl  -> startWallSplatBehavior wallSplatTtl enemy
            UpdateWallSplatInstr wallSplatTtl -> updateWallSplatBehavior wallSplatTtl enemy
            UpdateSpawnInstr                  -> updateSpawnBehavior enemy
            StartDeathInstr                   -> startDeathBehavior enemy
            SetDeadInstr                      -> enemySetDeadMessages enemy

        aiDisabledMsgs =
            let
                -- Only emit the idle transition when not already idling, so
                -- the idle timer isn't reset every frame.
                setIdleMsgs = case _behavior (_data enemy) of
                    IdleBehavior _ -> []
                    _              -> startIdleBehavior enemy
            in case cmd of
                StartWalkInstr       -> setIdleMsgs
                UpdateWalkInstr _    -> setIdleMsgs
                StartRetreatInstr    -> setIdleMsgs
                UpdateRetreatInstr _ -> setIdleMsgs
                StartAttackInstr _   -> setIdleMsgs
                _                    -> aiEnabledMsgs
-- | Produce update messages that overwrite only the enemy's behavior field,
-- leaving everything else in the enemy untouched.
mkEnemyUpdateBehaviorMsg :: Enemy LankyEnemyData -> LankyEnemyBehavior -> [Msg ThinkEnemyMsgsPhase]
mkEnemyUpdateBehaviorMsg enemy newBehavior = mkEnemyUpdateMsg enemy setBehavior
    where setBehavior e = e {_data = (E._data e) {_behavior = newBehavior}}
-- | Accept the candidate behavior only when it belongs to the same family as
-- the enemy's current behavior (hurt-for-hurt or launched-for-launched);
-- otherwise keep the current behavior unchanged.
updateBehaviorIfMatching :: Enemy LankyEnemyData -> LankyEnemyBehavior -> LankyEnemyBehavior
updateBehaviorIfMatching enemy candidate =
    let current = _behavior $ E._data enemy
    in case (candidate, current) of
        (HurtBehavior _ _, HurtBehavior _ _)     -> candidate
        (LaunchedBehavior _, LaunchedBehavior _) -> candidate
        _                                        -> current
-- | Reset the summon-attack cooldown timer to the configured duration.
setSummonAtkCooldownMessages :: Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
setSummonAtkCooldownMessages enemy = mkEnemyUpdateMsg enemy resetCooldown
    where
        resetCooldown e =
            let
                eData       = E._data e
                cooldownTtl = _summonAtkCooldownSecs . _lanky $ _config eData
            in e {_data = eData {_summonAtkCooldownTtl = cooldownTtl}}
-- | Reset the beam-attack cooldown timer to the configured duration.
setBeamAtkCooldownMessages :: Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
setBeamAtkCooldownMessages enemy = mkEnemyUpdateMsg enemy resetCooldown
    where
        resetCooldown e =
            let
                eData       = E._data e
                cooldownTtl = _beamAtkCooldownSecs . _lanky $ _config eData
            in e {_data = eData {_beamAtkCooldownTtl = cooldownTtl}}
-- | Turn the enemy toward the player's last known x position; no messages are
-- produced when the player position is unknown.
facePlayerMessages :: Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
facePlayerMessages enemy = maybe [] faceMsgs (vecX <$> enemyKnownPlayerPos enemy)
    where
        faceMsgs playerX =
            let
                enemyX = vecX $ E._pos enemy
                dir    = if playerX < enemyX then LeftDir else RightDir
            in [mkMsgTo (EnemyMsgSetDirection dir) (E._msgId enemy)]
-- | Spawn the pillar attack projectile at the player's last known ground
-- position.
createAttackPillarMessages :: Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
createAttackPillarMessages enemy = [mkMsg $ NewThinkProjectileMsgAddM mkAtkPillarProj]
    where
        enemyData = E._data enemy
        Pos2 lastKnownPlayerGroundX lastKnownPlayerGroundY = _lastKnownPlayerGroundPos enemyData

        y = vecY $ E._pos enemy
        -- Take the larger of the two y values; assuming screen coordinates
        -- where y grows downward, this anchors the pillar at the lower
        -- (ground) level -- TODO confirm coordinate convention.
        pillarPosY
            | lastKnownPlayerGroundY > y = lastKnownPlayerGroundY
            | otherwise                  = y
        pillarPos = Pos2 lastKnownPlayerGroundX pillarPosY

        dir             = E._dir enemy
        atkPillarDesc   = _pillar $ _attackDescs enemyData
        mkAtkPillarProj = mkLankyProjectile pillarPos dir atkPillarDesc
-- | Play the death sound at the enemy's hitbox center and freeze the enemy
-- (zero velocity, attack cleared) in DeathBehavior.
startDeathBehavior :: Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
startDeathBehavior enemy = deathSoundMsg:updateMsg
    where
        x             = vecX $ E._pos enemy
        centerY       = vecY $ hitboxCenter (enemyHitbox enemy)
        pos           = Pos2 x centerY
        deathSoundMsg = mkMsg $ AudioMsgPlaySound enemyDeathSoundPath pos

        updateMsg = mkEnemyUpdateMsg enemy $ \e -> e
            { _data   = (_data e) {_behavior = DeathBehavior}
            , _vel    = zeroVel2
            , _attack = Nothing
            }
-- | Tick the hurt timer down by one frame; the behavior is only replaced when
-- the enemy is still in HurtBehavior (see 'updateBehaviorIfMatching').
updateHurtBehavior :: Secs -> HurtType -> Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
updateHurtBehavior hurtTtl hurtType enemy = mkEnemyUpdateMsg enemy tickHurt
    where
        tickHurt e =
            let
                candidate = HurtBehavior (hurtTtl - timeStep) hurtType
                behavior' = updateBehaviorIfMatching e candidate
            in e {_data = (_data e) {_behavior = behavior'}}
-- | Enter LaunchedBehavior with the given hangtime remaining.
startLaunchedBehavior :: Secs -> Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
startLaunchedBehavior hangtimeTtl enemy = mkEnemyUpdateMsg enemy beginLaunched
    where beginLaunched e = e {_data = (E._data e) {_behavior = LaunchedBehavior hangtimeTtl}}
-- | While launched, hold the enemy in the air (zero velocity) and count the
-- hangtime down by one frame; the behavior is only replaced if the enemy is
-- still in LaunchedBehavior (see 'updateBehaviorIfMatching').
launchedHangtimeBehavior :: Secs -> Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
launchedHangtimeBehavior hangtimeTtl enemy = mkEnemyUpdateMsg enemy $ \e -> e
    { _data = (_data e) {_behavior = updateBehaviorIfMatching e behavior}
    , _vel  = zeroVel2
    }
    where behavior = LaunchedBehavior $ hangtimeTtl - timeStep
-- | Transition to the get-up animation behavior.
startGetUpBehavior :: Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
startGetUpBehavior enemy = mkEnemyUpdateMsg enemy beginGetUp
    where beginGetUp e = e {_data = (E._data e) {_behavior = GetUpBehavior}}
-- | Emit wall-impact effect messages (draw scale taken from the lanky enemy
-- config) and switch to WallSplatBehavior with the given time-to-live.
startWallSplatBehavior :: Secs -> Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
startWallSplatBehavior wallSplatTtl enemy = enemyWallImpactMessages effectDrawScale enemy ++ updateEnemyMsg
    where
        effectDrawScale = _wallImpactEffectDrawScale . _lanky . _config $ _data enemy

        updateEnemyMsg = mkEnemyUpdateMsg enemy $ \e -> e
            { _data = (E._data e) {_behavior = WallSplatBehavior wallSplatTtl}
            }
-- | Hold the enemy against the wall (zero velocity) while counting the wall
-- splat timer down by one frame.
updateWallSplatBehavior :: Secs -> Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
updateWallSplatBehavior wallSplatTtl enemy = mkEnemyUpdateMsg enemy tickSplat
    where
        decremented = WallSplatBehavior $ wallSplatTtl - timeStep
        tickSplat e = e
            { _data = (E._data e) {_behavior = decremented}
            , _vel  = zeroVel2
            }
-- | Set the enemy's attack description and enter AttackBehavior, flipping the
-- enemy to face the player's last known x position.
startAttackBehavior :: LankyEnemyAttackType -> Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
startAttackBehavior atkType enemy = attackMsg:behaviorDataMsgs
    where
        enemyData = _data enemy
        hasAura   = hasLankyEnemyDataAura enemyData
        atkDescs  = _attackDescs enemyData
        -- Aura variants of summon/beam are used while the enemy still has
        -- its aura.
        atkDesc   = case atkType of
            SummonAttackType
                | hasAura -> _summonAura atkDescs
                | otherwise -> _summon atkDescs
            BeamAttackType
                | hasAura -> _beamAura atkDescs
                | otherwise -> _beam atkDescs

        enemyId   = E._msgId enemy
        attackMsg = mkMsgTo (EnemyMsgSetAttackDesc atkDesc) enemyId

        behaviorDataMsgs = mkEnemyUpdateMsg enemy $ \e ->
            let
                -- NOTE(review): the bindings below read the captured `enemy`
                -- rather than the lambda's `e`, unlike sibling handlers in
                -- this module -- confirm this is intentional.
                x = vecX $ E._pos enemy
                -- Face the player when their position is known; keep the
                -- current direction when unknown or exactly aligned.
                dir = case vecX <$> enemyKnownPlayerPos enemy of
                    Just playerX
                        | playerX < x -> LeftDir
                        | playerX > x -> RightDir
                    _ -> E._dir enemy
            in e
                { _data = (E._data e) { _behavior = AttackBehavior}
                , _dir = dir
                }
-- | Enter idle: stop moving, clear any in-progress attack, and start the
-- configured idle timer.
startIdleBehavior :: Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
startIdleBehavior enemy = mkEnemyUpdateMsg enemy $ \e ->
    let
        eData    = E._data e
        idleSecs = _idleSecs . _lanky $ _config eData
    in e
        { _data   = eData {_behavior = IdleBehavior idleSecs}
        , _vel    = zeroVel2
        , _attack = Nothing
        }
-- | Count the idle timer down by one frame.
updateIdleBehavior :: Secs -> Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
updateIdleBehavior idleTtl enemy =
    mkEnemyUpdateBehaviorMsg enemy (IdleBehavior (idleTtl - timeStep))
-- | Enter walking for the configured duration, clearing any attack.
startWalkBehavior :: Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
startWalkBehavior enemy = mkEnemyUpdateMsg enemy beginWalk
    where
        beginWalk e =
            let
                eData    = E._data e
                walkSecs = _walkSecs . _lanky $ _config eData
            in e
                { _data   = eData {_behavior = WalkBehavior walkSecs}
                , _attack = Nothing
                }
-- | Advance walking by one frame: tick the walk timer, flip direction when
-- blocked by a wall or ledge, and apply the configured horizontal walk speed.
--
-- NOTE(review): walking uses 'directionNeg' while retreating uses
-- 'directionPos', so walking appears to move opposite the facing direction
-- -- confirm against the direction helpers in Util.
updateWalkBehavior :: Secs -> Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
updateWalkBehavior walkTtl enemy = mkEnemyUpdateMsg enemy $ \e ->
    let
        eData     = E._data e
        walkTtl'  = walkTtl - timeStep
        dir       = enemyFlippedDirIfWallOrGround e
        walkSpeed = _walkSpeed . _lanky $ _config eData
        vel       = Vel2 (walkSpeed * directionNeg dir) 0.0
    in e
        { _data = eData {_behavior = WalkBehavior walkTtl'}
        , _dir  = dir
        , _vel  = vel
        }
-- | Enter retreating for the configured duration, clearing any attack.
startRetreatBehavior :: Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
startRetreatBehavior enemy = mkEnemyUpdateMsg enemy beginRetreat
    where
        beginRetreat e =
            let
                eData       = E._data e
                retreatSecs = _retreatSecs . _lanky $ _config eData
            in e
                { _data   = eData {_behavior = RetreatBehavior retreatSecs}
                , _attack = Nothing
                }
-- | Advance retreating by one frame: tick the retreat timer and move at walk
-- speed along 'directionPos' of the current facing direction (the facing
-- direction itself is not changed here).
updateRetreatBehavior :: Secs -> Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
updateRetreatBehavior retreatTtl enemy = mkEnemyUpdateMsg enemy $ \e ->
    let
        eData     = E._data e
        walkSpeed = _walkSpeed . _lanky $ _config eData
        dir       = E._dir e
        vel       = Vel2 (walkSpeed * directionPos dir) 0.0
    in e
        { _data = eData {_behavior = RetreatBehavior $ retreatTtl - timeStep}
        , _vel  = vel
        }
-- | Drive the spawn animation: emit spawn effect messages when the sprite's
-- first frame ticks over, and transition to idle once the sprite finishes.
updateSpawnBehavior :: Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
updateSpawnBehavior enemy = case E._sprite enemy of
    Just spr
        | _frameIndex spr == 0 && _frameChanged spr -> enemySpawnEffectMessages enemy
        | spriteFinished spr                        -> startIdleBehavior enemy
    _                                               -> []
-- | Count the kneeling timer down by one frame.
updateKneelingBehavior :: Secs -> Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
updateKneelingBehavior kneelingTtl enemy =
    mkEnemyUpdateBehaviorMsg enemy (KneelingBehavior (kneelingTtl - timeStep))
| null | https://raw.githubusercontent.com/incoherentsoftware/defect-process/8797aad1d93bff5aadd7226c39a48f45cf76746e/src/Enemy/All/Lanky/AI/Run.hs | haskell | module Enemy.All.Lanky.AI.Run
( runBehaviorInstr
) where
import Collision.Hitbox
import Configs.All.Enemy
import Configs.All.Enemy.Lanky
import Constants
import Enemy as E
import Enemy.All.Lanky.AttackDescriptions
import Enemy.All.Lanky.AttackType
import Enemy.All.Lanky.Behavior
import Enemy.All.Lanky.Data
import Enemy.All.Lanky.Projectile
import Msg
import Util
import Window.Graphics
runBehaviorInstr :: Bool -> LankyEnemyBehaviorInstr -> Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
runBehaviorInstr aiEnabled cmd enemy
| aiEnabled = aiEnabledMsgs
| otherwise = aiDisabledMsgs
where
aiEnabledMsgs = case cmd of
StartIdleInstr -> startIdleBehavior enemy
UpdateIdleInstr idleTtl -> updateIdleBehavior idleTtl enemy
StartWalkInstr -> startWalkBehavior enemy
UpdateWalkInstr walkTtl -> updateWalkBehavior walkTtl enemy
StartRetreatInstr -> startRetreatBehavior enemy
UpdateRetreatInstr retreatTtl -> updateRetreatBehavior retreatTtl enemy
FacePlayerInstr -> facePlayerMessages enemy
StartAttackInstr atkType -> startAttackBehavior atkType enemy
CreateAttackPillarInstr -> createAttackPillarMessages enemy
SetSummonAtkCooldownInstr -> setSummonAtkCooldownMessages enemy
SetBeamAtkCooldownInstr -> setBeamAtkCooldownMessages enemy
UpdateHurtInstr hurtTtl hurtType -> updateHurtBehavior hurtTtl hurtType enemy
StartLaunchedInstr hangtimeTtl -> startLaunchedBehavior hangtimeTtl enemy
LaunchedHangtimeInstr hangtimeTtl -> launchedHangtimeBehavior hangtimeTtl enemy
UpdateKneelingInstr kneelingTtl -> updateKneelingBehavior kneelingTtl enemy
StartGetUpInstr -> startGetUpBehavior enemy
StartWallSplatInstr wallSplatTtl -> startWallSplatBehavior wallSplatTtl enemy
UpdateWallSplatInstr wallSplatTtl -> updateWallSplatBehavior wallSplatTtl enemy
UpdateSpawnInstr -> updateSpawnBehavior enemy
StartDeathInstr -> startDeathBehavior enemy
SetDeadInstr -> enemySetDeadMessages enemy
aiDisabledMsgs =
let
setIdleMsgs = case _behavior (_data enemy) of
IdleBehavior _ -> []
_ -> startIdleBehavior enemy
in case cmd of
StartWalkInstr -> setIdleMsgs
UpdateWalkInstr _ -> setIdleMsgs
StartRetreatInstr -> setIdleMsgs
UpdateRetreatInstr _ -> setIdleMsgs
StartAttackInstr _ -> setIdleMsgs
_ -> aiEnabledMsgs
mkEnemyUpdateBehaviorMsg :: Enemy LankyEnemyData -> LankyEnemyBehavior -> [Msg ThinkEnemyMsgsPhase]
mkEnemyUpdateBehaviorMsg enemy behavior = mkEnemyUpdateMsg enemy $ \e -> e
{ _data = (E._data e) {_behavior = behavior}
}
updateBehaviorIfMatching :: Enemy LankyEnemyData -> LankyEnemyBehavior -> LankyEnemyBehavior
updateBehaviorIfMatching enemy behavior = case (behavior, existingBehavior) of
(HurtBehavior _ _, HurtBehavior _ _) -> behavior
(LaunchedBehavior _, LaunchedBehavior _) -> behavior
_ -> existingBehavior
where existingBehavior = _behavior $ E._data enemy
setSummonAtkCooldownMessages :: Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
setSummonAtkCooldownMessages enemy = mkEnemyUpdateMsg enemy $ \e ->
let
eData = E._data e
cfg = _lanky $ _config eData
in e
{ _data = eData {_summonAtkCooldownTtl = _summonAtkCooldownSecs cfg}
}
setBeamAtkCooldownMessages :: Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
setBeamAtkCooldownMessages enemy = mkEnemyUpdateMsg enemy $ \e ->
let
eData = E._data e
cfg = _lanky $ _config eData
in e
{ _data = eData {_beamAtkCooldownTtl = _beamAtkCooldownSecs cfg}
}
facePlayerMessages :: Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
facePlayerMessages enemy = case vecX <$> enemyKnownPlayerPos enemy of
Just playerX ->
let
x = vecX $ E._pos enemy
dir = if playerX < x then LeftDir else RightDir
in [mkMsgTo (EnemyMsgSetDirection dir) (E._msgId enemy)]
Nothing -> []
createAttackPillarMessages :: Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
createAttackPillarMessages enemy = [mkMsg $ NewThinkProjectileMsgAddM mkAtkPillarProj]
where
enemyData = E._data enemy
Pos2 lastKnownPlayerGroundX lastKnownPlayerGroundY = _lastKnownPlayerGroundPos enemyData
y = vecY $ E._pos enemy
pillarPosY
| lastKnownPlayerGroundY > y = lastKnownPlayerGroundY
| otherwise = y
pillarPos = Pos2 lastKnownPlayerGroundX pillarPosY
dir = E._dir enemy
atkPillarDesc = _pillar $ _attackDescs enemyData
mkAtkPillarProj = mkLankyProjectile pillarPos dir atkPillarDesc
startDeathBehavior :: Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
startDeathBehavior enemy = deathSoundMsg:updateMsg
where
x = vecX $ E._pos enemy
centerY = vecY $ hitboxCenter (enemyHitbox enemy)
pos = Pos2 x centerY
deathSoundMsg = mkMsg $ AudioMsgPlaySound enemyDeathSoundPath pos
updateMsg = mkEnemyUpdateMsg enemy $ \e -> e
{ _data = (_data e) {_behavior = DeathBehavior}
, _vel = zeroVel2
, _attack = Nothing
}
updateHurtBehavior :: Secs -> HurtType -> Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
updateHurtBehavior hurtTtl hurtType enemy = mkEnemyUpdateMsg enemy $ \e ->
let
hurtTtl' = hurtTtl - timeStep
behavior = updateBehaviorIfMatching e (HurtBehavior hurtTtl' hurtType)
in e
{ _data = (_data e) {_behavior = behavior}
}
startLaunchedBehavior :: Secs -> Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
startLaunchedBehavior hangtimeTtl enemy = mkEnemyUpdateMsg enemy $ \e -> e
{ _data = (E._data e) {_behavior = LaunchedBehavior hangtimeTtl}
}
launchedHangtimeBehavior :: Secs -> Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
launchedHangtimeBehavior hangtimeTtl enemy = mkEnemyUpdateMsg enemy $ \e -> e
{ _data = (_data e) {_behavior = updateBehaviorIfMatching e behavior}
, _vel = zeroVel2
}
where behavior = LaunchedBehavior $ hangtimeTtl - timeStep
startGetUpBehavior :: Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
startGetUpBehavior enemy = mkEnemyUpdateMsg enemy $ \e -> e
{ _data = (E._data e) {_behavior = GetUpBehavior}
}
startWallSplatBehavior :: Secs -> Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
startWallSplatBehavior wallSplatTtl enemy = enemyWallImpactMessages effectDrawScale enemy ++ updateEnemyMsg
where
effectDrawScale = _wallImpactEffectDrawScale . _lanky . _config $ _data enemy
updateEnemyMsg = mkEnemyUpdateMsg enemy $ \e -> e
{ _data = (E._data e) {_behavior = WallSplatBehavior wallSplatTtl}
}
updateWallSplatBehavior :: Secs -> Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
updateWallSplatBehavior wallSplatTtl enemy = mkEnemyUpdateMsg enemy $ \e -> e
{ _data = (E._data e) {_behavior = behavior}
, _vel = zeroVel2
}
where behavior = WallSplatBehavior $ wallSplatTtl - timeStep
startAttackBehavior :: LankyEnemyAttackType -> Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
startAttackBehavior atkType enemy = attackMsg:behaviorDataMsgs
where
enemyData = _data enemy
hasAura = hasLankyEnemyDataAura enemyData
atkDescs = _attackDescs enemyData
atkDesc = case atkType of
SummonAttackType
| hasAura -> _summonAura atkDescs
| otherwise -> _summon atkDescs
BeamAttackType
| hasAura -> _beamAura atkDescs
| otherwise -> _beam atkDescs
enemyId = E._msgId enemy
attackMsg = mkMsgTo (EnemyMsgSetAttackDesc atkDesc) enemyId
behaviorDataMsgs = mkEnemyUpdateMsg enemy $ \e ->
let
x = vecX $ E._pos enemy
dir = case vecX <$> enemyKnownPlayerPos enemy of
Just playerX
| playerX < x -> LeftDir
| playerX > x -> RightDir
_ -> E._dir enemy
in e
{ _data = (E._data e) { _behavior = AttackBehavior}
, _dir = dir
}
startIdleBehavior :: Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
startIdleBehavior enemy = mkEnemyUpdateMsg enemy $ \e ->
let
eData = E._data e
idleSecs = _idleSecs . _lanky $ _config eData
in e
{ _data = eData {_behavior = IdleBehavior idleSecs}
, _vel = zeroVel2
, _attack = Nothing
}
updateIdleBehavior :: Secs -> Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
updateIdleBehavior idleTtl enemy = mkEnemyUpdateBehaviorMsg enemy behavior
where
idleTtl' = idleTtl - timeStep
behavior = IdleBehavior idleTtl'
startWalkBehavior :: Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
startWalkBehavior enemy = mkEnemyUpdateMsg enemy $ \e ->
let
eData = E._data e
lankyCfg = _lanky $ _config eData
in e
{ _data = eData {_behavior = WalkBehavior $ _walkSecs lankyCfg}
, _attack = Nothing
}
updateWalkBehavior :: Secs -> Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
updateWalkBehavior walkTtl enemy = mkEnemyUpdateMsg enemy $ \e ->
let
eData = E._data e
walkTtl' = walkTtl - timeStep
dir = enemyFlippedDirIfWallOrGround e
walkSpeed = _walkSpeed . _lanky $ _config eData
vel = Vel2 (walkSpeed * directionNeg dir) 0.0
in e
{ _data = eData {_behavior = WalkBehavior walkTtl'}
, _dir = dir
, _vel = vel
}
startRetreatBehavior :: Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
startRetreatBehavior enemy = mkEnemyUpdateMsg enemy $ \e ->
let
eData = E._data e
lankyCfg = _lanky $ _config eData
in e
{ _data = eData {_behavior = RetreatBehavior $ _retreatSecs lankyCfg}
, _attack = Nothing
}
updateRetreatBehavior :: Secs -> Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
updateRetreatBehavior retreatTtl enemy = mkEnemyUpdateMsg enemy $ \e ->
let
eData = E._data e
walkSpeed = _walkSpeed . _lanky $ _config eData
dir = E._dir e
vel = Vel2 (walkSpeed * directionPos dir) 0.0
in e
{ _data = eData {_behavior = RetreatBehavior $ retreatTtl - timeStep}
, _vel = vel
}
updateSpawnBehavior :: Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
updateSpawnBehavior enemy = case E._sprite enemy of
Just spr
| _frameIndex spr == 0 && _frameChanged spr -> enemySpawnEffectMessages enemy
| spriteFinished spr -> startIdleBehavior enemy
_ -> []
updateKneelingBehavior :: Secs -> Enemy LankyEnemyData -> [Msg ThinkEnemyMsgsPhase]
updateKneelingBehavior kneelingTtl enemy = mkEnemyUpdateBehaviorMsg enemy behavior
where behavior = KneelingBehavior $ kneelingTtl - timeStep
| |
a4e9422161c9ecc5d9bf440175944e6c29b7d364f8144c107200eac568c2dcb7 | plum-umd/adapton.ocaml | spreadTree.ml | * Spread trees :
Spread trees are a general - purpose data structure for ( nominal ,
demand - driven ) incremental computation .
A spread tree can represent :
-- a sequence ( and in particular , an iterator ) ,
-- a binary search tree ,
-- a set ,
-- a mapping ,
-- a graph
-- and more ( ? )
Programmers use spreadtrees by constructing and computing with one
of the above structures , and by inter - converting between them .
The programmer rarely ( if ever ) sees the actual spreadtree
structure , which is hidden by an API for one of the above
structures .
Internally , a " spreadtree " is a binary tree whose leaves consist
of linked lists . Both the tree 's internal nodes and linked - list
leaves hold data elements . Incrementality is accomplished by
interposing the recursive tree and list structures with Adapton
names and articulation points ( in particular , see
GrifolaType . ArtType ) .
= = Tree and List structure :
In Trees , we place data elements in two places : ( 1 ) at internal
tree nodes and ( 2 ) at the lists in the leaves . We use both places
so as to trade off between tree structure and list structure in a
flexible way .
* * This will be helpful for experiments , where we can measure the
performance penalty of less tree - like data structures .
By varying the size of leaf lists relative to the tree size , one
trades off tree structure for list structure , either " spreading "
the tree out linearly into a list , or gathering the leaf lists
back into a bifurcated tree structure . In either extreme , one
either has a binary tree with empty leafs , or a single flat linked
list .
= = Ropes versus SpreadTrees
We use the term " rope " for a restricted structure ( aka a " join
list " ) , where there are three cases : Zero , One , Two , and where One
cases carries one data element , and where the binary ( Two ) case
carries no data elements , but just two sub - ropes .
Since the Two case carries no data , it is helpful for writing
certain computations , e.g. , mergesort . On the other hand , without
data in the Two case , ropes can not represent search trees ( just
sequences ) .
= = Use of incremental articulation points :
Note : Nominal features are necessary to build and maintain trees
efficiently . Hence , these structures were designed with nominal
incremental reuse in mind . We expect that non - nominal thunks will
not perform well incrementally , as compared with nominal thunks .
We design " articulation points " ( of laziness / incrementality ) into
the structures as a special recursive case , which can be present or
absent with any frequency . In one extreme , the structures have
fine - grained articulated structure , and are maximally lazy and
incremental . In the other extreme , the structures have no
articulated structure , and correspond exactly to immutable
( purely - functional ) structures that never change across time .
Reasoning about articulation points isolates reasoning about
laziness and nominal incrementality from reasoning about eager
( and purely - functional ) calculation steps . Articulation cases are
defined and used separately from the usual cases of the structure ,
which are defined in the usual ( eager ) fashion .
Spread trees are a general-purpose data structure for (nominal,
demand-driven) incremental computation.
A spread tree can represent:
-- a sequence (and in particular, an iterator),
-- a binary search tree,
-- a set,
-- a mapping,
-- a graph
-- and more (?)
Programmers use spreadtrees by constructing and computing with one
of the above structures, and by inter-converting between them.
The programmer rarely (if ever) sees the actual spreadtree
structure, which is hidden by an API for one of the above
structures.
Internally, a "spreadtree" is a binary tree whose leaves consist
of linked lists. Both the tree's internal nodes and linked-list
leaves hold data elements. Incrementality is accomplished by
interposing the recursive tree and list structures with Adapton
names and articulation points (in particular, see
GrifolaType.ArtType).
== Tree and List structure:
In Trees, we place data elements in two places: (1) at internal
tree nodes and (2) at the lists in the leaves. We use both places
so as to trade off between tree structure and list structure in a
flexible way.
** This will be helpful for experiments, where we can measure the
performance penalty of less tree-like data structures.
By varying the size of leaf lists relative to the tree size, one
trades off tree structure for list structure, either "spreading"
the tree out linearly into a list, or gathering the leaf lists
back into a bifurcated tree structure. In either extreme, one
either has a binary tree with empty leafs, or a single flat linked
list.
== Ropes versus SpreadTrees
We use the term "rope" for a restricted structure (aka a "join
list"), where there are three cases: Zero, One, Two, and where One
cases carries one data element, and where the binary (Two) case
carries no data elements, but just two sub-ropes.
Since the Two case carries no data, it is helpful for writing
certain computations, e.g., mergesort. On the other hand, without
data in the Two case, ropes cannot represent search trees (just
sequences).
== Use of incremental articulation points:
Note: Nominal features are necessary to build and maintain trees
efficiently. Hence, these structures were designed with nominal
incremental reuse in mind. We expect that non-nominal thunks will
not perform well incrementally, as compared with nominal thunks.
We design "articulation points" (of laziness/incrementality) into
the structures as a special recursive case, which can be present or
absent with any frequency. In one extreme, the structures have
fine-grained articulated structure, and are maximally lazy and
incremental. In the other extreme, the structures have no
articulated structure, and correspond exactly to immutable
(purely-functional) structures that never change across time.
Reasoning about articulation points isolates reasoning about
laziness and nominal incrementality from reasoning about eager
(and purely-functional) calculation steps. Articulation cases are
defined and used separately from the usual cases of the structure,
which are defined in the usual (eager) fashion.
*)
module type S = sig
type elt
type name
type 'art art_list = [ (* articulated list. *)
| `Nil
| `Cons of elt * 'art art_list
| `Art of 'art
| `Name of name * 'art art_list
]
type ('leaf, 'art) art_tree = [ (* articulated tree. *)
| `Leaf of 'leaf
| `Bin of ('leaf,'art) art_tree * elt * ('leaf,'art) art_tree
| `Art of 'art
| `Name of name * ('leaf, 'art) art_tree
]
type ('one, 'art) art_rope = [ (* articulated rope. *)
| `Zero
| `One of 'one
| `Two of ('one,'art) art_rope * ('one,'art) art_rope
| `Art of 'art
| `Name of name * ('one,'art) art_rope
]
module rec List : Articulated.S with type t = List.Art.t art_list
and type name = name
module rec Tree : Articulated.S with type t = (List.t, Tree.Art.t) art_tree
and type name = name
module rec Rope : Articulated.S with type t = (elt, Rope.Art.t) art_rope
and type name = name
end
module Make
(ArtLib : ArtLib.S)
(Name : Name.S)
(Elt : Data.S)
: S with type name = Name.t
and type elt = Elt.t =
struct
type elt = Elt.t [@@deriving eq, ord, show]
type name = Name.t [@@deriving eq, ord, show]
type 'art art_list = [ (* articulated list. *)
| `Nil
| `Cons of elt * 'art art_list
| `Art of 'art
| `Name of Name.t * 'art art_list
]
[@@deriving eq, ord, show]
type ('leaf, 'art) art_tree = [ (* articulated tree. *)
| `Leaf of 'leaf
| `Bin of ('leaf,'art) art_tree * elt * ('leaf,'art) art_tree
| `Art of 'art
| `Name of Name.t * ('leaf,'art) art_tree
]
[@@deriving eq, ord, show]
type ('one, 'art) art_rope = [ (* articulated rope. *)
| `Zero
| `One of 'one
| `Two of ('one,'art) art_rope * ('one,'art) art_rope
| `Art of 'art
| `Name of Name.t * ('one,'art) art_rope
]
[@@deriving eq, ord, show]
module rec List : (Articulated.S with type t = List.Art.t art_list
and type name = Name.t) =
struct
type name = Name.t
module Data = struct
type t = List.Art.t art_list
[@@deriving eq, ord, show]
let rec hash seed x =
( match x with
| `Nil -> Hashtbl.seeded_hash seed `Nil
| `Cons(x,tl) -> Elt.hash (hash seed tl) x
| `Art a -> List.Art.hash seed a
| `Name(nm,xs) -> (Name.hash (hash seed xs) nm)
)
let rec sanitize x =
( match x with
| `Nil -> `Nil
| `Cons (x, tl) -> `Cons(Elt.sanitize x, sanitize tl)
| `Art a -> `Art (List.Art.sanitize a)
| `Name(nm,xs) -> `Name(Name.sanitize nm, sanitize xs)
)
end
module Art = ArtLib.MakeArt(Name)(Data)
include Data
end
module rec Tree : (Articulated.S with type t = (List.t, Tree.Art.t) art_tree
and type name = Name.t) =
struct
type name = Name.t
module Data = struct
type t = (List.t, Tree.Art.t) art_tree
[@@deriving eq, ord, show]
let rec hash seed x =
( match x with
| `Leaf xs -> List.hash seed xs
| `Bin(l,x,r) -> hash (Elt.hash (hash seed l) x) r
| `Art a -> Tree.Art.hash seed a
| `Name(nm,x) -> (Name.hash (hash seed x) nm)
)
let rec sanitize x =
( match x with
| `Leaf x -> `Leaf (List.sanitize x)
| `Bin(l,x,r) -> `Bin(sanitize l, Elt.sanitize x, sanitize r)
| `Art a -> `Art(Tree.Art.sanitize a)
| `Name(nm,x) -> `Name(Name.sanitize nm, sanitize x)
)
end
module Art = ArtLib.MakeArt(Name)(Data)
include Data
end
module rec Rope : (Articulated.S with type t = (elt, Rope.Art.t) art_rope
                           and type name = Name.t) =
struct
  type name = Name.t
  module Data = struct
    type t = (elt, Rope.Art.t) art_rope
    [@@deriving eq, ord, show]

    (* Fold each constructor into the seeded hash.  The `Zero case previously
       returned a constant 0, discarding the seed; it now mixes the seed the
       same way List.hash does for `Nil (and Tree.hash for its leaves), so
       seeded hashing stays consistent across the three structures. *)
    let rec hash seed x =
      ( match x with
        | `Zero -> Hashtbl.seeded_hash seed `Zero
        | `One x -> Elt.hash seed x
        | `Two (x,y) -> hash (hash seed x) y
        | `Art a -> Rope.Art.hash seed a
        | `Name(nm,x) -> (Name.hash (hash seed x) nm)
      )

    (* Structurally copy the rope, sanitizing elements, names and
       articulation points along the way. *)
    let rec sanitize x =
      ( match x with
        | `Zero -> `Zero
        | `One x -> `One (Elt.sanitize x)
        | `Two(x,y) -> `Two (sanitize x, sanitize y)
        | `Art a -> `Art(Rope.Art.sanitize a)
        | `Name(nm,x) -> `Name(Name.sanitize nm, sanitize x)
      )
  end
  module Art = ArtLib.MakeArt(Name)(Data)
  include Data
end
end
Sequences , based on SpreadTrees .
(* *)
module SeqWrap
(ArtLib : ArtLib.S)
(Name : Name.S)
(Elt : Data.S)
(St : S with type elt = Elt.t
and type name = Name.t) =
struct
let default_granularity = 4
type name = St.name
module AElt = ArtLib.MakeArt(Name)(Elt)
module AEltOption = ArtLib.MakeArt(Name)(Types.Option(Elt))
(* Abbreviations, for accessing mfn_* and force: *)
module LArt = St.List.Art
module TArt = St.Tree.Art
module RArt = St.Rope.Art
(* [mut_elts_of_list name list data_of name_of gran_level] builds a mutable
   articulated list from an ordinary OCaml list.  An articulation point (a
   named cell) is inserted before the tail of an element whenever
   [Bits.ffs0 (Elt.hash 0 (data_of x)) >= gran_level]; presumably this gives
   a hash-driven, geometric spacing of articulation points controlled by
   [gran_level] -- TODO confirm Bits.ffs0 semantics.  [?c] chooses whether
   the cons goes outside ([true], default) or inside the `Name wrapper. *)
let mut_elts_of_list
?c:(cons_first=true)
( name : name )
( list : 'a list )
( data_of : 'a -> St.elt )
( name_of : 'a -> name )
( gran_level : int )
: St.List.Art.t
=
let rec loop list =
match list with
| [] -> `Nil
| x :: xs ->
if Bits.ffs0 (Elt.hash 0 (data_of x)) >= gran_level then
(* Split the element's name so one half names the `Name constructor and
   the other names the new cell. *)
let nm1, nm2 = Name.fork (name_of x) in
if cons_first then
`Cons((data_of x), `Name(nm1, `Art (St.List.Art.cell nm2 (loop xs))))
else
`Name(nm1, `Cons((data_of x), `Art (St.List.Art.cell nm2 (loop xs))))
else
`Cons((data_of x), (loop xs))
in St.List.Art.cell name (loop list)
(* Render an articulated list as a flat debug string, eagerly forcing every
   articulation point encountered. *)
let simple_full_string =
let rec render xs =
match xs with
| `Nil -> "Nil"
| `Cons (hd, tl) -> (Elt.show hd) ^ "; " ^ (render tl)
| `Art a -> "Art => " ^ (render (LArt.force a))
| `Name (_, tl) -> "Name; " ^ (render tl)
in render
(* [insert_elt list_art h nm_tl_opt] destructively makes [h] the head of
   [list_art].  With [Some (nm, tl_art)], the old content of [list_art] is
   moved into [tl_art] and the new head points at it through `Name/`Art;
   with [None], [h] is consed directly onto the existing content.
   NOTE(review): despite [let rec], this function never recurses. *)
let rec insert_elt list_art h nm_tl_opt =
match nm_tl_opt with
| Some (nm, tl_art) ->
(* The two cells must be distinct or the swap below would lose the tail. *)
assert ( list_art <> tl_art );
let list_art_content = St.List.Art.force list_art in
St.List.Art.set list_art (`Cons(h, `Name(nm, `Art(tl_art)))) ;
St.List.Art.set tl_art list_art_content ;
| None ->
let list_art_content = St.List.Art.force list_art in
St.List.Art.set list_art (`Cons(h, list_art_content))
(* [delete_elt list_art] removes the first element reachable from [list_art]
   and returns [(element, remaining_tail)]; raises Failure on an empty list.
   NOTE(review): the `Art branch of [loop] returns the articulation point
   unchanged (discarding the recursively computed tail), so when the first
   element sits behind an `Art the subsequent [set] appears to leave the list
   unmodified -- confirm whether this is intentional. *)
let rec delete_elt list_art =
let (x,x_tl) =
let rec loop list =
match list with
| `Art art ->
let elt, tl = loop (St.List.Art.force art) in
elt, (`Art art)
| `Name (nm, tl) ->
let elt, tl = loop tl in
elt, (`Name(nm, tl))
| `Cons(x, tl) -> (x,tl)
| `Nil -> failwith "delete_elt: Nil: No element to delete"
in
loop (St.List.Art.force list_art)
in
St.List.Art.set list_art x_tl ;
(x,x_tl)
(* Skip past data and name constructors to the first articulation point in
   the list, without forcing anything; [None] when the list ends first. *)
let rec next_art = function
| `Art a -> Some a
| `Cons (_, rest) -> next_art rest
| `Name (_, rest) -> next_art rest
| `Nil -> None
(* Force through articulation points and names to the first cons cell,
   returning its head and tail; [None] at the end of the list. *)
let rec next_cons = function
| `Cons (hd, tl) -> Some (hd, tl)
| `Art a -> next_cons (LArt.force a)
| `Name (_, rest) -> next_cons rest
| `Nil -> None
(* [ith_art list count] returns the list remaining after skipping [count]
   cons cells, forcing articulation points along the way.
   NOTE(review): despite the name, the result is a list tail (which may or
   may not start at an articulation point). *)
let rec ith_art list count =
( match count with
| x when x <= 0 -> list
| _ -> match list with
| `Nil -> `Nil
| `Cons(x, xs) -> ith_art xs (count-1)
| `Name(_, xs) -> ith_art xs count
| `Art a -> ith_art (St.List.Art.force a) count
)
This function returns the final art of a list , which contains exactly ` Nil
if it 's not available , it 's created first , mutating the input list directly
This function returns the final art of a list, which contains exactly `Nil
if it's not available, it's created first, mutating the input list directly
*)
(* [get_or_create_final_art list] returns the last articulation point of
   [list], guaranteed to contain exactly `Nil.  If the last articulation
   point holds trailing elements instead, a fresh named `Nil cell is
   appended in place (mutating the list) and that new cell is returned. *)
let get_or_create_final_art (list : LArt.t) =
(* Chase articulation points until there is no further `Art below. *)
let rec find_last art =
match next_art (LArt.force art) with
| None -> art
| Some(a) -> find_last a
in
let la = find_last list in
(* return it if it already contains `Nil *)
if LArt.force la = `Nil then la else
(* Otherwise rebuild the tail, replacing the final `Nil with a named
   articulated `Nil cell. *)
let rec create_nil_art elt =
match elt with
| `Nil ->
let nm1, nm2 = Name.fork (Name.gensym ()) in
`Name(nm1, `Art(LArt.cell nm2 `Nil))
| `Cons(x,xs) -> `Cons(x, create_nil_art xs)
| `Art(a) -> failwith "two last arts!"
| `Name(nm, xs) -> `Name(nm, create_nil_art xs)
in
(* add articulated `Nil to the end and return that art *)
LArt.set la (create_nil_art (LArt.force la));
find_last la
(* First [count] elements of the articulated list as a plain OCaml
   list; [None] means take everything. Names are skipped, arts forced. *)
let rec take list count =
  match count with
  | Some n when n <= 0 -> []
  | _ ->
    let remaining =
      match count with
      | Some n -> Some (n-1)
      | None -> None
    in
    ( match list with
      | `Nil -> []
      | `Cons(hd, tail) -> hd :: take tail remaining
      | `Name(_, tail) -> take tail count
      | `Art a -> take (St.List.Art.force a) count
    )
(* [true] iff the list contains no cons cell at all. *)
let rec list_is_empty ( list : St.List.t) : bool =
  match list with
  | `Cons(_,_) -> false
  | `Art a -> list_is_empty ( LArt.force a )
  | `Name (_,rest) -> list_is_empty rest
  | `Nil -> true
(* Memoized list length. Each `Name becomes a named memo point
   ([mfn_nart]) so recomputation after an edit is incremental. *)
let list_length : St.List.t -> int =
  let module Len = ArtLib.MakeArt(Name)(Types.Int) in
  let mfn = Len.mk_mfn (Name.of_string "list_length")
    (module St.List)
    (fun r l ->
      let len l = r.Len.mfn_data l in
      let memo_len n l = r.Len.mfn_nart n l in
      match l with
      | `Nil -> 0
      | `Cons(_,l) -> 1 + (len l)
      | `Art(a) -> len (LArt.force a)
      | `Name(nm, l) -> Len.force (memo_len nm l)
    )
  in
  fun l -> mfn.Len.mfn_data l
(* Memoized append of two articulated lists. A `Name in [xs] is forked:
   one half names the result, the other names the suspended memo art
   that appends the remainder. *)
let list_append =
  let mfn = LArt.mk_mfn (Name.of_string "list_append")
    (module Types.Tuple2(St.List)(St.List))
    (fun r (xs, ys) ->
      let list_append xs ys = r.LArt.mfn_data (xs,ys) in
      ( match xs with
      | `Nil -> ys
      | `Cons(x,tl) -> `Cons(x, list_append tl ys)
      | `Art a -> list_append (LArt.force a) ys
      | `Name(nm,xs) ->
        let nm1, nm2 = Name.fork nm in
        `Name(nm1, `Art (r.LArt.mfn_nart nm2 (xs, ys)))
      ))
  in
  fun xs ys -> mfn.LArt.mfn_data (xs, ys)
(* In-order flattening of a tree into a list, prepended to [rest].
   Memoized at `Name points; leaf lists are spliced with [list_append]. *)
let list_of_tree : St.Tree.t -> St.List.t -> St.List.t =
  let mfn = LArt.mk_mfn (Name.of_string "list_of_tree")
    (module Types.Tuple2(St.Tree)(St.List))
    (fun r (tree, rest) ->
      let list_of_tree tree list = r.LArt.mfn_data (tree, list) in
      ( match tree with
      | `Leaf xs -> list_append xs rest
      | `Bin(left,x,right) -> list_of_tree left (`Cons(x, list_of_tree right rest))
      | `Art art -> list_of_tree (TArt.force art) rest
      | `Name(nm,tree) -> let nm1,nm2 = Name.fork nm in
        `Name(nm1, `Art(r.LArt.mfn_nart nm2 (tree, rest)))
      ))
  in
  fun tree list -> mfn.LArt.mfn_data (tree, list)
(* Probabilistically-balanced rope construction from a list.
   Each element's "level" is the count of trailing zero bits of its hash
   ([Bits.ffs0]); an element is absorbed while its level lies between
   [rope_lev] and [parent_lev], which yields a treap-like shape that is
   canonical w.r.t. element hashes. Returns the rope built so far plus
   the unconsumed list suffix. A pending `Name is threaded through
   [nm_opt] and spent at the next absorption point (named memo art +
   [P.split] to articulate the pair result). *)
let rope_of_list_rec : name option -> int -> int -> St.Rope.t -> St.List.t -> St.Rope.t * St.List.t =
  let module P = Articulated.ArtTuple2(ArtLib)(Name)(St.Rope)(St.List) in
  let rope_of_list_rec =
    let mfn = P.Art.mk_mfn (Name.of_string "rope_of_list_rec")
      (module Types.Tuple5(Types.Option(Name))(Types.Int)(Types.Int)(St.Rope)(St.List))
      (fun r (nm_opt, parent_lev, rope_lev, rope, list) ->
        let rope_of_list_rec no pl tl t l = r.P.Art.mfn_data (no,pl,tl,t,l) in
        ( match list with
        | `Nil -> rope, `Nil
        | `Cons (hd, tl) ->
          let hd_lev = Bits.ffs0 (Elt.hash 0 hd) in
          if rope_lev <= hd_lev && hd_lev <= parent_lev then (
            match nm_opt with
            | None ->
              (* build the right spine below hd's level, then continue above it *)
              let right, rest = rope_of_list_rec None hd_lev (-1) (`One hd) tl in
              let rope = `Two(rope, right) in
              rope_of_list_rec None parent_lev hd_lev rope rest
            | Some(nm) ->
              (* spend the pending name: articulate the (rope, rest) pair *)
              let nm1,nm = Name.fork nm in
              let nm2,nm3 = Name.fork nm in
              let right, rest = P.split nm1 (r.P.Art.mfn_nart nm2 (None, hd_lev, (-1), (`One hd), tl)) in
              let rope : St.Rope.t = `Two(rope, `Name(nm3, `Art(right))) in
              rope_of_list_rec None parent_lev hd_lev rope (LArt.force rest)
          )
          else (
            (* hd belongs to an enclosing level: stop here, re-attach any name *)
            match nm_opt with
            | None -> rope, list
            | Some(nm) -> rope, `Name(nm, list)
          )
        | `Art art -> rope_of_list_rec nm_opt parent_lev rope_lev rope (LArt.force art)
        | `Name(nm, list) -> rope_of_list_rec (Some nm) parent_lev rope_lev rope list
        )
      )
    in
    fun nm pl tl t l -> mfn.P.Art.mfn_data (nm, pl, tl, t, l)
  in
  rope_of_list_rec
(* Convert a whole list to a rope: seed the recursion with the widest
   possible level window [(-1), max_int] and an empty rope. The
   leftover suffix should be empty (assertion kept disabled). *)
let rope_of_list : St.List.t -> St.Rope.t =
  fun list ->
    let rope, rest =
      rope_of_list_rec None max_int (-1) (`Zero) list
    in
    (* assert (list_is_empty rest) ; *)
    rope
(* In-order flattening of a rope into a list, prepended to [rest];
   memoized at `Name points. Inverse (up to articulation) of
   [rope_of_list]. *)
let list_of_rope : St.Rope.t -> St.List.t -> St.List.t =
  let mfn = LArt.mk_mfn (Name.of_string "list_of_rope")
    (module Types.Tuple2(St.Rope)(St.List))
    (fun r (rope, rest) ->
      let list_of_rope rope list = r.LArt.mfn_data (rope, list) in
      ( match rope with
      | `Zero -> rest
      | `One x -> `Cons(x, rest)
      | `Two(x,y) -> list_of_rope x (list_of_rope y rest)
      | `Art art -> list_of_rope (RArt.force art) rest
      | `Name(nm,rope) -> let nm1,nm2 = Name.fork nm in
        `Name(nm1, `Art(r.LArt.mfn_nart nm2 (rope, rest)))
      ))
  in
  fun rope list -> mfn.LArt.mfn_data (rope, list)
(* Memoized rope length (number of `One leaves); `Name points become
   named memo arts for incremental reuse. *)
let rope_length : St.Rope.t -> int =
  let module Len = ArtLib.MakeArt(Name)(Types.Int) in
  let mfn = Len.mk_mfn (Name.of_string "rope_length")
    (module St.Rope)
    (fun r rope ->
      let len rope = r.Len.mfn_data rope in
      let memo_len n rope = r.Len.mfn_nart n rope in
      match rope with
      | `Zero -> 0
      | `One(x) -> 1
      | `Two(r1,r2) -> (len r1) + (len r2)
      | `Art(a) -> len (RArt.force a)
      | `Name(nm, r) -> Len.force (memo_len nm r)
    )
  in
  fun rope -> mfn.Len.mfn_data rope
(* Memoized check that the rope holds at least one element, namespaced
   per caller so distinct uses get distinct memo tables.
   NOTE(review): the local name [empty] and the memo name "rope_empty"
   are misleading — the computed predicate is NOT-empty (`One -> true,
   `Zero -> false, `Two -> or). Behavior matches the function's name. *)
let rope_not_empty : name -> St.Rope.t -> bool =
  fun (namespace : name) ->
  let module M = ArtLib.MakeArt(Name)(Types.Bool) in
  let fnn = Name.pair (Name.of_string "rope_empty") namespace in
  let mfn = M.mk_mfn fnn
    (module St.Rope)
    (fun r rope ->
      let empty rope = r.M.mfn_data rope in
      let memo_empty n rope = r.M.mfn_nart n rope in
      match rope with
      | `Zero -> false
      | `One(x) -> true
      | `Two(r1,r2) -> (empty r1) || (empty r2)
      | `Art(a) -> empty (RArt.force a)
      | `Name(nm, r) -> M.force (memo_empty nm r)
    )
  in
  fun rope -> mfn.M.mfn_data rope
(* non-memoised indexed lookup of a rope, using memoized rope_length for speed *)
(* Returns the [n]th element (0-based, in-order), or [None] when [n] is
   out of range. After the initial bounds check, inner failures can only
   mean [rope_length] reported an inconsistent length. *)
let rope_nth rope n : Elt.t option =
  if rope_length rope <= n then None else
  (* main work after initial checks *)
  let rec rope_nth rope n =
    match rope with
    | `Zero -> failwith "rope_nth: bad length reporting"
    | `One(x) -> if n = 0 then Some(x) else failwith "rope_nth: bad length reporting"
    | `Two(r1,r2) ->
      (* descend left when the index falls inside the left subtree *)
      let r1l = rope_length r1 in
      if r1l > n then
        rope_nth r1 n
      else
        rope_nth r2 (n-r1l)
    | `Art(a) -> rope_nth (RArt.force a) n
    | `Name(nm, r) -> rope_nth r n
  in
  rope_nth rope n
(* Accumulator-style memoized reverse: reverse [list] onto [rev].
   NOTE(review): the accumulator pattern means memo reuse is limited —
   see [list_reverse_balanced] below for the balanced variant. *)
let list_reverse : St.List.t -> St.List.t -> St.List.t =
  let mfn = LArt.mk_mfn (Name.of_string "list_reverse")
    (module Types.Tuple2(St.List)(St.List))
    (fun r (list, rev) ->
      let list_reverse list rev = r.LArt.mfn_data (list,rev) in
      ( match list with
      | `Nil -> rev
      | `Cons(x, xs) -> list_reverse xs (`Cons(x, rev))
      | `Art art -> list_reverse (LArt.force art) rev
      | `Name (nm, xs) -> let nm1, nm2 = Name.fork nm in
        `Name(nm1, `Art (r.LArt.mfn_nart nm2 (xs, rev)))
      ))
  in
  fun list rev -> mfn.LArt.mfn_data (list, rev)
(* Hash-balanced memoized reverse. Elements are grouped by their hash
   level ([Bits.ffs0]) within a [lo..hi] window — the same treap-like
   partitioning as [rope_of_list_rec] — so the memoized call structure
   is balanced and stable under edits. The helper returns the
   unconsumed suffix plus the accumulated reversal; the wrapper expects
   the suffix to be fully consumed. [accum] re-articulates the
   accumulator under a name so it can be shared between memo entries. *)
let list_reverse_balanced : St.List.t -> St.List.t -> St.List.t =
  let debug = false in
  let accum =
    LArt.mk_mfn
      (Name.of_string "list_reverse_accum")
      (module St.List)
      (fun _ list ->
        (if debug then Printf.printf "... accum=(%s)\n" (St.List.show list));
        list)
  in
  let module Res = ArtLib.MakeArt(Name)(Types.Tuple2(St.List)(St.List)) in
  let module Arg = Types.Tuple5(Types.Option(Name))(Types.Int)(Types.Int)(St.List)(St.List) in
  let mfn =
    Res.mk_mfn
      (Name.of_string "list_reverse")(module Arg)
      (fun r ((no, lo, hi, list, rev) as arg) ->
        (if debug then Printf.printf "... list_reverse:args=(%s)\n%!" (Arg.show arg)) ;
        let list_reverse no lo hi list rev = r.Res.mfn_data (no,lo,hi,list,rev) in
        ( match list with
        | `Nil -> (`Nil, rev)
        | `Cons(x, xs) ->
          let hd_lev = Bits.ffs0 (Elt.hash 0 x) in
          if lo <= hd_lev && hd_lev <= hi then (
            match no with
            | None ->
              (* consume below x's level, then continue above it *)
              let rev = `Cons(x,rev) in
              let rest, rev = list_reverse None (-1) hd_lev xs rev in
              (if debug then Printf.printf "... rest1,rev1 = %s,%s\n%!" (St.List.show rest) (St.List.show rev)) ;
              let rest, rev = list_reverse None hd_lev hi rest rev in
              (if debug then Printf.printf "... rest2,rev2 = %s,%s\n%!" (St.List.show rest) (St.List.show rev)) ;
              rest, rev
            | Some nm ->
              (* spend the pending name: name the accumulator cell and
                 the suspended memoized sub-reversal *)
              let nm1,nm = Name.fork nm in
              let nm2,nm3 = Name.fork nm in
              let rev = `Name(nm1, `Art(accum.LArt.mfn_nart nm2 (`Cons(x, rev)))) in
              let rest, rev = Res.force (r.Res.mfn_nart nm3 (None, -1, hd_lev, xs, rev)) in
              (if debug then Printf.printf "...N rest1,rev1 = %s,%s\n%!" (St.List.show rest) (St.List.show rev)) ;
              let rest, rev = list_reverse None hd_lev hi rest rev in
              (if debug then Printf.printf "...N rest2,rev2 = %s,%s\n%!" (St.List.show rest) (St.List.show rev)) ;
              rest, rev
          )
          else (
            (* x belongs to an enclosing level window: stop here *)
            (if debug then Printf.printf "... Basecase: list,rev = %s,%s\n%!" (St.List.show list) (St.List.show rev)) ;
            match no with
            | Some nm -> (`Name(nm,list), rev)
            | None -> (list, rev)
          )
        | `Art art -> list_reverse no lo hi (LArt.force art) rev
        | `Name (nm, xs) -> list_reverse (Some nm) lo hi xs rev
        ))
  in
  fun list rev ->
    match mfn.Res.mfn_data (None, -1, max_int, list, rev) with
    | `Nil, rev -> rev
    | _, _ -> failwith "list_reverse: impossible"
(* Memoized rope reversal: swap the children of every `Two node.
   At a `Name the suspended art is forced immediately, making this
   eager rather than lazy. NOTE(review): the outer [rec] is unused. *)
let rec rope_reverse =
  let mfn = RArt.mk_mfn (Name.of_string "rope_reverse")
    (module St.Rope)
    (fun r rope -> let rope_reverse = r.RArt.mfn_data in
      ( match rope with
      | `Zero -> `Zero
      | `One x -> `One x
      | `Two(x,y) -> `Two(rope_reverse y, rope_reverse x)
      | `Art art -> rope_reverse (RArt.force art)
      | `Name (nm,rope) ->
        let nm1,nm2 = Name.fork nm in
        let art = r.RArt.mfn_nart nm2 rope in
        (* eager: force the art now so work happens up front *)
        ignore (RArt.force art) ;
        `Name(nm1, `Art(art))
      ))
  in
  fun rope -> mfn.RArt.mfn_data rope
(* TODO: optimize, compact zeros *)
(* Memoized filter over a rope. Rejected leaves become `Zero (they are
   not compacted — see the TODO above), so the shape is preserved. *)
let rope_filter
    (op_nm : name)
    (op : Elt.t -> bool)
    : St.Rope.t -> St.Rope.t =
  (* namespace the memo table by the predicate's name *)
  let fnn = Name.pair (Name.of_string "rope_filter") op_nm in
  let mfn = RArt.mk_mfn fnn
    (module St.Rope)
    (fun r rope ->
      let rope_filter = r.RArt.mfn_data in
      match rope with
      | `Zero -> `Zero
      | `One(x) -> if (op x) then `One(x) else `Zero
      | `Two(x,y) -> `Two(rope_filter x, rope_filter y)
      | `Art(a) -> rope_filter (RArt.force a)
      | `Name(nm, rp) ->
        let nm1, nm2 = Name.fork nm in
        `Name(nm1, `Art(r.RArt.mfn_nart nm2 rp))
    )
  in
  fun rope -> mfn.RArt.mfn_data rope
(* Lazily memoized filter over an articulated list; each `Name becomes
   a suspended memo art for the filtered tail. *)
let list_filter
    (op_nm : name)
    (op : Elt.t -> bool)
    : St.List.t -> St.List.t =
  (* namespace the memo table by the predicate's name *)
  let fnn = Name.pair (Name.of_string "list_filter") op_nm in
  let mfn = LArt.mk_mfn fnn
    (module St.List)
    (fun r list ->
      let list_filter = r.LArt.mfn_data in
      match list with
      | `Nil -> `Nil
      | `Cons(x, xs) ->
        let rest = list_filter xs in
        if op x then `Cons(x, rest) else rest
      | `Art(a) -> list_filter (LArt.force a)
      | `Name(nm, xs) ->
        let nm1, nm2 = Name.fork nm in
        `Name(nm1, `Art(r.LArt.mfn_nart nm2 xs))
    )
  in
  fun list -> mfn.LArt.mfn_data list
(* Lazily memoized map over an articulated list: [op] is applied per
   element, and every `Name becomes a suspended, named memo art for
   the mapped tail. The memo table is namespaced by [op_nm]. *)
let list_map
    (op_nm : name)
    (op : Elt.t -> Elt.t)
    : St.List.t -> St.List.t =
  let memo_name = Name.pair (Name.of_string "list_map") op_nm in
  let mfn =
    LArt.mk_mfn memo_name
      (module St.List)
      (fun mfn_rec input ->
         let recur = mfn_rec.LArt.mfn_data in
         match input with
         | `Nil -> `Nil
         | `Cons(hd, tail) -> `Cons(op hd, recur tail)
         | `Art(a) -> recur (LArt.force a)
         | `Name(nm, tail) ->
           let outer, inner = Name.fork nm in
           `Name(outer, `Art(mfn_rec.LArt.mfn_nart inner tail)))
  in
  fun input -> mfn.LArt.mfn_data input
(* Allocate a named memo cell that holds [list] verbatim (the mfn is
   the identity); used to re-articulate eagerly computed results. *)
let list_ref_cell
    : name -> St.List.t -> St.List.Art.t =
  let fnn = Name.of_string "list_ref_cell" in
  let mfn = LArt.mk_mfn fnn
    (module St.List)
    (fun r list -> list)
  in
  mfn.St.List.Art.mfn_nart
(* Eagerly memoized map: at each `Name the mapped tail is computed
   immediately ([LArt.force]) and re-articulated via [list_ref_cell],
   so work happens up front instead of on demand.
   Fix: the memo-table name previously reused "list_map", which made
   this mfn's memo identity collide with [list_map]'s for the same
   [op_nm]; it now gets its own distinct name. *)
let list_eager_map
    (op_nm : name)
    (op : Elt.t -> Elt.t)
    : St.List.t -> St.List.t =
  let fnn = Name.pair (Name.of_string "list_eager_map") op_nm in
  let mfn = LArt.mk_mfn fnn
    (module St.List)
    (fun r list ->
      let list_map = r.LArt.mfn_data in
      match list with
      | `Nil -> `Nil
      | `Cons(x, xs) -> `Cons(op x, list_map xs)
      | `Art(a) -> list_map (LArt.force a)
      | `Name(nm, xs) ->
        let nm1, nm = Name.fork nm in
        let nm2, nm3 = Name.fork nm in
        let ys = (* memoized recursive call: *)
          LArt.force (r.LArt.mfn_nart nm1 xs)
        in
        let ref_ys = list_ref_cell nm2 ys in
        `Name(nm3, `Art(ref_ys))
    )
  in
  fun list -> mfn.LArt.mfn_data list
(* Eagerly memoized filter: at each `Name the filtered tail is computed
   immediately ([LArt.force]) and re-articulated via [list_ref_cell].
   Fix: the memo-table name previously reused "list_filter", which made
   this mfn's memo identity collide with [list_filter]'s for the same
   [op_nm]; it now gets its own distinct name. *)
let list_eager_filter
    (op_nm : name)
    (op : Elt.t -> bool)
    : St.List.t -> St.List.t =
  let fnn = Name.pair (Name.of_string "list_eager_filter") op_nm in
  let mfn = LArt.mk_mfn fnn
    (module St.List)
    (fun r list ->
      let list_filter = r.LArt.mfn_data in
      match list with
      | `Nil -> `Nil
      | `Cons(x, xs) ->
        let rest = list_filter xs in
        if op x then `Cons(x, rest) else rest
      | `Art(a) -> list_filter (LArt.force a)
      | `Name(nm, xs) ->
        let nm1, nm = Name.fork nm in
        let nm2, nm3 = Name.fork nm in
        let ys = (* memoized recursive call: *)
          LArt.force (r.LArt.mfn_nart nm1 xs)
        in
        let ref_ys = list_ref_cell nm2 ys in
        `Name(nm3, `Art(ref_ys))
    )
  in
  fun list -> mfn.LArt.mfn_data list
(* Memoized pairwise map: combine consecutive elements two at a time
   with [op]; a trailing unpaired element is dropped. Arts and names
   that fall between the two elements of a pair are normalized outward
   before the pair is consumed. *)
let list_map_paired
    (op_nm : name)
    (op : Elt.t -> Elt.t -> Elt.t)
    : St.List.t -> St.List.t =
  let fnn = Name.pair (Name.of_string "list_map_paired") op_nm in
  let mfn = LArt.mk_mfn fnn
    (module St.List)
    (fun r list ->
      let map2 = r.LArt.mfn_data in
      match list with
      | `Nil -> `Nil
      (* ignore last value if unpaired *)
      | `Cons(_, `Nil) -> `Nil
      | `Cons(x, `Cons(y, ys)) -> `Cons(op x y, map2 ys)
      | `Cons(x, `Art(a)) -> map2 (`Cons(x, LArt.force a))
      (* move the name to the outside, to catch in a later case *)
      | `Cons(x, `Name(nm,xs)) -> map2 (`Name(nm, `Cons(x,xs)))
      | `Art(a) -> map2 (LArt.force a)
      (* deal with double names from both data *)
      | `Name(nm, `Cons(x, `Art(a))) -> map2 (`Name(nm, `Cons(x, LArt.force a)))
      | `Name(nm1, `Cons(x, `Name(nm2, xs))) ->
        (* should we pair and fork these names for tracking purposes? *)
        let nm1 , nm2 = Name.fork @@ Name.pair nm1 nm2 in
        `Name(nm1, `Art(r.LArt.mfn_nart nm2 (`Cons(x,xs))))
      | `Name(nm, `Art(a)) -> map2 (`Name(nm, LArt.force a))
      (* after all the double name cases are dealt with, handle the default *)
      | `Name(nm, xs) ->
        let nm1, nm2 = Name.fork nm in
        `Name(nm1, `Art(r.LArt.mfn_nart nm2 xs))
    )
  in
  fun list -> mfn.LArt.mfn_data list
(* Fork an optional name into two optional halves; [None] forks to
   a pair of [None]s. *)
let name_opt_fork nm =
  match nm with
  | Some nm ->
    let left, right = Name.fork nm in
    (Some left, Some right)
  | None -> (None, None)
(* Left-biased choice between two optional names: the first name that
   is present wins. *)
let name_opt_seq nm1 nm2 =
  match nm1 with
  | Some _ -> nm1
  | None -> nm2
(* TODO: simplify this as a special case of rope_reduce_name below *)
(* Memoized reduction of a rope by an associative operator [op];
   [None] for an (all-`Zero) empty rope. Adjacent nested names are
   collapsed, keeping the taller of the two; a single `Name becomes
   the named memo point. NOTE(review): the outer [rec] is unused. *)
let rec rope_reduce
    ( op_nm : St.name )
    ( op : Elt.t -> Elt.t -> Elt.t )
    : St.Rope.t -> Elt.t option =
  let fnn = Name.pair (Name.of_string "rope_reduce") op_nm in
  let mfn = AEltOption.mk_mfn fnn
    (module St.Rope)
    (fun r rope ->
      let rope_reduce = r.AEltOption.mfn_data in
      ( match rope with
      | `Zero -> None
      | `One x -> Some x
      | `Two(left,right) ->
        ( match rope_reduce left, rope_reduce right with
        | Some l, Some r -> Some (op l r)
        | Some l, None -> Some l
        | None, Some r -> Some r
        | None, None -> None
        )
      | `Art art -> rope_reduce (St.Rope.Art.force art)
      | `Name (nm1, `Name(nm2, rope)) ->
        (* collapse doubled names, keeping the taller one *)
        let nm = if Name.height nm1 > Name.height nm2 then nm1 else nm2 in
        rope_reduce (`Name(nm, rope))
      | `Name (nm, rope) ->
        AEltOption.force (r.AEltOption.mfn_nart nm rope)
      ))
  in
  fun rope -> mfn.AEltOption.mfn_data rope
(* Like [rope_reduce], but also threads an optional name through the
   reduction and returns one, so callers can name downstream work.
   NOTE(review): the outer [rec] is unused. *)
let rec rope_reduce_name
    ( op_nm : St.name )
    ( op : Elt.t -> Elt.t -> Elt.t )
    : St.Rope.t -> Elt.t option * name option =
  let fnn = Name.pair (Name.of_string "rope_reduce_name") op_nm in
  let module M =
    ArtLib.MakeArt
      (Name)
      (Types.Tuple2(Types.Option(Elt))(Types.Option(Name)))
  in
  let mfn = M.mk_mfn fnn
    (module Types.Tuple2(St.Rope)(Types.Option(Name)))
    (fun r (rope, nm_opt)->
      let rope_reduce frag = r.M.mfn_data (frag, nm_opt) in
      match rope with
      | `Zero -> None, nm_opt
      | `One x -> Some x, nm_opt
      | `Two(left,right) ->
        let r1,no1 = rope_reduce left in
        let r2,no2 = rope_reduce right in
        (* find a useful name of the three available *)
        let nm_opt = name_opt_seq nm_opt (name_opt_seq no1 no2) in
        ( match r1, r2 with
        | Some l, Some r -> Some (op l r), nm_opt
        | Some l, None -> Some l, nm_opt
        | None, Some r -> Some r, nm_opt
        | None, None -> None, nm_opt
        )
      | `Art art -> rope_reduce (St.Rope.Art.force art)
      | `Name (nm1, `Name(nm2, rope)) ->
        (* collapse doubled names, keeping the taller one *)
        let nm = if Name.height nm1 > Name.height nm2 then nm1 else nm2 in
        rope_reduce (`Name(nm, rope))
      | `Name (nm, rope) ->
        let nm1, nm2 = Name.fork nm in
        M.force (r.M.mfn_nart nm1 (rope, Some(nm2)))
    )
  in
  fun rope -> mfn.M.mfn_data (rope, None)
(* finds the median of a rope in current order; sort first to find the true median *)
(* Element at index len/2 of the rope's current order, or [None] when
   the rope is empty. *)
let rope_median rope : Elt.t option =
  let len = rope_length rope in
  if len = 0 then None
  else rope_nth rope (len / 2)
(* Memoized two-way merge by [compare]. Two pending names (one per
   input list) are threaded through; when a cons is produced, the
   side that supplied the element spends its pending name to create a
   named cons plus a suspended memo art for the rest of the merge.
   Each comparison bumps the unit-cost statistics counter. *)
let list_merge_full
    (compare_nm : name)
    (compare : Elt.t -> Elt.t -> int)
    : name option ->
      name option ->
      St.List.t ->
      St.List.t -> St.List.t =
  let fnn = Name.pair (Name.of_string "list_merge") compare_nm in
  let mfn = LArt.mk_mfn fnn
    (module Types.Tuple4
      (Types.Option(Name))
      (Types.Option(Name))
      (St.List)
      (St.List))
    (fun r (nm_opt1,nm_opt2,list1,list2) ->
      let merge xs ys = r.LArt.mfn_data (nm_opt1,nm_opt2,xs,ys) in
      let merge_nms nm1 nm2 xs ys = r.LArt.mfn_data (nm1,nm2,xs,ys) in
      (* emit an element from list1; spend list1's pending name if any *)
      let merge_cons1 x l1 l2 =
        match nm_opt1 with
        | None -> `Cons(x, merge l1 l2)
        | Some(nms) ->
          let nm1,nm2 = Name.fork nms in
          `Name(nm1, `Cons(x, `Art(r.LArt.mfn_nart nm2 (None, nm_opt2, l1, l2))))
      in
      (* emit an element from list2; spend list2's pending name if any *)
      let merge_cons2 y l1 l2 =
        match nm_opt2 with
        | None -> `Cons(y, merge l1 l2)
        | Some(nms) ->
          let nm1,nm2 = Name.fork nms in
          `Name(nm1, `Cons(y, `Art(r.LArt.mfn_nart nm2 (nm_opt1, None, l1, l2))))
      in
      match list1, list2 with
      | `Nil, _ -> (match nm_opt2 with None -> list2 | Some nm -> `Name(nm,list2))
      | _, `Nil -> (match nm_opt1 with None -> list1 | Some nm -> `Name(nm,list1))
      | `Art(a1), _ -> merge (LArt.force a1) list2
      | _, `Art(a2) -> merge list1 (LArt.force a2)
      | `Name(nm1, xs1), _ -> merge_nms (Some(nm1)) nm_opt2 xs1 list2
      | _, `Name(nm2, xs2) -> merge_nms nm_opt1 (Some(nm2)) list1 xs2
      | `Cons(x,xs), `Cons(y,ys) ->
        incr Statistics.Counts.unit_cost ;
        if compare x y <= 0 then
          merge_cons1 x xs list2
        else
          merge_cons2 y list1 ys
    )
  in
  fun nm1 nm2 l1 l2 -> mfn.LArt.mfn_data (nm1, nm2, l1, l2)
(* Merge with no pending names. Note: partially applying
   [list_merge_full] here builds its memo function exactly once per
   (cmp_nm, cmp) pair — do not eta-expand. *)
let list_merge cmp_nm cmp =
  list_merge_full cmp_nm cmp None None
(* Memoized mergesort over a rope: sort each side of a `Two and merge.
   A pending name is threaded so that names end up attached to the
   first `Cons of sorted runs, which is where the merge expects them. *)
let rope_mergesort
    ( compare_nm : St.name )
    ( compare : Elt.t -> Elt.t -> int )
    : St.Rope.t -> St.List.t =
  let fnn = Name.pair (Name.of_string "rope_mergesort") compare_nm in
  let merge = list_merge_full compare_nm compare in
  let mfn = St.List.Art.mk_mfn fnn
    (module Types.Tuple2(Types.Option(Name))(St.Rope))
    (fun r (nm,rope) ->
      let rope_mergesort nm rope = r.LArt.mfn_data (nm,rope) in
      ( match rope with
      | `Zero -> `Nil
      | `One x ->
        ( match nm with
        | None -> `Cons(x,`Nil)
        | Some nm -> `Name(nm, `Cons(x, `Nil))
        )
      | `Two(x, y) ->
        (* send the name to the first `Cons *)
        let x_sorted = rope_mergesort nm x in
        let y_sorted = rope_mergesort None y in
        merge None None x_sorted y_sorted
      | `Art art -> rope_mergesort nm (RArt.force art)
      | `Name (nnm, rope) ->
        let nm1,nm2 = Name.fork nnm in
        match nm with
        | None ->
          (*
            suspend, but don't create a name-seed to here,
            they need to be associated with 'Cons
          *)
          `Art (r.LArt.mfn_nart nm1 (Some nm2,rope))
        (* keep both names active *)
        | Some(nm) ->
          `Name(nm,`Art (r.LArt.mfn_nart nm1 (Some nm2,rope)))
      ))
  in
  fun rope -> mfn.LArt.mfn_data (None,rope)
(* Sort an articulated list by converting it to a balanced rope and
   mergesorting that. The sorter is built once, when both the name and
   the comparison are supplied (partial application preserved). *)
let list_to_rope_mergesort
    ( compare_nm : St.name )
    ( compare : Elt.t -> Elt.t -> int )
    : St.List.t -> St.List.t =
  let sorted_of_rope = rope_mergesort compare_nm compare in
  fun input -> sorted_of_rope (rope_of_list input)
end
(* Sequence operations ([SeqWrap]) instantiated over the spread-tree
   structures produced by [Make] for the same element type. *)
module MakeSeq(ArtLib : ArtLib.S)(Name : Name.S)(Elt : Data.S) =
  SeqWrap(ArtLib)(Name)(Elt)(Make(ArtLib)(Name)(Elt))
(* Makes a key-value mapping, based on SpreadTrees.
   The mapping is represented as a tree of key-value-sequence pairs.
   Keys are ordered by a comparison function.
   The tree is a binary search tree according to this comparison function.
   Value sequences are stored in an unordered fashion. *)
module KvMapWrap
(ArtLib : ArtLib.S)
(Name : Name.S)
(Key : Data.S)
(Val : Data.S)
(ValSt : S with type elt = Val.t
and type name = Name.t) =
struct
  (* Placeholder entry projections: entries are currently bare keys, so
     both the key projection and the "emptied" entry are the identity.
     NOTE(review): presumably meant to evolve into real (key, values)
     pairs — confirm before relying on removal semantics below. *)
  let get_key x = x
  let empty_kv k = k
  module KeySt = Make(ArtLib)(Name)(Key)
  module KeySeq = MakeSeq (ArtLib)(Name)(Key)
  module ValSeq = MakeSeq (ArtLib)(Name)(Val)
  (* adapter: optional key with its own art type, used by memoized removals *)
  module KeyOptAdpt =
  struct
    type name = Name.t
    module Tmp = Types.Option(Key)
    module Art = ArtLib.MakeArt(Name)(Tmp)
    include Tmp
  end
  module ABool = ArtLib.MakeArt(Name)(Types.Bool)
  module TArt = KeySt.Tree.Art
  module LArt = KeySt.List.Art
(* Memoized binary-search-tree invariant check: every key must lie in
   the inclusive [lo,hi] window, with windows narrowed at each `Bin.
   Leaves may hold at most one entry. NOTE(review): [rec] is unused. *)
let rec is_bst : Key.t * Key.t -> KeySt.Tree.t -> bool =
  let mfn = ABool.mk_mfn (Name.of_string "is_bst")
    (module (Types.Tuple3(Key)(Key)(KeySt.Tree)))
    (fun r (lo,hi,tree) ->
      let is_bst (lo,hi) tree = r.ABool.mfn_data (lo,hi,tree) in
      ( match tree with
      | `Leaf `Nil -> true
      | `Leaf `Art art -> is_bst (lo,hi) (`Leaf (KeySt.List.Art.force art))
      | `Leaf `Name (_,rest) -> is_bst (lo,hi) (`Leaf rest)
      | `Leaf `Cons(kv, rest) ->
        (* a leaf holds at most one in-window entry *)
        Key.compare lo (get_key kv) <= 0
        && Key.compare (get_key kv) hi <= 0
        && KeySeq.list_is_empty rest
      | `Bin(left,kv,right) -> let x = get_key kv in
        ( Key.compare lo x <= 0
          && Key.compare x hi <= 0
          && is_bst (lo, x) left
          && is_bst (x, hi) right )
      | `Art art -> is_bst (lo,hi) (KeySt.Tree.Art.force art)
      | `Name (nm,tree) -> ABool.force (r.ABool.mfn_nart nm (lo,hi,tree))
      ))
  in
  fun (lo,hi) tree -> mfn.ABool.mfn_data (lo,hi,tree)
(* Linear scan of an entry list for the entry whose key equals
   [target]; arts are forced and names skipped. *)
let rec list_find
    (list : KeySt.List.t)
    (target : Key.t) : Key.t option =
  match list with
  | `Cons(entry, rest) ->
    if Key.compare (get_key entry) target = 0
    then Some entry
    else list_find rest target
  | `Art art -> list_find (KeySt.List.Art.force art) target
  | `Name (_, rest) -> list_find rest target
  | `Nil -> None
(* Binary-search for [target] in the tree; names and arts are
   transparent. Fix: the trailing [else if ord = 0 … else failwith
   "impossible"] was dead code — when [ord] is neither negative nor
   positive it is necessarily zero, so the equal case is now the
   plain [else] branch. *)
let rec tree_find
    ( tree : KeySt.Tree.t )
    ( target : Key.t ) : Key.t option =
  ( match tree with
  | `Leaf xs -> list_find xs target
  | `Bin(left,kv,right) ->
    let ord = Key.compare target (get_key kv) in
    if ord < 0 then tree_find left target
    else if ord > 0 then tree_find right target
    else Some kv
  | `Art art -> tree_find (KeySt.Tree.Art.force art) target
  | `Name (_,tree) -> tree_find tree target
  )
(* Memoized removal of the first entry matching [target] from a list:
   returns the removed entry (if found) and the remaining list. At a
   `Name, the (entry, remainder) pair is articulated and split so both
   components are independently incremental.
   NOTE(review): [rec] is unused. *)
let rec list_remove : KeySt.List.t -> Key.t -> (Key.t option) * KeySt.List.t =
  let module M = Articulated.ArtTuple2(ArtLib)(Name)(KeyOptAdpt)(KeySt.List) in
  let mfn = M.Art.mk_mfn (Name.of_string "list_remove")
    (module (Types.Tuple2(KeySt.List)(Key)))
    (fun r (list, target) ->
      let list_remove list target = r.M.Art.mfn_data (list, target) in
      ( match list with
      | `Nil -> ( None, `Nil )
      | `Cons(kv, rest) ->
        if Key.compare (get_key kv) target = 0 then
          ( Some kv, rest )
        else
          let res, rem = list_remove rest target in
          (res, `Cons(kv, rem))
      | `Art art -> list_remove (KeySt.List.Art.force art) target
      | `Name (nm, list) ->
        let nm1,nm = Name.fork nm in
        let nm2,nm3 = Name.fork nm in
        let elt_rem = r.M.Art.mfn_nart nm1 (list, target) in
        let elt,rem = M.split nm2 elt_rem in
        M.Adpt1.Art.force elt, `Name(nm3, `Art rem)
      ))
  in
  fun list target -> mfn.M.Art.mfn_data (list, target)
(* Memoized removal of [target]'s entry from the tree: returns the
   removed entry (if any) and the updated tree.
   Fix: the recursive `Bin cases previously returned only the searched
   subtree, silently dropping the node's own entry and the sibling
   subtree from the result; they now rebuild the `Bin node around the
   updated child (mirroring [list_remove]'s reconstruction). The dead
   [failwith "impossible"] after the exhaustive sign tests is gone.
   NOTE(review): on a hit, the node keeps [empty_kv (get_key kv)]
   rather than being structurally deleted — with the current identity
   [empty_kv] the key stays in the tree; confirm intended semantics. *)
let rec tree_remove : KeySt.Tree.t -> Key.t -> (Key.t option) * KeySt.Tree.t =
  let module M = Articulated.ArtTuple2(ArtLib)(Name)(KeyOptAdpt)(KeySt.Tree) in
  let mfn = M.Art.mk_mfn (Name.of_string "tree_remove")
    (module (Types.Tuple2(KeySt.Tree)(Key)))
    (fun r (tree, target) ->
      let tree_remove tree target = r.M.Art.mfn_data (tree, target) in
      ( match tree with
      | `Leaf xs ->
        let res, ys = list_remove xs target in
        (res, `Leaf ys)
      | `Bin(left, kv, right) ->
        let ord = Key.compare target (get_key kv) in
        if ord < 0 then
          let res, left' = tree_remove left target in
          (res, `Bin(left', kv, right))
        else if ord > 0 then
          let res, right' = tree_remove right target in
          (res, `Bin(left, kv, right'))
        else (Some kv, `Bin(left, empty_kv (get_key kv), right))
      | `Art art -> tree_remove (KeySt.Tree.Art.force art) target
      | `Name (nm, tree) ->
        let nm1, nm = Name.fork nm in
        let nm2, nm3 = Name.fork nm in
        let elt_rem = r.M.Art.mfn_nart nm1 (tree, target) in
        let elt,rem = M.split nm2 elt_rem in
        M.Adpt1.Art.force elt, `Name(nm3, `Art rem)
      ))
  in
  fun tree target -> mfn.M.Art.mfn_data (tree, target)
(* Memoized tree height: leaves count as -1, so a single `Bin node has
   height 0. `Name points become named memo arts. *)
let tree_height : KeySt.Tree.t -> int =
  let module M = ArtLib.MakeArt(Name)(Types.Int) in
  let mfn = M.mk_mfn (Name.of_string "tree_height")
    (module KeySt.Tree)
    (fun r tree ->
      let tree_height tree = r.M.mfn_data tree in
      ( match tree with
      | `Leaf xs -> (-1)
      | `Bin(left,x,right) ->
        let hleft = tree_height left in
        let hright = tree_height right in
        1 + (if hleft > hright then hleft else hright)
      | `Art art -> tree_height (KeySt.Tree.Art.force art)
      | `Name (nm, tree) -> M.force (r.M.mfn_nart nm tree)
      ))
  in
  fun tree -> mfn.M.mfn_data tree
(* AVL balance factor: height of the left child minus height of the
   right child; zero at leaves. *)
let rec tree_height_diff ( tree : KeySt.Tree.t ) : int =
  match tree with
  | `Bin(left, _, right) -> tree_height left - tree_height right
  | `Art art -> tree_height_diff (KeySt.Tree.Art.force art)
  | `Name (_, inner) -> tree_height_diff inner
  | `Leaf _ -> 0
(* Rotation used by [avl_insert] when the RIGHT side is too tall: the
   right child's root is promoted, pushing the current root down-left.
   NOTE(review): in conventional terminology this is a LEFT rotation;
   the naming here is swapped but used consistently with [rotate_left]
   and [avl_insert] below. The [if false] branch is a disabled
   alternative that would suspend under the name instead of recursing. *)
let rotate_right : KeySt.Tree.t -> KeySt.Tree.t =
  let mfn = TArt.mk_mfn (Name.of_string "rotate_right")
    (module KeySt.Tree)
    ( fun r tree ->
      let rotate_right t = r.TArt.mfn_data t in
      ( match tree with
      | `Leaf _ -> failwith "impossible rr1"
      | `Bin(t1, x, t2) ->
        (* find the right child's `Bin through names/arts and promote it *)
        let rec loop = function
          | `Leaf _ -> failwith "impossible rr2"
          | `Bin(t21, y, t22) -> `Bin(`Bin(t1, x, t21), y, t22)
          | `Art art -> loop (TArt.force art)
          | `Name(_, t) -> loop t
        in loop t2
      | `Art art -> rotate_right (TArt.force art)
      | `Name(nm, t) ->
        if false then let nm1,nm2 = Name.fork nm in
          `Name(nm1, `Art(r.TArt.mfn_nart nm2 t))
        else `Name(nm, rotate_right t)
      ))
  in
  fun tree -> mfn.TArt.mfn_data tree
(* Rotation used by [avl_insert] when the LEFT side is too tall: the
   left child's root is promoted, pushing the current root down-right.
   NOTE(review): conventionally this is a RIGHT rotation — naming is
   swapped, consistently with [rotate_right] above. [rec] is unused;
   the [if false] branch is a disabled suspension alternative. *)
let rec rotate_left : KeySt.Tree.t -> KeySt.Tree.t =
  let mfn = TArt.mk_mfn (Name.of_string "rotate_left")
    (module KeySt.Tree)
    ( fun r tree ->
      let rotate_left t = r.TArt.mfn_data t in
      ( match tree with
      | `Leaf _ -> failwith "impossible rl1"
      | `Bin(t1, x, t2) ->
        (* find the left child's `Bin through names/arts and promote it *)
        let rec loop = function
          | `Leaf _ -> failwith "impossible rl2"
          | `Bin(t11, y, t12) -> `Bin(t11, y, `Bin(t12, x, t2))
          | `Art art -> loop (TArt.force art)
          | `Name(_, t) -> loop t
        in loop t1
      | `Art art -> rotate_left (TArt.force art)
      | `Name(nm, t) ->
        if false then
          let nm1,nm2 = Name.fork nm in
          `Name(nm1, `Art(r.TArt.mfn_nart nm2 t))
        else `Name(nm, rotate_left t)
      ))
  in
  fun tree -> mfn.TArt.mfn_data tree
(* Wrap a tree in a fresh `Name/`Art articulation point (identity mfn),
   forking [nm] to name the wrapper and the cell separately. *)
let nm_tree : Name.t -> KeySt.Tree.t -> KeySt.Tree.t =
  let mfn = TArt.mk_mfn (Name.of_string "nm_tree")
    (module KeySt.Tree)
    (fun r tree -> tree)
  in
  fun nm tree ->
    let nm1,nm2 = Name.fork nm in
    `Name(nm1, `Art (mfn.TArt.mfn_nart nm2 tree))
(* Memoized AVL insertion of entry [kv] under insertion name [nm].
   Standard AVL rebalancing: after inserting into a child, a balance
   factor of ±2 triggers single or double rotations (note the swapped
   rotate_left/rotate_right naming documented above). Every rebalanced
   subtree is rewrapped in an articulation point via [wrap_avl], which
   also asserts the AVL invariant. Existing tree names are dropped and
   replaced by forks of the insertion name.
   NOTE(review): [rec] is unused; an insert of an existing key returns
   the node unchanged (no replacement). *)
let rec avl_insert : Name.t -> KeySt.Tree.t -> Key.t -> KeySt.Tree.t
  =
  let mfn = TArt.mk_mfn (Name.of_string "avl_insert")
    (module (Types.Tuple3(Name)(KeySt.Tree)(Key)))
    (fun r (insert_nm,tree,kv) ->
      let avl_insert nm tree kv = r.TArt.mfn_data (nm,tree,kv) in
      (* articulate a subtree, checking the AVL balance invariant *)
      let wrap_avl nm tree =
        let h = tree_height_diff tree in
        assert (h = 0 || h = 1 || h = -1);
        (nm_tree nm tree)
      in
      ( match tree with
      | `Art art -> avl_insert insert_nm (KeySt.Tree.Art.force art) kv
      | `Name(tree_nm, tree) -> avl_insert insert_nm tree kv
      | `Leaf `Nil -> nm_tree insert_nm (`Bin (`Leaf `Nil, kv, `Leaf `Nil))
      | `Leaf _ -> failwith "avl_insert: `Leaf _ : invalid AVL tree"
      | `Bin(left, kv0, right) ->
        let insert_nm1, insert_nm2 = Name.fork insert_nm in
        let ord = Key.compare (get_key kv) (get_key kv0) in
        if ord = 0 then
          (* key already present: keep node as-is *)
          `Bin(left, kv0, right)
        else if ord < 0 then
          let left' = avl_insert insert_nm1 left kv in
          let tree' = `Bin(left',kv0, right) in
          begin match tree_height_diff tree' with
          | -1 | 0 | 1 -> wrap_avl insert_nm2 tree'
          | 2 -> begin match tree_height_diff left' with
            | 1 -> wrap_avl insert_nm2 (rotate_left tree')
            | -1 -> let tree'' = `Bin(rotate_right left', kv0, right) in
              wrap_avl insert_nm2 (rotate_left tree'')
            | _ -> failwith "impossible il1"
            end
          | _ -> failwith "impossible il2"
          end
        else if ord > 0 then
          let right' = avl_insert insert_nm1 right kv in
          let tree' = `Bin(left, kv0, right') in
          begin match tree_height_diff tree' with
          | -1 | 0 | 1 -> wrap_avl insert_nm2 tree'
          | -2 -> begin match tree_height_diff right' with
            | -1 -> wrap_avl insert_nm2 (rotate_right tree')
            | 1 -> let tree'' = `Bin(left, kv0, rotate_left right') in
              wrap_avl insert_nm2 (rotate_right tree'')
            | _ -> failwith "impossible ir1"
            end
          | _ -> failwith "impossible ir2"
          end
        else
          failwith "impossible ilast"
      ))
  in
  fun nm tree kv ->
    let nm1, nm2 = Name.fork nm in
    TArt.force (mfn.TArt.mfn_nart nm1 (nm2, tree, kv))
(* Fold every element of a rope into an AVL tree via [avl_insert],
   forking [nm] so each insertion gets its own name; memoized at the
   rope's `Name points. *)
let avl_tree_of_rope : Name.t -> KeySt.Rope.t -> KeySt.Tree.t -> KeySt.Tree.t
  =
  let mfn = TArt.mk_mfn (Name.of_string "avl_tree_of_rope")
    (module Types.Tuple3(Name)(KeySt.Rope)(KeySt.Tree))
    (fun r (nm, rope, tree) ->
      let avl_tree_of_rope nm rope tree = r.TArt.mfn_data (nm, rope, tree) in
      (match rope with
      | `Zero -> tree
      | `One kv -> (*>>> *) avl_insert nm tree kv
      | `Two (rope1, rope2) ->
        let nm1, nm2 = Name.fork nm in
        let tree' = (avl_tree_of_rope nm1 rope1 tree) in
        avl_tree_of_rope nm2 rope2 tree'
      | `Art(art) -> avl_tree_of_rope nm (KeySt.Rope.Art.force art) tree
      | `Name(nm, rope) ->
        let nm1, nm2 = Name.fork nm in
        TArt.force (r.TArt.mfn_nart nm1 (nm2,rope,tree))
      ))
  in
  fun nm rope tree -> mfn.TArt.mfn_data (nm, rope, tree)
(* Fold every element of a list into an AVL tree via [avl_insert].
   A `Name in the list replaces the current insertion name. *)
let avl_tree_of_list : Name.t -> KeySt.List.t -> KeySt.Tree.t -> KeySt.Tree.t
  =
  let mfn = TArt.mk_mfn (Name.of_string "avl_tree_of_list")
    (module Types.Tuple3(Name)(KeySt.List)(KeySt.Tree))
    (fun r (nm, list, tree) ->
      let avl_tree_of_list nm list tree = r.TArt.mfn_data (nm, list, tree) in
      (match list with
      | `Nil -> tree
      | `Cons(x, tl) ->
        let nm1,nm2 = Name.fork nm in
        let tree' = avl_insert nm1 tree x in
        avl_tree_of_list nm2 tl tree'
      | `Name(nm_here, tl) -> avl_tree_of_list nm_here tl tree
      | `Art(art) -> avl_tree_of_list nm (KeySt.List.Art.force art) tree
      ))
  in
  fun nm list tree -> mfn.TArt.mfn_data (nm, list, tree)
end
(* Key-value map built from [KvMapWrap], with value sequences stored
   as spread-trees over [Val]. *)
module MakeKvMap(ArtLib : ArtLib.S)(Name : Name.S)(Key : Data.S)(Val : Data.S) =
  KvMapWrap(ArtLib)(Name)(Key)(Val)(Make(ArtLib)(Name)(Val))
(*
(* Directed graph representation. *)
module MakeDigraph
( ArtLib : ArtLibType )
( Name : NameType )
( NodeData : sig include DatType val compare : t -> t -> int end )
( EdgeData : DatType )
=
struct
module ArtLib = ArtLib
module Name = Name
module NodeData = NodeData
module EdgeData = EdgeData
module Edge = Types.Tuple3(NodeData)(EdgeData)(NodeData)
module Adj = Types.Tuple2(EdgeData)(NodeData)
module NodeSt = Make (ArtLib) (Name) (NodeData)
module AdjSt = Make (ArtLib) (Name) (Adj)
module EdgeSt = Make (ArtLib) (Name) (Edge)
module NodeSeq = MakeSeq (NodeSt)
module AdjSeq = MakeSeq (AdjSt)
module EdgeSeq = MakeSeq (EdgeSt)
module NodeMap = MakeKvMap (ArtLib) (Name) (NodeData) (AdjSt)
let graph_without_node
( graph : NodeMap.KeySt.Tree.data )
( node : Nodedata )
: NodeMap.KeySt.Tree.data * NodeMap.KeySt.data option =
let node_op, graph = NodeMap.tree_remove graph node in
( graph, node_op )
let tgt_nodes_of_adjs
( adjs : AdjSt.List.data )
: NodeSt.List.data
= failwith "TODO"
let rec dfs
( graph : NodeMap.KeySt.Tree.data )
( stack : NodeSt.List.data )
: NodeMap.KeySt.List.data
=
( match stack with
| `Nil -> `Nil
| `Art( art ) ->dfs graph (NodeSt.List.Art.force art)
| `Cons(nd, stack_tl) ->
( match graph_without_node graph nd with
(* node is already visited: *)
| graph, None -> dfs graph stack_tl
(* node is not yet visited: *)
| graph, Some nd ->
let stack' = NodeSeq.list_append
(tgt_nodes_of_adjs (snd nd)) stack_tl
in
`Cons(nd, dfs graph stack')
)
| `Name _ -> failwith "TODO: Missing case"
)
let rec bfs
( graph : NodeMap.KeySt.Tree.data )
( queue : NodeSt.Tree.data )
: NodeMap.KeySt.List.data
=
( match NodeSeq.tree_pop_front queue with
| None -> `Nil
| Some ( queue_tl, front_nd ) ->
( match graph_without_node graph front_nd with
(* node is already visited: *)
| graph, None -> bfs graph queue_tl
(* node is not yet visited: *)
| graph, Some nd ->
let queue' = NodeSeq.tree_append
queue_tl (NodeSeq.tree_of_list (tgt_nodes_of_adjs (snd nd)))
in
`Cons(nd, bfs graph queue')
)
(* | `Art( art ) ->
`Art( NodeMap.KeySt.List.Art.cell (
dfs graph (NodeSt.List.Art.force art) ) ) *)
)
end
*)
(* garbled duplicate of the ExprLang module header below, preserved as a comment:
   module struct
   module type ExprLangType = sig
   module ArtLib : ArtLib . S
   module Name : Name . S
   module Value : Data . S
   type binop = string * ( Value.t - > Value.t - > Value.t )
   type uniop = string * ( Value.t - > Value.t )
   ( * articulated expression for an arithmetic language with ' let ' . *)
module ExprLang = struct
(* Signature of an articulated expression language: named binary/unary
   operators over [Value.t], and a recursively articulated AST. *)
module type ExprLangType = sig
  module ArtLib : ArtLib.S
  module Name : Name.S
  module Value : Data.S
  type binop = string * (Value.t -> Value.t -> Value.t)
  type uniop = string * (Value.t -> Value.t)
  (* articulated expression for an arithmetic language with 'let'. *)
  type 'art art_expr = [
    | `Let of string * 'art art_expr * 'art art_expr
    | `Var of string
    | `Value of Value.t
    | `Binop of binop * 'art art_expr * 'art art_expr
    | `Uniop of uniop * 'art art_expr
    | `Art of 'art
    | `Name of name * 'art art_expr
  ]
  (* tie the knot: expressions contain arts whose data are expressions *)
  module rec Expr : sig
    module Data : Data.S
    module Art : Art.S
  end
    with type data = Expr.Art.t art_expr
    and type Art.data = Expr.Art.t art_expr
    and type Art.name = name
end
module Make
(ArtLib : ArtLib.S)
(Name : Name.S)
(Value : Data.S) : ExprLangType = struct
module ArtLib = ArtLib
module Name = Name
module Value = Value
type binop = string * (Value.t -> Value.t -> Value.t)
type uniop = string * (Value.t -> Value.t)
(* articulated expression for an arithmetic language with 'let'. *)
type 'art art_expr = [
| `Let of string * 'art art_expr * 'art art_expr
| `Var of string
| `Value of Value.t
| `Binop of binop * 'art art_expr * 'art art_expr
| `Uniop of uniop * 'art art_expr
| `Art of 'art
| `Name of name * 'art art_expr
]
module rec Expr : sig
module Data : Data.S
module Art : Art.S
end
with type data = Expr.Art.t art_expr
and type Art.data = Expr.Art.t art_expr
and type Art.name = name
=
struct
module Data = struct
type t = Expr.Art.t art_expr
let rec show exp =
( match exp with
| `Value v -> Value.show v
| `Binop ((s,_), e1, e2) -> "Binop("^s^","^show e1^","^show e2^")"
| `Uniop ((s,_), e) -> "Uniop("^s^","^show e^")"
| `Let (v,e1,e2) -> "Let("^v^","^show e1^","^show e2^")"
| `Var v -> "Var("^v^")"
| `Art a -> "Art("^Expr.Art.show a^")"
| `Name(nm, e) -> "Name("^Name.show nm^","^show e^")"
)
let rec hash seed x =
( match x with
| `Value v -> Value.hash seed v
| `Binop((s,_), e1, e2) -> Hashtbl.seeded_hash (hash (hash seed e1) e2) s
| `Uniop ((s,_), e) -> Hashtbl.seeded_hash (hash seed e) s
| `Let(v,e1,e2) -> Hashtbl.seeded_hash (hash (hash seed e1) e2) v
| `Var v -> Hashtbl.seeded_hash seed v
| `Art a -> Expr.Art.hash seed a
| `Name(nm, e) -> Name.hash (hash seed e) nm
)
let rec equal (exp1:Expr.Art.t art_expr) (exp2:Expr.Art.t art_expr) =
( match exp1, exp2 with
| `Value v1, `Value v2 -> Value.equal v1 v2
| `Binop((s1, _), e11, e12),
`Binop((s2, _), e21, e22) -> s1 = s2 && equal e11 e21 && equal e12 e22
| `Uniop((s1,_),e1), `Uniop((s2,_),e2) -> s1 = s2 && equal e1 e2
| `Let(v1,e11,e12), `Let(v2,e21,e22) -> v1 = v2 && equal e11 e21 && equal e12 e22
| `Var v1, `Var v2 -> v1 = v2
| `Art a1, `Art a2 -> Expr.Art.equal a1 a2
| `Name(nm1, e1), `Name(nm2, e2) -> Name.equal nm1 nm2 && equal e1 e2
| _ -> false
)
let rec sanitize x =
( match x with
| `Value v -> `Value (Value.sanitize v)
| `Binop (binop, e1, e2) -> `Binop(binop, sanitize e1, sanitize e2)
| `Uniop (uniop, e) -> `Uniop(uniop, sanitize e)
| `Let(v, e1, e2) -> `Let(v, sanitize e1, sanitize e2)
| `Var v -> `Var v
| `Art a -> `Art (Expr.Art.sanitize a)
| `Name (nm, e) -> `Name (Name.sanitize nm, sanitize e)
)
end
module Art = ArtLib.MakeArt(Name)(Data)
end
end
module MakeEval ( ExprLang : ExprLangType )
=
struct
(* TODO: Write small-step evaluator, and driver loop. *)
open ExprLang
module VArt = ArtLib.MakeArt(Name)(Value)
module VOptionArt = ArtLib.MakeArt(Name)(Types.Option(Value))
module Values = Make(ArtLib)(Name)(Value)
module Env = MakeKvMap(ArtLib)(Name)(Types.String)(Values)
type env = Env.KeySt.Tree.t
type expr = Expr.data
type value = Value.t
type nm = name
let eval_big : nm -> env -> expr -> value option
=
let mfn = VOptionArt.mk_mfn (Name.of_string "eval_big")
(module Types.Tuple3(Name)(Env.KeySt.Tree.Data)(Expr.Data))
(fun r (nm,env,expr) ->
let eval nm env exp = r.VOptionArt.mfn_data (nm,env,exp) in
(match expr with
| `Value v -> Some v
| `Binop((_, binop), e1, e2) ->
let nm1,nm2 = Name.fork nm in
let v1 = eval nm1 env e1 in
let v2 = eval nm2 env e2 in
(match v1, v2 with
| Some v1, Some v2 -> Some (binop v1 v2)
| _ -> None
)
| `Uniop((_, uniop), e) ->
let v = eval nm env e in
(match v with
| Some v -> Some (uniop v)
| _ -> None
)
| `Let(var, e1, e2) ->
let nm1, nm = Name.fork nm in
let nm2, nm3 = Name.fork nm in
let v1 = eval nm1 env e1 in
let vs = match v1 with
| None -> `Nil
| Some v -> `Cons(v, `Nil) in
(* let env' = Env.avl_insert nm2 env (var, vs) in *)
| `Let(var, e1, e2) ->
let nm1, nm = Name.fork nm in
let nm2, nm3 = Name.fork nm in
let v1 = eval nm1 env e1 in
let vs = match v1 with
| None -> `Nil
| Some v -> `Cons(v, `Nil) in
(* let env' = Env.avl_insert nm2 env (var, vs) in *)
(* TEMP *)
let env' = Env.avl_insert nm2 env var in
eval nm3 env' e2
| `Var v -> ( match Env.tree_find env v with
| Some (_, `Cons(value,_)) -> Some value
| _ -> None
)
*)
| `Art a -> eval nm env (Expr.Art.force a)
| `Name(nm, exp) ->
let nm1, nm2 = Name.fork nm in
VOptionArt.force (r.VOptionArt.mfn_nart nm1 (nm2,env,exp))
| _ -> failwith "FIXME"
))
in
fun nm env exp -> mfn.VOptionArt.mfn_data (nm, env, exp)
type cxt = [ `Cxt_Union of env * uniop
| `Cxt_Binop of env * binop * expr
| `Cxt_Let of env * string * expr
]
let eval_small : nm -> cxt -> env -> expr -> (cxt, env, expr) option
=
let mfn = VOptionArt.mk_mfn (Name.of_string "eval")
(module Types.Tuple3(Name)(Env.KeySt.Tree.Data)(Expr.Data))
(fun r (nm,cxt,env,expr) ->
let eval nm env exp = r.VOptionArt.mfn_data (nm,cxt,env,exp) in
(match expr with
| None -> (cxt, env, None)
| Some expr ->
(match expr with
| `Value v ->
let cxt_subst cxt v = match cxt with
| `Cxt_Uniop(env, (s,uniop)) -> `Cxt_emp, env, Some (`Value (uniop v))
| `Cxt_Binop(env, (s,binop), e) -> `Cxt_uniop(s,binop v), env, Some e
| `Cxt_Let(env, var, e2) -> `Cxt_emp, (env(*TODO*)), Some e2
type cxt = [ `Cxt_Union of env * uniop
| `Cxt_Binop of env * binop * expr
| `Cxt_Let of env * string * expr
]
let eval_small : nm -> cxt -> env -> expr -> (cxt, env, expr) option
=
let mfn = VOptionArt.mk_mfn (Name.of_string "eval")
(module Types.Tuple3(Name)(Env.KeySt.Tree.Data)(Expr.Data))
(fun r (nm,cxt,env,expr) ->
let eval nm env exp = r.VOptionArt.mfn_data (nm,cxt,env,exp) in
(match expr with
| None -> (cxt, env, None)
| Some expr ->
(match expr with
| `Value v ->
let cxt_subst cxt v = match cxt with
| `Cxt_Uniop(env, (s,uniop)) -> `Cxt_emp, env, Some (`Value (uniop v))
| `Cxt_Binop(env, (s,binop), e) -> `Cxt_uniop(s,binop v), env, Some e
| `Cxt_Let(env, var, e2) -> `Cxt_emp, (env(*TODO*)), Some e2
in
let cxt, env, expr = cxt_subst cxt v in
(cxt, env, expr)
| `Var var ->
( match tree_find env var with
| Some v -> Some (`Value v)
| None -> None
)
| `Binop((s, binop), e1, e2) ->
(`Cxt_Binop(env, (s, binop), e2), env, Some e1)
| `Uniop((s, uniop), e) ->
(`Cxt_Uniop(env, (s, uniop)), env, Some e)
| `Let(var, e1, e2) ->
(`Cxt_Let(env, var, e2), env, Some e1)
| `Art a -> (cxt, env, Some (Expr.Art.force a))
| `Name(nm, exp) ->
let nm1, nm2 = Name.fork nm in
VOptionArt.force (r.VOptionArt.mfn_nart nm1 (nm2,env,exp))
)))
in
fun nm cxt env exp -> mfn.VOptionArt.mfn_data (nm,cxt,env,exp)
*)
end
end
*)
| null | https://raw.githubusercontent.com/plum-umd/adapton.ocaml/a8e642ac1cc113b33e1837da960940c2dfcfa772/src/collections/spreadTree.ml | ocaml | articulated list.
articulated tree.
articulated rope.
articulated list.
articulated tree.
articulated rope.
Abbreviations, for accessing mfn_* and force:
return it if it already contains `Nil
add articulated `Nil to the end and return that art
assert (list_is_empty rest) ;
non-memoised indexed lookup of a rope, using memoized rope_length for speed
main work after initial checks
memoized recursive call:
memoized recursive call:
ignore last value if unpaired
move the name to the outside, to catch in a later case
deal with double names from both data
should we pair ane fork these names for tracking purposes?
after all the double name cases are delt with, handle the default
TODO: simplify this as a special case of rope_reduce_name below
suspend, but don't create a name-seed to here,
they need to be associated with 'Cons
keep both names active
>>>
(* Directed graph representation.
node is already visited:
node is not yet visited:
node is already visited:
node is not yet visited:
| `Art( art ) ->
`Art( NodeMap.KeySt.List.Art.cell (
dfs graph (NodeSt.List.Art.force art) ) )
articulated expression for an arithmetic language with 'let'.
articulated expression for an arithmetic language with 'let'.
TODO: Write small-step evaluator, and driver loop.
let env' = Env.avl_insert nm2 env (var, vs) in
TEMP
TODO
type cxt = [ `Cxt_Union of env * uniop
| `Cxt_Binop of env * binop * expr
| `Cxt_Let of env * string * expr
]
let eval_small : nm -> cxt -> env -> expr -> (cxt, env, expr) option
=
let mfn = VOptionArt.mk_mfn (Name.of_string "eval")
(module Types.Tuple3(Name)(Env.KeySt.Tree.Data)(Expr.Data))
(fun r (nm,cxt,env,expr) ->
let eval nm env exp = r.VOptionArt.mfn_data (nm,cxt,env,exp) in
(match expr with
| None -> (cxt, env, None)
| Some expr ->
(match expr with
| `Value v ->
let cxt_subst cxt v = match cxt with
| `Cxt_Uniop(env, (s,uniop)) -> `Cxt_emp, env, Some (`Value (uniop v))
| `Cxt_Binop(env, (s,binop), e) -> `Cxt_uniop(s,binop v), env, Some e
| `Cxt_Let(env, var, e2) -> `Cxt_emp, (env(*TODO | * Spread trees :
Spread trees are a general - purpose data structure for ( nominal ,
demand - driven ) incremental computation .
A spread tree can represent :
-- a sequence ( and in particular , an iterator ) ,
-- a binary search tree ,
-- a set ,
-- a mapping ,
-- a graph
-- and more ( ? )
Programmers use spreadtrees by constructing and computing with one
of the above structures , and by inter - converting between them .
The programmer rarely ( if ever ) sees the actual spreadtree
structure , which is hidden by an API for one of the above
structures .
Internally , a " spreadtree " is a binary tree whose leaves consist
of linked lists . Both the tree 's internal nodes and linked - list
leaves hold data elements . Incrementality is accomplished by
interposing the recursive tree and list structures with Adapton
names and articulation points ( in particular , see
GrifolaType . ArtType ) .
= = Tree and List structure :
In Trees , we place data elements in two places : ( 1 ) at internal
tree nodes and ( 2 ) at the lists in the leaves . We use both places
so as to trade off between tree structure and list structure in a
flexible way .
* * This will be helpful for experiments , where we can measure the
performance penalty of less tree - like data structures .
By varying the size of leaf lists relative to the tree size , one
trades off tree structure for list structure , either " spreading "
the tree out linearly into a list , or gathering the leaf lists
back into a bifurcated tree structure . In either extreme , one
either has a binary tree with empty leafs , or a single flat linked
list .
= = Ropes versus SpreadTrees
We use the term " rope " for a restricted structure ( aka a " join
list " ) , where there are three cases : Zero , One , Two , and where One
cases carries one data element , and where the binary ( Two ) case
carries no data elements , but just two sub - ropes .
Since the Two case carries no data , it is helpful for writing
certain computations , e.g. , mergesort . On the other hand , without
data in the Two case , ropes can not represent search trees ( just
sequences ) .
= = Use of incremental articulation points :
Note : Nominal features are necessary to build and maintain trees
efficiently . Hence , these structures were designed with nominal
incremental reuse in mind . We expect that non - nominal thunks will
not perform well incrementally , as compared with nominal thunks .
We design " articulation points " ( of laziness / incrementality ) into
the structures as a special recursive case , which can be present or
absent with any frequency . In one extreme , the structures have
fine - grained articulated structure , and are maximally lazy and
incremental . In the other extreme , the structures have no
articulated structure , and correspond exactly to immutable
( purely - functional ) structures that never change across time .
Reasoning about articulation points isolates reasoning about
laziness and nominal incrementality from reasoning about eager
( and purely - functional ) calculation steps . Articulation cases are
defined and used separately from the usual cases of the structure ,
which are defined in the usual ( eager ) fashion .
Spread trees are a general-purpose data structure for (nominal,
demand-driven) incremental computation.
A spread tree can represent:
-- a sequence (and in particular, an iterator),
-- a binary search tree,
-- a set,
-- a mapping,
-- a graph
-- and more (?)
Programmers use spreadtrees by constructing and computing with one
of the above structures, and by inter-converting between them.
The programmer rarely (if ever) sees the actual spreadtree
structure, which is hidden by an API for one of the above
structures.
Internally, a "spreadtree" is a binary tree whose leaves consist
of linked lists. Both the tree's internal nodes and linked-list
leaves hold data elements. Incrementality is accomplished by
interposing the recursive tree and list structures with Adapton
names and articulation points (in particular, see
GrifolaType.ArtType).
== Tree and List structure:
In Trees, we place data elements in two places: (1) at internal
tree nodes and (2) at the lists in the leaves. We use both places
so as to trade off between tree structure and list structure in a
flexible way.
** This will be helpful for experiments, where we can measure the
performance penalty of less tree-like data structures.
By varying the size of leaf lists relative to the tree size, one
trades off tree structure for list structure, either "spreading"
the tree out linearly into a list, or gathering the leaf lists
back into a bifurcated tree structure. In either extreme, one
either has a binary tree with empty leafs, or a single flat linked
list.
== Ropes versus SpreadTrees
We use the term "rope" for a restricted structure (aka a "join
list"), where there are three cases: Zero, One, Two, and where One
cases carries one data element, and where the binary (Two) case
carries no data elements, but just two sub-ropes.
Since the Two case carries no data, it is helpful for writing
certain computations, e.g., mergesort. On the other hand, without
data in the Two case, ropes cannot represent search trees (just
sequences).
== Use of incremental articulation points:
Note: Nominal features are necessary to build and maintain trees
efficiently. Hence, these structures were designed with nominal
incremental reuse in mind. We expect that non-nominal thunks will
not perform well incrementally, as compared with nominal thunks.
We design "articulation points" (of laziness/incrementality) into
the structures as a special recursive case, which can be present or
absent with any frequency. In one extreme, the structures have
fine-grained articulated structure, and are maximally lazy and
incremental. In the other extreme, the structures have no
articulated structure, and correspond exactly to immutable
(purely-functional) structures that never change across time.
Reasoning about articulation points isolates reasoning about
laziness and nominal incrementality from reasoning about eager
(and purely-functional) calculation steps. Articulation cases are
defined and used separately from the usual cases of the structure,
which are defined in the usual (eager) fashion.
*)
module type S = sig
type elt
type name
| `Nil
| `Cons of elt * 'art art_list
| `Art of 'art
| `Name of name * 'art art_list
]
| `Leaf of 'leaf
| `Bin of ('leaf,'art) art_tree * elt * ('leaf,'art) art_tree
| `Art of 'art
| `Name of name * ('leaf, 'art) art_tree
]
| `Zero
| `One of 'one
| `Two of ('one,'art) art_rope * ('one,'art) art_rope
| `Art of 'art
| `Name of name * ('one,'art) art_rope
]
module rec List : Articulated.S with type t = List.Art.t art_list
and type name = name
module rec Tree : Articulated.S with type t = (List.t, Tree.Art.t) art_tree
and type name = name
module rec Rope : Articulated.S with type t = (elt, Rope.Art.t) art_rope
and type name = name
end
module Make
(ArtLib : ArtLib.S)
(Name : Name.S)
(Elt : Data.S)
: S with type name = Name.t
and type elt = Elt.t =
struct
type elt = Elt.t [@@deriving eq, ord, show]
type name = Name.t [@@deriving eq, ord, show]
| `Nil
| `Cons of elt * 'art art_list
| `Art of 'art
| `Name of Name.t * 'art art_list
]
[@@deriving eq, ord, show]
| `Leaf of 'leaf
| `Bin of ('leaf,'art) art_tree * elt * ('leaf,'art) art_tree
| `Art of 'art
| `Name of Name.t * ('leaf,'art) art_tree
]
[@@deriving eq, ord, show]
| `Zero
| `One of 'one
| `Two of ('one,'art) art_rope * ('one,'art) art_rope
| `Art of 'art
| `Name of Name.t * ('one,'art) art_rope
]
[@@deriving eq, ord, show]
module rec List : (Articulated.S with type t = List.Art.t art_list
and type name = Name.t) =
struct
type name = Name.t
module Data = struct
type t = List.Art.t art_list
[@@deriving eq, ord, show]
let rec hash seed x =
( match x with
| `Nil -> Hashtbl.seeded_hash seed `Nil
| `Cons(x,tl) -> Elt.hash (hash seed tl) x
| `Art a -> List.Art.hash seed a
| `Name(nm,xs) -> (Name.hash (hash seed xs) nm)
)
let rec sanitize x =
( match x with
| `Nil -> `Nil
| `Cons (x, tl) -> `Cons(Elt.sanitize x, sanitize tl)
| `Art a -> `Art (List.Art.sanitize a)
| `Name(nm,xs) -> `Name(Name.sanitize nm, sanitize xs)
)
end
module Art = ArtLib.MakeArt(Name)(Data)
include Data
end
module rec Tree : (Articulated.S with type t = (List.t, Tree.Art.t) art_tree
and type name = Name.t) =
struct
type name = Name.t
module Data = struct
type t = (List.t, Tree.Art.t) art_tree
[@@deriving eq, ord, show]
let rec hash seed x =
( match x with
| `Leaf xs -> List.hash seed xs
| `Bin(l,x,r) -> hash (Elt.hash (hash seed l) x) r
| `Art a -> Tree.Art.hash seed a
| `Name(nm,x) -> (Name.hash (hash seed x) nm)
)
let rec sanitize x =
( match x with
| `Leaf x -> `Leaf (List.sanitize x)
| `Bin(l,x,r) -> `Bin(sanitize l, Elt.sanitize x, sanitize r)
| `Art a -> `Art(Tree.Art.sanitize a)
| `Name(nm,x) -> `Name(Name.sanitize nm, sanitize x)
)
end
module Art = ArtLib.MakeArt(Name)(Data)
include Data
end
module rec Rope : (Articulated.S with type t = (elt, Rope.Art.t) art_rope
and type name = Name.t) =
struct
type name = Name.t
module Data = struct
type t = (elt, Rope.Art.t) art_rope
[@@deriving eq, ord, show]
let rec hash seed x =
( match x with
| `Zero -> 0
| `One x -> Elt.hash seed x
| `Two (x,y) -> hash (hash seed x) y
| `Art a -> Rope.Art.hash seed a
| `Name(nm,x) -> (Name.hash (hash seed x) nm)
)
let rec sanitize x =
( match x with
| `Zero -> `Zero
| `One x -> `One (Elt.sanitize x)
| `Two(x,y) -> `Two (sanitize x, sanitize y)
| `Art a -> `Art(Rope.Art.sanitize a)
| `Name(nm,x) -> `Name(Name.sanitize nm, sanitize x)
)
end
module Art = ArtLib.MakeArt(Name)(Data)
include Data
end
end
Sequences , based on SpreadTrees .
module SeqWrap
(ArtLib : ArtLib.S)
(Name : Name.S)
(Elt : Data.S)
(St : S with type elt = Elt.t
and type name = Name.t) =
struct
let default_granularity = 4
type name = St.name
module AElt = ArtLib.MakeArt(Name)(Elt)
module AEltOption = ArtLib.MakeArt(Name)(Types.Option(Elt))
module LArt = St.List.Art
module TArt = St.Tree.Art
module RArt = St.Rope.Art
let mut_elts_of_list
?c:(cons_first=true)
( name : name )
( list : 'a list )
( data_of : 'a -> St.elt )
( name_of : 'a -> name )
( gran_level : int )
: St.List.Art.t
=
let rec loop list =
match list with
| [] -> `Nil
| x :: xs ->
if Bits.ffs0 (Elt.hash 0 (data_of x)) >= gran_level then
let nm1, nm2 = Name.fork (name_of x) in
if cons_first then
`Cons((data_of x), `Name(nm1, `Art (St.List.Art.cell nm2 (loop xs))))
else
`Name(nm1, `Cons((data_of x), `Art (St.List.Art.cell nm2 (loop xs))))
else
`Cons((data_of x), (loop xs))
in St.List.Art.cell name (loop list)
let simple_full_string =
let rec loop = function
| `Nil -> "Nil"
| `Cons(x,xs) -> (Elt.show x)^"; "^(loop xs)
| `Art(a) -> "Art => "^(loop (LArt.force a))
| `Name(_,xs) -> "Name; "^(loop xs)
in loop
let rec insert_elt list_art h nm_tl_opt =
match nm_tl_opt with
| Some (nm, tl_art) ->
assert ( list_art <> tl_art );
let list_art_content = St.List.Art.force list_art in
St.List.Art.set list_art (`Cons(h, `Name(nm, `Art(tl_art)))) ;
St.List.Art.set tl_art list_art_content ;
| None ->
let list_art_content = St.List.Art.force list_art in
St.List.Art.set list_art (`Cons(h, list_art_content))
let rec delete_elt list_art =
let (x,x_tl) =
let rec loop list =
match list with
| `Art art ->
let elt, tl = loop (St.List.Art.force art) in
elt, (`Art art)
| `Name (nm, tl) ->
let elt, tl = loop tl in
elt, (`Name(nm, tl))
| `Cons(x, tl) -> (x,tl)
| `Nil -> failwith "delete_elt: Nil: No element to delete"
in
loop (St.List.Art.force list_art)
in
St.List.Art.set list_art x_tl ;
(x,x_tl)
let rec next_art x = match x with
| `Nil -> None
| `Cons(_, tl) -> next_art tl
| `Name(_, rest) -> next_art rest
| `Art a -> Some a
let rec next_cons x =
match x with
| `Nil -> None
| `Cons(x,xs) -> Some(x,xs)
| `Art(a) -> next_cons(LArt.force a)
| `Name(_, xs) -> next_cons xs
let rec ith_art list count =
( match count with
| x when x <= 0 -> list
| _ -> match list with
| `Nil -> `Nil
| `Cons(x, xs) -> ith_art xs (count-1)
| `Name(_, xs) -> ith_art xs count
| `Art a -> ith_art (St.List.Art.force a) count
)
This function returns the final art of a list , which contains exactly ` Nil
if it 's not available , it 's created first , mutating the input list directly
This function returns the final art of a list, which contains exactly `Nil
if it's not available, it's created first, mutating the input list directly
*)
let get_or_create_final_art (list : LArt.t) =
let rec find_last art =
match next_art (LArt.force art) with
| None -> art
| Some(a) -> find_last a
in
let la = find_last list in
if LArt.force la = `Nil then la else
let rec create_nil_art elt =
match elt with
| `Nil ->
let nm1, nm2 = Name.fork (Name.gensym ()) in
`Name(nm1, `Art(LArt.cell nm2 `Nil))
| `Cons(x,xs) -> `Cons(x, create_nil_art xs)
| `Art(a) -> failwith "two last arts!"
| `Name(nm, xs) -> `Name(nm, create_nil_art xs)
in
LArt.set la (create_nil_art (LArt.force la));
find_last la
let rec take list count =
let dec = function
| Some count -> Some (count-1)
| None -> None
in
( match count with
| Some count when (count <= 0) -> []
| _ ->
match list with
| `Nil -> []
| `Cons(x, xs) -> x :: (take xs (dec count))
| `Name(_, xs) -> take xs count
| `Art a -> take (St.List.Art.force a) count
)
let rec list_is_empty ( list : St.List.t) : bool =
( match list with
| `Nil -> true
| `Cons(_,_) -> false
| `Art a -> list_is_empty ( LArt.force a )
| `Name (_,x) -> list_is_empty x
)
let list_length : St.List.t -> int =
let module Len = ArtLib.MakeArt(Name)(Types.Int) in
let mfn = Len.mk_mfn (Name.of_string "list_length")
(module St.List)
(fun r l ->
let len l = r.Len.mfn_data l in
let memo_len n l = r.Len.mfn_nart n l in
match l with
| `Nil -> 0
| `Cons(_,l) -> 1 + (len l)
| `Art(a) -> len (LArt.force a)
| `Name(nm, l) -> Len.force (memo_len nm l)
)
in
fun l -> mfn.Len.mfn_data l
let list_append =
let mfn = LArt.mk_mfn (Name.of_string "list_append")
(module Types.Tuple2(St.List)(St.List))
(fun r (xs, ys) ->
let list_append xs ys = r.LArt.mfn_data (xs,ys) in
( match xs with
| `Nil -> ys
| `Cons(x,tl) -> `Cons(x, list_append tl ys)
| `Art a -> list_append (LArt.force a) ys
| `Name(nm,xs) ->
let nm1, nm2 = Name.fork nm in
`Name(nm1, `Art (r.LArt.mfn_nart nm2 (xs, ys)))
))
in
fun xs ys -> mfn.LArt.mfn_data (xs, ys)
let list_of_tree : St.Tree.t -> St.List.t -> St.List.t =
let mfn = LArt.mk_mfn (Name.of_string "list_of_tree")
(module Types.Tuple2(St.Tree)(St.List))
(fun r (tree, rest) ->
let list_of_tree tree list = r.LArt.mfn_data (tree, list) in
( match tree with
| `Leaf xs -> list_append xs rest
| `Bin(left,x,right) -> list_of_tree left (`Cons(x, list_of_tree right rest))
| `Art art -> list_of_tree (TArt.force art) rest
| `Name(nm,tree) -> let nm1,nm2 = Name.fork nm in
`Name(nm1, `Art(r.LArt.mfn_nart nm2 (tree, rest)))
))
in
fun tree list -> mfn.LArt.mfn_data (tree, list)
let rope_of_list_rec : name option -> int -> int -> St.Rope.t -> St.List.t -> St.Rope.t * St.List.t =
let module P = Articulated.ArtTuple2(ArtLib)(Name)(St.Rope)(St.List) in
let rope_of_list_rec =
let mfn = P.Art.mk_mfn (Name.of_string "rope_of_list_rec")
(module Types.Tuple5(Types.Option(Name))(Types.Int)(Types.Int)(St.Rope)(St.List))
(fun r (nm_opt, parent_lev, rope_lev, rope, list) ->
let rope_of_list_rec no pl tl t l = r.P.Art.mfn_data (no,pl,tl,t,l) in
( match list with
| `Nil -> rope, `Nil
| `Cons (hd, tl) ->
let hd_lev = Bits.ffs0 (Elt.hash 0 hd) in
if rope_lev <= hd_lev && hd_lev <= parent_lev then (
match nm_opt with
| None ->
let right, rest = rope_of_list_rec None hd_lev (-1) (`One hd) tl in
let rope = `Two(rope, right) in
rope_of_list_rec None parent_lev hd_lev rope rest
| Some(nm) ->
let nm1,nm = Name.fork nm in
let nm2,nm3 = Name.fork nm in
let right, rest = P.split nm1 (r.P.Art.mfn_nart nm2 (None, hd_lev, (-1), (`One hd), tl)) in
let rope : St.Rope.t = `Two(rope, `Name(nm3, `Art(right))) in
rope_of_list_rec None parent_lev hd_lev rope (LArt.force rest)
)
else (
match nm_opt with
| None -> rope, list
| Some(nm) -> rope, `Name(nm, list)
)
| `Art art -> rope_of_list_rec nm_opt parent_lev rope_lev rope (LArt.force art)
| `Name(nm, list) -> rope_of_list_rec (Some nm) parent_lev rope_lev rope list
)
)
in
fun nm pl tl t l -> mfn.P.Art.mfn_data (nm, pl, tl, t, l)
in
rope_of_list_rec
let rope_of_list : St.List.t -> St.Rope.t =
fun list ->
let rope, rest =
rope_of_list_rec None max_int (-1) (`Zero) list
in
rope
let list_of_rope : St.Rope.t -> St.List.t -> St.List.t =
let mfn = LArt.mk_mfn (Name.of_string "list_of_rope")
(module Types.Tuple2(St.Rope)(St.List))
(fun r (rope, rest) ->
let list_of_rope rope list = r.LArt.mfn_data (rope, list) in
( match rope with
| `Zero -> rest
| `One x -> `Cons(x, rest)
| `Two(x,y) -> list_of_rope x (list_of_rope y rest)
| `Art art -> list_of_rope (RArt.force art) rest
| `Name(nm,rope) -> let nm1,nm2 = Name.fork nm in
`Name(nm1, `Art(r.LArt.mfn_nart nm2 (rope, rest)))
))
in
fun rope list -> mfn.LArt.mfn_data (rope, list)
let rope_length : St.Rope.t -> int =
let module Len = ArtLib.MakeArt(Name)(Types.Int) in
let mfn = Len.mk_mfn (Name.of_string "rope_length")
(module St.Rope)
(fun r rope ->
let len rope = r.Len.mfn_data rope in
let memo_len n rope = r.Len.mfn_nart n rope in
match rope with
| `Zero -> 0
| `One(x) -> 1
| `Two(r1,r2) -> (len r1) + (len r2)
| `Art(a) -> len (RArt.force a)
| `Name(nm, r) -> Len.force (memo_len nm r)
)
in
fun rope -> mfn.Len.mfn_data rope
let rope_not_empty : name -> St.Rope.t -> bool =
fun (namespace : name) ->
let module M = ArtLib.MakeArt(Name)(Types.Bool) in
let fnn = Name.pair (Name.of_string "rope_empty") namespace in
let mfn = M.mk_mfn fnn
(module St.Rope)
(fun r rope ->
let empty rope = r.M.mfn_data rope in
let memo_empty n rope = r.M.mfn_nart n rope in
match rope with
| `Zero -> false
| `One(x) -> true
| `Two(r1,r2) -> (empty r1) || (empty r2)
| `Art(a) -> empty (RArt.force a)
| `Name(nm, r) -> M.force (memo_empty nm r)
)
in
fun rope -> mfn.M.mfn_data rope
let rope_nth rope n : Elt.t option =
if rope_length rope <= n then None else
let rec rope_nth rope n =
match rope with
| `Zero -> failwith "rope_nth: bad length reporting"
| `One(x) -> if n = 0 then Some(x) else failwith "rope_nth: bad length reporting"
| `Two(r1,r2) ->
let r1l = rope_length r1 in
if r1l > n then
rope_nth r1 n
else
rope_nth r2 (n-r1l)
| `Art(a) -> rope_nth (RArt.force a) n
| `Name(nm, r) -> rope_nth r n
in
rope_nth rope n
let list_reverse : St.List.t -> St.List.t -> St.List.t =
let mfn = LArt.mk_mfn (Name.of_string "list_reverse")
(module Types.Tuple2(St.List)(St.List))
(fun r (list, rev) ->
let list_reverse list rev = r.LArt.mfn_data (list,rev) in
( match list with
| `Nil -> rev
| `Cons(x, xs) -> list_reverse xs (`Cons(x, rev))
| `Art art -> list_reverse (LArt.force art) rev
| `Name (nm, xs) -> let nm1, nm2 = Name.fork nm in
`Name(nm1, `Art (r.LArt.mfn_nart nm2 (xs, rev)))
))
in
fun list rev -> mfn.LArt.mfn_data (list, rev)
let list_reverse_balanced : St.List.t -> St.List.t -> St.List.t =
let debug = false in
let accum =
LArt.mk_mfn
(Name.of_string "list_reverse_accum")
(module St.List)
(fun _ list ->
(if debug then Printf.printf "... accum=(%s)\n" (St.List.show list));
list)
in
let module Res = ArtLib.MakeArt(Name)(Types.Tuple2(St.List)(St.List)) in
let module Arg = Types.Tuple5(Types.Option(Name))(Types.Int)(Types.Int)(St.List)(St.List) in
let mfn =
Res.mk_mfn
(Name.of_string "list_reverse")(module Arg)
(fun r ((no, lo, hi, list, rev) as arg) ->
(if debug then Printf.printf "... list_reverse:args=(%s)\n%!" (Arg.show arg)) ;
let list_reverse no lo hi list rev = r.Res.mfn_data (no,lo,hi,list,rev) in
( match list with
| `Nil -> (`Nil, rev)
| `Cons(x, xs) ->
let hd_lev = Bits.ffs0 (Elt.hash 0 x) in
if lo <= hd_lev && hd_lev <= hi then (
match no with
| None ->
let rev = `Cons(x,rev) in
let rest, rev = list_reverse None (-1) hd_lev xs rev in
(if debug then Printf.printf "... rest1,rev1 = %s,%s\n%!" (St.List.show rest) (St.List.show rev)) ;
let rest, rev = list_reverse None hd_lev hi rest rev in
(if debug then Printf.printf "... rest2,rev2 = %s,%s\n%!" (St.List.show rest) (St.List.show rev)) ;
rest, rev
| Some nm ->
let nm1,nm = Name.fork nm in
let nm2,nm3 = Name.fork nm in
let rev = `Name(nm1, `Art(accum.LArt.mfn_nart nm2 (`Cons(x, rev)))) in
let rest, rev = Res.force (r.Res.mfn_nart nm3 (None, -1, hd_lev, xs, rev)) in
(if debug then Printf.printf "...N rest1,rev1 = %s,%s\n%!" (St.List.show rest) (St.List.show rev)) ;
let rest, rev = list_reverse None hd_lev hi rest rev in
(if debug then Printf.printf "...N rest2,rev2 = %s,%s\n%!" (St.List.show rest) (St.List.show rev)) ;
rest, rev
)
else (
(if debug then Printf.printf "... Basecase: list,rev = %s,%s\n%!" (St.List.show list) (St.List.show rev)) ;
match no with
| Some nm -> (`Name(nm,list), rev)
| None -> (list, rev)
)
| `Art art -> list_reverse no lo hi (LArt.force art) rev
| `Name (nm, xs) -> list_reverse (Some nm) lo hi xs rev
))
in
fun list rev ->
match mfn.Res.mfn_data (None, -1, max_int, list, rev) with
| `Nil, rev -> rev
| _, _ -> failwith "list_reverse: impossible"
let rec rope_reverse =
let mfn = RArt.mk_mfn (Name.of_string "rope_reverse")
(module St.Rope)
(fun r rope -> let rope_reverse = r.RArt.mfn_data in
( match rope with
| `Zero -> `Zero
| `One x -> `One x
| `Two(x,y) -> `Two(rope_reverse y, rope_reverse x)
| `Art art -> rope_reverse (RArt.force art)
| `Name (nm,rope) ->
let nm1,nm2 = Name.fork nm in
let art = r.RArt.mfn_nart nm2 rope in
ignore (RArt.force art) ;
`Name(nm1, `Art(art))
))
in
fun rope -> mfn.RArt.mfn_data rope
TODO : optimize , compact zeros
let rope_filter
(op_nm : name)
(op : Elt.t -> bool)
: St.Rope.t -> St.Rope.t =
let fnn = Name.pair (Name.of_string "rope_filter") op_nm in
let mfn = RArt.mk_mfn fnn
(module St.Rope)
(fun r rope ->
let rope_filter = r.RArt.mfn_data in
match rope with
| `Zero -> `Zero
| `One(x) -> if (op x) then `One(x) else `Zero
| `Two(x,y) -> `Two(rope_filter x, rope_filter y)
| `Art(a) -> rope_filter (RArt.force a)
| `Name(nm, rp) ->
let nm1, nm2 = Name.fork nm in
`Name(nm1, `Art(r.RArt.mfn_nart nm2 rp))
)
in
fun rope -> mfn.RArt.mfn_data rope
let list_filter
(op_nm : name)
(op : Elt.t -> bool)
: St.List.t -> St.List.t =
let fnn = Name.pair (Name.of_string "list_filter") op_nm in
let mfn = LArt.mk_mfn fnn
(module St.List)
(fun r list ->
let list_filter = r.LArt.mfn_data in
match list with
| `Nil -> `Nil
| `Cons(x, xs) ->
let rest = list_filter xs in
if op x then `Cons(x, rest) else rest
| `Art(a) -> list_filter (LArt.force a)
| `Name(nm, xs) ->
let nm1, nm2 = Name.fork nm in
`Name(nm1, `Art(r.LArt.mfn_nart nm2 xs))
)
in
fun list -> mfn.LArt.mfn_data list
let list_map
(op_nm : name)
(op : Elt.t -> Elt.t)
: St.List.t -> St.List.t =
let fnn = Name.pair (Name.of_string "list_map") op_nm in
let mfn = LArt.mk_mfn fnn
(module St.List)
(fun r list ->
let list_map = r.LArt.mfn_data in
match list with
| `Nil -> `Nil
| `Cons(x, xs) -> `Cons(op x, list_map xs)
| `Art(a) -> list_map (LArt.force a)
| `Name(nm, xs) ->
let nm1, nm2 = Name.fork nm in
`Name(nm1, `Art(r.LArt.mfn_nart nm2 xs))
)
in
fun list -> mfn.LArt.mfn_data list
let list_ref_cell
: name -> St.List.t -> St.List.Art.t =
let fnn = Name.of_string "list_ref_cell" in
let mfn = LArt.mk_mfn fnn
(module St.List)
(fun r list -> list)
in
mfn.St.List.Art.mfn_nart
(* [list_eager_map op_nm op] maps [op] over a list like [list_map], but at
   each `Name it eagerly forces the mapped tail and re-articulates the
   result behind a fresh named ref cell.
   FIX(review): the [let ys = ... in] binding was lost when comment
   delimiters were stripped from this copy of the source, leaving a
   dangling [in]; the binding is restored here from the evident intent
   (the forced mapped tail feeds [list_ref_cell]). *)
let list_eager_map
    (op_nm : name)
    (op : Elt.t -> Elt.t)
  : St.List.t -> St.List.t =
  let fnn = Name.pair (Name.of_string "list_map") op_nm in
  let mfn = LArt.mk_mfn fnn
      (module St.List)
      (fun r list ->
         let list_map = r.LArt.mfn_data in
         match list with
         | `Nil -> `Nil
         | `Cons(x, xs) -> `Cons(op x, list_map xs)
         | `Art(a) -> list_map (LArt.force a)
         | `Name(nm, xs) ->
           let nm1, nm = Name.fork nm in
           let nm2, nm3 = Name.fork nm in
           (* Eagerly compute the mapped tail... *)
           let ys = LArt.force (r.LArt.mfn_nart nm1 xs) in
           (* ...and pin it behind a named ref cell. *)
           let ref_ys = list_ref_cell nm2 ys in
           `Name(nm3, `Art(ref_ys))
      )
  in
  fun list -> mfn.LArt.mfn_data list
(* [list_eager_filter op_nm op] filters like [list_filter], but at each
   `Name it eagerly forces the filtered tail and re-articulates it behind
   a fresh named ref cell.
   FIX(review): as in [list_eager_map], the [let ys = ... in] binding was
   lost with the stripped comment delimiters; restored here. *)
let list_eager_filter
    (op_nm : name)
    (op : Elt.t -> bool)
  : St.List.t -> St.List.t =
  let fnn = Name.pair (Name.of_string "list_filter") op_nm in
  let mfn = LArt.mk_mfn fnn
      (module St.List)
      (fun r list ->
         let list_filter = r.LArt.mfn_data in
         match list with
         | `Nil -> `Nil
         | `Cons(x, xs) ->
           let rest = list_filter xs in
           if op x then `Cons(x, rest) else rest
         | `Art(a) -> list_filter (LArt.force a)
         | `Name(nm, xs) ->
           let nm1, nm = Name.fork nm in
           let nm2, nm3 = Name.fork nm in
           (* Eagerly compute the filtered tail... *)
           let ys = LArt.force (r.LArt.mfn_nart nm1 xs) in
           (* ...and pin it behind a named ref cell. *)
           let ref_ys = list_ref_cell nm2 ys in
           `Name(nm3, `Art(ref_ys))
      )
  in
  fun list -> mfn.LArt.mfn_data list
(* [list_map_paired op_nm op] combines adjacent element pairs with [op],
   halving the list length.  A trailing unpaired element is DROPPED
   (`Cons(_, `Nil) -> `Nil).  The many match arms normalize `Art/`Name
   nodes sitting between the two elements of a pair before recursing. *)
let list_map_paired
    (op_nm : name)
    (op : Elt.t -> Elt.t -> Elt.t)
  : St.List.t -> St.List.t =
  let fnn = Name.pair (Name.of_string "list_map_paired") op_nm in
  let mfn = LArt.mk_mfn fnn
      (module St.List)
      (fun r list ->
         let map2 = r.LArt.mfn_data in
         match list with
         | `Nil -> `Nil
         | `Cons(_, `Nil) -> `Nil
         | `Cons(x, `Cons(y, ys)) -> `Cons(op x y, map2 ys)
         | `Cons(x, `Art(a)) -> map2 (`Cons(x, LArt.force a))
         (* Hoist an interior name outward so the pair stays together;
            NOTE(review): this moves the articulation boundary — presumably
            intentional for incremental reuse, but confirm. *)
         | `Cons(x, `Name(nm,xs)) -> map2 (`Name(nm, `Cons(x,xs)))
         | `Art(a) -> map2 (LArt.force a)
         | `Name(nm, `Cons(x, `Art(a))) -> map2 (`Name(nm, `Cons(x, LArt.force a)))
         | `Name(nm1, `Cons(x, `Name(nm2, xs))) ->
           (* Two names guard one pair: combine them, then re-articulate. *)
           let nm1 , nm2 = Name.fork @@ Name.pair nm1 nm2 in
           `Name(nm1, `Art(r.LArt.mfn_nart nm2 (`Cons(x,xs))))
         | `Name(nm, `Art(a)) -> map2 (`Name(nm, LArt.force a))
         | `Name(nm, xs) ->
           let nm1, nm2 = Name.fork nm in
           `Name(nm1, `Art(r.LArt.mfn_nart nm2 xs))
      )
  in
  fun list -> mfn.LArt.mfn_data list
(* Fork an optional name into two optional halves; [None] forks to a pair
   of [None]s. *)
let name_opt_fork = function
  | None -> None, None
  | Some nm ->
    let left, right = Name.fork nm in
    (Some left, Some right)
(* Left-biased choice between two optional names: prefer [nm1] when it is
   present, otherwise fall back to [nm2]. *)
let name_opt_seq nm1 nm2 =
  match nm1 with
  | Some _ -> nm1
  | None -> nm2
(* [rope_reduce op_nm op] folds a rope with the binary operator [op],
   returning [None] for an all-`Zero rope.  [op] is applied in rope order;
   associativity of [op] is presumably required for a deterministic
   result across rebalancings — confirm with callers. *)
let rec rope_reduce
    ( op_nm : St.name )
    ( op : Elt.t -> Elt.t -> Elt.t )
  : St.Rope.t -> Elt.t option =
  let fnn = Name.pair (Name.of_string "rope_reduce") op_nm in
  let mfn = AEltOption.mk_mfn fnn
      (module St.Rope)
      (fun r rope ->
         let rope_reduce = r.AEltOption.mfn_data in
         ( match rope with
           | `Zero -> None
           | `One x -> Some x
           | `Two(left,right) ->
             (* Empty sides contribute nothing. *)
             ( match rope_reduce left, rope_reduce right with
               | Some l, Some r -> Some (op l r)
               | Some l, None -> Some l
               | None, Some r -> Some r
               | None, None -> None
             )
           | `Art art -> rope_reduce (St.Rope.Art.force art)
           | `Name (nm1, `Name(nm2, rope)) ->
             (* Collapse stacked names, keeping the taller one. *)
             let nm = if Name.height nm1 > Name.height nm2 then nm1 else nm2 in
             rope_reduce (`Name(nm, rope))
           | `Name (nm, rope) ->
             (* Memoize the reduction of this named fragment. *)
             AEltOption.force (r.AEltOption.mfn_nart nm rope)
         ))
  in
  fun rope -> mfn.AEltOption.mfn_data rope
(* [rope_reduce_name op_nm op] is [rope_reduce] that additionally threads
   an optional name through the fold, returning one usable name alongside
   the reduced value.
   FIX(review): the comment in the `Two branch had lost its delimiters in
   this copy of the source (a bare text line, which is a syntax error);
   restored as a proper comment. *)
let rec rope_reduce_name
    ( op_nm : St.name )
    ( op : Elt.t -> Elt.t -> Elt.t )
  : St.Rope.t -> Elt.t option * name option =
  let fnn = Name.pair (Name.of_string "rope_reduce_name") op_nm in
  let module M =
    ArtLib.MakeArt
      (Name)
      (Types.Tuple2(Types.Option(Elt))(Types.Option(Name)))
  in
  let mfn = M.mk_mfn fnn
      (module Types.Tuple2(St.Rope)(Types.Option(Name)))
      (fun r (rope, nm_opt)->
         let rope_reduce frag = r.M.mfn_data (frag, nm_opt) in
         match rope with
         | `Zero -> None, nm_opt
         | `One x -> Some x, nm_opt
         | `Two(left,right) ->
           let r1,no1 = rope_reduce left in
           let r2,no2 = rope_reduce right in
           (* find a useful name of the three available *)
           let nm_opt = name_opt_seq nm_opt (name_opt_seq no1 no2) in
           ( match r1, r2 with
             | Some l, Some r -> Some (op l r), nm_opt
             | Some l, None -> Some l, nm_opt
             | None, Some r -> Some r, nm_opt
             | None, None -> None, nm_opt
           )
         | `Art art -> rope_reduce (St.Rope.Art.force art)
         | `Name (nm1, `Name(nm2, rope)) ->
           (* Collapse stacked names, keeping the taller one. *)
           let nm = if Name.height nm1 > Name.height nm2 then nm1 else nm2 in
           rope_reduce (`Name(nm, rope))
         | `Name (nm, rope) ->
           let nm1, nm2 = Name.fork nm in
           M.force (r.M.mfn_nart nm1 (rope, Some(nm2)))
      )
  in
  fun rope -> mfn.M.mfn_data (rope, None)
(* Finds the median of a rope in current order; sort first to find the true median. *)
(* Median element of the rope in its CURRENT order ([None] when empty);
   sort beforehand to obtain the true median. *)
let rope_median rope : Elt.t option =
  match rope_length rope with
  | 0 -> None
  | n -> rope_nth rope (n / 2)
(* [list_merge_full compare_nm compare nm1 nm2 l1 l2] merges two lists
   that are sorted w.r.t. [compare].  [nm1]/[nm2] are names pending from
   already-consumed `Name nodes of either input; whichever side yields the
   next element spends its pending name to re-articulate the output. *)
let list_merge_full
    (compare_nm : name)
    (compare : Elt.t -> Elt.t -> int)
  : name option ->
    name option ->
    St.List.t ->
    St.List.t -> St.List.t =
  let fnn = Name.pair (Name.of_string "list_merge") compare_nm in
  let mfn = LArt.mk_mfn fnn
      (module Types.Tuple4
         (Types.Option(Name))
         (Types.Option(Name))
         (St.List)
         (St.List))
      (fun r (nm_opt1,nm_opt2,list1,list2) ->
         let merge xs ys = r.LArt.mfn_data (nm_opt1,nm_opt2,xs,ys) in
         let merge_nms nm1 nm2 xs ys = r.LArt.mfn_data (nm1,nm2,xs,ys) in
         (* Emit [x] from the left input, consuming the left pending name
            (if any) to articulate the rest of the merge. *)
         let merge_cons1 x l1 l2 =
           match nm_opt1 with
           | None -> `Cons(x, merge l1 l2)
           | Some(nms) ->
             let nm1,nm2 = Name.fork nms in
             `Name(nm1, `Cons(x, `Art(r.LArt.mfn_nart nm2 (None, nm_opt2, l1, l2))))
         in
         (* Symmetric: emit [y] from the right input. *)
         let merge_cons2 y l1 l2 =
           match nm_opt2 with
           | None -> `Cons(y, merge l1 l2)
           | Some(nms) ->
             let nm1,nm2 = Name.fork nms in
             `Name(nm1, `Cons(y, `Art(r.LArt.mfn_nart nm2 (nm_opt1, None, l1, l2))))
         in
         match list1, list2 with
         (* One side exhausted: flush the other, re-attaching its name. *)
         | `Nil, _ -> (match nm_opt2 with None -> list2 | Some nm -> `Name(nm,list2))
         | _, `Nil -> (match nm_opt1 with None -> list1 | Some nm -> `Name(nm,list1))
         | `Art(a1), _ -> merge (LArt.force a1) list2
         | _, `Art(a2) -> merge list1 (LArt.force a2)
         | `Name(nm1, xs1), _ -> merge_nms (Some(nm1)) nm_opt2 xs1 list2
         | _, `Name(nm2, xs2) -> merge_nms nm_opt1 (Some(nm2)) list1 xs2
         | `Cons(x,xs), `Cons(y,ys) ->
           (* Account one comparison of work. *)
           incr Statistics.Counts.unit_cost ;
           if compare x y <= 0 then
             merge_cons1 x xs list2
           else
             merge_cons2 y list1 ys
      )
  in
  fun nm1 nm2 l1 l2 -> mfn.LArt.mfn_data (nm1, nm2, l1, l2)
(* [list_merge] is [list_merge_full] with no pending names.  The partial
   application is deliberate: the memo table is built once per
   [cmp_nm]/[cmp] pair, not once per merged list. *)
let list_merge cmp_nm cmp =
  list_merge_full cmp_nm cmp None None
(* [rope_mergesort compare_nm compare] sorts the elements of a rope into
   an articulated list by recursively sorting both halves of a `Two and
   merging with [list_merge_full].  The optional name threaded through
   the recursion is attached to the first emitted `Cons.
   FIX(review): the comment on the `Two branch had lost its delimiters in
   this copy of the source (a bare text line, which is a syntax error);
   restored as a proper comment. *)
let rope_mergesort
    ( compare_nm : St.name )
    ( compare : Elt.t -> Elt.t -> int )
  : St.Rope.t -> St.List.t =
  let fnn = Name.pair (Name.of_string "rope_mergesort") compare_nm in
  let merge = list_merge_full compare_nm compare in
  let mfn = St.List.Art.mk_mfn fnn
      (module Types.Tuple2(Types.Option(Name))(St.Rope))
      (fun r (nm,rope) ->
         let rope_mergesort nm rope = r.LArt.mfn_data (nm,rope) in
         ( match rope with
           | `Zero -> `Nil
           | `One x ->
             ( match nm with
               | None -> `Cons(x,`Nil)
               | Some nm -> `Name(nm, `Cons(x, `Nil))
             )
           | `Two(x, y) ->
             (* send the name to the first `Cons *)
             let x_sorted = rope_mergesort nm x in
             let y_sorted = rope_mergesort None y in
             merge None None x_sorted y_sorted
           | `Art art -> rope_mergesort nm (RArt.force art)
           | `Name (nnm, rope) ->
             let nm1,nm2 = Name.fork nnm in
             match nm with
             | None ->
               `Art (r.LArt.mfn_nart nm1 (Some nm2,rope))
             | Some(nm) ->
               `Name(nm,`Art (r.LArt.mfn_nart nm1 (Some nm2,rope)))
         ))
  in
  fun rope -> mfn.LArt.mfn_data (None,rope)
(* Sort an articulated list by converting it to a rope and running
   [rope_mergesort].  The sorter (and its memo table) is built once per
   [compare_nm]/[compare] pair. *)
let list_to_rope_mergesort
    ( compare_nm : St.name )
    ( compare : Elt.t -> Elt.t -> int )
  : St.List.t -> St.List.t =
  let sorter = rope_mergesort compare_nm compare in
  fun input -> sorter (rope_of_list input)
end
(* [MakeSeq] packages the sequence operations ([SeqWrap]) over a
   [Make]-generated SpreadTree structure for element type [Elt]. *)
module MakeSeq(ArtLib : ArtLib.S)(Name : Name.S)(Elt : Data.S) =
  SeqWrap(ArtLib)(Name)(Elt)(Make(ArtLib)(Name)(Elt))
(* Makes a key-value mapping, based on SpreadTrees.
   The mapping is represented as a tree of key-value-sequence pairs.
   Keys are ordered by a comparison function.
   The tree is a binary search tree according to this comparison function.
   Value sequences are stored in an unordered fashion. *)
module KvMapWrap
(ArtLib : ArtLib.S)
(Name : Name.S)
(Key : Data.S)
(Val : Data.S)
(ValSt : S with type elt = Val.t
and type name = Name.t) =
struct
(* Key extraction and payload blanking are identities for this bare key
   map (the key IS the stored element). *)
let get_key x = x
let empty_kv k = k
module KeySt = Make(ArtLib)(Name)(Key)
module KeySeq = MakeSeq (ArtLib)(Name)(Key)
module ValSeq = MakeSeq (ArtLib)(Name)(Val)
(* Adapter exposing [Key.t option] as articulable data. *)
module KeyOptAdpt =
struct
  type name = Name.t
  module Tmp = Types.Option(Key)
  module Art = ArtLib.MakeArt(Name)(Tmp)
  include Tmp
end
module ABool = ArtLib.MakeArt(Name)(Types.Bool)
(* Short aliases for the tree/list articulation-point modules. *)
module TArt = KeySt.Tree.Art
module LArt = KeySt.List.Art
(* [is_bst (lo,hi) t] checks that every key of [t] lies in the inclusive
   interval [lo,hi] and that [t] is a binary search tree; leaf lists must
   hold at most one element. *)
let rec is_bst : Key.t * Key.t -> KeySt.Tree.t -> bool =
  let mfn = ABool.mk_mfn (Name.of_string "is_bst")
      (module (Types.Tuple3(Key)(Key)(KeySt.Tree)))
      (fun r (lo,hi,tree) ->
         let is_bst (lo,hi) tree = r.ABool.mfn_data (lo,hi,tree) in
         ( match tree with
           | `Leaf `Nil -> true
           | `Leaf `Art art -> is_bst (lo,hi) (`Leaf (KeySt.List.Art.force art))
           | `Leaf `Name (_,rest) -> is_bst (lo,hi) (`Leaf rest)
           | `Leaf `Cons(kv, rest) ->
             (* Singleton leaf whose key respects the interval. *)
             Key.compare lo (get_key kv) <= 0
             && Key.compare (get_key kv) hi <= 0
             && KeySeq.list_is_empty rest
           | `Bin(left,kv,right) -> let x = get_key kv in
             (* Narrow the interval on each side of the pivot. *)
             ( Key.compare lo x <= 0
               && Key.compare x hi <= 0
               && is_bst (lo, x) left
               && is_bst (x, hi) right )
           | `Art art -> is_bst (lo,hi) (KeySt.Tree.Art.force art)
           | `Name (nm,tree) -> ABool.force (r.ABool.mfn_nart nm (lo,hi,tree))
         ))
  in
  fun (lo,hi) tree -> mfn.ABool.mfn_data (lo,hi,tree)
(* Linear search for [target] in an articulated list; returns the first
   element whose key compares equal, else [None]. *)
let rec list_find
    (list : KeySt.List.t)
    (target : Key.t) : Key.t option =
  match list with
  | `Nil -> None
  | `Cons(kv, _) when Key.compare (get_key kv) target = 0 -> Some kv
  | `Cons(_, tl) -> list_find tl target
  | `Art art -> list_find (KeySt.List.Art.force art) target
  | `Name (_, tl) -> list_find tl target
(* Binary search for [target] in an articulated BST; delegates to
   [list_find] at leaves. *)
let rec tree_find
    ( tree : KeySt.Tree.t )
    ( target : Key.t ) : Key.t option =
  match tree with
  | `Leaf elts -> list_find elts target
  | `Bin(l, kv, r) ->
    let c = Key.compare target (get_key kv) in
    if c = 0 then Some kv
    else if c < 0 then tree_find l target
    else tree_find r target
  | `Art art -> tree_find (KeySt.Tree.Art.force art) target
  | `Name (_, t) -> tree_find t target
(* [list_remove xs k] removes the first element whose key equals [k],
   returning (removed element option, remaining list).  `Name nodes are
   re-articulated via an [ArtTuple2] so the two results can be split. *)
let rec list_remove : KeySt.List.t -> Key.t -> (Key.t option) * KeySt.List.t =
  let module M = Articulated.ArtTuple2(ArtLib)(Name)(KeyOptAdpt)(KeySt.List) in
  let mfn = M.Art.mk_mfn (Name.of_string "list_remove")
      (module (Types.Tuple2(KeySt.List)(Key)))
      (fun r (list, target) ->
         let list_remove list target = r.M.Art.mfn_data (list, target) in
         ( match list with
           | `Nil -> ( None, `Nil )
           | `Cons(kv, rest) ->
             if Key.compare (get_key kv) target = 0 then
               ( Some kv, rest )
             else
               (* Keep this cell, remove from the tail. *)
               let res, rem = list_remove rest target in
               (res, `Cons(kv, rem))
           | `Art art -> list_remove (KeySt.List.Art.force art) target
           | `Name (nm, list) ->
             let nm1,nm = Name.fork nm in
             let nm2,nm3 = Name.fork nm in
             (* Memoize, then split the pair into its two articulations. *)
             let elt_rem = r.M.Art.mfn_nart nm1 (list, target) in
             let elt,rem = M.split nm2 elt_rem in
             M.Adpt1.Art.force elt, `Name(nm3, `Art rem)
         ))
  in
  fun list target -> mfn.M.Art.mfn_data (list, target)
(* [tree_remove t k] removes (at most) one binding of key [k] from [t],
   returning (removed element option, remaining tree).  On a hit, the
   node's payload is blanked in place with [empty_kv].
   BUG FIX(review): the original returned only the searched SUBTREE from
   the recursive `Bin branches, silently discarding the pivot and the
   sibling subtree; the tree is now rebuilt around the recursive result,
   mirroring [list_remove]. *)
let rec tree_remove : KeySt.Tree.t -> Key.t -> (Key.t option) * KeySt.Tree.t =
  let module M = Articulated.ArtTuple2(ArtLib)(Name)(KeyOptAdpt)(KeySt.Tree) in
  let mfn = M.Art.mk_mfn (Name.of_string "tree_remove")
      (module (Types.Tuple2(KeySt.Tree)(Key)))
      (fun r (tree, target) ->
         let tree_remove tree target = r.M.Art.mfn_data (tree, target) in
         ( match tree with
           | `Leaf xs ->
             let res, ys = list_remove xs target in
             (res, `Leaf ys)
           | `Bin(left, kv, right) ->
             let ord = Key.compare target (get_key kv) in
             if ord < 0 then
               let res, left' = tree_remove left target in
               (res, `Bin(left', kv, right))
             else if ord > 0 then
               let res, right' = tree_remove right target in
               (res, `Bin(left, kv, right'))
             else
               (* Found: blank the payload, keep the structure. *)
               (Some kv, `Bin(left, empty_kv (get_key kv), right))
           | `Art art -> tree_remove (KeySt.Tree.Art.force art) target
           | `Name (nm, tree) ->
             let nm1, nm = Name.fork nm in
             let nm2, nm3 = Name.fork nm in
             let elt_rem = r.M.Art.mfn_nart nm1 (tree, target) in
             let elt,rem = M.split nm2 elt_rem in
             M.Adpt1.Art.force elt, `Name(nm3, `Art rem)
         ))
  in
  fun tree target -> mfn.M.Art.mfn_data (tree, target)
(* Memoized tree height: a leaf counts as height -1, so a single `Bin
   node has height 0. *)
let tree_height : KeySt.Tree.t -> int =
  let module M = ArtLib.MakeArt(Name)(Types.Int) in
  let mfn = M.mk_mfn (Name.of_string "tree_height")
      (module KeySt.Tree)
      (fun r tree ->
         let tree_height tree = r.M.mfn_data tree in
         ( match tree with
           | `Leaf xs -> (-1)
           | `Bin(left,x,right) ->
             let hleft = tree_height left in
             let hright = tree_height right in
             1 + (if hleft > hright then hleft else hright)
           | `Art art -> tree_height (KeySt.Tree.Art.force art)
           | `Name (nm, tree) -> M.force (r.M.mfn_nart nm tree)
         ))
  in
  fun tree -> mfn.M.mfn_data tree
(* AVL balance factor: height(left) - height(right); 0 at leaves. *)
let rec tree_height_diff ( tree : KeySt.Tree.t ) : int =
  match tree with
  | `Leaf _ -> 0
  | `Bin(l, _, r) -> tree_height l - tree_height r
  | `Art art -> tree_height_diff (KeySt.Tree.Art.force art)
  | `Name (_, t) -> tree_height_diff t
(* Rotation used by [avl_insert] to restore balance.
   NOTE(review): despite its name, this promotes the RIGHT child to the
   root (conventionally called a left rotation); the naming is mirrored
   consistently in [avl_insert], so behaviour is internally coherent —
   confirm before renaming. *)
let rotate_right : KeySt.Tree.t -> KeySt.Tree.t =
  let mfn = TArt.mk_mfn (Name.of_string "rotate_right")
      (module KeySt.Tree)
      ( fun r tree ->
          let rotate_right t = r.TArt.mfn_data t in
          ( match tree with
            | `Leaf _ -> failwith "impossible rr1"
            | `Bin(t1, x, t2) ->
              (* Walk through articulations of the right child to find its
                 `Bin node, then pivot it up. *)
              let rec loop = function
                | `Leaf _ -> failwith "impossible rr2"
                | `Bin(t21, y, t22) -> `Bin(`Bin(t1, x, t21), y, t22)
                | `Art art -> loop (TArt.force art)
                | `Name(_, t) -> loop t
              in loop t2
            | `Art art -> rotate_right (TArt.force art)
            | `Name(nm, t) ->
              (* Disabled experiment: re-articulating here is turned off. *)
              if false then let nm1,nm2 = Name.fork nm in
                `Name(nm1, `Art(r.TArt.mfn_nart nm2 t))
              else `Name(nm, rotate_right t)
          ))
  in
  fun tree -> mfn.TArt.mfn_data tree
(* Mirror of [rotate_right]: promotes the LEFT child to the root
   (conventionally a right rotation) — see the naming note there. *)
let rec rotate_left : KeySt.Tree.t -> KeySt.Tree.t =
  let mfn = TArt.mk_mfn (Name.of_string "rotate_left")
      (module KeySt.Tree)
      ( fun r tree ->
          let rotate_left t = r.TArt.mfn_data t in
          ( match tree with
            | `Leaf _ -> failwith "impossible rl1"
            | `Bin(t1, x, t2) ->
              (* Find the left child's `Bin node through articulations. *)
              let rec loop = function
                | `Leaf _ -> failwith "impossible rl2"
                | `Bin(t11, y, t12) -> `Bin(t11, y, `Bin(t12, x, t2))
                | `Art art -> loop (TArt.force art)
                | `Name(_, t) -> loop t
              in loop t1
            | `Art art -> rotate_left (TArt.force art)
            | `Name(nm, t) ->
              (* Disabled experiment: re-articulating here is turned off. *)
              if false then
                let nm1,nm2 = Name.fork nm in
                `Name(nm1, `Art(r.TArt.mfn_nart nm2 t))
              else `Name(nm, rotate_left t)
          ))
  in
  fun tree -> mfn.TArt.mfn_data tree
(* Pin a tree behind a named articulation point, spending half of [nm] on
   the `Name label and half on the cell itself. *)
let nm_tree : Name.t -> KeySt.Tree.t -> KeySt.Tree.t =
  let cell = TArt.mk_mfn (Name.of_string "nm_tree")
      (module KeySt.Tree)
      (fun _ t -> t)
  in
  fun nm tree ->
    let outer, inner = Name.fork nm in
    `Name(outer, `Art (cell.TArt.mfn_nart inner tree))
(* [avl_insert nm t kv] inserts key [kv] into AVL tree [t], rebalancing
   with the rotation helpers.  An equal key leaves the existing node in
   place (set semantics).  [wrap_avl] asserts the AVL invariant
   (|balance| <= 1) before re-articulating the result. *)
let rec avl_insert : Name.t -> KeySt.Tree.t -> Key.t -> KeySt.Tree.t
  =
  let mfn = TArt.mk_mfn (Name.of_string "avl_insert")
      (module (Types.Tuple3(Name)(KeySt.Tree)(Key)))
      (fun r (insert_nm,tree,kv) ->
         let avl_insert nm tree kv = r.TArt.mfn_data (nm,tree,kv) in
         let wrap_avl nm tree =
           let h = tree_height_diff tree in
           assert (h = 0 || h = 1 || h = -1);
           (nm_tree nm tree)
         in
         ( match tree with
           | `Art art -> avl_insert insert_nm (KeySt.Tree.Art.force art) kv
           | `Name(tree_nm, tree) -> avl_insert insert_nm tree kv
           | `Leaf `Nil -> nm_tree insert_nm (`Bin (`Leaf `Nil, kv, `Leaf `Nil))
           | `Leaf _ -> failwith "avl_insert: `Leaf _ : invalid AVL tree"
           | `Bin(left, kv0, right) ->
             let insert_nm1, insert_nm2 = Name.fork insert_nm in
             let ord = Key.compare (get_key kv) (get_key kv0) in
             if ord = 0 then
               (* Key already present: keep the existing binding. *)
               `Bin(left, kv0, right)
             else if ord < 0 then
               (* Insert left, then repair a left-heavy imbalance (+2);
                  rotation names follow this file's mirrored convention. *)
               let left' = avl_insert insert_nm1 left kv in
               let tree' = `Bin(left',kv0, right) in
               begin match tree_height_diff tree' with
                 | -1 | 0 | 1 -> wrap_avl insert_nm2 tree'
                 | 2 -> begin match tree_height_diff left' with
                     | 1 -> wrap_avl insert_nm2 (rotate_left tree')
                     | -1 -> let tree'' = `Bin(rotate_right left', kv0, right) in
                       wrap_avl insert_nm2 (rotate_left tree'')
                     | _ -> failwith "impossible il1"
                   end
                 | _ -> failwith "impossible il2"
               end
             else if ord > 0 then
               (* Insert right, then repair a right-heavy imbalance (-2). *)
               let right' = avl_insert insert_nm1 right kv in
               let tree' = `Bin(left, kv0, right') in
               begin match tree_height_diff tree' with
                 | -1 | 0 | 1 -> wrap_avl insert_nm2 tree'
                 | -2 -> begin match tree_height_diff right' with
                     | -1 -> wrap_avl insert_nm2 (rotate_right tree')
                     | 1 -> let tree'' = `Bin(left, kv0, rotate_left right') in
                       wrap_avl insert_nm2 (rotate_right tree'')
                     | _ -> failwith "impossible ir1"
                   end
                 | _ -> failwith "impossible ir2"
               end
             else
               failwith "impossible ilast"
         ))
  in
  fun nm tree kv ->
    let nm1, nm2 = Name.fork nm in
    TArt.force (mfn.TArt.mfn_nart nm1 (nm2, tree, kv))
(* Fold every element of a rope into an AVL tree via [avl_insert].
   BUG FIX(review): the original match omitted the `One case, so any
   singleton rope fragment raised [Match_failure]; singletons are now
   inserted directly. *)
let avl_tree_of_rope : Name.t -> KeySt.Rope.t -> KeySt.Tree.t -> KeySt.Tree.t
  =
  let mfn = TArt.mk_mfn (Name.of_string "avl_tree_of_rope")
      (module Types.Tuple3(Name)(KeySt.Rope)(KeySt.Tree))
      (fun r (nm, rope, tree) ->
         let avl_tree_of_rope nm rope tree = r.TArt.mfn_data (nm, rope, tree) in
         (match rope with
          | `Zero -> tree
          | `One x -> avl_insert nm tree x
          | `Two (rope1, rope2) ->
            (* Left half first, then fold the right half on top. *)
            let nm1, nm2 = Name.fork nm in
            let tree' = (avl_tree_of_rope nm1 rope1 tree) in
            avl_tree_of_rope nm2 rope2 tree'
          | `Art(art) -> avl_tree_of_rope nm (KeySt.Rope.Art.force art) tree
          | `Name(nm, rope) ->
            let nm1, nm2 = Name.fork nm in
            TArt.force (r.TArt.mfn_nart nm1 (nm2,rope,tree))
         ))
  in
  fun nm rope tree -> mfn.TArt.mfn_data (nm, rope, tree)
(* Fold every element of an articulated list into an AVL tree via
   [avl_insert].  A `Name in the input replaces the threaded name
   (the incoming [nm] is dropped in that branch). *)
let avl_tree_of_list : Name.t -> KeySt.List.t -> KeySt.Tree.t -> KeySt.Tree.t
  =
  let mfn = TArt.mk_mfn (Name.of_string "avl_tree_of_list")
      (module Types.Tuple3(Name)(KeySt.List)(KeySt.Tree))
      (fun r (nm, list, tree) ->
         let avl_tree_of_list nm list tree = r.TArt.mfn_data (nm, list, tree) in
         (match list with
          | `Nil -> tree
          | `Cons(x, tl) ->
            let nm1,nm2 = Name.fork nm in
            let tree' = avl_insert nm1 tree x in
            avl_tree_of_list nm2 tl tree'
          | `Name(nm_here, tl) -> avl_tree_of_list nm_here tl tree
          | `Art(art) -> avl_tree_of_list nm (KeySt.List.Art.force art) tree
         ))
  in
  fun nm list tree -> mfn.TArt.mfn_data (nm, list, tree)
end
(* [MakeKvMap] instantiates [KvMapWrap] with a value SpreadTree built
   from [Val]. *)
module MakeKvMap(ArtLib : ArtLib.S)(Name : Name.S)(Key : Data.S)(Val : Data.S) =
  KvMapWrap(ArtLib)(Name)(Key)(Val)(Make(ArtLib)(Name)(Val))
(*
module MakeDigraph
( ArtLib : ArtLibType )
( Name : NameType )
( NodeData : sig include DatType val compare : t -> t -> int end )
( EdgeData : DatType )
=
struct
module ArtLib = ArtLib
module Name = Name
module NodeData = NodeData
module EdgeData = EdgeData
module Edge = Types.Tuple3(NodeData)(EdgeData)(NodeData)
module Adj = Types.Tuple2(EdgeData)(NodeData)
module NodeSt = Make (ArtLib) (Name) (NodeData)
module AdjSt = Make (ArtLib) (Name) (Adj)
module EdgeSt = Make (ArtLib) (Name) (Edge)
module NodeSeq = MakeSeq (NodeSt)
module AdjSeq = MakeSeq (AdjSt)
module EdgeSeq = MakeSeq (EdgeSt)
module NodeMap = MakeKvMap (ArtLib) (Name) (NodeData) (AdjSt)
let graph_without_node
( graph : NodeMap.KeySt.Tree.data )
( node : Nodedata )
: NodeMap.KeySt.Tree.data * NodeMap.KeySt.data option =
let node_op, graph = NodeMap.tree_remove graph node in
( graph, node_op )
let tgt_nodes_of_adjs
( adjs : AdjSt.List.data )
: NodeSt.List.data
= failwith "TODO"
let rec dfs
( graph : NodeMap.KeySt.Tree.data )
( stack : NodeSt.List.data )
: NodeMap.KeySt.List.data
=
( match stack with
| `Nil -> `Nil
| `Art( art ) ->dfs graph (NodeSt.List.Art.force art)
| `Cons(nd, stack_tl) ->
( match graph_without_node graph nd with
| graph, None -> dfs graph stack_tl
| graph, Some nd ->
let stack' = NodeSeq.list_append
(tgt_nodes_of_adjs (snd nd)) stack_tl
in
`Cons(nd, dfs graph stack')
)
| `Name _ -> failwith "TODO: Missing case"
)
let rec bfs
( graph : NodeMap.KeySt.Tree.data )
( queue : NodeSt.Tree.data )
: NodeMap.KeySt.List.data
=
( match NodeSeq.tree_pop_front queue with
| None -> `Nil
| Some ( queue_tl, front_nd ) ->
( match graph_without_node graph front_nd with
| graph, None -> bfs graph queue_tl
| graph, Some nd ->
let queue' = NodeSeq.tree_append
queue_tl (NodeSeq.tree_of_list (tgt_nodes_of_adjs (snd nd)))
in
`Cons(nd, bfs graph queue')
)
)
end
*)
(* (garbled duplicate of the ExprLang signature below, retained for reference)
   module ExprLang = struct
   module type ExprLangType = sig
   module ArtLib : ArtLib.S
   module Name : Name.S
   module Value : Data.S
   type binop = string * (Value.t -> Value.t -> Value.t)
   type uniop = string * (Value.t -> Value.t)
   articulated expression for an arithmetic language with 'let'. *)
(* Articulated expression language for arithmetic with 'let'. *)
module ExprLang = struct
  module type ExprLangType = sig
    module ArtLib : ArtLib.S
    module Name : Name.S
    module Value : Data.S
    (* Operators carry a printable tag alongside their semantics. *)
    type binop = string * (Value.t -> Value.t -> Value.t)
    type uniop = string * (Value.t -> Value.t)
    (* Expressions, articulated at `Art and `Name points. *)
    type 'art art_expr = [
      | `Let of string * 'art art_expr * 'art art_expr
      | `Var of string
      | `Value of Value.t
      | `Binop of binop * 'art art_expr * 'art art_expr
      | `Uniop of uniop * 'art art_expr
      | `Art of 'art
      | `Name of name * 'art art_expr
    ]
    (* Expressions and their articulation cells are mutually recursive. *)
    module rec Expr : sig
      module Data : Data.S
      module Art : Art.S
    end
    with type data = Expr.Art.t art_expr
     and type Art.data = Expr.Art.t art_expr
     and type Art.name = name
  end
module Make
(ArtLib : ArtLib.S)
(Name : Name.S)
(Value : Data.S) : ExprLangType = struct
module ArtLib = ArtLib
module Name = Name
module Value = Value
type binop = string * (Value.t -> Value.t -> Value.t)
type uniop = string * (Value.t -> Value.t)
type 'art art_expr = [
| `Let of string * 'art art_expr * 'art art_expr
| `Var of string
| `Value of Value.t
| `Binop of binop * 'art art_expr * 'art art_expr
| `Uniop of uniop * 'art art_expr
| `Art of 'art
| `Name of name * 'art art_expr
]
module rec Expr : sig
module Data : Data.S
module Art : Art.S
end
with type data = Expr.Art.t art_expr
and type Art.data = Expr.Art.t art_expr
and type Art.name = name
=
struct
module Data = struct
type t = Expr.Art.t art_expr
(* Structural printer for expressions; articulation cells print via
   [Expr.Art.show]. *)
let rec show exp =
  ( match exp with
    | `Value v -> Value.show v
    | `Binop ((s,_), e1, e2) -> "Binop("^s^","^show e1^","^show e2^")"
    | `Uniop ((s,_), e) -> "Uniop("^s^","^show e^")"
    | `Let (v,e1,e2) -> "Let("^v^","^show e1^","^show e2^")"
    | `Var v -> "Var("^v^")"
    | `Art a -> "Art("^Expr.Art.show a^")"
    | `Name(nm, e) -> "Name("^Name.show nm^","^show e^")"
  )
(* Seeded structural hash; only the operator TAG (not its function) is
   hashed, matching [equal] below. *)
let rec hash seed x =
  ( match x with
    | `Value v -> Value.hash seed v
    | `Binop((s,_), e1, e2) -> Hashtbl.seeded_hash (hash (hash seed e1) e2) s
    | `Uniop ((s,_), e) -> Hashtbl.seeded_hash (hash seed e) s
    | `Let(v,e1,e2) -> Hashtbl.seeded_hash (hash (hash seed e1) e2) v
    | `Var v -> Hashtbl.seeded_hash seed v
    | `Art a -> Expr.Art.hash seed a
    | `Name(nm, e) -> Name.hash (hash seed e) nm
  )
(* Structural equality; operators compare by tag only. *)
let rec equal (exp1:Expr.Art.t art_expr) (exp2:Expr.Art.t art_expr) =
  ( match exp1, exp2 with
    | `Value v1, `Value v2 -> Value.equal v1 v2
    | `Binop((s1, _), e11, e12),
      `Binop((s2, _), e21, e22) -> s1 = s2 && equal e11 e21 && equal e12 e22
    | `Uniop((s1,_),e1), `Uniop((s2,_),e2) -> s1 = s2 && equal e1 e2
    | `Let(v1,e11,e12), `Let(v2,e21,e22) -> v1 = v2 && equal e11 e21 && equal e12 e22
    | `Var v1, `Var v2 -> v1 = v2
    | `Art a1, `Art a2 -> Expr.Art.equal a1 a2
    | `Name(nm1, e1), `Name(nm2, e2) -> Name.equal nm1 nm2 && equal e1 e2
    | _ -> false
  )
(* Deep-copy, sanitizing values, names, and articulation cells. *)
let rec sanitize x =
  ( match x with
    | `Value v -> `Value (Value.sanitize v)
    | `Binop (binop, e1, e2) -> `Binop(binop, sanitize e1, sanitize e2)
    | `Uniop (uniop, e) -> `Uniop(uniop, sanitize e)
    | `Let(v, e1, e2) -> `Let(v, sanitize e1, sanitize e2)
    | `Var v -> `Var v
    | `Art a -> `Art (Expr.Art.sanitize a)
    | `Name (nm, e) -> `Name (Name.sanitize nm, sanitize e)
  )
end
module Art = ArtLib.MakeArt(Name)(Data)
end
end
module MakeEval ( ExprLang : ExprLangType )
=
struct
open ExprLang
(* Articulation points for values and optional values. *)
module VArt = ArtLib.MakeArt(Name)(Value)
module VOptionArt = ArtLib.MakeArt(Name)(Types.Option(Value))
(* Environments map variable names to articulated value lists. *)
module Values = Make(ArtLib)(Name)(Value)
module Env = MakeKvMap(ArtLib)(Name)(Types.String)(Values)
type env = Env.KeySt.Tree.t
type expr = Expr.data
type value = Value.t
type nm = name
(* Big-step, memoized evaluator.  [None] signals an evaluation failure.
   `Let and `Var are NOT yet supported (they fall to [failwith "FIXME"]);
   a draft for them survives in the comment below.
   FIX(review): that draft had lost its opening comment delimiter in this
   copy of the source, breaking the syntax of the active branches; it is
   re-wrapped as a well-formed comment.  Active behaviour is unchanged. *)
let eval_big : nm -> env -> expr -> value option
  =
  let mfn = VOptionArt.mk_mfn (Name.of_string "eval_big")
      (module Types.Tuple3(Name)(Env.KeySt.Tree.Data)(Expr.Data))
      (fun r (nm,env,expr) ->
         let eval nm env exp = r.VOptionArt.mfn_data (nm,env,exp) in
         (match expr with
          | `Value v -> Some v
          | `Binop((_, binop), e1, e2) ->
            (* Independent sub-evaluations get independent names. *)
            let nm1,nm2 = Name.fork nm in
            let v1 = eval nm1 env e1 in
            let v2 = eval nm2 env e2 in
            (match v1, v2 with
             | Some v1, Some v2 -> Some (binop v1 v2)
             | _ -> None
            )
          | `Uniop((_, uniop), e) ->
            let v = eval nm env e in
            (match v with
             | Some v -> Some (uniop v)
             | _ -> None
            )
          (* Draft implementation of `Let / `Var, not yet enabled:
             | `Let(var, e1, e2) ->
               let nm1, nm = Name.fork nm in
               let nm2, nm3 = Name.fork nm in
               let v1 = eval nm1 env e1 in
               let vs = match v1 with
                 | None -> `Nil
                 | Some v -> `Cons(v, `Nil) in
               let env' = Env.avl_insert nm2 env var in
               eval nm3 env' e2
             | `Var v -> ( match Env.tree_find env v with
                 | Some (_, `Cons(value,_)) -> Some value
                 | _ -> None
               )
          *)
          | `Art a -> eval nm env (Expr.Art.force a)
          | `Name(nm, exp) ->
            let nm1, nm2 = Name.fork nm in
            VOptionArt.force (r.VOptionArt.mfn_nart nm1 (nm2,env,exp))
          | _ -> failwith "FIXME"
         ))
  in
  fun nm env exp -> mfn.VOptionArt.mfn_data (nm, env, exp)
(*
type cxt = [ ` Cxt_Union of env * uniop
| ` Cxt_Binop of env * binop * expr
| ` Cxt_Let of env * string * expr
]
let eval_small : nm - > cxt - > env - > expr - > ( cxt , env , expr ) option
=
let mfn = VOptionArt.mk_mfn ( Name.of_string " eval " )
( module Types . Tuple3(Name)(Env . KeySt . Tree . Data)(Expr . Data ) )
( fun r ( nm , cxt , env , expr ) - >
let eval nm env exp nm , cxt , env , exp ) in
( match expr with
| None - > ( cxt , env , None )
| Some expr - >
( match expr with
| ` Value v - >
let v = match cxt with
| ` Cxt_Uniop(env , ( s , uniop ) ) - > ` Cxt_emp , env , Some ( ` Value ( uniop v ) )
| ` Cxt_Binop(env , ( s , ) , e ) - > ` Cxt_uniop(s , v ) , env , Some e
in
let cxt, env, expr = cxt_subst cxt v in
(cxt, env, expr)
| `Var var ->
( match tree_find env var with
| Some v -> Some (`Value v)
| None -> None
)
| `Binop((s, binop), e1, e2) ->
(`Cxt_Binop(env, (s, binop), e2), env, Some e1)
| `Uniop((s, uniop), e) ->
(`Cxt_Uniop(env, (s, uniop)), env, Some e)
| `Let(var, e1, e2) ->
(`Cxt_Let(env, var, e2), env, Some e1)
| `Art a -> (cxt, env, Some (Expr.Art.force a))
| `Name(nm, exp) ->
let nm1, nm2 = Name.fork nm in
VOptionArt.force (r.VOptionArt.mfn_nart nm1 (nm2,env,exp))
)))
in
fun nm cxt env exp -> mfn.VOptionArt.mfn_data (nm,cxt,env,exp)
*)
end
end
*)
|
231a5a7aca16b6cb355c8fd5f868b0197bf1c7a6044adf20fb3dcf05db75730d | danilkolikov/dfl | Base.hs | |
{-|
Module      : Frontend.Desugaring.Record.Base
Description : Base functions for record desugaring
Copyright   : (c) Danil Kolikov, 2019
License     : MIT

Base functions for record desugaring
-}
module Frontend.Desugaring.Record.Base where
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Except (Except, runExcept, throwE)
import Control.Monad.Trans.Reader (ReaderT, ask, runReaderT)
import qualified Data.HashMap.Lazy as HM
import Frontend.Desugaring.Record.Ast
import Frontend.Syntax.Position (WithLocation(..))
-- | Errors which may happen during desugaring of records
data RecordDesugaringError
= RecordDesugaringErrorUnknownField (WithLocation Ident) -- ^ Unknown field
| RecordDesugaringErrorUnknownConstructor (WithLocation Ident) -- ^ Unknown constructor
| RecordDesugaringErrorDuplicateField (WithLocation Ident)
(WithLocation Ident) -- ^ Some field is used twice in a binding
| RecordDesugaringErrorMissingConstructor [WithLocation Ident] -- ^ No constructors including these fields
deriving (Show, Eq)
-- | Context of record desugaring
data RecordDesugaringContext = RecordDesugaringContext
{ getFieldToTypeMap :: DataTypes
, getConstructorToTypeMap :: DataTypes
}
-- | Processor of computations which can throw RecordDesugaringError
type RecordDesugaringExcept = Except RecordDesugaringError
-- | Run a record desugaring computation, producing either an error or a result
runRecordDesugaringExcept ::
       RecordDesugaringExcept a -> Either RecordDesugaringError a
runRecordDesugaringExcept = runExcept
-- | Raise a 'RecordDesugaringError' inside 'RecordDesugaringExcept'
raiseRDError :: RecordDesugaringError -> RecordDesugaringExcept a
raiseRDError = throwE
-- | Processor of record desugaring
type RecordDesugaringProcessor
= ReaderT RecordDesugaringContext RecordDesugaringExcept
-- | Run a record desugaring processor
runRecordDesugaringProcessor ::
       RecordDesugaringProcessor a
    -> DataTypes
    -> DataTypes
    -> Either RecordDesugaringError a
runRecordDesugaringProcessor rdp fieldToType constrToType =
    runRecordDesugaringExcept (runReaderT rdp context)
  where
    -- Environment bundling both lookup tables for the Reader layer.
    context =
        RecordDesugaringContext
            { getFieldToTypeMap = fieldToType
            , getConstructorToTypeMap = constrToType
            }
-- | Function raises a RecordDesugaringError
raiseError :: RecordDesugaringError -> RecordDesugaringProcessor a
raiseError = lift . raiseRDError
-- | Look up the data type that declares the given record field,
-- raising 'RecordDesugaringErrorUnknownField' when it is absent.
findDataTypeByField :: WithLocation Ident -> RecordDesugaringProcessor DataType
findDataTypeByField name = do
    context <- ask
    let fieldTable = getFieldToTypeMap context
    maybe unknownField return (HM.lookup (getValue name) fieldTable)
  where
    unknownField = raiseError $ RecordDesugaringErrorUnknownField name
-- | Look up the data type that declares the given constructor,
-- raising 'RecordDesugaringErrorUnknownConstructor' when it is absent.
findDataTypeByConstructor ::
       WithLocation Ident -> RecordDesugaringProcessor DataType
findDataTypeByConstructor name =
    ask >>= \context ->
        case HM.lookup (getValue name) (getConstructorToTypeMap context) of
            Nothing -> raiseError $ RecordDesugaringErrorUnknownConstructor name
            Just dataType -> return dataType
-- | Find a specific constructor in the data type, or fail with
-- 'RecordDesugaringErrorUnknownConstructor'.
lookupConstructor ::
       WithLocation Ident -> DataType -> RecordDesugaringExcept Constructor
lookupConstructor name dataType =
    maybe missing return $
    lookup (getValue name) (getDataTypeConstructors dataType)
  where
    missing = throwE $ RecordDesugaringErrorUnknownConstructor name
| null | https://raw.githubusercontent.com/danilkolikov/dfl/698a8f32e23b381afe803fc0e353293a3bf644ba/src/Frontend/Desugaring/Record/Base.hs | haskell | | Errors which may happen during desugaring of records
^ Unknown field
^ Unknown constructor
^ Some field is used twice in a binding
^ No constructors including these fields
| Context of record desugaring
| Run record desugaring except
| Raises a RecordDEsugaring error
| Processor of record desugaring
| Function finds a data type by a specified field
| Function finds a data type by a specified constructor
| Find a specific constructor in the data type or raise an error | |
Module : Frontend . Desugaring . Record . Base
Description : Base functions for record desugaring
Copyright : ( c ) , 2019
License : MIT
Base functions for record desugaring
Module : Frontend.Desugaring.Record.Base
Description : Base functions for record desugaring
Copyright : (c) Danil Kolikov, 2019
License : MIT
Base functions for record desugaring
-}
module Frontend.Desugaring.Record.Base where
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Except (Except, runExcept, throwE)
import Control.Monad.Trans.Reader (ReaderT, ask, runReaderT)
import qualified Data.HashMap.Lazy as HM
import Frontend.Desugaring.Record.Ast
import Frontend.Syntax.Position (WithLocation(..))
data RecordDesugaringError
| RecordDesugaringErrorDuplicateField (WithLocation Ident)
deriving (Show, Eq)
data RecordDesugaringContext = RecordDesugaringContext
{ getFieldToTypeMap :: DataTypes
, getConstructorToTypeMap :: DataTypes
}
| Processor of computations which can throw RecordDesugaringError
type RecordDesugaringExcept = Except RecordDesugaringError
runRecordDesugaringExcept ::
RecordDesugaringExcept a -> Either RecordDesugaringError a
runRecordDesugaringExcept = runExcept
raiseRDError :: RecordDesugaringError -> RecordDesugaringExcept a
raiseRDError = throwE
type RecordDesugaringProcessor
= ReaderT RecordDesugaringContext RecordDesugaringExcept
| Run a record desugaring processor
runRecordDesugaringProcessor ::
RecordDesugaringProcessor a
-> DataTypes
-> DataTypes
-> Either RecordDesugaringError a
runRecordDesugaringProcessor rdp fieldToType constrToType =
runRecordDesugaringExcept
(runReaderT
rdp
RecordDesugaringContext
{ getFieldToTypeMap = fieldToType
, getConstructorToTypeMap = constrToType
})
| Function raises a RecordDesugaringError
raiseError :: RecordDesugaringError -> RecordDesugaringProcessor a
raiseError = lift . raiseRDError
findDataTypeByField :: WithLocation Ident -> RecordDesugaringProcessor DataType
findDataTypeByField name = do
context <- ask
let fields = getFieldToTypeMap context
case HM.lookup (getValue name) fields of
Just dataType -> return dataType
Nothing -> raiseError $ RecordDesugaringErrorUnknownField name
findDataTypeByConstructor ::
WithLocation Ident -> RecordDesugaringProcessor DataType
findDataTypeByConstructor name = do
context <- ask
let constructors = getConstructorToTypeMap context
case HM.lookup (getValue name) constructors of
Just dataType -> return dataType
Nothing -> raiseError $ RecordDesugaringErrorUnknownConstructor name
lookupConstructor ::
WithLocation Ident -> DataType -> RecordDesugaringExcept Constructor
lookupConstructor name dataType =
case lookup (getValue name) (getDataTypeConstructors dataType) of
Just c -> return c
Nothing -> throwE $ RecordDesugaringErrorUnknownConstructor name
|
95a451903ed0f51ab534a30e65bbad1ecd29d71ed2d8861c846b16a6f4e23eea | telekons/one-more-re-nightmare | nullable.lisp | (in-package :one-more-re-nightmare)
(defvar *gensym-assignments?* t)
(defun cached-nullable* (re)
(if *gensym-assignments?*
(cached-nullable re)
(cached-nullable-no-gensym re)))
(defun (setf cached-nullable*) (value re)
(if *gensym-assignments?*
(setf (cached-nullable re) value)
(setf (cached-nullable-no-gensym re) value)))
(defun nullable (re)
"(language-of (nullable RE)) = (language-of (both RE (empty-string)))"
(with-slot-consing (cached-nullable* re)
(trivia:ematch re
((empty-string) (empty-string))
((literal _) (empty-set))
((join r s) (join (nullable r) (nullable s)))
((either r s) (either (nullable r) (nullable s)))
((repeat r min _ c) (let ((rn (if c (nullable r) (empty-set))))
(if (plusp min)
(empty-set)
(either rn (empty-string)))))
((both r s) (both (nullable r) (nullable s)))
((tag-set s) (tag-set (gensym-position-assignments s)))
((invert r) (if (eq (nullable r) (empty-set))
(empty-string)
(empty-set)))
((grep r _) (nullable r))
((alpha r history) (either (nullable r) history)))))
| null | https://raw.githubusercontent.com/telekons/one-more-re-nightmare/c27f2e96ece56dc5689db0d5d3909e6f8a3744ef/Code/DFA-construction/nullable.lisp | lisp | (in-package :one-more-re-nightmare)
(defvar *gensym-assignments?* t)
(defun cached-nullable* (re)
(if *gensym-assignments?*
(cached-nullable re)
(cached-nullable-no-gensym re)))
(defun (setf cached-nullable*) (value re)
(if *gensym-assignments?*
(setf (cached-nullable re) value)
(setf (cached-nullable-no-gensym re) value)))
(defun nullable (re)
"(language-of (nullable RE)) = (language-of (both RE (empty-string)))"
(with-slot-consing (cached-nullable* re)
(trivia:ematch re
((empty-string) (empty-string))
((literal _) (empty-set))
((join r s) (join (nullable r) (nullable s)))
((either r s) (either (nullable r) (nullable s)))
((repeat r min _ c) (let ((rn (if c (nullable r) (empty-set))))
(if (plusp min)
(empty-set)
(either rn (empty-string)))))
((both r s) (both (nullable r) (nullable s)))
((tag-set s) (tag-set (gensym-position-assignments s)))
((invert r) (if (eq (nullable r) (empty-set))
(empty-string)
(empty-set)))
((grep r _) (nullable r))
((alpha r history) (either (nullable r) history)))))
| |
21073c6df1d6bad652411f271eee5447692162c39d3cce80b666ab0086d1c59c | input-output-hk/plutus-apps | Client.hs | # LANGUAGE RecordWildCards #
# LANGUAGE TypeApplications #
-- | Servant client for PAB
module Plutus.PAB.Webserver.Client (
PabClient(..)
, InstanceClient(..)
, pabClient
) where
import Data.Aeson (FromJSON, ToJSON (..))
import Data.Aeson qualified as JSON
import Data.Proxy
import Data.Text (Text)
import Plutus.PAB.Events.Contract
import Plutus.PAB.Instances ()
import Plutus.PAB.Webserver.API
import Plutus.PAB.Webserver.Types
import Servant.API
import Servant.Client
| Client for PAB . The first type - argument is contract type that is used for PAB - simulator .
data PabClient t walletId = PabClient
{ healthcheck :: ClientM ()
^ call method
, fullreport :: ClientM (FullReport t)
^ call fullreport method
, activateContract :: ContractActivationArgs t -> ClientM ContractInstanceId
-- ^ call activate contract method
, instanceClient :: ContractInstanceId -> InstanceClient t
^ call methods for instance client . We should turn @ContractInstanceId@ to @Text@ for the first argument .
, getWallet :: walletId -> Maybe Text -> ClientM [ContractInstanceClientState t]
-- ^ get wallet instances
, getInstances :: Maybe Text -> ClientM [ContractInstanceClientState t]
-- ^ get instances
, getDefinitions :: ClientM [ContractSignatureResponse t]
-- ^ get definitions
}
-- | Contract instance endpoints
data InstanceClient t = InstanceClient
{ getInstanceStatus :: ClientM (ContractInstanceClientState t)
-- ^ get instance status
, getInstanceSchema :: ClientM (ContractSignatureResponse t)
-- ^ get instance schema
, callInstanceEndpoint :: String -> JSON.Value -> ClientM ()
-- ^ call instance endpoint
, stopInstance :: ClientM ()
-- ^ call stop instance method
}
| Init generic pab client
pabClient :: forall t walletId. (ToJSON t, FromJSON t, ToHttpApiData walletId) => PabClient t walletId
pabClient = PabClient{..}
where
(healthcheck
:<|> fullreport
:<|> activateContract
:<|> toInstanceClient
:<|> getWallet
:<|> getInstances
:<|> getDefinitions
) = client (Proxy @(API t walletId))
instanceClient cid = InstanceClient{..}
where
(getInstanceStatus
:<|> getInstanceSchema
:<|> callInstanceEndpoint
:<|> stopInstance
) = toInstanceClient cid
| null | https://raw.githubusercontent.com/input-output-hk/plutus-apps/d637b1916522e4ec20b719487a8a2e066937aceb/plutus-pab/src/Plutus/PAB/Webserver/Client.hs | haskell | | Servant client for PAB
^ call activate contract method
^ get wallet instances
^ get instances
^ get definitions
| Contract instance endpoints
^ get instance status
^ get instance schema
^ call instance endpoint
^ call stop instance method | # LANGUAGE RecordWildCards #
# LANGUAGE TypeApplications #
module Plutus.PAB.Webserver.Client (
PabClient(..)
, InstanceClient(..)
, pabClient
) where
import Data.Aeson (FromJSON, ToJSON (..))
import Data.Aeson qualified as JSON
import Data.Proxy
import Data.Text (Text)
import Plutus.PAB.Events.Contract
import Plutus.PAB.Instances ()
import Plutus.PAB.Webserver.API
import Plutus.PAB.Webserver.Types
import Servant.API
import Servant.Client
| Client for PAB . The first type - argument is contract type that is used for PAB - simulator .
data PabClient t walletId = PabClient
{ healthcheck :: ClientM ()
^ call method
, fullreport :: ClientM (FullReport t)
^ call fullreport method
, activateContract :: ContractActivationArgs t -> ClientM ContractInstanceId
, instanceClient :: ContractInstanceId -> InstanceClient t
^ call methods for instance client . We should turn @ContractInstanceId@ to @Text@ for the first argument .
, getWallet :: walletId -> Maybe Text -> ClientM [ContractInstanceClientState t]
, getInstances :: Maybe Text -> ClientM [ContractInstanceClientState t]
, getDefinitions :: ClientM [ContractSignatureResponse t]
}
data InstanceClient t = InstanceClient
{ getInstanceStatus :: ClientM (ContractInstanceClientState t)
, getInstanceSchema :: ClientM (ContractSignatureResponse t)
, callInstanceEndpoint :: String -> JSON.Value -> ClientM ()
, stopInstance :: ClientM ()
}
| Init generic pab client
pabClient :: forall t walletId. (ToJSON t, FromJSON t, ToHttpApiData walletId) => PabClient t walletId
pabClient = PabClient{..}
where
(healthcheck
:<|> fullreport
:<|> activateContract
:<|> toInstanceClient
:<|> getWallet
:<|> getInstances
:<|> getDefinitions
) = client (Proxy @(API t walletId))
instanceClient cid = InstanceClient{..}
where
(getInstanceStatus
:<|> getInstanceSchema
:<|> callInstanceEndpoint
:<|> stopInstance
) = toInstanceClient cid
|
ae443ffba5aa3a34e6a5ba5cf35128b45e4db60f17b73e13094a694cabb9bd94 | philnguyen/soft-contract | b.rkt | #lang typed/racket/base
(: f (-> Integer Integer))
(define (f x)
(+ x 2))
(provide f)
| null | https://raw.githubusercontent.com/philnguyen/soft-contract/5e07dc2d622ee80b961f4e8aebd04ce950720239/soft-contract/test/programs/safe/issues/issue-90/b.rkt | racket | #lang typed/racket/base
(: f (-> Integer Integer))
(define (f x)
(+ x 2))
(provide f)
| |
55ddde525ddc582afb971b11a662c55e953be11b3366ce4cceea96fa0d767094 | emaphis/HtDP2e-solutions | 04_Checking_the_World.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-beginner-reader.ss" "lang")((modname 06_04_checking_the_world) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
HtDP 2e - 6 Itemizations and Structures
;; 6.4 Checking the World
Exercise : 114
;;;;;;;;;;;;;;;;;;;;;;;;;
;; 6.4 Checking the World
world programs can deal with a lot of variing data so " bigban ' comes with a
;; data verification mechanism
#;
(define (main s0)
(big-bang so [...] [check-with number?] [...]))
;; examples:
A UnitWorld is a number
between 0 ( inclusize and 1 ( exclosive )
[ 0,1 )
; Any -> Boolean
is x beween 0 ( iclusive and 1 ( exclusive )
(check-expect (between-0-and-1? "a") #false)
(check-expect (between-0-and-1? 1.2) #false)
(check-expect (between-0-and-1? 0.2) #true)
(check-expect (between-0-and-1? 0.0) #true)
(check-expect (between-0-and-1? 1.0) #false)
(define (between-0-and-1? x)
(and (number? x) (<= 0 x) (< x 1)))
#;
(define (main s0)
(big-bang s0
...
[check-with between-0-and-1?]
...))
;;;;;;;;;;;;;;;
Ex . 114 :
Use the predicates from exercise 113 to check the space invader world
program , the virtual pet program ( exercise 106 ) , and the editor program
;; (A Graphical Editor).
;; See 06_04_space_invater_3.rkt, 06_04_chat_cham_4.rkt, 06_04_editor_4.rkt
| null | https://raw.githubusercontent.com/emaphis/HtDP2e-solutions/ecb60b9a7bbf9b8999c0122b6ea152a3301f0a68/1-Fixed-Size-Data/06-Itemizations-Structures/04_Checking_the_World.rkt | racket | about the language level of this file in a form that our tools can easily process.
6.4 Checking the World
6.4 Checking the World
data verification mechanism
examples:
Any -> Boolean
(A Graphical Editor).
See 06_04_space_invater_3.rkt, 06_04_chat_cham_4.rkt, 06_04_editor_4.rkt | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-beginner-reader.ss" "lang")((modname 06_04_checking_the_world) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
HtDP 2e - 6 Itemizations and Structures
Exercise : 114
world programs can deal with a lot of variing data so " bigban ' comes with a
(define (main s0)
(big-bang so [...] [check-with number?] [...]))
A UnitWorld is a number
between 0 ( inclusize and 1 ( exclosive )
[ 0,1 )
is x beween 0 ( iclusive and 1 ( exclusive )
(check-expect (between-0-and-1? "a") #false)
(check-expect (between-0-and-1? 1.2) #false)
(check-expect (between-0-and-1? 0.2) #true)
(check-expect (between-0-and-1? 0.0) #true)
(check-expect (between-0-and-1? 1.0) #false)
(define (between-0-and-1? x)
(and (number? x) (<= 0 x) (< x 1)))
(define (main s0)
(big-bang s0
...
[check-with between-0-and-1?]
...))
Ex . 114 :
Use the predicates from exercise 113 to check the space invader world
program , the virtual pet program ( exercise 106 ) , and the editor program
|
751f47e9f400213c5e54099f34871b02edefef367f3e52f5f19cc324c05f3af1 | restyled-io/restyled.io | MetricsSpec.hs | module Restyled.MetricsSpec
( spec
) where
import Restyled.Test
import Restyled.Metrics
import Restyled.Test.Graphula
import Restyled.Time
import Restyled.TimeRange
spec :: Spec
spec = withApp $ do
describe "fetchJobMetrics" $ do
it "returns counts of Jobs in various states" $ graph $ do
now <- liftIO getCurrentTime
repo <- node @Repo () mempty
let setJobCreatedAt = fieldLens JobCreatedAt .~ now
sequence_
[ genJob repo $ setJobCreatedAt . setJobComplete now 0
, genJob repo $ setJobCreatedAt . setJobIncomplete
, genJob repo $ setJobCreatedAt . setJobComplete now 20
, genJob repo $ setJobCreatedAt . setJobComplete now 0
, genJob repo $ setJobCreatedAt . setJobComplete now 99
, genJob repo $ setJobCreatedAt . setJobIncomplete
, genJob repo $ setJobCreatedAt . setJobIncomplete
, genJob repo $ setJobCreatedAt . setJobComplete now 0
, genJob repo $ setJobCreatedAt . setJobIncomplete
, genJob repo $ setJobCreatedAt . setJobIncomplete
]
range <- timeRangeFromAgo $ Minutes 5
JobMetrics {..} <- lift $ runDB $ fetchJobMetrics range
jmSucceeded `shouldBe` Sum 3
jmFailed `shouldBe` Sum 2
jmFailedUnknown `shouldBe` Sum 1
jmUnfinished `shouldBe` Sum 5
jmTotal `shouldBe` 10
| null | https://raw.githubusercontent.com/restyled-io/restyled.io/134019dffb54f84bddb905e8e21131b4e33f7850/test/Restyled/MetricsSpec.hs | haskell | module Restyled.MetricsSpec
( spec
) where
import Restyled.Test
import Restyled.Metrics
import Restyled.Test.Graphula
import Restyled.Time
import Restyled.TimeRange
spec :: Spec
spec = withApp $ do
describe "fetchJobMetrics" $ do
it "returns counts of Jobs in various states" $ graph $ do
now <- liftIO getCurrentTime
repo <- node @Repo () mempty
let setJobCreatedAt = fieldLens JobCreatedAt .~ now
sequence_
[ genJob repo $ setJobCreatedAt . setJobComplete now 0
, genJob repo $ setJobCreatedAt . setJobIncomplete
, genJob repo $ setJobCreatedAt . setJobComplete now 20
, genJob repo $ setJobCreatedAt . setJobComplete now 0
, genJob repo $ setJobCreatedAt . setJobComplete now 99
, genJob repo $ setJobCreatedAt . setJobIncomplete
, genJob repo $ setJobCreatedAt . setJobIncomplete
, genJob repo $ setJobCreatedAt . setJobComplete now 0
, genJob repo $ setJobCreatedAt . setJobIncomplete
, genJob repo $ setJobCreatedAt . setJobIncomplete
]
range <- timeRangeFromAgo $ Minutes 5
JobMetrics {..} <- lift $ runDB $ fetchJobMetrics range
jmSucceeded `shouldBe` Sum 3
jmFailed `shouldBe` Sum 2
jmFailedUnknown `shouldBe` Sum 1
jmUnfinished `shouldBe` Sum 5
jmTotal `shouldBe` 10
| |
4253d264fe5a5cc029479e2384513cd2c54b6ff1ccd93ca6f5a8e4cc4bb9d86c | akabe/ocaml-jupyter | jupyter_args.mli | ocaml - jupyter --- An OCaml kernel for Jupyter
Copyright ( c ) 2017
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
Copyright (c) 2017 Akinori ABE
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE. *)
(** Command-line arguments *)
val connection_file : string ref
val merlin : string ref
val dot_merlin : string ref
val error_ctx_size : int ref
val parse : unit -> unit
| null | https://raw.githubusercontent.com/akabe/ocaml-jupyter/7ea00fde81a915ee9d86c979f295f4c5dac28db8/src/main/jupyter_args.mli | ocaml | * Command-line arguments | ocaml - jupyter --- An OCaml kernel for Jupyter
Copyright ( c ) 2017
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
Copyright (c) 2017 Akinori ABE
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE. *)
val connection_file : string ref
val merlin : string ref
val dot_merlin : string ref
val error_ctx_size : int ref
val parse : unit -> unit
|
a030671481ac53f7f41f4f051a5008b6bd5a898ec0ab6da8accd71e4bba511b2 | mylesmegyesi/sass-clojure | core.clj | (ns sass.core
(:require [clojure.java.io :refer [file resource]]))
(defn- build-jsass-options [options]
(let [jsass-options (io.bit3.jsass.Options.)
include-paths (.getIncludePaths jsass-options)]
(doseq [load-path (:load-paths options)]
(.add include-paths (file load-path)))
(.setIsIndentedSyntaxSrc jsass-options (= :sass (:syntax options)))
(case (:style options)
:nested
(.setOutputStyle jsass-options io.bit3.jsass.OutputStyle/NESTED)
:compressed
(.setOutputStyle jsass-options io.bit3.jsass.OutputStyle/COMPRESSED)
:expanded
(.setOutputStyle jsass-options io.bit3.jsass.OutputStyle/EXPANDED)
:compact
(.setOutputStyle jsass-options io.bit3.jsass.OutputStyle/COMPACT)
nil)
jsass-options))
(defn render-file-path [file-path & {:as options}]
(let [jsass-options (build-jsass-options options)
compiler (io.bit3.jsass.Compiler.)
file-uri (.toURI (file file-path))
output (.compileFile compiler file-uri nil jsass-options)]
(.getCss output)))
(defn render-string [string & {:as options}]
(let [jsass-options (build-jsass-options options)
compiler (io.bit3.jsass.Compiler.)
output (.compileString compiler string jsass-options)]
(.getCss output)))
(defn render-resource-path [path & options]
(let [jsass-options (build-jsass-options options)
resource-uri (.toURI (resource path))
compiler (io.bit3.jsass.Compiler.)
output (.compileFile compiler resource-uri nil jsass-options)]
(.getCss output)))
| null | https://raw.githubusercontent.com/mylesmegyesi/sass-clojure/99c1f7078ffa7183a06114559cde47c912fff63f/src/sass/core.clj | clojure | (ns sass.core
(:require [clojure.java.io :refer [file resource]]))
(defn- build-jsass-options [options]
(let [jsass-options (io.bit3.jsass.Options.)
include-paths (.getIncludePaths jsass-options)]
(doseq [load-path (:load-paths options)]
(.add include-paths (file load-path)))
(.setIsIndentedSyntaxSrc jsass-options (= :sass (:syntax options)))
(case (:style options)
:nested
(.setOutputStyle jsass-options io.bit3.jsass.OutputStyle/NESTED)
:compressed
(.setOutputStyle jsass-options io.bit3.jsass.OutputStyle/COMPRESSED)
:expanded
(.setOutputStyle jsass-options io.bit3.jsass.OutputStyle/EXPANDED)
:compact
(.setOutputStyle jsass-options io.bit3.jsass.OutputStyle/COMPACT)
nil)
jsass-options))
(defn render-file-path [file-path & {:as options}]
(let [jsass-options (build-jsass-options options)
compiler (io.bit3.jsass.Compiler.)
file-uri (.toURI (file file-path))
output (.compileFile compiler file-uri nil jsass-options)]
(.getCss output)))
(defn render-string [string & {:as options}]
(let [jsass-options (build-jsass-options options)
compiler (io.bit3.jsass.Compiler.)
output (.compileString compiler string jsass-options)]
(.getCss output)))
(defn render-resource-path [path & options]
(let [jsass-options (build-jsass-options options)
resource-uri (.toURI (resource path))
compiler (io.bit3.jsass.Compiler.)
output (.compileFile compiler resource-uri nil jsass-options)]
(.getCss output)))
| |
bcdba6123177d542947c96be31d295387d6355858945e620e43a0bb178e24586 | penpot/penpot | pixel_precision.cljc | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
;;
;; Copyright (c) KALEIDOS INC
(ns app.common.geom.shapes.pixel-precision
(:require
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.geom.point :as gpt]
[app.common.geom.shapes.common :as gco]
[app.common.geom.shapes.points :as gpo]
[app.common.geom.shapes.rect :as gpr]
[app.common.geom.shapes.transforms :as gtr]
[app.common.math :as mth]
[app.common.pages.helpers :as cph]
[app.common.types.modifiers :as ctm]))
(defn size-pixel-precision
[modifiers shape points precision]
(let [origin (gpo/origin points)
curr-width (gpo/width-points points)
curr-height (gpo/height-points points)
[_ transform transform-inverse] (gtr/calculate-geometry points)
path? (cph/path-shape? shape)
vertical-line? (and path? (<= curr-width 0.01))
horizontal-line? (and path? (<= curr-height 0.01))
target-width (if vertical-line? curr-width (max 1 (mth/round curr-width precision)))
target-height (if horizontal-line? curr-height (max 1 (mth/round curr-height precision)))
ratio-width (/ target-width curr-width)
ratio-height (/ target-height curr-height)
scalev (gpt/point ratio-width ratio-height)]
(-> modifiers
(ctm/resize scalev origin transform transform-inverse {:precise? true}))))
(defn position-pixel-precision
[modifiers _ points precision ignore-axis]
(let [bounds (gpr/bounds->rect points)
corner (gpt/point bounds)
target-corner
(cond-> corner
(= ignore-axis :x)
(update :y mth/round precision)
(= ignore-axis :y)
(update :x mth/round precision)
(nil? ignore-axis)
(gpt/round-step precision))
deltav (gpt/to-vec corner target-corner)]
(ctm/move modifiers deltav)))
(defn set-pixel-precision
"Adjust modifiers so they adjust to the pixel grid"
[modifiers shape precision ignore-axis]
(let [points (-> shape :points (gco/transform-points (ctm/modifiers->transform modifiers)))
has-resize? (not (ctm/only-move? modifiers))
[modifiers points]
(let [modifiers
(cond-> modifiers
has-resize? (size-pixel-precision shape points precision))
points
(if has-resize?
(-> (:points shape)
(gco/transform-points (ctm/modifiers->transform modifiers)) )
points)]
[modifiers points])]
(position-pixel-precision modifiers shape points precision ignore-axis)))
(defn adjust-pixel-precision
[modif-tree objects precision ignore-axis]
(let [update-modifiers
(fn [modif-tree shape]
(let [modifiers (dm/get-in modif-tree [(:id shape) :modifiers])]
(cond-> modif-tree
(and (some? modifiers) (ctm/has-geometry? modifiers))
(update-in [(:id shape) :modifiers] set-pixel-precision shape precision ignore-axis))))]
(->> (keys modif-tree)
(map (d/getf objects))
(reduce update-modifiers modif-tree))))
| null | https://raw.githubusercontent.com/penpot/penpot/c8360b19949a34a9b0878a3a6f2dd08529c9c4cb/common/src/app/common/geom/shapes/pixel_precision.cljc | clojure |
Copyright (c) KALEIDOS INC | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
(ns app.common.geom.shapes.pixel-precision
(:require
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.geom.point :as gpt]
[app.common.geom.shapes.common :as gco]
[app.common.geom.shapes.points :as gpo]
[app.common.geom.shapes.rect :as gpr]
[app.common.geom.shapes.transforms :as gtr]
[app.common.math :as mth]
[app.common.pages.helpers :as cph]
[app.common.types.modifiers :as ctm]))
(defn size-pixel-precision
[modifiers shape points precision]
(let [origin (gpo/origin points)
curr-width (gpo/width-points points)
curr-height (gpo/height-points points)
[_ transform transform-inverse] (gtr/calculate-geometry points)
path? (cph/path-shape? shape)
vertical-line? (and path? (<= curr-width 0.01))
horizontal-line? (and path? (<= curr-height 0.01))
target-width (if vertical-line? curr-width (max 1 (mth/round curr-width precision)))
target-height (if horizontal-line? curr-height (max 1 (mth/round curr-height precision)))
ratio-width (/ target-width curr-width)
ratio-height (/ target-height curr-height)
scalev (gpt/point ratio-width ratio-height)]
(-> modifiers
(ctm/resize scalev origin transform transform-inverse {:precise? true}))))
(defn position-pixel-precision
[modifiers _ points precision ignore-axis]
(let [bounds (gpr/bounds->rect points)
corner (gpt/point bounds)
target-corner
(cond-> corner
(= ignore-axis :x)
(update :y mth/round precision)
(= ignore-axis :y)
(update :x mth/round precision)
(nil? ignore-axis)
(gpt/round-step precision))
deltav (gpt/to-vec corner target-corner)]
(ctm/move modifiers deltav)))
(defn set-pixel-precision
"Adjust modifiers so they adjust to the pixel grid"
[modifiers shape precision ignore-axis]
(let [points (-> shape :points (gco/transform-points (ctm/modifiers->transform modifiers)))
has-resize? (not (ctm/only-move? modifiers))
[modifiers points]
(let [modifiers
(cond-> modifiers
has-resize? (size-pixel-precision shape points precision))
points
(if has-resize?
(-> (:points shape)
(gco/transform-points (ctm/modifiers->transform modifiers)) )
points)]
[modifiers points])]
(position-pixel-precision modifiers shape points precision ignore-axis)))
(defn adjust-pixel-precision
[modif-tree objects precision ignore-axis]
(let [update-modifiers
(fn [modif-tree shape]
(let [modifiers (dm/get-in modif-tree [(:id shape) :modifiers])]
(cond-> modif-tree
(and (some? modifiers) (ctm/has-geometry? modifiers))
(update-in [(:id shape) :modifiers] set-pixel-precision shape precision ignore-axis))))]
(->> (keys modif-tree)
(map (d/getf objects))
(reduce update-modifiers modif-tree))))
|
a6d13636ee41f1507ea2123155b17227ed728e316e91044a570918cf2110f7b8 | BitGameEN/bitgamex | c_xchgsvr.erl | %%%--------------------------------------
%%% @Module : c_xchgsvr
@Description :
%%%--------------------------------------
-module(c_xchgsvr).
-export([transfer_gold_to_exchange/5, transfer_gold_to_wallet/6]).
-include("common.hrl").
-include("gameConfig.hrl").
-include("gameConfigGlobalKey.hrl").
-include("gameConfig3rdParty.hrl").
-include("record_usr_user.hrl").
-include("record_run_role_gold.hrl").
-include("record_usr_gold_transfer.hrl").
-define(TRANSFER_TO_XCHG_URL, "").
-define(TRANSFER_TO_WALLET_URL, "").
-define(JSON_CONTENT, {"Content-Type", "application/json; charset=utf8"}).
-define(HTTP_CLIENT_TIMEOUT, 10000).
-define(HTTP_CLIENT_OPTIONS, [{max_sessions, 100}, {max_pipeline_size, 10}]).
transfer_gold_to_exchange(GameId, UserId, GoldType, Amount, ReceiptData) ->
transfer_gold(?GOLD_TRANSFER_TYPE_GAME_TO_XCHG, GameId, UserId, GoldType, Amount, <<>>, ReceiptData).
transfer_gold_to_wallet(GameId, UserId, GoldType, Amount, WalletAddr, ReceiptData) ->
transfer_gold(?GOLD_TRANSFER_TYPE_GAME_TO_WALLET, GameId, UserId, GoldType, Amount, WalletAddr, ReceiptData).
transfer_gold(TransferType, GameId, UserId, GoldType, Amount0, WalletAddr, ReceiptData) ->
#usr_user{id = UserId, bind_xchg_accid = BindXchgAccId, device_id = DeviceId} = usr_user:get_one(UserId),
TransactionType = ?GOLD_TRANSFER_TX_TYPE_GAME_TO_XCHG,
lib_role_gold:put_gold_drain_type_and_drain_id(gold_transfer, TransferType, Amount0),
lib_role_gold:add_gold(UserId, GameId, GoldType, -Amount0), % 先扣除
TransactionId = lib_user_gold_transfer:gen_uuid(),
NowDateTime = util:now_datetime_str(),
TransferR = #usr_gold_transfer{
type = TransferType,
transaction_type = TransactionType,
transaction_id = TransactionId,
receipt = ReceiptData,
player_id = UserId,
device_id = DeviceId,
xchg_accid = BindXchgAccId,
wallet_addr = WalletAddr,
gold_type = GoldType,
gold = Amount0,
status = 0,
error_tag = <<>>,
receive_game_id = GameId,
receive_time = NowDateTime,
update_time = NowDateTime},
usr_gold_transfer:set_one(TransferR),
TransferDiscountToXchg = lib_global_config:get(?GLOBAL_CONFIG_KEY_TRANSFER_DISCOUNT_TO_XCHG),
%% todo:临时为了客户端调试注释掉,以后改回来
%Amount = Amount0 * (1 - TransferDiscountToXchg),
true = Amount > 0 , % 相当于断言
%% 参数串:
%% 发送到交易所:transaction_id=xx&game_uid=xx&exchange_accid=xx&token_symbol=xx&amount=xx&time=xx
%% 发送到钱包:transaction_id=xx&game_uid=xx&exchange_accid=xx&wallet_address=xx&token_symbol=xx&amount=xx&time=xx
= integer_to_binary(UserId ) ,
AmountBin = util : f2s(Amount ) ,
= integer_to_binary(util : unixtime ( ) ) ,
%Params0 =
case TransferType of
% ?GOLD_TRANSFER_TYPE_GAME_TO_XCHG ->
< < " transaction_id= " , TransactionId / binary , " & game_uid= " , UserIdBin / binary , " & exchange_accid= " , BindXchgAccId / binary ,
" & token_symbol= " , GoldType / binary , " & amount= " , AmountBin / binary , " & time= " , / binary > > ;
% ?GOLD_TRANSFER_TYPE_GAME_TO_WALLET ->
< < " transaction_id= " , TransactionId / binary , " & game_uid= " , UserIdBin / binary , " & exchange_accid= " , BindXchgAccId / binary ,
" & wallet_address= " , WalletAddr / binary , " & token_symbol= " , GoldType / binary , " & amount= " , AmountBin / binary , " & time= " , / binary > >
% end,
用自己的私钥签名
[ Entry1 ] = public_key : pem_decode(?SELF_PRIVATE_KEY ) ,
RSAPriKey = public_key : pem_entry_decode(Entry1 ) ,
Sign0 = public_key : sign(Params0 , ' sha ' , RSAPriKey ) ,
%% 用交易所的公钥加密
[ Entry2 ] = public_key : pem_decode(?EXCHANGE_PUBLIC_KEY ) ,
= public_key : pem_entry_decode(Entry2 ) ,
Params1 = public_key : encrypt_public(Params0 , ) ,
%% 然后,base64、url编码
%Sign = util:url_encode(base64:encode(Sign0)),
= util : url_encode(base64 : encode(Params1 ) ) ,
Params = [ { < < " param_data " > > , } , { < < " sign " > > , Sign } ] ,
Params = [],
% 发送,并处理结果
Callback =
fun(JsonObject) ->
case lists:keyfind(<<"succ">>, 1, JsonObject) of
{_, 0} -> % 失败
lib_role_gold:add_gold(UserId, GameId, GoldType, Amount0), % 返回游戏币
ErrNo = case lists:keyfind(<<"errno">>, 1, JsonObject) of
{_, ErrNo_} -> ErrNo_;
false -> ?ERRNO_UNIDENTIFIED
end,
ErrMsg = case lists:keyfind(<<"errmsg">>, 1, JsonObject) of
{_, ErrMsg_} -> ErrMsg_;
false -> <<>>
end,
lib_user_gold_transfer:update_transfer_log(TransactionType, TransactionId, {error, ErrNo, ErrMsg}),
throw({ErrNo, ErrMsg});
_ -> % 其余均视为成功
Balance =
case lists:keyfind(<<"balance">>, 1, JsonObject) of
{_, Balance_} -> Balance_;
false -> -1
end,
lib_user_gold_transfer:update_transfer_log(TransactionType, TransactionId, {ok, GoldType, Amount0}),
lib_game:put_gold_drain_type_and_drain_id(gold_transfer, TransferType, Amount0),
lib_game:add_reclaimed_gold(GameId, GoldType, Amount0 * TransferDiscountToXchg),
RoleGold = run_role_gold:get_one({GameId, UserId}),
{ok, RoleGold#run_role_gold.gold, Balance}
end
end,
Url = case TransferType of
?GOLD_TRANSFER_TYPE_GAME_TO_XCHG -> ?TRANSFER_TO_XCHG_URL;
?GOLD_TRANSFER_TYPE_GAME_TO_WALLET -> ?TRANSFER_TO_WALLET_URL
end,
case do_transfer_gold_to_exchange(Url, Params) of
{error, ErrNo, ErrMsg} = Rs ->
case ErrNo of
?ERRNO_HTTP_REQ_TIMEOUT ->
% 超时情况下不能确认是否已经发到对端并处理完成,所以不能返回游戏币
httpc_proxy:queue_request(Url, get, Params, Callback);
_ -> lib_role_gold:add_gold(UserId, GameId, GoldType, Amount0) % 返回游戏币
end,
lib_user_gold_transfer:update_transfer_log(TransactionType, TransactionId, Rs),
throw({ErrNo, ErrMsg});
JsonObject ->
Callback(JsonObject)
end.
do_transfer_gold_to_exchange(Url, Params) ->
%% todo:临时为了客户端调试注释掉,以后改回来
case ibrowse : send_req(Url , [ ? JSON_CONTENT ] , get , : encode(Params ) , ? HTTP_CLIENT_OPTIONS , ? HTTP_CLIENT_TIMEOUT ) of
% {ok, Status, Head, Body} ->
% case Status of
" 200 " - >
JsonObject = jsx : decode(list_to_binary(Body ) ) ,
% ? INFO("JsonObject : ~p ~ n " , [ JsonObject ] ) ,
JsonObject ;
% _ ->
% {error, ?ERRNO_HTTP_REQ_FAILED, list_to_binary(Body)}
% end;
{ error ,
% {error, ?ERRNO_HTTP_REQ_TIMEOUT, <<"request timeout">>};
% {error, Reason} ->
% {error, ?ERRNO_HTTP_REQ_FAILED, ?T2B(Reason)}
%end.
[{<<"succ">>, 1}, {<<"balance">>, 0}].
| null | https://raw.githubusercontent.com/BitGameEN/bitgamex/151ba70a481615379f9648581a5d459b503abe19/src/ctrl/c_xchgsvr.erl | erlang | --------------------------------------
@Module : c_xchgsvr
--------------------------------------
先扣除
todo:临时为了客户端调试注释掉,以后改回来
Amount = Amount0 * (1 - TransferDiscountToXchg),
相当于断言
参数串:
发送到交易所:transaction_id=xx&game_uid=xx&exchange_accid=xx&token_symbol=xx&amount=xx&time=xx
发送到钱包:transaction_id=xx&game_uid=xx&exchange_accid=xx&wallet_address=xx&token_symbol=xx&amount=xx&time=xx
Params0 =
?GOLD_TRANSFER_TYPE_GAME_TO_XCHG ->
?GOLD_TRANSFER_TYPE_GAME_TO_WALLET ->
end,
用交易所的公钥加密
然后,base64、url编码
Sign = util:url_encode(base64:encode(Sign0)),
发送,并处理结果
失败
返回游戏币
其余均视为成功
超时情况下不能确认是否已经发到对端并处理完成,所以不能返回游戏币
返回游戏币
todo:临时为了客户端调试注释掉,以后改回来
{ok, Status, Head, Body} ->
case Status of
? INFO("JsonObject : ~p ~ n " , [ JsonObject ] ) ,
_ ->
{error, ?ERRNO_HTTP_REQ_FAILED, list_to_binary(Body)}
end;
{error, ?ERRNO_HTTP_REQ_TIMEOUT, <<"request timeout">>};
{error, Reason} ->
{error, ?ERRNO_HTTP_REQ_FAILED, ?T2B(Reason)}
end. | @Description :
-module(c_xchgsvr).
-export([transfer_gold_to_exchange/5, transfer_gold_to_wallet/6]).
-include("common.hrl").
-include("gameConfig.hrl").
-include("gameConfigGlobalKey.hrl").
-include("gameConfig3rdParty.hrl").
-include("record_usr_user.hrl").
-include("record_run_role_gold.hrl").
-include("record_usr_gold_transfer.hrl").
-define(TRANSFER_TO_XCHG_URL, "").
-define(TRANSFER_TO_WALLET_URL, "").
-define(JSON_CONTENT, {"Content-Type", "application/json; charset=utf8"}).
-define(HTTP_CLIENT_TIMEOUT, 10000).
-define(HTTP_CLIENT_OPTIONS, [{max_sessions, 100}, {max_pipeline_size, 10}]).
transfer_gold_to_exchange(GameId, UserId, GoldType, Amount, ReceiptData) ->
transfer_gold(?GOLD_TRANSFER_TYPE_GAME_TO_XCHG, GameId, UserId, GoldType, Amount, <<>>, ReceiptData).
transfer_gold_to_wallet(GameId, UserId, GoldType, Amount, WalletAddr, ReceiptData) ->
transfer_gold(?GOLD_TRANSFER_TYPE_GAME_TO_WALLET, GameId, UserId, GoldType, Amount, WalletAddr, ReceiptData).
transfer_gold(TransferType, GameId, UserId, GoldType, Amount0, WalletAddr, ReceiptData) ->
#usr_user{id = UserId, bind_xchg_accid = BindXchgAccId, device_id = DeviceId} = usr_user:get_one(UserId),
TransactionType = ?GOLD_TRANSFER_TX_TYPE_GAME_TO_XCHG,
lib_role_gold:put_gold_drain_type_and_drain_id(gold_transfer, TransferType, Amount0),
TransactionId = lib_user_gold_transfer:gen_uuid(),
NowDateTime = util:now_datetime_str(),
TransferR = #usr_gold_transfer{
type = TransferType,
transaction_type = TransactionType,
transaction_id = TransactionId,
receipt = ReceiptData,
player_id = UserId,
device_id = DeviceId,
xchg_accid = BindXchgAccId,
wallet_addr = WalletAddr,
gold_type = GoldType,
gold = Amount0,
status = 0,
error_tag = <<>>,
receive_game_id = GameId,
receive_time = NowDateTime,
update_time = NowDateTime},
usr_gold_transfer:set_one(TransferR),
TransferDiscountToXchg = lib_global_config:get(?GLOBAL_CONFIG_KEY_TRANSFER_DISCOUNT_TO_XCHG),
= integer_to_binary(UserId ) ,
AmountBin = util : f2s(Amount ) ,
= integer_to_binary(util : unixtime ( ) ) ,
case TransferType of
< < " transaction_id= " , TransactionId / binary , " & game_uid= " , UserIdBin / binary , " & exchange_accid= " , BindXchgAccId / binary ,
" & token_symbol= " , GoldType / binary , " & amount= " , AmountBin / binary , " & time= " , / binary > > ;
< < " transaction_id= " , TransactionId / binary , " & game_uid= " , UserIdBin / binary , " & exchange_accid= " , BindXchgAccId / binary ,
" & wallet_address= " , WalletAddr / binary , " & token_symbol= " , GoldType / binary , " & amount= " , AmountBin / binary , " & time= " , / binary > >
用自己的私钥签名
[ Entry1 ] = public_key : pem_decode(?SELF_PRIVATE_KEY ) ,
RSAPriKey = public_key : pem_entry_decode(Entry1 ) ,
Sign0 = public_key : sign(Params0 , ' sha ' , RSAPriKey ) ,
[ Entry2 ] = public_key : pem_decode(?EXCHANGE_PUBLIC_KEY ) ,
= public_key : pem_entry_decode(Entry2 ) ,
Params1 = public_key : encrypt_public(Params0 , ) ,
= util : url_encode(base64 : encode(Params1 ) ) ,
Params = [ { < < " param_data " > > , } , { < < " sign " > > , Sign } ] ,
Params = [],
Callback =
fun(JsonObject) ->
case lists:keyfind(<<"succ">>, 1, JsonObject) of
ErrNo = case lists:keyfind(<<"errno">>, 1, JsonObject) of
{_, ErrNo_} -> ErrNo_;
false -> ?ERRNO_UNIDENTIFIED
end,
ErrMsg = case lists:keyfind(<<"errmsg">>, 1, JsonObject) of
{_, ErrMsg_} -> ErrMsg_;
false -> <<>>
end,
lib_user_gold_transfer:update_transfer_log(TransactionType, TransactionId, {error, ErrNo, ErrMsg}),
throw({ErrNo, ErrMsg});
Balance =
case lists:keyfind(<<"balance">>, 1, JsonObject) of
{_, Balance_} -> Balance_;
false -> -1
end,
lib_user_gold_transfer:update_transfer_log(TransactionType, TransactionId, {ok, GoldType, Amount0}),
lib_game:put_gold_drain_type_and_drain_id(gold_transfer, TransferType, Amount0),
lib_game:add_reclaimed_gold(GameId, GoldType, Amount0 * TransferDiscountToXchg),
RoleGold = run_role_gold:get_one({GameId, UserId}),
{ok, RoleGold#run_role_gold.gold, Balance}
end
end,
Url = case TransferType of
?GOLD_TRANSFER_TYPE_GAME_TO_XCHG -> ?TRANSFER_TO_XCHG_URL;
?GOLD_TRANSFER_TYPE_GAME_TO_WALLET -> ?TRANSFER_TO_WALLET_URL
end,
case do_transfer_gold_to_exchange(Url, Params) of
{error, ErrNo, ErrMsg} = Rs ->
case ErrNo of
?ERRNO_HTTP_REQ_TIMEOUT ->
httpc_proxy:queue_request(Url, get, Params, Callback);
end,
lib_user_gold_transfer:update_transfer_log(TransactionType, TransactionId, Rs),
throw({ErrNo, ErrMsg});
JsonObject ->
Callback(JsonObject)
end.
do_transfer_gold_to_exchange(Url, Params) ->
case ibrowse : send_req(Url , [ ? JSON_CONTENT ] , get , : encode(Params ) , ? HTTP_CLIENT_OPTIONS , ? HTTP_CLIENT_TIMEOUT ) of
" 200 " - >
JsonObject = jsx : decode(list_to_binary(Body ) ) ,
JsonObject ;
{ error ,
[{<<"succ">>, 1}, {<<"balance">>, 0}].
|
0b9451b0af2611aa9286e95762e8cf77cc10a383c133bd2912b52bfea29ce983 | ernius/plutus-cardano-samples | mathbounty-serialize.hs |
import Prelude
import System.Environment
import Cardano.Api
import Cardano.Api.Shelley
import qualified Plutus.V1.Ledger.Api as Plutus
import qualified Data.Aeson as Aeson
import qualified Data.ByteString.Short as SBS
import qualified Data.ByteString.Lazy as LBS
import Codec.Serialise
import MathBountyPAB
import MathBounty
import qualified PlutusTx
import Cardano.Ledger.Alonzo.TxInfo
main :: IO ()
main =
writePlutusScript "mathbounty.plutus" contractSerialised contractSBS
contractSBS :: SBS.ShortByteString
contractSBS = SBS.toShort . LBS.toStrict $ serialise bountyScript
contractSerialised :: PlutusScript PlutusScriptV1
contractSerialised = PlutusScriptSerialised contractSBS
mathBountyRedeemJSON:: LBS.ByteString
mathBountyRedeemJSON = Aeson.encode $ scriptDataToJson ScriptDataJsonDetailedSchema $ fromPlutusData $ PlutusTx.toData (3 :: Integer)
mathBountyDatumJSON:: LBS.ByteString
mathBountyDatumJSON = Aeson.encode $ scriptDataToJson ScriptDataJsonDetailedSchema $ fromPlutusData $ PlutusTx.toData (MathBountyDatum 9)
writePlutusScript :: FilePath -> PlutusScript PlutusScriptV1 -> SBS.ShortByteString -> IO ()
writePlutusScript filename scriptSerial scriptSBS = do
LBS.writeFile "redeemer.json" mathBountyRedeemJSON
LBS.writeFile "datum.json" mathBountyDatumJSON
case Plutus.defaultCostModelParams of
Just m ->
let (logout, e) = Plutus.evaluateScriptCounting Plutus.Verbose m scriptSBS []
in do print ("Log output" :: String) >> print logout
case e of
Left evalErr -> print ("Eval Error" :: String) >> print evalErr
Right exbudget -> do
print ("Ex Budget" :: String) >> print exbudget
print ("Ex Units" :: String) >> print (exBudgetToExUnits exbudget)
Nothing -> error "defaultCostModelParams failed"
result <- writeFileTextEnvelope filename Nothing scriptSerial
case result of
Left err -> print $ displayError err
Right () -> return ()
| null | https://raw.githubusercontent.com/ernius/plutus-cardano-samples/3b5476e45725578622889114e1b36d9a6cf56535/app/mathbounty-serialize.hs | haskell |
import Prelude
import System.Environment
import Cardano.Api
import Cardano.Api.Shelley
import qualified Plutus.V1.Ledger.Api as Plutus
import qualified Data.Aeson as Aeson
import qualified Data.ByteString.Short as SBS
import qualified Data.ByteString.Lazy as LBS
import Codec.Serialise
import MathBountyPAB
import MathBounty
import qualified PlutusTx
import Cardano.Ledger.Alonzo.TxInfo
main :: IO ()
main =
writePlutusScript "mathbounty.plutus" contractSerialised contractSBS
contractSBS :: SBS.ShortByteString
contractSBS = SBS.toShort . LBS.toStrict $ serialise bountyScript
contractSerialised :: PlutusScript PlutusScriptV1
contractSerialised = PlutusScriptSerialised contractSBS
mathBountyRedeemJSON:: LBS.ByteString
mathBountyRedeemJSON = Aeson.encode $ scriptDataToJson ScriptDataJsonDetailedSchema $ fromPlutusData $ PlutusTx.toData (3 :: Integer)
mathBountyDatumJSON:: LBS.ByteString
mathBountyDatumJSON = Aeson.encode $ scriptDataToJson ScriptDataJsonDetailedSchema $ fromPlutusData $ PlutusTx.toData (MathBountyDatum 9)
writePlutusScript :: FilePath -> PlutusScript PlutusScriptV1 -> SBS.ShortByteString -> IO ()
writePlutusScript filename scriptSerial scriptSBS = do
LBS.writeFile "redeemer.json" mathBountyRedeemJSON
LBS.writeFile "datum.json" mathBountyDatumJSON
case Plutus.defaultCostModelParams of
Just m ->
let (logout, e) = Plutus.evaluateScriptCounting Plutus.Verbose m scriptSBS []
in do print ("Log output" :: String) >> print logout
case e of
Left evalErr -> print ("Eval Error" :: String) >> print evalErr
Right exbudget -> do
print ("Ex Budget" :: String) >> print exbudget
print ("Ex Units" :: String) >> print (exBudgetToExUnits exbudget)
Nothing -> error "defaultCostModelParams failed"
result <- writeFileTextEnvelope filename Nothing scriptSerial
case result of
Left err -> print $ displayError err
Right () -> return ()
| |
ba285cf9496c6f413ec20e7881abcc88f3478657ee61b6b92494801e551487b9 | metabase/metabase | xlsx_test.clj | (ns metabase.query-processor.streaming.xlsx-test
(:require
[cheshire.generate :as json.generate]
[clojure.java.io :as io]
[clojure.test :refer :all]
[dk.ative.docjure.spreadsheet :as spreadsheet]
[metabase.driver :as driver]
[metabase.query-processor.streaming.interface :as qp.si]
[metabase.query-processor.streaming.xlsx :as qp.xlsx]
[metabase.shared.models.visualization-settings :as mb.viz]
[metabase.test :as mt])
(:import
(com.fasterxml.jackson.core JsonGenerator)
(java.io BufferedInputStream BufferedOutputStream ByteArrayInputStream ByteArrayOutputStream)))
(set! *warn-on-reflection* true)
;;; +----------------------------------------------------------------------------------------------------------------+
;;; | Format string generation unit tests |
;;; +----------------------------------------------------------------------------------------------------------------+
(defn- format-string
([format-settings]
(format-string format-settings nil))
([format-settings col]
(let [format-strings (@#'qp.xlsx/format-settings->format-strings format-settings col)]
If only one format string is returned ( for datetimes ) or both format strings
;; are equal, just return a single value to make tests more readable.
(cond
(= (count format-strings) 1)
(first format-strings)
(= (first format-strings) (second format-strings))
(first format-strings)
:else
format-strings))))
(deftest format-settings->format-string-test
(mt/with-temporary-setting-values [custom-formatting {}]
(testing "Empty format settings don't produce a format string"
(is (nil? (format-string {}))))
(testing "General number formatting"
(testing "number-style (non-currency)"
(is (= ["#,##0" "#,##0.##"] (format-string {::mb.viz/number-style "decimal"})))
(is (= "#,##0.00%" (format-string {::mb.viz/number-style "percent"})))
(is (= "#,##0.00E+0" (format-string {::mb.viz/number-style "scientific"}))))
(testing "Decimals"
(is (= "#,##0" (format-string {::mb.viz/decimals 0, ::mb.viz/number-style "decimal"})))
(is (= "#,##0%" (format-string {::mb.viz/decimals 0, ::mb.viz/number-style "percent"})))
(is (= "#,##0E+0" (format-string {::mb.viz/decimals 0, ::mb.viz/number-style "scientific"})))
(is (= "[$$]#,##0" (format-string {::mb.viz/decimals 0,
::mb.viz/currency-in-header false,
::mb.viz/number-style "currency"})))
(is (= "#,##0.000" (format-string {::mb.viz/decimals 3, ::mb.viz/number-style "decimal"})))
(is (= "#,##0.000%" (format-string {::mb.viz/decimals 3, ::mb.viz/number-style "percent"})))
(is (= "#,##0.000E+0" (format-string {::mb.viz/decimals 3, ::mb.viz/number-style "scientific"})))
(is (= "[$$]#,##0.000" (format-string {::mb.viz/decimals 3,
::mb.viz/currency-in-header false,
::mb.viz/number-style "currency"})))
Negative decimal values not supported ( unlike on frontend ) ; falls back to 0
(is (= "#,##0" (format-string {::mb.viz/decimals -1, ::mb.viz/number-style "decimal"})))
(is (= "#,##0%" (format-string {::mb.viz/decimals -1, ::mb.viz/number-style "percent"})))
(is (= "#,##0E+0" (format-string {::mb.viz/decimals -1, ::mb.viz/number-style "scientific"})))
(is (= "[$$]#,##0" (format-string {::mb.viz/decimals -1,
::mb.viz/currency-in-header false,
::mb.viz/number-style "currency"})))
Thousands separator can be omitted
(is (= ["###0" "###0.##"] (format-string {::mb.viz/number-separators "."})))
;; Custom separators are not supported
(is (= ["#,##0" "#,##0.##"] (format-string {::mb.viz/number-separators ", "})))
(is (= ["#,##0" "#,##0.##"] (format-string {::mb.viz/number-separators ".,"})))
(is (= ["#,##0" "#,##0.##"] (format-string {::mb.viz/number-separators ".’"}))))
(testing "Scale"
;; Scale should not affect format string since it is applied to the actual data prior to export
(is (= ["#,##0" "#,##0.##"] (format-string {::mb.viz/scale 2})))
(is (= "#,##0.00" (format-string {::mb.viz/scale 2, ::mb.viz/decimals 2}))))
(testing "Prefix and suffix"
;; Prefix/suffix on general number format
(is (= ["\"prefix\"#,##0"
"\"prefix\"#,##0.##"] (format-string {::mb.viz/prefix "prefix"})))
(is (= ["#,##0\"suffix\""
"#,##0.##\"suffix\""] (format-string {::mb.viz/suffix "suffix"})))
(is (= ["\"prefix\"#,##0\"suffix\""
"\"prefix\"#,##0.##\"suffix\""] (format-string {::mb.viz/prefix "prefix",
::mb.viz/suffix "suffix"})))
;; Prefix/suffix on number format w/fixed decimal count
(is (= "\"prefix\"#,##0.00" (format-string {::mb.viz/decimals 2,
::mb.viz/prefix "prefix"})))
(is (= "#,##0.00\"suffix\"" (format-string {::mb.viz/decimals 2,
::mb.viz/suffix "suffix"})))
(is (= "\"prefix\"#,##0.00\"suffix\"" (format-string {::mb.viz/decimals 2,
::mb.viz/prefix "prefix",
::mb.viz/suffix "suffix"})))
;; Prefix/suffix on percentage
(is (= "\"prefix\"#,##0.00%\"suffix\"" (format-string {::mb.viz/number-style "percent",
::mb.viz/prefix "prefix",
::mb.viz/suffix "suffix"})))
;; Prefix/suffix on scientific notation
(is (= "\"prefix\"#,##0.00E+0\"suffix\"" (format-string {::mb.viz/number-style "scientific",
::mb.viz/prefix "prefix",
::mb.viz/suffix "suffix"})))
;; Prefix/suffix on currency
(is (= "\"prefix\"[$$]#,##0.00\"suffix\"" (format-string {::mb.viz/currency-in-header false,
::mb.viz/number-style "currency",
::mb.viz/prefix "prefix",
::mb.viz/suffix "suffix"})))))
(testing "Currency formatting"
(let [price-col {:semantic_type :type/Price, :effective_type :type/Float}]
(testing "Default currency formatting is dollar sign"
(is (= "[$$]#,##0.00" (format-string {::mb.viz/currency-in-header false} price-col))))
(testing "Uses native currency symbol if supported"
(is (= "[$$]#,##0.00" (format-string {::mb.viz/currency-in-header false, ::mb.viz/currency "USD"} price-col)))
(is (= "[$CA$]#,##0.00" (format-string {::mb.viz/currency-in-header false, ::mb.viz/currency "CAD"} price-col)))
(is (= "[$€]#,##0.00" (format-string {::mb.viz/currency-in-header false, ::mb.viz/currency "EUR"} price-col)))
(is (= "[$¥]#,##0.00" (format-string {::mb.viz/currency-in-header false, ::mb.viz/currency "JPY"} price-col))))
(testing "Falls back to code if native symbol not supported"
(is (= "[$KGS] #,##0.00" (format-string {::mb.viz/currency-in-header false, ::mb.viz/currency "KGS"} price-col)))
(is (= "[$KGS] #,##0.00" (format-string {::mb.viz/currency-in-header false,
::mb.viz/currency "KGS",
::mb.viz/currency-style "symbol"}
price-col))))
(testing "Respects currency-style option"
(is (= "[$$]#,##0.00" (format-string {::mb.viz/currency-in-header false,
::mb.viz/currency-style "symbol"}
price-col)))
(is (= "[$USD] #,##0.00" (format-string {::mb.viz/currency-in-header false,
::mb.viz/currency-style "code"}
price-col)))
(is (= "#,##0.00\" US dollars\"" (format-string {::mb.viz/currency-in-header false,
::mb.viz/currency-style "name"}
price-col)))
(is (= "[$€]#,##0.00" (format-string {::mb.viz/currency-in-header false,
::mb.viz/currency "EUR",
::mb.viz/currency-style "symbol"}
price-col)))
(is (= "[$EUR] #,##0.00" (format-string {::mb.viz/currency-in-header false,
::mb.viz/currency "EUR",
::mb.viz/currency-style "code"}
price-col)))
(is (= "#,##0.00\" euros\"" (format-string {::mb.viz/currency-in-header false,
::mb.viz/currency "EUR",
::mb.viz/currency-style "name"}
price-col))))
(testing "Currency not included for non-currency semantic types"
(is (= "#,##0.00" (format-string {::mb.viz/currency-in-header false} {:semantic_type :type/Quantity}))))
(testing "Formatting options are ignored if currency-in-header is true or absent (defaults to true)"
(is (= "#,##0.00" (format-string {::mb.viz/currency-style "symbol"} price-col)))
(is (= "#,##0.00" (format-string {::mb.viz/currency-style "name"} price-col)))
(is (= "#,##0.00" (format-string {::mb.viz/currency-style "code"} price-col)))
(is (= "#,##0.00" (format-string {::mb.viz/currency "USD"} price-col)))
(is (= "#,##0.00" (format-string {::mb.viz/currency "EUR"} price-col)))
(is (= "#,##0.00" (format-string {::currency-in-header true, ::mb.viz/currency-style "symbol"} price-col)))
(is (= "#,##0.00" (format-string {::currency-in-header true, ::mb.viz/currency-style "name"} price-col)))
(is (= "#,##0.00" (format-string {::currency-in-header true, ::mb.viz/currency-style "code"} price-col)))
(is (= "#,##0.00" (format-string {::currency-in-header true, ::mb.viz/currency "USD"} price-col)))
(is (= "#,##0.00" (format-string {::currency-in-header true, ::mb.viz/currency "EUR"} price-col))))
(testing "Global localization settings are incorporated with lower precedence than column format settings"
(mt/with-temporary-setting-values [custom-formatting {:type/Currency {:currency "EUR",
:currency_in_header false,
:currency_style "code"}}]
(is (= "[$EUR] #,##0.00" (format-string {} price-col)))
(is (= "[$CAD] #,##0.00" (format-string {::mb.viz/currency "CAD"} price-col)))
(is (= "[$€]#,##0.00" (format-string {::mb.viz/currency-style "symbol"} price-col)))
(is (= "#,##0.00" (format-string {::mb.viz/currency-in-header true} price-col)))))))
(testing "Datetime formatting"
(let [date-col {:semantic_type :type/CreationTimestamp, :effective_type :type/Temporal}]
(testing "date-style"
(is (= "m/d/yyyy, h:mm am/pm" (format-string {::mb.viz/date-style "M/D/YYYY"} date-col)))
(is (= "d/m/yyyy, h:mm am/pm" (format-string {::mb.viz/date-style "D/M/YYYY"} date-col)))
(is (= "yyyy/m/d, h:mm am/pm" (format-string {::mb.viz/date-style "YYYY/M/D"} date-col)))
(is (= "mmmm d, yyyy, h:mm am/pm" (format-string {::mb.viz/date-style "MMMM D, YYYY"} date-col)))
(is (= "dmmmm, yyyy, h:mm am/pm" (format-string {::mb.viz/date-style "DMMMM, YYYY"} date-col)))
(is (= "dddd, mmmm d, yyyy, h:mm am/pm" (format-string {::mb.viz/date-style "dddd, MMMM D, YYYY"} date-col))))
(testing "date-separator"
(is (= "m/d/yyyy, h:mm am/pm" (format-string {::mb.viz/date-style "M/D/YYYY", ::mb.viz/date-separator "/"} date-col)))
(is (= "m.d.yyyy, h:mm am/pm" (format-string {::mb.viz/date-style "M/D/YYYY", ::mb.viz/date-separator "."} date-col)))
(is (= "m-d-yyyy, h:mm am/pm" (format-string {::mb.viz/date-style "M/D/YYYY", ::mb.viz/date-separator "-"} date-col))))
(testing "date-abbreviate"
(is (= "mmm d, yyyy, h:mm am/pm" (format-string {::mb.viz/date-abbreviate true} date-col)))
(is (= "mmmm d, yyyy, h:mm am/pm" (format-string {::mb.viz/date-abbreviate false} date-col)))
(is (= "ddd, mmm d, yyyy, h:mm am/pm" (format-string {::mb.viz/date-abbreviate true
::mb.viz/date-style, "dddd, MMMM D, YYYY"} date-col)))
(is (= "dddd, mmmm d, yyyy, h:mm am/pm" (format-string {::mb.viz/date-abbreviate false
::mb.viz/date-style, "dddd, MMMM D, YYYY"} date-col))))
(testing "time-style"
(is (= "mmmm d, yyyy, hh:mm" (format-string {::mb.viz/time-style "HH:mm"} date-col)))
(is (= "mmmm d, yyyy, hh:mm" (format-string {::mb.viz/time-style "k:mm"} date-col)))
(is (= "mmmm d, yyyy, h:mm am/pm" (format-string {::mb.viz/time-style "h:mm A"} date-col)))
(is (= "mmmm d, yyyy, h am/pm" (format-string {::mb.viz/time-style "h A"} date-col))))
(testing "time-enabled"
(is (= "mmmm d, yyyy" (format-string {::mb.viz/time-enabled nil} date-col)))
(is (= "mmmm d, yyyy, h:mm am/pm" (format-string {::mb.viz/time-enabled "minutes"} date-col)))
(is (= "mmmm d, yyyy, h:mm:ss am/pm" (format-string {::mb.viz/time-enabled "seconds"} date-col)))
(is (= "mmmm d, yyyy, h:mm:ss.000 am/pm" (format-string {::mb.viz/time-enabled "milliseconds"} date-col)))
;; time-enabled overrides time-styled
(is (= "mmmm d, yyyy" (format-string {::mb.viz/time-style "h:mm A", ::mb.viz/time-enabled nil} date-col))))
(testing ":unit values on temporal breakout fields"
(let [month-col (assoc date-col :unit :month)
year-col (assoc date-col :unit :year)]
(is (= "mmmm, yyyy" (format-string {} month-col)))
(is (= "m/yyyy" (format-string {::mb.viz/date-style "M/D/YYYY"} month-col)))
(is (= "yyyy/m" (format-string {::mb.viz/date-style "YYYY/M/D"} month-col)))
(is (= "mmmm, yyyy" (format-string {::mb.viz/date-style "MMMM D, YYYY"} month-col)))
(is (= "mmmm, yyyy" (format-string {::mb.viz/date-style "D MMMM, YYYY"} month-col)))
(is (= "mmmm, yyyy" (format-string {::mb.viz/date-style "DDDD, MMMM D, YYYY"} month-col)))
(is (= "yyyy" (format-string {} year-col)))
(is (= "yyyy" (format-string {::mb.viz/date-style "M/D/YYYY"} year-col)))))
(testing "misc combinations"
(is (= "yyyy.m.d, h:mm:ss am/pm" (format-string {::mb.viz/date-style "YYYY/M/D",
::mb.viz/date-separator ".",
::mb.viz/time-style "h:mm A",
::mb.viz/time-enabled "seconds"} date-col)))
(is (= "dddd, mmmm d, yyyy, hh:mm:ss.000" (format-string {::mb.viz/date-style "dddd, MMMM D, YYYY",
::mb.viz/time-style "HH:mm",
::mb.viz/time-enabled "milliseconds"} date-col))))
(testing "Global localization settings are incorporated with lower precedence than column format settings"
(mt/with-temporary-setting-values [custom-formatting {:type/Temporal {:date_style "YYYY/M/D",
:date_separator ".",
:time_style "HH:mm"}}]
(is (= "yyyy.m.d, hh:mm" (format-string {} date-col)))
(is (= "d.m.yyyy, hh:mm" (format-string {::mb.viz/date-style "D/M/YYYY"} date-col)))
(is (= "yyyy-m-d, hh:mm" (format-string {::mb.viz/date-separator "-"} date-col)))
(is (= "yyyy.m.d, h:mm am/pm" (format-string {::mb.viz/time-style "h:mm A"} date-col)))))))
(testing "primary key and foreign key formatting"
(is (= "0" (format-string {} {:semantic_type :type/PK})))
(is (= "0" (format-string {} {:semantic_type :type/FK}))))))
;;; +----------------------------------------------------------------------------------------------------------------+
;;; | XLSX export tests |
;;; +----------------------------------------------------------------------------------------------------------------+
;; These are tests that generate an XLSX binary and then parse and assert on its contents, to test logic and value
;; formatting that is specific to the XLSX format. These do NOT test any of the column ordering logic in
;; `metabase.query-processor.streaming`, or anything that happens in the API handlers for generating exports.
(defn parse-cell-content
"Parses an XLSX sheet and returns the raw data in each row"
[sheet]
(mapv (fn [row]
(mapv spreadsheet/read-cell row))
(spreadsheet/into-seq sheet)))
(defn parse-xlsx-results
"Given a byte array representing an XLSX document, parses the query result sheet using the provided `parse-fn`"
([bytea]
(parse-xlsx-results bytea parse-cell-content))
([bytea parse-fn]
(with-open [is (BufferedInputStream. (ByteArrayInputStream. bytea))]
(let [workbook (spreadsheet/load-workbook-from-stream is)
sheet (spreadsheet/select-sheet "Query result" workbook)]
(parse-fn sheet)))))
(defn- xlsx-export
([ordered-cols viz-settings rows]
(xlsx-export ordered-cols viz-settings rows parse-cell-content))
([ordered-cols viz-settings rows parse-fn]
(with-open [bos (ByteArrayOutputStream.)
os (BufferedOutputStream. bos)]
(let [results-writer (qp.si/streaming-results-writer :xlsx os)]
(qp.si/begin! results-writer {:data {:ordered-cols ordered-cols}} viz-settings)
(doall (map-indexed
(fn [i row] (qp.si/write-row! results-writer row i ordered-cols viz-settings))
rows))
(qp.si/finish! results-writer {:row_count (count rows)}))
(let [bytea (.toByteArray bos)]
(parse-xlsx-results bytea parse-fn)))))
(defn- parse-format-strings
[sheet]
(for [^org.apache.poi.ss.usermodel.Row row (spreadsheet/into-seq sheet)]
(map (fn [^org.apache.poi.xssf.usermodel.XSSFCell cell]
(.. cell getCellStyle getDataFormatString))
row)))
(deftest export-format-test
(testing "Different format strings are used for ints and numbers that round to ints (with 2 decimal places)"
(is (= [["#,##0"] ["#,##0.##"] ["#,##0"] ["#,##0.##"] ["#,##0"] ["#,##0.##"]]
(rest (xlsx-export [{:id 0, :name "Col", :semantic_type :type/Cost}]
{}
[[1] [1.23] [1.004] [1.005] [10000000000] [10000000000.123]]
parse-format-strings)))))
(testing "Misc format strings are included correctly in exports"
(is (= ["[$€]#,##0.00"]
(second (xlsx-export [{:id 0, :name "Col", :semantic_type :type/Cost}]
{::mb.viz/column-settings {{::mb.viz/field-id 0}
{::mb.viz/currency "EUR"
::mb.viz/currency-in-header false}}}
[[1.23]]
parse-format-strings))))
(is (= ["yyyy.m.d, h:mm:ss am/pm"]
(second (xlsx-export [{:id 0, :name "Col", :effective_type :type/Temporal}]
{::mb.viz/column-settings {{::mb.viz/field-id 0}
{::mb.viz/date-style "YYYY/M/D",
::mb.viz/date-separator ".",
::mb.viz/time-style "h:mm A",
::mb.viz/time-enabled "seconds"}}}
[[#t "2020-03-28T10:12:06.681"]]
parse-format-strings))))))
(deftest column-order-test
(testing "Column titles are ordered correctly in the output"
(is (= ["Col1" "Col2"]
(first (xlsx-export [{:id 0, :name "Col1"} {:id 1, :name "Col2"}] {} []))))
(is (= ["Col2" "Col1"]
(first (xlsx-export [{:id 0, :name "Col2"} {:id 1, :name "Col1"}] {} [])))))
(testing "Data in each row is reordered by output-order prior to export"
(is (= [["b" "a"] ["d" "c"]]
(rest (xlsx-export [{:id 0, :name "Col1"} {:id 1, :name "Col2"}]
{:output-order [1 0]}
[["a" "b"] ["c" "d"]])))))
(testing "Rows not included by index in output-order are excluded from export"
(is (= [["b"] ["d"]]
(rest (xlsx-export [{:id 0, :name "Col1"} {:id 1, :name "Col2"}]
{:output-order [1]}
[["a" "b"] ["c" "d"]]))))))
(deftest column-title-test
(testing "::mb.viz/column-title precedence over :display_name, which takes precendence over :name"
(is (= ["Display name"]
(first (xlsx-export [{:id 0, :display_name "Display name", :name "Name"}] {} []))))
(is (= ["Column title"]
(first (xlsx-export [{:id 0, :display_name "Display name", :name "Name"}]
{::mb.viz/column-settings {{::mb.viz/field-id 0} {::mb.viz/column-title "Column title"}}}
[]))))
Columns can be correlated to viz settings by : name if : i d is missing ( e.g. for native queries )
(is (= ["Column title"]
(first (xlsx-export [{:display_name "Display name", :name "Name"}]
{::mb.viz/column-settings {{::mb.viz/column-name "Name"} {::mb.viz/column-title "Column title"}}}
[])))))
(testing "Currency is included in column title if necessary"
;; Dollar symbol is included by default if semantic type of column derives from :type/Currency
(is (= ["Col ($)"]
(first (xlsx-export [{:id 0, :name "Col", :semantic_type :type/Cost}]
{::mb.viz/column-settings {::mb.viz/field-id 0}}
[]))))
;; Currency code is used if requested in viz settings
(is (= ["Col (USD)"]
(first (xlsx-export [{:id 0, :name "Col", :semantic_type :type/Cost}]
{::mb.viz/column-settings {{::mb.viz/field-id 0}
{::mb.viz/currency "USD",
::mb.viz/currency-style "code"}}}
[]))))
;; Currency name is used if requested in viz settings
(is (= ["Col (US dollars)"]
(first (xlsx-export [{:id 0, :name "Col", :semantic_type :type/Cost}]
{::mb.viz/column-settings {{::mb.viz/field-id 0}
{::mb.viz/currency "USD",
::mb.viz/currency-style "name"}}}
[]))))
;; Currency type from viz settings is respected
(is (= ["Col (€)"]
(first (xlsx-export [{:id 0, :name "Col", :semantic_type :type/Cost}]
{::mb.viz/column-settings {{::mb.viz/field-id 0} {::mb.viz/currency "EUR"}}}
[]))))
;; Falls back to code if native symbol is not supported
(is (= ["Col (KGS)"]
(first (xlsx-export [{:id 0, :name "Col", :semantic_type :type/Cost}]
{::mb.viz/column-settings {{::mb.viz/field-id 0}
{::mb.viz/currency "KGS", ::mb.viz/currency-style "symbol"}}}
[]))))
;; Currency not included unless semantic type of column derives from :type/Currency
(is (= ["Col"]
(first (xlsx-export [{:id 0, :name "Col"}]
{::mb.viz/column-settings {{::mb.viz/field-id 0} {::mb.viz/currency "USD"}}}
[]))))
;; Currency not included if ::mb.viz/currency-in-header is false
(is (= ["Col"]
(first (xlsx-export [{:id 0, :name "Col", :semantic_type :type/Cost}]
{::mb.viz/column-settings {{::mb.viz/field-id 0}
{::mb.viz/currency "USD",
::mb.viz/currency-style "code",
::mb.viz/currency-in-header false}}}
[])))))
(testing "If a col is remapped to a foreign key field, the title is taken from the viz settings for its fk_field_id (#18573)"
(is (= ["Correct title"]
(first (xlsx-export [{:id 0, :fk_field_id 1, :remapped_from "FIELD_1"}]
{::mb.viz/column-settings {{::mb.viz/field-id 0} {::mb.viz/column-title "Incorrect title"}
{::mb.viz/field-id 1} {::mb.viz/column-title "Correct title"}}}
[]))))))
(deftest scale-test
(testing "scale is applied to data prior to export"
(is (= [2.0]
(second (xlsx-export [{:id 0, :name "Col"}]
{::mb.viz/column-settings {{::mb.viz/field-id 0} {::mb.viz/scale 2}}}
[[1.0]]))))))
(deftest misc-data-test
(testing "nil values"
(is (= [nil]
(second (xlsx-export [{:id 0, :name "Col"}] {} [[nil]])))))
(testing "Boolean values"
(is (= [[true] [false]]
(rest (xlsx-export [{:id 0, :name "Col"}] {} [[true] [false]])))))
(testing "ints"
(is (= [1.0]
(second (xlsx-export [{:id 0, :name "Col"}] {} [[1]])))))
(testing "bigints"
(is (= [1.0]
(second (xlsx-export [{:id 0, :name "Col"}] {} [[1N]])))))
(testing "bigdecimals"
(is (= [1.23]
(second (xlsx-export [{:id 0, :name "Col"}] {} [[1.23M]])))))
(testing "numbers that round to ints"
(is (= [2.00001]
(second (xlsx-export [{:id 0, :name "Col"}] {} [[2.00001]])))))
(testing "numbers that do not round to ints"
(is (= [123.123]
(second (xlsx-export [{:id 0, :name "Col"}] {} [[123.123]])))))
(testing "LocalDate"
(is (= [#inst "2020-03-28T00:00:00.000-00:00"]
(second (xlsx-export [{:id 0, :name "Col"}] {} [[#t "2020-03-28"]])))))
(testing "LocalDateTime"
(is (= [#inst "2020-03-28T10:12:06.681-00:00"]
(second (xlsx-export [{:id 0, :name "Col"}] {} [[#t "2020-03-28T10:12:06.681"]])))))
(testing "LocalTime"
(is (= [#inst "1899-12-31T10:12:06.000-00:00"]
(second (xlsx-export [{:id 0, :name "Col"}] {} [[#t "10:12:06.681"]])))))
(testing "LocalDateTime formatted as a string; should be parsed when *parse-temporal-string-values* is true"
(is (= ["2020-03-28T10:12:06.681"]
(second (xlsx-export [{:id 0, :name "Col"}] {} [["2020-03-28T10:12:06.681"]]))))
(binding [qp.xlsx/*parse-temporal-string-values* true]
(is (= [#inst "2020-03-28T10:12:06.681"]
(second (xlsx-export [{:id 0, :name "Col"}] {} [["2020-03-28T10:12:06.681"]]))))))
(mt/with-everything-store
(binding [driver/*driver* :h2]
(testing "OffsetDateTime"
(is (= [#inst "2020-03-28T13:33:06.000-00:00"]
(second (xlsx-export [{:id 0, :name "Col"}] {} [[#t "2020-03-28T10:12:06Z-03:21"]])))))
(testing "OffsetTime"
(is (= [#inst "1899-12-31T10:12:06.000-00:00"]
(second (xlsx-export [{:id 0, :name "Col"}] {} [[#t "10:12:06Z-03:21"]])))))
(testing "ZonedDateTime"
(is (= [#inst "2020-03-28T10:12:06.000-00:00"]
(second (xlsx-export [{:id 0, :name "Col"}] {} [[#t "2020-03-28T10:12:06Z"]])))))))
(testing "Strings representing country names/codes don't error when *parse-temporal-string-values* is true (#18724)"
(binding [qp.xlsx/*parse-temporal-string-values* true]
(is (= ["GB"]
(second (xlsx-export [{:id 0, :name "Col"}] {} [["GB"]]))))
(is (= ["Portugal"]
(second (xlsx-export [{:id 0, :name "Col"}] {} [["Portugal"]]))))))
(testing "NaN and infinity values (#21343)"
;; These values apparently are represented as error codes, which are parsed here into keywords
(is (= [:NUM]
(second (xlsx-export [{:id 0, :name "Col"}] {} [[##NaN]]))))
(is (= [:DIV0]
(second (xlsx-export [{:id 0, :name "Col"}] {} [[##Inf]]))))
(is (= [:DIV0]
(second (xlsx-export [{:id 0, :name "Col"}] {} [[##-Inf]]))))))
(defrecord ^:private SampleNastyClass [^String v])
(json.generate/add-encoder
SampleNastyClass
(fn [obj, ^JsonGenerator json-generator]
(.writeString json-generator (str (:v obj)))))
(defrecord ^:private AnotherNastyClass [^String v])
(deftest encode-strange-classes-test
(testing (str "Make sure that we're piggybacking off of the JSON encoding logic when encoding strange values in "
"XLSX (#5145, #5220, #5459)")
(is (= ["Hello XLSX World!" "{:v \"No Encoder\"}"]
(second (xlsx-export [{:name "val1"} {:name "val2"}]
{}
[[(SampleNastyClass. "Hello XLSX World!") (AnotherNastyClass. "No Encoder")]]))))))
(defn- parse-column-width
[^org.apache.poi.ss.usermodel.Sheet sheet]
(for [^org.apache.poi.ss.usermodel.Row row (spreadsheet/into-seq sheet)]
(for [i (range (.getLastCellNum row))]
(.getColumnWidth sheet i))))
(deftest auto-sizing-test
(testing "Columns in export are autosized to fit their content"
(let [[col1-width col2-width] (second (xlsx-export [{:id 0, :name "Col1"} {:id 1, :name "Col2"}]
{}
[["a" "abcdefghijklmnopqrstuvwxyz"]]
parse-column-width))]
;; Provide a marign for error since width measurements end up being slightly different on CI
(is (<= 2300 col1-width 2400))
(is (<= 7950 col2-width 8200))))
(testing "Auto-sizing works when the number of rows is at or above the auto-sizing threshold"
(binding [qp.xlsx/*auto-sizing-threshold* 2]
(let [[col-width] (second (xlsx-export [{:id 0, :name "Col1"}]
{}
[["abcdef"] ["abcedf"]]
parse-column-width))]
(is (<= 2800 col-width 2900)))
(let [[col-width] (second (xlsx-export [{:id 0, :name "Col1"}]
{}
[["abcdef"] ["abcedf"] ["abcdef"]]
parse-column-width))]
(is (<= 2800 col-width 2900)))))
(testing "An auto-sized column does not exceed max-column-width (the width of 255 characters)"
(let [[col-width] (second (xlsx-export [{:id 0, :name "Col1"}]
{}
[[(apply str (repeat 256 "0"))]]
parse-column-width))]
(is (= 65280 col-width)))))
(deftest poi-tempfiles-test
(testing "POI temporary files are cleaned up if output stream is closed before export completes (#19480)"
(let [poifiles-directory (io/file (str (System/getProperty "java.io.tmpdir") "/poifiles"))
expected-poifiles-count (count (file-seq poifiles-directory))
TODO -- should n't these be using ` with - open ` ? !
bos (ByteArrayOutputStream.)
os (BufferedOutputStream. bos)
results-writer (qp.si/streaming-results-writer :xlsx os)]
(.close os)
(qp.si/begin! results-writer {:data {:ordered-cols []}} {})
(qp.si/finish! results-writer {:row_count 0})
;; No additional files should exist in the temp directory
(is (= expected-poifiles-count (count (file-seq poifiles-directory)))))))
(deftest dont-format-non-temporal-columns-as-temporal-columns-test
(testing "Don't format columns with temporal semantic type as datetime unless they're actually datetimes (#18729)"
(mt/dataset sample-dataset
(is (= [["CREATED_AT"]
[1.0]
[2.0]]
(xlsx-export [{:id 0
:semantic_type :type/CreationTimestamp
:unit :month-of-year
:name "CREATED_AT"
:effective_type :type/Integer
:base_type :type/Integer}]
{}
[[1]
[2]]))))))
| null | https://raw.githubusercontent.com/metabase/metabase/56b28e5b07e73002d5c507f583e3d64439ba8b8c/test/metabase/query_processor/streaming/xlsx_test.clj | clojure | +----------------------------------------------------------------------------------------------------------------+
| Format string generation unit tests |
+----------------------------------------------------------------------------------------------------------------+
are equal, just return a single value to make tests more readable.
falls back to 0
Custom separators are not supported
Scale should not affect format string since it is applied to the actual data prior to export
Prefix/suffix on general number format
Prefix/suffix on number format w/fixed decimal count
Prefix/suffix on percentage
Prefix/suffix on scientific notation
Prefix/suffix on currency
time-enabled overrides time-styled
+----------------------------------------------------------------------------------------------------------------+
| XLSX export tests |
+----------------------------------------------------------------------------------------------------------------+
These are tests that generate an XLSX binary and then parse and assert on its contents, to test logic and value
formatting that is specific to the XLSX format. These do NOT test any of the column ordering logic in
`metabase.query-processor.streaming`, or anything that happens in the API handlers for generating exports.
Dollar symbol is included by default if semantic type of column derives from :type/Currency
Currency code is used if requested in viz settings
Currency name is used if requested in viz settings
Currency type from viz settings is respected
Falls back to code if native symbol is not supported
Currency not included unless semantic type of column derives from :type/Currency
Currency not included if ::mb.viz/currency-in-header is false
These values apparently are represented as error codes, which are parsed here into keywords
Provide a marign for error since width measurements end up being slightly different on CI
No additional files should exist in the temp directory | (ns metabase.query-processor.streaming.xlsx-test
(:require
[cheshire.generate :as json.generate]
[clojure.java.io :as io]
[clojure.test :refer :all]
[dk.ative.docjure.spreadsheet :as spreadsheet]
[metabase.driver :as driver]
[metabase.query-processor.streaming.interface :as qp.si]
[metabase.query-processor.streaming.xlsx :as qp.xlsx]
[metabase.shared.models.visualization-settings :as mb.viz]
[metabase.test :as mt])
(:import
(com.fasterxml.jackson.core JsonGenerator)
(java.io BufferedInputStream BufferedOutputStream ByteArrayInputStream ByteArrayOutputStream)))
(set! *warn-on-reflection* true)
(defn- format-string
([format-settings]
(format-string format-settings nil))
([format-settings col]
(let [format-strings (@#'qp.xlsx/format-settings->format-strings format-settings col)]
If only one format string is returned ( for datetimes ) or both format strings
(cond
(= (count format-strings) 1)
(first format-strings)
(= (first format-strings) (second format-strings))
(first format-strings)
:else
format-strings))))
(deftest format-settings->format-string-test
(mt/with-temporary-setting-values [custom-formatting {}]
(testing "Empty format settings don't produce a format string"
(is (nil? (format-string {}))))
(testing "General number formatting"
(testing "number-style (non-currency)"
(is (= ["#,##0" "#,##0.##"] (format-string {::mb.viz/number-style "decimal"})))
(is (= "#,##0.00%" (format-string {::mb.viz/number-style "percent"})))
(is (= "#,##0.00E+0" (format-string {::mb.viz/number-style "scientific"}))))
(testing "Decimals"
(is (= "#,##0" (format-string {::mb.viz/decimals 0, ::mb.viz/number-style "decimal"})))
(is (= "#,##0%" (format-string {::mb.viz/decimals 0, ::mb.viz/number-style "percent"})))
(is (= "#,##0E+0" (format-string {::mb.viz/decimals 0, ::mb.viz/number-style "scientific"})))
(is (= "[$$]#,##0" (format-string {::mb.viz/decimals 0,
::mb.viz/currency-in-header false,
::mb.viz/number-style "currency"})))
(is (= "#,##0.000" (format-string {::mb.viz/decimals 3, ::mb.viz/number-style "decimal"})))
(is (= "#,##0.000%" (format-string {::mb.viz/decimals 3, ::mb.viz/number-style "percent"})))
(is (= "#,##0.000E+0" (format-string {::mb.viz/decimals 3, ::mb.viz/number-style "scientific"})))
(is (= "[$$]#,##0.000" (format-string {::mb.viz/decimals 3,
::mb.viz/currency-in-header false,
::mb.viz/number-style "currency"})))
(is (= "#,##0" (format-string {::mb.viz/decimals -1, ::mb.viz/number-style "decimal"})))
(is (= "#,##0%" (format-string {::mb.viz/decimals -1, ::mb.viz/number-style "percent"})))
(is (= "#,##0E+0" (format-string {::mb.viz/decimals -1, ::mb.viz/number-style "scientific"})))
(is (= "[$$]#,##0" (format-string {::mb.viz/decimals -1,
::mb.viz/currency-in-header false,
::mb.viz/number-style "currency"})))
Thousands separator can be omitted
(is (= ["###0" "###0.##"] (format-string {::mb.viz/number-separators "."})))
(is (= ["#,##0" "#,##0.##"] (format-string {::mb.viz/number-separators ", "})))
(is (= ["#,##0" "#,##0.##"] (format-string {::mb.viz/number-separators ".,"})))
(is (= ["#,##0" "#,##0.##"] (format-string {::mb.viz/number-separators ".’"}))))
(testing "Scale"
(is (= ["#,##0" "#,##0.##"] (format-string {::mb.viz/scale 2})))
(is (= "#,##0.00" (format-string {::mb.viz/scale 2, ::mb.viz/decimals 2}))))
(testing "Prefix and suffix"
(is (= ["\"prefix\"#,##0"
"\"prefix\"#,##0.##"] (format-string {::mb.viz/prefix "prefix"})))
(is (= ["#,##0\"suffix\""
"#,##0.##\"suffix\""] (format-string {::mb.viz/suffix "suffix"})))
(is (= ["\"prefix\"#,##0\"suffix\""
"\"prefix\"#,##0.##\"suffix\""] (format-string {::mb.viz/prefix "prefix",
::mb.viz/suffix "suffix"})))
(is (= "\"prefix\"#,##0.00" (format-string {::mb.viz/decimals 2,
::mb.viz/prefix "prefix"})))
(is (= "#,##0.00\"suffix\"" (format-string {::mb.viz/decimals 2,
::mb.viz/suffix "suffix"})))
(is (= "\"prefix\"#,##0.00\"suffix\"" (format-string {::mb.viz/decimals 2,
::mb.viz/prefix "prefix",
::mb.viz/suffix "suffix"})))
(is (= "\"prefix\"#,##0.00%\"suffix\"" (format-string {::mb.viz/number-style "percent",
::mb.viz/prefix "prefix",
::mb.viz/suffix "suffix"})))
(is (= "\"prefix\"#,##0.00E+0\"suffix\"" (format-string {::mb.viz/number-style "scientific",
::mb.viz/prefix "prefix",
::mb.viz/suffix "suffix"})))
(is (= "\"prefix\"[$$]#,##0.00\"suffix\"" (format-string {::mb.viz/currency-in-header false,
::mb.viz/number-style "currency",
::mb.viz/prefix "prefix",
::mb.viz/suffix "suffix"})))))
(testing "Currency formatting"
(let [price-col {:semantic_type :type/Price, :effective_type :type/Float}]
(testing "Default currency formatting is dollar sign"
(is (= "[$$]#,##0.00" (format-string {::mb.viz/currency-in-header false} price-col))))
(testing "Uses native currency symbol if supported"
(is (= "[$$]#,##0.00" (format-string {::mb.viz/currency-in-header false, ::mb.viz/currency "USD"} price-col)))
(is (= "[$CA$]#,##0.00" (format-string {::mb.viz/currency-in-header false, ::mb.viz/currency "CAD"} price-col)))
(is (= "[$€]#,##0.00" (format-string {::mb.viz/currency-in-header false, ::mb.viz/currency "EUR"} price-col)))
(is (= "[$¥]#,##0.00" (format-string {::mb.viz/currency-in-header false, ::mb.viz/currency "JPY"} price-col))))
(testing "Falls back to code if native symbol not supported"
(is (= "[$KGS] #,##0.00" (format-string {::mb.viz/currency-in-header false, ::mb.viz/currency "KGS"} price-col)))
(is (= "[$KGS] #,##0.00" (format-string {::mb.viz/currency-in-header false,
::mb.viz/currency "KGS",
::mb.viz/currency-style "symbol"}
price-col))))
(testing "Respects currency-style option"
(is (= "[$$]#,##0.00" (format-string {::mb.viz/currency-in-header false,
::mb.viz/currency-style "symbol"}
price-col)))
(is (= "[$USD] #,##0.00" (format-string {::mb.viz/currency-in-header false,
::mb.viz/currency-style "code"}
price-col)))
(is (= "#,##0.00\" US dollars\"" (format-string {::mb.viz/currency-in-header false,
::mb.viz/currency-style "name"}
price-col)))
(is (= "[$€]#,##0.00" (format-string {::mb.viz/currency-in-header false,
::mb.viz/currency "EUR",
::mb.viz/currency-style "symbol"}
price-col)))
(is (= "[$EUR] #,##0.00" (format-string {::mb.viz/currency-in-header false,
::mb.viz/currency "EUR",
::mb.viz/currency-style "code"}
price-col)))
(is (= "#,##0.00\" euros\"" (format-string {::mb.viz/currency-in-header false,
::mb.viz/currency "EUR",
::mb.viz/currency-style "name"}
price-col))))
(testing "Currency not included for non-currency semantic types"
(is (= "#,##0.00" (format-string {::mb.viz/currency-in-header false} {:semantic_type :type/Quantity}))))
(testing "Formatting options are ignored if currency-in-header is true or absent (defaults to true)"
(is (= "#,##0.00" (format-string {::mb.viz/currency-style "symbol"} price-col)))
(is (= "#,##0.00" (format-string {::mb.viz/currency-style "name"} price-col)))
(is (= "#,##0.00" (format-string {::mb.viz/currency-style "code"} price-col)))
(is (= "#,##0.00" (format-string {::mb.viz/currency "USD"} price-col)))
(is (= "#,##0.00" (format-string {::mb.viz/currency "EUR"} price-col)))
(is (= "#,##0.00" (format-string {::currency-in-header true, ::mb.viz/currency-style "symbol"} price-col)))
(is (= "#,##0.00" (format-string {::currency-in-header true, ::mb.viz/currency-style "name"} price-col)))
(is (= "#,##0.00" (format-string {::currency-in-header true, ::mb.viz/currency-style "code"} price-col)))
(is (= "#,##0.00" (format-string {::currency-in-header true, ::mb.viz/currency "USD"} price-col)))
(is (= "#,##0.00" (format-string {::currency-in-header true, ::mb.viz/currency "EUR"} price-col))))
(testing "Global localization settings are incorporated with lower precedence than column format settings"
(mt/with-temporary-setting-values [custom-formatting {:type/Currency {:currency "EUR",
:currency_in_header false,
:currency_style "code"}}]
(is (= "[$EUR] #,##0.00" (format-string {} price-col)))
(is (= "[$CAD] #,##0.00" (format-string {::mb.viz/currency "CAD"} price-col)))
(is (= "[$€]#,##0.00" (format-string {::mb.viz/currency-style "symbol"} price-col)))
(is (= "#,##0.00" (format-string {::mb.viz/currency-in-header true} price-col)))))))
(testing "Datetime formatting"
(let [date-col {:semantic_type :type/CreationTimestamp, :effective_type :type/Temporal}]
(testing "date-style"
(is (= "m/d/yyyy, h:mm am/pm" (format-string {::mb.viz/date-style "M/D/YYYY"} date-col)))
(is (= "d/m/yyyy, h:mm am/pm" (format-string {::mb.viz/date-style "D/M/YYYY"} date-col)))
(is (= "yyyy/m/d, h:mm am/pm" (format-string {::mb.viz/date-style "YYYY/M/D"} date-col)))
(is (= "mmmm d, yyyy, h:mm am/pm" (format-string {::mb.viz/date-style "MMMM D, YYYY"} date-col)))
(is (= "dmmmm, yyyy, h:mm am/pm" (format-string {::mb.viz/date-style "DMMMM, YYYY"} date-col)))
(is (= "dddd, mmmm d, yyyy, h:mm am/pm" (format-string {::mb.viz/date-style "dddd, MMMM D, YYYY"} date-col))))
(testing "date-separator"
(is (= "m/d/yyyy, h:mm am/pm" (format-string {::mb.viz/date-style "M/D/YYYY", ::mb.viz/date-separator "/"} date-col)))
(is (= "m.d.yyyy, h:mm am/pm" (format-string {::mb.viz/date-style "M/D/YYYY", ::mb.viz/date-separator "."} date-col)))
(is (= "m-d-yyyy, h:mm am/pm" (format-string {::mb.viz/date-style "M/D/YYYY", ::mb.viz/date-separator "-"} date-col))))
(testing "date-abbreviate"
(is (= "mmm d, yyyy, h:mm am/pm" (format-string {::mb.viz/date-abbreviate true} date-col)))
(is (= "mmmm d, yyyy, h:mm am/pm" (format-string {::mb.viz/date-abbreviate false} date-col)))
(is (= "ddd, mmm d, yyyy, h:mm am/pm" (format-string {::mb.viz/date-abbreviate true
::mb.viz/date-style, "dddd, MMMM D, YYYY"} date-col)))
(is (= "dddd, mmmm d, yyyy, h:mm am/pm" (format-string {::mb.viz/date-abbreviate false
::mb.viz/date-style, "dddd, MMMM D, YYYY"} date-col))))
(testing "time-style"
(is (= "mmmm d, yyyy, hh:mm" (format-string {::mb.viz/time-style "HH:mm"} date-col)))
(is (= "mmmm d, yyyy, hh:mm" (format-string {::mb.viz/time-style "k:mm"} date-col)))
(is (= "mmmm d, yyyy, h:mm am/pm" (format-string {::mb.viz/time-style "h:mm A"} date-col)))
(is (= "mmmm d, yyyy, h am/pm" (format-string {::mb.viz/time-style "h A"} date-col))))
(testing "time-enabled"
(is (= "mmmm d, yyyy" (format-string {::mb.viz/time-enabled nil} date-col)))
(is (= "mmmm d, yyyy, h:mm am/pm" (format-string {::mb.viz/time-enabled "minutes"} date-col)))
(is (= "mmmm d, yyyy, h:mm:ss am/pm" (format-string {::mb.viz/time-enabled "seconds"} date-col)))
(is (= "mmmm d, yyyy, h:mm:ss.000 am/pm" (format-string {::mb.viz/time-enabled "milliseconds"} date-col)))
(is (= "mmmm d, yyyy" (format-string {::mb.viz/time-style "h:mm A", ::mb.viz/time-enabled nil} date-col))))
(testing ":unit values on temporal breakout fields"
(let [month-col (assoc date-col :unit :month)
year-col (assoc date-col :unit :year)]
(is (= "mmmm, yyyy" (format-string {} month-col)))
(is (= "m/yyyy" (format-string {::mb.viz/date-style "M/D/YYYY"} month-col)))
(is (= "yyyy/m" (format-string {::mb.viz/date-style "YYYY/M/D"} month-col)))
(is (= "mmmm, yyyy" (format-string {::mb.viz/date-style "MMMM D, YYYY"} month-col)))
(is (= "mmmm, yyyy" (format-string {::mb.viz/date-style "D MMMM, YYYY"} month-col)))
(is (= "mmmm, yyyy" (format-string {::mb.viz/date-style "DDDD, MMMM D, YYYY"} month-col)))
(is (= "yyyy" (format-string {} year-col)))
(is (= "yyyy" (format-string {::mb.viz/date-style "M/D/YYYY"} year-col)))))
(testing "misc combinations"
(is (= "yyyy.m.d, h:mm:ss am/pm" (format-string {::mb.viz/date-style "YYYY/M/D",
::mb.viz/date-separator ".",
::mb.viz/time-style "h:mm A",
::mb.viz/time-enabled "seconds"} date-col)))
(is (= "dddd, mmmm d, yyyy, hh:mm:ss.000" (format-string {::mb.viz/date-style "dddd, MMMM D, YYYY",
::mb.viz/time-style "HH:mm",
::mb.viz/time-enabled "milliseconds"} date-col))))
(testing "Global localization settings are incorporated with lower precedence than column format settings"
(mt/with-temporary-setting-values [custom-formatting {:type/Temporal {:date_style "YYYY/M/D",
:date_separator ".",
:time_style "HH:mm"}}]
(is (= "yyyy.m.d, hh:mm" (format-string {} date-col)))
(is (= "d.m.yyyy, hh:mm" (format-string {::mb.viz/date-style "D/M/YYYY"} date-col)))
(is (= "yyyy-m-d, hh:mm" (format-string {::mb.viz/date-separator "-"} date-col)))
(is (= "yyyy.m.d, h:mm am/pm" (format-string {::mb.viz/time-style "h:mm A"} date-col)))))))
(testing "primary key and foreign key formatting"
(is (= "0" (format-string {} {:semantic_type :type/PK})))
(is (= "0" (format-string {} {:semantic_type :type/FK}))))))
(defn parse-cell-content
"Parses an XLSX sheet and returns the raw data in each row"
[sheet]
(mapv (fn [row]
(mapv spreadsheet/read-cell row))
(spreadsheet/into-seq sheet)))
(defn parse-xlsx-results
"Given a byte array representing an XLSX document, parses the query result sheet using the provided `parse-fn`"
([bytea]
(parse-xlsx-results bytea parse-cell-content))
([bytea parse-fn]
(with-open [is (BufferedInputStream. (ByteArrayInputStream. bytea))]
(let [workbook (spreadsheet/load-workbook-from-stream is)
sheet (spreadsheet/select-sheet "Query result" workbook)]
(parse-fn sheet)))))
(defn- xlsx-export
([ordered-cols viz-settings rows]
(xlsx-export ordered-cols viz-settings rows parse-cell-content))
([ordered-cols viz-settings rows parse-fn]
(with-open [bos (ByteArrayOutputStream.)
os (BufferedOutputStream. bos)]
(let [results-writer (qp.si/streaming-results-writer :xlsx os)]
(qp.si/begin! results-writer {:data {:ordered-cols ordered-cols}} viz-settings)
(doall (map-indexed
(fn [i row] (qp.si/write-row! results-writer row i ordered-cols viz-settings))
rows))
(qp.si/finish! results-writer {:row_count (count rows)}))
(let [bytea (.toByteArray bos)]
(parse-xlsx-results bytea parse-fn)))))
(defn- parse-format-strings
[sheet]
(for [^org.apache.poi.ss.usermodel.Row row (spreadsheet/into-seq sheet)]
(map (fn [^org.apache.poi.xssf.usermodel.XSSFCell cell]
(.. cell getCellStyle getDataFormatString))
row)))
(deftest export-format-test
(testing "Different format strings are used for ints and numbers that round to ints (with 2 decimal places)"
(is (= [["#,##0"] ["#,##0.##"] ["#,##0"] ["#,##0.##"] ["#,##0"] ["#,##0.##"]]
(rest (xlsx-export [{:id 0, :name "Col", :semantic_type :type/Cost}]
{}
[[1] [1.23] [1.004] [1.005] [10000000000] [10000000000.123]]
parse-format-strings)))))
(testing "Misc format strings are included correctly in exports"
(is (= ["[$€]#,##0.00"]
(second (xlsx-export [{:id 0, :name "Col", :semantic_type :type/Cost}]
{::mb.viz/column-settings {{::mb.viz/field-id 0}
{::mb.viz/currency "EUR"
::mb.viz/currency-in-header false}}}
[[1.23]]
parse-format-strings))))
(is (= ["yyyy.m.d, h:mm:ss am/pm"]
(second (xlsx-export [{:id 0, :name "Col", :effective_type :type/Temporal}]
{::mb.viz/column-settings {{::mb.viz/field-id 0}
{::mb.viz/date-style "YYYY/M/D",
::mb.viz/date-separator ".",
::mb.viz/time-style "h:mm A",
::mb.viz/time-enabled "seconds"}}}
[[#t "2020-03-28T10:12:06.681"]]
parse-format-strings))))))
(deftest column-order-test
(testing "Column titles are ordered correctly in the output"
(is (= ["Col1" "Col2"]
(first (xlsx-export [{:id 0, :name "Col1"} {:id 1, :name "Col2"}] {} []))))
(is (= ["Col2" "Col1"]
(first (xlsx-export [{:id 0, :name "Col2"} {:id 1, :name "Col1"}] {} [])))))
(testing "Data in each row is reordered by output-order prior to export"
(is (= [["b" "a"] ["d" "c"]]
(rest (xlsx-export [{:id 0, :name "Col1"} {:id 1, :name "Col2"}]
{:output-order [1 0]}
[["a" "b"] ["c" "d"]])))))
(testing "Rows not included by index in output-order are excluded from export"
(is (= [["b"] ["d"]]
(rest (xlsx-export [{:id 0, :name "Col1"} {:id 1, :name "Col2"}]
{:output-order [1]}
[["a" "b"] ["c" "d"]]))))))
(deftest column-title-test
(testing "::mb.viz/column-title precedence over :display_name, which takes precendence over :name"
(is (= ["Display name"]
(first (xlsx-export [{:id 0, :display_name "Display name", :name "Name"}] {} []))))
(is (= ["Column title"]
(first (xlsx-export [{:id 0, :display_name "Display name", :name "Name"}]
{::mb.viz/column-settings {{::mb.viz/field-id 0} {::mb.viz/column-title "Column title"}}}
[]))))
Columns can be correlated to viz settings by : name if : i d is missing ( e.g. for native queries )
(is (= ["Column title"]
(first (xlsx-export [{:display_name "Display name", :name "Name"}]
{::mb.viz/column-settings {{::mb.viz/column-name "Name"} {::mb.viz/column-title "Column title"}}}
[])))))
(testing "Currency is included in column title if necessary"
(is (= ["Col ($)"]
(first (xlsx-export [{:id 0, :name "Col", :semantic_type :type/Cost}]
{::mb.viz/column-settings {::mb.viz/field-id 0}}
[]))))
(is (= ["Col (USD)"]
(first (xlsx-export [{:id 0, :name "Col", :semantic_type :type/Cost}]
{::mb.viz/column-settings {{::mb.viz/field-id 0}
{::mb.viz/currency "USD",
::mb.viz/currency-style "code"}}}
[]))))
(is (= ["Col (US dollars)"]
(first (xlsx-export [{:id 0, :name "Col", :semantic_type :type/Cost}]
{::mb.viz/column-settings {{::mb.viz/field-id 0}
{::mb.viz/currency "USD",
::mb.viz/currency-style "name"}}}
[]))))
(is (= ["Col (€)"]
(first (xlsx-export [{:id 0, :name "Col", :semantic_type :type/Cost}]
{::mb.viz/column-settings {{::mb.viz/field-id 0} {::mb.viz/currency "EUR"}}}
[]))))
(is (= ["Col (KGS)"]
(first (xlsx-export [{:id 0, :name "Col", :semantic_type :type/Cost}]
{::mb.viz/column-settings {{::mb.viz/field-id 0}
{::mb.viz/currency "KGS", ::mb.viz/currency-style "symbol"}}}
[]))))
(is (= ["Col"]
(first (xlsx-export [{:id 0, :name "Col"}]
{::mb.viz/column-settings {{::mb.viz/field-id 0} {::mb.viz/currency "USD"}}}
[]))))
(is (= ["Col"]
(first (xlsx-export [{:id 0, :name "Col", :semantic_type :type/Cost}]
{::mb.viz/column-settings {{::mb.viz/field-id 0}
{::mb.viz/currency "USD",
::mb.viz/currency-style "code",
::mb.viz/currency-in-header false}}}
[])))))
(testing "If a col is remapped to a foreign key field, the title is taken from the viz settings for its fk_field_id (#18573)"
(is (= ["Correct title"]
(first (xlsx-export [{:id 0, :fk_field_id 1, :remapped_from "FIELD_1"}]
{::mb.viz/column-settings {{::mb.viz/field-id 0} {::mb.viz/column-title "Incorrect title"}
{::mb.viz/field-id 1} {::mb.viz/column-title "Correct title"}}}
[]))))))
(deftest scale-test
(testing "scale is applied to data prior to export"
(is (= [2.0]
(second (xlsx-export [{:id 0, :name "Col"}]
{::mb.viz/column-settings {{::mb.viz/field-id 0} {::mb.viz/scale 2}}}
[[1.0]]))))))
(deftest misc-data-test
(testing "nil values"
(is (= [nil]
(second (xlsx-export [{:id 0, :name "Col"}] {} [[nil]])))))
(testing "Boolean values"
(is (= [[true] [false]]
(rest (xlsx-export [{:id 0, :name "Col"}] {} [[true] [false]])))))
(testing "ints"
(is (= [1.0]
(second (xlsx-export [{:id 0, :name "Col"}] {} [[1]])))))
(testing "bigints"
(is (= [1.0]
(second (xlsx-export [{:id 0, :name "Col"}] {} [[1N]])))))
(testing "bigdecimals"
(is (= [1.23]
(second (xlsx-export [{:id 0, :name "Col"}] {} [[1.23M]])))))
(testing "numbers that round to ints"
(is (= [2.00001]
(second (xlsx-export [{:id 0, :name "Col"}] {} [[2.00001]])))))
(testing "numbers that do not round to ints"
(is (= [123.123]
(second (xlsx-export [{:id 0, :name "Col"}] {} [[123.123]])))))
(testing "LocalDate"
(is (= [#inst "2020-03-28T00:00:00.000-00:00"]
(second (xlsx-export [{:id 0, :name "Col"}] {} [[#t "2020-03-28"]])))))
(testing "LocalDateTime"
(is (= [#inst "2020-03-28T10:12:06.681-00:00"]
(second (xlsx-export [{:id 0, :name "Col"}] {} [[#t "2020-03-28T10:12:06.681"]])))))
(testing "LocalTime"
(is (= [#inst "1899-12-31T10:12:06.000-00:00"]
(second (xlsx-export [{:id 0, :name "Col"}] {} [[#t "10:12:06.681"]])))))
(testing "LocalDateTime formatted as a string; should be parsed when *parse-temporal-string-values* is true"
(is (= ["2020-03-28T10:12:06.681"]
(second (xlsx-export [{:id 0, :name "Col"}] {} [["2020-03-28T10:12:06.681"]]))))
(binding [qp.xlsx/*parse-temporal-string-values* true]
(is (= [#inst "2020-03-28T10:12:06.681"]
(second (xlsx-export [{:id 0, :name "Col"}] {} [["2020-03-28T10:12:06.681"]]))))))
(mt/with-everything-store
(binding [driver/*driver* :h2]
(testing "OffsetDateTime"
(is (= [#inst "2020-03-28T13:33:06.000-00:00"]
(second (xlsx-export [{:id 0, :name "Col"}] {} [[#t "2020-03-28T10:12:06Z-03:21"]])))))
(testing "OffsetTime"
(is (= [#inst "1899-12-31T10:12:06.000-00:00"]
(second (xlsx-export [{:id 0, :name "Col"}] {} [[#t "10:12:06Z-03:21"]])))))
(testing "ZonedDateTime"
(is (= [#inst "2020-03-28T10:12:06.000-00:00"]
(second (xlsx-export [{:id 0, :name "Col"}] {} [[#t "2020-03-28T10:12:06Z"]])))))))
(testing "Strings representing country names/codes don't error when *parse-temporal-string-values* is true (#18724)"
(binding [qp.xlsx/*parse-temporal-string-values* true]
(is (= ["GB"]
(second (xlsx-export [{:id 0, :name "Col"}] {} [["GB"]]))))
(is (= ["Portugal"]
(second (xlsx-export [{:id 0, :name "Col"}] {} [["Portugal"]]))))))
(testing "NaN and infinity values (#21343)"
(is (= [:NUM]
(second (xlsx-export [{:id 0, :name "Col"}] {} [[##NaN]]))))
(is (= [:DIV0]
(second (xlsx-export [{:id 0, :name "Col"}] {} [[##Inf]]))))
(is (= [:DIV0]
(second (xlsx-export [{:id 0, :name "Col"}] {} [[##-Inf]]))))))
(defrecord ^:private SampleNastyClass [^String v])
(json.generate/add-encoder
SampleNastyClass
(fn [obj, ^JsonGenerator json-generator]
(.writeString json-generator (str (:v obj)))))
(defrecord ^:private AnotherNastyClass [^String v])
(deftest encode-strange-classes-test
(testing (str "Make sure that we're piggybacking off of the JSON encoding logic when encoding strange values in "
"XLSX (#5145, #5220, #5459)")
(is (= ["Hello XLSX World!" "{:v \"No Encoder\"}"]
(second (xlsx-export [{:name "val1"} {:name "val2"}]
{}
[[(SampleNastyClass. "Hello XLSX World!") (AnotherNastyClass. "No Encoder")]]))))))
(defn- parse-column-width
[^org.apache.poi.ss.usermodel.Sheet sheet]
(for [^org.apache.poi.ss.usermodel.Row row (spreadsheet/into-seq sheet)]
(for [i (range (.getLastCellNum row))]
(.getColumnWidth sheet i))))
(deftest auto-sizing-test
(testing "Columns in export are autosized to fit their content"
(let [[col1-width col2-width] (second (xlsx-export [{:id 0, :name "Col1"} {:id 1, :name "Col2"}]
{}
[["a" "abcdefghijklmnopqrstuvwxyz"]]
parse-column-width))]
(is (<= 2300 col1-width 2400))
(is (<= 7950 col2-width 8200))))
(testing "Auto-sizing works when the number of rows is at or above the auto-sizing threshold"
(binding [qp.xlsx/*auto-sizing-threshold* 2]
(let [[col-width] (second (xlsx-export [{:id 0, :name "Col1"}]
{}
[["abcdef"] ["abcedf"]]
parse-column-width))]
(is (<= 2800 col-width 2900)))
(let [[col-width] (second (xlsx-export [{:id 0, :name "Col1"}]
{}
[["abcdef"] ["abcedf"] ["abcdef"]]
parse-column-width))]
(is (<= 2800 col-width 2900)))))
(testing "An auto-sized column does not exceed max-column-width (the width of 255 characters)"
(let [[col-width] (second (xlsx-export [{:id 0, :name "Col1"}]
{}
[[(apply str (repeat 256 "0"))]]
parse-column-width))]
(is (= 65280 col-width)))))
(deftest poi-tempfiles-test
(testing "POI temporary files are cleaned up if output stream is closed before export completes (#19480)"
(let [poifiles-directory (io/file (str (System/getProperty "java.io.tmpdir") "/poifiles"))
expected-poifiles-count (count (file-seq poifiles-directory))
TODO -- should n't these be using ` with - open ` ? !
bos (ByteArrayOutputStream.)
os (BufferedOutputStream. bos)
results-writer (qp.si/streaming-results-writer :xlsx os)]
(.close os)
(qp.si/begin! results-writer {:data {:ordered-cols []}} {})
(qp.si/finish! results-writer {:row_count 0})
(is (= expected-poifiles-count (count (file-seq poifiles-directory)))))))
(deftest dont-format-non-temporal-columns-as-temporal-columns-test
(testing "Don't format columns with temporal semantic type as datetime unless they're actually datetimes (#18729)"
(mt/dataset sample-dataset
(is (= [["CREATED_AT"]
[1.0]
[2.0]]
(xlsx-export [{:id 0
:semantic_type :type/CreationTimestamp
:unit :month-of-year
:name "CREATED_AT"
:effective_type :type/Integer
:base_type :type/Integer}]
{}
[[1]
[2]]))))))
|
564a20a06b2a541af0696346d2a1dfa8827842d82da639a60d19e345248d9fcb | Leystryku/mpbomberman_racket | cl_render.rkt | #lang racket
;; imports
(require 2htdp/image)
(require lang/posn)
(require "cl_helper.rkt")
(require "cl_render_game.rkt")
(require "cl_render_titlescreen.rkt")
(require "sh_config.rkt")
(require "sh_helper.rkt")
(require "sh_structs.rkt")
(require "sh_config_textures.rkt")
;; exports
(provide (all-defined-out))
[ ] Calls the fitting render function for our current state to render the game
(define (renderHandlerCond currentWorld)
(define curState (clientsideWorld-curState currentWorld))
(case curState
[("titlescreen") (renderTitlescreen currentWorld "HIT ENTER TO JOIN THE GAME" (* gameWidth 0.30)) ]
[("loadingscreen") (renderTitlescreen currentWorld "LOADING..." (* gameWidth 0.45))]
[("ingame") (renderGame currentWorld) ]
[("gameover") (renderTitlescreen currentWorld (generateWinnersText currentWorld) (* gameWidth 0.30)) ]
[("resetscreen") (renderTitlescreen currentWorld "PLAYERS LEFT, REBOOT GAME" (* gameWidth 0.30)) ]
[else (text (string-append (clientsideWorld-curState currentWorld) "IS NOT A INVALID STATE!") 12 "red")]
)
)
;; [renderHandlerRedraw] Calls the rendlerHanderCond since the frame needs to be redrawn
(define (rendlerHandlerRedraw currentWorld)
(place-images/align
(list
(renderHandlerCond currentWorld)
)
(list
(make-posn 0 0)
)
"left"
"top"
(empty-scene gameWidth gameHeight "black")
)
)
;; [renderHandlerRedrawWithCache] Calls [rendlerHandlerRedraw] and caches the new frame
(define (renderHandlerRedrawWithCache currentWorld)
(define newFrame (rendlerHandlerRedraw currentWorld))
(and
(set-clientsideWorld-renderCache!
currentWorld
(list
newFrame
(current-inexact-milliseconds)
)
)
newFrame
)
)
[ rendlerHandlerShouldRedraw ] Checks whether the frame should be redrawn to fit to 60FPS
(define (rendlerHandlerShouldRedraw lastRenderTime)
(if lastRenderTime
(>= (- (current-inexact-milliseconds) lastRenderTime) (/ 1 gameFPSRender))
#t
)
)
;; [renderHandler] Calls [renderHandlerRedrawWithCache] with a black canvas as background
(define (renderHandler currentWorld)
(define curCache (clientsideWorld-renderCache currentWorld))
(if (rendlerHandlerShouldRedraw (clientsideWorld-renderLastTime currentWorld))
(renderHandlerRedrawWithCache currentWorld)
curCache
)
)
| null | https://raw.githubusercontent.com/Leystryku/mpbomberman_racket/059d95040cfad2e27237f8dd41fc32a4fc698afe/game/cl_render.rkt | racket | imports
exports
[renderHandlerRedraw] Calls the rendlerHanderCond since the frame needs to be redrawn
[renderHandlerRedrawWithCache] Calls [rendlerHandlerRedraw] and caches the new frame
[renderHandler] Calls [renderHandlerRedrawWithCache] with a black canvas as background | #lang racket
(require 2htdp/image)
(require lang/posn)
(require "cl_helper.rkt")
(require "cl_render_game.rkt")
(require "cl_render_titlescreen.rkt")
(require "sh_config.rkt")
(require "sh_helper.rkt")
(require "sh_structs.rkt")
(require "sh_config_textures.rkt")
(provide (all-defined-out))
[ ] Calls the fitting render function for our current state to render the game
(define (renderHandlerCond currentWorld)
(define curState (clientsideWorld-curState currentWorld))
(case curState
[("titlescreen") (renderTitlescreen currentWorld "HIT ENTER TO JOIN THE GAME" (* gameWidth 0.30)) ]
[("loadingscreen") (renderTitlescreen currentWorld "LOADING..." (* gameWidth 0.45))]
[("ingame") (renderGame currentWorld) ]
[("gameover") (renderTitlescreen currentWorld (generateWinnersText currentWorld) (* gameWidth 0.30)) ]
[("resetscreen") (renderTitlescreen currentWorld "PLAYERS LEFT, REBOOT GAME" (* gameWidth 0.30)) ]
[else (text (string-append (clientsideWorld-curState currentWorld) "IS NOT A INVALID STATE!") 12 "red")]
)
)
(define (rendlerHandlerRedraw currentWorld)
(place-images/align
(list
(renderHandlerCond currentWorld)
)
(list
(make-posn 0 0)
)
"left"
"top"
(empty-scene gameWidth gameHeight "black")
)
)
(define (renderHandlerRedrawWithCache currentWorld)
(define newFrame (rendlerHandlerRedraw currentWorld))
(and
(set-clientsideWorld-renderCache!
currentWorld
(list
newFrame
(current-inexact-milliseconds)
)
)
newFrame
)
)
[ rendlerHandlerShouldRedraw ] Checks whether the frame should be redrawn to fit to 60FPS
(define (rendlerHandlerShouldRedraw lastRenderTime)
(if lastRenderTime
(>= (- (current-inexact-milliseconds) lastRenderTime) (/ 1 gameFPSRender))
#t
)
)
(define (renderHandler currentWorld)
(define curCache (clientsideWorld-renderCache currentWorld))
(if (rendlerHandlerShouldRedraw (clientsideWorld-renderLastTime currentWorld))
(renderHandlerRedrawWithCache currentWorld)
curCache
)
)
|
9f524ee942fde60112ec8288bfc48cd42c235c46b51fbd64fa742a9b7c2de572 | jfacorro/klarna-loves-erlang-meetup-2020 | bank_proper_utils.erl | -module(bank_proper_utils).
-export([ request/2
, request/4
]).
-type response() :: #{ status := integer()
, headers := map()
, body := iolist()
}.
-export_type([response/0]).
-spec request(atom(), string()) -> response().
request(Method, Url) ->
request(Method, Url, undefined, undefined).
-spec request(atom(), iolist(), iolist(), string()) -> response().
request(Method, Url0, Body, ContentType) ->
Url = binary_to_list(iolist_to_binary(Url0)),
Headers = headers(),
Request = case Body of
undefined -> {Url, Headers};
_ -> {Url, Headers, ContentType, Body}
end,
HTTPOptions = [{autoredirect, true}],
Options = [],
%% Disable pipelining to avoid the socket getting closed during long runs
ok = httpc:set_options([ {max_keep_alive_length, 0}
, {max_pipeline_length, 0}
, {max_sessions, 0}
]),
Result = httpc:request(Method, Request, HTTPOptions, Options),
{ok, {{_Ver, Status, _Phrase}, RespHeaders, RespBody}} = Result,
Response = #{ status => Status
, headers => maps:from_list(RespHeaders)
, body => RespBody
},
decode_body(Response).
-spec headers() -> [{string(), string()}].
headers() ->
[ {"Accept", "application/json"}
| basic_auth()
].
-spec basic_auth() -> [{string(), string()}].
basic_auth() ->
case application:get_env(bank_proper, basic_auth, undefined) of
undefined -> [];
{Username, Password} ->
Credentials = base64:encode_to_string(Username ++ ":" ++ Password),
[{"Authorization", "Basic " ++ Credentials}]
end.
-spec decode_body(response()) -> response().
decode_body(#{ headers := #{"content-type" := "application/json"}
, body := Body
} = Response) ->
Json = jsx:decode( unicode:characters_to_binary(Body)
, [return_maps, {labels, atom}]
),
Response#{body_json => Json};
decode_body(Response) ->
Response.
| null | https://raw.githubusercontent.com/jfacorro/klarna-loves-erlang-meetup-2020/61795af0ac80ac7afa4a00a215342988e5aac45f/apps/bank_proper/src/bank_proper_utils.erl | erlang | Disable pipelining to avoid the socket getting closed during long runs | -module(bank_proper_utils).
-export([ request/2
, request/4
]).
-type response() :: #{ status := integer()
, headers := map()
, body := iolist()
}.
-export_type([response/0]).
-spec request(atom(), string()) -> response().
request(Method, Url) ->
request(Method, Url, undefined, undefined).
-spec request(atom(), iolist(), iolist(), string()) -> response().
request(Method, Url0, Body, ContentType) ->
Url = binary_to_list(iolist_to_binary(Url0)),
Headers = headers(),
Request = case Body of
undefined -> {Url, Headers};
_ -> {Url, Headers, ContentType, Body}
end,
HTTPOptions = [{autoredirect, true}],
Options = [],
ok = httpc:set_options([ {max_keep_alive_length, 0}
, {max_pipeline_length, 0}
, {max_sessions, 0}
]),
Result = httpc:request(Method, Request, HTTPOptions, Options),
{ok, {{_Ver, Status, _Phrase}, RespHeaders, RespBody}} = Result,
Response = #{ status => Status
, headers => maps:from_list(RespHeaders)
, body => RespBody
},
decode_body(Response).
-spec headers() -> [{string(), string()}].
headers() ->
[ {"Accept", "application/json"}
| basic_auth()
].
-spec basic_auth() -> [{string(), string()}].
basic_auth() ->
case application:get_env(bank_proper, basic_auth, undefined) of
undefined -> [];
{Username, Password} ->
Credentials = base64:encode_to_string(Username ++ ":" ++ Password),
[{"Authorization", "Basic " ++ Credentials}]
end.
-spec decode_body(response()) -> response().
decode_body(#{ headers := #{"content-type" := "application/json"}
, body := Body
} = Response) ->
Json = jsx:decode( unicode:characters_to_binary(Body)
, [return_maps, {labels, atom}]
),
Response#{body_json => Json};
decode_body(Response) ->
Response.
|
36ce1f3a964b30fe988170e72a0dd90f665f80f02ae6065625dfefe389859d1f | jaspervdj/websockets-snap | server.hs | --------------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
module Main where
--------------------------------------------------------------------------------
import Control.Concurrent (forkIO)
import Control.Exception (finally)
import Control.Monad (forever, unless)
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as BC
import qualified Network.WebSockets as WS
import qualified Network.WebSockets.Snap as WS
import Snap.Core (Snap)
import qualified Snap.Core as Snap
import qualified Snap.Http.Server as Snap
import qualified Snap.Util.FileServe as Snap
import qualified System.IO as IO
import qualified System.Process as Process
--------------------------------------------------------------------------------
app :: Snap ()
app = Snap.route
[ ("", Snap.ifTop $ Snap.serveFile "console.html")
, ("console.js", Snap.serveFile "console.js")
, ("console/:shell", console)
, ("style.css", Snap.serveFile "style.css")
]
--------------------------------------------------------------------------------
console :: Snap ()
console = do
Just shell <- Snap.getParam "shell"
WS.runWebSocketsSnap $ consoleApp $ BC.unpack shell
--------------------------------------------------------------------------------
consoleApp :: String -> WS.ServerApp
consoleApp shell pending = do
(stdin, stdout, stderr, phandle) <- Process.runInteractiveCommand shell
conn <- WS.acceptRequest pending
_ <- forkIO $ copyHandleToConn stdout conn
_ <- forkIO $ copyHandleToConn stderr conn
_ <- forkIO $ copyConnToHandle conn stdin
exitCode <- Process.waitForProcess phandle
putStrLn $ "consoleApp ended: " ++ show exitCode
--------------------------------------------------------------------------------
copyHandleToConn :: IO.Handle -> WS.Connection -> IO ()
copyHandleToConn h c = do
bs <- B.hGetSome h 1024
unless (B.null bs) $ do
putStrLn $ "> " ++ show bs
WS.sendTextData c bs
copyHandleToConn h c
--------------------------------------------------------------------------------
copyConnToHandle :: WS.Connection -> IO.Handle -> IO ()
copyConnToHandle c h = flip finally (IO.hClose h) $ forever $ do
bs <- WS.receiveData c
putStrLn $ "< " ++ show bs
B.hPutStr h bs
IO.hFlush h
--------------------------------------------------------------------------------
main :: IO ()
main = Snap.httpServe config app
where
config =
Snap.setErrorLog Snap.ConfigNoLog $
Snap.setAccessLog Snap.ConfigNoLog $
Snap.defaultConfig
| null | https://raw.githubusercontent.com/jaspervdj/websockets-snap/a6cb1467e4b60b98cdbaa56f17fcddda75b2da61/example/server.hs | haskell | ------------------------------------------------------------------------------
# LANGUAGE OverloadedStrings #
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------ | module Main where
import Control.Concurrent (forkIO)
import Control.Exception (finally)
import Control.Monad (forever, unless)
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as BC
import qualified Network.WebSockets as WS
import qualified Network.WebSockets.Snap as WS
import Snap.Core (Snap)
import qualified Snap.Core as Snap
import qualified Snap.Http.Server as Snap
import qualified Snap.Util.FileServe as Snap
import qualified System.IO as IO
import qualified System.Process as Process
app :: Snap ()
app = Snap.route
[ ("", Snap.ifTop $ Snap.serveFile "console.html")
, ("console.js", Snap.serveFile "console.js")
, ("console/:shell", console)
, ("style.css", Snap.serveFile "style.css")
]
console :: Snap ()
console = do
Just shell <- Snap.getParam "shell"
WS.runWebSocketsSnap $ consoleApp $ BC.unpack shell
consoleApp :: String -> WS.ServerApp
consoleApp shell pending = do
(stdin, stdout, stderr, phandle) <- Process.runInteractiveCommand shell
conn <- WS.acceptRequest pending
_ <- forkIO $ copyHandleToConn stdout conn
_ <- forkIO $ copyHandleToConn stderr conn
_ <- forkIO $ copyConnToHandle conn stdin
exitCode <- Process.waitForProcess phandle
putStrLn $ "consoleApp ended: " ++ show exitCode
copyHandleToConn :: IO.Handle -> WS.Connection -> IO ()
copyHandleToConn h c = do
bs <- B.hGetSome h 1024
unless (B.null bs) $ do
putStrLn $ "> " ++ show bs
WS.sendTextData c bs
copyHandleToConn h c
copyConnToHandle :: WS.Connection -> IO.Handle -> IO ()
copyConnToHandle c h = flip finally (IO.hClose h) $ forever $ do
bs <- WS.receiveData c
putStrLn $ "< " ++ show bs
B.hPutStr h bs
IO.hFlush h
main :: IO ()
main = Snap.httpServe config app
where
config =
Snap.setErrorLog Snap.ConfigNoLog $
Snap.setAccessLog Snap.ConfigNoLog $
Snap.defaultConfig
|
44d2564375f958e3b273c00f1c953c5b9333e478422df7444d2c1218e8580d07 | jlouis/graphql-erlang | graphql_scalar_bool_coerce.erl | -module(graphql_scalar_bool_coerce).
-export([input/2, output/2]).
input(_, true) -> {ok, true};
input(_, false) -> {ok, false};
input(_, _) -> {error, not_bool}.
output(<<"Bool">>, true) -> {ok, true};
output(<<"Bool">>, <<"true">>) -> {ok, true};
output(<<"Bool">>, false) -> {ok, false};
output(<<"Bool">>, <<"false">>) -> {ok, false};
output(<<"Bool">>, 0) -> {ok, false};
output(<<"Bool">>, X) when is_integer(X) -> {ok, true};
output(_,_) -> {error, not_coercible}.
| null | https://raw.githubusercontent.com/jlouis/graphql-erlang/4fd356294c2acea42a024366bc5a64661e4862d7/src/graphql_scalar_bool_coerce.erl | erlang | -module(graphql_scalar_bool_coerce).
-export([input/2, output/2]).
input(_, true) -> {ok, true};
input(_, false) -> {ok, false};
input(_, _) -> {error, not_bool}.
output(<<"Bool">>, true) -> {ok, true};
output(<<"Bool">>, <<"true">>) -> {ok, true};
output(<<"Bool">>, false) -> {ok, false};
output(<<"Bool">>, <<"false">>) -> {ok, false};
output(<<"Bool">>, 0) -> {ok, false};
output(<<"Bool">>, X) when is_integer(X) -> {ok, true};
output(_,_) -> {error, not_coercible}.
| |
9862c304ef7323ffed43965376bae6bcc9daea12a9f25c1f20cf33f37227695a | PEZ/shadow-bare-bones | server.clj | (ns main.server
(:gen-class))
(defn -main
"I don't do a whole lot ... yet."
[& _args]
(println "Hello, World!"))
(-main)
| null | https://raw.githubusercontent.com/PEZ/shadow-bare-bones/8e6d8328804b0e17e5ae0d79655688083ac739e9/src/main/server.clj | clojure | (ns main.server
(:gen-class))
(defn -main
"I don't do a whole lot ... yet."
[& _args]
(println "Hello, World!"))
(-main)
| |
deff2f7751db19be8aab0ed3336b6ac40769690347dc03805665e08cc4f96a48 | pveber/bistro | zhou2018.ml | (**
Article:
Data:
*)
open Core_kernel
open Bistro
open Bistro.Shell_dsl
open Bistro_utils
module Dataset = struct
type t = [`SongD1]
let to_string = function
| `SongD1 -> "SongD1"
let alignments d =
Bistro_unix.wget ""
|> Bistro_unix.tar_xfj
|> Fn.flip Workflow.select ["single-gene_alignments" ; to_string d ]
|> Workflow.glob ~pattern:"*"
let best_trees d =
Bistro_unix.wget ""
|> Bistro_unix.tar_xfj
|> Fn.flip Workflow.select ["single-gene_trees" ; to_string d ; "Best_observed"]
|> Workflow.glob ~pattern:"*"
end
module Raxml = struct
let img = [ docker_image ~account:"pveber" ~name:"raxml" ~tag:"8.2.9" () ]
let hpc alignment =
Workflow.shell ~descr:"raxmlhpc" ~np:4 [
within_container img (
and_list [
cd tmp ;
cmd "raxmlHPC" [
opt "-T" ident np ;
string "-p 1 -m GTRGAMMA --no-bfgs" ;
opt "-s" dep alignment ;
string "-n NAME" ;
] ;
]
) ;
mv (tmp // "RAxML_bestTree.NAME") dest ;
]
end
module Fasttree = struct
let img = [ docker_image ~account:"pveber" ~name:"fasttree" ~tag:"2.1.10" () ]
let fasttree fa =
Workflow.shell ~descr:"fasttree" [
cmd ~img "/usr/local/bin/FastTree" ~stdout:dest [
string "-nt -gtr -gamma -spr 4 -mlacc 2 -slownni" ;
dep fa ;
]
]
end
module IQTree = struct
let img = [ docker_image ~account:"pveber" ~name:"iqtree" ~tag:"1.4.2" () ]
let iqtree fa =
let tmp_ali_fn = "data.fa" in
let tmp_ali = tmp // tmp_ali_fn in
Workflow.shell ~descr:"iqtree" [
within_container img (
and_list [
cmd "ln" [ string "-s" ; dep fa ; tmp_ali ] ;
cmd "/usr/local/bin/iqtree" [ (* iqtree save its output right next to its input, hence this mess *)
string "-m GTR+G4" ;
opt "-s" ident tmp_ali ;
string "-seed 1" ;
opt "-nt" ident np ;
] ;
mv (tmp // (tmp_ali_fn ^ ".treefile")) dest ;
]
)
]
end
module PhyML = struct
let img = [ docker_image ~account:"pveber" ~name:"phyml" ~tag:"3.3.20180129" () ]
let phyml alignment =
let tmp_ali_fn = "alignment" in
let tmp_ali = tmp // tmp_ali_fn in
Workflow.shell ~descr:"phyml" [
within_container img (
and_list [
cd tmp ;
cmd "ln" [ string "-s" ; dep alignment ; tmp_ali ] ;
cmd "/usr/local/bin/phyml" [
opt "-i" ident tmp_ali ;
string "--r_seed 1 -d nt -b 0 -m GTR -f e -c 4 -a e -s SPR --n_rand_starts 1 -o tlr -p --run_id ID" ;
] ;
mv (tmp // (tmp_ali_fn ^ "*_phyml_tree_ID.txt")) dest ;
]
)
]
end
module Goalign = struct
let img = [ docker_image ~account:"pveber" ~name:"goalign" ~tag:"0.2.9" () ]
let phylip_of_fasta fa =
Workflow.shell ~descr:"goalign.reformat" [
cmd "goalign" ~img [
string "reformat phylip" ;
opt "-i" dep fa ;
opt "-o" ident dest ;
]
]
end
module Gotree = struct
let img = [ docker_image ~account:"pveber" ~name:"gotree" ~tag:"0.2.10" () ]
let compare_trees ~input ~reference =
Workflow.shell ~descr:"gotree.compare" [
cmd "/usr/local/bin/gotree" ~stdout:dest ~img [
string "compare trees --binary" ;
opt "-i" dep input ;
opt "-c" dep reference ;
]
]
end
let tree_inference meth fa = match meth with
| `Fasttree -> Fasttree.fasttree fa
| `RAXML -> Raxml.hpc fa
| `IQTree -> IQTree.iqtree fa
| `PhyML -> PhyML.phyml (Goalign.phylip_of_fasta fa)
let inferred_trees d meth =
Workflow.spawn (Dataset.alignments d) ~f:(tree_inference meth)
let comparisons d meth =
Workflow.spawn2
(inferred_trees d meth)
(Dataset.best_trees d)
~f:(fun input reference -> Gotree.compare_trees ~input ~reference)
let%pworkflow concat results =
List.map [%eval Workflow.(spawn results ~f:eval_path)] ~f:(fun fn ->
In_channel.read_lines fn
|> Fn.flip List.nth_exn 1
)
|> Out_channel.write_lines [%dest]
let repo = Repo.[
item ["concatenated_comps_fasttree"] (concat (comparisons `SongD1 `Fasttree)) ;
items ["comps_fasttree"] ~prefix:"tree" (comparisons `SongD1 `Fasttree) ;
]
let () = Repo.build_main ~loggers:[Console_logger.create ()] ~np:4 ~mem:(`GB 4) ~outdir:"res" repo
| null | https://raw.githubusercontent.com/pveber/bistro/da0ebc969c8c5ca091905366875cbf8366622280/examples/zhou2018.ml | ocaml | *
Article:
Data:
iqtree save its output right next to its input, hence this mess |
open Core_kernel
open Bistro
open Bistro.Shell_dsl
open Bistro_utils
module Dataset = struct
type t = [`SongD1]
let to_string = function
| `SongD1 -> "SongD1"
let alignments d =
Bistro_unix.wget ""
|> Bistro_unix.tar_xfj
|> Fn.flip Workflow.select ["single-gene_alignments" ; to_string d ]
|> Workflow.glob ~pattern:"*"
let best_trees d =
Bistro_unix.wget ""
|> Bistro_unix.tar_xfj
|> Fn.flip Workflow.select ["single-gene_trees" ; to_string d ; "Best_observed"]
|> Workflow.glob ~pattern:"*"
end
module Raxml = struct
let img = [ docker_image ~account:"pveber" ~name:"raxml" ~tag:"8.2.9" () ]
let hpc alignment =
Workflow.shell ~descr:"raxmlhpc" ~np:4 [
within_container img (
and_list [
cd tmp ;
cmd "raxmlHPC" [
opt "-T" ident np ;
string "-p 1 -m GTRGAMMA --no-bfgs" ;
opt "-s" dep alignment ;
string "-n NAME" ;
] ;
]
) ;
mv (tmp // "RAxML_bestTree.NAME") dest ;
]
end
module Fasttree = struct
let img = [ docker_image ~account:"pveber" ~name:"fasttree" ~tag:"2.1.10" () ]
let fasttree fa =
Workflow.shell ~descr:"fasttree" [
cmd ~img "/usr/local/bin/FastTree" ~stdout:dest [
string "-nt -gtr -gamma -spr 4 -mlacc 2 -slownni" ;
dep fa ;
]
]
end
module IQTree = struct
let img = [ docker_image ~account:"pveber" ~name:"iqtree" ~tag:"1.4.2" () ]
let iqtree fa =
let tmp_ali_fn = "data.fa" in
let tmp_ali = tmp // tmp_ali_fn in
Workflow.shell ~descr:"iqtree" [
within_container img (
and_list [
cmd "ln" [ string "-s" ; dep fa ; tmp_ali ] ;
string "-m GTR+G4" ;
opt "-s" ident tmp_ali ;
string "-seed 1" ;
opt "-nt" ident np ;
] ;
mv (tmp // (tmp_ali_fn ^ ".treefile")) dest ;
]
)
]
end
module PhyML = struct
let img = [ docker_image ~account:"pveber" ~name:"phyml" ~tag:"3.3.20180129" () ]
let phyml alignment =
let tmp_ali_fn = "alignment" in
let tmp_ali = tmp // tmp_ali_fn in
Workflow.shell ~descr:"phyml" [
within_container img (
and_list [
cd tmp ;
cmd "ln" [ string "-s" ; dep alignment ; tmp_ali ] ;
cmd "/usr/local/bin/phyml" [
opt "-i" ident tmp_ali ;
string "--r_seed 1 -d nt -b 0 -m GTR -f e -c 4 -a e -s SPR --n_rand_starts 1 -o tlr -p --run_id ID" ;
] ;
mv (tmp // (tmp_ali_fn ^ "*_phyml_tree_ID.txt")) dest ;
]
)
]
end
module Goalign = struct
let img = [ docker_image ~account:"pveber" ~name:"goalign" ~tag:"0.2.9" () ]
let phylip_of_fasta fa =
Workflow.shell ~descr:"goalign.reformat" [
cmd "goalign" ~img [
string "reformat phylip" ;
opt "-i" dep fa ;
opt "-o" ident dest ;
]
]
end
module Gotree = struct
let img = [ docker_image ~account:"pveber" ~name:"gotree" ~tag:"0.2.10" () ]
let compare_trees ~input ~reference =
Workflow.shell ~descr:"gotree.compare" [
cmd "/usr/local/bin/gotree" ~stdout:dest ~img [
string "compare trees --binary" ;
opt "-i" dep input ;
opt "-c" dep reference ;
]
]
end
let tree_inference meth fa = match meth with
| `Fasttree -> Fasttree.fasttree fa
| `RAXML -> Raxml.hpc fa
| `IQTree -> IQTree.iqtree fa
| `PhyML -> PhyML.phyml (Goalign.phylip_of_fasta fa)
let inferred_trees d meth =
Workflow.spawn (Dataset.alignments d) ~f:(tree_inference meth)
let comparisons d meth =
Workflow.spawn2
(inferred_trees d meth)
(Dataset.best_trees d)
~f:(fun input reference -> Gotree.compare_trees ~input ~reference)
let%pworkflow concat results =
List.map [%eval Workflow.(spawn results ~f:eval_path)] ~f:(fun fn ->
In_channel.read_lines fn
|> Fn.flip List.nth_exn 1
)
|> Out_channel.write_lines [%dest]
let repo = Repo.[
item ["concatenated_comps_fasttree"] (concat (comparisons `SongD1 `Fasttree)) ;
items ["comps_fasttree"] ~prefix:"tree" (comparisons `SongD1 `Fasttree) ;
]
let () = Repo.build_main ~loggers:[Console_logger.create ()] ~np:4 ~mem:(`GB 4) ~outdir:"res" repo
|
f633f74d8057b9aeb0a9281679628b55ce6344424c7c5cfd2a662cc04595f55a | rtoy/cmucl | slot-type.lisp | Copyright ( C ) 2002 < >
;;; All rights reserved.
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
;;;
1 . Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
2 . Redistributions in binary form must reproduce the above copyright
;;; notice, this list of conditions and the following disclaimer in the
;;; documentation and/or other materials provided with the distribution.
3 . The name of the author may not be used to endorse or promote
;;; products derived from this software without specific prior written
;;; permission.
;;;
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ` ` AS IS '' AND ANY EXPRESS
;;; OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
;;; WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED . IN NO EVENT SHALL THE AUTHOR OR
LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
;;; CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
;;; OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
;;; BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
;;; (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
;;; USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
;;; DAMAGE.
(ext:file-comment "$Header: src/pcl/rt/slot-type.lisp $")
(in-package "PCL-TESTS")
#+gerds-pcl
(eval-when (:compile-toplevel :load-toplevel :execute)
(setq pcl::*use-slot-types-p* t))
;;; Check that we check slot types, at least sometimes.
(defclass stype ()
((a :type fixnum :initform 0 :initarg :a)))
(defmethod stype.0 ((obj stype))
(slot-value obj 'a))
(defmethod stype.1 ((obj stype) value)
(setf (slot-value obj 'a) value))
(deftest slot-type.0
(multiple-value-bind (r c)
(ignore-errors
(stype.0 (make-instance 'stype :a 1)))
(values r (null c)))
1 t)
(deftest slot-type.1
(multiple-value-bind (r c)
(ignore-errors
(stype.0 (make-instance 'stype :a 1.0)))
(values r (typep c 'error)))
nil t)
(deftest slot-type.2
(multiple-value-bind (r c)
(ignore-errors
(stype.1 (make-instance 'stype) 1))
(values r (typep c 'error)))
1 nil)
(deftest slot-type.3
(multiple-value-bind (r c)
(ignore-errors
(stype.1 (make-instance 'stype) 1.0))
(values r (typep c 'error)))
nil t)
(deftest slot-type.4
(multiple-value-bind (r c)
(ignore-errors
(setf (slot-value (make-instance 'stype) 'a) "string"))
(values r (typep c 'error)))
nil t)
| null | https://raw.githubusercontent.com/rtoy/cmucl/9b1abca53598f03a5b39ded4185471a5b8777dea/tests/pcl/slot-type.lisp | lisp | All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
notice, this list of conditions and the following disclaimer.
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
products derived from this software without specific prior written
permission.
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
DAMAGE.
Check that we check slot types, at least sometimes. | Copyright ( C ) 2002 < >
1 . Redistributions of source code must retain the above copyright
2 . Redistributions in binary form must reproduce the above copyright
3 . The name of the author may not be used to endorse or promote
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ` ` AS IS '' AND ANY EXPRESS
ARE DISCLAIMED . IN NO EVENT SHALL THE AUTHOR OR
LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
(ext:file-comment "$Header: src/pcl/rt/slot-type.lisp $")
(in-package "PCL-TESTS")
#+gerds-pcl
(eval-when (:compile-toplevel :load-toplevel :execute)
(setq pcl::*use-slot-types-p* t))
(defclass stype ()
((a :type fixnum :initform 0 :initarg :a)))
(defmethod stype.0 ((obj stype))
(slot-value obj 'a))
(defmethod stype.1 ((obj stype) value)
(setf (slot-value obj 'a) value))
(deftest slot-type.0
(multiple-value-bind (r c)
(ignore-errors
(stype.0 (make-instance 'stype :a 1)))
(values r (null c)))
1 t)
(deftest slot-type.1
(multiple-value-bind (r c)
(ignore-errors
(stype.0 (make-instance 'stype :a 1.0)))
(values r (typep c 'error)))
nil t)
(deftest slot-type.2
(multiple-value-bind (r c)
(ignore-errors
(stype.1 (make-instance 'stype) 1))
(values r (typep c 'error)))
1 nil)
(deftest slot-type.3
(multiple-value-bind (r c)
(ignore-errors
(stype.1 (make-instance 'stype) 1.0))
(values r (typep c 'error)))
nil t)
(deftest slot-type.4
(multiple-value-bind (r c)
(ignore-errors
(setf (slot-value (make-instance 'stype) 'a) "string"))
(values r (typep c 'error)))
nil t)
|
ec89a7645fac1cd55d7924efd12b247889d26461694ad73f29e3cbb1353f8e38 | patricoferris/sesame | test.ml | let () =
let open Alcotest_lwt in
Lwt_main.run @@ run "Sesame" [ ("collections", Test_collection.tests) ]
| null | https://raw.githubusercontent.com/patricoferris/sesame/8c1086444b81a0faf284c7a3ac5412f578ca6868/test/test.ml | ocaml | let () =
let open Alcotest_lwt in
Lwt_main.run @@ run "Sesame" [ ("collections", Test_collection.tests) ]
| |
0de636d83d21b8e5d8753dde89480eca99a8b8dec3823304823ec156b8b7d256 | chchen/comet | bitvector.rkt | #lang rosette/safe
(require "../bool-bitvec/types.rkt")
;; Logical AND
(define (bvland l r)
(if (and (bitvector->bool l)
(bitvector->bool r))
true-vect
false-vect))
Logical OR
(define (bvlor l r)
(if (or (bitvector->bool l)
(bitvector->bool r))
true-vect
false-vect))
;; Logical NOT
(define (bvlnot l)
(if (bitvector->bool l)
false-vect
true-vect))
;; Equality as a word
(define (bvleq l r)
(if (bveq l r)
true-vect
false-vect))
;; Less-than as a word
(define (bvlult l r)
(if (bvult l r)
true-vect
false-vect))
(provide bvland
bvlor
bvlnot
bvleq
bvlult)
| null | https://raw.githubusercontent.com/chchen/comet/005477b761f4d35c9fce175738f4dcbb805909e7/unity-synthesis/arduino/bitvector.rkt | racket | Logical AND
Logical NOT
Equality as a word
Less-than as a word | #lang rosette/safe
(require "../bool-bitvec/types.rkt")
(define (bvland l r)
(if (and (bitvector->bool l)
(bitvector->bool r))
true-vect
false-vect))
Logical OR
(define (bvlor l r)
(if (or (bitvector->bool l)
(bitvector->bool r))
true-vect
false-vect))
(define (bvlnot l)
(if (bitvector->bool l)
false-vect
true-vect))
(define (bvleq l r)
(if (bveq l r)
true-vect
false-vect))
(define (bvlult l r)
(if (bvult l r)
true-vect
false-vect))
(provide bvland
bvlor
bvlnot
bvleq
bvlult)
|
6de0c596fd66510ce42f38bca194e47f2664eb1f3fb63b4d987f947265499a9b | mzp/coq-for-ipad | compile.ml | (***********************************************************************)
(* *)
(* Objective Caml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 2002 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
(* *)
(***********************************************************************)
$ I d : compile.ml 9074 2008 - 10 - 06 13:53:54Z doligez $
(* The batch compiler *)
open Misc
open Config
open Format
open Typedtree
Initialize the search path .
The current directory is always searched first ,
then the directories specified with the -I option ( in command - line order ) ,
then the standard library directory ( unless the -nostdlib option is given ) .
The current directory is always searched first,
then the directories specified with the -I option (in command-line order),
then the standard library directory (unless the -nostdlib option is given).
*)
let init_path () =
let dirs =
if !Clflags.use_threads then "+threads" :: !Clflags.include_dirs
else if !Clflags.use_vmthreads then "+vmthreads" :: !Clflags.include_dirs
else !Clflags.include_dirs in
let exp_dirs =
List.map (expand_directory Config.standard_library) dirs in
load_path := "" :: List.rev_append exp_dirs (Clflags.std_include_dir ());
Env.reset_cache ()
(* Return the initial environment in which compilation proceeds. *)
(* Note: do not do init_path() in initial_env, this breaks
toplevel initialization (PR#1775) *)
let initial_env () =
Ident.reinit();
try
if !Clflags.nopervasives
then Env.initial
else Env.open_pers_signature "Pervasives" Env.initial
with Not_found ->
fatal_error "cannot open pervasives.cmi"
(* Note: this function is duplicated in optcompile.ml *)
let check_unit_name ppf filename name =
try
begin match name.[0] with
| 'A'..'Z' -> ()
| _ ->
Location.print_warning (Location.in_file filename) ppf
(Warnings.Bad_module_name name);
raise Exit;
end;
for i = 1 to String.length name - 1 do
match name.[i] with
| 'A'..'Z' | 'a'..'z' | '0'..'9' | '_' | '\'' -> ()
| _ ->
Location.print_warning (Location.in_file filename) ppf
(Warnings.Bad_module_name name);
raise Exit;
done;
with Exit -> ()
;;
(* Compile a .mli file *)
let interface ppf sourcefile outputprefix =
Location.input_name := sourcefile;
init_path ();
let modulename =
String.capitalize(Filename.basename(chop_extension_if_any sourcefile)) in
check_unit_name ppf sourcefile modulename;
Env.set_unit_name modulename;
let inputfile = Pparse.preprocess sourcefile in
try
let ast =
Pparse.file ppf inputfile Parse.interface ast_intf_magic_number in
if !Clflags.dump_parsetree then fprintf ppf "%a@." Printast.interface ast;
let sg = Typemod.transl_signature (initial_env()) ast in
if !Clflags.print_types then
fprintf std_formatter "%a@." Printtyp.signature
(Typemod.simplify_signature sg);
Warnings.check_fatal ();
if not !Clflags.print_types then
Env.save_signature sg modulename (outputprefix ^ ".cmi");
Pparse.remove_preprocessed inputfile
with e ->
Pparse.remove_preprocessed_if_ast inputfile;
raise e
Compile a .ml file
let print_if ppf flag printer arg =
if !flag then fprintf ppf "%a@." printer arg;
arg
let (++) x f = f x
let implementation ppf sourcefile outputprefix =
Location.input_name := sourcefile;
init_path ();
let modulename =
String.capitalize(Filename.basename(chop_extension_if_any sourcefile)) in
check_unit_name ppf sourcefile modulename;
Env.set_unit_name modulename;
let inputfile = Pparse.preprocess sourcefile in
let env = initial_env() in
if !Clflags.print_types then begin
try ignore(
Pparse.file ppf inputfile Parse.implementation ast_impl_magic_number
++ print_if ppf Clflags.dump_parsetree Printast.implementation
++ Typemod.type_implementation sourcefile outputprefix modulename env)
with x ->
Pparse.remove_preprocessed_if_ast inputfile;
raise x
end else begin
let objfile = outputprefix ^ ".cmo" in
let oc = open_out_bin objfile in
try
Pparse.file ppf inputfile Parse.implementation ast_impl_magic_number
++ print_if ppf Clflags.dump_parsetree Printast.implementation
++ Unused_var.warn ppf
++ Typemod.type_implementation sourcefile outputprefix modulename env
++ Translmod.transl_implementation modulename
++ print_if ppf Clflags.dump_rawlambda Printlambda.lambda
++ Simplif.simplify_lambda
++ print_if ppf Clflags.dump_lambda Printlambda.lambda
++ Bytegen.compile_implementation modulename
++ print_if ppf Clflags.dump_instr Printinstr.instrlist
++ Emitcode.to_file oc modulename;
Warnings.check_fatal ();
close_out oc;
Pparse.remove_preprocessed inputfile;
Stypes.dump (outputprefix ^ ".annot");
with x ->
close_out oc;
remove_file objfile;
Pparse.remove_preprocessed_if_ast inputfile;
Stypes.dump (outputprefix ^ ".annot");
raise x
end
let c_file name =
Location.input_name := name;
if Ccomp.compile_file name <> 0 then exit 2
| null | https://raw.githubusercontent.com/mzp/coq-for-ipad/4fb3711723e2581a170ffd734e936f210086396e/src/ocaml-3.12.0/driver/compile.ml | ocaml | *********************************************************************
Objective Caml
*********************************************************************
The batch compiler
Return the initial environment in which compilation proceeds.
Note: do not do init_path() in initial_env, this breaks
toplevel initialization (PR#1775)
Note: this function is duplicated in optcompile.ml
Compile a .mli file | , projet Cristal , INRIA Rocquencourt
Copyright 2002 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
$ I d : compile.ml 9074 2008 - 10 - 06 13:53:54Z doligez $
open Misc
open Config
open Format
open Typedtree
Initialize the search path .
The current directory is always searched first ,
then the directories specified with the -I option ( in command - line order ) ,
then the standard library directory ( unless the -nostdlib option is given ) .
The current directory is always searched first,
then the directories specified with the -I option (in command-line order),
then the standard library directory (unless the -nostdlib option is given).
*)
let init_path () =
let dirs =
if !Clflags.use_threads then "+threads" :: !Clflags.include_dirs
else if !Clflags.use_vmthreads then "+vmthreads" :: !Clflags.include_dirs
else !Clflags.include_dirs in
let exp_dirs =
List.map (expand_directory Config.standard_library) dirs in
load_path := "" :: List.rev_append exp_dirs (Clflags.std_include_dir ());
Env.reset_cache ()
let initial_env () =
Ident.reinit();
try
if !Clflags.nopervasives
then Env.initial
else Env.open_pers_signature "Pervasives" Env.initial
with Not_found ->
fatal_error "cannot open pervasives.cmi"
let check_unit_name ppf filename name =
try
begin match name.[0] with
| 'A'..'Z' -> ()
| _ ->
Location.print_warning (Location.in_file filename) ppf
(Warnings.Bad_module_name name);
raise Exit;
end;
for i = 1 to String.length name - 1 do
match name.[i] with
| 'A'..'Z' | 'a'..'z' | '0'..'9' | '_' | '\'' -> ()
| _ ->
Location.print_warning (Location.in_file filename) ppf
(Warnings.Bad_module_name name);
raise Exit;
done;
with Exit -> ()
;;
let interface ppf sourcefile outputprefix =
Location.input_name := sourcefile;
init_path ();
let modulename =
String.capitalize(Filename.basename(chop_extension_if_any sourcefile)) in
check_unit_name ppf sourcefile modulename;
Env.set_unit_name modulename;
let inputfile = Pparse.preprocess sourcefile in
try
let ast =
Pparse.file ppf inputfile Parse.interface ast_intf_magic_number in
if !Clflags.dump_parsetree then fprintf ppf "%a@." Printast.interface ast;
let sg = Typemod.transl_signature (initial_env()) ast in
if !Clflags.print_types then
fprintf std_formatter "%a@." Printtyp.signature
(Typemod.simplify_signature sg);
Warnings.check_fatal ();
if not !Clflags.print_types then
Env.save_signature sg modulename (outputprefix ^ ".cmi");
Pparse.remove_preprocessed inputfile
with e ->
Pparse.remove_preprocessed_if_ast inputfile;
raise e
Compile a .ml file
let print_if ppf flag printer arg =
if !flag then fprintf ppf "%a@." printer arg;
arg
let (++) x f = f x
let implementation ppf sourcefile outputprefix =
Location.input_name := sourcefile;
init_path ();
let modulename =
String.capitalize(Filename.basename(chop_extension_if_any sourcefile)) in
check_unit_name ppf sourcefile modulename;
Env.set_unit_name modulename;
let inputfile = Pparse.preprocess sourcefile in
let env = initial_env() in
if !Clflags.print_types then begin
try ignore(
Pparse.file ppf inputfile Parse.implementation ast_impl_magic_number
++ print_if ppf Clflags.dump_parsetree Printast.implementation
++ Typemod.type_implementation sourcefile outputprefix modulename env)
with x ->
Pparse.remove_preprocessed_if_ast inputfile;
raise x
end else begin
let objfile = outputprefix ^ ".cmo" in
let oc = open_out_bin objfile in
try
Pparse.file ppf inputfile Parse.implementation ast_impl_magic_number
++ print_if ppf Clflags.dump_parsetree Printast.implementation
++ Unused_var.warn ppf
++ Typemod.type_implementation sourcefile outputprefix modulename env
++ Translmod.transl_implementation modulename
++ print_if ppf Clflags.dump_rawlambda Printlambda.lambda
++ Simplif.simplify_lambda
++ print_if ppf Clflags.dump_lambda Printlambda.lambda
++ Bytegen.compile_implementation modulename
++ print_if ppf Clflags.dump_instr Printinstr.instrlist
++ Emitcode.to_file oc modulename;
Warnings.check_fatal ();
close_out oc;
Pparse.remove_preprocessed inputfile;
Stypes.dump (outputprefix ^ ".annot");
with x ->
close_out oc;
remove_file objfile;
Pparse.remove_preprocessed_if_ast inputfile;
Stypes.dump (outputprefix ^ ".annot");
raise x
end
let c_file name =
Location.input_name := name;
if Ccomp.compile_file name <> 0 then exit 2
|
2b445bf7a567391ceff6a3f8aa4f02603c70d23c21bbab68e2591b3246e30e6c | zippy/anansi | debug.cljs | (ns ss.debug
(:require [ss.dom-helpers :as d]
[ss.utils :as u]
[ goog.debug . DebugWindow : as debugw ]
))
(defn log [txt]
(d/insert-at (d/get-element :debug-log) (d/build [:div#thelog.logdiv [:div.logmsg txt]]) 0)
)
(defn jslog [txt]
(js* "console.log(~{txt})")
)
(defn alert [clj-obj]
(jslog (u/clj->json clj-obj))
)
;(def debug (goog.debug.DebugWindow.))
(comment doto debug (.addLogRecord (goog.debug.LogRecord. goog.debug.Logger.Level.INFO "messge" "source"))
(.setVisible true))
(defn toggle-debug []
(if (d/visible? :debug)
(d/hide :debug)
(d/show :debug)) )
| null | https://raw.githubusercontent.com/zippy/anansi/881aa279e5e7836f3002fc2ef7623f2ee1860c9a/public/ss/src/debug.cljs | clojure | (def debug (goog.debug.DebugWindow.)) | (ns ss.debug
(:require [ss.dom-helpers :as d]
[ss.utils :as u]
[ goog.debug . DebugWindow : as debugw ]
))
(defn log [txt]
(d/insert-at (d/get-element :debug-log) (d/build [:div#thelog.logdiv [:div.logmsg txt]]) 0)
)
(defn jslog [txt]
(js* "console.log(~{txt})")
)
(defn alert [clj-obj]
(jslog (u/clj->json clj-obj))
)
(comment doto debug (.addLogRecord (goog.debug.LogRecord. goog.debug.Logger.Level.INFO "messge" "source"))
(.setVisible true))
(defn toggle-debug []
(if (d/visible? :debug)
(d/hide :debug)
(d/show :debug)) )
|
c1afe44f54e08467ef3878174f935f037b491e9cfcf718461088205dfcf4fb5c | sshirokov/CLSQL | aodbc-sql.lisp | -*- Mode : LISP ; Syntax : ANSI - Common - Lisp ; Base : 10 -*-
;;;; *************************************************************************
;;;; FILE IDENTIFICATION
;;;;
;;;; Name: aodbc-sql.cl
Purpose : Low - level interface for CLSQL AODBC backend
Programmer :
Date Started : Feb 2002
;;;;
This file , part of CLSQL , is Copyright ( c ) 2002 by
;;;;
CLSQL users are granted the rights to distribute and use this software
as governed by the terms of the Lisp Lesser GNU Public License
;;;; (), also known as the LLGPL.
;;;; *************************************************************************
(in-package #:clsql-aodbc)
;; interface foreign library loading routines
(defmethod clsql-sys:database-type-library-loaded ((database-type (eql :aodbc)))
"T if foreign library was able to be loaded successfully. "
finds Allegro 's DBI ( AODBC ) package
t))
(defmethod clsql-sys:database-type-load-foreign ((databae-type (eql :aodbc)))
t)
(when (find-package :dbi)
(clsql-sys:database-type-load-foreign :aodbc))
AODBC interface
(defclass aodbc-database (generic-odbc-database)
((aodbc-db-type :accessor database-aodbc-db-type :initform :unknown)))
(defmethod database-name-from-spec (connection-spec
(database-type (eql :aodbc)))
(check-connection-spec connection-spec database-type (dsn user password))
(destructuring-bind (dsn user password) connection-spec
(declare (ignore password))
(concatenate 'string dsn "/" user)))
(defmethod database-connect (connection-spec (database-type (eql :aodbc)))
(check-connection-spec connection-spec database-type (dsn user password))
#+aodbc-v2
(destructuring-bind (dsn user password) connection-spec
(handler-case
(make-instance 'aodbc-database
:name (database-name-from-spec connection-spec :aodbc)
:database-type :aodbc
:dbi-package (find-package '#:dbi)
:odbc-conn
(dbi:connect :user user
:password password
:data-source-name dsn))
(sql-error (e)
(error e))
Init or Connect failed
(error 'sql-connection-error
:database-type database-type
:connection-spec connection-spec
:message "Connection failed")))))
(defmethod database-query (query-expression (database aodbc-database)
result-types field-names)
#+aodbc-v2
(handler-case
(dbi:sql query-expression
:db (clsql-sys::odbc-conn database)
:types result-types
:column-names field-names)
#+ignore
(error ()
(error 'sql-database-data-error
:database database
:expression query-expression
:message "Query failed"))))
(defmethod database-create (connection-spec (type (eql :aodbc)))
(warn "Not implemented."))
(defmethod database-destroy (connection-spec (type (eql :aodbc)))
(warn "Not implemented."))
(defmethod database-probe (connection-spec (type (eql :aodbc)))
(warn "Not implemented."))
;;; Backend capabilities
(defmethod database-underlying-type ((database aodbc-database))
(database-aodbc-db-type database))
(defmethod db-backend-has-create/destroy-db? ((db-type (eql :aodbc)))
nil)
(defmethod database-initialize-database-type ((database-type (eql :aodbc)))
t)
(when (clsql-sys:database-type-library-loaded :aodbc)
(clsql-sys:initialize-database-type :database-type :aodbc))
| null | https://raw.githubusercontent.com/sshirokov/CLSQL/c680432aea0177677ae2ee7b810a7404f7a05cab/db-aodbc/aodbc-sql.lisp | lisp | Syntax : ANSI - Common - Lisp ; Base : 10 -*-
*************************************************************************
FILE IDENTIFICATION
Name: aodbc-sql.cl
(), also known as the LLGPL.
*************************************************************************
interface foreign library loading routines
Backend capabilities | Purpose : Low - level interface for CLSQL AODBC backend
Programmer :
Date Started : Feb 2002
This file , part of CLSQL , is Copyright ( c ) 2002 by
CLSQL users are granted the rights to distribute and use this software
as governed by the terms of the Lisp Lesser GNU Public License
(in-package #:clsql-aodbc)
(defmethod clsql-sys:database-type-library-loaded ((database-type (eql :aodbc)))
"T if foreign library was able to be loaded successfully. "
finds Allegro 's DBI ( AODBC ) package
t))
(defmethod clsql-sys:database-type-load-foreign ((databae-type (eql :aodbc)))
t)
(when (find-package :dbi)
(clsql-sys:database-type-load-foreign :aodbc))
AODBC interface
(defclass aodbc-database (generic-odbc-database)
((aodbc-db-type :accessor database-aodbc-db-type :initform :unknown)))
(defmethod database-name-from-spec (connection-spec
(database-type (eql :aodbc)))
(check-connection-spec connection-spec database-type (dsn user password))
(destructuring-bind (dsn user password) connection-spec
(declare (ignore password))
(concatenate 'string dsn "/" user)))
(defmethod database-connect (connection-spec (database-type (eql :aodbc)))
(check-connection-spec connection-spec database-type (dsn user password))
#+aodbc-v2
(destructuring-bind (dsn user password) connection-spec
(handler-case
(make-instance 'aodbc-database
:name (database-name-from-spec connection-spec :aodbc)
:database-type :aodbc
:dbi-package (find-package '#:dbi)
:odbc-conn
(dbi:connect :user user
:password password
:data-source-name dsn))
(sql-error (e)
(error e))
Init or Connect failed
(error 'sql-connection-error
:database-type database-type
:connection-spec connection-spec
:message "Connection failed")))))
(defmethod database-query (query-expression (database aodbc-database)
result-types field-names)
#+aodbc-v2
(handler-case
(dbi:sql query-expression
:db (clsql-sys::odbc-conn database)
:types result-types
:column-names field-names)
#+ignore
(error ()
(error 'sql-database-data-error
:database database
:expression query-expression
:message "Query failed"))))
(defmethod database-create (connection-spec (type (eql :aodbc)))
(warn "Not implemented."))
(defmethod database-destroy (connection-spec (type (eql :aodbc)))
(warn "Not implemented."))
(defmethod database-probe (connection-spec (type (eql :aodbc)))
(warn "Not implemented."))
(defmethod database-underlying-type ((database aodbc-database))
(database-aodbc-db-type database))
(defmethod db-backend-has-create/destroy-db? ((db-type (eql :aodbc)))
nil)
(defmethod database-initialize-database-type ((database-type (eql :aodbc)))
t)
(when (clsql-sys:database-type-library-loaded :aodbc)
(clsql-sys:initialize-database-type :database-type :aodbc))
|
872d2c43092ab29c938cfe256db208ae2c235f267fb01e9d4e4cdb1bd18380d0 | thephoeron/quipper-language | USV.hs | This file is part of Quipper . Copyright ( C ) 2011 - 2014 . Please see the
-- file COPYRIGHT for a list of authors, copyright holders, licensing,
-- and other details. All rights reserved.
--
-- ======================================================================
-- | This module provides an implementation of the
main Unique Shortest Vector algorithm .
module Algorithms.USV.USV where
import Quipper
import QuipperLib.QFT
import QuipperLib.Arith
import Libraries.Sampling
import Algorithms.USV.Definitions
import Control.Monad (foldM, zipWithM, replicateM)
import Data.Maybe
import System.Random
import Text.Printf
import Libraries.Auxiliary
-- ==============================================================
-- * Coherent arithmetic
-- $ Some arithmetic functions used in the reductions of the /USV/ to
-- the /TPP/ and of the /TPP/ to the /DCP/.
-- | Compute the function /f/, that selects a subset
-- of lattice points. It is defined as:
--
\[image def_f.png ]
--
-- The arguments are:
--
-- * /bb_bar/, an /n/-dimensional matrix;
--
* /p/ , a prime such that /n/ ≤ /p/ ≤ 2 / n/ ;
--
* /m/ , an integer such that /1/ ≤ /m/ ≤ /p-1/ ;
--
-- * /i0/, an integer index such that /0/ ≤ /i0/ ≤ /n-1/;
--
-- * /t/, an integer (either /0/ or /1/);
--
-- * /a/=(/a/[sub 1],...,/a/[sub /n/]), an integer vector.
f_classical :: [[Integer]] -> Int -> Int -> Int -> (Int,[Int]) -> [Integer]
f_classical bb p m i0 (t,a) = matrix_mult bb a'
where
a' = map toInteger $ applyAt i0 (\x -> x*p + t*m) a
| Quantum version of ' f_classical ' .
f_quantum :: [[Integer]] -> Int -> Int -> Int -> TwoPoint -> Circ [QDInt]
f_quantum bb p m i0 = box "f" $ \(t,a) -> do
comment_with_label "ENTER: f_quantum" (t,a) ("t","a")
let n = (length . head) bb
b = maximum (map maximum bb)
s = ceiling (logBase 2 (fromIntegral b)) + 5*n
qp <- qinit (intm s (toInteger p))
qm <- qinit (intm s (toInteger m))
qbb <- qinit (map (\vs -> (map (\v -> (intm s v)) vs)) bb)
a <- mapM (\x -> qdint_extend_signed s x) a
let ai0 = a !! i0
(_,_,x) <- q_mult ai0 qp
q_add_in_place x qm `controlled` t
let a' = overwriteAt i0 x a
result <- q_matrix_mult qbb a'
comment_with_label "EXIT: f_quantum" (qp,qm,qbb,t,a) ("qp","qm","qbb","t","a")
return result
| Compute the function /g/ defined as :
--
\[image def_g1.png ]
--
-- The arguments are:
--
-- * /l/, an integer (in principle, a real number, but the
-- GFI only uses integer values);
--
* /w/ , a real number in the interval [ 0,1 ) ;
--
-- * /v/, an integer.
--
We note that in the quantum version of this function , /l/
and will be parameters , and /v/ will be a quantum
-- input. We implement this operation using only integer
-- division, using the following property: for all integers
/v/ , /m/ and real numbers ,
--
\[image floor2.png ]
g1_classical :: Integer -> Double -> Integer -> Integer
g1_classical l w v =
let m = 128 * l
c = ceiling (fromIntegral m * w)
in
(v - c) `div` m
| Compute the function /g/. The function /g/
-- partitions the space into hypercubes of size
128 / l/ at a random offset /w/. It is defined as :
--
\[image def_g.png ]
--
-- This is just the componentwise application of 'g1_classical'.
g_classical :: Integer -> [Double] -> [Integer] -> [Integer]
g_classical l w v = zipWith (g1_classical l) w v
| Quantum version of ' g1_classical ' .
g1_quantum :: Integer -> Double -> QDInt -> Circ QDInt
g1_quantum l w = box "g_1" $ \v -> do
comment_with_label "ENTER: g1_quantum" v "v"
let m = fromIntegral (128 * l)
c = ceiling (fromIntegral m * w)
l' = qdint_length v
c' <- qinit (intm l' c)
(_, _, n) <- q_sub v c'
m' <- qinit (intm l' m)
(_, _, q) <- q_div n m'
comment_with_label "EXIT: g1_quantum" (v,c',m',n,q) ("v","c'","m'","n","q")
return q
| Quantum version of ' g_classical ' .
g_quantum :: Integer -> [Double] -> [QDInt] -> Circ [QDInt]
g_quantum l w = box "g" $ \v -> do
zipWithM (g1_quantum l) w v
-- | Compute the function /h/, defined as:
--
\[image def_h.png ]
--
-- The function /h/ transforms a vector /a/=(/a/[sub 1],...,/a/[sub n])
of 4 / n/-bit integers into a 4 / n/[super 2]+/n/-bit integer by
inserting a 0 between each component of /a/.
h_classical :: [IntM] -> IntM
h_classical v = (intm (4*n^2+n) w)
where
n = length v
m = 4*n + 1
mm = 2^m
v' = map integer_of_intm_unsigned v
w = foldl (+) 0 $ zipWith (*) v' [mm^k | k <- [0..(n-1)]]
| Quantum version of ' h_classical ' .
h_quantum :: [QDInt] -> Circ QDInt
h_quantum a = do
comment_with_label "ENTER: h_quantum" a "a"
a <- mapM (extend . qulist_of_qdint_bh) (reverse a)
comment_with_label "EXIT: h_quantum" a "a"
return (qdint_of_qulist_bh (concat a))
where
-- | Prepend a qubit in state |0> to a list of qubits.
extend :: [Qubit] -> Circ [Qubit]
extend x = do
z <- qinit False
return (z : x)
-- ==============================================================
* Algorithm 1 : \"uSVP\ "
-- | Find the shortest vector. The argument, /bb/, is an
/n/-dimensional integer matrix . The algorithm first uses
-- /bb/ to generate a list of parameter tuples and then
-- recursively goes through this list by calling 'algorithm_Q'
-- on each tuple until it either finds the shortest vector
-- or exhausts the list and fails by returning 0.
--
-- Remark:
--
-- * Argument /n/ is redundant, it can be inferred from /bb/.
uSVP :: [[Integer]] -> Circ [Integer]
uSVP bb = do
-----------------------------------------------------------------
-- Prepare the list of parameter values,
-- and a random number generator.
-----------------------------------------------------------------
comment "ENTER: algorithm_uSVP"
let n = length bb
randomgen = mkStdGen n
p = find_prime (n^3)
bb_bar = (lll . reduce_lattice) bb
b1 = head bb_bar
l1 = norm b1
k = ceiling $ fromIntegral $ (n - 1) `div` 2
ls = [ceiling (l1 / (2^s)) | s <- [0..k] ]
parameters = [(l, m, i0, p) | l <- ls,
m <- [1..(p-1)],
i0 <- [0..(n-1)]]
-----------------------------------------------------------------
-- Conditional recursion over the list of parameters
-- using the function 'usvp_aux'.
-----------------------------------------------------------------
v <- usvp_aux n bb_bar parameters randomgen
comment "EXIT: algorithm_uSVP"
return v
-----------------------------------------------------------------
-- | For each tuple of parameters, call 'algorithm_Q' and
-- then test whether the returned vector is the shortest vector
-- in the lattice. If it is, return it. If not, move on to
-- the next tuple. If the end of the list is reached, return 0.
--
-- Remark:
--
-- * The algorithm takes as additional argument a random number
-- generator. At each iteration, a new seed is extracted and used
-- by the next iteration's generator.
-----------------------------------------------------------------
usvp_aux :: Int -> [[Integer]] -> [(Int, Int, Int, Int)] -> StdGen -> Circ [Integer]
usvp_aux n b [] randomgen = return (replicate n 0)
usvp_aux n b (h:t) randomgen = do
let (g1,g2) = split randomgen
u <- algorithm_Q b h g1
if (is_in_lattice u b) then return u
else usvp_aux n b t g2
-- ==============================================================
* Algorithm 2 : \"Q\ "
-- | Compute 'algorithm_Q'. The arguments are:
--
* /bb_bar/ , an /n/-dimensional LLL - reduced basis ;
--
* ( /l/,/m/,/i0/,/p/ ) , a 4 - tuple of integer parameters ;
--
-- * /randomgen/, a random number generator.
--
The algorithm first calls algorithm ' algorithm_R ' to prepare
a list of ' 's parameterized on ( /l/,/m/,/i0/,/p/ ) and
-- then calls 'tPP' on this list. With high probability, the
-- returned vector is the shortest vector in the lattice up to
one component .
--
-- Remark:
--
-- * Argument /n/ is redundant, it can be inferred
-- from /bb_bar/.
algorithm_Q :: [[Integer]] -> (Int, Int, Int, Int) -> StdGen -> Circ [Integer]
algorithm_Q bb_bar (l, m, i0, p) randomgen = do
-----------------------------------------------------------------
-- Extract (4*n^2+n) random number generators
-----------------------------------------------------------------
comment "ENTER: algorithm_Q"
let n = length bb_bar
generators = take (4*n^2+n) $ multi_split randomgen
-----------------------------------------------------------------
Call algorithm ' r ' to prepare a list of ' TwoPoint 's
-- using the given parameters and a random number generator.
-----------------------------------------------------------------
states <- sequence [algorithm_R bb_bar l m i0 p g | g <- generators]
-----------------------------------------------------------------
-- Run tpp to get the shortest vector up to i0-th component.
-----------------------------------------------------------------
u <- tPP n states
-----------------------------------------------------------------
-- Adjust i0-th component and return the vector.
-----------------------------------------------------------------
comment "EXIT: algorithm_Q"
return $ applyAt i0 (\x -> x*(toInteger p) + (toInteger m)) u
-- ==============================================================
* Algorithm 3 : \"R\ "
-- | Compute 'algorithm_R'. The arguments are:
--
* /bb_bar/ , an /n/-dimensional LLL - reduced basis ,
--
-- * /l/, an integer approximation of the length of the
-- shortest vector,
--
* /p/ , a prime such that /n/ ≤ /n/ ≤ 2 / n/ ,
--
* /m/ , an integer such that /1/ ≤ /m/ ≤ /p-1/ ,
--
-- * /i0/, an integer index such that /0/ ≤ /i0/ ≤ /n-1/ and
--
-- * /randomgen/, a random number generator.
--
The algorithm first calls the functions ' f_quantum ' and
-- 'g_quantum' to prepare a superposition of hypercubes
containing at most two lattice points , whose difference
-- is the shortest vector. It then measures the output to
collapses the state to a ' ' .
algorithm_R :: [[Integer]] -> Int -> Int -> Int -> Int -> StdGen -> Circ TwoPoint
algorithm_R bb_bar l m i0 p randomgen = do
comment "ENTER: algorithm_R"
let n = length bb_bar
b = maximum (map maximum bb_bar)
s = ceiling (logBase 2 (fromIntegral b)) + 5*n
ws = take n $ sample_random randomgen 0 1
-----------------------------------------------------------------
-- Use functions 'f_quantum' and 'g_quantum' to partition
the space into hypercubes containing two points whose
-- difference is the shortest vector.
-----------------------------------------------------------------
t <- qinit False
a <- qinit $ replicate n (intm (4*n) 0)
r <- qinit $ replicate n (intm s 0)
(t,a) <- map_hadamard (t,a)
((t,a),r) <- classical_to_reversible (\(t,a) -> do
result <- f_quantum bb_bar p m i0 (t,a)
result <- g_quantum (toInteger l) ws r
return result) ((t,a),r)
-----------------------------------------------------------------
Collapse the space onto one such cube to create a ' ' .
-----------------------------------------------------------------
r_measured <- measure r
cdiscard r_measured
comment "EXIT: algorithm_R"
return (t,a)
-- ==============================================================
* Algorithm 4 : \"TPP\ "
| Perform 's reduction of the /TPP/ to the /DCP/ and then
-- call 'dCP'. The arguments are:
--
-- * /n/, an integer and
--
* /states/ , a list of ' 's .
--
The algorithm transforms the ' TwoPoint 's in /states/ into
' CosetState 's using the function ' h_quantum ' , then calls
-- 'dCP' on this modified list to find the shortest vector.
tPP :: Int -> [TwoPoint] -> Circ [Integer]
tPP n states = do
comment_with_label "ENTER: algorithm_TPP" states "states"
let m = 2^(4*n)
ms = foldl (+) 0 [m*(2*m)^k | k <- [0..(n-1)]]
-----------------------------------------------------------------
Use the function h to transform ' TPP ' inputs ( i.e. ' 's )
into ' DCP ' inputs ( i.e. ' CosetState 's ) .
-----------------------------------------------------------------
states <- mapM (\(t,a) -> do
a <- h_quantum a
return (t,a)) states
-----------------------------------------------------------------
Call ' DCP ' to find the difference between .
-----------------------------------------------------------------
d <- dCP n 0 0 states
-----------------------------------------------------------------
-- Convert the integer output of 'dcp' back to a vector.
-----------------------------------------------------------------
comment "EXIT: algorithm_TPP"
return $ map (\x -> x-m) $ expand (d + ms) (2*m)
-- ==============================================================
* Algorithm 5 : \"DCP\ "
-- | Given integers /m/ and /n/ and a 'Psi_k' /(q,k)/
-- compute the last /n/ bits of the binary expansion
of on /m/ bits .
n_low_bits :: Int -> Int -> Psi_k -> [Bool]
n_low_bits m n p = take n $ boollist_of_int_lh m (toInteger(snd p))
-- | Given integers /m/ and /n/ and a list /l/ of 'Psi_k's, group the
elements of /l/ into pairs /(psi_p , psi_q)/ where
-- /p/ and /q/ share /n/ low bits. Return the list of all such
pairs together with the list of unpaired elements of
pairing :: Int -> Int -> [Psi_k] -> ([(Psi_k, Psi_k)], [Psi_k])
pairing m n l = find_partners (\p -> n_low_bits m n p) l
-- | Perform Kuperberg's sieve. The arguments are:
--
-- * /n/, an integer,
--
-- * /m/, an integer and
--
-- * /l/, a list of 'Psi_k's.
--
-- The algorithm recursively combines and sieves the
-- elements of /l/ until it reaches a list whose
elements have /m/[sup 2 ] trailing zeros .
-- At each step, the list of remaining 'Psi_k's are
-- paired and each pair
( ( /q/[sub 1 ] , /k/[sub 1 ] ) , ( /q/[sub 2 ] , /k/[sub 2 ] ) )
-- is combined into a new 'Psi_k' /(q, k)/ with
/k/= /k/[sub 1 ] ± /k/[sub 2 ] .
-- If /k/= /k/[sub 1] - /k/[sub 2], the 'Psi_k' is preserved,
-- otherwise it is discarded.
--
-- Remark:
--
-- * Uses 'dynamic_lift' to determine whether
-- to keep a discard a 'Psi_k'.
sieving :: Int -> Int -> [Psi_k] -> Circ [Psi_k]
sieving n m l = do
comment "ENTER: sieving"
l <- loop_with_indexM m l (\j l -> do
-- Pair the states sharing m+m*j low bits.
comment "ENTER: Pairing"
let mmj = m + m*j
(pairs, unpaired) = pairing n mmj l
-- Discard the states that haven't been paired.
qdiscard_psi_ks unpaired
comment "EXIT: Pairing"
Combine pairs ( Psi_k , Psi_l ) to get Psi_k±l .
-- If the measurement outcome ('sign') is 0, then the
associated state is of the form Psi_k
combined_states <- mapM (\((q,k),(q',l)) -> do
comment "ENTER: Combining"
q <- qnot q `controlled` q'
q <- measure q
sign <- dynamic_lift q
comment "EXIT: Combining"
return (sign, (q', (k-l)))) pairs
-- Separate the states according to the value of 'sign'.
-- Discard the states of the form Psi_k+l and
return the ones of the form Psi_k
let (plus, minus) = separate combined_states fst
qdiscard_psi_ks $ map snd plus
return $ map snd minus)
comment "EXIT: sieving"
return l
| Perform Kuperberg 's algorithm solving the Dihedral
Coset problem . The arguments are :
--
-- * /n/, an integer measuring the length of the output,
--
* /d/ , an integer to hold the output initially set to 0 ,
--
-- * /s/, an integer counter initially set to 0 and
--
* /states/ , a list of ' CosetState 's .
--
-- The algorithm proceeds recursively. At each iteration it
uses 's sieve on the first /n/ elements of /states/
-- to compute the /s/-th bit of the output and updates /d/ with
the result . Then it increments /s/ and repeats until /states/ is
-- exhausted.
--
-- Remark:
--
-- * The function 'dynamic_lift' used in this algorithm is presumably
-- very expensive in terms of resources. In this implementation
-- it is used profusely but there is room for optimization.
dCP :: Int -> Integer -> Int -> [CosetState] -> Circ Integer
dCP n d s states = if s == n then return d else do
comment (printf "ENTER algorithm_DCP: n=%d d=%d s=%d" n d s)
let nn = 2^n
r = exp $ -( 2*pi*(fromIntegral d / fromIntegral(nn)) )
m = ceiling $ sqrt $ fromIntegral $ n-s-1
(l1, l2) = splitAt n states
-----------------------------------------------------------------
Transform the first n coset states
-- into states of the form Psi_k.
-----------------------------------------------------------------
comment "ENTER: TO_Psi_k"
l <- mapM (\(t,a) -> do
a <- qft_int a
ca <- measure a
k <- mmap fromIntegral $ dynamic_lift ca
t <- named_rotation "R" (r*(fromIntegral k)) t
return (t, k)) l1
comment "EXIT: To_Psi_k"
-----------------------------------------------------------------
Sieve the Psi_k 's to get Psi_2^{n - s-1 } .
-----------------------------------------------------------------
l <- sieving n m l
-----------------------------------------------------------------
-- Extract the s-th bit of d by finding in l a state of the
form Psi_2^{n - s-1 } and measuring it in the + /- basis . The
-- remaining states in l are discarded.
-----------------------------------------------------------------
let ((q,k),psis) = find l (\x -> ((snd x) == 2^(n-s-1))) "The sieving process failed to produce a state of the form 2^k."
--let ((q,k),psis) = ((head l),(tail l))
q <- map_hadamard q
q <- measure q
q <- dynamic_lift q
qdiscard_psi_ks psis
let d_lsb = int_of_boollist_unsigned_bh [q]
-----------------------------------------------------------------
-- Update d_low and iterate on the remaining list.
-----------------------------------------------------------------
comment "EXIT: algorithm_DCP"
dCP n (d + (2^s)*d_lsb) (s+1) l2
| null | https://raw.githubusercontent.com/thephoeron/quipper-language/15e555343a15c07b9aa97aced1ada22414f04af6/Algorithms/USV/USV.hs | haskell | file COPYRIGHT for a list of authors, copyright holders, licensing,
and other details. All rights reserved.
======================================================================
| This module provides an implementation of the
==============================================================
* Coherent arithmetic
$ Some arithmetic functions used in the reductions of the /USV/ to
the /TPP/ and of the /TPP/ to the /DCP/.
| Compute the function /f/, that selects a subset
of lattice points. It is defined as:
The arguments are:
* /bb_bar/, an /n/-dimensional matrix;
* /i0/, an integer index such that /0/ ≤ /i0/ ≤ /n-1/;
* /t/, an integer (either /0/ or /1/);
* /a/=(/a/[sub 1],...,/a/[sub /n/]), an integer vector.
The arguments are:
* /l/, an integer (in principle, a real number, but the
GFI only uses integer values);
* /v/, an integer.
input. We implement this operation using only integer
division, using the following property: for all integers
partitions the space into hypercubes of size
This is just the componentwise application of 'g1_classical'.
| Compute the function /h/, defined as:
The function /h/ transforms a vector /a/=(/a/[sub 1],...,/a/[sub n])
| Prepend a qubit in state |0> to a list of qubits.
==============================================================
| Find the shortest vector. The argument, /bb/, is an
/bb/ to generate a list of parameter tuples and then
recursively goes through this list by calling 'algorithm_Q'
on each tuple until it either finds the shortest vector
or exhausts the list and fails by returning 0.
Remark:
* Argument /n/ is redundant, it can be inferred from /bb/.
---------------------------------------------------------------
Prepare the list of parameter values,
and a random number generator.
---------------------------------------------------------------
---------------------------------------------------------------
Conditional recursion over the list of parameters
using the function 'usvp_aux'.
---------------------------------------------------------------
---------------------------------------------------------------
| For each tuple of parameters, call 'algorithm_Q' and
then test whether the returned vector is the shortest vector
in the lattice. If it is, return it. If not, move on to
the next tuple. If the end of the list is reached, return 0.
Remark:
* The algorithm takes as additional argument a random number
generator. At each iteration, a new seed is extracted and used
by the next iteration's generator.
---------------------------------------------------------------
==============================================================
| Compute 'algorithm_Q'. The arguments are:
* /randomgen/, a random number generator.
then calls 'tPP' on this list. With high probability, the
returned vector is the shortest vector in the lattice up to
Remark:
* Argument /n/ is redundant, it can be inferred
from /bb_bar/.
---------------------------------------------------------------
Extract (4*n^2+n) random number generators
---------------------------------------------------------------
---------------------------------------------------------------
using the given parameters and a random number generator.
---------------------------------------------------------------
---------------------------------------------------------------
Run tpp to get the shortest vector up to i0-th component.
---------------------------------------------------------------
---------------------------------------------------------------
Adjust i0-th component and return the vector.
---------------------------------------------------------------
==============================================================
| Compute 'algorithm_R'. The arguments are:
* /l/, an integer approximation of the length of the
shortest vector,
* /i0/, an integer index such that /0/ ≤ /i0/ ≤ /n-1/ and
* /randomgen/, a random number generator.
'g_quantum' to prepare a superposition of hypercubes
is the shortest vector. It then measures the output to
---------------------------------------------------------------
Use functions 'f_quantum' and 'g_quantum' to partition
difference is the shortest vector.
---------------------------------------------------------------
---------------------------------------------------------------
---------------------------------------------------------------
==============================================================
call 'dCP'. The arguments are:
* /n/, an integer and
'dCP' on this modified list to find the shortest vector.
---------------------------------------------------------------
---------------------------------------------------------------
---------------------------------------------------------------
---------------------------------------------------------------
---------------------------------------------------------------
Convert the integer output of 'dcp' back to a vector.
---------------------------------------------------------------
==============================================================
| Given integers /m/ and /n/ and a 'Psi_k' /(q,k)/
compute the last /n/ bits of the binary expansion
| Given integers /m/ and /n/ and a list /l/ of 'Psi_k's, group the
/p/ and /q/ share /n/ low bits. Return the list of all such
| Perform Kuperberg's sieve. The arguments are:
* /n/, an integer,
* /m/, an integer and
* /l/, a list of 'Psi_k's.
The algorithm recursively combines and sieves the
elements of /l/ until it reaches a list whose
At each step, the list of remaining 'Psi_k's are
paired and each pair
is combined into a new 'Psi_k' /(q, k)/ with
If /k/= /k/[sub 1] - /k/[sub 2], the 'Psi_k' is preserved,
otherwise it is discarded.
Remark:
* Uses 'dynamic_lift' to determine whether
to keep a discard a 'Psi_k'.
Pair the states sharing m+m*j low bits.
Discard the states that haven't been paired.
If the measurement outcome ('sign') is 0, then the
Separate the states according to the value of 'sign'.
Discard the states of the form Psi_k+l and
* /n/, an integer measuring the length of the output,
* /s/, an integer counter initially set to 0 and
The algorithm proceeds recursively. At each iteration it
to compute the /s/-th bit of the output and updates /d/ with
exhausted.
Remark:
* The function 'dynamic_lift' used in this algorithm is presumably
very expensive in terms of resources. In this implementation
it is used profusely but there is room for optimization.
---------------------------------------------------------------
into states of the form Psi_k.
---------------------------------------------------------------
---------------------------------------------------------------
---------------------------------------------------------------
---------------------------------------------------------------
Extract the s-th bit of d by finding in l a state of the
remaining states in l are discarded.
---------------------------------------------------------------
let ((q,k),psis) = ((head l),(tail l))
---------------------------------------------------------------
Update d_low and iterate on the remaining list.
--------------------------------------------------------------- | This file is part of Quipper . Copyright ( C ) 2011 - 2014 . Please see the
main Unique Shortest Vector algorithm .
module Algorithms.USV.USV where
import Quipper
import QuipperLib.QFT
import QuipperLib.Arith
import Libraries.Sampling
import Algorithms.USV.Definitions
import Control.Monad (foldM, zipWithM, replicateM)
import Data.Maybe
import System.Random
import Text.Printf
import Libraries.Auxiliary
\[image def_f.png ]
* /p/ , a prime such that /n/ ≤ /p/ ≤ 2 / n/ ;
* /m/ , an integer such that /1/ ≤ /m/ ≤ /p-1/ ;
f_classical :: [[Integer]] -> Int -> Int -> Int -> (Int,[Int]) -> [Integer]
f_classical bb p m i0 (t,a) = matrix_mult bb a'
where
a' = map toInteger $ applyAt i0 (\x -> x*p + t*m) a
| Quantum version of ' f_classical ' .
f_quantum :: [[Integer]] -> Int -> Int -> Int -> TwoPoint -> Circ [QDInt]
f_quantum bb p m i0 = box "f" $ \(t,a) -> do
comment_with_label "ENTER: f_quantum" (t,a) ("t","a")
let n = (length . head) bb
b = maximum (map maximum bb)
s = ceiling (logBase 2 (fromIntegral b)) + 5*n
qp <- qinit (intm s (toInteger p))
qm <- qinit (intm s (toInteger m))
qbb <- qinit (map (\vs -> (map (\v -> (intm s v)) vs)) bb)
a <- mapM (\x -> qdint_extend_signed s x) a
let ai0 = a !! i0
(_,_,x) <- q_mult ai0 qp
q_add_in_place x qm `controlled` t
let a' = overwriteAt i0 x a
result <- q_matrix_mult qbb a'
comment_with_label "EXIT: f_quantum" (qp,qm,qbb,t,a) ("qp","qm","qbb","t","a")
return result
| Compute the function /g/ defined as :
\[image def_g1.png ]
* /w/ , a real number in the interval [ 0,1 ) ;
We note that in the quantum version of this function , /l/
and will be parameters , and /v/ will be a quantum
/v/ , /m/ and real numbers ,
\[image floor2.png ]
g1_classical :: Integer -> Double -> Integer -> Integer
g1_classical l w v =
let m = 128 * l
c = ceiling (fromIntegral m * w)
in
(v - c) `div` m
| Compute the function /g/. The function /g/
128 / l/ at a random offset /w/. It is defined as :
\[image def_g.png ]
g_classical :: Integer -> [Double] -> [Integer] -> [Integer]
g_classical l w v = zipWith (g1_classical l) w v
| Quantum version of ' g1_classical ' .
g1_quantum :: Integer -> Double -> QDInt -> Circ QDInt
g1_quantum l w = box "g_1" $ \v -> do
comment_with_label "ENTER: g1_quantum" v "v"
let m = fromIntegral (128 * l)
c = ceiling (fromIntegral m * w)
l' = qdint_length v
c' <- qinit (intm l' c)
(_, _, n) <- q_sub v c'
m' <- qinit (intm l' m)
(_, _, q) <- q_div n m'
comment_with_label "EXIT: g1_quantum" (v,c',m',n,q) ("v","c'","m'","n","q")
return q
| Quantum version of ' g_classical ' .
g_quantum :: Integer -> [Double] -> [QDInt] -> Circ [QDInt]
g_quantum l w = box "g" $ \v -> do
zipWithM (g1_quantum l) w v
\[image def_h.png ]
of 4 / n/-bit integers into a 4 / n/[super 2]+/n/-bit integer by
inserting a 0 between each component of /a/.
h_classical :: [IntM] -> IntM
h_classical v = (intm (4*n^2+n) w)
where
n = length v
m = 4*n + 1
mm = 2^m
v' = map integer_of_intm_unsigned v
w = foldl (+) 0 $ zipWith (*) v' [mm^k | k <- [0..(n-1)]]
| Quantum version of ' h_classical ' .
h_quantum :: [QDInt] -> Circ QDInt
h_quantum a = do
comment_with_label "ENTER: h_quantum" a "a"
a <- mapM (extend . qulist_of_qdint_bh) (reverse a)
comment_with_label "EXIT: h_quantum" a "a"
return (qdint_of_qulist_bh (concat a))
where
extend :: [Qubit] -> Circ [Qubit]
extend x = do
z <- qinit False
return (z : x)
* Algorithm 1 : \"uSVP\ "
/n/-dimensional integer matrix . The algorithm first uses
uSVP :: [[Integer]] -> Circ [Integer]
uSVP bb = do
comment "ENTER: algorithm_uSVP"
let n = length bb
randomgen = mkStdGen n
p = find_prime (n^3)
bb_bar = (lll . reduce_lattice) bb
b1 = head bb_bar
l1 = norm b1
k = ceiling $ fromIntegral $ (n - 1) `div` 2
ls = [ceiling (l1 / (2^s)) | s <- [0..k] ]
parameters = [(l, m, i0, p) | l <- ls,
m <- [1..(p-1)],
i0 <- [0..(n-1)]]
v <- usvp_aux n bb_bar parameters randomgen
comment "EXIT: algorithm_uSVP"
return v
usvp_aux :: Int -> [[Integer]] -> [(Int, Int, Int, Int)] -> StdGen -> Circ [Integer]
usvp_aux n b [] randomgen = return (replicate n 0)
usvp_aux n b (h:t) randomgen = do
let (g1,g2) = split randomgen
u <- algorithm_Q b h g1
if (is_in_lattice u b) then return u
else usvp_aux n b t g2
* Algorithm 2 : \"Q\ "
* /bb_bar/ , an /n/-dimensional LLL - reduced basis ;
* ( /l/,/m/,/i0/,/p/ ) , a 4 - tuple of integer parameters ;
The algorithm first calls algorithm ' algorithm_R ' to prepare
a list of ' 's parameterized on ( /l/,/m/,/i0/,/p/ ) and
one component .
algorithm_Q :: [[Integer]] -> (Int, Int, Int, Int) -> StdGen -> Circ [Integer]
algorithm_Q bb_bar (l, m, i0, p) randomgen = do
comment "ENTER: algorithm_Q"
let n = length bb_bar
generators = take (4*n^2+n) $ multi_split randomgen
Call algorithm ' r ' to prepare a list of ' TwoPoint 's
states <- sequence [algorithm_R bb_bar l m i0 p g | g <- generators]
u <- tPP n states
comment "EXIT: algorithm_Q"
return $ applyAt i0 (\x -> x*(toInteger p) + (toInteger m)) u
* Algorithm 3 : \"R\ "
* /bb_bar/ , an /n/-dimensional LLL - reduced basis ,
* /p/ , a prime such that /n/ ≤ /n/ ≤ 2 / n/ ,
* /m/ , an integer such that /1/ ≤ /m/ ≤ /p-1/ ,
The algorithm first calls the functions ' f_quantum ' and
containing at most two lattice points , whose difference
collapses the state to a ' ' .
algorithm_R :: [[Integer]] -> Int -> Int -> Int -> Int -> StdGen -> Circ TwoPoint
algorithm_R bb_bar l m i0 p randomgen = do
comment "ENTER: algorithm_R"
let n = length bb_bar
b = maximum (map maximum bb_bar)
s = ceiling (logBase 2 (fromIntegral b)) + 5*n
ws = take n $ sample_random randomgen 0 1
the space into hypercubes containing two points whose
t <- qinit False
a <- qinit $ replicate n (intm (4*n) 0)
r <- qinit $ replicate n (intm s 0)
(t,a) <- map_hadamard (t,a)
((t,a),r) <- classical_to_reversible (\(t,a) -> do
result <- f_quantum bb_bar p m i0 (t,a)
result <- g_quantum (toInteger l) ws r
return result) ((t,a),r)
Collapse the space onto one such cube to create a ' ' .
r_measured <- measure r
cdiscard r_measured
comment "EXIT: algorithm_R"
return (t,a)
* Algorithm 4 : \"TPP\ "
| Perform 's reduction of the /TPP/ to the /DCP/ and then
* /states/ , a list of ' 's .
The algorithm transforms the ' TwoPoint 's in /states/ into
' CosetState 's using the function ' h_quantum ' , then calls
tPP :: Int -> [TwoPoint] -> Circ [Integer]
tPP n states = do
comment_with_label "ENTER: algorithm_TPP" states "states"
let m = 2^(4*n)
ms = foldl (+) 0 [m*(2*m)^k | k <- [0..(n-1)]]
Use the function h to transform ' TPP ' inputs ( i.e. ' 's )
into ' DCP ' inputs ( i.e. ' CosetState 's ) .
states <- mapM (\(t,a) -> do
a <- h_quantum a
return (t,a)) states
Call ' DCP ' to find the difference between .
d <- dCP n 0 0 states
comment "EXIT: algorithm_TPP"
return $ map (\x -> x-m) $ expand (d + ms) (2*m)
* Algorithm 5 : \"DCP\ "
of on /m/ bits .
n_low_bits :: Int -> Int -> Psi_k -> [Bool]
n_low_bits m n p = take n $ boollist_of_int_lh m (toInteger(snd p))
elements of /l/ into pairs /(psi_p , psi_q)/ where
pairs together with the list of unpaired elements of
pairing :: Int -> Int -> [Psi_k] -> ([(Psi_k, Psi_k)], [Psi_k])
pairing m n l = find_partners (\p -> n_low_bits m n p) l
elements have /m/[sup 2 ] trailing zeros .
( ( /q/[sub 1 ] , /k/[sub 1 ] ) , ( /q/[sub 2 ] , /k/[sub 2 ] ) )
/k/= /k/[sub 1 ] ± /k/[sub 2 ] .
sieving :: Int -> Int -> [Psi_k] -> Circ [Psi_k]
sieving n m l = do
comment "ENTER: sieving"
l <- loop_with_indexM m l (\j l -> do
comment "ENTER: Pairing"
let mmj = m + m*j
(pairs, unpaired) = pairing n mmj l
qdiscard_psi_ks unpaired
comment "EXIT: Pairing"
Combine pairs ( Psi_k , Psi_l ) to get Psi_k±l .
associated state is of the form Psi_k
combined_states <- mapM (\((q,k),(q',l)) -> do
comment "ENTER: Combining"
q <- qnot q `controlled` q'
q <- measure q
sign <- dynamic_lift q
comment "EXIT: Combining"
return (sign, (q', (k-l)))) pairs
return the ones of the form Psi_k
let (plus, minus) = separate combined_states fst
qdiscard_psi_ks $ map snd plus
return $ map snd minus)
comment "EXIT: sieving"
return l
| Perform Kuperberg 's algorithm solving the Dihedral
Coset problem . The arguments are :
* /d/ , an integer to hold the output initially set to 0 ,
* /states/ , a list of ' CosetState 's .
uses 's sieve on the first /n/ elements of /states/
the result . Then it increments /s/ and repeats until /states/ is
dCP :: Int -> Integer -> Int -> [CosetState] -> Circ Integer
dCP n d s states = if s == n then return d else do
comment (printf "ENTER algorithm_DCP: n=%d d=%d s=%d" n d s)
let nn = 2^n
r = exp $ -( 2*pi*(fromIntegral d / fromIntegral(nn)) )
m = ceiling $ sqrt $ fromIntegral $ n-s-1
(l1, l2) = splitAt n states
Transform the first n coset states
comment "ENTER: TO_Psi_k"
l <- mapM (\(t,a) -> do
a <- qft_int a
ca <- measure a
k <- mmap fromIntegral $ dynamic_lift ca
t <- named_rotation "R" (r*(fromIntegral k)) t
return (t, k)) l1
comment "EXIT: To_Psi_k"
Sieve the Psi_k 's to get Psi_2^{n - s-1 } .
l <- sieving n m l
form Psi_2^{n - s-1 } and measuring it in the + /- basis . The
let ((q,k),psis) = find l (\x -> ((snd x) == 2^(n-s-1))) "The sieving process failed to produce a state of the form 2^k."
q <- map_hadamard q
q <- measure q
q <- dynamic_lift q
qdiscard_psi_ks psis
let d_lsb = int_of_boollist_unsigned_bh [q]
comment "EXIT: algorithm_DCP"
dCP n (d + (2^s)*d_lsb) (s+1) l2
|
0427d87341a212b4782d2911626a99690d02e7eff96c6ab1bbc2718193e81596 | tdammers/migrant | Run.hs | module Database.Migrant.Run
( migrate
, unsafeMigrate
, executePlan
, plan
, makePlan
, MigrationDirection (..)
)
where
import Database.Migrant.Driver.Class
import Database.Migrant.MigrationName
import Control.Monad (forM_)
data MigrationDirection
= MigrateUp
| MigrateDown
deriving (Show, Eq, Ord, Enum, Bounded)
-- | Create a migration plan based on the current situation on the database,
-- and the specified target.
plan :: Driver d
=> [MigrationName]
-> d
-> IO [(MigrationDirection, MigrationName)]
plan target driver = do
current <- getMigrations driver
return $ makePlan target current
-- | Make a plan from a previously loaded current situation and the specified
-- target.
makePlan :: [MigrationName]
-- ^ target
-> [MigrationName]
-- ^ current
-> [(MigrationDirection, MigrationName)]
makePlan [] []
-- Situation 0: nothing left to do
= []
makePlan [] xs
-- Situation 1: no more "up" targets left, but more migrations exist, so
-- we need to roll those back.
= [(MigrateDown, n) | n <- xs]
makePlan xs []
Situation 2 : only " up " targets left , run them .
= [(MigrateUp, n) | n <- xs]
makePlan (t:ts) (c:cs)
-- Situation 3: "up" targets exist, and we also have existing migrations
-- left. The same migration exists on both ends, so we can just skip
-- forward.
| t == c
= makePlan ts cs
-- Situation 4: both "up" targets and existing migrations are present but the
-- do not match, so we need to roll back existing migrations until a
-- consistent situation is restored.
| otherwise
= (MigrateDown, c):makePlan (t:ts) cs
-- | Apply a migration plan to a database.
-- This should generally be done within the same transaction as loading the
-- current situation from the database and creating a migration plan. Running
-- this action outside of a transaction may leave the database and migration
-- tracking in an inconsistent state.
executePlan :: Driver d
=> [(MigrationDirection, MigrationName)]
-> (MigrationName -> d -> IO ())
-> (MigrationName -> d -> IO ())
-> d
-> IO ()
executePlan migrationPlan up down driver = do
forM_ migrationPlan $ \(direction, name) -> do
let (run, mark) = case direction of
MigrateUp -> (up, markUp)
MigrateDown -> (down, markDown)
run name driver
mark name driver
-- | Safely (transactionally) infer and execute a migration.
migrate :: Driver d
=> [MigrationName]
^ Target situation
-> (MigrationName -> d -> IO ())
-- ^ Factory for \'up\' migration actions
-> (MigrationName -> d -> IO ())
-- ^ Factory for \'down\' migration actions
-> d
-> IO ()
migrate target up down driver =
withTransaction (unsafeMigrate target up down) driver
-- | Infer and execute a migration in a non-transactional fashion. This means
-- that migration failures may leave the database and migration tracking in
-- an inconsistent state, so you should never call this outside of a
-- transaction.
unsafeMigrate :: Driver d
=> [MigrationName]
^ Target situation
-> (MigrationName -> d -> IO ())
-- ^ Factory for \'up\' migration actions
-> (MigrationName -> d -> IO ())
-- ^ Factory for \'down\' migration actions
-> d
-> IO ()
unsafeMigrate target up down driver = do
initMigrations driver
migrationPlan <- plan target driver
executePlan migrationPlan up down driver
| null | https://raw.githubusercontent.com/tdammers/migrant/2bcbd3fc2eae1290f7cba6bd4fc7cc69ac0d24f2/migrant-core/src/Database/Migrant/Run.hs | haskell | | Create a migration plan based on the current situation on the database,
and the specified target.
| Make a plan from a previously loaded current situation and the specified
target.
^ target
^ current
Situation 0: nothing left to do
Situation 1: no more "up" targets left, but more migrations exist, so
we need to roll those back.
Situation 3: "up" targets exist, and we also have existing migrations
left. The same migration exists on both ends, so we can just skip
forward.
Situation 4: both "up" targets and existing migrations are present but the
do not match, so we need to roll back existing migrations until a
consistent situation is restored.
| Apply a migration plan to a database.
This should generally be done within the same transaction as loading the
current situation from the database and creating a migration plan. Running
this action outside of a transaction may leave the database and migration
tracking in an inconsistent state.
| Safely (transactionally) infer and execute a migration.
^ Factory for \'up\' migration actions
^ Factory for \'down\' migration actions
| Infer and execute a migration in a non-transactional fashion. This means
that migration failures may leave the database and migration tracking in
an inconsistent state, so you should never call this outside of a
transaction.
^ Factory for \'up\' migration actions
^ Factory for \'down\' migration actions | module Database.Migrant.Run
( migrate
, unsafeMigrate
, executePlan
, plan
, makePlan
, MigrationDirection (..)
)
where
import Database.Migrant.Driver.Class
import Database.Migrant.MigrationName
import Control.Monad (forM_)
data MigrationDirection
= MigrateUp
| MigrateDown
deriving (Show, Eq, Ord, Enum, Bounded)
plan :: Driver d
=> [MigrationName]
-> d
-> IO [(MigrationDirection, MigrationName)]
plan target driver = do
current <- getMigrations driver
return $ makePlan target current
makePlan :: [MigrationName]
-> [MigrationName]
-> [(MigrationDirection, MigrationName)]
makePlan [] []
= []
makePlan [] xs
= [(MigrateDown, n) | n <- xs]
makePlan xs []
Situation 2 : only " up " targets left , run them .
= [(MigrateUp, n) | n <- xs]
makePlan (t:ts) (c:cs)
| t == c
= makePlan ts cs
| otherwise
= (MigrateDown, c):makePlan (t:ts) cs
executePlan :: Driver d
=> [(MigrationDirection, MigrationName)]
-> (MigrationName -> d -> IO ())
-> (MigrationName -> d -> IO ())
-> d
-> IO ()
executePlan migrationPlan up down driver = do
forM_ migrationPlan $ \(direction, name) -> do
let (run, mark) = case direction of
MigrateUp -> (up, markUp)
MigrateDown -> (down, markDown)
run name driver
mark name driver
migrate :: Driver d
=> [MigrationName]
^ Target situation
-> (MigrationName -> d -> IO ())
-> (MigrationName -> d -> IO ())
-> d
-> IO ()
migrate target up down driver =
withTransaction (unsafeMigrate target up down) driver
unsafeMigrate :: Driver d
=> [MigrationName]
^ Target situation
-> (MigrationName -> d -> IO ())
-> (MigrationName -> d -> IO ())
-> d
-> IO ()
unsafeMigrate target up down driver = do
initMigrations driver
migrationPlan <- plan target driver
executePlan migrationPlan up down driver
|
312aa533efc4bfac5d2da43189370dd118fdb974b177fd856675a442253c45f9 | Ericson2314/lighthouse | StablePtr.hs | # OPTIONS_GHC -fno - implicit - prelude #
-----------------------------------------------------------------------------
-- |
-- Module : Foreign.StablePtr
Copyright : ( c ) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
Maintainer :
-- Stability : provisional
-- Portability : portable
--
This module is part of the Foreign Function Interface ( FFI ) and will usually
-- be imported via the module "Foreign".
--
-----------------------------------------------------------------------------
module Foreign.StablePtr
* Stable references to values
StablePtr -- abstract
, newStablePtr -- :: a -> IO (StablePtr a)
, deRefStablePtr -- :: StablePtr a -> IO a
, freeStablePtr -- :: StablePtr a -> IO ()
, castStablePtrToPtr -- :: StablePtr a -> Ptr ()
, castPtrToStablePtr -- :: Ptr () -> StablePtr a
, -- ** The C-side interface
-- $cinterface
) where
#ifdef __GLASGOW_HASKELL__
import GHC.Stable
#endif
#ifdef __HUGS__
import Hugs.StablePtr
#endif
#ifdef __NHC__
import NHC.FFI
( StablePtr
, newStablePtr
, deRefStablePtr
, freeStablePtr
, castStablePtrToPtr
, castPtrToStablePtr
)
#endif
-- $cinterface
--
-- The following definition is available to C programs inter-operating with
code when including the header @HsFFI.h@.
--
-- > typedef void *HsStablePtr; /* C representation of a StablePtr */
--
-- Note that no assumptions may be made about the values representing stable
-- pointers. In fact, they need not even be valid memory addresses. The only
guarantee provided is that if they are passed back to Haskell land , the
-- function 'deRefStablePtr' will be able to reconstruct the
Haskell value referred to by the stable pointer .
| null | https://raw.githubusercontent.com/Ericson2314/lighthouse/210078b846ebd6c43b89b5f0f735362a01a9af02/ghc-6.8.2/libraries/base/Foreign/StablePtr.hs | haskell | ---------------------------------------------------------------------------
|
Module : Foreign.StablePtr
License : BSD-style (see the file libraries/base/LICENSE)
Stability : provisional
Portability : portable
be imported via the module "Foreign".
---------------------------------------------------------------------------
abstract
:: a -> IO (StablePtr a)
:: StablePtr a -> IO a
:: StablePtr a -> IO ()
:: StablePtr a -> Ptr ()
:: Ptr () -> StablePtr a
** The C-side interface
$cinterface
$cinterface
The following definition is available to C programs inter-operating with
> typedef void *HsStablePtr; /* C representation of a StablePtr */
Note that no assumptions may be made about the values representing stable
pointers. In fact, they need not even be valid memory addresses. The only
function 'deRefStablePtr' will be able to reconstruct the | # OPTIONS_GHC -fno - implicit - prelude #
Copyright : ( c ) The University of Glasgow 2001
Maintainer :
This module is part of the Foreign Function Interface ( FFI ) and will usually
module Foreign.StablePtr
* Stable references to values
) where
#ifdef __GLASGOW_HASKELL__
import GHC.Stable
#endif
#ifdef __HUGS__
import Hugs.StablePtr
#endif
#ifdef __NHC__
import NHC.FFI
( StablePtr
, newStablePtr
, deRefStablePtr
, freeStablePtr
, castStablePtrToPtr
, castPtrToStablePtr
)
#endif
code when including the header @HsFFI.h@.
guarantee provided is that if they are passed back to Haskell land , the
Haskell value referred to by the stable pointer .
|
0ce26fd669eb49c11eea197ae90ee98963e691a78d0f7e0028581975983bd237 | FPBench/FPBench | imperative.rkt | ;
; Common compiler for all imperative languages
C , JS , Go , Rust , Sollya , Scala , Fortran
, MATLAB
;
#lang racket
(require "common.rkt" "compilers.rkt" "fpcore-visitor.rkt" "supported.rkt")
(provide (all-from-out "common.rkt" "compilers.rkt" "fpcore-visitor.rkt" "supported.rkt")
make-imperative-compiler
default-infix-ops
bool-ops
imperative-visitor
compile-operator) ; exported for MATLAB
;;;;;;;;;;;;;;;;;;;;;;;;;;; language-specific abstractions ;;;;;;;;;;;;;;;;;;;;;;;;;;;
(define *imperative-lang* (make-parameter #f))
(struct imperative
(name ; string representation of language
infix ; list of ops that use default infix formatter
operator ; procedure to format any non-infix operator
constant ; procedure to format constants
procedure that returns language name of an precision
declare ; procedure to format declarations
assign ; procedure to format assignments
round ; procedure to format (explicit) casts
implicit-round ; procedure to format implicit casts
round-mode ; procedure to format changes to rounding mode
use-vars ; procedure to format post-processing on new variables
program ; procedure to format the entire program
flags)) ; list of optional flags to change minor behavior
;;;;;;;;;;;;;;;;;;;;;;;;;;; flags ;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; All behavior flags a backend may raise. Several flag symbols had been
;; lost (only their trailing comments remained); restored here — each of
;; the restored symbols is queried via `compile-flag-raised?` below
;; (e.g. 'never-declare, 'do-while, 'end-block-with-end).
(define valid-flags
  '(no-parens-around-condition      ; removes parenthesis from 'if' and 'while' conditions (Go, Python, Rust)
    for-instead-of-while            ; changes 'while' to 'for' (Go)
    never-declare                   ; declarations are assignments (Sollya)
    semicolon-after-enclosing-brace ; end 'if' or 'while' blocks with "};" (Sollya)
    if-then                         ; "if (cond) then { ... }" (Sollya, Fortran)
    while-do                        ; "while (cond) do { ... }" (Sollya)
    round-after-operation           ; ensure rounding after any operation (Sollya)
    colon-instead-of-brace          ; use a colon rather than braces for code blocks (Python)
    use-elif                        ; use 'elif' instead of 'else if' (Python)
    use-elseif                      ; use 'elseif' instead of 'else if' (MATLAB)
    boolean-ops-use-name            ; boolean operators use alphabetic name rather than symbol (Python)
    spaces-for-tabs                 ; replace tabs with 4 spaces (Fortran, Rust)
    do-while                        ; changes 'while' to 'do while' (Fortran)
    end-block-with-name             ; blocks enclosed by "<x> ... end <x>", implicitly no braces (Fortran)
    end-block-with-end              ; blocks ended by "end", implicitly no braces (MATLAB)
    no-body))                       ; do not compile the body (C header)
;; Is `maybe-flag` one of the flags this compiler framework understands?
(define (valid-flag? maybe-flag)
  (set-member? valid-flags maybe-flag))

;; True when more than one flag from `specific` appears in `flags`.
(define (more-than-one-of? specific flags)
  (define hits
    (for/sum ([f (in-list flags)])
      (if (set-member? specific f) 1 0)))
  (> hits 1))

;; True when `flags` requests two mutually-exclusive behaviors:
;; block delimiter style (brace vs. colon vs. "end <name>" vs. "end")
;; or loop keyword style (while vs. for vs. do-while).
(define (flag-conflict? flags)
  (define block-style '(colon-instead-of-brace
                        end-block-with-name
                        end-block-with-end))
  (define loop-style '(for-instead-of-while
                       do-while))
  (ormap (lambda (group) (more-than-one-of? group flags))
         (list block-style loop-style)))
;; Wrap a compiled condition in parentheses, unless the target language
;; omits them around 'if'/'while' conditions.
(define (format-condition cond)
  (format (if (compile-flag-raised? 'no-parens-around-condition) "~a" "(~a)")
          cond))

;; Keyword introducing a loop in the target language.
(define (while-name)
  (if (compile-flag-raised? 'for-instead-of-while)
      "for"
      (if (compile-flag-raised? 'do-while)
          "do while"
          "while")))

;; Keyword introducing a secondary conditional branch.
(define (else-if-name)
  (if (compile-flag-raised? 'use-elif)
      "elif"
      (if (compile-flag-raised? 'use-elseif)
          "elseif"
          "else if")))

;; Text emitted directly after an 'if' condition ("then" languages).
(define (after-if)
  (if (compile-flag-raised? 'if-then) " then" ""))

;; Text emitted directly after a 'while' condition ("do" languages).
(define (after-while)
  (if (compile-flag-raised? 'while-do) " do" ""))
;; Emit a declaration line before an 'if' chain; languages where
;; declarations are really assignments ('never-declare) emit nothing.
(define (if-declare decl indent)
  (if (compile-flag-raised? 'never-declare)
      ""
      (format "~a~a\n" indent decl)))

;; printf template opening an 'if' block:
;; holes are indent, condition, after-if text.
(define (if-format)
  (cond
    [(compile-flag-raised? 'colon-instead-of-brace) "~aif ~a~a:\n"]
    [(or (compile-flag-raised? 'end-block-with-name)
         (compile-flag-raised? 'end-block-with-end))
     "~aif ~a~a\n"]
    [else "~aif ~a~a {\n"]))

;; printf template for an 'else if' branch:
;; holes are indent, else-if keyword, condition, after-if text.
(define (else-if-format)
  (cond
    [(compile-flag-raised? 'colon-instead-of-brace) "~a~a ~a~a:\n"]
    [(or (compile-flag-raised? 'end-block-with-name)
         (compile-flag-raised? 'end-block-with-end))
     "~a~a ~a~a\n"]
    [else "~a} ~a ~a~a {\n"]))

;; printf template for the final 'else' branch: hole is indent.
(define (else-format)
  (cond
    [(compile-flag-raised? 'colon-instead-of-brace) "~aelse:\n"]
    [(or (compile-flag-raised? 'end-block-with-name)
         (compile-flag-raised? 'end-block-with-end))
     "~aelse\n"]
    [else "~a} else {\n"]))

;; printf template opening a loop:
;; holes are indent, loop keyword, condition, after-while text.
(define (while-format)
  (cond
    [(compile-flag-raised? 'colon-instead-of-brace) "~a~a ~a~a:\n"]
    [(or (compile-flag-raised? 'end-block-with-name)
         (compile-flag-raised? 'end-block-with-end))
     "~a~a ~a~a\n"]
    [else "~a~a ~a~a {\n"]))
;; Text that closes an 'if' or 'while' block. `type` is 'if or 'while and
;; only matters for "end <name>" languages, which must repeat the keyword.
(define (end-of-block indent type)
  (cond
    [(compile-flag-raised? 'colon-instead-of-brace)
     ""]                                      ; Python-style: dedent closes the block
    [(compile-flag-raised? 'end-block-with-end)
     (format "~aend\n" indent)]
    [(compile-flag-raised? 'end-block-with-name)
     (define block-name
       (if (equal? type 'if)
           "if"
           (if (compile-flag-raised? 'do-while) "do" (while-name))))
     (format "~aend ~a\n" indent block-name)]
    [else
     ;; brace languages; Sollya wants "};" instead of "}"
     (format (if (compile-flag-raised? 'semicolon-after-enclosing-brace)
                 "~a};\n"
                 "~a}\n")
             indent)]))
;; Compile the program body, or skip it entirely for header-only targets
;; ('no-body, e.g. a C header): return empty text and the context unchanged.
(define (visit-body vtor body ctx)
  (if (compile-flag-raised? 'no-body)
      (values "" ctx)
      (visit/ctx vtor body ctx)))

;; One level of indentation: four spaces under 'spaces-for-tabs, else a tab.
(define (single-tab)
  (if (compile-flag-raised? 'spaces-for-tabs)
      "    "
      "\t"))
;;;;;;;;;;;;;;;;;;;;;;;;;;; shorthands ;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Is `flag` raised for the language currently being compiled?
(define (compile-flag-raised? flag)
  (define lang (*imperative-lang*))
  (set-member? (imperative-flags lang) flag))

;; Re-round the result of an operation when the target requires rounding
;; after every operation ('round-after-operation); otherwise pass through.
(define (compile-after-op x ctx)
  (cond
    [(compile-flag-raised? 'round-after-operation) (compile-round x ctx)]
    [else x]))
;; Format an infix operator application. `args` are already-compiled
;; strings. Handles unary negation/not, zero-argument comparisons
;; (vacuously true), binary arithmetic, variadic comparison chains, and
;; boolean connectives. Arithmetic results pass through `compile-after-op`
;; so 'round-after-operation languages can re-round them.
(define (compile-infix-operator op args ctx)
  (match (cons op args)
    ;; unary minus: parenthesize a leading "-" to avoid emitting "--x"
    [(list '- a)
     (compile-after-op (format (if (string-prefix? a "-") "-(~a)" "-~a") a) ctx)]
    [(list 'not a)
     (if (compile-flag-raised? 'boolean-ops-use-name)
         (format "not ~a" a)
         (format "!~a" a))]
    ;; comparison with no arguments is vacuously true
    [(list (or '== '!= '< '> '<= '>=))
     (compile-constant 'TRUE ctx)]
    [(list (or '+ '- '* '/) a b) ; binary arithmetic
     (compile-after-op (format "(~a ~a ~a)" a op b) ctx)]
    ;; variadic chained comparison: (< a b c) => (a < b && b < c)
    [(list (or '== '< '> '<= '>=) arg args ...)
     (format "(~a)"
             (string-join
              (for/list ([a (cons arg args)] [b args])
                (format "~a ~a ~a" a op b))
              (if (compile-flag-raised? 'boolean-ops-use-name)
                  " and "
                  " && ")))]
    ;; != is pairwise-distinct: every pair must compare unequal
    [(list '!= args ...)
     (format "(~a)"
             (string-join
              (let loop ([args args])
                (if (null? args)
                    '()
                    (append
                     (for/list ([b (cdr args)])
                       (format "~a != ~a" (car args) b))
                     (loop (cdr args)))))
              (if (compile-flag-raised? 'boolean-ops-use-name)
                  " and "
                  " && ")))]
    [(list 'and a ...)
     (define and-str (if (compile-flag-raised? 'boolean-ops-use-name) " and " " && "))
     (format "(~a)" (string-join (map ~a a) and-str))]
    [(list 'or a ...)
     (define or-str (if (compile-flag-raised? 'boolean-ops-use-name) " or " " || "))
     (format "(~a)" (string-join (map ~a a) or-str))]))
;; Dispatch an operator either to the shared infix formatter (if the
;; language lists it in its `infix` set) or to the language's own hook.
(define (compile-operator op args ctx)
  (if (set-member? (imperative-infix (*imperative-lang*)) op)
      (compile-infix-operator op args ctx)
      ((imperative-operator (*imperative-lang*)) op args ctx)))

;; User-defined function calls always use the language's operator hook.
(define (compile-function fn args ctx)
  ((imperative-operator (*imperative-lang*)) fn args ctx))

;; Format a numeric/named constant via the language hook.
(define (compile-constant x ctx)
  ((imperative-constant (*imperative-lang*)) x ctx))

;; Map an FPCore precision to the language's type name.
(define (compile-type x)
  ((imperative-type (*imperative-lang*)) x))

;; Format a declaration; with a value, 'never-declare languages emit an
;; assignment instead. Redundant outer parens on the value are trimmed.
(define compile-declaration
  (case-lambda
    [(var ctx) ((imperative-declare (*imperative-lang*)) var ctx)]
    [(var val ctx)
     (if (compile-flag-raised? 'never-declare)
         ((imperative-assign (*imperative-lang*)) var (trim-infix-parens val) ctx)
         ((imperative-declare (*imperative-lang*)) var (trim-infix-parens val) ctx))]))

;; Format an assignment (value's redundant outer parens trimmed).
(define (compile-assignment var val ctx)
  ((imperative-assign (*imperative-lang*)) var (trim-infix-parens val) ctx))

;; Format an explicit cast/rounding of an expression.
(define (compile-round expr ctx)
  ((imperative-round (*imperative-lang*)) expr ctx))

;; Format an implicit (precision-mismatch) cast. NOTE(review): the hook is
;; invoked below (visit-op_) with the argument's context and the target
;; context swapped relative to these parameter names — confirm the
;; intended argument order against the language backends.
(define (compile-implicit-round op arg ctx arg-ctx)
  ((imperative-implicit-round (*imperative-lang*)) op arg ctx arg-ctx))

;; Format a change of rounding mode (e.g. an fesetround-style statement).
(define (compile-round-mode mode ctx)
  ((imperative-round-mode (*imperative-lang*)) mode ctx))

;; Post-processing after new variables are introduced; `vars` are source
;; names, translated to their compiled names before the hook is called.
(define (compile-use-vars vars ctx)
  (define vars* (map (curry ctx-lookup-name ctx) vars))
  ((imperative-use-vars (*imperative-lang*)) vars* ctx))

;; Format the whole program from its compiled pieces.
(define (compile-program name args arg-ctxs body ret ctx used-vars)
  ((imperative-program (*imperative-lang*)) name args arg-ctxs body ret ctx used-vars))
;;;;;;;;;;;;;;;;;;;;;;;;;;; defaults ;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Operators formatted infix by default in every backend.
(define default-infix-ops '(+ - * / == != < > <= >= not and or))

;; Fallback operator formatter: render as a call, `fn(a, b, ...)`.
(define (default-compile-operator fn args ctx)
  (define arg-list
    (string-join (for/list ([a (in-list args)]) (~a a)) ", "))
  (format "~a(~a)" fn arg-list))

;; Fallback constant formatter: the constant's printed form.
(define (default-compile-constant x ctx)
  (~a x))
;; Fallback type formatter: every precision maps to the generic "var".
(define (default-compile-type type)
  "var")

;; Fallback declaration formatter: "<type> <var>;" without a value,
;; "<type> <var> = <val>;" with one. The type comes from the context's
;; :precision property.
(define default-compile-declaration
  (let ()
    (define (type-str ctx) (compile-type (ctx-lookup-prop ctx ':precision)))
    (case-lambda
      [(var ctx) (format "~a ~a;" (type-str ctx) var)]
      [(var val ctx) (format "~a ~a = ~a;" (type-str ctx) var val)])))

;; Fallback assignment formatter: "<var> = <val>;".
(define (default-compile-assignment var val ctx)
  (string-append (~a var) " = " (~a val) ";"))

;; Fallback explicit cast: the expression unchanged.
(define (default-compile-round expr ctx)
  expr)

;; Fallback implicit cast: the argument unchanged.
(define (default-compile-implicit-round op arg ctx arg-ctx)
  arg)

;; Fallback rounding-mode change: emits nothing extra.
(define (default-compile-round-mode expr ctx)
  expr)

;; Fallback new-variable post-processing: emits nothing.
(define (default-use-vars vars ctx)
  "")
;; Fallback whole-program template: a JavaScript-like function definition.
;; `body` is pre-rendered statement text (possibly empty); `ret` is the
;; final expression to return.
(define (default-compile-program name args arg-ctxs body ret ctx used-vars)
  (define arg-list (string-join (map ~a args) ", "))
  (define header (format "function ~a(~a) {\n" name arg-list))
  (define footer (format "\treturn ~a;\n}\n" ret))
  (string-append header
                 (if (non-empty-string? body) body "")
                 footer))
;;;;;;;;;;;;;;;;;;;;;;;;;;; utility ;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Starting compilation context: binary64 precision, round-to-nearest-even.
;; FPCore properties on the program/expression override these.
(define default-ctx
  (ctx-update-props
   (make-compiler-ctx)
   '(:precision binary64 :round nearestEven)))
;; Sanitize an identifier for C-like languages: keep [a-zA-Z0-9_],
;; replace any other character with "_<code-point>_" (e.g. "-" -> "_45_").
(define (fix-name name)
  (define (sanitize ch)
    (if (regexp-match #rx"[a-zA-Z0-9_]" (string ch))
        (string ch)
        (format "_~a_" (char->integer ch))))
  (apply string-append
         (for/list ([ch (in-string (~a name))])
           (sanitize ch))))
;; Operators whose result has boolean "precision"; visit-op_ uses this to
;; retag the context's :precision after compiling one of them.
(define bool-ops '(< > <= >= == != and or not
                   isfinite isinf isnan isnormal signbit))
;;;;;;;;;;;;;;;;;;;;;;;;;;; visitor ;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Compile an `if` into an if / else-if / else statement chain that assigns
;; a fresh temporary in each branch; the temporary's name is the returned
;; expression. Statements are printed to the current output port; the
;; returned values are (expression-string, context).
(define (visit-if/imperative vtor cond ift iff #:ctx ctx)
  (define indent (ctx-lookup-extra ctx 'indent))
  ;; Flatten nested ifs into a list of (condition branch) pairs;
  ;; the final element is (#t <else-expr>).
  (define branches
    (let loop ([expr (list 'if cond ift iff)])
      (match expr
        [(list 'if cond ift iff)
         (define-values (cond* _) (visit/ctx vtor cond ctx))
         (cons (list cond* ift) (loop iff))]
        [_ (list (list #t expr))])))
  (let loop ([branches branches] [first? #t] [ctx ctx] [ret #f])
    (match* (first? (car branches))
      ;; first branch: declare the result temporary, open the 'if'
      [(#t (list cond ift))
       (define-values (ctx* tmpvar) ; messy workaround to get ift context
         ;; dry-run the branch (output discarded) just to learn its
         ;; precision, so the temporary is declared with the right type
         (parameterize ([current-output-port (open-output-nowhere)])
           (define-values (_ ift-ctx) (visit/ctx vtor ift ctx))
           (define prec (ctx-lookup-prop ift-ctx ':precision))
           (ctx-random-name (ctx-update-props ctx `(:precision ,prec)))))
       (printf (if-declare (compile-declaration tmpvar ctx*) indent))
       (printf (if-format) indent (format-condition (trim-infix-parens cond)) (after-if))
       (define-values (ift* ift-ctx)
         (let ([ctx0 (ctx-set-extra ctx 'indent (format "~a~a" indent (single-tab)))])
           (visit/ctx vtor ift ctx0)))
       (printf "~a~a~a\n" indent (single-tab) (compile-assignment tmpvar ift* ctx))
       (loop (cdr branches) #f ctx* tmpvar)]
      ;; final 'else' branch: assign, close the block, return the temp
      [(_ (list #t last))
       (printf (else-format) indent)
       (define ctx* (ctx-set-extra ctx 'indent (format "~a~a" indent (single-tab))))
       (define-values (last* else-ctx) (visit/ctx vtor last ctx*))
       (printf "~a~a~a\n" indent (single-tab) (compile-assignment ret last* ctx))
       (printf (end-of-block indent 'if))
       (values ret else-ctx)]
      ;; middle branch: 'else if'
      [(_ (list cond elif))
       (printf (else-if-format) indent (else-if-name)
               (format-condition (trim-infix-parens cond))
               (after-if))
       (define ctx* (ctx-set-extra ctx 'indent (format "~a~a" indent (single-tab))))
       (define-values (elif* elif-ctx) (visit/ctx vtor elif ctx*))
       (printf "~a~a~a\n" indent (single-tab) (compile-assignment ret elif* ctx))
       (loop (cdr branches) #f ctx ret)])))
;; Compile `let`/`let*`: print one declaration per binding, then compile
;; the body under the extended context. For 'let, values are compiled in
;; the original context (parallel binding); for 'let*, in the accumulated
;; context (sequential binding).
(define (visit-let_/imperative vtor let_ vars vals body #:ctx ctx)
  (define indent (ctx-lookup-extra ctx 'indent))
  (define ctx*
    (for/fold ([ctx* ctx]) ([var (in-list vars)] [val (in-list vals)])
      (define-values (val* val-ctx) (visit/ctx vtor val (match let_ ['let ctx] ['let* ctx*])))
      (define prec (ctx-lookup-prop val-ctx ':precision))
      (define-values (name-ctx name) (ctx-unique-name ctx* var prec))
      (define decl-ctx (ctx-update-props ctx* `(:precision ,prec)))
      (printf "~a~a\n" indent (compile-declaration name val* decl-ctx))
      name-ctx))
  ;; language-specific hook after introducing new variables
  (printf "~a" (compile-use-vars vars ctx*))
  (visit/ctx vtor body ctx*))
;; Compile `while`/`while*`: declare the loop variables from their inits,
;; declare a boolean temporary holding the condition, emit the loop whose
;; body recomputes the updates and the condition, then compile the FPCore
;; body in the post-loop context. For 'while, updates are evaluated in
;; parallel (into fresh temporaries, then copied back); for 'while*,
;; sequentially in place.
(define (visit-while_/imperative vtor while_ cond vars inits updates body #:ctx ctx)
  (define indent (ctx-lookup-extra ctx 'indent))
  ;; declare loop variables, initialized from `inits`
  (define-values (ctx* vars*)
    (for/fold ([ctx* ctx] [vars* '()] #:result (values ctx* (reverse vars*)))
              ([var (in-list vars)] [val (in-list inits)])
      (define val-ctx (match while_ ['while ctx] ['while* ctx*]))
      (define-values (val* val*-ctx) (visit/ctx vtor val val-ctx))
      (define prec (ctx-lookup-prop val*-ctx ':precision))
      (define-values (name-ctx name) (ctx-unique-name ctx* var prec))
      (define decl-ctx (ctx-update-props ctx* `(:precision ,prec)))
      (printf "~a~a\n" indent (compile-declaration name val* decl-ctx))
      (values name-ctx (cons name vars*))))
  ;; boolean temporary that carries the loop condition
  ;; (the updated name-registry context from ctx-random-name is discarded;
  ;; only the generated name is kept)
  (define tmpvar
    (let-values ([(cx name) (ctx-random-name ctx* 'boolean)])
      name))
  (printf "~a" (compile-use-vars vars ctx*))
  (define-values (cond* cond*-ctx) (visit/ctx vtor cond ctx*))
  (printf "~a~a\n" indent (compile-declaration tmpvar cond* cond*-ctx))
  (printf (while-format) indent (while-name)
          (format-condition tmpvar) (after-while))
  (define ctx**
    (match while_
      ['while
       ;; parallel update: compute all new values into fresh temporaries
       ;; (in the pre-update context), then copy them back
       (define val-ctx (ctx-set-extra ctx* 'indent (format "~a~a" indent (single-tab))))
       (define-values (ctx** vars**)
         (for/fold ([ctx** ctx*] [vars* '()]
                    #:result (values (ctx-set-extra ctx* 'indent (format "~a~a" indent (single-tab)))
                                     (reverse vars*)))
                   ([var (in-list vars)] [val (in-list updates)])
           (define-values (val* val*-ctx) (visit/ctx vtor val val-ctx))
           (define prec (ctx-lookup-prop val*-ctx ':precision))
           (define-values (name-ctx name) (ctx-unique-name ctx** var prec))
           (define decl-ctx (ctx-update-props ctx** `(:precision ,prec)))
           (printf "~a~a~a\n" indent (single-tab) (compile-declaration name val* decl-ctx))
           (values name-ctx (cons name vars*))))
       (printf "~a" (compile-use-vars vars ctx**))
       (for ([var* (in-list vars*)] [var** (in-list vars**)])
         (printf "~a~a~a\n" indent (single-tab) (compile-assignment var* var** ctx**)))
       ctx**]
      ['while*
       ;; sequential update: assign each variable in place, in order
       (define ctx** (ctx-set-extra ctx* 'indent (format "~a~a" indent (single-tab))))
       (for ([var* (in-list vars*)] [val (in-list updates)])
         (let-values ([(val* _) (visit/ctx vtor val ctx**)])
           (printf "~a~a~a\n" indent (single-tab) (compile-assignment var* val* ctx**))))
       ctx**]))
  ;; recompute the loop condition at the bottom of the body
  (define-values (cond** cond**-ctx) (visit/ctx vtor cond ctx**))
  (printf "~a~a~a\n" indent (single-tab) (compile-assignment tmpvar cond** cond**-ctx))
  (printf (end-of-block indent 'while))
  (visit/ctx vtor body ctx*))
;; Compile an explicit `cast`: compile the inner expression, then wrap the
;; result in the language's explicit round/cast formatter.
(define (visit-cast/imperative vtor x #:ctx ctx)
  (let-values ([(inner inner-ctx) (visit/ctx vtor x ctx)])
    (values (compile-round inner ctx) inner-ctx)))
;; Compile a `!` (property annotation): evaluate the body under the
;; updated properties, binding its value to a fresh temporary. If the
;; rounding mode changes, bracket the declaration with mode-change
;; statements that switch to the new mode and back.
(define (visit-!/imperative vtor props body #:ctx ctx)
  (define indent (ctx-lookup-extra ctx 'indent))
  (define curr-prec (ctx-lookup-prop ctx ':precision))
  (define curr-round (ctx-lookup-prop ctx ':round))
  (define ctx* (ctx-update-props ctx props))
  (define new-prec (ctx-lookup-prop ctx* ':precision))
  (define new-round (ctx-lookup-prop ctx* ':round))
  ;; dry-run the body (output discarded) to learn its resulting context
  (define body-ctx
    (parameterize ([current-output-port (open-output-nowhere)])
      (let-values ([(_ body-ctx) (visit/ctx vtor body ctx*)])
        body-ctx)))
  (define body-prec (ctx-lookup-prop body-ctx ':precision))
  ;; temporary typed with the body's precision
  (define-values (ctx** tmpvar)
    (let ([ctx** (ctx-update-props ctx* `(:precision ,body-prec))])
      (ctx-random-name ctx**)))
  ;; switch rounding mode only if the annotation actually changes it
  (unless (equal? curr-round new-round)
    (printf "~a" (compile-round-mode new-round ctx)))
  ;; second (real) compilation of the body, now printing its statements
  (define-values (body* _) (visit/ctx vtor body ctx*))
  (printf "~a~a\n" indent (compile-declaration tmpvar body* ctx**))
  (unless (equal? curr-round new-round)
    (printf "~a" (compile-round-mode curr-round ctx)))
  (values tmpvar body-ctx))
;; Compile a built-in operator application. Arguments whose precision
;; differs from the surrounding context get an implicit cast. Boolean
;; operators retag the returned context's :precision as 'boolean.
(define (visit-op_/imperative vtor op args #:ctx ctx)
  (define prec (ctx-lookup-prop ctx ':precision))
  (define args*
    (for/list ([arg args])
      (define-values (arg* arg-ctx) (visit/ctx vtor arg ctx))
      (define arg-prec (ctx-lookup-prop arg-ctx ':precision))
      (if (equal? prec arg-prec)
          arg*
          ;; NOTE(review): arg-ctx/ctx order here is swapped relative to
          ;; compile-implicit-round's parameter names (op arg ctx arg-ctx)
          ;; — confirm against the backends' implicit-round hooks
          (compile-implicit-round op arg* arg-ctx ctx))))
  (values (compile-operator op args* ctx)
          (if (set-member? bool-ops op)
              (ctx-update-props ctx (list ':precision 'boolean))
              ctx)))
;; Compile a call to a user-defined function: compile each argument in the
;; current context, then format the call. The context is returned
;; unchanged (the callee's resulting precision is unknown here).
(define (visit-call/imperative vtor fn args #:ctx ctx)
  (define args*
    (for/list ([arg args])
      (define-values (arg* _) (visit/ctx vtor arg ctx))
      arg*))
  ;; Fix: emit the *compiled* arguments. Previously the raw `args` were
  ;; passed to compile-function, silently discarding the compiled forms
  ;; computed above (args* was never used).
  (values (compile-function fn args* ctx) ctx))
;; Compile a (digits m e b) literal by converting it to a number first.
(define (visit-digits/imperative vtor m e b #:ctx ctx)
  (visit/ctx vtor (digits->number m e b) ctx))

;; Compile a numeric literal via the language's constant formatter.
(define (visit-number/imperative vtor x #:ctx ctx)
  (values (compile-constant x ctx) ctx))

;; Compile a named constant (PI, TRUE, ...); the boolean constants retag
;; the returned context's :precision as 'boolean.
(define (visit-constant/imperative vtor x #:ctx ctx)
  (values (compile-constant x ctx)
          (if (set-member? '(TRUE FALSE) x)
              (ctx-update-props ctx (list ':precision 'boolean))
              ctx)))

;; Compile a variable reference: look up its compiled name and propagate
;; the precision it was declared with.
(define (visit-symbol/imperative vtor x #:ctx ctx)
  (define name (ctx-lookup-name ctx x))
  (define var-prec (ctx-lookup-prec ctx name))
  (values name (ctx-update-props ctx `(:precision ,var-prec))))
;; The shared expression visitor for all imperative backends: extends the
;; default compiler visitor with the imperative handlers defined above.
(define-expr-visitor default-compiler-visitor imperative-visitor
  [visit-if visit-if/imperative]
  [visit-let_ visit-let_/imperative]
  [visit-while_ visit-while_/imperative]
  [visit-cast visit-cast/imperative]
  [visit-! visit-!/imperative]
  [visit-call visit-call/imperative]
  [visit-op_ visit-op_/imperative]
  [visit-digits visit-digits/imperative]
  [visit-number visit-number/imperative]
  [visit-constant visit-constant/imperative]
  [visit-symbol visit-symbol/imperative])
;;;;;;;;;;;;;;;;;;;;;;;;;;; compiler constructor ;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Build a compiler (a procedure from FPCore program + name to program
;; text) for one imperative language. Formatting hooks default to the
;; generic implementations above; `flags` are validated against
;; `valid-flags` and checked for mutual exclusion.
(define (make-imperative-compiler name
                                  ; language behavior
                                  #:infix-ops [infix default-infix-ops]
                                  #:operator [operator default-compile-operator]
                                  #:constant [constant default-compile-constant]
                                  #:type [type default-compile-type]
                                  #:declare [declare default-compile-declaration]
                                  #:assign [assign default-compile-assignment]
                                  #:round [round default-compile-round]
                                  #:implicit-round [implicit-round default-compile-implicit-round]
                                  #:round-mode [round-mode default-compile-round-mode]
                                  #:use-vars [use-vars default-use-vars]
                                  #:program [program default-compile-program]
                                  #:flags [flags '()]
                                  ; visitor behavior
                                  #:visitor [vtor imperative-visitor]
                                  #:reserved [reserved '()]
                                  #:fix-name [fix-name-proc fix-name]
                                  #:indent [indent "\t"])
  (unless (andmap valid-flag? flags)
    (error 'make-imperative-compiler "undefined imperative flags: ~a" flags))
  (when (flag-conflict? flags)
    (error 'make-imperative-compiler "conflicting flags: ~a" flags))
  (define language
    (imperative name infix operator constant type
                declare assign round implicit-round round-mode
                use-vars program flags))
  ;; The compiler itself: parameterizes the gensym machinery and the
  ;; current language, then compiles one FPCore program.
  (lambda (prog name)
    (parameterize ([*gensym-used-names* (mutable-set)]
                   [*gensym-collisions* 1]
                   [*gensym-fix-name* fix-name-proc]
                   [*imperative-lang* language])
      ;; accept both anonymous and named FPCore forms
      (define-values (args props body)
        (match prog
          [(list 'FPCore (list args ...) props ... body) (values args props body)]
          [(list 'FPCore name (list args ...) props ... body) (values args props body)]))
      (define ctx
        (let ([ctx0 (ctx-update-props default-ctx props)])
          (let ([ctx1 (ctx-reserve-names ctx0 reserved)])
            (ctx-set-extra ctx1 'indent indent))))
      ; compiled function name
      (define fname
        (let-values ([(cx fname) (ctx-unique-name ctx name)])
          (begin0 fname (set! ctx cx))))
      ; compiled argument names (arguments may carry `!` annotations)
      (define-values (arg-names arg-ctxs)
        (for/lists (ns ps) ([arg (in-list args)])
          (match arg
            [(list '! props ... name)
             (define arg-ctx (ctx-update-props ctx props))
             (define arg-prec (ctx-lookup-prop arg-ctx ':precision))
             (define-values (cx aname) (ctx-unique-name ctx name arg-prec))
             (begin0 (values aname arg-ctx) (set! ctx cx))]
            [name
             (define-values (cx aname) (ctx-unique-name ctx name))
             (begin0 (values aname ctx) (set! ctx cx))])))
      ;; names that are not program variables (function name + reserved words)
      (define non-varnames (cons fname (map (curry ctx-lookup-name ctx) reserved)))
      ;; compile the body into a string port; the visitor prints statements
      ;; and returns the final expression
      (define p (open-output-string))
      (define-values (body* ret used-vars)
        (parameterize ([current-output-port p])
          (define-values (o cx) (visit-body vtor body ctx))
          (values (get-output-string p)
                  (trim-infix-parens o)
                  (remove* non-varnames (set->list (*gensym-used-names*))))))
      (compile-program fname arg-names arg-ctxs body* ret ctx used-vars))))
;; Smoke tests: compile representative FPCore programs with the default
;; (JavaScript-like) backend. These only check that compilation runs
;; without error; no output assertions are made.
(module+ test
  (require rackunit)
  (define compile0 (make-imperative-compiler "default"))
  ;; compile each expression as a separate function fn1, fn2, ...
  (define (compile* . exprs)
    (apply values (for/list ([expr exprs] [i (in-naturals 1)])
                    (compile0 expr (format "fn~a" i)))))
  (compile*
   '(FPCore (x) (if (< x 0) (+ x 1) (- x 1)))
   '(FPCore (x) (let ([x 1] [y x]) (+ x y)))
   '(FPCore (x) (let* ([x 1] [y x]) (+ x y)))
   '(FPCore (x) (while (< x 4) ([x 0.0 (+ x 1.0)]) x))
   '(FPCore (x) (while* (< x 4) ([x 0.0 (+ x 1.0)]) x))
   '(FPCore (x) (+ (foo x) 1))
   '(FPCore (x) (- (sqrt (+ x 1)) (sqrt x)))
   '(FPCore (a b) (+ (* a b) (- a b))))
  )
| null | https://raw.githubusercontent.com/FPBench/FPBench/11153e1ef2cfbebb016b5deb628485376eb62c85/src/imperative.rkt | racket |
Common compiler for all imperative languages
exported for MATLAB
language-specific abstractions ;;;;;;;;;;;;;;;;;;;;;;;;;;;
string representation of language
list of ops that use default infix formatter
procedure to format any non-infix operator
procedure to format constants
procedure to format declarations
procedure to format assignments
procedure to format (explicit) casts
procedure to format implicit casts
procedure to format changes to rounding mode
procedure to format post-processing on new variables
procedure to format the entire program
list of optional flags to change minor behavior
flags ;;;;;;;;;;;;;;;;;;;;;;;;;;;
removes parenthesis from 'if' and 'while' conditions (Go, Python, Rust)
changes 'while' to 'for' (Go)
end 'if' or 'while' blocks with "};" (Sollya)
"if (cond) then { ... }" (Sollya, Fortran)
"while (cond) do { ... }" (Sollya)
use a colon rather than braces for code blocks (Python)
use 'elif' instead of 'else if' (Python)
boolean operators use alphabetic name rather than symbol (Python)
do not compile the body (C header)
brace vs. colon vs. end <name> vs. end
while vs. for vs. do while
shorthands ;;;;;;;;;;;;;;;;;;;;;;;;;;;
binary arithmetic
defaults ;;;;;;;;;;;;;;;;;;;;;;;;;;;
utility ;;;;;;;;;;;;;;;;;;;;;;;;;;;
visitor ;;;;;;;;;;;;;;;;;;;;;;;;;;;
messy workaround to get ift context
compiler constructor ;;;;;;;;;;;;;;;;;;;;;;;;;;;
language behavior
visitor behavior
compiled function name
compiled argument names | C , JS , Go , Rust , Sollya , Scala , Fortran
, MATLAB
#lang racket
(require "common.rkt" "compilers.rkt" "fpcore-visitor.rkt" "supported.rkt")
(provide (all-from-out "common.rkt" "compilers.rkt" "fpcore-visitor.rkt" "supported.rkt")
make-imperative-compiler
default-infix-ops
bool-ops
imperative-visitor
(define *imperative-lang* (make-parameter #f))
(struct imperative
procedure that returns language name of an precision
(define valid-flags
declarations are assignments ( Sollya , , )
ensure rounding after any operation ( Sollya , , )
use ' elseif ' instead of ' else if ' ( MATLAB , )
replace tabs with 4 spaces ( Fortran , Rust )
changes ' while ' to ' do while ' ( Fortran )
blocks enclosed by " < x > ... end < x > , implicitly no braces " ( Fortran )
blocks ended by " end " , implictly no braces " ( MATLAB , )
(define (valid-flag? maybe-flag)
(set-member? valid-flags maybe-flag))
(define (more-than-one-of? specific flags)
(> (count (curry set-member? specific) flags) 1))
(define (flag-conflict? flags)
end-block-with-name
end-block-with-end)
flags)
do-while)
flags)))
(define (format-condition cond)
(if (compile-flag-raised? 'no-parens-around-condition)
(format "~a" cond)
(format "(~a)" cond)))
(define (while-name)
(cond
[(compile-flag-raised? 'for-instead-of-while) "for"]
[(compile-flag-raised? 'do-while) "do while"]
[else "while"]))
(define (else-if-name)
(cond
[(compile-flag-raised? 'use-elif) "elif"]
[(compile-flag-raised? 'use-elseif) "elseif"]
[else "else if"]))
(define (after-if)
(if (compile-flag-raised? 'if-then)
" then"
""))
(define (after-while)
(if (compile-flag-raised? 'while-do)
" do"
""))
(define (if-declare decl indent)
(if (compile-flag-raised? 'never-declare)
""
(format "~a~a\n" indent decl)))
(define (if-format)
(cond
[(compile-flag-raised? 'colon-instead-of-brace) "~aif ~a~a:\n"]
[(compile-flag-raised? 'end-block-with-name) "~aif ~a~a\n"]
[(compile-flag-raised? 'end-block-with-end) "~aif ~a~a\n"]
[else "~aif ~a~a {\n"]))
(define (else-if-format)
(cond
[(compile-flag-raised? 'colon-instead-of-brace) "~a~a ~a~a:\n"]
[(compile-flag-raised? 'end-block-with-name) "~a~a ~a~a\n"]
[(compile-flag-raised? 'end-block-with-end) "~a~a ~a~a\n"]
[else "~a} ~a ~a~a {\n"]))
(define (else-format)
(cond
[(compile-flag-raised? 'colon-instead-of-brace) "~aelse:\n"]
[(compile-flag-raised? 'end-block-with-name) "~aelse\n"]
[(compile-flag-raised? 'end-block-with-end) "~aelse\n"]
[else "~a} else {\n"]))
(define (while-format)
(cond
[(compile-flag-raised? 'colon-instead-of-brace) "~a~a ~a~a:\n"]
[(compile-flag-raised? 'end-block-with-name) "~a~a ~a~a\n"]
[(compile-flag-raised? 'end-block-with-end) "~a~a ~a~a\n"]
[else "~a~a ~a~a {\n"]))
(define (end-of-block indent type)
(cond
[(compile-flag-raised? 'colon-instead-of-brace) ""]
[(compile-flag-raised? 'end-block-with-end)
(format "~aend\n" indent)]
[(compile-flag-raised? 'end-block-with-name)
(define name
(cond
[(equal? type 'if) "if"]
[(compile-flag-raised? 'do-while) "do"]
[else (while-name)]))
(format "~aend ~a\n" indent name)]
[(compile-flag-raised? 'semicolon-after-enclosing-brace) (format "~a};\n" indent)]
[else (format "~a}\n" indent)]))
(define (visit-body vtor body ctx)
(if (compile-flag-raised? 'no-body)
(values "" ctx)
(visit/ctx vtor body ctx)))
(define (single-tab)
(if (compile-flag-raised? 'spaces-for-tabs)
" "
"\t"))
(define (compile-flag-raised? flag)
(set-member? (imperative-flags (*imperative-lang*)) flag))
(define (compile-after-op x ctx)
(if (compile-flag-raised? 'round-after-operation)
(compile-round x ctx)
x))
(define (compile-infix-operator op args ctx)
(match (cons op args)
[(list '- a)
(compile-after-op (format (if (string-prefix? a "-") "-(~a)" "-~a") a) ctx)]
[(list 'not a)
(if (compile-flag-raised? 'boolean-ops-use-name)
(format "not ~a" a)
(format "!~a" a))]
[(list (or '== '!= '< '> '<= '>=))
(compile-constant 'TRUE ctx)]
(compile-after-op (format "(~a ~a ~a)" a op b) ctx)]
[(list (or '== '< '> '<= '>=) arg args ...)
(format "(~a)"
(string-join
(for/list ([a (cons arg args)] [b args])
(format "~a ~a ~a" a op b))
(if (compile-flag-raised? 'boolean-ops-use-name)
" and "
" && ")))]
[(list '!= args ...)
(format "(~a)"
(string-join
(let loop ([args args])
(if (null? args)
'()
(append
(for/list ([b (cdr args)])
(format "~a != ~a" (car args) b))
(loop (cdr args)))))
(if (compile-flag-raised? 'boolean-ops-use-name)
" and "
" && ")))]
[(list 'and a ...)
(define and-str (if (compile-flag-raised? 'boolean-ops-use-name) " and " " && "))
(format "(~a)" (string-join (map ~a a) and-str))]
[(list 'or a ...)
(define or-str (if (compile-flag-raised? 'boolean-ops-use-name) " or " " || "))
(format "(~a)" (string-join (map ~a a) or-str))]))
(define (compile-operator op args ctx)
(if (set-member? (imperative-infix (*imperative-lang*)) op)
(compile-infix-operator op args ctx)
((imperative-operator (*imperative-lang*)) op args ctx)))
(define (compile-function fn args ctx)
((imperative-operator (*imperative-lang*)) fn args ctx))
(define (compile-constant x ctx)
((imperative-constant (*imperative-lang*)) x ctx))
(define (compile-type x)
((imperative-type (*imperative-lang*)) x))
(define compile-declaration
(case-lambda
[(var ctx) ((imperative-declare (*imperative-lang*)) var ctx)]
[(var val ctx)
(if (compile-flag-raised? 'never-declare)
((imperative-assign (*imperative-lang*)) var (trim-infix-parens val) ctx)
((imperative-declare (*imperative-lang*)) var (trim-infix-parens val) ctx))]))
(define (compile-assignment var val ctx)
((imperative-assign (*imperative-lang*)) var (trim-infix-parens val) ctx))
(define (compile-round expr ctx)
((imperative-round (*imperative-lang*)) expr ctx))
(define (compile-implicit-round op arg ctx arg-ctx)
((imperative-implicit-round (*imperative-lang*)) op arg ctx arg-ctx))
(define (compile-round-mode mode ctx)
((imperative-round-mode (*imperative-lang*)) mode ctx))
(define (compile-use-vars vars ctx)
(define vars* (map (curry ctx-lookup-name ctx) vars))
((imperative-use-vars (*imperative-lang*)) vars* ctx))
(define (compile-program name args arg-ctxs body ret ctx used-vars)
((imperative-program (*imperative-lang*)) name args arg-ctxs body ret ctx used-vars))
(define default-infix-ops '(+ - * / == != < > <= >= not and or))
(define (default-compile-operator fn args ctx)
(format "~a(~a)" fn (string-join (map ~a args) ", ")))
(define (default-compile-constant x ctx)
(~a x))
(define (default-compile-type type)
"var")
(define default-compile-declaration
(case-lambda
[(var ctx) (format "~a ~a;" (compile-type (ctx-lookup-prop ctx ':precision)) var)]
[(var val ctx) (format "~a ~a = ~a;" (compile-type (ctx-lookup-prop ctx ':precision)) var val)]))
(define (default-compile-assignment var val ctx)
(format "~a = ~a;" var val))
(define (default-compile-round expr ctx)
expr)
(define (default-compile-implicit-round op arg ctx arg-ctx)
arg)
(define (default-compile-round-mode expr ctx)
expr)
(define (default-use-vars vars ctx)
"")
(define (default-compile-program name args arg-ctxs body ret ctx used-vars)
(if (non-empty-string? body)
(format "function ~a(~a) {\n~a\treturn ~a;\n}\n"
name (string-join (map ~a args) ", ")
body ret)
(format "function ~a(~a) {\n\treturn ~a;\n}\n"
name (string-join (map ~a args) ", ")
ret)))
(define default-ctx
(ctx-update-props
(make-compiler-ctx)
'(:precision binary64 :round nearestEven)))
(define (fix-name name)
(string-join
(for/list ([char (~a name)])
(if (regexp-match #rx"[a-zA-Z0-9_]" (string char))
(string char)
(format "_~a_" (char->integer char))))
""))
(define bool-ops '(< > <= >= == != and or not
isfinite isinf isnan isnormal signbit))
(define (visit-if/imperative vtor cond ift iff #:ctx ctx)
(define indent (ctx-lookup-extra ctx 'indent))
(define branches
(let loop ([expr (list 'if cond ift iff)])
(match expr
[(list 'if cond ift iff)
(define-values (cond* _) (visit/ctx vtor cond ctx))
(cons (list cond* ift) (loop iff))]
[_ (list (list #t expr))])))
(let loop ([branches branches] [first? #t] [ctx ctx] [ret #f])
(match* (first? (car branches))
[(#t (list cond ift))
(parameterize ([current-output-port (open-output-nowhere)])
(define-values (_ ift-ctx) (visit/ctx vtor ift ctx))
(define prec (ctx-lookup-prop ift-ctx ':precision))
(ctx-random-name (ctx-update-props ctx `(:precision ,prec)))))
(printf (if-declare (compile-declaration tmpvar ctx*) indent))
(printf (if-format) indent (format-condition (trim-infix-parens cond)) (after-if))
(define-values (ift* ift-ctx)
(let ([ctx0 (ctx-set-extra ctx 'indent (format "~a~a" indent (single-tab)))])
(visit/ctx vtor ift ctx0)))
(printf "~a~a~a\n" indent (single-tab) (compile-assignment tmpvar ift* ctx))
(loop (cdr branches) #f ctx* tmpvar)]
[(_ (list #t last))
(printf (else-format) indent)
(define ctx* (ctx-set-extra ctx 'indent (format "~a~a" indent (single-tab))))
(define-values (last* else-ctx) (visit/ctx vtor last ctx*))
(printf "~a~a~a\n" indent (single-tab) (compile-assignment ret last* ctx))
(printf (end-of-block indent 'if))
(values ret else-ctx)]
[(_ (list cond elif))
(printf (else-if-format) indent (else-if-name)
(format-condition (trim-infix-parens cond))
(after-if))
(define ctx* (ctx-set-extra ctx 'indent (format "~a~a" indent (single-tab))))
(define-values (elif* elif-ctx) (visit/ctx vtor elif ctx*))
(printf "~a~a~a\n" indent (single-tab) (compile-assignment ret elif* ctx))
(loop (cdr branches) #f ctx ret)])))
(define (visit-let_/imperative vtor let_ vars vals body #:ctx ctx)
(define indent (ctx-lookup-extra ctx 'indent))
(define ctx*
(for/fold ([ctx* ctx]) ([var (in-list vars)] [val (in-list vals)])
(define-values (val* val-ctx) (visit/ctx vtor val (match let_ ['let ctx] ['let* ctx*])))
(define prec (ctx-lookup-prop val-ctx ':precision))
(define-values (name-ctx name) (ctx-unique-name ctx* var prec))
(define decl-ctx (ctx-update-props ctx* `(:precision ,prec)))
(printf "~a~a\n" indent (compile-declaration name val* decl-ctx))
name-ctx))
(printf "~a" (compile-use-vars vars ctx*))
(visit/ctx vtor body ctx*))
;; Compiles an FPCore `while`/`while*` loop to imperative code.
;; `while_` is the surface form ('while or 'while*): for 'while every update
;; expression is evaluated against the pre-iteration values; for 'while*
;; each binding sees the previously updated ones. Emits declarations for the
;; loop variables, a boolean temporary for the loop condition, the loop
;; header and body, and re-evaluates the condition at the bottom of the body.
(define (visit-while_/imperative vtor while_ cond vars inits updates body #:ctx ctx)
  (define indent (ctx-lookup-extra ctx 'indent))
  ;; Declare and initialize each loop variable, threading the naming context
  ;; so generated names stay unique. For 'while the initializers are visited
  ;; in the incoming ctx; for 'while* in the context accumulated so far.
  (define-values (ctx* vars*)
    (for/fold ([ctx* ctx] [vars* '()] #:result (values ctx* (reverse vars*)))
              ([var (in-list vars)] [val (in-list inits)])
      (define val-ctx (match while_ ['while ctx] ['while* ctx*]))
      (define-values (val* val*-ctx) (visit/ctx vtor val val-ctx))
      (define prec (ctx-lookup-prop val*-ctx ':precision))
      (define-values (name-ctx name) (ctx-unique-name ctx* var prec))
      (define decl-ctx (ctx-update-props ctx* `(:precision ,prec)))
      (printf "~a~a\n" indent (compile-declaration name val* decl-ctx))
      (values name-ctx (cons name vars*))))
  ;; Fresh boolean temporary holding the loop condition.
  ;; NOTE(review): the updated context `cx` is discarded here — presumably
  ;; ctx-random-name needs no threading; confirm.
  (define tmpvar
    (let-values ([(cx name) (ctx-random-name ctx* 'boolean)])
      name))
  (printf "~a" (compile-use-vars vars ctx*))
  (define-values (cond* cond*-ctx) (visit/ctx vtor cond ctx*))
  (printf "~a~a\n" indent (compile-declaration tmpvar cond* cond*-ctx))
  (printf (while-format) indent (while-name)
          (format-condition tmpvar) (after-while))
  ;; Emit the loop body: compute the update expressions, then assign back.
  (define ctx**
    (match while_
      ['while
       ;; Plain `while`: all updates are computed from the pre-iteration
       ;; values (val-ctx), stored in temporaries (vars**), and only then
       ;; assigned to the loop variables (vars*), so no update observes
       ;; another update's result.
       (define val-ctx (ctx-set-extra ctx* 'indent (format "~a~a" indent (single-tab))))
       (define-values (ctx** vars**)
         (for/fold ([ctx** ctx*] [vars* '()]
                    #:result (values (ctx-set-extra ctx* 'indent (format "~a~a" indent (single-tab)))
                                     (reverse vars*)))
                   ([var (in-list vars)] [val (in-list updates)])
           (define-values (val* val*-ctx) (visit/ctx vtor val val-ctx))
           (define prec (ctx-lookup-prop val*-ctx ':precision))
           (define-values (name-ctx name) (ctx-unique-name ctx** var prec))
           (define decl-ctx (ctx-update-props ctx** `(:precision ,prec)))
           (printf "~a~a~a\n" indent (single-tab) (compile-declaration name val* decl-ctx))
           (values name-ctx (cons name vars*))))
       (printf "~a" (compile-use-vars vars ctx**))
       (for ([var* (in-list vars*)] [var** (in-list vars**)])
         (printf "~a~a~a\n" indent (single-tab) (compile-assignment var* var** ctx**)))
       ctx**]
      ['while*
       ;; `while*`: updates are assigned directly in order, so later update
       ;; expressions see the new values of earlier variables.
       (define ctx** (ctx-set-extra ctx* 'indent (format "~a~a" indent (single-tab))))
       (for ([var* (in-list vars*)] [val (in-list updates)])
         (let-values ([(val* _) (visit/ctx vtor val ctx**)])
           (printf "~a~a~a\n" indent (single-tab) (compile-assignment var* val* ctx**))))
       ctx**]))
  ;; Recompute the loop condition at the bottom of each iteration.
  (define-values (cond** cond**-ctx) (visit/ctx vtor cond ctx**))
  (printf "~a~a~a\n" indent (single-tab) (compile-assignment tmpvar cond** cond**-ctx))
  (printf (end-of-block indent 'while))
  ;; The loop's value is the body expression, compiled with the loop
  ;; variables in scope.
  (visit/ctx vtor body ctx*))
;; Compile an explicit `cast`: visit the operand first, then wrap the
;; compiled expression in a rounding conversion for the current context.
(define (visit-cast/imperative vtor x #:ctx ctx)
  (define-values (inner inner-ctx) (visit/ctx vtor x ctx))
  (values (compile-round inner ctx) inner-ctx))
;; Compile a `!` property annotation. The body is first visited with output
;; suppressed, solely to learn the precision its result ends up with; a
;; temporary of that precision is then declared, and, if the annotation
;; changed the rounding mode, mode-switching statements are emitted around
;; the real compilation of the body.
;; NOTE(review): the body is visited twice (dry run + real run), so any
;; name-generation side effects happen twice; curr-prec/new-prec are bound
;; but unused — presumably intentional, confirm.
(define (visit-!/imperative vtor props body #:ctx ctx)
  (define indent (ctx-lookup-extra ctx 'indent))
  (define curr-prec (ctx-lookup-prop ctx ':precision))
  (define curr-round (ctx-lookup-prop ctx ':round))
  (define ctx* (ctx-update-props ctx props))
  (define new-prec (ctx-lookup-prop ctx* ':precision))
  (define new-round (ctx-lookup-prop ctx* ':round))
  ;; Dry run: discover the body's resulting precision without emitting code.
  (define body-ctx
    (parameterize ([current-output-port (open-output-nowhere)])
      (let-values ([(_ body-ctx) (visit/ctx vtor body ctx*)])
        body-ctx)))
  (define body-prec (ctx-lookup-prop body-ctx ':precision))
  (define-values (ctx** tmpvar)
    (let ([ctx** (ctx-update-props ctx* `(:precision ,body-prec))])
      (ctx-random-name ctx**)))
  ;; Switch the rounding mode for the body if the annotation changed it.
  (unless (equal? curr-round new-round)
    (printf "~a" (compile-round-mode new-round ctx)))
  (define-values (body* _) (visit/ctx vtor body ctx*))
  (printf "~a~a\n" indent (compile-declaration tmpvar body* ctx**))
  ;; Restore the previous rounding mode afterwards.
  (unless (equal? curr-round new-round)
    (printf "~a" (compile-round-mode curr-round ctx)))
  (values tmpvar body-ctx))
;; Compile an n-ary operator application. Each argument is visited and, if
;; its precision differs from the surrounding context's, wrapped in an
;; implicit rounding conversion. Boolean-valued operators switch the result
;; context's precision to 'boolean.
(define (visit-op_/imperative vtor op args #:ctx ctx)
  (define prec (ctx-lookup-prop ctx ':precision))
  (define args*
    (for/list ([arg args])
      (define-values (arg* arg-ctx) (visit/ctx vtor arg ctx))
      (define arg-prec (ctx-lookup-prop arg-ctx ':precision))
      (if (equal? prec arg-prec)
          arg*
          (compile-implicit-round op arg* arg-ctx ctx))))
  (values (compile-operator op args* ctx)
          (if (set-member? bool-ops op)
              (ctx-update-props ctx (list ':precision 'boolean))
              ctx)))
;; Compile a call to a user-defined/foreign function. Each argument is
;; visited first; the compiled argument expressions are then passed to the
;; target-language function-call formatter.
(define (visit-call/imperative vtor fn args #:ctx ctx)
  (define args*
    (for/list ([arg (in-list args)])
      (define-values (arg* _) (visit/ctx vtor arg ctx))
      arg*))
  ;; Bug fix: previously the raw (unvisited) `args` were handed to
  ;; compile-function, silently discarding the compiled forms in `args*`
  ;; (compare visit-op_/imperative, which uses args*).
  (values (compile-function fn args* ctx) ctx))
;; Compile a `digits` literal by converting (m, e, b) to a number and
;; re-dispatching through the visitor's number case.
(define (visit-digits/imperative vtor m e b #:ctx ctx)
  (visit/ctx vtor (digits->number m e b) ctx))
;; Compile a numeric literal with the target language's constant formatter.
(define (visit-number/imperative vtor x #:ctx ctx)
  (values (compile-constant x ctx) ctx))
;; Compile a named constant. TRUE/FALSE switch the result context's
;; precision to 'boolean; the quoted list is used via Racket's generic set
;; interface (lists implement set-member?).
(define (visit-constant/imperative vtor x #:ctx ctx)
  (values (compile-constant x ctx)
          (if (set-member? '(TRUE FALSE) x)
              (ctx-update-props ctx (list ':precision 'boolean))
              ctx)))
;; Compile a variable reference: look up the name generated for it and
;; propagate the variable's declared precision into the result context.
(define (visit-symbol/imperative vtor x #:ctx ctx)
  (define name (ctx-lookup-name ctx x))
  (define var-prec (ctx-lookup-prec ctx name))
  (values name (ctx-update-props ctx `(:precision ,var-prec))))
;; Visitor table mapping FPCore expression forms to the imperative
;; compilation procedures above; forms not listed fall back to
;; default-compiler-visitor.
(define-expr-visitor default-compiler-visitor imperative-visitor
  [visit-if visit-if/imperative]
  [visit-let_ visit-let_/imperative]
  [visit-while_ visit-while_/imperative]
  [visit-cast visit-cast/imperative]
  [visit-! visit-!/imperative]
  [visit-call visit-call/imperative]
  [visit-op_ visit-op_/imperative]
  [visit-digits visit-digits/imperative]
  [visit-number visit-number/imperative]
  [visit-constant visit-constant/imperative]
  [visit-symbol visit-symbol/imperative])
;; Builds a compiler (an FPCore -> string procedure) for an imperative
;; target language. Each #: keyword overrides one code-generation hook
;; (operator formatting, declarations, assignments, rounding, ...);
;; `flags` toggles optional behaviors and is validated up front; `reserved`
;; lists target-language keywords that generated names must avoid.
(define (make-imperative-compiler name
                                  #:infix-ops [infix default-infix-ops]
                                  #:operator [operator default-compile-operator]
                                  #:constant [constant default-compile-constant]
                                  #:type [type default-compile-type]
                                  #:declare [declare default-compile-declaration]
                                  #:assign [assign default-compile-assignment]
                                  #:round [round default-compile-round]
                                  #:implicit-round [implicit-round default-compile-implicit-round]
                                  #:round-mode [round-mode default-compile-round-mode]
                                  #:use-vars [use-vars default-use-vars]
                                  #:program [program default-compile-program]
                                  #:flags [flags '()]
                                  #:visitor [vtor imperative-visitor]
                                  #:reserved [reserved '()]
                                  #:fix-name [fix-name-proc fix-name]
                                  #:indent [indent "\t"])
  (unless (andmap valid-flag? flags)
    (error 'make-imperative-compiler "undefined imperative flags: ~a" flags))
  (when (flag-conflict? flags)
    (error 'make-imperative-compiler "conflicting flags: ~a" flags))
  (define language
    (imperative name infix operator constant type
                declare assign round implicit-round round-mode
                use-vars program flags))
  ;; The returned compiler: `prog` is the FPCore s-expression, `name` the
  ;; desired output function name (this `name` shadows the language name).
  (lambda (prog name)
    (parameterize ([*gensym-used-names* (mutable-set)]
                   [*gensym-collisions* 1]
                   [*gensym-fix-name* fix-name-proc]
                   [*imperative-lang* language])
      ;; Accept both the anonymous and the named FPCore surface forms.
      (define-values (args props body)
        (match prog
          [(list 'FPCore (list args ...) props ... body) (values args props body)]
          [(list 'FPCore name (list args ...) props ... body) (values args props body)]))
      ;; Seed the context with the core's properties, reserve target
      ;; keywords, and record the base indentation string.
      (define ctx
        (let ([ctx0 (ctx-update-props default-ctx props)])
          (let ([ctx1 (ctx-reserve-names ctx0 reserved)])
            (ctx-set-extra ctx1 'indent indent))))
      (define fname
        (let-values ([(cx fname) (ctx-unique-name ctx name)])
          (begin0 fname (set! ctx cx))))
      ;; Register each argument, honoring per-argument `!` property
      ;; annotations (which fix that argument's precision).
      (define-values (arg-names arg-ctxs)
        (for/lists (ns ps) ([arg (in-list args)])
          (match arg
            [(list '! props ... name)
             (define arg-ctx (ctx-update-props ctx props))
             (define arg-prec (ctx-lookup-prop arg-ctx ':precision))
             (define-values (cx aname) (ctx-unique-name ctx name arg-prec))
             (begin0 (values aname arg-ctx) (set! ctx cx))]
            [name
             (define-values (cx aname) (ctx-unique-name ctx name))
             (begin0 (values aname ctx) (set! ctx cx))])))
      (define non-varnames (cons fname (map (curry ctx-lookup-name ctx) reserved)))
      ;; Compile the body into a string port: the visitor prints statements,
      ;; returns the final expression, and every gensym'd name (minus the
      ;; function name and reserved words) is reported as a used variable.
      (define p (open-output-string))
      (define-values (body* ret used-vars)
        (parameterize ([current-output-port p])
          (define-values (o cx) (visit-body vtor body ctx))
          (values (get-output-string p)
                  (trim-infix-parens o)
                  (remove* non-varnames (set->list (*gensym-used-names*))))))
      (compile-program fname arg-names arg-ctxs body* ret ctx used-vars))))
;; Smoke tests: compile a handful of representative FPCore programs with the
;; default (generic) backend. Successful compilation is the check — the
;; generated text is not asserted against.
(module+ test
  (require rackunit)
  (define compile0 (make-imperative-compiler "default"))
  ;; Compile each expression under a distinct function name fn1, fn2, ...
  (define (compile* . exprs)
    (apply values (for/list ([expr exprs] [i (in-naturals 1)])
                    (compile0 expr (format "fn~a" i)))))
  (compile*
   '(FPCore (x) (if (< x 0) (+ x 1) (- x 1)))
   '(FPCore (x) (let ([x 1] [y x]) (+ x y)))
   '(FPCore (x) (let* ([x 1] [y x]) (+ x y)))
   '(FPCore (x) (while (< x 4) ([x 0.0 (+ x 1.0)]) x))
   '(FPCore (x) (while* (< x 4) ([x 0.0 (+ x 1.0)]) x))
   '(FPCore (x) (+ (foo x) 1))
   '(FPCore (x) (- (sqrt (+ x 1)) (sqrt x)))
   '(FPCore (a b) (+ (* a b) (- a b))))
  )
|
2dcd907cacdde504768d44ef1191810d8b31eb9b892db1448caed7525fcfbb02 | sadiqj/ocaml-esp32 | w04.ml | [@@@ocaml.warning "+4"]
type expr = E of int [@@unboxed]
let f x = match x with (E e) -> e
type t = A | B
let g x = match x with
| A -> 0
| _ -> 1
| null | https://raw.githubusercontent.com/sadiqj/ocaml-esp32/33aad4ca2becb9701eb90d779c1b1183aefeb578/testsuite/tests/warnings/w04.ml | ocaml | [@@@ocaml.warning "+4"]
type expr = E of int [@@unboxed]
let f x = match x with (E e) -> e
type t = A | B
let g x = match x with
| A -> 0
| _ -> 1
| |
bf5a118099c85a14f926c365c870e4cb0758433e7be1a2a6d42c1e7c845bf0ba | digikar99/numericals | sbcl-numericals.lisp | (in-package :sbcl-numericals.internals)
(define-binary-vectorized-op sbcl-numericals:d+
+ :double :avx2 (r a b)
(sb-assem:inst vaddpd r a b))
(define-binary-vectorized-op sbcl-numericals:d-
- :double :avx2 (r a b)
(sb-assem:inst vsubpd r a b))
(define-binary-vectorized-op sbcl-numericals:d*
* :double :avx2 (r a b)
(sb-assem:inst vmulpd r a b))
(define-binary-vectorized-op sbcl-numericals:d/
/ :double :avx2 (r a b)
(sb-assem:inst vdivpd r a b))
(define-binary-vectorized-op sbcl-numericals:s+
+ :single :avx2 (r a b)
(sb-assem:inst vaddps r a b))
(define-binary-vectorized-op sbcl-numericals:s-
- :single :avx2 (r a b)
(sb-assem:inst vsubps r a b))
(define-binary-vectorized-op sbcl-numericals:s/
/ :single :avx2 (r a b)
(sb-assem:inst vdivps r a b))
(define-binary-vectorized-op sbcl-numericals:s*
* :single :avx2 (r a b)
(sb-assem:inst vmulps r a b))
(define-binary-vectorized-op sbcl-numericals:d2+
+ :double :sse (r a b)
(move r a)
(sb-assem:inst addpd r b))
(define-binary-vectorized-op sbcl-numericals:d2-
- :double :sse (r a b)
(move r a)
(sb-assem:inst subpd r b))
(define-binary-vectorized-op sbcl-numericals:d2/
/ :double :sse (r a b)
(move r a)
(sb-assem:inst divpd r b))
(define-binary-vectorized-op sbcl-numericals:d2*
* :double :sse (r a b)
(move r a)
(sb-assem:inst mulpd r b))
(define-binary-vectorized-op sbcl-numericals:s2+
+ :single :sse (r a b)
(move r a)
(sb-assem:inst addps r b))
(define-binary-vectorized-op sbcl-numericals:s2-
- :single :sse (r a b)
(move r a)
(sb-assem:inst subps r b))
(define-binary-vectorized-op sbcl-numericals:s2/
/ :single :sse (r a b)
(move r a)
(sb-assem:inst divps r b))
(define-binary-vectorized-op sbcl-numericals:s2*
* :single :sse (r a b)
(move r a)
(sb-assem:inst mulps r b))
| null | https://raw.githubusercontent.com/digikar99/numericals/4f82b74e32b054f65110ee62ba080603c92ac103/sbcl-numericals/src/sbcl-numericals.lisp | lisp | (in-package :sbcl-numericals.internals)
(define-binary-vectorized-op sbcl-numericals:d+
+ :double :avx2 (r a b)
(sb-assem:inst vaddpd r a b))
(define-binary-vectorized-op sbcl-numericals:d-
- :double :avx2 (r a b)
(sb-assem:inst vsubpd r a b))
(define-binary-vectorized-op sbcl-numericals:d*
* :double :avx2 (r a b)
(sb-assem:inst vmulpd r a b))
(define-binary-vectorized-op sbcl-numericals:d/
/ :double :avx2 (r a b)
(sb-assem:inst vdivpd r a b))
(define-binary-vectorized-op sbcl-numericals:s+
+ :single :avx2 (r a b)
(sb-assem:inst vaddps r a b))
(define-binary-vectorized-op sbcl-numericals:s-
- :single :avx2 (r a b)
(sb-assem:inst vsubps r a b))
(define-binary-vectorized-op sbcl-numericals:s/
/ :single :avx2 (r a b)
(sb-assem:inst vdivps r a b))
(define-binary-vectorized-op sbcl-numericals:s*
* :single :avx2 (r a b)
(sb-assem:inst vmulps r a b))
(define-binary-vectorized-op sbcl-numericals:d2+
+ :double :sse (r a b)
(move r a)
(sb-assem:inst addpd r b))
(define-binary-vectorized-op sbcl-numericals:d2-
- :double :sse (r a b)
(move r a)
(sb-assem:inst subpd r b))
(define-binary-vectorized-op sbcl-numericals:d2/
/ :double :sse (r a b)
(move r a)
(sb-assem:inst divpd r b))
(define-binary-vectorized-op sbcl-numericals:d2*
* :double :sse (r a b)
(move r a)
(sb-assem:inst mulpd r b))
(define-binary-vectorized-op sbcl-numericals:s2+
+ :single :sse (r a b)
(move r a)
(sb-assem:inst addps r b))
(define-binary-vectorized-op sbcl-numericals:s2-
- :single :sse (r a b)
(move r a)
(sb-assem:inst subps r b))
(define-binary-vectorized-op sbcl-numericals:s2/
/ :single :sse (r a b)
(move r a)
(sb-assem:inst divps r b))
(define-binary-vectorized-op sbcl-numericals:s2*
* :single :sse (r a b)
(move r a)
(sb-assem:inst mulps r b))
| |
523f4446f8034a033c5b80667b0aa92b06f8c46787cf779353eaa7046e30e858 | ztellman/cantor | cantor.clj | Copyright ( c ) . All rights reserved .
;; The use and distribution terms for this software are covered by the
;; Eclipse Public License 1.0 (-1.0.php)
;; which can be found in the file epl-v10.html at the root of this distribution.
;; By using this software in any fashion, you are agreeing to be bound by
;; the terms of this license.
;; You must not remove this notice, or any other, from this software.
(ns ^{:author "Zachary Tellman"}
cantor
(:require [cantor
[vector :as vec]
[matrix :as mat]
[misc :as misc]
[range :as range]])
(:use [clojure.contrib.def :only (defmacro-)]))
;;
;; Re-exports a var from another namespace under the same name here, copying
;; its :doc and :arglists metadata so documentation tooling keeps working.
;; NOTE(review): relies on eval at macro-expansion time; the meta/intern
;; dance re-reads the var's metadata from its home namespace.
(defmacro- import-fn [sym]
  (let [m (meta (eval sym))
        m (meta (intern (:ns m) (:name m)))
        n (:name m)
        arglists (:arglists m)
        doc (:doc m)]
    (list `def (with-meta n {:doc doc :arglists (list 'quote arglists)}) (eval sym))))
;; arithmetic operators
(defn add
  "Add together scalars or vectors of equal dimension."
  ([a] (vec/add a))
  ([a b] (vec/add a b))
  ;; N-ary cases fold pairwise so only the binary protocol op is needed;
  ;; the grouping order is fixed (matters for floating-point results).
  ([a b c] (add (add a b) c))
  ([a b c & rest] (add (add a b c) (apply add rest))))
(defn sub
  "Subtract scalars or vectors of equal dimension.
   (sub a b c) is equivalent to (sub (sub a b) c)"
  ([a] (vec/sub a))
  ([a b] (vec/sub a b))
  ([a b c] (sub (sub a b) c))
  ;; Subtracting many values = subtracting their sum.
  ([a b c & rest] (sub (sub a b c) (apply add rest))))
(defn mul
  "Multiplies together a list of scalars, or a vector followed by any combination of
   scalars and vectors of the same dimension."
  ([a] (vec/mul a))
  ([a b] (vec/mul a b))
  ;; N-ary cases fold pairwise through the binary protocol op.
  ([a b c] (mul (mul a b) c))
  ([a b c & rest] (mul (mul a b c) (apply mul rest))))
(defn div
  "Divides a list of scalars, or a vector followed by any combination of scalars and
   vectors of the same dimension.
   (div a b c) = (div (div a b) c)"
  ([a] (vec/div a))
  ([a b] (vec/div a b))
  ([a b c] (div (div a b) c))
  ;; Dividing by many values = dividing by their product.
  ([a b c & rest] (div (div a b c) (apply mul rest))))
(import-fn #'vec/dot)
(import-fn #'vec/polar)
(import-fn #'vec/cartesian)
(defn lerp
  "Linear interpolation between a and b, where t=0 is a, and t=1 is b."
  [a b t]
  ;; a + (b - a) * t — single-multiply form, exact at t = 0.
  (let [delta (sub b a)]
    (add a (mul delta t))))
(defn length-squared
  "Calculates the length squared of v. Significantly more efficient than (length v)."
  [v]
  ;; |v|^2 = v . v, avoiding the square root.
  (dot v v))
(defn length
  "Calculates the length of v."
  [v]
  (let [len-sq (length-squared v)]
    (Math/sqrt len-sq)))
(defn normalize
  "Normalizes v, such that its direction remains the same, but its length is 1."
  [v]
  ;; NOTE(review): a zero-length v divides by zero here; the outcome depends
  ;; on the numeric type — confirm callers never pass the zero vector.
  (div v (length v)))
;; vector
(import-fn #'vec/vec2)
(import-fn #'vec/vec3)
(import-fn #'vec/vec4)
(import-fn #'vec/polar2)
(import-fn #'vec/polar3)
(import-fn #'vec/cartesian?)
(import-fn #'vec/polar?)
;; range
(import-fn #'range/range?)
(import-fn #'range/interval)
(import-fn #'range/box2)
(import-fn #'range/box3)
(import-fn #'range/upper)
(import-fn #'range/lower)
(import-fn #'range/offset)
;;(import-fn range/scale)
(import-fn #'range/intersection)
(import-fn #'range/union)
(import-fn #'range/inside?)
(import-fn #'range/size)
;; geometry
(import-fn #'vec/cross)
(import-fn #'mat/transform-matrix)
(import-fn #'mat/transform-vector)
(import-fn #'mat/rotation-matrix)
(import-fn #'mat/identity-matrix)
(import-fn #'mat/translation-matrix)
(import-fn #'mat/scaling-matrix)
(import-fn #'mat/normal-matrix)
(import-fn #'vec/map*)
(import-fn #'vec/all?)
;; misc
(import-fn #'misc/prime-factors)
(import-fn #'misc/rectangle-factors)
(import-fn #'misc/radians)
(import-fn #'misc/degrees)
;;
;; Extends one protocol implementation over the boxed numeric types used
;; here (Double, Integer, Float, Ratio) so scalars participate in the same
;; arithmetic protocols as vectors.
;; NOTE(review): java.lang.Long is absent — on Clojure versions where
;; integer literals are Longs these extensions would not apply; confirm the
;; targeted Clojure version makes this intentional.
(defmacro- extend-numbers [& body]
  `(do
     (extend-type java.lang.Double ~@body)
     (extend-type java.lang.Integer ~@body)
     (extend-type java.lang.Float ~@body)
     (extend-type clojure.lang.Ratio ~@body)))
;; Scalar implementations of the vector protocols, so plain numbers are
;; accepted wherever vectors are.
(extend-numbers
 vec/Arithmetic
 (add
  ([a] a)
  ([a b] (+ a b)))
 (sub
  ([a] (- a))
  ([a b] (- a b)))
 (mul
  ([a] a)
  ([a b] (* a b)))
 (div
  ([a] a)
  ([a b] (/ a b))))

;; A number behaves as a 1-dimensional tuple.
(extend-numbers
 vec/Tuple
 (map-
  ([n f] (f n))
  ([a b f] (f a b))
  ([a b rest f] (apply f (list* a b rest))))
 (all-
  ([n f] (f n))
  ([a b f] (f a b)))
 (dimension [_] 1))

;; A scalar in polar position is treated as an angle with unit radius.
(extend-numbers
 vec/Polar
 (cartesian [n] (cartesian (polar2 n 1))))
| null | https://raw.githubusercontent.com/ztellman/cantor/5cde24acf95ce7c20f642f8f2ff223247acc27e1/src/cantor.clj | clojure | The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 (-1.0.php)
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software.
arithmetic operators
vector
range
(import-fn range/scale)
geometry
misc
| Copyright ( c ) . All rights reserved .
(ns ^{:author "Zachary Tellman"}
cantor
(:require [cantor
[vector :as vec]
[matrix :as mat]
[misc :as misc]
[range :as range]])
(:use [clojure.contrib.def :only (defmacro-)]))
(defmacro- import-fn [sym]
(let [m (meta (eval sym))
m (meta (intern (:ns m) (:name m)))
n (:name m)
arglists (:arglists m)
doc (:doc m)]
(list `def (with-meta n {:doc doc :arglists (list 'quote arglists)}) (eval sym))))
(defn add
"Add together scalars or vectors of equal dimension."
([a] (vec/add a))
([a b] (vec/add a b))
([a b c] (add (add a b) c))
([a b c & rest] (add (add a b c) (apply add rest))))
(defn sub
"Subtract scalars or vectors of equal dimension.
(sub a b c) is equivalent to (sub (sub a b) c)"
([a] (vec/sub a))
([a b] (vec/sub a b))
([a b c] (sub (sub a b) c))
([a b c & rest] (sub (sub a b c) (apply add rest))))
(defn mul
"Multiplies together a list of scalars, or a vector followed by any combination of
scalars and vectors of the same dimension."
([a] (vec/mul a))
([a b] (vec/mul a b))
([a b c] (mul (mul a b) c))
([a b c & rest] (mul (mul a b c) (apply mul rest))))
(defn div
"Divides a list of scalars, or a vector followed by any combination of scalars and
vectors of the same dimension.
(div a b c) = (div (div a b) c)"
([a] (vec/div a))
([a b] (vec/div a b))
([a b c] (div (div a b) c))
([a b c & rest] (div (div a b c) (apply mul rest))))
(import-fn #'vec/dot)
(import-fn #'vec/polar)
(import-fn #'vec/cartesian)
(defn lerp
"Linear interpolation between a and b, where t=0 is a, and t=1 is b."
[a b t]
(add a (mul (sub b a) t)))
(defn length-squared
"Calculates the length squared of v. Significantly more efficient than (length v)."
[v]
(dot v v))
(defn length
"Calculates the length of v."
[v]
(Math/sqrt (length-squared v)))
(defn normalize
"Normalizes v, such that its direction remains the same, but its length is 1."
[v]
(div v (length v)))
(import-fn #'vec/vec2)
(import-fn #'vec/vec3)
(import-fn #'vec/vec4)
(import-fn #'vec/polar2)
(import-fn #'vec/polar3)
(import-fn #'vec/cartesian?)
(import-fn #'vec/polar?)
(import-fn #'range/range?)
(import-fn #'range/interval)
(import-fn #'range/box2)
(import-fn #'range/box3)
(import-fn #'range/upper)
(import-fn #'range/lower)
(import-fn #'range/offset)
(import-fn #'range/intersection)
(import-fn #'range/union)
(import-fn #'range/inside?)
(import-fn #'range/size)
(import-fn #'vec/cross)
(import-fn #'mat/transform-matrix)
(import-fn #'mat/transform-vector)
(import-fn #'mat/rotation-matrix)
(import-fn #'mat/identity-matrix)
(import-fn #'mat/translation-matrix)
(import-fn #'mat/scaling-matrix)
(import-fn #'mat/normal-matrix)
(import-fn #'vec/map*)
(import-fn #'vec/all?)
(import-fn #'misc/prime-factors)
(import-fn #'misc/rectangle-factors)
(import-fn #'misc/radians)
(import-fn #'misc/degrees)
(defmacro- extend-numbers [& body]
`(do
(extend-type java.lang.Double ~@body)
(extend-type java.lang.Integer ~@body)
(extend-type java.lang.Float ~@body)
(extend-type clojure.lang.Ratio ~@body)))
(extend-numbers
vec/Arithmetic
(add
([a] a)
([a b] (+ a b)))
(sub
([a] (- a))
([a b] (- a b)))
(mul
([a] a)
([a b] (* a b)))
(div
([a] a)
([a b] (/ a b))))
(extend-numbers
vec/Tuple
(map-
([n f] (f n))
([a b f] (f a b))
([a b rest f] (apply f (list* a b rest))))
(all-
([n f] (f n))
([a b f] (f a b)))
(dimension [_] 1))
(extend-numbers
vec/Polar
(cartesian [n] (cartesian (polar2 n 1))))
|
3dddc02a787e27afef970e4b501eab3ad8cc8a95ea920e1bb9919875116f6c15 | NorfairKing/mergeless | Persistent.hs | # LANGUAGE FlexibleContexts #
{-# LANGUAGE GADTs #-}
# LANGUAGE RecordWildCards #
# LANGUAGE TypeFamilies #
module Data.Mergeless.Persistent
( -- * Client side
clientMakeSyncRequestQuery,
clientMergeSyncResponseQuery,
-- ** Raw processors
clientSyncProcessor,
-- * Server side
serverProcessSyncQuery,
serverProcessSyncWithCustomIdQuery,
-- ** Sync processors
serverSyncProcessor,
serverSyncProcessorWithCustomId,
-- * Utils
-- ** Client side
setupUnsyncedClientQuery,
setupClientQuery,
clientGetStoreQuery,
-- ** Server side side
serverGetStoreQuery,
setupServerQuery,
)
where
import Control.Monad
import Control.Monad.IO.Class
import qualified Data.Map as M
import Data.Maybe
import Data.Mergeless
import qualified Data.Set as S
import Database.Persist
import Database.Persist.Sql
import Lens.Micro
-- | Make a sync request on the client side.
--
-- Partitions the client table into the three parts of a 'SyncRequest':
--
-- * added:   rows with no server id and not marked deleted
-- * synced:  rows with a server id and not marked deleted
-- * deleted: rows with a server id and marked deleted
--
-- Rows without a server id that are marked deleted never reached the
-- server, so they do not appear in the request at all.
clientMakeSyncRequestQuery ::
  ( Ord sid,
    PersistEntity clientRecord,
    PersistField sid,
    PersistEntityBackend clientRecord ~ SqlBackend,
    MonadIO m
  ) =>
  -- | How to read a record
  (clientRecord -> a) ->
  -- | The server id field
  EntityField clientRecord (Maybe sid) ->
  -- | The deleted field
  EntityField clientRecord Bool ->
  SqlPersistT m (SyncRequest (Key clientRecord) sid a)
clientMakeSyncRequestQuery func serverIdField deletedField = do
  syncRequestAdded <-
    M.fromList . map (\(Entity cid ct) -> (cid, func ct))
      <$> selectList
        [ serverIdField ==. Nothing,
          deletedField ==. False
        ]
        []
  syncRequestSynced <-
    -- mapMaybe only strips the Just; the (!=. Nothing) filter already
    -- guarantees the field is present.
    S.fromList . mapMaybe (\e -> e ^. fieldLens serverIdField)
      <$> selectList
        [ serverIdField !=. Nothing,
          deletedField ==. False
        ]
        []
  syncRequestDeleted <-
    S.fromList . mapMaybe (\e -> e ^. fieldLens serverIdField)
      <$> selectList
        [ serverIdField !=. Nothing,
          deletedField ==. True
        ]
        []
  pure SyncRequest {..}
-- | Merge a sync response on the client side.
--
-- Thin wrapper: delegates to 'mergeSyncResponseCustom' with the
-- 'clientSyncProcessor' built from the given constructor and fields.
clientMergeSyncResponseQuery ::
  ( PersistEntity clientRecord,
    PersistField sid,
    PersistEntityBackend clientRecord ~ SqlBackend,
    MonadIO m
  ) =>
  -- | Create an un-deleted synced record on the client side
  (sid -> a -> clientRecord) ->
  -- | The server id field
  EntityField clientRecord (Maybe sid) ->
  -- | The deleted field
  EntityField clientRecord Bool ->
  SyncResponse (Key clientRecord) sid a ->
  SqlPersistT m ()
clientMergeSyncResponseQuery func serverIdField deletedField = mergeSyncResponseCustom $ clientSyncProcessor func serverIdField deletedField
-- | The raw client-side sync processor backing 'clientMergeSyncResponseQuery'.
clientSyncProcessor ::
  ( PersistEntity clientRecord,
    PersistField sid,
    PersistEntityBackend clientRecord ~ SqlBackend,
    MonadIO m
  ) =>
  -- | Create an un-deleted synced record on the client side
  (sid -> a -> clientRecord) ->
  -- | The server id field
  EntityField clientRecord (Maybe sid) ->
  -- | The deleted field
  EntityField clientRecord Bool ->
  ClientSyncProcessor (Key clientRecord) sid a (SqlPersistT m)
clientSyncProcessor func serverIdField deletedField = ClientSyncProcessor {..}
  where
    -- Items added on the server by others: insert them as synced records.
    clientSyncProcessorSyncServerAdded m = forM_ (M.toList m) $ \(si, st) ->
      insert_ $ func si st
    -- Items we added that the server accepted: store their new server ids.
    clientSyncProcessorSyncClientAdded m = forM_ (M.toList m) $ \(cid, sid) ->
      update cid [serverIdField =. Just sid]
    -- Items deleted on the server: remove our copies unconditionally.
    clientSyncProcessorSyncServerDeleted s = forM_ (S.toList s) $ \sid ->
      deleteWhere [serverIdField ==. Just sid]
    -- Items we flagged deleted that the server acknowledged: drop only the
    -- tombstone rows (the deletedField ==. True guard).
    clientSyncProcessorSyncClientDeleted s = forM_ (S.toList s) $ \sid ->
      deleteWhere [serverIdField ==. Just sid, deletedField ==. True]
-- | Process a sync query on the server side.
--
-- Thin wrapper: delegates to 'processServerSyncCustom' with
-- 'serverSyncProcessor'.
serverProcessSyncQuery ::
  ( PersistEntity record,
    PersistEntityBackend record ~ SqlBackend,
    MonadIO m
  ) =>
  -- | Filters to select the relevant items
  --
  -- Use these if you have multiple users and you want to sync per-user
  [Filter record] ->
  -- | How to read a record
  (record -> a) ->
  -- | How to insert a _new_ record
  (a -> record) ->
  SyncRequest ci (Key record) a ->
  SqlPersistT m (SyncResponse ci (Key record) a)
serverProcessSyncQuery filters funcTo funcFrom = processServerSyncCustom $ serverSyncProcessor filters funcTo funcFrom
-- | A server sync processor that uses the sql key of the record as the
-- identifier.
serverSyncProcessor ::
  ( PersistEntity record,
    PersistEntityBackend record ~ SqlBackend,
    MonadIO m
  ) =>
  -- | Filters to select the relevant items
  --
  -- Use these if you have multiple users and you want to sync per-user
  [Filter record] ->
  -- | How to read a record
  (record -> a) ->
  -- | How to insert a _new_ record
  (a -> record) ->
  ServerSyncProcessor ci (Key record) a (SqlPersistT m)
serverSyncProcessor filters funcTo funcFrom =
  ServerSyncProcessor {..}
  where
    -- Read the full (filtered) server store, keyed by sql key.
    serverSyncProcessorRead = M.fromList . map (\(Entity i record) -> (i, funcTo record)) <$> selectList filters []
    -- Insert every client-added item; the fresh keys are reported back.
    serverSyncProcessorAddItems = mapM $ insert . funcFrom
    -- NOTE(review): deletion is by key only and does not re-check `filters`,
    -- so a request could delete rows outside the filtered set — confirm the
    -- ids are validated upstream in a multi-user setup.
    serverSyncProcessorDeleteItems s = do
      mapM_ delete s
      pure s
-- | Process a sync query on the server side with a custom id.
--
-- Thin wrapper: delegates to 'processServerSyncCustom' with
-- 'serverSyncProcessorWithCustomId'.
serverProcessSyncWithCustomIdQuery ::
  ( Ord sid,
    PersistEntity record,
    PersistField sid,
    PersistEntityBackend record ~ SqlBackend,
    MonadIO m
  ) =>
  -- | The action to generate new identifiers
  SqlPersistT m sid ->
  -- | The id field
  EntityField record sid ->
  -- | Filters to select the relevant items
  --
  -- Use these if you have multiple users and you want to sync per-user
  [Filter record] ->
  -- | How to read a record
  (record -> (sid, a)) ->
  -- | How to insert a _new_ record
  (sid -> a -> record) ->
  SyncRequest ci sid a ->
  SqlPersistT m (SyncResponse ci sid a)
serverProcessSyncWithCustomIdQuery genId idField filters funcTo funcFrom = processServerSyncCustom $ serverSyncProcessorWithCustomId genId idField filters funcTo funcFrom
-- | A server sync processor that uses a custom key (not the sql key) as the
-- identifier.
serverSyncProcessorWithCustomId ::
  ( Ord sid,
    PersistEntity record,
    PersistField sid,
    PersistEntityBackend record ~ SqlBackend,
    MonadIO m
  ) =>
  -- | The action to generate new identifiers
  SqlPersistT m sid ->
  -- | The id field
  EntityField record sid ->
  -- | Filters to select the relevant items
  --
  -- Use these if you have multiple users and you want to sync per-user
  [Filter record] ->
  -- | How to read a record
  (record -> (sid, a)) ->
  -- | How to insert a _new_ record
  (sid -> a -> record) ->
  ServerSyncProcessor ci sid a (SqlPersistT m)
serverSyncProcessorWithCustomId genId idField filters funcTo funcFrom =
  ServerSyncProcessor {..}
  where
    -- Read the full (filtered) server store, keyed by the custom id.
    serverSyncProcessorRead = M.fromList . map (funcTo . entityVal) <$> selectList filters []
    -- For each client-added item: generate a fresh custom id, insert, and
    -- report the id back.
    serverSyncProcessorAddItems = mapM $ \a -> do
      sid <- genId
      let record = funcFrom sid a
      insert_ record
      pure sid
    -- NOTE(review): deletion matches on idField only and does not include
    -- `filters`, so a request could delete rows outside the filtered set —
    -- confirm ids are validated upstream in a multi-user setup.
    serverSyncProcessorDeleteItems s = do
      forM_ s $ \sid -> deleteWhere [idField ==. sid]
      pure s
-- | Set up an unsynced client store by inserting every element as a fresh
-- client-side record.
--
-- You shouldn't need this.
setupUnsyncedClientQuery ::
  ( PersistEntity clientRecord,
    PersistEntityBackend clientRecord ~ SqlBackend,
    MonadIO m
  ) =>
  -- | How to insert a _new_ record
  (a -> clientRecord) ->
  [a] ->
  SqlPersistT m ()
setupUnsyncedClientQuery func items = forM_ items $ \item -> void $ insert (func item)
-- | Setup a client store
--
-- You shouldn't need this.
setupClientQuery ::
  ( PersistEntity clientRecord,
    PersistEntityBackend clientRecord ~ SqlBackend,
    MonadIO m
  ) =>
  -- | Create an un-deleted unsynced record on the client side
  (a -> clientRecord) ->
  -- | Create an un-deleted synced record on the client side
  (sid -> a -> clientRecord) ->
  -- | Create a deleted synced record on the client side
  (sid -> clientRecord) ->
  ClientStore (Key clientRecord) sid a ->
  SqlPersistT m ()
setupClientQuery funcU funcS funcD ClientStore {..} = do
  -- Added items keep their client-side keys, hence insertKey (unsafe if
  -- those keys already exist).
  forM_ (M.toList clientStoreAdded) $ \(cid, st) ->
    insertKey
      cid
      (funcU st)
  forM_ (M.toList clientStoreSynced) $ \(sid, st) ->
    insert_ (funcS sid st)
  -- Deleted items become tombstone rows carrying only the server id.
  forM_ (S.toList clientStoreDeleted) $ \sid ->
    insert_ (funcD sid)
-- | Get a client store.
--
-- Partitions the client table the same way as 'clientMakeSyncRequestQuery':
-- added (no server id, not deleted), synced (server id, not deleted) and
-- deleted (server id, deleted).
--
-- You shouldn't need this.
clientGetStoreQuery ::
  ( Ord sid,
    PersistEntity clientRecord,
    PersistField sid,
    PersistEntityBackend clientRecord ~ SqlBackend,
    MonadIO m
  ) =>
  -- | How to read a record
  (clientRecord -> a) ->
  -- | The server id field
  EntityField clientRecord (Maybe sid) ->
  -- | The deleted field
  EntityField clientRecord Bool ->
  SqlPersistT m (ClientStore (Key clientRecord) sid a)
clientGetStoreQuery func serverIdField deletedField = do
  clientStoreAdded <-
    M.fromList . map (\(Entity cid ct) -> (cid, func ct))
      <$> selectList
        [ serverIdField ==. Nothing,
          deletedField ==. False
        ]
        []
  clientStoreSynced <-
    -- mapMaybe only strips the Just; the (!=. Nothing) filter already
    -- guarantees the field is present.
    M.fromList . mapMaybe (\e@(Entity _ ct) -> (,) <$> (e ^. fieldLens serverIdField) <*> pure (func ct))
      <$> selectList
        [ serverIdField !=. Nothing,
          deletedField ==. False
        ]
        []
  clientStoreDeleted <-
    S.fromList . mapMaybe (\e -> e ^. fieldLens serverIdField)
      <$> selectList
        [ serverIdField !=. Nothing,
          deletedField ==. True
        ]
        []
  pure ClientStore {..}
-- | Get the server store from the database: every row, keyed by sql key.
--
-- You shouldn't need this.
serverGetStoreQuery ::
  ( PersistEntity record,
    PersistEntityBackend record ~ SqlBackend,
    MonadIO m
  ) =>
  -- | How to read a record
  (record -> a) ->
  SqlPersistT m (ServerStore (Key record) a)
serverGetStoreQuery func = ServerStore . M.fromList . map (\(Entity stid st) -> (stid, func st)) <$> selectList [] []
-- | Set up a server store in the database.
--
-- You shouldn't need this.
--
-- This uses the 'insertKey' function and is therefore unsafe: the given
-- keys are written verbatim and may clash with existing rows or sequences.
setupServerQuery ::
  ( PersistEntity record,
    PersistEntityBackend record ~ SqlBackend,
    MonadIO m
  ) =>
  -- | How to write a record
  (a -> record) ->
  ServerStore (Key record) a ->
  SqlPersistT m ()
setupServerQuery func ServerStore {..} = forM_ (M.toList serverStoreItems) $ \(i, e) -> void $ insertKey i $ func e
| null | https://raw.githubusercontent.com/NorfairKing/mergeless/370ceea253e71f47a20c8876f6f347b0c280aafe/mergeless-persistent/src/Data/Mergeless/Persistent.hs | haskell | # LANGUAGE GADTs #
* Client side
** Raw processors
* Server side
** Sync processors
* Utils
** Client side
** Server side side
| Make a sync request on the client side
| How to read a record
| The server id field
| The deleted field
| Merge a sync response on the client side
| The server id field
| The deleted field
| The server id field
| The deleted field
| Process a sync query on the server side.
| Filters to select the relevant items
Use these if you have multiple users and you want to sync per-user
| How to read a record
| How to insert a _new_ record
| A server sync processor that uses the sqlkey of the record as the name
| Filters to select the relevant items
Use these if you have multiple users and you want to sync per-user
| How to read a record
| How to insert a _new_ record
| Process a sync query on the server side with a custom id.
| The action to generate new identifiers
| The id field
| Filters to select the relevant items
Use these if you have multiple users and you want to sync per-user
| How to read a record
| How to insert a _new_ record
| A server sync processor that uses a custom key as the name
| The action to generate new identifiers
| The id field
| Filters to select the relevant items
Use these if you have multiple users and you want to sync per-user
| How to read a record
| How to insert a _new_ record
| Setup an unsynced client store
You shouldn't need this.
| How to insert a _new_ record
| Setup a client store
You shouldn't need this.
| Create an deleted synced record on the client side
| Get a client store
You shouldn't need this.
| How to red a record
| The server id field
| The deleted field
| Get the server store from the database
You shouldn't need this.
| How to read a record
| Set up a server store in the database.
You shouldn't need this.
| How to write a record | # LANGUAGE FlexibleContexts #
# LANGUAGE RecordWildCards #
# LANGUAGE TypeFamilies #
module Data.Mergeless.Persistent
clientMakeSyncRequestQuery,
clientMergeSyncResponseQuery,
clientSyncProcessor,
serverProcessSyncQuery,
serverProcessSyncWithCustomIdQuery,
serverSyncProcessor,
serverSyncProcessorWithCustomId,
setupUnsyncedClientQuery,
setupClientQuery,
clientGetStoreQuery,
serverGetStoreQuery,
setupServerQuery,
)
where
import Control.Monad
import Control.Monad.IO.Class
import qualified Data.Map as M
import Data.Maybe
import Data.Mergeless
import qualified Data.Set as S
import Database.Persist
import Database.Persist.Sql
import Lens.Micro
-- | Build a 'SyncRequest' from the client-side table.
--
-- Rows without a server id and not marked deleted become 'syncRequestAdded';
-- rows with a server id and not deleted become 'syncRequestSynced'; rows with
-- a server id and marked deleted become 'syncRequestDeleted'.
clientMakeSyncRequestQuery ::
  ( Ord sid,
    PersistEntity clientRecord,
    PersistField sid,
    PersistEntityBackend clientRecord ~ SqlBackend,
    MonadIO m
  ) =>
  -- | How to read a record
  (clientRecord -> a) ->
  -- | The server id field
  EntityField clientRecord (Maybe sid) ->
  -- | The deleted field
  EntityField clientRecord Bool ->
  SqlPersistT m (SyncRequest (Key clientRecord) sid a)
clientMakeSyncRequestQuery func serverIdField deletedField = do
  syncRequestAdded <-
    M.fromList . map (\(Entity cid ct) -> (cid, func ct))
      <$> selectList
        [ serverIdField ==. Nothing,
          deletedField ==. False
        ]
        []
  syncRequestSynced <-
    S.fromList . mapMaybe (\e -> e ^. fieldLens serverIdField)
      <$> selectList
        [ serverIdField !=. Nothing,
          deletedField ==. False
        ]
        []
  syncRequestDeleted <-
    S.fromList . mapMaybe (\e -> e ^. fieldLens serverIdField)
      <$> selectList
        [ serverIdField !=. Nothing,
          deletedField ==. True
        ]
        []
  pure SyncRequest {..}
-- | Merge a 'SyncResponse' into the client-side database, using
-- 'clientSyncProcessor'.
clientMergeSyncResponseQuery ::
  ( PersistEntity clientRecord,
    PersistField sid,
    PersistEntityBackend clientRecord ~ SqlBackend,
    MonadIO m
  ) =>
  -- | Create an un-deleted synced record on the client side
  (sid -> a -> clientRecord) ->
  -- | The server id field
  EntityField clientRecord (Maybe sid) ->
  -- | The deleted field
  EntityField clientRecord Bool ->
  SyncResponse (Key clientRecord) sid a ->
  SqlPersistT m ()
clientMergeSyncResponseQuery func serverIdField deletedField = mergeSyncResponseCustom $ clientSyncProcessor func serverIdField deletedField

-- | A 'ClientSyncProcessor' backed by persistent queries.
clientSyncProcessor ::
  ( PersistEntity clientRecord,
    PersistField sid,
    PersistEntityBackend clientRecord ~ SqlBackend,
    MonadIO m
  ) =>
  -- | Create an un-deleted synced record on the client side
  (sid -> a -> clientRecord) ->
  EntityField clientRecord (Maybe sid) ->
  EntityField clientRecord Bool ->
  ClientSyncProcessor (Key clientRecord) sid a (SqlPersistT m)
clientSyncProcessor func serverIdField deletedField = ClientSyncProcessor {..}
  where
    -- Items added on the server: insert them locally as synced records.
    clientSyncProcessorSyncServerAdded m = forM_ (M.toList m) $ \(si, st) ->
      insert_ $ func si st
    -- Local additions acknowledged by the server: record the new server id.
    clientSyncProcessorSyncClientAdded m = forM_ (M.toList m) $ \(cid, sid) ->
      update cid [serverIdField =. Just sid]
    -- Items deleted on the server: remove the corresponding local records.
    clientSyncProcessorSyncServerDeleted s = forM_ (S.toList s) $ \sid ->
      deleteWhere [serverIdField ==. Just sid]
    -- Local deletions acknowledged by the server: drop the tombstone rows.
    clientSyncProcessorSyncClientDeleted s = forM_ (S.toList s) $ \sid ->
      deleteWhere [serverIdField ==. Just sid, deletedField ==. True]
-- | Process a sync request on the server side, using the sql key of the
-- record as the server-side identifier; see 'serverSyncProcessor'.
serverProcessSyncQuery ::
  ( PersistEntity record,
    PersistEntityBackend record ~ SqlBackend,
    MonadIO m
  ) =>
  -- | Filters to select the relevant items
  [Filter record] ->
  -- | How to read a record
  (record -> a) ->
  -- | How to insert a _new_ record
  (a -> record) ->
  SyncRequest ci (Key record) a ->
  SqlPersistT m (SyncResponse ci (Key record) a)
serverProcessSyncQuery filters funcTo funcFrom = processServerSyncCustom $ serverSyncProcessor filters funcTo funcFrom

-- | A server sync processor that uses the sql key of the record as the
-- server-side identifier.
serverSyncProcessor ::
  ( PersistEntity record,
    PersistEntityBackend record ~ SqlBackend,
    MonadIO m
  ) =>
  [Filter record] ->
  (record -> a) ->
  (a -> record) ->
  ServerSyncProcessor ci (Key record) a (SqlPersistT m)
serverSyncProcessor filters funcTo funcFrom =
  ServerSyncProcessor {..}
  where
    -- Read the whole (filtered) server store.
    serverSyncProcessorRead = M.fromList . map (\(Entity i record) -> (i, funcTo record)) <$> selectList filters []
    -- Insert new items; the database assigns the keys, which are returned.
    serverSyncProcessorAddItems = mapM $ insert . funcFrom
    -- Delete the given items and acknowledge all of them.
    serverSyncProcessorDeleteItems s = do
      mapM_ delete s
      pure s
-- | Process a sync request on the server side with a custom (generated)
-- identifier instead of the sql key; see 'serverSyncProcessorWithCustomId'.
serverProcessSyncWithCustomIdQuery ::
  ( Ord sid,
    PersistEntity record,
    PersistField sid,
    PersistEntityBackend record ~ SqlBackend,
    MonadIO m
  ) =>
  -- | The action to generate new identifiers
  SqlPersistT m sid ->
  -- | The id field
  EntityField record sid ->
  -- | Filters to select the relevant items
  [Filter record] ->
  -- | How to read a record
  (record -> (sid, a)) ->
  -- | How to insert a _new_ record
  (sid -> a -> record) ->
  SyncRequest ci sid a ->
  SqlPersistT m (SyncResponse ci sid a)
serverProcessSyncWithCustomIdQuery genId idField filters funcTo funcFrom = processServerSyncCustom $ serverSyncProcessorWithCustomId genId idField filters funcTo funcFrom

-- | A server sync processor that uses a custom key as the server-side
-- identifier.
serverSyncProcessorWithCustomId ::
  ( Ord sid,
    PersistEntity record,
    PersistField sid,
    PersistEntityBackend record ~ SqlBackend,
    MonadIO m
  ) =>
  -- | The action to generate new identifiers
  SqlPersistT m sid ->
  -- | The id field
  EntityField record sid ->
  -- | Filters to select the relevant items
  [Filter record] ->
  (record -> (sid, a)) ->
  (sid -> a -> record) ->
  ServerSyncProcessor ci sid a (SqlPersistT m)
serverSyncProcessorWithCustomId genId idField filters funcTo funcFrom =
  ServerSyncProcessor {..}
  where
    serverSyncProcessorRead = M.fromList . map (funcTo . entityVal) <$> selectList filters []
    -- Generate a fresh id for each added item, insert it, and return the id.
    serverSyncProcessorAddItems = mapM $ \a -> do
      sid <- genId
      let record = funcFrom sid a
      insert_ record
      pure sid
    serverSyncProcessorDeleteItems s = do
      forM_ s $ \sid -> deleteWhere [idField ==. sid]
      pure s
-- | Set up an unsynced client store: every item is inserted as a fresh,
-- not-yet-synced record.
--
-- You shouldn't need this outside of tests.
setupUnsyncedClientQuery ::
  ( PersistEntity clientRecord,
    PersistEntityBackend clientRecord ~ SqlBackend,
    MonadIO m
  ) =>
  -- | How to insert a _new_ record
  (a -> clientRecord) ->
  [a] ->
  SqlPersistT m ()
setupUnsyncedClientQuery mkRecord items =
  -- Insert each item in turn; the generated keys are discarded.
  forM_ items $ \item ->
    insert (mkRecord item)
-- | Set up a client store in the database.
--
-- You shouldn't need this outside of tests.
setupClientQuery ::
  ( PersistEntity clientRecord,
    PersistEntityBackend clientRecord ~ SqlBackend,
    MonadIO m
  ) =>
  -- | Create an un-deleted unsynced record on the client side
  (a -> clientRecord) ->
  -- | Create an un-deleted synced record on the client side
  (sid -> a -> clientRecord) ->
  -- | Create a deleted synced record on the client side
  (sid -> clientRecord) ->
  ClientStore (Key clientRecord) sid a ->
  SqlPersistT m ()
setupClientQuery funcU funcS funcD ClientStore {..} = do
  -- Added records must keep their client-side keys, hence 'insertKey'.
  forM_ (M.toList clientStoreAdded) $ \(cid, st) ->
    insertKey
      cid
      (funcU st)
  forM_ (M.toList clientStoreSynced) $ \(sid, st) ->
    insert_ (funcS sid st)
  -- Deleted items are stored as tombstone records carrying only the server id.
  forM_ (S.toList clientStoreDeleted) $ \sid ->
    insert_ (funcD sid)
-- | Read the 'ClientStore' from the client-side database.
--
-- You shouldn't need this outside of tests.
clientGetStoreQuery ::
  ( Ord sid,
    PersistEntity clientRecord,
    PersistField sid,
    PersistEntityBackend clientRecord ~ SqlBackend,
    MonadIO m
  ) =>
  -- | How to read a record
  (clientRecord -> a) ->
  -- | The server id field
  EntityField clientRecord (Maybe sid) ->
  -- | The deleted field
  EntityField clientRecord Bool ->
  SqlPersistT m (ClientStore (Key clientRecord) sid a)
clientGetStoreQuery func serverIdField deletedField = do
  clientStoreAdded <-
    M.fromList . map (\(Entity cid ct) -> (cid, func ct))
      <$> selectList
        [ serverIdField ==. Nothing,
          deletedField ==. False
        ]
        []
  clientStoreSynced <-
    M.fromList . mapMaybe (\e@(Entity _ ct) -> (,) <$> (e ^. fieldLens serverIdField) <*> pure (func ct))
      <$> selectList
        [ serverIdField !=. Nothing,
          deletedField ==. False
        ]
        []
  clientStoreDeleted <-
    S.fromList . mapMaybe (\e -> e ^. fieldLens serverIdField)
      <$> selectList
        [ serverIdField !=. Nothing,
          deletedField ==. True
        ]
        []
  pure ClientStore {..}

-- | Get the server store from the database.
--
-- You shouldn't need this outside of tests.
serverGetStoreQuery ::
  ( PersistEntity record,
    PersistEntityBackend record ~ SqlBackend,
    MonadIO m
  ) =>
  -- | How to read a record
  (record -> a) ->
  SqlPersistT m (ServerStore (Key record) a)
serverGetStoreQuery func = ServerStore . M.fromList . map (\(Entity stid st) -> (stid, func st)) <$> selectList [] []

-- | Set up a server store in the database.
--
-- This uses the 'insertKey' function and is therefore unsafe.
-- You shouldn't need this outside of tests.
setupServerQuery ::
  ( PersistEntity record,
    PersistEntityBackend record ~ SqlBackend,
    MonadIO m
  ) =>
  -- | How to write a record
  (a -> record) ->
  ServerStore (Key record) a ->
  SqlPersistT m ()
setupServerQuery func ServerStore {..} = forM_ (M.toList serverStoreItems) $ \(i, e) -> void $ insertKey i $ func e
|
85f69c7c68d77d1ab51540616b599ea7022c0286e03a5b2ec9d6adc46a376b31 | danr/hipspec | PropT33.hs | module PropT33 where
import Prelude(Bool(..))
import Zeno
-- Definitions
-- NOTE: these are theorem-prover benchmark definitions (Zeno/HipSpec);
-- their exact recursive shape is part of the benchmark, so only comments
-- are added here.

-- | Boolean conjunction (Prelude is hidden; redefined by pattern matching).
True && x = x
_ && _ = False

-- | Boolean disjunction.
False || x = x
_ || _ = True

-- | Boolean negation.
not True = False
not False = True

-- Nats

-- | Peano natural numbers.
data Nat = S Nat | Z

-- | Addition by recursion on the first argument.
(+) :: Nat -> Nat -> Nat
Z + y = y
(S x) + y = S (x + y)

-- | Multiplication defined in terms of '+'.
(*) :: Nat -> Nat -> Nat
Z * _ = Z
(S x) * y = y + (x * y)

-- | Structural equality and inequality.
(==),(/=) :: Nat -> Nat -> Bool
Z == Z = True
Z == _ = False
S _ == Z = False
S x == S y = x == y
x /= y = not (x == y)

-- | Less-than-or-equal.
(<=) :: Nat -> Nat -> Bool
Z <= _ = True
_ <= Z = False
S x <= S y = x <= y

-- | Named small constants.
one, zero :: Nat
zero = Z
one = S Z

-- | Double a natural number.
double :: Nat -> Nat
double Z = Z
double (S x) = S (S (double x))

-- | Evenness by two-step recursion.
even :: Nat -> Bool
even Z = True
even (S Z) = False
even (S (S x)) = even x

-- | Integer halving (rounds down).
half :: Nat -> Nat
half Z = Z
half (S Z) = Z
half (S (S x)) = S (half x)

-- | Accumulator-based multiplication: @mult x y acc = x * y + acc@.
mult :: Nat -> Nat -> Nat -> Nat
mult Z _ acc = acc
mult (S x) y acc = mult x y (y + acc)

-- | Factorial.
fac :: Nat -> Nat
fac Z = S Z
fac (S x) = S x * fac x

-- | Accumulator-based factorial: @qfac x acc = fac x * acc@.
qfac :: Nat -> Nat -> Nat
qfac Z acc = acc
qfac (S x) acc = qfac x (S x * acc)

-- | Exponentiation.
exp :: Nat -> Nat -> Nat
exp _ Z = S Z
exp x (S n) = x * exp x n

-- | Accumulator-based exponentiation: @qexp x n acc = exp x n * acc@.
qexp :: Nat -> Nat -> Nat -> Nat
qexp x Z acc = acc
qexp x (S n) acc = qexp x n (x * acc)
-- Lists

-- | Length of a list as a 'Nat'.
length :: [a] -> Nat
length [] = Z
length (_:xs) = S (length xs)

-- | List append.
(++) :: [a] -> [a] -> [a]
[] ++ ys = ys
(x:xs) ++ ys = x : (xs ++ ys)

-- | Drop the first n elements.
drop :: Nat -> [a] -> [a]
drop Z xs = xs
drop _ [] = []
drop (S x) (_:xs) = drop x xs

-- | Naive (quadratic) reverse.
rev :: [a] -> [a]
rev [] = []
rev (x:xs) = rev xs ++ [x]

-- | Accumulator-based (linear) reverse: @qrev xs acc = rev xs ++ acc@.
qrev :: [a] -> [a] -> [a]
qrev [] acc = acc
qrev (x:xs) acc = qrev xs (x:acc)

-- | Reverse-and-flatten a list of lists.
revflat :: [[a]] -> [a]
revflat [] = []
revflat ([]:xss) = revflat xss
revflat ((x:xs):xss) = revflat (xs:xss) ++ [x]

-- | Accumulator-based version of 'revflat'.
qrevflat :: [[a]] -> [a] -> [a]
qrevflat [] acc = acc
qrevflat ([]:xss) acc = qrevflat xss acc
qrevflat ((x:xs):xss) acc = qrevflat (xs:xss) (x:acc)

-- | Rotate a list left by n positions.
rotate :: Nat -> [a] -> [a]
rotate Z xs = xs
rotate _ [] = []
rotate (S n) (x:xs) = rotate n (xs ++ [x])

-- | List membership for 'Nat's.
elem :: Nat -> [Nat] -> Bool
elem _ [] = False
elem n (x:xs) = n == x || elem n xs

-- NOTE(review): the recursive case tests @x `elem` xs@ (the tail of the
-- *same* list) rather than @x `elem` ys@. This looks odd, but it is the
-- benchmark's definition -- confirm against the original problem set
-- before "fixing" it.
subset :: [Nat] -> [Nat] -> Bool
subset [] ys = True
subset (x:xs) ys = x `elem` xs && subset xs ys

-- | Intersection and union on 'Nat' lists.
intersect,union :: [Nat] -> [Nat] -> [Nat]
(x:xs) `intersect` ys | x `elem` ys = x:(xs `intersect` ys)
                      | otherwise = xs `intersect` ys
[] `intersect` ys = []
union (x:xs) ys | x `elem` ys = union xs ys
                | otherwise = x:(union xs ys)
union [] ys = ys

-- | Insertion sort.
isort :: [Nat] -> [Nat]
isort [] = []
isort (x:xs) = insert x (isort xs)

-- | Insert an element into a sorted list, keeping it sorted.
insert :: Nat -> [Nat] -> [Nat]
insert n [] = [n]
insert n (x:xs) =
  case n <= x of
    True -> n : x : xs
    False -> x : (insert n xs)

-- | Count occurrences of n in a list.
count :: Nat -> [Nat] -> Nat
count n (x:xs) | n == x = S (count n xs)
               | otherwise = count n xs
count n [] = Z

-- | Is the list sorted (non-decreasing)?
sorted :: [Nat] -> Bool
sorted (x:y:xs) = x <= y && sorted (y:xs)
sorted _ = True

-- Theorem

-- | @fac x == qfac x 1@: the accumulator factorial agrees with the plain one.
prop_T33 :: Nat -> Prop
prop_T33 x = prove (fac x :=: qfac x one)
| null | https://raw.githubusercontent.com/danr/hipspec/a114db84abd5fee8ce0b026abc5380da11147aa9/testsuite/prod/zeno_version/PropT33.hs | haskell | Definitions
Nats
Lists
Theorem | module PropT33 where
import Prelude(Bool(..))
import Zeno
-- Definitions
-- NOTE: these are theorem-prover benchmark definitions (Zeno/HipSpec);
-- their exact recursive shape is part of the benchmark, so only comments
-- are added here.

-- | Boolean conjunction (Prelude is hidden; redefined by pattern matching).
True && x = x
_ && _ = False

-- | Boolean disjunction.
False || x = x
_ || _ = True

-- | Boolean negation.
not True = False
not False = True

-- Nats

-- | Peano natural numbers.
data Nat = S Nat | Z

-- | Addition by recursion on the first argument.
(+) :: Nat -> Nat -> Nat
Z + y = y
(S x) + y = S (x + y)

-- | Multiplication defined in terms of '+'.
(*) :: Nat -> Nat -> Nat
Z * _ = Z
(S x) * y = y + (x * y)

-- | Structural equality and inequality.
(==),(/=) :: Nat -> Nat -> Bool
Z == Z = True
Z == _ = False
S _ == Z = False
S x == S y = x == y
x /= y = not (x == y)

-- | Less-than-or-equal.
(<=) :: Nat -> Nat -> Bool
Z <= _ = True
_ <= Z = False
S x <= S y = x <= y

-- | Named small constants.
one, zero :: Nat
zero = Z
one = S Z

-- | Double a natural number.
double :: Nat -> Nat
double Z = Z
double (S x) = S (S (double x))

-- | Evenness by two-step recursion.
even :: Nat -> Bool
even Z = True
even (S Z) = False
even (S (S x)) = even x

-- | Integer halving (rounds down).
half :: Nat -> Nat
half Z = Z
half (S Z) = Z
half (S (S x)) = S (half x)

-- | Accumulator-based multiplication: @mult x y acc = x * y + acc@.
mult :: Nat -> Nat -> Nat -> Nat
mult Z _ acc = acc
mult (S x) y acc = mult x y (y + acc)

-- | Factorial.
fac :: Nat -> Nat
fac Z = S Z
fac (S x) = S x * fac x

-- | Accumulator-based factorial: @qfac x acc = fac x * acc@.
qfac :: Nat -> Nat -> Nat
qfac Z acc = acc
qfac (S x) acc = qfac x (S x * acc)

-- | Exponentiation.
exp :: Nat -> Nat -> Nat
exp _ Z = S Z
exp x (S n) = x * exp x n

-- | Accumulator-based exponentiation: @qexp x n acc = exp x n * acc@.
qexp :: Nat -> Nat -> Nat -> Nat
qexp x Z acc = acc
qexp x (S n) acc = qexp x n (x * acc)
-- Lists

-- | Length of a list as a 'Nat'.
length :: [a] -> Nat
length [] = Z
length (_:xs) = S (length xs)

-- | List append.
(++) :: [a] -> [a] -> [a]
[] ++ ys = ys
(x:xs) ++ ys = x : (xs ++ ys)

-- | Drop the first n elements.
drop :: Nat -> [a] -> [a]
drop Z xs = xs
drop _ [] = []
drop (S x) (_:xs) = drop x xs

-- | Naive (quadratic) reverse.
rev :: [a] -> [a]
rev [] = []
rev (x:xs) = rev xs ++ [x]

-- | Accumulator-based (linear) reverse: @qrev xs acc = rev xs ++ acc@.
qrev :: [a] -> [a] -> [a]
qrev [] acc = acc
qrev (x:xs) acc = qrev xs (x:acc)

-- | Reverse-and-flatten a list of lists.
revflat :: [[a]] -> [a]
revflat [] = []
revflat ([]:xss) = revflat xss
revflat ((x:xs):xss) = revflat (xs:xss) ++ [x]

-- | Accumulator-based version of 'revflat'.
qrevflat :: [[a]] -> [a] -> [a]
qrevflat [] acc = acc
qrevflat ([]:xss) acc = qrevflat xss acc
qrevflat ((x:xs):xss) acc = qrevflat (xs:xss) (x:acc)

-- | Rotate a list left by n positions.
rotate :: Nat -> [a] -> [a]
rotate Z xs = xs
rotate _ [] = []
rotate (S n) (x:xs) = rotate n (xs ++ [x])

-- | List membership for 'Nat's.
elem :: Nat -> [Nat] -> Bool
elem _ [] = False
elem n (x:xs) = n == x || elem n xs

-- NOTE(review): the recursive case tests @x `elem` xs@ (the tail of the
-- *same* list) rather than @x `elem` ys@. This looks odd, but it is the
-- benchmark's definition -- confirm against the original problem set
-- before "fixing" it.
subset :: [Nat] -> [Nat] -> Bool
subset [] ys = True
subset (x:xs) ys = x `elem` xs && subset xs ys

-- | Intersection and union on 'Nat' lists.
intersect,union :: [Nat] -> [Nat] -> [Nat]
(x:xs) `intersect` ys | x `elem` ys = x:(xs `intersect` ys)
                      | otherwise = xs `intersect` ys
[] `intersect` ys = []
union (x:xs) ys | x `elem` ys = union xs ys
                | otherwise = x:(union xs ys)
union [] ys = ys

-- | Insertion sort.
isort :: [Nat] -> [Nat]
isort [] = []
isort (x:xs) = insert x (isort xs)

-- | Insert an element into a sorted list, keeping it sorted.
insert :: Nat -> [Nat] -> [Nat]
insert n [] = [n]
insert n (x:xs) =
  case n <= x of
    True -> n : x : xs
    False -> x : (insert n xs)

-- | Count occurrences of n in a list.
count :: Nat -> [Nat] -> Nat
count n (x:xs) | n == x = S (count n xs)
               | otherwise = count n xs
count n [] = Z

-- | Is the list sorted (non-decreasing)?
sorted :: [Nat] -> Bool
sorted (x:y:xs) = x <= y && sorted (y:xs)
sorted _ = True

-- Theorem

-- | @fac x == qfac x 1@: the accumulator factorial agrees with the plain one.
prop_T33 :: Nat -> Prop
prop_T33 x = prove (fac x :=: qfac x one)
|
363d1956daa115156a4d2222cb7098c2f3aeb7ec3f3de50fe83e7c5d83fc9b3c | brownplt/LambdaS5 | md.ml | (**************************************************************************)
(* *)
: a generic graph library for OCaml
Copyright ( C ) 2004 - 2010
, and
(* *)
(* This software is free software; you can redistribute it and/or *)
modify it under the terms of the GNU Library General Public
License version 2.1 , with the special exception on linking
(* described in file LICENSE. *)
(* *)
(* This software is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. *)
(* *)
(**************************************************************************)
$ I d : , v 1.6 2004 - 10 - 22 14:42:06 signoles Exp $
(* Minimum-degree heuristic for graph triangulation (chordal completion),
   persistent-graph version.

   [md g] repeatedly picks a vertex of minimal degree in the remaining
   graph, turns its neighbourhood into a clique (recording the fill-in
   edges), and removes it, stopping once the filled graph is chordal.
   Returns the triangulated graph, the fill-in edges and the elimination
   ordering. *)
module P(G : Sig.P) = struct
  module VertexSet = Set.Make(G.V)
  module CT = Cliquetree.CliqueTree(G)
  module Choose = Oper.Choose(G)

  type edgeset = (G.V.t * G.V.t) list

  let md g =
    let gref = ref g in   (* remaining graph, vertices removed as eliminated *)
    let gtri = ref g in   (* graph being triangulated (the result) *)
    let n = G.nb_vertex g in
    let tri = ref [] in   (* accumulated fill-in edges *)
    let ord = ref [] in   (* elimination ordering, most recent first *)
    let i = ref 0 in
    while not (CT.is_chordal !gtri) && !i < n do
      (* Select a vertex of minimal degree in the remaining graph
         (the fold keeps the current best when the candidate's degree
         is strictly larger). *)
      let v =
        let x =
          G.fold_vertex
            (fun v' x ->
               let deg' = G.out_degree !gref v' in
               match x with
                 Some (v,deg) when deg' > deg -> x
               | _ -> Some (v', deg'))
            !gref None
        in match x with
          Some (v,_) -> v
        | None -> failwith "Expecting some vertex"
      in
      let ng = G.succ !gref v in
      (* Make the neighbourhood of [v] a clique, collecting fill-in edges. *)
      let g', tri' =
        List.fold_left
          (fun (g, tri) v ->
             let tri' =
               List.fold_left
                 (fun tri v' ->
                    if v <> v' && not (G.mem_edge g v v') then
                      (v, v') :: tri
                    else tri)
                 tri ng
             in
             let g' =
               List.fold_left
                 (fun g v' ->
                    if v <> v' then
                      G.add_edge g v v'
                    else g)
                 g ng
             in
             (g', tri'))
          (!gref, []) ng
      in
      ord := v :: !ord;
      (* Add the fill-in edges to the triangulated graph and eliminate [v]. *)
      gtri := List.fold_left
        (fun g (x,y) -> G.add_edge g x y)
        !gtri tri';
      gref := G.remove_vertex g' v;
      tri := tri' @ !tri;
      incr i;
    done;
    (!gtri, !tri, !ord)

  (* Return only the triangulated graph. *)
  let triangulate g =
    let gtri, _, _ = md g in
    gtri
end
(* Imperative variant of [P]: the same minimum-degree triangulation, but
   operating by mutation on working copies of the input graph. *)
module I(G : Sig.I) = struct
  module VertexSet = Set.Make(G.V)
  module CT = Cliquetree.CliqueTree(G)
  module Choose = Oper.Choose(G)

  type edgeset = (G.V.t * G.V.t) list

  (* Identity vertex map, used only to duplicate the input graph. *)
  module Copy = Gmap.Vertex(G)(struct include G include Builder.I(G) end)

  let md g =
    let gtri = Copy.map (fun x -> x) g in   (* triangulated graph (result) *)
    let gcur = Copy.map (fun x -> x) g in   (* working graph, mutated in place *)
    let n = G.nb_vertex g in
    let tri = ref [] in   (* accumulated fill-in edges *)
    let ord = ref [] in   (* elimination ordering, most recent first *)
    let i = ref 0 in
    while not (CT.is_chordal gtri) && !i < n do
      (* Select a vertex of minimal degree in the working graph. *)
      let v =
        let x =
          G.fold_vertex
            (fun v' x ->
               let deg' = G.out_degree gcur v' in
               match x with
                 Some (v,deg) when deg' > deg -> x
               | _ -> Some (v', deg'))
            gcur None
        in match x with
          Some (v,_) -> v
        | None -> failwith "Expecting some vertex"
      in
      let ng = G.succ gcur v in
      (* Turn the neighbourhood of [v] into a clique, collecting fill-in
         edges; the clique edges are added to [gcur] as a side effect. *)
      let tri' =
        List.fold_left
          (fun tri v ->
             List.fold_left
               (fun tri v' ->
                  let tri' =
                    if v <> v' && not (G.mem_edge g v v') then
                      (v, v') :: tri
                    else
                      tri
                  in
                  List.iter (fun v' -> if v <> v' then G.add_edge gcur v v') ng;
                  tri')
               tri ng)
          [] ng
      in
      ord := v :: !ord;
      (* Record the fill-in edges in the result and eliminate [v]. *)
      List.iter
        (fun (x,y) -> G.add_edge gtri x y)
        tri';
      G.remove_vertex gcur v;
      tri := tri' @ !tri;
      incr i;
    done;
    (gtri, !tri, !ord)

  (* Return only the triangulated graph. *)
  let triangulate g =
    let gtri, _, _ = md g in
    gtri
end
| null | https://raw.githubusercontent.com/brownplt/LambdaS5/f0bf5c7baf1daa4ead4e398ba7d430bedb7de9cf/src/ocamlgraph-1.8.1/src/md.ml | ocaml | ************************************************************************
This software is free software; you can redistribute it and/or
described in file LICENSE.
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
************************************************************************ | : a generic graph library for OCaml
Copyright ( C ) 2004 - 2010
, and
modify it under the terms of the GNU Library General Public
License version 2.1 , with the special exception on linking
$ I d : , v 1.6 2004 - 10 - 22 14:42:06 signoles Exp $
(* Minimum-degree heuristic for graph triangulation (chordal completion),
   persistent-graph version.

   [md g] repeatedly picks a vertex of minimal degree in the remaining
   graph, turns its neighbourhood into a clique (recording the fill-in
   edges), and removes it, stopping once the filled graph is chordal.
   Returns the triangulated graph, the fill-in edges and the elimination
   ordering. *)
module P(G : Sig.P) = struct
  module VertexSet = Set.Make(G.V)
  module CT = Cliquetree.CliqueTree(G)
  module Choose = Oper.Choose(G)

  type edgeset = (G.V.t * G.V.t) list

  let md g =
    let gref = ref g in   (* remaining graph, vertices removed as eliminated *)
    let gtri = ref g in   (* graph being triangulated (the result) *)
    let n = G.nb_vertex g in
    let tri = ref [] in   (* accumulated fill-in edges *)
    let ord = ref [] in   (* elimination ordering, most recent first *)
    let i = ref 0 in
    while not (CT.is_chordal !gtri) && !i < n do
      (* Select a vertex of minimal degree in the remaining graph
         (the fold keeps the current best when the candidate's degree
         is strictly larger). *)
      let v =
        let x =
          G.fold_vertex
            (fun v' x ->
               let deg' = G.out_degree !gref v' in
               match x with
                 Some (v,deg) when deg' > deg -> x
               | _ -> Some (v', deg'))
            !gref None
        in match x with
          Some (v,_) -> v
        | None -> failwith "Expecting some vertex"
      in
      let ng = G.succ !gref v in
      (* Make the neighbourhood of [v] a clique, collecting fill-in edges. *)
      let g', tri' =
        List.fold_left
          (fun (g, tri) v ->
             let tri' =
               List.fold_left
                 (fun tri v' ->
                    if v <> v' && not (G.mem_edge g v v') then
                      (v, v') :: tri
                    else tri)
                 tri ng
             in
             let g' =
               List.fold_left
                 (fun g v' ->
                    if v <> v' then
                      G.add_edge g v v'
                    else g)
                 g ng
             in
             (g', tri'))
          (!gref, []) ng
      in
      ord := v :: !ord;
      (* Add the fill-in edges to the triangulated graph and eliminate [v]. *)
      gtri := List.fold_left
        (fun g (x,y) -> G.add_edge g x y)
        !gtri tri';
      gref := G.remove_vertex g' v;
      tri := tri' @ !tri;
      incr i;
    done;
    (!gtri, !tri, !ord)

  (* Return only the triangulated graph. *)
  let triangulate g =
    let gtri, _, _ = md g in
    gtri
end
(* Imperative variant of [P]: the same minimum-degree triangulation, but
   operating by mutation on working copies of the input graph. *)
module I(G : Sig.I) = struct
  module VertexSet = Set.Make(G.V)
  module CT = Cliquetree.CliqueTree(G)
  module Choose = Oper.Choose(G)

  type edgeset = (G.V.t * G.V.t) list

  (* Identity vertex map, used only to duplicate the input graph. *)
  module Copy = Gmap.Vertex(G)(struct include G include Builder.I(G) end)

  let md g =
    let gtri = Copy.map (fun x -> x) g in   (* triangulated graph (result) *)
    let gcur = Copy.map (fun x -> x) g in   (* working graph, mutated in place *)
    let n = G.nb_vertex g in
    let tri = ref [] in   (* accumulated fill-in edges *)
    let ord = ref [] in   (* elimination ordering, most recent first *)
    let i = ref 0 in
    while not (CT.is_chordal gtri) && !i < n do
      (* Select a vertex of minimal degree in the working graph. *)
      let v =
        let x =
          G.fold_vertex
            (fun v' x ->
               let deg' = G.out_degree gcur v' in
               match x with
                 Some (v,deg) when deg' > deg -> x
               | _ -> Some (v', deg'))
            gcur None
        in match x with
          Some (v,_) -> v
        | None -> failwith "Expecting some vertex"
      in
      let ng = G.succ gcur v in
      (* Turn the neighbourhood of [v] into a clique, collecting fill-in
         edges; the clique edges are added to [gcur] as a side effect. *)
      let tri' =
        List.fold_left
          (fun tri v ->
             List.fold_left
               (fun tri v' ->
                  let tri' =
                    if v <> v' && not (G.mem_edge g v v') then
                      (v, v') :: tri
                    else
                      tri
                  in
                  List.iter (fun v' -> if v <> v' then G.add_edge gcur v v') ng;
                  tri')
               tri ng)
          [] ng
      in
      ord := v :: !ord;
      (* Record the fill-in edges in the result and eliminate [v]. *)
      List.iter
        (fun (x,y) -> G.add_edge gtri x y)
        tri';
      G.remove_vertex gcur v;
      tri := tri' @ !tri;
      incr i;
    done;
    (gtri, !tri, !ord)

  (* Return only the triangulated graph. *)
  let triangulate g =
    let gtri, _, _ = md g in
    gtri
end
|
88cb927ea2545cb04549621836a923a1106c1afcc497b816074043b73f7031dd | andrewthad/quickcheck-classes | Ring.hs | # LANGUAGE CPP #
{-# LANGUAGE ScopedTypeVariables #-}

{-# OPTIONS_GHC -Wall #-}

-- | QuickCheck laws for the 'Ring' class from the @semirings@ package.
-- Everything is guarded by CPP so the module still builds when
-- @semirings@ support is disabled.
module Test.QuickCheck.Classes.Ring
  (
#if HAVE_SEMIRINGS
    ringLaws
#endif
  ) where

#if HAVE_SEMIRINGS
import Data.Semiring
import Prelude hiding (Num(..))
#endif
import Data.Proxy (Proxy)
import Test.QuickCheck hiding ((.&.))
import Test.QuickCheck.Classes.Internal (Laws(..), myForAllShrink)

#if HAVE_SEMIRINGS
-- | Tests the following properties:
--
-- [/Additive Inverse/]
--   @'negate' a '+' a ≡ 0@
--
-- Note that this does not test any of the laws tested by 'Test.QuickCheck.Classes.Semiring.semiringLaws'.
ringLaws :: (Ring a, Eq a, Arbitrary a, Show a) => Proxy a -> Laws
ringLaws p = Laws "Ring"
  [ ("Additive Inverse", ringAdditiveInverse p)
  ]

-- | @negate a + a@ must equal 'zero' for every generated @a@.
ringAdditiveInverse :: forall a. (Ring a, Eq a, Arbitrary a, Show a) => Proxy a -> Property
ringAdditiveInverse _ = myForAllShrink True (const True)
  (\(a :: a) -> ["a = " ++ show a])
  "negate a + a"
  (\a -> negate a + a)
  "0"
  (const zero)
#endif
| null | https://raw.githubusercontent.com/andrewthad/quickcheck-classes/0fc6c0602bc6875cdbde34cbdbcf229a175af62f/quickcheck-classes/src/Test/QuickCheck/Classes/Ring.hs | haskell | | Tests the following properties:
[/Additive Inverse/]
Note that this does not test any of the laws tested by 'Test.QuickCheck.Classes.Semiring.semiringLaws'. | # LANGUAGE CPP #
{-# LANGUAGE ScopedTypeVariables #-}

{-# OPTIONS_GHC -Wall #-}

-- | QuickCheck laws for the 'Ring' class from the @semirings@ package.
-- Everything is guarded by CPP so the module still builds when
-- @semirings@ support is disabled.
module Test.QuickCheck.Classes.Ring
  (
#if HAVE_SEMIRINGS
    ringLaws
#endif
  ) where

#if HAVE_SEMIRINGS
import Data.Semiring
import Prelude hiding (Num(..))
#endif
import Data.Proxy (Proxy)
import Test.QuickCheck hiding ((.&.))
import Test.QuickCheck.Classes.Internal (Laws(..), myForAllShrink)

#if HAVE_SEMIRINGS
-- | Tests the following properties:
--
-- [/Additive Inverse/]
--   @'negate' a '+' a ≡ 0@
--
-- Note that this does not test any of the laws tested by 'Test.QuickCheck.Classes.Semiring.semiringLaws'.
ringLaws :: (Ring a, Eq a, Arbitrary a, Show a) => Proxy a -> Laws
ringLaws p = Laws "Ring"
  [ ("Additive Inverse", ringAdditiveInverse p)
  ]

-- | @negate a + a@ must equal 'zero' for every generated @a@.
ringAdditiveInverse :: forall a. (Ring a, Eq a, Arbitrary a, Show a) => Proxy a -> Property
ringAdditiveInverse _ = myForAllShrink True (const True)
  (\(a :: a) -> ["a = " ++ show a])
  "negate a + a"
  (\a -> negate a + a)
  "0"
  (const zero)
#endif
|
b5bf6956a12896d0c51c8af26e52c6dd3570b9fbdfac51d7c77f3fd96b3a68df | clojure-interop/google-cloud-clients | CloudRedisClient.clj | (ns com.google.cloud.redis.v1beta1.CloudRedisClient
"Service Description: Configures and manages Cloud Memorystore for Redis instances
Google Cloud Memorystore for Redis v1beta1
The `redis.googleapis.com` service implements the Google Cloud Memorystore for Redis API and
defines the following resource model for managing Redis instances: * The service works with a
collection of cloud projects, named: `/projects/*` * Each project has a collection of
available locations, named: `/locations/*` * Each location has a collection of Redis
instances, named: `/instances/*` * As such, Redis instances are resources of the form:
`/projects/{project_id}/locations/{location_id}/instances/{instance_id}`
  Note that location_id must be referring to a GCP `region`; for example: *
`projects/redpepper-1290/locations/us-central1/instances/my-redis`
This class provides the ability to make remote calls to the backing service through method
calls that map to API methods. Sample code to get started:
try (CloudRedisClient cloudRedisClient = CloudRedisClient.create()) {
InstanceName name = InstanceName.of(\"[PROJECT]\", \"[LOCATION]\", \"[INSTANCE]\");
Instance response = cloudRedisClient.getInstance(name);
}
Note: close() needs to be called on the cloudRedisClient object to clean up resources such as
threads. In the example above, try-with-resources is used, which automatically calls close().
The surface of this class includes several types of Java methods for each of the API's
methods:
A \"flattened\" method. With this type of method, the fields of the request type have been
converted into function parameters. It may be the case that not all fields are available as
parameters, and not every API method will have a flattened method entry point.
A \"request object\" method. This type of method only takes one parameter, a request object,
which must be constructed before the call. Not every API method will have a request object
method.
A \"callable\" method. This type of method takes no parameters and returns an immutable API
callable object, which can be used to initiate calls to the service.
See the individual methods for example code.
Many parameters require resource names to be formatted in a particular way. To assist with
these names, this class includes a format method for each type of name, and additionally a parse
method to extract the individual identifiers contained within names that are returned.
This class can be customized by passing in a custom instance of CloudRedisSettings to
create(). For example:
To customize credentials:
CloudRedisSettings cloudRedisSettings =
CloudRedisSettings.newBuilder()
.setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
.build();
CloudRedisClient cloudRedisClient =
CloudRedisClient.create(cloudRedisSettings);
To customize the endpoint:
CloudRedisSettings cloudRedisSettings =
CloudRedisSettings.newBuilder().setEndpoint(myEndpoint).build();
CloudRedisClient cloudRedisClient =
CloudRedisClient.create(cloudRedisSettings);"
(:refer-clojure :only [require comment defn ->])
(:import [com.google.cloud.redis.v1beta1 CloudRedisClient]))
;; Factory for the client. The no-arg arity uses default settings; the
;; one-arg arity accepts custom CloudRedisSettings. The returned client
;; must be closed to release threads (see the namespace docstring).
(defn *create
  "Constructs an instance of CloudRedisClient, using the given settings. The channels are created
  based on the settings passed in, or defaults for any settings that are not set.

  settings - `com.google.cloud.redis.v1beta1.CloudRedisSettings`

  returns: `com.google.cloud.redis.v1beta1.CloudRedisClient`

  throws: java.io.IOException"
  (^com.google.cloud.redis.v1beta1.CloudRedisClient [^com.google.cloud.redis.v1beta1.CloudRedisSettings settings]
    (CloudRedisClient/create settings))
  (^com.google.cloud.redis.v1beta1.CloudRedisClient []
    (CloudRedisClient/create )))

;; Thin interop wrapper: returns the long-running-operation callable for
;; DeleteInstance without invoking it; callers drive it via .futureCall.
(defn delete-instance-operation-callable
  "Deletes a specific Redis instance.  Instance stops serving and data is deleted.

  Sample code:

  try (CloudRedisClient cloudRedisClient = CloudRedisClient.create()) {
    InstanceName name = InstanceName.of(\"[PROJECT]\", \"[LOCATION]\", \"[INSTANCE]\");
    DeleteInstanceRequest request = DeleteInstanceRequest.newBuilder()
      .setName(name.toString())
      .build();
    OperationFuture<Empty, Any> future = cloudRedisClient.deleteInstanceOperationCallable().futureCall(request);
    // Do something
    future.get();
  }

  returns: `(value=\"The surface for use by generated code is not stable yet and may change in the future.\") com.google.api.gax.rpc.OperationCallable<com.google.cloud.redis.v1beta1.DeleteInstanceRequest,com.google.protobuf.Empty,com.google.protobuf.Any>`"
  ([^CloudRedisClient this]
    (-> this (.deleteInstanceOperationCallable))))
;; Thin interop wrapper over the generated client's exportInstanceAsync.
;; Two arities mirror the Java overloads: (name, output-config) or a
;; prebuilt ExportInstanceRequest. Returns an OperationFuture.
(defn export-instance-async
  "Export Redis instance data into a Redis RDB format file in Cloud Storage.

  Redis will continue serving during this operation.

  The returned operation is automatically deleted after a few hours, so there is no need to
  call DeleteOperation.

  Sample code:

  try (CloudRedisClient cloudRedisClient = CloudRedisClient.create()) {
    String formattedName = InstanceName.format(\"[PROJECT]\", \"[LOCATION]\", \"[INSTANCE]\");
    OutputConfig outputConfig = OutputConfig.newBuilder().build();
    Instance response = cloudRedisClient.exportInstanceAsync(formattedName, outputConfig).get();
  }

  name - Required. Redis instance resource name using the form: `projects/{project_id}/locations/{location_id}/instances/{instance_id}` where `location_id` refers to a GCP region. - `java.lang.String`
  output-config - Required. Specify data to be exported. - `com.google.cloud.redis.v1beta1.OutputConfig`

  returns: `(value=\"The surface for long-running operations is not stable yet and may change in the future.\") com.google.api.gax.longrunning.OperationFuture<com.google.cloud.redis.v1beta1.Instance,com.google.protobuf.Any>`

  throws: com.google.api.gax.rpc.ApiException - if the remote call fails"
  ([^CloudRedisClient this ^java.lang.String name ^com.google.cloud.redis.v1beta1.OutputConfig output-config]
    (-> this (.exportInstanceAsync name output-config)))
  ([^CloudRedisClient this ^com.google.cloud.redis.v1beta1.ExportInstanceRequest request]
    (-> this (.exportInstanceAsync request))))

;; Thin interop wrapper: returns the unary callable for FailoverInstance
;; without invoking it.
(defn failover-instance-callable
  "Initiates a failover of the master node to current replica node for a specific STANDARD tier
  Cloud Memorystore for Redis instance.

  Sample code:

  try (CloudRedisClient cloudRedisClient = CloudRedisClient.create()) {
    String formattedName = InstanceName.format(\"[PROJECT]\", \"[LOCATION]\", \"[INSTANCE]\");
    FailoverInstanceRequest.DataProtectionMode dataProtectionMode = FailoverInstanceRequest.DataProtectionMode.DATA_PROTECTION_MODE_UNSPECIFIED;
    FailoverInstanceRequest request = FailoverInstanceRequest.newBuilder()
      .setName(formattedName)
      .setDataProtectionMode(dataProtectionMode)
      .build();
    ApiFuture<Operation> future = cloudRedisClient.failoverInstanceCallable().futureCall(request);
    // Do something
    Operation response = future.get();
  }

  returns: `com.google.api.gax.rpc.UnaryCallable<com.google.cloud.redis.v1beta1.FailoverInstanceRequest,com.google.longrunning.Operation>`"
  (^com.google.api.gax.rpc.UnaryCallable [^CloudRedisClient this]
    (-> this (.failoverInstanceCallable))))
;; Thin interop wrapper: returns the unary callable for ExportInstance
;; without invoking it; callers drive it via .futureCall / .call.
(defn export-instance-callable
  "Export Redis instance data into a Redis RDB format file in Cloud Storage.

  Redis will continue serving during this operation.

  The returned operation is automatically deleted after a few hours, so there is no need to
  call DeleteOperation.

  Sample code:

  try (CloudRedisClient cloudRedisClient = CloudRedisClient.create()) {
    String formattedName = InstanceName.format(\"[PROJECT]\", \"[LOCATION]\", \"[INSTANCE]\");
    OutputConfig outputConfig = OutputConfig.newBuilder().build();
    ExportInstanceRequest request = ExportInstanceRequest.newBuilder()
      .setName(formattedName)
      .setOutputConfig(outputConfig)
      .build();
    ApiFuture<Operation> future = cloudRedisClient.exportInstanceCallable().futureCall(request);
    // Do something
    Operation response = future.get();
  }

  returns: `com.google.api.gax.rpc.UnaryCallable<com.google.cloud.redis.v1beta1.ExportInstanceRequest,com.google.longrunning.Operation>`"
  (^com.google.api.gax.rpc.UnaryCallable [^CloudRedisClient this]
    (-> this (.exportInstanceCallable))))
;; Accessor for the settings this client was constructed with.
(defn get-settings
  "Returns the CloudRedisSettings used to construct this client.

  returns: `com.google.cloud.redis.v1beta1.CloudRedisSettings`"
  (^com.google.cloud.redis.v1beta1.CloudRedisSettings [^CloudRedisClient this]
    (.getSettings this)))
;; Thin interop wrapper: returns the raw (non-paginated) unary callable
;; for ListInstances; callers handle page tokens themselves.
(defn list-instances-callable
  "Lists all Redis instances owned by a project in either the specified location (region) or all
  locations.

  The location should have the following format: *
  `projects/{project_id}/locations/{location_id}`

  If `location_id` is specified as `-` (wildcard), then all regions available to the project
  are queried, and the results are aggregated.

  Sample code:

  try (CloudRedisClient cloudRedisClient = CloudRedisClient.create()) {
    LocationName parent = LocationName.of(\"[PROJECT]\", \"[LOCATION]\");
    ListInstancesRequest request = ListInstancesRequest.newBuilder()
      .setParent(parent.toString())
      .build();
    while (true) {
      ListInstancesResponse response = cloudRedisClient.listInstancesCallable().call(request);
      for (Instance element : response.getInstancesList()) {
        // doThingsWith(element);
      }
      String nextPageToken = response.getNextPageToken();
      if (!Strings.isNullOrEmpty(nextPageToken)) {
        request = request.toBuilder().setPageToken(nextPageToken).build();
      } else {
        break;
      }
    }
  }

  returns: `com.google.api.gax.rpc.UnaryCallable<com.google.cloud.redis.v1beta1.ListInstancesRequest,com.google.cloud.redis.v1beta1.ListInstancesResponse>`"
  (^com.google.api.gax.rpc.UnaryCallable [^CloudRedisClient this]
    (-> this (.listInstancesCallable))))

;; Thin interop wrapper: returns the long-running-operation callable for
;; ExportInstance without invoking it.
(defn export-instance-operation-callable
  "Export Redis instance data into a Redis RDB format file in Cloud Storage.

  Redis will continue serving during this operation.

  The returned operation is automatically deleted after a few hours, so there is no need to
  call DeleteOperation.

  Sample code:

  try (CloudRedisClient cloudRedisClient = CloudRedisClient.create()) {
    String formattedName = InstanceName.format(\"[PROJECT]\", \"[LOCATION]\", \"[INSTANCE]\");
    OutputConfig outputConfig = OutputConfig.newBuilder().build();
    ExportInstanceRequest request = ExportInstanceRequest.newBuilder()
      .setName(formattedName)
      .setOutputConfig(outputConfig)
      .build();
    OperationFuture<Instance, Any> future = cloudRedisClient.exportInstanceOperationCallable().futureCall(request);
    // Do something
    Instance response = future.get();
  }

  returns: `(value=\"The surface for use by generated code is not stable yet and may change in the future.\") com.google.api.gax.rpc.OperationCallable<com.google.cloud.redis.v1beta1.ExportInstanceRequest,com.google.cloud.redis.v1beta1.Instance,com.google.protobuf.Any>`"
  ([^CloudRedisClient this]
    (-> this (.exportInstanceOperationCallable))))
;; Accessor for the underlying transport stub (unstable surface).
(defn get-stub
  "Returns the underlying transport stub backing this client.

  returns: `(value=\"A restructuring of stub classes is planned, so this may break in the future\") com.google.cloud.redis.v1beta1.stub.CloudRedisStub`"
  ([^CloudRedisClient this]
    (.getStub this)))
(defn list-instances
  "Lists all Redis instances owned by a project, either in the specified
  location (region) or, when `location_id` is given as the wildcard `-`,
  across every region available to the project (results are aggregated).

  parent - Required. Resource name of the instance location, of the form
  `projects/{project_id}/locations/{location_id}` where `location_id`
  refers to a GCP region. - `com.google.cloud.redis.v1beta1.LocationName`
  returns: `com.google.cloud.redis.v1beta1.CloudRedisClient$ListInstancesPagedResponse`
  throws: com.google.api.gax.rpc.ApiException - if the remote call fails"
  (^com.google.cloud.redis.v1beta1.CloudRedisClient$ListInstancesPagedResponse [^CloudRedisClient this ^com.google.cloud.redis.v1beta1.LocationName parent]
    (.listInstances this parent)))
(defn list-instances-paged-callable
  "Returns the UnaryCallable that lists all Redis instances owned by a
  project, with automatic page handling. With `location_id` set to the
  wildcard `-`, all regions available to the project are queried and the
  results aggregated.

  returns: `com.google.api.gax.rpc.UnaryCallable<ListInstancesRequest, CloudRedisClient$ListInstancesPagedResponse>`"
  (^com.google.api.gax.rpc.UnaryCallable [^CloudRedisClient this]
    (.listInstancesPagedCallable this)))
(defn create-instance-async
  "Creates a Redis instance based on the specified tier and memory size.
  By default the instance is accessible from the project's default
  network. Creation is asynchronous; once complete, the
  longrunning.Operation contains the new instance object in its response
  field. The returned operation is automatically deleted after a few
  hours, so there is no need to call DeleteOperation.

  parent - Required. Instance location name of the form
  `projects/{project_id}/locations/{location_id}`. - `com.google.cloud.redis.v1beta1.LocationName`
  instance-id - Required. Logical instance name: lowercase letters,
  numbers and hyphens only; must start with a letter, end with a letter or
  number, be 1-40 characters, and be unique within the project/location. - `java.lang.String`
  instance - Required. A Redis Instance resource. - `com.google.cloud.redis.v1beta1.Instance`
  returns: `com.google.api.gax.longrunning.OperationFuture<Instance, Any>`
  (BetaApi: the surface for long-running operations is not stable yet)
  throws: com.google.api.gax.rpc.ApiException - if the remote call fails"
  ([^CloudRedisClient this ^com.google.cloud.redis.v1beta1.LocationName parent ^java.lang.String instance-id ^com.google.cloud.redis.v1beta1.Instance instance]
    (-> this (.createInstanceAsync parent instance-id instance)))
  ([^CloudRedisClient this ^com.google.cloud.redis.v1beta1.CreateInstanceRequest request]
    (-> this (.createInstanceAsync request))))
(defn shutdown?
  "True when this client has been shut down.

  returns: `boolean`"
  (^Boolean [^CloudRedisClient this]
    (.isShutdown this)))
(defn failover-instance-operation-callable
  "Returns the OperationCallable that initiates a failover of the master
  node to the current replica node for a specific STANDARD tier Cloud
  Memorystore for Redis instance.

  returns: `com.google.api.gax.rpc.OperationCallable<FailoverInstanceRequest, Instance, Any>`
  (BetaApi: the surface for use by generated code is not stable yet and
  may change in the future)"
  ([^CloudRedisClient this]
    (-> this (.failoverInstanceOperationCallable))))
(defn await-termination
  "Blocks until the client has terminated after a shutdown request, the
  timeout elapses, or the current thread is interrupted.

  duration - `long`
  unit - `java.util.concurrent.TimeUnit`
  returns: `boolean`
  throws: java.lang.InterruptedException"
  (^Boolean [^CloudRedisClient this ^Long duration ^java.util.concurrent.TimeUnit unit]
    (.awaitTermination this duration unit)))
(defn import-instance-callable
  "Returns the UnaryCallable that imports a Redis RDB snapshot file from
  Cloud Storage into a Redis instance. Redis may stop serving during the
  operation (instance state is IMPORTING for its duration); when complete
  the instance contains only the imported data. The returned operation is
  automatically deleted after a few hours.

  returns: `com.google.api.gax.rpc.UnaryCallable<ImportInstanceRequest, Operation>`"
  (^com.google.api.gax.rpc.UnaryCallable [^CloudRedisClient this]
    (.importInstanceCallable this)))
(defn create-instance-operation-callable
  "Returns the OperationCallable that creates a Redis instance based on
  the specified tier and memory size. By default the instance is
  accessible from the project's default network. Creation is asynchronous;
  the completed longrunning.Operation contains the new instance object in
  its response field. The returned operation is automatically deleted
  after a few hours, so there is no need to call DeleteOperation.

  returns: `com.google.api.gax.rpc.OperationCallable<CreateInstanceRequest, Instance, Any>`
  (BetaApi: the surface for use by generated code is not stable yet and
  may change in the future)"
  ([^CloudRedisClient this]
    (-> this (.createInstanceOperationCallable))))
(defn update-instance-operation-callable
  "Returns the OperationCallable that updates the metadata and
  configuration of a specific Redis instance. The completed
  longrunning.Operation contains the new instance object in its response
  field. The returned operation is automatically deleted after a few
  hours, so there is no need to call DeleteOperation.

  returns: `com.google.api.gax.rpc.OperationCallable<UpdateInstanceRequest, Instance, Any>`
  (BetaApi: the surface for use by generated code is not stable yet and
  may change in the future)"
  ([^CloudRedisClient this]
    (-> this (.updateInstanceOperationCallable))))
(defn shutdown
  "Initiates an orderly shutdown of the client."
  ([^CloudRedisClient this]
    (.shutdown this)))
(defn get-instance
  "Gets the details of a specific Redis instance.

  name - Required. Redis instance resource name of the form
  `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
  where `location_id` refers to a GCP region. - `com.google.cloud.redis.v1beta1.InstanceName`
  returns: `com.google.cloud.redis.v1beta1.Instance`
  throws: com.google.api.gax.rpc.ApiException - if the remote call fails"
  (^com.google.cloud.redis.v1beta1.Instance [^CloudRedisClient this ^com.google.cloud.redis.v1beta1.InstanceName name]
    (.getInstance this name)))
(defn delete-instance-async
  "Deletes a specific Redis instance. The instance stops serving and its
  data is deleted.

  name - Required. Redis instance resource name of the form
  `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
  where `location_id` refers to a GCP region. - `com.google.cloud.redis.v1beta1.InstanceName`
  returns: `com.google.api.gax.longrunning.OperationFuture<Empty, Any>`
  (BetaApi: the surface for long-running operations is not stable yet)
  throws: com.google.api.gax.rpc.ApiException - if the remote call fails"
  ([^CloudRedisClient this ^com.google.cloud.redis.v1beta1.InstanceName name]
    (-> this (.deleteInstanceAsync name))))
(defn get-operations-client
  "Returns the OperationsClient that can be used to query the status of a
  long-running operation returned by another API method call.

  returns: `com.google.longrunning.OperationsClient`
  (BetaApi: the surface for long-running operations is not stable yet)"
  ([^CloudRedisClient this]
    (-> this (.getOperationsClient))))
(defn import-instance-async
  "Imports a Redis RDB snapshot file from Cloud Storage into a Redis
  instance. Redis may stop serving during the operation (instance state is
  IMPORTING for its duration); when complete the instance contains only
  the imported data. The returned operation is automatically deleted after
  a few hours, so there is no need to call DeleteOperation.

  name - Required. Redis instance resource name of the form
  `projects/{project_id}/locations/{location_id}/instances/{instance_id}`. - `java.lang.String`
  input-config - Required. Specify data to be imported. - `com.google.cloud.redis.v1beta1.InputConfig`
  returns: `com.google.api.gax.longrunning.OperationFuture<Instance, Any>`
  (BetaApi: the surface for long-running operations is not stable yet)
  throws: com.google.api.gax.rpc.ApiException - if the remote call fails"
  ([^CloudRedisClient this ^java.lang.String name ^com.google.cloud.redis.v1beta1.InputConfig input-config]
    (-> this (.importInstanceAsync name input-config)))
  ([^CloudRedisClient this ^com.google.cloud.redis.v1beta1.ImportInstanceRequest request]
    (-> this (.importInstanceAsync request))))
(defn close
  "Closes the client and releases its resources (threads, channels)."
  ([^CloudRedisClient this]
    (.close this)))
(defn delete-instance-callable
  "Returns the UnaryCallable that deletes a specific Redis instance. The
  instance stops serving and its data is deleted.

  returns: `com.google.api.gax.rpc.UnaryCallable<DeleteInstanceRequest, Operation>`"
  (^com.google.api.gax.rpc.UnaryCallable [^CloudRedisClient this]
    (.deleteInstanceCallable this)))
(defn create-instance-callable
  "Returns the UnaryCallable that creates a Redis instance based on the
  specified tier and memory size. By default the instance is accessible
  from the project's default network. Creation is asynchronous; the
  completed longrunning.Operation contains the new instance object in its
  response field. The returned operation is automatically deleted after a
  few hours, so there is no need to call DeleteOperation.

  returns: `com.google.api.gax.rpc.UnaryCallable<CreateInstanceRequest, Operation>`"
  (^com.google.api.gax.rpc.UnaryCallable [^CloudRedisClient this]
    (.createInstanceCallable this)))
(defn terminated?
  "True when the client has fully terminated after shutdown.

  returns: `boolean`"
  (^Boolean [^CloudRedisClient this]
    (.isTerminated this)))
(defn import-instance-operation-callable
  "Returns the OperationCallable that imports a Redis RDB snapshot file
  from Cloud Storage into a Redis instance. Redis may stop serving during
  the operation (instance state is IMPORTING for its duration); when
  complete the instance contains only the imported data. The returned
  operation is automatically deleted after a few hours.

  returns: `com.google.api.gax.rpc.OperationCallable<ImportInstanceRequest, Instance, Any>`
  (BetaApi: the surface for use by generated code is not stable yet and
  may change in the future)"
  ([^CloudRedisClient this]
    (-> this (.importInstanceOperationCallable))))
(defn update-instance-async
  "Updates the metadata and configuration of a specific Redis instance.
  The completed longrunning.Operation contains the new instance object in
  its response field. The returned operation is automatically deleted
  after a few hours, so there is no need to call DeleteOperation.

  update-mask - Required. Mask of fields to update; at least one path must
  be supplied. Allowed Instance paths: `displayName`, `labels`,
  `memorySizeGb`, `redisConfig`. - `com.google.protobuf.FieldMask`
  instance - Required. Update description; only fields named in
  update_mask are updated. - `com.google.cloud.redis.v1beta1.Instance`
  returns: `com.google.api.gax.longrunning.OperationFuture<Instance, Any>`
  (BetaApi: the surface for long-running operations is not stable yet)
  throws: com.google.api.gax.rpc.ApiException - if the remote call fails"
  ([^CloudRedisClient this ^com.google.protobuf.FieldMask update-mask ^com.google.cloud.redis.v1beta1.Instance instance]
    (-> this (.updateInstanceAsync update-mask instance)))
  ([^CloudRedisClient this ^com.google.cloud.redis.v1beta1.UpdateInstanceRequest request]
    (-> this (.updateInstanceAsync request))))
(defn update-instance-callable
  "Returns the UnaryCallable that updates the metadata and configuration
  of a specific Redis instance. The completed longrunning.Operation
  contains the new instance object in its response field. The returned
  operation is automatically deleted after a few hours.

  returns: `com.google.api.gax.rpc.UnaryCallable<UpdateInstanceRequest, Operation>`"
  (^com.google.api.gax.rpc.UnaryCallable [^CloudRedisClient this]
    (.updateInstanceCallable this)))
(defn get-instance-callable
  "Returns the UnaryCallable that gets the details of a specific Redis
  instance.

  returns: `com.google.api.gax.rpc.UnaryCallable<GetInstanceRequest, Instance>`"
  (^com.google.api.gax.rpc.UnaryCallable [^CloudRedisClient this]
    (.getInstanceCallable this)))
(defn failover-instance-async
  "Initiates a failover of the master node to the current replica node for
  a specific STANDARD tier Cloud Memorystore for Redis instance.

  name - Required. Redis instance resource name of the form
  `projects/{project_id}/locations/{location_id}/instances/{instance_id}`. - `java.lang.String`
  data-protection-mode - Optional. Data protection mode; if unspecified,
  defaults to LIMITED_DATA_LOSS. - `com.google.cloud.redis.v1beta1.FailoverInstanceRequest$DataProtectionMode`
  returns: `com.google.api.gax.longrunning.OperationFuture<Instance, Any>`
  (BetaApi: the surface for long-running operations is not stable yet)
  throws: com.google.api.gax.rpc.ApiException - if the remote call fails"
  ([^CloudRedisClient this ^java.lang.String name ^com.google.cloud.redis.v1beta1.FailoverInstanceRequest$DataProtectionMode data-protection-mode]
    (-> this (.failoverInstanceAsync name data-protection-mode)))
  ([^CloudRedisClient this ^com.google.cloud.redis.v1beta1.FailoverInstanceRequest request]
    (-> this (.failoverInstanceAsync request))))
(defn shutdown-now
  "Initiates an immediate shutdown of the client."
  ([^CloudRedisClient this]
    (.shutdownNow this)))
;; NOTE(review): dataset-extraction artifact (stray column separators and an
;; unbalanced quote) removed here so the file remains readable Clojure.
(ns com.google.cloud.redis.v1beta1.CloudRedisClient
  "Service Description: Configures and manages Cloud Memorystore for Redis
  instances (Google Cloud Memorystore for Redis v1beta1).

  The `redis.googleapis.com` service defines the following resource model:
  the service works with a collection of cloud projects (`/projects/*`);
  each project has a collection of available locations (`/locations/*`);
  each location has a collection of Redis instances (`/instances/*`). Redis
  instances are therefore resources of the form
  `/projects/{project_id}/locations/{location_id}/instances/{instance_id}`,
  for example `projects/redpepper-1290/locations/us-central1/instances/my-redis`.

  This namespace wraps remote calls to the backing service. Note that
  close() must be called on the client object to clean up resources such
  as threads.

  The API surface includes three kinds of methods per RPC: 'flattened'
  methods taking request fields as parameters, 'request object' methods
  taking a single request object, and 'callable' methods returning an
  immutable API callable. Many parameters require resource names formatted
  in a particular way; companion format/parse helpers exist for each name
  type. The client can be customized by passing a CloudRedisSettings
  instance to create(), e.g. to set credentials or the endpoint."
  (:refer-clojure :only [require comment defn ->])
  (:import [com.google.cloud.redis.v1beta1 CloudRedisClient]))
(defn *create
  "Constructs an instance of CloudRedisClient. When `settings` is given,
  channels are created from it (with defaults for anything unset);
  otherwise default settings are used.

  settings - `com.google.cloud.redis.v1beta1.CloudRedisSettings`
  returns: `com.google.cloud.redis.v1beta1.CloudRedisClient`
  throws: java.io.IOException"
  (^com.google.cloud.redis.v1beta1.CloudRedisClient []
    (CloudRedisClient/create))
  (^com.google.cloud.redis.v1beta1.CloudRedisClient [^com.google.cloud.redis.v1beta1.CloudRedisSettings settings]
    (CloudRedisClient/create settings)))
(defn delete-instance-operation-callable
  "Returns the OperationCallable that deletes a specific Redis instance.
  The instance stops serving and its data is deleted.

  returns: `com.google.api.gax.rpc.OperationCallable<DeleteInstanceRequest, Empty, Any>`
  (BetaApi: the surface for use by generated code is not stable yet and
  may change in the future)"
  ([^CloudRedisClient this]
    (-> this (.deleteInstanceOperationCallable))))
(defn export-instance-async
  "Exports Redis instance data into a Redis RDB format file in Cloud
  Storage. Redis continues serving during the operation. The returned
  operation is automatically deleted after a few hours, so there is no
  need to call DeleteOperation.

  name - Required. Redis instance resource name of the form
  `projects/{project_id}/locations/{location_id}/instances/{instance_id}`. - `java.lang.String`
  output-config - Required. Specify data to be exported. - `com.google.cloud.redis.v1beta1.OutputConfig`
  returns: `com.google.api.gax.longrunning.OperationFuture<Instance, Any>`
  (BetaApi: the surface for long-running operations is not stable yet)
  throws: com.google.api.gax.rpc.ApiException - if the remote call fails"
  ([^CloudRedisClient this ^java.lang.String name ^com.google.cloud.redis.v1beta1.OutputConfig output-config]
    (-> this (.exportInstanceAsync name output-config)))
  ([^CloudRedisClient this ^com.google.cloud.redis.v1beta1.ExportInstanceRequest request]
    (-> this (.exportInstanceAsync request))))
(defn failover-instance-callable
  "Returns the UnaryCallable that initiates a failover of the master node
  to the current replica node for a specific STANDARD tier Cloud
  Memorystore for Redis instance.

  returns: `com.google.api.gax.rpc.UnaryCallable<FailoverInstanceRequest, Operation>`"
  (^com.google.api.gax.rpc.UnaryCallable [^CloudRedisClient this]
    (.failoverInstanceCallable this)))
(defn export-instance-callable
  "Returns the UnaryCallable that exports Redis instance data into a Redis
  RDB format file in Cloud Storage. Redis continues serving during the
  operation. The returned operation is automatically deleted after a few
  hours, so there is no need to call DeleteOperation.

  returns: `com.google.api.gax.rpc.UnaryCallable<ExportInstanceRequest, Operation>`"
  (^com.google.api.gax.rpc.UnaryCallable [^CloudRedisClient this]
    (.exportInstanceCallable this)))
(defn get-settings
  "Returns the settings this client was constructed with.

  returns: `com.google.cloud.redis.v1beta1.CloudRedisSettings`"
  (^com.google.cloud.redis.v1beta1.CloudRedisSettings [^CloudRedisClient this]
    (.getSettings this)))
(defn list-instances-callable
  "Returns the UnaryCallable that lists all Redis instances owned by a
  project, either in the specified location (region) or, when
  `location_id` is the wildcard `-`, across every region available to the
  project (results are aggregated). Pagination must be handled manually
  via the response's next page token.

  returns: `com.google.api.gax.rpc.UnaryCallable<ListInstancesRequest, ListInstancesResponse>`"
  (^com.google.api.gax.rpc.UnaryCallable [^CloudRedisClient this]
    (.listInstancesCallable this)))
(defn export-instance-operation-callable
  "Returns the OperationCallable for exporting Redis instance data into a
  Redis RDB format file in Cloud Storage. Redis continues serving during
  the operation. The returned long-running operation is automatically
  deleted after a few hours, so there is no need to call DeleteOperation.

  returns: `com.google.api.gax.rpc.OperationCallable<ExportInstanceRequest, Instance, Any>`
  (BetaApi: the surface for use by generated code is not stable yet and
  may change in the future)"
  ([^CloudRedisClient this]
    (-> this (.exportInstanceOperationCallable))))
(defn get-stub
"returns: `(value="A restructuring of stub classes is planned, so this may break in the future") com.google.cloud.redis.v1beta1.stub.CloudRedisStub`"
([^CloudRedisClient this]
(-> this (.getStub))))
(defn list-instances
"Lists all Redis instances owned by a project in either the specified location (region) or all
locations.
The location should have the following format: *
`projects/{project_id}/locations/{location_id}`
If `location_id` is specified as `-` (wildcard), then all regions available to the project
are queried, and the results are aggregated.
Sample code:
try (CloudRedisClient cloudRedisClient = CloudRedisClient.create()) {
for (Instance element : cloudRedisClient.listInstances(parent).iterateAll()) {
}
}
parent - Required. The resource name of the instance location using the form: `projects/{project_id}/locations/{location_id}` where `location_id` refers to a GCP region. - `com.google.cloud.redis.v1beta1.LocationName`
returns: `com.google.cloud.redis.v1beta1.CloudRedisClient$ListInstancesPagedResponse`
throws: com.google.api.gax.rpc.ApiException - if the remote call fails"
(^com.google.cloud.redis.v1beta1.CloudRedisClient$ListInstancesPagedResponse [^CloudRedisClient this ^com.google.cloud.redis.v1beta1.LocationName parent]
(-> this (.listInstances parent))))
(defn list-instances-paged-callable
"Lists all Redis instances owned by a project in either the specified location (region) or all
locations.
The location should have the following format: *
`projects/{project_id}/locations/{location_id}`
If `location_id` is specified as `-` (wildcard), then all regions available to the project
are queried, and the results are aggregated.
Sample code:
try (CloudRedisClient cloudRedisClient = CloudRedisClient.create()) {
ListInstancesRequest request = ListInstancesRequest.newBuilder()
.setParent(parent.toString())
// Do something
for (Instance element : future.get().iterateAll()) {
}
}
returns: `com.google.api.gax.rpc.UnaryCallable<com.google.cloud.redis.v1beta1.ListInstancesRequest,com.google.cloud.redis.v1beta1.CloudRedisClient$ListInstancesPagedResponse>`"
(^com.google.api.gax.rpc.UnaryCallable [^CloudRedisClient this]
(-> this (.listInstancesPagedCallable))))
(defn create-instance-async
"Creates a Redis instance based on the specified tier and memory size.
By default, the instance is accessible from the project's [default
network](/compute/docs/networks-and-firewalls#networks).
The creation is executed asynchronously and callers may check the returned operation to
track its progress. Once the operation is completed the Redis instance will be fully
functional. Completed longrunning.Operation will contain the new instance object in the
response field.
The returned operation is automatically deleted after a few hours, so there is no need to
call DeleteOperation.
Sample code:
try (CloudRedisClient cloudRedisClient = CloudRedisClient.create()) {
Instance instance = Instance.newBuilder()
.setTier(tier)
.setMemorySizeGb(memorySizeGb)
}
parent - Required. The resource name of the instance location using the form: `projects/{project_id}/locations/{location_id}` where `location_id` refers to a GCP region. - `com.google.cloud.redis.v1beta1.LocationName`
instance-id - Required. The logical name of the Redis instance in the customer project with the following restrictions: * Must contain only lowercase letters, numbers, and hyphens. * Must start with a letter. * Must be between 1-40 characters. * Must end with a number or a letter. * Must be unique within the customer project / location - `java.lang.String`
instance - Required. A Redis [Instance] resource - `com.google.cloud.redis.v1beta1.Instance`
returns: `(value="The surface for long-running operations is not stable yet and may change in the future.") com.google.api.gax.longrunning.OperationFuture<com.google.cloud.redis.v1beta1.Instance,com.google.protobuf.Any>`
throws: com.google.api.gax.rpc.ApiException - if the remote call fails"
([^CloudRedisClient this ^com.google.cloud.redis.v1beta1.LocationName parent ^java.lang.String instance-id ^com.google.cloud.redis.v1beta1.Instance instance]
(-> this (.createInstanceAsync parent instance-id instance)))
([^CloudRedisClient this ^com.google.cloud.redis.v1beta1.CreateInstanceRequest request]
(-> this (.createInstanceAsync request))))
(defn shutdown?
"returns: `boolean`"
(^Boolean [^CloudRedisClient this]
(-> this (.isShutdown))))
(defn failover-instance-operation-callable
"Initiates a failover of the master node to current replica node for a specific STANDARD tier
Cloud Memorystore for Redis instance.
Sample code:
try (CloudRedisClient cloudRedisClient = CloudRedisClient.create()) {
FailoverInstanceRequest request = FailoverInstanceRequest.newBuilder()
.setName(formattedName)
.setDataProtectionMode(dataProtectionMode)
// Do something
}
returns: `(value="The surface for use by generated code is not stable yet and may change in the future.") com.google.api.gax.rpc.OperationCallable<com.google.cloud.redis.v1beta1.FailoverInstanceRequest,com.google.cloud.redis.v1beta1.Instance,com.google.protobuf.Any>`"
([^CloudRedisClient this]
(-> this (.failoverInstanceOperationCallable))))
(defn await-termination
"duration - `long`
unit - `java.util.concurrent.TimeUnit`
returns: `boolean`
throws: java.lang.InterruptedException"
(^Boolean [^CloudRedisClient this ^Long duration ^java.util.concurrent.TimeUnit unit]
(-> this (.awaitTermination duration unit))))
(defn import-instance-callable
"Import a Redis RDB snapshot file from Cloud Storage into a Redis instance.
Redis may stop serving during this operation. Instance state will be IMPORTING for entire
operation. When complete, the instance will contain only data from the imported file.
The returned operation is automatically deleted after a few hours, so there is no need to
call DeleteOperation.
Sample code:
try (CloudRedisClient cloudRedisClient = CloudRedisClient.create()) {
ImportInstanceRequest request = ImportInstanceRequest.newBuilder()
.setName(formattedName)
.setInputConfig(inputConfig)
// Do something
}
returns: `com.google.api.gax.rpc.UnaryCallable<com.google.cloud.redis.v1beta1.ImportInstanceRequest,com.google.longrunning.Operation>`"
(^com.google.api.gax.rpc.UnaryCallable [^CloudRedisClient this]
(-> this (.importInstanceCallable))))
(defn create-instance-operation-callable
"Creates a Redis instance based on the specified tier and memory size.
By default, the instance is accessible from the project's [default
network](/compute/docs/networks-and-firewalls#networks).
The creation is executed asynchronously and callers may check the returned operation to
track its progress. Once the operation is completed the Redis instance will be fully
functional. Completed longrunning.Operation will contain the new instance object in the
response field.
The returned operation is automatically deleted after a few hours, so there is no need to
call DeleteOperation.
Sample code:
try (CloudRedisClient cloudRedisClient = CloudRedisClient.create()) {
Instance instance = Instance.newBuilder()
.setTier(tier)
.setMemorySizeGb(memorySizeGb)
CreateInstanceRequest request = CreateInstanceRequest.newBuilder()
.setParent(parent.toString())
.setInstanceId(instanceId)
.setInstance(instance)
// Do something
}
returns: `(value="The surface for use by generated code is not stable yet and may change in the future.") com.google.api.gax.rpc.OperationCallable<com.google.cloud.redis.v1beta1.CreateInstanceRequest,com.google.cloud.redis.v1beta1.Instance,com.google.protobuf.Any>`"
([^CloudRedisClient this]
(-> this (.createInstanceOperationCallable))))
(defn update-instance-operation-callable
"Updates the metadata and configuration of a specific Redis instance.
Completed longrunning.Operation will contain the new instance object in the response field.
The returned operation is automatically deleted after a few hours, so there is no need to call
DeleteOperation.
Sample code:
try (CloudRedisClient cloudRedisClient = CloudRedisClient.create()) {
FieldMask updateMask = FieldMask.newBuilder()
.addAllPaths(paths)
Instance instance = Instance.newBuilder()
.setDisplayName(displayName)
.setMemorySizeGb(memorySizeGb)
UpdateInstanceRequest request = UpdateInstanceRequest.newBuilder()
.setUpdateMask(updateMask)
.setInstance(instance)
// Do something
}
returns: `(value="The surface for use by generated code is not stable yet and may change in the future.") com.google.api.gax.rpc.OperationCallable<com.google.cloud.redis.v1beta1.UpdateInstanceRequest,com.google.cloud.redis.v1beta1.Instance,com.google.protobuf.Any>`"
([^CloudRedisClient this]
(-> this (.updateInstanceOperationCallable))))
(defn shutdown
""
([^CloudRedisClient this]
(-> this (.shutdown))))
(defn get-instance
"Gets the details of a specific Redis instance.
Sample code:
try (CloudRedisClient cloudRedisClient = CloudRedisClient.create()) {
}
name - Required. Redis instance resource name using the form: `projects/{project_id}/locations/{location_id}/instances/{instance_id}` where `location_id` refers to a GCP region. - `com.google.cloud.redis.v1beta1.InstanceName`
returns: `com.google.cloud.redis.v1beta1.Instance`
throws: com.google.api.gax.rpc.ApiException - if the remote call fails"
(^com.google.cloud.redis.v1beta1.Instance [^CloudRedisClient this ^com.google.cloud.redis.v1beta1.InstanceName name]
(-> this (.getInstance name))))
(defn delete-instance-async
"Deletes a specific Redis instance. Instance stops serving and data is deleted.
Sample code:
try (CloudRedisClient cloudRedisClient = CloudRedisClient.create()) {
}
name - Required. Redis instance resource name using the form: `projects/{project_id}/locations/{location_id}/instances/{instance_id}` where `location_id` refers to a GCP region. - `com.google.cloud.redis.v1beta1.InstanceName`
returns: `(value="The surface for long-running operations is not stable yet and may change in the future.") com.google.api.gax.longrunning.OperationFuture<com.google.protobuf.Empty,com.google.protobuf.Any>`
throws: com.google.api.gax.rpc.ApiException - if the remote call fails"
([^CloudRedisClient this ^com.google.cloud.redis.v1beta1.InstanceName name]
(-> this (.deleteInstanceAsync name))))
(defn get-operations-client
"Returns the OperationsClient that can be used to query the status of a long-running operation
returned by another API method call.
returns: `(value="The surface for long-running operations is not stable yet and may change in the future.") com.google.longrunning.OperationsClient`"
([^CloudRedisClient this]
(-> this (.getOperationsClient))))
(defn import-instance-async
"Import a Redis RDB snapshot file from Cloud Storage into a Redis instance.
Redis may stop serving during this operation. Instance state will be IMPORTING for entire
operation. When complete, the instance will contain only data from the imported file.
The returned operation is automatically deleted after a few hours, so there is no need to
call DeleteOperation.
Sample code:
try (CloudRedisClient cloudRedisClient = CloudRedisClient.create()) {
}
name - Required. Redis instance resource name using the form: `projects/{project_id}/locations/{location_id}/instances/{instance_id}` where `location_id` refers to a GCP region. - `java.lang.String`
input-config - Required. Specify data to be imported. - `com.google.cloud.redis.v1beta1.InputConfig`
returns: `(value="The surface for long-running operations is not stable yet and may change in the future.") com.google.api.gax.longrunning.OperationFuture<com.google.cloud.redis.v1beta1.Instance,com.google.protobuf.Any>`
throws: com.google.api.gax.rpc.ApiException - if the remote call fails"
([^CloudRedisClient this ^java.lang.String name ^com.google.cloud.redis.v1beta1.InputConfig input-config]
(-> this (.importInstanceAsync name input-config)))
([^CloudRedisClient this ^com.google.cloud.redis.v1beta1.ImportInstanceRequest request]
(-> this (.importInstanceAsync request))))
(defn close
""
([^CloudRedisClient this]
(-> this (.close))))
(defn delete-instance-callable
"Deletes a specific Redis instance. Instance stops serving and data is deleted.
Sample code:
try (CloudRedisClient cloudRedisClient = CloudRedisClient.create()) {
DeleteInstanceRequest request = DeleteInstanceRequest.newBuilder()
.setName(name.toString())
// Do something
}
returns: `com.google.api.gax.rpc.UnaryCallable<com.google.cloud.redis.v1beta1.DeleteInstanceRequest,com.google.longrunning.Operation>`"
(^com.google.api.gax.rpc.UnaryCallable [^CloudRedisClient this]
(-> this (.deleteInstanceCallable))))
(defn create-instance-callable
"Creates a Redis instance based on the specified tier and memory size.
By default, the instance is accessible from the project's [default
network](/compute/docs/networks-and-firewalls#networks).
The creation is executed asynchronously and callers may check the returned operation to
track its progress. Once the operation is completed the Redis instance will be fully
functional. Completed longrunning.Operation will contain the new instance object in the
response field.
The returned operation is automatically deleted after a few hours, so there is no need to
call DeleteOperation.
Sample code:
try (CloudRedisClient cloudRedisClient = CloudRedisClient.create()) {
Instance instance = Instance.newBuilder()
.setTier(tier)
.setMemorySizeGb(memorySizeGb)
CreateInstanceRequest request = CreateInstanceRequest.newBuilder()
.setParent(parent.toString())
.setInstanceId(instanceId)
.setInstance(instance)
// Do something
}
returns: `com.google.api.gax.rpc.UnaryCallable<com.google.cloud.redis.v1beta1.CreateInstanceRequest,com.google.longrunning.Operation>`"
(^com.google.api.gax.rpc.UnaryCallable [^CloudRedisClient this]
(-> this (.createInstanceCallable))))
(defn terminated?
"returns: `boolean`"
(^Boolean [^CloudRedisClient this]
(-> this (.isTerminated))))
(defn import-instance-operation-callable
"Import a Redis RDB snapshot file from Cloud Storage into a Redis instance.
Redis may stop serving during this operation. Instance state will be IMPORTING for entire
operation. When complete, the instance will contain only data from the imported file.
The returned operation is automatically deleted after a few hours, so there is no need to
call DeleteOperation.
Sample code:
try (CloudRedisClient cloudRedisClient = CloudRedisClient.create()) {
ImportInstanceRequest request = ImportInstanceRequest.newBuilder()
.setName(formattedName)
.setInputConfig(inputConfig)
// Do something
}
returns: `(value="The surface for use by generated code is not stable yet and may change in the future.") com.google.api.gax.rpc.OperationCallable<com.google.cloud.redis.v1beta1.ImportInstanceRequest,com.google.cloud.redis.v1beta1.Instance,com.google.protobuf.Any>`"
([^CloudRedisClient this]
(-> this (.importInstanceOperationCallable))))
(defn update-instance-async
"Updates the metadata and configuration of a specific Redis instance.
Completed longrunning.Operation will contain the new instance object in the response field.
The returned operation is automatically deleted after a few hours, so there is no need to call
DeleteOperation.
Sample code:
try (CloudRedisClient cloudRedisClient = CloudRedisClient.create()) {
FieldMask updateMask = FieldMask.newBuilder()
.addAllPaths(paths)
Instance instance = Instance.newBuilder()
.setDisplayName(displayName)
.setMemorySizeGb(memorySizeGb)
}
update-mask - Required. Mask of fields to update. At least one path must be supplied in this field. The elements of the repeated paths field may only include these fields from [Instance][google.cloud.redis.v1beta1.Instance]: * `displayName` * `labels` * `memorySizeGb` * `redisConfig` - `com.google.protobuf.FieldMask`
instance - Required. Update description. Only fields specified in update_mask are updated. - `com.google.cloud.redis.v1beta1.Instance`
returns: `(value="The surface for long-running operations is not stable yet and may change in the future.") com.google.api.gax.longrunning.OperationFuture<com.google.cloud.redis.v1beta1.Instance,com.google.protobuf.Any>`
throws: com.google.api.gax.rpc.ApiException - if the remote call fails"
([^CloudRedisClient this ^com.google.protobuf.FieldMask update-mask ^com.google.cloud.redis.v1beta1.Instance instance]
(-> this (.updateInstanceAsync update-mask instance)))
([^CloudRedisClient this ^com.google.cloud.redis.v1beta1.UpdateInstanceRequest request]
(-> this (.updateInstanceAsync request))))
(defn update-instance-callable
"Updates the metadata and configuration of a specific Redis instance.
Completed longrunning.Operation will contain the new instance object in the response field.
The returned operation is automatically deleted after a few hours, so there is no need to call
DeleteOperation.
Sample code:
try (CloudRedisClient cloudRedisClient = CloudRedisClient.create()) {
FieldMask updateMask = FieldMask.newBuilder()
.addAllPaths(paths)
Instance instance = Instance.newBuilder()
.setDisplayName(displayName)
.setMemorySizeGb(memorySizeGb)
UpdateInstanceRequest request = UpdateInstanceRequest.newBuilder()
.setUpdateMask(updateMask)
.setInstance(instance)
// Do something
}
returns: `com.google.api.gax.rpc.UnaryCallable<com.google.cloud.redis.v1beta1.UpdateInstanceRequest,com.google.longrunning.Operation>`"
(^com.google.api.gax.rpc.UnaryCallable [^CloudRedisClient this]
(-> this (.updateInstanceCallable))))
(defn get-instance-callable
"Gets the details of a specific Redis instance.
Sample code:
try (CloudRedisClient cloudRedisClient = CloudRedisClient.create()) {
GetInstanceRequest request = GetInstanceRequest.newBuilder()
.setName(name.toString())
// Do something
}
returns: `com.google.api.gax.rpc.UnaryCallable<com.google.cloud.redis.v1beta1.GetInstanceRequest,com.google.cloud.redis.v1beta1.Instance>`"
(^com.google.api.gax.rpc.UnaryCallable [^CloudRedisClient this]
(-> this (.getInstanceCallable))))
(defn failover-instance-async
"Initiates a failover of the master node to current replica node for a specific STANDARD tier
Cloud Memorystore for Redis instance.
Sample code:
try (CloudRedisClient cloudRedisClient = CloudRedisClient.create()) {
}
name - Required. Redis instance resource name using the form: `projects/{project_id}/locations/{location_id}/instances/{instance_id}` where `location_id` refers to a GCP region. - `java.lang.String`
data-protection-mode - Optional. Available data protection modes that the user can choose. If it's unspecified, data protection mode will be LIMITED_DATA_LOSS by default. - `com.google.cloud.redis.v1beta1.FailoverInstanceRequest$DataProtectionMode`
returns: `(value="The surface for long-running operations is not stable yet and may change in the future.") com.google.api.gax.longrunning.OperationFuture<com.google.cloud.redis.v1beta1.Instance,com.google.protobuf.Any>`
throws: com.google.api.gax.rpc.ApiException - if the remote call fails"
([^CloudRedisClient this ^java.lang.String name ^com.google.cloud.redis.v1beta1.FailoverInstanceRequest$DataProtectionMode data-protection-mode]
(-> this (.failoverInstanceAsync name data-protection-mode)))
([^CloudRedisClient this ^com.google.cloud.redis.v1beta1.FailoverInstanceRequest request]
(-> this (.failoverInstanceAsync request))))
(defn shutdown-now
""
([^CloudRedisClient this]
(-> this (.shutdownNow))))
|
5d13babde5f78679630aee89a49b053a1f770855f36bdbb0626153b81dcef7cc | metaphor/lein-flyway | project.clj | (defproject sample "0.1.0-SNAPSHOT"
:description "sample"
:url ""
:min-lein-version "2.0.0"
:dependencies [[org.clojure/clojure "1.9.0"]]
:profiles {:dev {:dependencies [[mysql/mysql-connector-java "5.1.36"]]}}
;; Usually you need put your migrations in resource classpath
:resource-paths ["src-resources"]
:plugins [[com.github.metaphor/lein-flyway "6.0.0-SNAPSHOT"]]
Flyway Database Migration configuration
:flyway {:driver "com.mysql.jdbc.Driver"
:url "jdbc:mysql:8806/leinflyway"
:user "root"
:password "donotuseroot"})
| null | https://raw.githubusercontent.com/metaphor/lein-flyway/7f320138027b258a3d457ab38cd62a1bc54c29ce/example/project.clj | clojure | Usually you need put your migrations in resource classpath | (defproject sample "0.1.0-SNAPSHOT"
:description "sample"
:url ""
:min-lein-version "2.0.0"
:dependencies [[org.clojure/clojure "1.9.0"]]
:profiles {:dev {:dependencies [[mysql/mysql-connector-java "5.1.36"]]}}
:resource-paths ["src-resources"]
:plugins [[com.github.metaphor/lein-flyway "6.0.0-SNAPSHOT"]]
Flyway Database Migration configuration
:flyway {:driver "com.mysql.jdbc.Driver"
:url "jdbc:mysql:8806/leinflyway"
:user "root"
:password "donotuseroot"})
|
9b5699aa8d1e6ab7f08de8034b73f851add8aae69aaf12251aac9a2faa2fa77f | facebook/duckling | Tests.hs | Copyright ( c ) 2016 - present , Facebook , Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
module Duckling.Duration.MN.Tests
( tests
) where
import Data.String
import Prelude
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Duration.MN.Corpus
import Duckling.Testing.Asserts
tests :: TestTree
tests = testGroup "MN Tests"
[ makeCorpusTest [Seal Duration] corpus
]
| null | https://raw.githubusercontent.com/facebook/duckling/72f45e8e2c7385f41f2f8b1f063e7b5daa6dca94/tests/Duckling/Duration/MN/Tests.hs | haskell | All rights reserved.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree. | Copyright ( c ) 2016 - present , Facebook , Inc.
module Duckling.Duration.MN.Tests
( tests
) where
import Data.String
import Prelude
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Duration.MN.Corpus
import Duckling.Testing.Asserts
tests :: TestTree
tests = testGroup "MN Tests"
[ makeCorpusTest [Seal Duration] corpus
]
|
823033acc79fef7d3d1ec40cc2baa885eb8ad6b253d5e33274b145f5950e4ab5 | linuxsoares/artigos-clojure | config.clj | (ns hello-web-app.config
(:require [cprop.core :refer [load-config]]
[cprop.source :as source]
[mount.core :refer [args defstate]]))
(defstate env :start (load-config
:merge
[(args)
(source/from-system-props)
(source/from-env)]))
| null | https://raw.githubusercontent.com/linuxsoares/artigos-clojure/6c4183e767e71168c778973dd5831e7c9edfaf4a/criando-web-application-clojure/hello-web-app/src/clj/hello_web_app/config.clj | clojure | (ns hello-web-app.config
(:require [cprop.core :refer [load-config]]
[cprop.source :as source]
[mount.core :refer [args defstate]]))
(defstate env :start (load-config
:merge
[(args)
(source/from-system-props)
(source/from-env)]))
| |
fb187c710cc54f3aecc69c288996b3efe951dca86d13e3b996cd004859288e42 | lucasdicioccio/prodapi | Reports.hs | module Prod.Gen.Docs.Reports where
import Prod.Reports
import Servant.Docs
import Data.Proxy
import GHC.Generics
import Data.Aeson
import Data.Text (Text)
data Example = Example { stackTrace :: [Text] }
deriving (Generic)
instance ToJSON Example
instance FromJSON Example
instance ToSample Int where
toSamples _ =
[ ("an example integer", 42) ]
instance ToSample (Report Example) where
toSamples _ =
[ ("an example of stack-trace reporting", Report 1611183428 0 [ Example [ "err toto.js at 236: undefined is not a function" ] ]) ]
run :: IO ()
run = putStrLn $ markdown $ docs (Proxy @(ReportsApi Example))
| null | https://raw.githubusercontent.com/lucasdicioccio/prodapi/4c43e1d617832f8ae88cb15afada1d5ab5e46ea4/prodapi-gen/src/Prod/Gen/Docs/Reports.hs | haskell | module Prod.Gen.Docs.Reports where
import Prod.Reports
import Servant.Docs
import Data.Proxy
import GHC.Generics
import Data.Aeson
import Data.Text (Text)
data Example = Example { stackTrace :: [Text] }
deriving (Generic)
instance ToJSON Example
instance FromJSON Example
instance ToSample Int where
toSamples _ =
[ ("an example integer", 42) ]
instance ToSample (Report Example) where
toSamples _ =
[ ("an example of stack-trace reporting", Report 1611183428 0 [ Example [ "err toto.js at 236: undefined is not a function" ] ]) ]
run :: IO ()
run = putStrLn $ markdown $ docs (Proxy @(ReportsApi Example))
| |
8dc5ae88a34da2e4f785d134a52a8985cb1be2dc0f5cd6e13a1d7d4d1cc96e94 | sdiehl/kaleidoscope | Codegen.hs | {-# LANGUAGE OverloadedStrings #-}
# LANGUAGE GeneralizedNewtypeDeriving #
module Codegen where
import Data.Word
import Data.String
import Data.List
import Data.Function
import qualified Data.Map as Map
import Control.Monad.State
import Control.Applicative
import LLVM.AST
import LLVM.AST.Global
import qualified LLVM.AST as AST
import qualified LLVM.AST.Linkage as L
import qualified LLVM.AST.Constant as C
import qualified LLVM.AST.Attribute as A
import qualified LLVM.AST.CallingConvention as CC
import qualified LLVM.AST.FloatingPointPredicate as FP
-------------------------------------------------------------------------------
-- Module Level
-------------------------------------------------------------------------------
newtype LLVM a = LLVM (State AST.Module a)
deriving (Functor, Applicative, Monad, MonadState AST.Module )
runLLVM :: AST.Module -> LLVM a -> AST.Module
runLLVM mod (LLVM m) = execState m mod
emptyModule :: String -> AST.Module
emptyModule label = defaultModule { moduleName = label }
addDefn :: Definition -> LLVM ()
addDefn d = do
defs <- gets moduleDefinitions
modify $ \s -> s { moduleDefinitions = defs ++ [d] }
define :: Type -> String -> [(Type, Name)] -> [BasicBlock] -> LLVM ()
define retty label argtys body = addDefn $
GlobalDefinition $ functionDefaults {
name = Name label
, parameters = ([Parameter ty nm [] | (ty, nm) <- argtys], False)
, returnType = retty
, basicBlocks = body
}
external :: Type -> String -> [(Type, Name)] -> LLVM ()
external retty label argtys = addDefn $
GlobalDefinition $ functionDefaults {
name = Name label
, linkage = L.External
, parameters = ([Parameter ty nm [] | (ty, nm) <- argtys], False)
, returnType = retty
, basicBlocks = []
}
---------------------------------------------------------------------------------
-- Types
-------------------------------------------------------------------------------
IEEE 754 double
double :: Type
double = FloatingPointType 64 IEEE
-------------------------------------------------------------------------------
-- Names
-------------------------------------------------------------------------------
type Names = Map.Map String Int
uniqueName :: String -> Names -> (String, Names)
uniqueName nm ns =
case Map.lookup nm ns of
Nothing -> (nm, Map.insert nm 1 ns)
Just ix -> (nm ++ show ix, Map.insert nm (ix+1) ns)
-------------------------------------------------------------------------------
Codegen State
-------------------------------------------------------------------------------
type SymbolTable = [(String, Operand)]
data CodegenState
= CodegenState {
currentBlock :: Name -- Name of the active block to append to
, blocks :: Map.Map Name BlockState -- Blocks for function
, symtab :: SymbolTable -- Function scope symbol table
, blockCount :: Int -- Count of basic blocks
, count :: Word -- Count of unnamed instructions
, names :: Names -- Name Supply
} deriving Show
data BlockState
= BlockState {
idx :: Int -- Block index
Stack of instructions
, term :: Maybe (Named Terminator) -- Block terminator
} deriving Show
-------------------------------------------------------------------------------
Codegen Operations
-------------------------------------------------------------------------------
newtype Codegen a = Codegen { runCodegen :: State CodegenState a }
deriving (Functor, Applicative, Monad, MonadState CodegenState )
sortBlocks :: [(Name, BlockState)] -> [(Name, BlockState)]
sortBlocks = sortBy (compare `on` (idx . snd))
createBlocks :: CodegenState -> [BasicBlock]
createBlocks m = map makeBlock $ sortBlocks $ Map.toList (blocks m)
makeBlock :: (Name, BlockState) -> BasicBlock
makeBlock (l, (BlockState _ s t)) = BasicBlock l (reverse s) (maketerm t)
where
maketerm (Just x) = x
maketerm Nothing = error $ "Block has no terminator: " ++ (show l)
entryBlockName :: String
entryBlockName = "entry"
emptyBlock :: Int -> BlockState
emptyBlock i = BlockState i [] Nothing
emptyCodegen :: CodegenState
emptyCodegen = CodegenState (Name entryBlockName) Map.empty [] 1 0 Map.empty
execCodegen :: Codegen a -> CodegenState
execCodegen m = execState (runCodegen m) emptyCodegen
fresh :: Codegen Word
fresh = do
i <- gets count
modify $ \s -> s { count = 1 + i }
return $ i + 1
instr :: Instruction -> Codegen (Operand)
instr ins = do
n <- fresh
let ref = (UnName n)
blk <- current
let i = stack blk
modifyBlock (blk { stack = (ref := ins) : i } )
return $ local ref
terminator :: Named Terminator -> Codegen (Named Terminator)
terminator trm = do
blk <- current
modifyBlock (blk { term = Just trm })
return trm
-------------------------------------------------------------------------------
Block Stack
-------------------------------------------------------------------------------
entry :: Codegen Name
entry = gets currentBlock
addBlock :: String -> Codegen Name
addBlock bname = do
bls <- gets blocks
ix <- gets blockCount
nms <- gets names
let new = emptyBlock ix
(qname, supply) = uniqueName bname nms
modify $ \s -> s { blocks = Map.insert (Name qname) new bls
, blockCount = ix + 1
, names = supply
}
return (Name qname)
setBlock :: Name -> Codegen Name
setBlock bname = do
modify $ \s -> s { currentBlock = bname }
return bname
getBlock :: Codegen Name
getBlock = gets currentBlock
modifyBlock :: BlockState -> Codegen ()
modifyBlock new = do
active <- gets currentBlock
modify $ \s -> s { blocks = Map.insert active new (blocks s) }
current :: Codegen BlockState
current = do
c <- gets currentBlock
blks <- gets blocks
case Map.lookup c blks of
Just x -> return x
Nothing -> error $ "No such block: " ++ show c
-------------------------------------------------------------------------------
-- Symbol Table
-------------------------------------------------------------------------------
assign :: String -> Operand -> Codegen ()
assign var x = do
lcls <- gets symtab
modify $ \s -> s { symtab = [(var, x)] ++ lcls }
getvar :: String -> Codegen Operand
getvar var = do
syms <- gets symtab
case lookup var syms of
Just x -> return x
Nothing -> error $ "Local variable not in scope: " ++ show var
-------------------------------------------------------------------------------
-- References
local :: Name -> Operand
local = LocalReference double
global :: Name -> C.Constant
global = C.GlobalReference double
externf :: Name -> Operand
externf = ConstantOperand . C.GlobalReference double
-- Arithmetic and Constants
fadd :: Operand -> Operand -> Codegen Operand
fadd a b = instr $ FAdd NoFastMathFlags a b []
fsub :: Operand -> Operand -> Codegen Operand
fsub a b = instr $ FSub NoFastMathFlags a b []
fmul :: Operand -> Operand -> Codegen Operand
fmul a b = instr $ FMul NoFastMathFlags a b []
fdiv :: Operand -> Operand -> Codegen Operand
fdiv a b = instr $ FDiv NoFastMathFlags a b []
fcmp :: FP.FloatingPointPredicate -> Operand -> Operand -> Codegen Operand
fcmp cond a b = instr $ FCmp cond a b []
cons :: C.Constant -> Operand
cons = ConstantOperand
uitofp :: Type -> Operand -> Codegen Operand
uitofp ty a = instr $ UIToFP a ty []
toArgs :: [Operand] -> [(Operand, [A.ParameterAttribute])]
toArgs = map (\x -> (x, []))
-- Effects
call :: Operand -> [Operand] -> Codegen Operand
call fn args = instr $ Call Nothing CC.C [] (Right fn) (toArgs args) [] []
alloca :: Type -> Codegen Operand
alloca ty = instr $ Alloca ty Nothing 0 []
store :: Operand -> Operand -> Codegen Operand
store ptr val = instr $ Store False ptr val Nothing 0 []
load :: Operand -> Codegen Operand
load ptr = instr $ Load False ptr Nothing 0 []
Control Flow
br :: Name -> Codegen (Named Terminator)
br val = terminator $ Do $ Br val []
cbr :: Operand -> Name -> Name -> Codegen (Named Terminator)
cbr cond tr fl = terminator $ Do $ CondBr cond tr fl []
ret :: Operand -> Codegen (Named Terminator)
ret val = terminator $ Do $ Ret (Just val) []
| null | https://raw.githubusercontent.com/sdiehl/kaleidoscope/682bdafe6d8f90caca4cdd0adb30bd3ebd9eff7b/src/chapter3/Codegen.hs | haskell | # LANGUAGE OverloadedStrings #
-----------------------------------------------------------------------------
Module Level
-----------------------------------------------------------------------------
-------------------------------------------------------------------------------
Types
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
Names
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
Name of the active block to append to
Blocks for function
Function scope symbol table
Count of basic blocks
Count of unnamed instructions
Name Supply
Block index
Block terminator
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
Symbol Table
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
References
Arithmetic and Constants
Effects | # LANGUAGE GeneralizedNewtypeDeriving #
module Codegen where
import Data.Word
import Data.String
import Data.List
import Data.Function
import qualified Data.Map as Map
import Control.Monad.State
import Control.Applicative
import LLVM.AST
import LLVM.AST.Global
import qualified LLVM.AST as AST
import qualified LLVM.AST.Linkage as L
import qualified LLVM.AST.Constant as C
import qualified LLVM.AST.Attribute as A
import qualified LLVM.AST.CallingConvention as CC
import qualified LLVM.AST.FloatingPointPredicate as FP
newtype LLVM a = LLVM (State AST.Module a)
deriving (Functor, Applicative, Monad, MonadState AST.Module )
runLLVM :: AST.Module -> LLVM a -> AST.Module
runLLVM mod (LLVM m) = execState m mod
emptyModule :: String -> AST.Module
emptyModule label = defaultModule { moduleName = label }
addDefn :: Definition -> LLVM ()
addDefn d = do
defs <- gets moduleDefinitions
modify $ \s -> s { moduleDefinitions = defs ++ [d] }
define :: Type -> String -> [(Type, Name)] -> [BasicBlock] -> LLVM ()
define retty label argtys body = addDefn $
GlobalDefinition $ functionDefaults {
name = Name label
, parameters = ([Parameter ty nm [] | (ty, nm) <- argtys], False)
, returnType = retty
, basicBlocks = body
}
external :: Type -> String -> [(Type, Name)] -> LLVM ()
external retty label argtys = addDefn $
GlobalDefinition $ functionDefaults {
name = Name label
, linkage = L.External
, parameters = ([Parameter ty nm [] | (ty, nm) <- argtys], False)
, returnType = retty
, basicBlocks = []
}
IEEE 754 double
double :: Type
double = FloatingPointType 64 IEEE
type Names = Map.Map String Int
uniqueName :: String -> Names -> (String, Names)
uniqueName nm ns =
case Map.lookup nm ns of
Nothing -> (nm, Map.insert nm 1 ns)
Just ix -> (nm ++ show ix, Map.insert nm (ix+1) ns)
Codegen State
type SymbolTable = [(String, Operand)]
data CodegenState
= CodegenState {
} deriving Show
data BlockState
= BlockState {
Stack of instructions
} deriving Show
Codegen Operations
newtype Codegen a = Codegen { runCodegen :: State CodegenState a }
deriving (Functor, Applicative, Monad, MonadState CodegenState )
sortBlocks :: [(Name, BlockState)] -> [(Name, BlockState)]
sortBlocks = sortBy (compare `on` (idx . snd))
createBlocks :: CodegenState -> [BasicBlock]
createBlocks m = map makeBlock $ sortBlocks $ Map.toList (blocks m)
makeBlock :: (Name, BlockState) -> BasicBlock
makeBlock (l, (BlockState _ s t)) = BasicBlock l (reverse s) (maketerm t)
where
maketerm (Just x) = x
maketerm Nothing = error $ "Block has no terminator: " ++ (show l)
entryBlockName :: String
entryBlockName = "entry"
emptyBlock :: Int -> BlockState
emptyBlock i = BlockState i [] Nothing
emptyCodegen :: CodegenState
emptyCodegen = CodegenState (Name entryBlockName) Map.empty [] 1 0 Map.empty
execCodegen :: Codegen a -> CodegenState
execCodegen m = execState (runCodegen m) emptyCodegen
fresh :: Codegen Word
fresh = do
i <- gets count
modify $ \s -> s { count = 1 + i }
return $ i + 1
instr :: Instruction -> Codegen (Operand)
instr ins = do
n <- fresh
let ref = (UnName n)
blk <- current
let i = stack blk
modifyBlock (blk { stack = (ref := ins) : i } )
return $ local ref
terminator :: Named Terminator -> Codegen (Named Terminator)
terminator trm = do
blk <- current
modifyBlock (blk { term = Just trm })
return trm
Block Stack
entry :: Codegen Name
entry = gets currentBlock
addBlock :: String -> Codegen Name
addBlock bname = do
bls <- gets blocks
ix <- gets blockCount
nms <- gets names
let new = emptyBlock ix
(qname, supply) = uniqueName bname nms
modify $ \s -> s { blocks = Map.insert (Name qname) new bls
, blockCount = ix + 1
, names = supply
}
return (Name qname)
setBlock :: Name -> Codegen Name
setBlock bname = do
modify $ \s -> s { currentBlock = bname }
return bname
getBlock :: Codegen Name
getBlock = gets currentBlock
modifyBlock :: BlockState -> Codegen ()
modifyBlock new = do
active <- gets currentBlock
modify $ \s -> s { blocks = Map.insert active new (blocks s) }
current :: Codegen BlockState
current = do
c <- gets currentBlock
blks <- gets blocks
case Map.lookup c blks of
Just x -> return x
Nothing -> error $ "No such block: " ++ show c
assign :: String -> Operand -> Codegen ()
assign var x = do
lcls <- gets symtab
modify $ \s -> s { symtab = [(var, x)] ++ lcls }
getvar :: String -> Codegen Operand
getvar var = do
syms <- gets symtab
case lookup var syms of
Just x -> return x
Nothing -> error $ "Local variable not in scope: " ++ show var
local :: Name -> Operand
local = LocalReference double
global :: Name -> C.Constant
global = C.GlobalReference double
externf :: Name -> Operand
externf = ConstantOperand . C.GlobalReference double
fadd :: Operand -> Operand -> Codegen Operand
fadd a b = instr $ FAdd NoFastMathFlags a b []
fsub :: Operand -> Operand -> Codegen Operand
fsub a b = instr $ FSub NoFastMathFlags a b []
fmul :: Operand -> Operand -> Codegen Operand
fmul a b = instr $ FMul NoFastMathFlags a b []
fdiv :: Operand -> Operand -> Codegen Operand
fdiv a b = instr $ FDiv NoFastMathFlags a b []
fcmp :: FP.FloatingPointPredicate -> Operand -> Operand -> Codegen Operand
fcmp cond a b = instr $ FCmp cond a b []
cons :: C.Constant -> Operand
cons = ConstantOperand
uitofp :: Type -> Operand -> Codegen Operand
uitofp ty a = instr $ UIToFP a ty []
toArgs :: [Operand] -> [(Operand, [A.ParameterAttribute])]
toArgs = map (\x -> (x, []))
call :: Operand -> [Operand] -> Codegen Operand
call fn args = instr $ Call Nothing CC.C [] (Right fn) (toArgs args) [] []
alloca :: Type -> Codegen Operand
alloca ty = instr $ Alloca ty Nothing 0 []
store :: Operand -> Operand -> Codegen Operand
store ptr val = instr $ Store False ptr val Nothing 0 []
load :: Operand -> Codegen Operand
load ptr = instr $ Load False ptr Nothing 0 []
Control Flow
br :: Name -> Codegen (Named Terminator)
br val = terminator $ Do $ Br val []
cbr :: Operand -> Name -> Name -> Codegen (Named Terminator)
cbr cond tr fl = terminator $ Do $ CondBr cond tr fl []
ret :: Operand -> Codegen (Named Terminator)
ret val = terminator $ Do $ Ret (Just val) []
|
d42394c69d99690c9d9b1f14e0527bcee9b519c0ee80071b73c464fa85b76631 | basho/riak_cs | riak_cs_pbc.erl | %% ---------------------------------------------------------------------
%%
Copyright ( c ) 2007 - 2015 Basho Technologies , Inc. All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% ---------------------------------------------------------------------
%% @doc Thin wrapper of `riakc_pb_socket'
-module(riak_cs_pbc).
-export([ping/3,
get/6,
repl_get/7,
put/4,
put/5,
delete_obj/5,
mapred/5,
list_keys/4,
get_index_eq/5,
get_index_eq/6,
get_index_range/7,
get_cluster_id/1,
check_connection_status/2,
pause_to_reconnect/3
]).
%% Lower level APIs, which don't update stats.
-export([
get_sans_stats/5,
put_object/5,
put_sans_stats/3,
put_sans_stats/4,
list_keys_sans_stats/3
]).
-include_lib("riakc/include/riakc.hrl").
-define(WITH_STATS(StatsKey, Statement),
begin
_ = riak_cs_stats:inflow(StatsKey),
StartTime__with_stats = os:timestamp(),
Result__with_stats = Statement,
_ = riak_cs_stats:update_with_start(StatsKey, StartTime__with_stats,
Result__with_stats),
Result__with_stats
end).
-spec ping(pid(), timeout(), riak_cs_stats:key()) -> pong.
ping(PbcPid, Timeout, StatsKey) ->
_ = riak_cs_stats:inflow(StatsKey),
StartTime = os:timestamp(),
Result = riakc_pb_socket:ping(PbcPid, Timeout),
case Result of
pong ->
_ = riak_cs_stats:update_with_start(StatsKey, StartTime);
_ ->
_ = riak_cs_stats:update_error_with_start(StatsKey, StartTime)
end,
Result.
@doc Get an object from Riak
-spec get_sans_stats(pid(), binary(), binary(), proplists:proplist(), timeout()) ->
{ok, riakc_obj:riakc_obj()} | {error, term()}.
get_sans_stats(PbcPid, BucketName, Key, Opts, Timeout) ->
riakc_pb_socket:get(PbcPid, BucketName, Key, Opts, Timeout).
-spec get(pid(), binary(), binary(), proplists:proplist(), timeout(),
riak_cs_stats:key()) ->
{ok, riakc_obj:riakc_obj()} | {error, term()}.
get(PbcPid, BucketName, Key, Opts, Timeout, StatsKey) ->
?WITH_STATS(StatsKey, get_sans_stats(PbcPid, BucketName, Key, Opts, Timeout)).
-spec repl_get(pid(), binary(), binary(), binary(),
proplists:proplist(), timeout(), riak_cs_stats:key()) ->
{ok, riakc_obj:riakc_obj()} | {error, term()}.
repl_get(PbcPid, BucketName, Key, ClusterID, Opts, Timeout, StatsKey) ->
?WITH_STATS(StatsKey,
riak_repl_pb_api:get(PbcPid, BucketName, Key, ClusterID, Opts, Timeout)).
@doc Store an object in Riak
TODO : two ` put_object ' are without stats yet .
-spec put_object(pid(), binary(), undefined | binary(), binary(), [term()]) -> ok | {error, term()}.
put_object(_PbcPid, BucketName, undefined, Value, Metadata) ->
error_logger:warning_msg("Attempt to put object into ~p with undefined key "
"and value ~P and dict ~p\n",
[BucketName, Value, 30, Metadata]),
{error, bad_key};
put_object(PbcPid, BucketName, Key, Value, Metadata) ->
RiakObject = riakc_obj:new(BucketName, Key, Value),
NewObj = riakc_obj:update_metadata(RiakObject, Metadata),
riakc_pb_socket:put(PbcPid, NewObj).
put_sans_stats(PbcPid, RiakcObj, Timeout) ->
put_sans_stats(PbcPid, RiakcObj, [], Timeout).
put_sans_stats(PbcPid, RiakcObj, Options, Timeout) ->
riakc_pb_socket:put(PbcPid, RiakcObj, Options, Timeout).
put(PbcPid, RiakcObj, Timeout, StatsKey) ->
?WITH_STATS(StatsKey, put_sans_stats(PbcPid, RiakcObj, [], Timeout)).
put(PbcPid, RiakcObj, Options, Timeout, StatsKey) ->
?WITH_STATS(StatsKey, put_sans_stats(PbcPid, RiakcObj, Options, Timeout)).
-spec delete_obj(pid(), riakc_obj:riakc_obj(), delete_options(),
non_neg_integer(),riak_cs_stats:key()) ->
ok | {error, term()}.
delete_obj(PbcPid, RiakcObj, Options, Timeout, StatsKey) ->
?WITH_STATS(StatsKey,
riakc_pb_socket:delete_obj(PbcPid, RiakcObj, Options, Timeout)).
-spec mapred(pid(), mapred_inputs(), [mapred_queryterm()], timeout(),
riak_cs_stats:key()) ->
{ok, mapred_result()} |
{error, {badqterm, mapred_queryterm()}} |
{error, timeout} |
{error, term()}.
mapred(Pid, Inputs, Query, Timeout, StatsKey) ->
?WITH_STATS(StatsKey,
riakc_pb_socket:mapred(Pid, Inputs, Query, Timeout)).
%% @doc List the keys from a bucket
-spec list_keys(pid(), binary(), timeout(), riak_cs_stats:key()) ->
{ok, [binary()]} | {error, term()}.
list_keys(PbcPid, BucketName, Timeout, StatsKey) ->
?WITH_STATS(StatsKey, list_keys_sans_stats(PbcPid, BucketName, Timeout)).
-spec list_keys_sans_stats(pid(), binary(), timeout()) -> {ok, [binary()]} | {error, term()}.
list_keys_sans_stats(PbcPid, BucketName, Timeout) ->
case riakc_pb_socket:list_keys(PbcPid, BucketName, Timeout) of
{ok, Keys} ->
%% TODO:
%% This is a naive implementation,
%% the longer-term solution is likely
going to involve 2i and merging the
%% results from each of the vnodes.
{ok, lists:sort(Keys)};
{error, _}=Error ->
Error
end.
-spec get_index_eq(pid(), bucket(), binary() | secondary_index_id(), key() | integer(),
riak_cs_stats:key()) ->
{ok, index_results()} | {error, term()}.
get_index_eq(PbcPid, Bucket, Index, Key, StatsKey) ->
?WITH_STATS(StatsKey,
riakc_pb_socket:get_index_eq(PbcPid, Bucket, Index, Key)).
-spec get_index_eq(pid(), bucket(), binary() | secondary_index_id(), key() | integer(),
list(), riak_cs_stats:key()) ->
{ok, index_results()} | {error, term()}.
get_index_eq(PbcPid, Bucket, Index, Key, Opts, StatsKey) ->
?WITH_STATS(StatsKey,
riakc_pb_socket:get_index_eq(PbcPid, Bucket, Index, Key, Opts)).
-spec get_index_range(pid(), bucket(), binary() | secondary_index_id(),
key() | integer() | list(),
key() | integer() | list(),
list(),
riak_cs_stats:key()) ->
{ok, index_results()} | {error, term()}.
get_index_range(PbcPid, Bucket, Index, StartKey, EndKey, Opts, StatsKey) ->
?WITH_STATS(StatsKey,
riakc_pb_socket:get_index_range(PbcPid, Bucket, Index,
StartKey, EndKey, Opts)).
%% @doc Attempt to determine the cluster id
-spec get_cluster_id(pid()) -> undefined | binary().
get_cluster_id(Pbc) ->
StatsKey = [riakc, get_clusterid],
Res = ?WITH_STATS(StatsKey, get_cluster_id_sans_stats(Pbc)),
case Res of
{ok, ClusterID} -> ClusterID;
{error, _} -> undefined
end.
get_cluster_id_sans_stats(Pbc) ->
Timeout = riak_cs_config:cluster_id_timeout(),
try
riak_repl_pb_api:get_clusterid(Pbc, Timeout)
catch C:R ->
Disable ` proxy_get ' so we do not repeatedly have to
handle this same exception . This would happen if an OSS
%% install has `proxy_get' enabled.
application:set_env(riak_cs, proxy_get, disabled),
{error, {C, R}}
end.
%% @doc don't reuse return value
-spec check_connection_status(pid(), term()) -> any().
check_connection_status(Pbc, Where) ->
try
case riakc_pb_socket:is_connected(Pbc) of
true -> ok;
Other ->
_ = lager:warning("Connection status of ~p at ~p: ~p",
[Pbc, Where, Other])
end
catch
Type:Error ->
_ = lager:warning("Connection status of ~p at ~p: ~p",
[Pbc, Where, {Type, Error}])
end.
%% @doc Pause for a while so that underlying `riaic_pb_socket' can have
%% room for reconnection.
%%
If ` Reason ' ( second argument ) is ` timeout ' or ` disconnected ' , loop
%% until the connection will be reconnected. Otherwise, do nothing.
%% This function return always `ok' even in the case of timeout.
-spec pause_to_reconnect(pid(), term(), non_neg_integer()) -> ok.
pause_to_reconnect(Pbc, Reason, Timeout)
when Reason =:= timeout orelse Reason =:= disconnected ->
pause_to_reconnect0(Pbc, Timeout, os:timestamp());
pause_to_reconnect(_Pbc, _Other, _Timeout) ->
ok.
pause_to_reconnect0(Pbc, Timeout, Start) ->
lager:debug("riak_cs_pbc:pause_to_reconnect0"),
case riakc_pb_socket:is_connected(Pbc, ?FIRST_RECONNECT_INTERVAL) of
true -> ok;
{false, _} ->
Remaining = Timeout - timer:now_diff(os:timestamp(), Start) div 1000,
case Remaining of
Positive when 0 < Positive ->
%% sleep to avoid busy-loop calls of `is_connected'
_ = timer:sleep(?FIRST_RECONNECT_INTERVAL),
pause_to_reconnect0(Pbc, Timeout, Start);
_ ->
ok
end
end.
| null | https://raw.githubusercontent.com/basho/riak_cs/c0c1012d1c9c691c74c8c5d9f69d388f5047bcd2/src/riak_cs_pbc.erl | erlang | ---------------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
---------------------------------------------------------------------
@doc Thin wrapper of `riakc_pb_socket'
Lower level APIs, which don't update stats.
@doc List the keys from a bucket
TODO:
This is a naive implementation,
the longer-term solution is likely
results from each of the vnodes.
@doc Attempt to determine the cluster id
install has `proxy_get' enabled.
@doc don't reuse return value
@doc Pause for a while so that underlying `riaic_pb_socket' can have
room for reconnection.
until the connection will be reconnected. Otherwise, do nothing.
This function return always `ok' even in the case of timeout.
sleep to avoid busy-loop calls of `is_connected' | Copyright ( c ) 2007 - 2015 Basho Technologies , Inc. All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
-module(riak_cs_pbc).
-export([ping/3,
get/6,
repl_get/7,
put/4,
put/5,
delete_obj/5,
mapred/5,
list_keys/4,
get_index_eq/5,
get_index_eq/6,
get_index_range/7,
get_cluster_id/1,
check_connection_status/2,
pause_to_reconnect/3
]).
-export([
get_sans_stats/5,
put_object/5,
put_sans_stats/3,
put_sans_stats/4,
list_keys_sans_stats/3
]).
-include_lib("riakc/include/riakc.hrl").
-define(WITH_STATS(StatsKey, Statement),
begin
_ = riak_cs_stats:inflow(StatsKey),
StartTime__with_stats = os:timestamp(),
Result__with_stats = Statement,
_ = riak_cs_stats:update_with_start(StatsKey, StartTime__with_stats,
Result__with_stats),
Result__with_stats
end).
-spec ping(pid(), timeout(), riak_cs_stats:key()) -> pong.
ping(PbcPid, Timeout, StatsKey) ->
_ = riak_cs_stats:inflow(StatsKey),
StartTime = os:timestamp(),
Result = riakc_pb_socket:ping(PbcPid, Timeout),
case Result of
pong ->
_ = riak_cs_stats:update_with_start(StatsKey, StartTime);
_ ->
_ = riak_cs_stats:update_error_with_start(StatsKey, StartTime)
end,
Result.
@doc Get an object from Riak
-spec get_sans_stats(pid(), binary(), binary(), proplists:proplist(), timeout()) ->
{ok, riakc_obj:riakc_obj()} | {error, term()}.
get_sans_stats(PbcPid, BucketName, Key, Opts, Timeout) ->
riakc_pb_socket:get(PbcPid, BucketName, Key, Opts, Timeout).
-spec get(pid(), binary(), binary(), proplists:proplist(), timeout(),
riak_cs_stats:key()) ->
{ok, riakc_obj:riakc_obj()} | {error, term()}.
get(PbcPid, BucketName, Key, Opts, Timeout, StatsKey) ->
?WITH_STATS(StatsKey, get_sans_stats(PbcPid, BucketName, Key, Opts, Timeout)).
-spec repl_get(pid(), binary(), binary(), binary(),
proplists:proplist(), timeout(), riak_cs_stats:key()) ->
{ok, riakc_obj:riakc_obj()} | {error, term()}.
repl_get(PbcPid, BucketName, Key, ClusterID, Opts, Timeout, StatsKey) ->
?WITH_STATS(StatsKey,
riak_repl_pb_api:get(PbcPid, BucketName, Key, ClusterID, Opts, Timeout)).
@doc Store an object in Riak
TODO : two ` put_object ' are without stats yet .
-spec put_object(pid(), binary(), undefined | binary(), binary(), [term()]) -> ok | {error, term()}.
put_object(_PbcPid, BucketName, undefined, Value, Metadata) ->
error_logger:warning_msg("Attempt to put object into ~p with undefined key "
"and value ~P and dict ~p\n",
[BucketName, Value, 30, Metadata]),
{error, bad_key};
put_object(PbcPid, BucketName, Key, Value, Metadata) ->
RiakObject = riakc_obj:new(BucketName, Key, Value),
NewObj = riakc_obj:update_metadata(RiakObject, Metadata),
riakc_pb_socket:put(PbcPid, NewObj).
put_sans_stats(PbcPid, RiakcObj, Timeout) ->
put_sans_stats(PbcPid, RiakcObj, [], Timeout).
put_sans_stats(PbcPid, RiakcObj, Options, Timeout) ->
riakc_pb_socket:put(PbcPid, RiakcObj, Options, Timeout).
put(PbcPid, RiakcObj, Timeout, StatsKey) ->
?WITH_STATS(StatsKey, put_sans_stats(PbcPid, RiakcObj, [], Timeout)).
put(PbcPid, RiakcObj, Options, Timeout, StatsKey) ->
?WITH_STATS(StatsKey, put_sans_stats(PbcPid, RiakcObj, Options, Timeout)).
-spec delete_obj(pid(), riakc_obj:riakc_obj(), delete_options(),
non_neg_integer(),riak_cs_stats:key()) ->
ok | {error, term()}.
delete_obj(PbcPid, RiakcObj, Options, Timeout, StatsKey) ->
?WITH_STATS(StatsKey,
riakc_pb_socket:delete_obj(PbcPid, RiakcObj, Options, Timeout)).
-spec mapred(pid(), mapred_inputs(), [mapred_queryterm()], timeout(),
riak_cs_stats:key()) ->
{ok, mapred_result()} |
{error, {badqterm, mapred_queryterm()}} |
{error, timeout} |
{error, term()}.
mapred(Pid, Inputs, Query, Timeout, StatsKey) ->
?WITH_STATS(StatsKey,
riakc_pb_socket:mapred(Pid, Inputs, Query, Timeout)).
-spec list_keys(pid(), binary(), timeout(), riak_cs_stats:key()) ->
{ok, [binary()]} | {error, term()}.
list_keys(PbcPid, BucketName, Timeout, StatsKey) ->
?WITH_STATS(StatsKey, list_keys_sans_stats(PbcPid, BucketName, Timeout)).
-spec list_keys_sans_stats(pid(), binary(), timeout()) -> {ok, [binary()]} | {error, term()}.
list_keys_sans_stats(PbcPid, BucketName, Timeout) ->
case riakc_pb_socket:list_keys(PbcPid, BucketName, Timeout) of
{ok, Keys} ->
going to involve 2i and merging the
{ok, lists:sort(Keys)};
{error, _}=Error ->
Error
end.
-spec get_index_eq(pid(), bucket(), binary() | secondary_index_id(), key() | integer(),
riak_cs_stats:key()) ->
{ok, index_results()} | {error, term()}.
get_index_eq(PbcPid, Bucket, Index, Key, StatsKey) ->
?WITH_STATS(StatsKey,
riakc_pb_socket:get_index_eq(PbcPid, Bucket, Index, Key)).
-spec get_index_eq(pid(), bucket(), binary() | secondary_index_id(), key() | integer(),
list(), riak_cs_stats:key()) ->
{ok, index_results()} | {error, term()}.
get_index_eq(PbcPid, Bucket, Index, Key, Opts, StatsKey) ->
?WITH_STATS(StatsKey,
riakc_pb_socket:get_index_eq(PbcPid, Bucket, Index, Key, Opts)).
-spec get_index_range(pid(), bucket(), binary() | secondary_index_id(),
key() | integer() | list(),
key() | integer() | list(),
list(),
riak_cs_stats:key()) ->
{ok, index_results()} | {error, term()}.
get_index_range(PbcPid, Bucket, Index, StartKey, EndKey, Opts, StatsKey) ->
?WITH_STATS(StatsKey,
riakc_pb_socket:get_index_range(PbcPid, Bucket, Index,
StartKey, EndKey, Opts)).
-spec get_cluster_id(pid()) -> undefined | binary().
get_cluster_id(Pbc) ->
StatsKey = [riakc, get_clusterid],
Res = ?WITH_STATS(StatsKey, get_cluster_id_sans_stats(Pbc)),
case Res of
{ok, ClusterID} -> ClusterID;
{error, _} -> undefined
end.
get_cluster_id_sans_stats(Pbc) ->
Timeout = riak_cs_config:cluster_id_timeout(),
try
riak_repl_pb_api:get_clusterid(Pbc, Timeout)
catch C:R ->
Disable ` proxy_get ' so we do not repeatedly have to
handle this same exception . This would happen if an OSS
application:set_env(riak_cs, proxy_get, disabled),
{error, {C, R}}
end.
-spec check_connection_status(pid(), term()) -> any().
check_connection_status(Pbc, Where) ->
try
case riakc_pb_socket:is_connected(Pbc) of
true -> ok;
Other ->
_ = lager:warning("Connection status of ~p at ~p: ~p",
[Pbc, Where, Other])
end
catch
Type:Error ->
_ = lager:warning("Connection status of ~p at ~p: ~p",
[Pbc, Where, {Type, Error}])
end.
If ` Reason ' ( second argument ) is ` timeout ' or ` disconnected ' , loop
-spec pause_to_reconnect(pid(), term(), non_neg_integer()) -> ok.
pause_to_reconnect(Pbc, Reason, Timeout)
when Reason =:= timeout orelse Reason =:= disconnected ->
pause_to_reconnect0(Pbc, Timeout, os:timestamp());
pause_to_reconnect(_Pbc, _Other, _Timeout) ->
ok.
pause_to_reconnect0(Pbc, Timeout, Start) ->
lager:debug("riak_cs_pbc:pause_to_reconnect0"),
case riakc_pb_socket:is_connected(Pbc, ?FIRST_RECONNECT_INTERVAL) of
true -> ok;
{false, _} ->
Remaining = Timeout - timer:now_diff(os:timestamp(), Start) div 1000,
case Remaining of
Positive when 0 < Positive ->
_ = timer:sleep(?FIRST_RECONNECT_INTERVAL),
pause_to_reconnect0(Pbc, Timeout, Start);
_ ->
ok
end
end.
|
1c72ee0d1cd41a48954b5ed3a97ed9cae0f2958d765540c4610957dee0569db2 | dhruvp/angular-phonecat-re-frame | project.clj | (defproject phonecat-re-frame "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:source-paths ["src/clj" "src/cljs"]
:dependencies [[org.clojure/clojure "1.6.0"]
[cljsjs/react "0.12.2-5"]
[reagent "0.5.0-alpha3"]
[reagent-forms "0.4.3"]
[reagent-utils "0.1.2"]
[secretary "1.2.1"]
[org.clojure/clojurescript "0.0-2913" :scope "provided"]
[ring "1.3.2"]
[ring/ring-defaults "0.1.3"]
[prone "0.8.0"]
[compojure "1.3.2"]
[selmer "0.8.0"]
[environ "1.0.0"]
[re-frame "0.2.0"]
[cljs-ajax "0.3.10"]]
:plugins [
[lein-cljsbuild "1.0.4"]
[lein-environ "1.0.0"]
[lein-ring "0.9.1"]
[lein-asset-minifier "0.2.2"]]
:ring {:handler phonecat-re-frame.handler/app
:uberwar-name "phonecat-re-frame.war"}
:min-lein-version "2.5.0"
:uberjar-name "phonecat-re-frame.jar"
:main phonecat-re-frame.server
:clean-targets ^{:protect false} ["resources/public/js"]
:minify-assets
{:assets
{"resources/public/css/site.min.css" "resources/public/css/site.css"}}
:cljsbuild {:builds {:app {:source-paths ["src/cljs"]
:compiler {:output-to "resources/public/js/app.js"
:output-dir "resources/public/js/out"
;;:externs ["react/externs/react.js"]
:asset-path "js/out"
:optimizations :none
:pretty-print true}}}}
:profiles {:dev {:repl-options {:init-ns phonecat-re-frame.handler
:nrepl-middleware [cemerick.piggieback/wrap-cljs-repl]}
:dependencies [[ring-mock "0.1.5"]
[ring/ring-devel "1.3.2"]
[leiningen "2.5.1"]
[figwheel "0.2.5-SNAPSHOT"]
[weasel "0.6.0-SNAPSHOT"]
[com.cemerick/piggieback "0.1.6-SNAPSHOT"]
[pjstadig/humane-test-output "0.6.0"]]
:source-paths ["env/dev/clj"]
:plugins [[lein-figwheel "0.2.3-SNAPSHOT"]]
:injections [(require 'pjstadig.humane-test-output)
(pjstadig.humane-test-output/activate!)]
:figwheel {:http-server-root "public"
:server-port 3449
:css-dirs ["resources/public/css"]
:ring-handler phonecat-re-frame.handler/app}
:env {:dev? true}
:cljsbuild {:builds {:app {:source-paths ["env/dev/cljs"]
:compiler { :main "phonecat-re-frame.dev"
:source-map true}}
}
}}
:uberjar {:hooks [leiningen.cljsbuild minify-assets.plugin/hooks]
:env {:production true}
:aot :all
:omit-source true
:cljsbuild {:jar true
:builds {:app
{:source-paths ["env/prod/cljs"]
:compiler
{:optimizations :advanced
:pretty-print false}}}}}
:production {:ring {:open-browser? false
:stacktraces? false
:auto-reload? false}
:cljsbuild {:builds {:app {:compiler {:main "phonecat-re-frame.prod"}}}}
}})
| null | https://raw.githubusercontent.com/dhruvp/angular-phonecat-re-frame/f2410203ce1c0946058544099f2d3556280b48de/project.clj | clojure | :externs ["react/externs/react.js"] | (defproject phonecat-re-frame "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:source-paths ["src/clj" "src/cljs"]
:dependencies [[org.clojure/clojure "1.6.0"]
[cljsjs/react "0.12.2-5"]
[reagent "0.5.0-alpha3"]
[reagent-forms "0.4.3"]
[reagent-utils "0.1.2"]
[secretary "1.2.1"]
[org.clojure/clojurescript "0.0-2913" :scope "provided"]
[ring "1.3.2"]
[ring/ring-defaults "0.1.3"]
[prone "0.8.0"]
[compojure "1.3.2"]
[selmer "0.8.0"]
[environ "1.0.0"]
[re-frame "0.2.0"]
[cljs-ajax "0.3.10"]]
:plugins [
[lein-cljsbuild "1.0.4"]
[lein-environ "1.0.0"]
[lein-ring "0.9.1"]
[lein-asset-minifier "0.2.2"]]
:ring {:handler phonecat-re-frame.handler/app
:uberwar-name "phonecat-re-frame.war"}
:min-lein-version "2.5.0"
:uberjar-name "phonecat-re-frame.jar"
:main phonecat-re-frame.server
:clean-targets ^{:protect false} ["resources/public/js"]
:minify-assets
{:assets
{"resources/public/css/site.min.css" "resources/public/css/site.css"}}
:cljsbuild {:builds {:app {:source-paths ["src/cljs"]
:compiler {:output-to "resources/public/js/app.js"
:output-dir "resources/public/js/out"
:asset-path "js/out"
:optimizations :none
:pretty-print true}}}}
:profiles {:dev {:repl-options {:init-ns phonecat-re-frame.handler
:nrepl-middleware [cemerick.piggieback/wrap-cljs-repl]}
:dependencies [[ring-mock "0.1.5"]
[ring/ring-devel "1.3.2"]
[leiningen "2.5.1"]
[figwheel "0.2.5-SNAPSHOT"]
[weasel "0.6.0-SNAPSHOT"]
[com.cemerick/piggieback "0.1.6-SNAPSHOT"]
[pjstadig/humane-test-output "0.6.0"]]
:source-paths ["env/dev/clj"]
:plugins [[lein-figwheel "0.2.3-SNAPSHOT"]]
:injections [(require 'pjstadig.humane-test-output)
(pjstadig.humane-test-output/activate!)]
:figwheel {:http-server-root "public"
:server-port 3449
:css-dirs ["resources/public/css"]
:ring-handler phonecat-re-frame.handler/app}
:env {:dev? true}
:cljsbuild {:builds {:app {:source-paths ["env/dev/cljs"]
:compiler { :main "phonecat-re-frame.dev"
:source-map true}}
}
}}
:uberjar {:hooks [leiningen.cljsbuild minify-assets.plugin/hooks]
:env {:production true}
:aot :all
:omit-source true
:cljsbuild {:jar true
:builds {:app
{:source-paths ["env/prod/cljs"]
:compiler
{:optimizations :advanced
:pretty-print false}}}}}
:production {:ring {:open-browser? false
:stacktraces? false
:auto-reload? false}
:cljsbuild {:builds {:app {:compiler {:main "phonecat-re-frame.prod"}}}}
}})
|
cac77762c60f219c652ca45897729170f83c5ff723ad861567c14308ff705e40 | mongodb-haskell/mongodb | Example.hs | # LANGUAGE CPP #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE ExtendedDefaultRules #
module Main (main) where
import Database.MongoDB (Action, Document, Document, Value, access,
close, connect, delete, exclude, find,
host, insertMany, master, project, rest,
select, sort, (=:))
#if (__GLASGOW_HASKELL__ >= 800)
import Control.Monad.IO.Class (liftIO)
#else
import Control.Monad.Trans (liftIO)
#endif
main :: IO ()
main = do
pipe <- connect (host "127.0.0.1")
e <- access pipe master "baseball" run
close pipe
print e
run :: Action IO ()
run = do
clearTeams
insertTeams
allTeams >>= printDocs "All Teams"
nationalLeagueTeams >>= printDocs "National League Teams"
newYorkTeams >>= printDocs "New York Teams"
clearTeams :: Action IO ()
clearTeams = delete (select [] "team")
insertTeams :: Action IO [Value]
insertTeams = insertMany "team" [
["name" =: "Yankees", "home" =: ["city" =: "New York", "state" =: "NY"], "league" =: "American"],
["name" =: "Mets", "home" =: ["city" =: "New York", "state" =: "NY"], "league" =: "National"],
["name" =: "Phillies", "home" =: ["city" =: "Philadelphia", "state" =: "PA"], "league" =: "National"],
["name" =: "Red Sox", "home" =: ["city" =: "Boston", "state" =: "MA"], "league" =: "American"] ]
allTeams :: Action IO [Document]
allTeams = rest =<< find (select [] "team") {sort = ["home.city" =: 1]}
nationalLeagueTeams :: Action IO [Document]
nationalLeagueTeams = rest =<< find (select ["league" =: "National"] "team")
newYorkTeams :: Action IO [Document]
newYorkTeams = rest =<< find (select ["home.state" =: "NY"] "team") {project = ["name" =: 1, "league" =: 1]}
printDocs :: String -> [Document] -> Action IO ()
printDocs title docs = liftIO $ putStrLn title >> mapM_ (print . exclude ["_id"]) docs
| null | https://raw.githubusercontent.com/mongodb-haskell/mongodb/6d338dac692b6ae855a72b2fbf626462fffb0515/doc/Example.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE CPP #
# LANGUAGE ExtendedDefaultRules #
module Main (main) where
import Database.MongoDB (Action, Document, Document, Value, access,
close, connect, delete, exclude, find,
host, insertMany, master, project, rest,
select, sort, (=:))
#if (__GLASGOW_HASKELL__ >= 800)
import Control.Monad.IO.Class (liftIO)
#else
import Control.Monad.Trans (liftIO)
#endif
main :: IO ()
main = do
pipe <- connect (host "127.0.0.1")
e <- access pipe master "baseball" run
close pipe
print e
run :: Action IO ()
run = do
clearTeams
insertTeams
allTeams >>= printDocs "All Teams"
nationalLeagueTeams >>= printDocs "National League Teams"
newYorkTeams >>= printDocs "New York Teams"
clearTeams :: Action IO ()
clearTeams = delete (select [] "team")
insertTeams :: Action IO [Value]
insertTeams = insertMany "team" [
["name" =: "Yankees", "home" =: ["city" =: "New York", "state" =: "NY"], "league" =: "American"],
["name" =: "Mets", "home" =: ["city" =: "New York", "state" =: "NY"], "league" =: "National"],
["name" =: "Phillies", "home" =: ["city" =: "Philadelphia", "state" =: "PA"], "league" =: "National"],
["name" =: "Red Sox", "home" =: ["city" =: "Boston", "state" =: "MA"], "league" =: "American"] ]
allTeams :: Action IO [Document]
allTeams = rest =<< find (select [] "team") {sort = ["home.city" =: 1]}
nationalLeagueTeams :: Action IO [Document]
nationalLeagueTeams = rest =<< find (select ["league" =: "National"] "team")
newYorkTeams :: Action IO [Document]
newYorkTeams = rest =<< find (select ["home.state" =: "NY"] "team") {project = ["name" =: 1, "league" =: 1]}
printDocs :: String -> [Document] -> Action IO ()
printDocs title docs = liftIO $ putStrLn title >> mapM_ (print . exclude ["_id"]) docs
|
5f58b9c45e629250a911ee2cc51a141a3140bf37a5d2a919cb7e0f5c86b10cba | w7cook/AoPL | Lexer.hs | module Lexer where
import Data.Char
import Data.List
BEGIN : BasicToken BEGIN : Token
data Token = Digits Int
| Symbol String
END : BasicToken
| TokenKeyword String
| TokenIdent String
--END:Token
deriving Show
lexer :: [String] -> [String] -> String -> [Token]
lexer symbols keywords str = lex str where
lex [] = []
lex (c:cs)
| isSpace c = lex cs
| isAlpha c = lexAlpha keywords (c:cs)
| isDigit c = lexDigits (c:cs)
| True = lexSym symbols (c:cs)
lexSym :: [String] -> String -> [Token]
lexSym (s:ss) cs =
case stripPrefix s cs of
Nothing -> lexSym ss cs
Just rest -> Symbol s : lex rest
lexSym [] (c:cs) = error ("Unrecognized symbol '" ++ [c] ++ "'")
lexDigits cs = Digits (read num) : lex rest
where (num, rest) = span isDigit cs
lexAlpha keywords str = token : lex rest where
(first, rest) = span isAlphaNum str
token = if elem first keywords
then TokenKeyword first
else TokenIdent first
happyError t = error ("Parse error at " ++ show t ++ "\n")
| null | https://raw.githubusercontent.com/w7cook/AoPL/af2f9d31ec658e9d175735335ad27101cca3e247/src/Lexer.hs | haskell | END:Token | module Lexer where
import Data.Char
import Data.List
BEGIN : BasicToken BEGIN : Token
data Token = Digits Int
| Symbol String
END : BasicToken
| TokenKeyword String
| TokenIdent String
deriving Show
lexer :: [String] -> [String] -> String -> [Token]
lexer symbols keywords str = lex str where
lex [] = []
lex (c:cs)
| isSpace c = lex cs
| isAlpha c = lexAlpha keywords (c:cs)
| isDigit c = lexDigits (c:cs)
| True = lexSym symbols (c:cs)
lexSym :: [String] -> String -> [Token]
lexSym (s:ss) cs =
case stripPrefix s cs of
Nothing -> lexSym ss cs
Just rest -> Symbol s : lex rest
lexSym [] (c:cs) = error ("Unrecognized symbol '" ++ [c] ++ "'")
lexDigits cs = Digits (read num) : lex rest
where (num, rest) = span isDigit cs
lexAlpha keywords str = token : lex rest where
(first, rest) = span isAlphaNum str
token = if elem first keywords
then TokenKeyword first
else TokenIdent first
happyError t = error ("Parse error at " ++ show t ++ "\n")
|
00f855b753e54362b3fc175b3ac53a9a5d390931ad5c7a2bf552734e58320bba | arsalan0c/cdp-hs | BackgroundService.hs | {-# LANGUAGE OverloadedStrings, RecordWildCards, TupleSections #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE FlexibleContexts #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE FlexibleInstances #
# LANGUAGE DeriveGeneric #
# LANGUAGE TypeFamilies #
{- |
= BackgroundService
Defines events for background web platform features.
-}
module CDP.Domains.BackgroundService (module CDP.Domains.BackgroundService) where
import Control.Applicative ((<$>))
import Control.Monad
import Control.Monad.Loops
import Control.Monad.Trans (liftIO)
import qualified Data.Map as M
import Data.Maybe
import Data.Functor.Identity
import Data.String
import qualified Data.Text as T
import qualified Data.List as List
import qualified Data.Text.IO as TI
import qualified Data.Vector as V
import Data.Aeson.Types (Parser(..))
import Data.Aeson (FromJSON (..), ToJSON (..), (.:), (.:?), (.=), (.!=), (.:!))
import qualified Data.Aeson as A
import qualified Network.HTTP.Simple as Http
import qualified Network.URI as Uri
import qualified Network.WebSockets as WS
import Control.Concurrent
import qualified Data.ByteString.Lazy as BS
import qualified Data.Map as Map
import Data.Proxy
import System.Random
import GHC.Generics
import Data.Char
import Data.Default
import CDP.Internal.Utils
import CDP.Domains.DOMPageNetworkEmulationSecurity as DOMPageNetworkEmulationSecurity
import CDP.Domains.ServiceWorker as ServiceWorker
| Type ' BackgroundService . ' .
-- The Background Service that will be associated with the commands/events.
-- Every Background Service operates independently, but they share the same
-- API.
data BackgroundServiceServiceName = BackgroundServiceServiceNameBackgroundFetch | BackgroundServiceServiceNameBackgroundSync | BackgroundServiceServiceNamePushMessaging | BackgroundServiceServiceNameNotifications | BackgroundServiceServiceNamePaymentHandler | BackgroundServiceServiceNamePeriodicBackgroundSync
deriving (Ord, Eq, Show, Read)
instance FromJSON BackgroundServiceServiceName where
parseJSON = A.withText "BackgroundServiceServiceName" $ \v -> case v of
"backgroundFetch" -> pure BackgroundServiceServiceNameBackgroundFetch
"backgroundSync" -> pure BackgroundServiceServiceNameBackgroundSync
"pushMessaging" -> pure BackgroundServiceServiceNamePushMessaging
"notifications" -> pure BackgroundServiceServiceNameNotifications
"paymentHandler" -> pure BackgroundServiceServiceNamePaymentHandler
"periodicBackgroundSync" -> pure BackgroundServiceServiceNamePeriodicBackgroundSync
"_" -> fail "failed to parse BackgroundServiceServiceName"
instance ToJSON BackgroundServiceServiceName where
toJSON v = A.String $ case v of
BackgroundServiceServiceNameBackgroundFetch -> "backgroundFetch"
BackgroundServiceServiceNameBackgroundSync -> "backgroundSync"
BackgroundServiceServiceNamePushMessaging -> "pushMessaging"
BackgroundServiceServiceNameNotifications -> "notifications"
BackgroundServiceServiceNamePaymentHandler -> "paymentHandler"
BackgroundServiceServiceNamePeriodicBackgroundSync -> "periodicBackgroundSync"
| Type ' BackgroundService . EventMetadata ' .
-- A key-value pair for additional event information to pass along.
data BackgroundServiceEventMetadata = BackgroundServiceEventMetadata
{
backgroundServiceEventMetadataKey :: T.Text,
backgroundServiceEventMetadataValue :: T.Text
}
deriving (Eq, Show)
instance FromJSON BackgroundServiceEventMetadata where
parseJSON = A.withObject "BackgroundServiceEventMetadata" $ \o -> BackgroundServiceEventMetadata
<$> o A..: "key"
<*> o A..: "value"
instance ToJSON BackgroundServiceEventMetadata where
toJSON p = A.object $ catMaybes [
("key" A..=) <$> Just (backgroundServiceEventMetadataKey p),
("value" A..=) <$> Just (backgroundServiceEventMetadataValue p)
]
-- | Type 'BackgroundService.BackgroundServiceEvent'.
data BackgroundServiceBackgroundServiceEvent = BackgroundServiceBackgroundServiceEvent
{
| Timestamp of the event ( in seconds ) .
backgroundServiceBackgroundServiceEventTimestamp :: DOMPageNetworkEmulationSecurity.NetworkTimeSinceEpoch,
-- | The origin this event belongs to.
backgroundServiceBackgroundServiceEventOrigin :: T.Text,
-- | The Service Worker ID that initiated the event.
backgroundServiceBackgroundServiceEventServiceWorkerRegistrationId :: ServiceWorker.ServiceWorkerRegistrationID,
| The Background Service this event belongs to .
backgroundServiceBackgroundServiceEventService :: BackgroundServiceServiceName,
-- | A description of the event.
backgroundServiceBackgroundServiceEventEventName :: T.Text,
-- | An identifier that groups related events together.
backgroundServiceBackgroundServiceEventInstanceId :: T.Text,
-- | A list of event-specific information.
backgroundServiceBackgroundServiceEventEventMetadata :: [BackgroundServiceEventMetadata]
}
deriving (Eq, Show)
instance FromJSON BackgroundServiceBackgroundServiceEvent where
parseJSON = A.withObject "BackgroundServiceBackgroundServiceEvent" $ \o -> BackgroundServiceBackgroundServiceEvent
<$> o A..: "timestamp"
<*> o A..: "origin"
<*> o A..: "serviceWorkerRegistrationId"
<*> o A..: "service"
<*> o A..: "eventName"
<*> o A..: "instanceId"
<*> o A..: "eventMetadata"
instance ToJSON BackgroundServiceBackgroundServiceEvent where
toJSON p = A.object $ catMaybes [
("timestamp" A..=) <$> Just (backgroundServiceBackgroundServiceEventTimestamp p),
("origin" A..=) <$> Just (backgroundServiceBackgroundServiceEventOrigin p),
("serviceWorkerRegistrationId" A..=) <$> Just (backgroundServiceBackgroundServiceEventServiceWorkerRegistrationId p),
("service" A..=) <$> Just (backgroundServiceBackgroundServiceEventService p),
("eventName" A..=) <$> Just (backgroundServiceBackgroundServiceEventEventName p),
("instanceId" A..=) <$> Just (backgroundServiceBackgroundServiceEventInstanceId p),
("eventMetadata" A..=) <$> Just (backgroundServiceBackgroundServiceEventEventMetadata p)
]
-- | Type of the 'BackgroundService.recordingStateChanged' event.
data BackgroundServiceRecordingStateChanged = BackgroundServiceRecordingStateChanged
{
backgroundServiceRecordingStateChangedIsRecording :: Bool,
backgroundServiceRecordingStateChangedService :: BackgroundServiceServiceName
}
deriving (Eq, Show)
instance FromJSON BackgroundServiceRecordingStateChanged where
parseJSON = A.withObject "BackgroundServiceRecordingStateChanged" $ \o -> BackgroundServiceRecordingStateChanged
<$> o A..: "isRecording"
<*> o A..: "service"
instance Event BackgroundServiceRecordingStateChanged where
eventName _ = "BackgroundService.recordingStateChanged"
-- | Type of the 'BackgroundService.backgroundServiceEventReceived' event.
data BackgroundServiceBackgroundServiceEventReceived = BackgroundServiceBackgroundServiceEventReceived
{
backgroundServiceBackgroundServiceEventReceivedBackgroundServiceEvent :: BackgroundServiceBackgroundServiceEvent
}
deriving (Eq, Show)
instance FromJSON BackgroundServiceBackgroundServiceEventReceived where
parseJSON = A.withObject "BackgroundServiceBackgroundServiceEventReceived" $ \o -> BackgroundServiceBackgroundServiceEventReceived
<$> o A..: "backgroundServiceEvent"
instance Event BackgroundServiceBackgroundServiceEventReceived where
eventName _ = "BackgroundService.backgroundServiceEventReceived"
-- | Enables event updates for the service.
-- | Parameters of the 'BackgroundService.startObserving' command.
data PBackgroundServiceStartObserving = PBackgroundServiceStartObserving
{
pBackgroundServiceStartObservingService :: BackgroundServiceServiceName
}
deriving (Eq, Show)
pBackgroundServiceStartObserving
:: BackgroundServiceServiceName
-> PBackgroundServiceStartObserving
pBackgroundServiceStartObserving
arg_pBackgroundServiceStartObservingService
= PBackgroundServiceStartObserving
arg_pBackgroundServiceStartObservingService
instance ToJSON PBackgroundServiceStartObserving where
toJSON p = A.object $ catMaybes [
("service" A..=) <$> Just (pBackgroundServiceStartObservingService p)
]
instance Command PBackgroundServiceStartObserving where
type CommandResponse PBackgroundServiceStartObserving = ()
commandName _ = "BackgroundService.startObserving"
fromJSON = const . A.Success . const ()
-- | Disables event updates for the service.
-- | Parameters of the 'BackgroundService.stopObserving' command.
data PBackgroundServiceStopObserving = PBackgroundServiceStopObserving
{
pBackgroundServiceStopObservingService :: BackgroundServiceServiceName
}
deriving (Eq, Show)
pBackgroundServiceStopObserving
:: BackgroundServiceServiceName
-> PBackgroundServiceStopObserving
pBackgroundServiceStopObserving
arg_pBackgroundServiceStopObservingService
= PBackgroundServiceStopObserving
arg_pBackgroundServiceStopObservingService
instance ToJSON PBackgroundServiceStopObserving where
toJSON p = A.object $ catMaybes [
("service" A..=) <$> Just (pBackgroundServiceStopObservingService p)
]
instance Command PBackgroundServiceStopObserving where
type CommandResponse PBackgroundServiceStopObserving = ()
commandName _ = "BackgroundService.stopObserving"
fromJSON = const . A.Success . const ()
-- | Set the recording state for the service.
-- | Parameters of the 'BackgroundService.setRecording' command.
data PBackgroundServiceSetRecording = PBackgroundServiceSetRecording
{
pBackgroundServiceSetRecordingShouldRecord :: Bool,
pBackgroundServiceSetRecordingService :: BackgroundServiceServiceName
}
deriving (Eq, Show)
pBackgroundServiceSetRecording
:: Bool
-> BackgroundServiceServiceName
-> PBackgroundServiceSetRecording
pBackgroundServiceSetRecording
arg_pBackgroundServiceSetRecordingShouldRecord
arg_pBackgroundServiceSetRecordingService
= PBackgroundServiceSetRecording
arg_pBackgroundServiceSetRecordingShouldRecord
arg_pBackgroundServiceSetRecordingService
instance ToJSON PBackgroundServiceSetRecording where
toJSON p = A.object $ catMaybes [
("shouldRecord" A..=) <$> Just (pBackgroundServiceSetRecordingShouldRecord p),
("service" A..=) <$> Just (pBackgroundServiceSetRecordingService p)
]
instance Command PBackgroundServiceSetRecording where
type CommandResponse PBackgroundServiceSetRecording = ()
commandName _ = "BackgroundService.setRecording"
fromJSON = const . A.Success . const ()
-- | Clears all stored data for the service.
-- | Parameters of the 'BackgroundService.clearEvents' command.
data PBackgroundServiceClearEvents = PBackgroundServiceClearEvents
{
pBackgroundServiceClearEventsService :: BackgroundServiceServiceName
}
deriving (Eq, Show)
pBackgroundServiceClearEvents
:: BackgroundServiceServiceName
-> PBackgroundServiceClearEvents
pBackgroundServiceClearEvents
arg_pBackgroundServiceClearEventsService
= PBackgroundServiceClearEvents
arg_pBackgroundServiceClearEventsService
instance ToJSON PBackgroundServiceClearEvents where
toJSON p = A.object $ catMaybes [
("service" A..=) <$> Just (pBackgroundServiceClearEventsService p)
]
instance Command PBackgroundServiceClearEvents where
type CommandResponse PBackgroundServiceClearEvents = ()
commandName _ = "BackgroundService.clearEvents"
fromJSON = const . A.Success . const ()
| null | https://raw.githubusercontent.com/arsalan0c/cdp-hs/6e70dbc59d394a0794b4a92e95f8851c11a3a624/src/CDP/Domains/BackgroundService.hs | haskell | # LANGUAGE OverloadedStrings, RecordWildCards, TupleSections #
|
= BackgroundService
Defines events for background web platform features.
The Background Service that will be associated with the commands/events.
Every Background Service operates independently, but they share the same
API.
A key-value pair for additional event information to pass along.
| Type 'BackgroundService.BackgroundServiceEvent'.
| The origin this event belongs to.
| The Service Worker ID that initiated the event.
| A description of the event.
| An identifier that groups related events together.
| A list of event-specific information.
| Type of the 'BackgroundService.recordingStateChanged' event.
| Type of the 'BackgroundService.backgroundServiceEventReceived' event.
| Enables event updates for the service.
| Parameters of the 'BackgroundService.startObserving' command.
| Disables event updates for the service.
| Parameters of the 'BackgroundService.stopObserving' command.
| Set the recording state for the service.
| Parameters of the 'BackgroundService.setRecording' command.
| Clears all stored data for the service.
| Parameters of the 'BackgroundService.clearEvents' command. | # LANGUAGE ScopedTypeVariables #
# LANGUAGE FlexibleContexts #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE FlexibleInstances #
# LANGUAGE DeriveGeneric #
# LANGUAGE TypeFamilies #
module CDP.Domains.BackgroundService (module CDP.Domains.BackgroundService) where
import Control.Applicative ((<$>))
import Control.Monad
import Control.Monad.Loops
import Control.Monad.Trans (liftIO)
import qualified Data.Map as M
import Data.Maybe
import Data.Functor.Identity
import Data.String
import qualified Data.Text as T
import qualified Data.List as List
import qualified Data.Text.IO as TI
import qualified Data.Vector as V
import Data.Aeson.Types (Parser(..))
import Data.Aeson (FromJSON (..), ToJSON (..), (.:), (.:?), (.=), (.!=), (.:!))
import qualified Data.Aeson as A
import qualified Network.HTTP.Simple as Http
import qualified Network.URI as Uri
import qualified Network.WebSockets as WS
import Control.Concurrent
import qualified Data.ByteString.Lazy as BS
import qualified Data.Map as Map
import Data.Proxy
import System.Random
import GHC.Generics
import Data.Char
import Data.Default
import CDP.Internal.Utils
import CDP.Domains.DOMPageNetworkEmulationSecurity as DOMPageNetworkEmulationSecurity
import CDP.Domains.ServiceWorker as ServiceWorker
| Type ' BackgroundService . ' .
data BackgroundServiceServiceName = BackgroundServiceServiceNameBackgroundFetch | BackgroundServiceServiceNameBackgroundSync | BackgroundServiceServiceNamePushMessaging | BackgroundServiceServiceNameNotifications | BackgroundServiceServiceNamePaymentHandler | BackgroundServiceServiceNamePeriodicBackgroundSync
deriving (Ord, Eq, Show, Read)
instance FromJSON BackgroundServiceServiceName where
parseJSON = A.withText "BackgroundServiceServiceName" $ \v -> case v of
"backgroundFetch" -> pure BackgroundServiceServiceNameBackgroundFetch
"backgroundSync" -> pure BackgroundServiceServiceNameBackgroundSync
"pushMessaging" -> pure BackgroundServiceServiceNamePushMessaging
"notifications" -> pure BackgroundServiceServiceNameNotifications
"paymentHandler" -> pure BackgroundServiceServiceNamePaymentHandler
"periodicBackgroundSync" -> pure BackgroundServiceServiceNamePeriodicBackgroundSync
"_" -> fail "failed to parse BackgroundServiceServiceName"
instance ToJSON BackgroundServiceServiceName where
toJSON v = A.String $ case v of
BackgroundServiceServiceNameBackgroundFetch -> "backgroundFetch"
BackgroundServiceServiceNameBackgroundSync -> "backgroundSync"
BackgroundServiceServiceNamePushMessaging -> "pushMessaging"
BackgroundServiceServiceNameNotifications -> "notifications"
BackgroundServiceServiceNamePaymentHandler -> "paymentHandler"
BackgroundServiceServiceNamePeriodicBackgroundSync -> "periodicBackgroundSync"
| Type ' BackgroundService . EventMetadata ' .
data BackgroundServiceEventMetadata = BackgroundServiceEventMetadata
{
backgroundServiceEventMetadataKey :: T.Text,
backgroundServiceEventMetadataValue :: T.Text
}
deriving (Eq, Show)
instance FromJSON BackgroundServiceEventMetadata where
parseJSON = A.withObject "BackgroundServiceEventMetadata" $ \o -> BackgroundServiceEventMetadata
<$> o A..: "key"
<*> o A..: "value"
instance ToJSON BackgroundServiceEventMetadata where
toJSON p = A.object $ catMaybes [
("key" A..=) <$> Just (backgroundServiceEventMetadataKey p),
("value" A..=) <$> Just (backgroundServiceEventMetadataValue p)
]
data BackgroundServiceBackgroundServiceEvent = BackgroundServiceBackgroundServiceEvent
{
| Timestamp of the event ( in seconds ) .
backgroundServiceBackgroundServiceEventTimestamp :: DOMPageNetworkEmulationSecurity.NetworkTimeSinceEpoch,
backgroundServiceBackgroundServiceEventOrigin :: T.Text,
backgroundServiceBackgroundServiceEventServiceWorkerRegistrationId :: ServiceWorker.ServiceWorkerRegistrationID,
| The Background Service this event belongs to .
backgroundServiceBackgroundServiceEventService :: BackgroundServiceServiceName,
backgroundServiceBackgroundServiceEventEventName :: T.Text,
backgroundServiceBackgroundServiceEventInstanceId :: T.Text,
backgroundServiceBackgroundServiceEventEventMetadata :: [BackgroundServiceEventMetadata]
}
deriving (Eq, Show)
instance FromJSON BackgroundServiceBackgroundServiceEvent where
parseJSON = A.withObject "BackgroundServiceBackgroundServiceEvent" $ \o -> BackgroundServiceBackgroundServiceEvent
<$> o A..: "timestamp"
<*> o A..: "origin"
<*> o A..: "serviceWorkerRegistrationId"
<*> o A..: "service"
<*> o A..: "eventName"
<*> o A..: "instanceId"
<*> o A..: "eventMetadata"
instance ToJSON BackgroundServiceBackgroundServiceEvent where
toJSON p = A.object $ catMaybes [
("timestamp" A..=) <$> Just (backgroundServiceBackgroundServiceEventTimestamp p),
("origin" A..=) <$> Just (backgroundServiceBackgroundServiceEventOrigin p),
("serviceWorkerRegistrationId" A..=) <$> Just (backgroundServiceBackgroundServiceEventServiceWorkerRegistrationId p),
("service" A..=) <$> Just (backgroundServiceBackgroundServiceEventService p),
("eventName" A..=) <$> Just (backgroundServiceBackgroundServiceEventEventName p),
("instanceId" A..=) <$> Just (backgroundServiceBackgroundServiceEventInstanceId p),
("eventMetadata" A..=) <$> Just (backgroundServiceBackgroundServiceEventEventMetadata p)
]
data BackgroundServiceRecordingStateChanged = BackgroundServiceRecordingStateChanged
{
backgroundServiceRecordingStateChangedIsRecording :: Bool,
backgroundServiceRecordingStateChangedService :: BackgroundServiceServiceName
}
deriving (Eq, Show)
instance FromJSON BackgroundServiceRecordingStateChanged where
parseJSON = A.withObject "BackgroundServiceRecordingStateChanged" $ \o -> BackgroundServiceRecordingStateChanged
<$> o A..: "isRecording"
<*> o A..: "service"
instance Event BackgroundServiceRecordingStateChanged where
eventName _ = "BackgroundService.recordingStateChanged"
data BackgroundServiceBackgroundServiceEventReceived = BackgroundServiceBackgroundServiceEventReceived
{
backgroundServiceBackgroundServiceEventReceivedBackgroundServiceEvent :: BackgroundServiceBackgroundServiceEvent
}
deriving (Eq, Show)
instance FromJSON BackgroundServiceBackgroundServiceEventReceived where
parseJSON = A.withObject "BackgroundServiceBackgroundServiceEventReceived" $ \o -> BackgroundServiceBackgroundServiceEventReceived
<$> o A..: "backgroundServiceEvent"
instance Event BackgroundServiceBackgroundServiceEventReceived where
eventName _ = "BackgroundService.backgroundServiceEventReceived"
data PBackgroundServiceStartObserving = PBackgroundServiceStartObserving
{
pBackgroundServiceStartObservingService :: BackgroundServiceServiceName
}
deriving (Eq, Show)
pBackgroundServiceStartObserving
:: BackgroundServiceServiceName
-> PBackgroundServiceStartObserving
pBackgroundServiceStartObserving
arg_pBackgroundServiceStartObservingService
= PBackgroundServiceStartObserving
arg_pBackgroundServiceStartObservingService
instance ToJSON PBackgroundServiceStartObserving where
toJSON p = A.object $ catMaybes [
("service" A..=) <$> Just (pBackgroundServiceStartObservingService p)
]
instance Command PBackgroundServiceStartObserving where
type CommandResponse PBackgroundServiceStartObserving = ()
commandName _ = "BackgroundService.startObserving"
fromJSON = const . A.Success . const ()
data PBackgroundServiceStopObserving = PBackgroundServiceStopObserving
{
pBackgroundServiceStopObservingService :: BackgroundServiceServiceName
}
deriving (Eq, Show)
pBackgroundServiceStopObserving
:: BackgroundServiceServiceName
-> PBackgroundServiceStopObserving
pBackgroundServiceStopObserving
arg_pBackgroundServiceStopObservingService
= PBackgroundServiceStopObserving
arg_pBackgroundServiceStopObservingService
instance ToJSON PBackgroundServiceStopObserving where
toJSON p = A.object $ catMaybes [
("service" A..=) <$> Just (pBackgroundServiceStopObservingService p)
]
instance Command PBackgroundServiceStopObserving where
type CommandResponse PBackgroundServiceStopObserving = ()
commandName _ = "BackgroundService.stopObserving"
fromJSON = const . A.Success . const ()
data PBackgroundServiceSetRecording = PBackgroundServiceSetRecording
{
pBackgroundServiceSetRecordingShouldRecord :: Bool,
pBackgroundServiceSetRecordingService :: BackgroundServiceServiceName
}
deriving (Eq, Show)
pBackgroundServiceSetRecording
:: Bool
-> BackgroundServiceServiceName
-> PBackgroundServiceSetRecording
pBackgroundServiceSetRecording
arg_pBackgroundServiceSetRecordingShouldRecord
arg_pBackgroundServiceSetRecordingService
= PBackgroundServiceSetRecording
arg_pBackgroundServiceSetRecordingShouldRecord
arg_pBackgroundServiceSetRecordingService
instance ToJSON PBackgroundServiceSetRecording where
toJSON p = A.object $ catMaybes [
("shouldRecord" A..=) <$> Just (pBackgroundServiceSetRecordingShouldRecord p),
("service" A..=) <$> Just (pBackgroundServiceSetRecordingService p)
]
instance Command PBackgroundServiceSetRecording where
type CommandResponse PBackgroundServiceSetRecording = ()
commandName _ = "BackgroundService.setRecording"
fromJSON = const . A.Success . const ()
data PBackgroundServiceClearEvents = PBackgroundServiceClearEvents
{
pBackgroundServiceClearEventsService :: BackgroundServiceServiceName
}
deriving (Eq, Show)
pBackgroundServiceClearEvents
:: BackgroundServiceServiceName
-> PBackgroundServiceClearEvents
pBackgroundServiceClearEvents
arg_pBackgroundServiceClearEventsService
= PBackgroundServiceClearEvents
arg_pBackgroundServiceClearEventsService
instance ToJSON PBackgroundServiceClearEvents where
toJSON p = A.object $ catMaybes [
("service" A..=) <$> Just (pBackgroundServiceClearEventsService p)
]
instance Command PBackgroundServiceClearEvents where
type CommandResponse PBackgroundServiceClearEvents = ()
commandName _ = "BackgroundService.clearEvents"
fromJSON = const . A.Success . const ()
|
9ad61ed2aada1b072a768d52e83d95d6bb0e391e9d1fc6d9bb72f5a4a95cb722 | BranchTaken/Hemlock | test_normalize.ml | open! Basis.Rudiments
open! Basis
open Path
let test () =
List.iter [
"";
"/";
"//";
"////";
"../a";
"/../a";
"//../a";
"///../a";
"/../../a";
"/./.././../a";
"./../a";
"/./../a";
"a/b//../c";
"a/b/./../c";
"///"; "///."; "///.."; "///a"; "//./"; "//./."; "//./.."; "//./a"; "//../"; "//../.";
"//../.."; "//../a"; "//a/"; "//a/."; "//a/.."; "//a/b"; "/.//"; "/.//."; "/.//.."; "/.//a";
"/././"; "/././."; "/././.."; "/././a"; "/./../"; "/./../."; "/./../.."; "/./../a"; "/./a/";
"/./a/."; "/./a/.."; "/./a/b"; "/..//"; "/..//."; "/..//.."; "/..//a"; "/.././"; "/.././.";
"/.././.."; "/.././a"; "/../../"; "/../../."; "/../../.."; "/../../a"; "/../a/"; "/../a/.";
"/../a/.."; "/../a/b"; "/a//"; "/a//."; "/a//.."; "/a//b"; "/a/./"; "/a/./."; "/a/./..";
"/a/./b"; "/a/../"; "/a/../."; "/a/../.."; "/a/../b"; "/a/b/"; "/a/b/."; "/a/b/.."; "/a/b/c";
".///"; ".///."; ".///.."; ".///a"; ".//./"; ".//./."; ".//./.."; ".//./a"; ".//../"; ".//../.";
".//../.."; ".//../a"; ".//a/"; ".//a/."; ".//a/.."; ".//a/b"; "././/"; "././/."; "././/..";
"././/a"; "./././"; "./././."; "./././.."; "./././a"; "././../"; "././../."; "././../..";
"././../a"; "././a/"; "././a/."; "././a/.."; "././a/b"; "./..//"; "./..//."; "./..//..";
"./..//a"; "./.././"; "./.././."; "./.././.."; "./.././a"; "./../../"; "./../../.";
"./../../.."; "./../../a"; "./../a/"; "./../a/."; "./../a/.."; "./../a/b"; "./a//"; "./a//.";
"./a//.."; "./a//b"; "./a/./"; "./a/./."; "./a/./.."; "./a/./b"; "./a/../"; "./a/../.";
"./a/../.."; "./a/../b"; "./a/b/"; "./a/b/."; "./a/b/.."; "./a/b/c"; "..///"; "..///.";
"..///.."; "..///a"; "..//./"; "..//./."; "..//./.."; "..//./a"; "..//../"; "..//../.";
"..//../.."; "..//../a"; "..//a/"; "..//a/."; "..//a/.."; "..//a/b"; ".././/"; ".././/.";
".././/.."; ".././/a"; "../././"; "../././."; "../././.."; "../././a"; ".././../"; ".././../.";
".././../.."; ".././../a"; ".././a/"; ".././a/."; ".././a/.."; ".././a/b"; "../..//";
"../..//."; "../..//.."; "../..//a"; "../.././"; "../.././."; "../.././.."; "../.././a";
"../../../"; "../../../."; "../../../.."; "../../../a"; "../../a/"; "../../a/."; "../../a/..";
"../../a/b"; "../a//"; "../a//."; "../a//.."; "../a//b"; "../a/./"; "../a/./."; "../a/./..";
"../a/./b"; "../a/../"; "../a/../."; "../a/../.."; "../a/../b"; "../a/b/"; "../a/b/.";
"../a/b/.."; "../a/b/c"; "a///"; "a///."; "a///.."; "a///b"; "a//./"; "a//./."; "a//./..";
"a//./b"; "a//../"; "a//../."; "a//../.."; "a//../b"; "a//b/"; "a//b/."; "a//b/.."; "a//b/c";
"a/.//"; "a/.//."; "a/.//.."; "a/.//b"; "a/././"; "a/././."; "a/././.."; "a/././b"; "a/./../";
"a/./../."; "a/./../.."; "a/./../b"; "a/./b/"; "a/./b/."; "a/./b/.."; "a/./b/c"; "a/..//";
"a/..//."; "a/..//.."; "a/..//b"; "a/.././"; "a/.././."; "a/.././.."; "a/.././b"; "a/../../";
"a/../../."; "a/../../.."; "a/../../b"; "a/../b/"; "a/../b/."; "a/../b/.."; "a/../b/c"; "a/b//";
"a/b//."; "a/b//.."; "a/b//c"; "a/b/./"; "a/b/./."; "a/b/./.."; "a/b/./c"; "a/b/../";
"a/b/../."; "a/b/../.."; "a/b/../c"; "a/b/c/"; "a/b/c/."; "a/b/c/.."; "a/b/c/d";
] ~f:(fun path_str ->
let path = of_string path_str in
File.Fmt.stdout
|> Fmt.fmt "normalize " |> String.pp path_str
|> Fmt.fmt "\n -> " |> pp (normalize path)
|> Fmt.fmt "\n" |> ignore
)
let _ = test ()
| null | https://raw.githubusercontent.com/BranchTaken/Hemlock/ed397cf3294ca397024e69eb3b1ed5f1db773db6/bootstrap/test/basis/path/test_normalize.ml | ocaml | open! Basis.Rudiments
open! Basis
open Path
let test () =
List.iter [
"";
"/";
"//";
"////";
"../a";
"/../a";
"//../a";
"///../a";
"/../../a";
"/./.././../a";
"./../a";
"/./../a";
"a/b//../c";
"a/b/./../c";
"///"; "///."; "///.."; "///a"; "//./"; "//./."; "//./.."; "//./a"; "//../"; "//../.";
"//../.."; "//../a"; "//a/"; "//a/."; "//a/.."; "//a/b"; "/.//"; "/.//."; "/.//.."; "/.//a";
"/././"; "/././."; "/././.."; "/././a"; "/./../"; "/./../."; "/./../.."; "/./../a"; "/./a/";
"/./a/."; "/./a/.."; "/./a/b"; "/..//"; "/..//."; "/..//.."; "/..//a"; "/.././"; "/.././.";
"/.././.."; "/.././a"; "/../../"; "/../../."; "/../../.."; "/../../a"; "/../a/"; "/../a/.";
"/../a/.."; "/../a/b"; "/a//"; "/a//."; "/a//.."; "/a//b"; "/a/./"; "/a/./."; "/a/./..";
"/a/./b"; "/a/../"; "/a/../."; "/a/../.."; "/a/../b"; "/a/b/"; "/a/b/."; "/a/b/.."; "/a/b/c";
".///"; ".///."; ".///.."; ".///a"; ".//./"; ".//./."; ".//./.."; ".//./a"; ".//../"; ".//../.";
".//../.."; ".//../a"; ".//a/"; ".//a/."; ".//a/.."; ".//a/b"; "././/"; "././/."; "././/..";
"././/a"; "./././"; "./././."; "./././.."; "./././a"; "././../"; "././../."; "././../..";
"././../a"; "././a/"; "././a/."; "././a/.."; "././a/b"; "./..//"; "./..//."; "./..//..";
"./..//a"; "./.././"; "./.././."; "./.././.."; "./.././a"; "./../../"; "./../../.";
"./../../.."; "./../../a"; "./../a/"; "./../a/."; "./../a/.."; "./../a/b"; "./a//"; "./a//.";
"./a//.."; "./a//b"; "./a/./"; "./a/./."; "./a/./.."; "./a/./b"; "./a/../"; "./a/../.";
"./a/../.."; "./a/../b"; "./a/b/"; "./a/b/."; "./a/b/.."; "./a/b/c"; "..///"; "..///.";
"..///.."; "..///a"; "..//./"; "..//./."; "..//./.."; "..//./a"; "..//../"; "..//../.";
"..//../.."; "..//../a"; "..//a/"; "..//a/."; "..//a/.."; "..//a/b"; ".././/"; ".././/.";
".././/.."; ".././/a"; "../././"; "../././."; "../././.."; "../././a"; ".././../"; ".././../.";
".././../.."; ".././../a"; ".././a/"; ".././a/."; ".././a/.."; ".././a/b"; "../..//";
"../..//."; "../..//.."; "../..//a"; "../.././"; "../.././."; "../.././.."; "../.././a";
"../../../"; "../../../."; "../../../.."; "../../../a"; "../../a/"; "../../a/."; "../../a/..";
"../../a/b"; "../a//"; "../a//."; "../a//.."; "../a//b"; "../a/./"; "../a/./."; "../a/./..";
"../a/./b"; "../a/../"; "../a/../."; "../a/../.."; "../a/../b"; "../a/b/"; "../a/b/.";
"../a/b/.."; "../a/b/c"; "a///"; "a///."; "a///.."; "a///b"; "a//./"; "a//./."; "a//./..";
"a//./b"; "a//../"; "a//../."; "a//../.."; "a//../b"; "a//b/"; "a//b/."; "a//b/.."; "a//b/c";
"a/.//"; "a/.//."; "a/.//.."; "a/.//b"; "a/././"; "a/././."; "a/././.."; "a/././b"; "a/./../";
"a/./../."; "a/./../.."; "a/./../b"; "a/./b/"; "a/./b/."; "a/./b/.."; "a/./b/c"; "a/..//";
"a/..//."; "a/..//.."; "a/..//b"; "a/.././"; "a/.././."; "a/.././.."; "a/.././b"; "a/../../";
"a/../../."; "a/../../.."; "a/../../b"; "a/../b/"; "a/../b/."; "a/../b/.."; "a/../b/c"; "a/b//";
"a/b//."; "a/b//.."; "a/b//c"; "a/b/./"; "a/b/./."; "a/b/./.."; "a/b/./c"; "a/b/../";
"a/b/../."; "a/b/../.."; "a/b/../c"; "a/b/c/"; "a/b/c/."; "a/b/c/.."; "a/b/c/d";
] ~f:(fun path_str ->
let path = of_string path_str in
File.Fmt.stdout
|> Fmt.fmt "normalize " |> String.pp path_str
|> Fmt.fmt "\n -> " |> pp (normalize path)
|> Fmt.fmt "\n" |> ignore
)
let _ = test ()
| |
ce22009796073a9f5a979b7e9acc042ed8b574a3831ff9f184162a24796428fd | DomainDrivenArchitecture/dda-k8s-crate | apple.clj | Licensed to the Apache Software Foundation ( ASF ) under one
; or more contributor license agreements. See the NOTICE file
; distributed with this work for additional information
; regarding copyright ownership. The ASF licenses this file
to you under the Apache License , Version 2.0 ( the
; "License"); you may not use this file except in compliance
; with the License. You may obtain a copy of the License at
;
; -2.0
;
; Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
; See the License for the specific language governing permissions and
; limitations under the License.
(ns dda.pallet.dda-k8s-crate.infra.apple
(:require
[clojure.spec.alpha :as s]
[schema.core :as sch]
[dda.provision :as p]
[dda.provision.pallet :as pp]))
(s/def ::fqdn string?)
(s/def ::secret-name string?)
(s/def ::cluster-issuer string?)
(s/def ::facility keyword?)
(s/def ::user string?)
(s/def ::apple (s/keys :req [::fqdn ::secret-name ::cluster-issuer]))
(sch/def Apple {:fqdn sch/Str :secret-name sch/Str :cluster-issuer sch/Str})
(def apple "apple")
(s/fdef user-configure-apple
:args (s/cat :facility ::facility :user ::user :config ::apple))
(defn user-configure-apple
[facility user config]
(let [facility-name (name facility)]
(p/provision-log ::pp/pallet facility-name apple ::p/info "user-configure-apple")
(p/copy-resources-to-user
::pp/pallet user facility-name apple
[{:filename "apple.yml"}
{:filename "ingress_apple_https.yml" :config config}
{:filename "install.sh"}
{:filename "remove.sh"}
{:filename "verify.sh" :config config}])
(p/exec-file-on-target-as-user
::pp/pallet user facility-name apple "install.sh")))
| null | https://raw.githubusercontent.com/DomainDrivenArchitecture/dda-k8s-crate/eaeb4d965a63692973d3c1d98759fbdf756596b2/main/src/dda/pallet/dda_k8s_crate/infra/apple.clj | clojure | or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. | Licensed to the Apache Software Foundation ( ASF ) under one
to you under the Apache License , Version 2.0 ( the
distributed under the License is distributed on an " AS IS " BASIS ,
(ns dda.pallet.dda-k8s-crate.infra.apple
(:require
[clojure.spec.alpha :as s]
[schema.core :as sch]
[dda.provision :as p]
[dda.provision.pallet :as pp]))
(s/def ::fqdn string?)
(s/def ::secret-name string?)
(s/def ::cluster-issuer string?)
(s/def ::facility keyword?)
(s/def ::user string?)
(s/def ::apple (s/keys :req [::fqdn ::secret-name ::cluster-issuer]))
(sch/def Apple {:fqdn sch/Str :secret-name sch/Str :cluster-issuer sch/Str})
(def apple "apple")
(s/fdef user-configure-apple
:args (s/cat :facility ::facility :user ::user :config ::apple))
(defn user-configure-apple
[facility user config]
(let [facility-name (name facility)]
(p/provision-log ::pp/pallet facility-name apple ::p/info "user-configure-apple")
(p/copy-resources-to-user
::pp/pallet user facility-name apple
[{:filename "apple.yml"}
{:filename "ingress_apple_https.yml" :config config}
{:filename "install.sh"}
{:filename "remove.sh"}
{:filename "verify.sh" :config config}])
(p/exec-file-on-target-as-user
::pp/pallet user facility-name apple "install.sh")))
|
5403d4109efc7901afc8ad6ce4a73f445aebca878205e5cd8de2cfaf1d87de2a | jaked/froc | proto.ml | module type Sync =
sig
val get_board : unit -> int option array array
end
(** Asynchronous board access: [get_board] yields the board — a 2-D
    array of optional cell values — wrapped in an [Lwt.t] promise. *)
module type Lwt =
sig
val get_board : unit -> int option array array Lwt.t
end
| null | https://raw.githubusercontent.com/jaked/froc/6068a1fab883ed9254bfeb53a1f9c15e8af0bb20/examples/froc-dom/sudoku/proto.ml | ocaml | module type Sync =
sig
val get_board : unit -> int option array array
end
(** Asynchronous board access: [get_board] yields the board — a 2-D
    array of optional cell values — wrapped in an [Lwt.t] promise. *)
module type Lwt =
sig
val get_board : unit -> int option array array Lwt.t
end
| |
a2ca1ce8996e03fe1535b948e99b24e045b63cde625758f8dbbe914e2954d28a | NorfairKing/haskell-testing-intro | Lib.hs | module Lib
( someFunc
, myReverse
, myAbs
, writeFileDouble
) where
-- | Print the placeholder message @someFunc@ followed by a newline.
someFunc :: IO ()
someFunc = putStr "someFunc\n"
-- | Reverse a list.
--
-- The original @myReverse xs ++ [x]@ formulation appends to the end of
-- the result at every step, which is O(n^2). This accumulator version
-- visits each element exactly once, so it is O(n) and the result is
-- identical for all (finite) inputs.
myReverse :: [a] -> [a]
myReverse = go []
  where
    -- go acc ys: move the elements of ys onto the front of acc.
    go acc []     = acc
    go acc (y:ys) = go (y : acc) ys
-- | Absolute value of an 'Int', written with an explicit conditional
-- instead of guards.
myAbs :: Int -> Int
myAbs n = if n < 0 then negate n else n
-- | Overwrite the file at @fp@ with @s@ written twice in a row.
writeFileDouble :: FilePath -> String -> IO ()
writeFileDouble fp s =
  let doubled = s ++ s
   in writeFile fp doubled
| null | https://raw.githubusercontent.com/NorfairKing/haskell-testing-intro/0f8c433978840307a5237dcf2c8af22bbdf54dc9/src/Lib.hs | haskell | module Lib
( someFunc
, myReverse
, myAbs
, writeFileDouble
) where
-- | Print the placeholder message @someFunc@ followed by a newline.
someFunc :: IO ()
someFunc = putStr "someFunc\n"
-- | Reverse a list.
--
-- The original @myReverse xs ++ [x]@ formulation appends to the end of
-- the result at every step, which is O(n^2). This accumulator version
-- visits each element exactly once, so it is O(n) and the result is
-- identical for all (finite) inputs.
myReverse :: [a] -> [a]
myReverse = go []
  where
    -- go acc ys: move the elements of ys onto the front of acc.
    go acc []     = acc
    go acc (y:ys) = go (y : acc) ys
-- | Absolute value of an 'Int', written with an explicit conditional
-- instead of guards.
myAbs :: Int -> Int
myAbs n = if n < 0 then negate n else n
-- | Overwrite the file at @fp@ with @s@ written twice in a row.
writeFileDouble :: FilePath -> String -> IO ()
writeFileDouble fp s =
  let doubled = s ++ s
   in writeFile fp doubled
| |
8ef1bcfc382588f7f1065428a3508e2ac45eb2462d661ca81e7736e8cf409fd8 | ocaml/uchar | topkg.ml | ---------------------------------------------------------------------------
(*---------------------------------------------------------------------------
   Copyright (c) 2014 Daniel C. Bünzli. All rights reserved.
   Distributed under the BSD3 license, see license at the end of the file.
   %%NAME%% release %%VERSION%%
  ---------------------------------------------------------------------------*)
(* Public api *)
(** Build environment access *)
module type Env = sig
val bool : string -> bool
(** [bool key] declares [key] as being a boolean key in the environment.
Specifying key=(true|false) on the command line becomes mandatory. *)
val native : bool
(** [native] is [bool "native"]. *)
val native_dynlink : bool
(** [native_dynlink] is [bool "native-dynlink"] *)
end
(** Exts defines sets of file extensions. *)
(* NOTE(review): the doc comment for [module_library] had lost its
   delimiters during extraction, leaving a bare line that breaks the
   file; it is restored below. The [exe] doc also wrongly said "C
   libraries" — the struct maps [`Exe] to executables (".exe"/""), so
   the text is corrected. *)
module type Exts = sig
  type ext = [`Ext of string | `Obj | `Lib | `Dll | `Exe]
  (** The type for extensions. *)

  val interface : ext list
  (** [interface] is [[".mli"; ".cmi"; ".cmti"]] *)

  val interface_opt : ext list
  (** [interface_opt] is [".cmx" :: interface] *)

  val c_library : ext list
  (** [c_library] is the extension for C libraries. This is determined
      from [ocamlc -config]. *)

  val c_dll_library : ext list
  (** [c_dll_library] is the extension for C dynamic libraries. This
      is determined from [ocamlc -config]. *)

  val library : ext list
  (** [library] is [[".cma"; ".cmxa"; ".cmxs"] @ c_library] *)

  val module_library : ext list
  (** [module_library] is [(interface_opt @ library)]. *)

  val exe : ext list
  (** [exe] is the extension for executables. This is determined from
      [ocamlc -config]. *)

  val exts : string list -> ext list
  (** [exts sl] is [sl] as a list of extensions. *)

  val ext : string -> ext list
  (** [ext s] is [s] as a list of extensions. *)
end
(** Package description. *)
(* NOTE(review): this signature was garbled by extraction — duplicated
   comment text with stripped delimiters, a stray [*)] with no opener,
   and two [val] lines whose "See {!bin}" docs survived without the
   declarations. Reconstructed below; [sbin] and [libexec] are restored
   to match the implementing structure — confirm against upstream
   topkg. *)
module type Pkg = sig
  type builder =
    [ `OCamlbuild of string list
    | `OCamlbuild_no_ocamlfind of string list
    | `Other of string * string ]
  (** The type for build tools.
      {ul
      {- [`OCamlbuild args], [ocamlbuild] is invoked with [args] and
         [-use-ocamlfind].}
      {- [`OCamlbuild_no_ocamlfind args], [ocamlbuild] is invoked with
         [args].}
      {- [`Other (tool, bdir)], tool [tool] is invoked that generates
         its build artefacts in [bdir].}} *)

  type moves
  (** The type for install moves. *)

  type field = ?cond:bool ->
    ?exts:[`Ext of string | `Obj | `Lib | `Dll | `Exe] list -> ?dst:string ->
    string -> moves
  (** The type for field install functions. A call
      [field cond exts dst path] generates install moves as follows:
      {ul
      {- If [cond] is [false] (defaults to [true]), no move is generated.}
      {- If [exts] is present, generates a move for each path in
         the list [List.map (fun e -> path ^ e) exts].}
      {- If [dst] is present this path is used as the move destination
         (allows to install in subdirectories). If absent [dst] is
         [Filename.basename path].}} *)

  val lib : field

  val bin : ?auto:bool -> field
  (** If [auto] is true (defaults to false) generates
      [path ^ ".native"] if {!Env.native} is [true] and
      [path ^ ".byte"] if {!Env.native} is [false]. If
      [auto] is true it also adds {!Exts.exe} to the destination. *)

  val sbin : ?auto:bool -> field
  (** See {!bin}. *)

  val libexec : ?auto:bool -> field
  (** See {!bin}. *)

  val toplevel : field
  val share : field
  val share_root : field
  val etc : field
  val doc : field
  val misc : field
  val stublibs : field
  val man : field

  val describe : string -> builder:builder -> moves list -> unit
  (** [describe name builder moves] describes a package named [name] with
      builder [builder] and install moves [moves]. *)

  val find_ocamlc : builder -> string
end
(* Implementation *)
(* [str] abbreviates [Printf.sprintf]; used pervasively below. *)
let str = Printf.sprintf
(* Extension of the stdlib [String] module with [trim] and [cut],
   used below for parsing [ocamlc -config] output and command line
   arguments. *)
module String = struct
  include String

  (* String.trim is 4.00.0 only, so re-implement it here for older
     compilers. (NOTE(review): this comment's delimiters had been
     stripped during extraction, leaving a syntax-breaking bare line;
     restored.) *)
  let trim s =
    let max = String.length s - 1 in
    let drop = function ' ' | '\t' | '\n' | '\r' -> true | _ -> false in
    let rec left i = if i < max && drop s.[i] then left (i + 1) else i in
    let rec right i = if i >= 0 && drop s.[i] then right (i - 1) else i in
    let left, right = left 0, right max in
    let len = right - left + 1 in
    if len < 0 then "" else String.sub s left len

  (* [cut ~rev ~at s] splits [s] at the first (last, if [rev])
     occurrence of character [at]; the separator is dropped. [None]
     when [at] does not occur in [s]. *)
  let cut ?(rev = false) ~at s =
    let find_index = if rev then String.rindex else String.index in
    match try Some (find_index s at) with Not_found -> None with
    | None -> None
    | Some i ->
        Some (String.sub s 0 i, String.sub s (i+1) (String.length s - i - 1))
end
(* Reads the output of [ocamlc -config] to detect the C toolchain. *)
module OCaml_config : sig
  val read : ocamlc:string -> (string * string) list
  (* [read ~ocamlc] runs [ocamlc -config] and returns its key/value
     pairs; returns [] (after a warning on stderr) if anything fails. *)
  val ccomp : (string * string) list -> [ `Win_msvc | `Win_cc | `Other ]
  (* [ccomp config] classifies the C toolchain from the [ccomp_type]
     and [os_type] keys of [config]. *)
end = struct
  (* Temporary file that captures [ocamlc -config] output; removed at
     program exit. *)
  let tmp_file () =
    let f = Filename.temp_file (Filename.basename Sys.argv.(0)) "topkg" in
    let delete () = try Sys.remove f with Sys_error _ -> () in
    at_exit delete; f

  let read ~ocamlc = try
    let tmpf = tmp_file () in
    let cmd = str "%s -config > %s" ocamlc (Filename.quote tmpf) in
    let ret = Sys.command cmd in
    if ret <> 0 then failwith (str "exec `%s' exited with %d" cmd ret) else
    let ic = open_in tmpf in
    try
      (* Each config line has the form "key: value"; lines without a
         colon are skipped. Accumulates in reverse order (order is
         irrelevant for the assoc lookups done by [ccomp]). *)
      let rec loop acc = try match String.cut ~at:':' (input_line ic) with
      | None -> loop acc
      | Some (k, v) -> loop ((k, String.trim v) :: acc)
      with
      | End_of_file -> close_in ic; acc
      in
      loop []
    with exn -> (try close_in ic with _ -> ()); raise exn
  with
  | Failure s | Sys_error s ->
      (* Fixed: the message contained a stray backtick
         ("configuration`:"). *)
      Printf.eprintf "Warning: could not read OCaml configuration: %s\n" s;
      []

  let ccomp config =
    try
      let ccomp_type = List.assoc "ccomp_type" config in
      let os_type = List.assoc "os_type" config in
      match ccomp_type, os_type with
      | "msvc", _ -> `Win_msvc
      | "cc", "Win32" -> `Win_cc
      | _, _ -> `Other
    with Not_found ->
      Printf.eprintf "Warning: could not determine the C toolchain\n";
      `Other
end
(* Command-line front end: parses argv into a subcommand and a
   key=(true|false) environment, and centralizes error reporting. *)
module Topkg : sig
val cmd : [`Build | `Explain | `Help ]
val env : (string * bool) list
val err_parse : string -> unit
val err_mdef : string -> unit
val err_miss : string -> unit
val err_file : string -> string -> unit
val warn_unused : string -> unit
end = struct
(* Parses the command line. The actual cmd execution occurs in the call
to Pkg.describe. *)
(* [err ~stop fmt] prints a program-name-prefixed message on stderr and
   exits with code 1 unless [stop] is [false]. *)
let err ?(stop = true) fmt =
let k _ = if stop then exit 1 else () in
Format.kfprintf k Format.err_formatter ("%s: " ^^ fmt ^^ "@.") Sys.argv.(0)
let err_parse a = err "argument `%s' is not of the form key=(true|false)" a
let err_mdef a = err "bool `%s' is defined more than once" a
let err_file f e = err "%s: %s" f e
let err_miss a = err ~stop:false "argument `%s=(true|false)' is missing" a
let warn_unused k = err ~stop:false "warning: environment key `%s` unused" k
(* [cmd] is the subcommand (default [`Build]); [env] the parsed
   key/bool pairs. A malformed, empty-keyed or duplicate argument
   reports an error and yields an empty environment. *)
let cmd, env =
let rec parse_env acc = function (* not t.r. *)
| arg :: args ->
begin try
(* String.cut ... *)
let len = String.length arg in
let eq = String.index arg '=' in
let bool = bool_of_string (String.sub arg (eq + 1) (len - eq - 1)) in
let key = String.sub arg 0 eq in
if key = "" then raise Exit else
try ignore (List.assoc key acc); err_mdef key; [] with
| Not_found -> parse_env ((key, bool) :: acc) args
with
| Invalid_argument _ | Not_found | Exit -> err_parse arg; []
end
| [] -> acc
in
match List.tl (Array.to_list Sys.argv) with
| "explain" :: args -> `Explain, parse_env [] args
| ("help" | "-h" | "--help" | "-help") :: args -> `Help, parse_env [] args
| args -> `Build, parse_env [] args
end
(* Build environment: every boolean key the package description
   declares is looked up in [Topkg.env] and remembered for the
   help/explain output. *)
module Env : sig
include Env
val get : unit -> (string * bool) list
val error : unit -> bool
end = struct
(* [error] records that a required key was missing at [`Build] time;
   [env] accumulates every key declared via [bool]. *)
let error = ref false
let env = ref []
let get () = !env
let add_bool key b = env := (key, b) :: !env
(* Look up [key] on the command line. A missing key is an error only
   for the [`Build] command; otherwise it defaults to [true] so
   explain/help can still run. *)
let bool key =
let b = try List.assoc key Topkg.env with
| Not_found ->
if Topkg.cmd = `Build then (error := true; Topkg.err_miss key; true)
else true
in
add_bool key b; b
let native = bool "native"
let native_dynlink = bool "native-dynlink"
let error () = !error
end
(* Extension sets used by the install-field helpers in [Pkg]. *)
module Exts (* : Exts *) = struct
type ext = [`Ext of string | `Obj | `Lib | `Dll | `Exe]
let interface = [`Ext ".mli"; `Ext ".cmi"; `Ext ".cmti"]
let interface_opt = `Ext ".cmx" :: interface
let c_library = [`Lib]
let c_dll_library = [`Dll]
let library = [`Ext ".cma"; `Ext ".cmxa"; `Ext ".cmxs"] @ c_library
let module_library = (interface_opt @ library)
let exe = [`Exe]
let ext e = [`Ext e]
let exts es = List.map (fun e -> `Ext e) es
(* [ext_to_string ccomp e] renders [e] as a concrete suffix; the
   platform-dependent cases ([`Obj]/[`Lib]/[`Dll]/[`Exe]) are resolved
   per detected C toolchain, [`Ext s] is returned verbatim. *)
let ext_to_string =
let r map = function
| `Ext s -> s
| e -> try List.assoc e map with Not_found -> assert false
in
function
| `Win_msvc -> r [`Obj, ".obj"; `Lib, ".lib"; `Dll, ".dll"; `Exe, ".exe"]
| `Win_cc -> r [`Obj, ".o"; `Lib, ".a"; `Dll, ".dll"; `Exe, ".exe"]
| `Other -> r [`Obj, ".o"; `Lib, ".a"; `Dll, ".so"; `Exe, "" ]
end
(* Package description: turns install "moves" into (1) a build-tool
   invocation and (2) an opam .install file. *)
module Pkg : Pkg = struct
type builder =
[ `OCamlbuild of string list
| `OCamlbuild_no_ocamlfind of string list
| `Other of string * string ]
type file = string * Exts.ext
type moves = (string * (file * file)) list
type field =
?cond:bool -> ?exts:Exts.ext list -> ?dst:string -> string -> moves
(* Split a path into (basename-without-extension, extension). *)
let to_file s = match String.cut ~rev:true s ~at:'.' with
| None -> s, `Ext ""
| Some (name, ext) -> name, `Ext (str ".%s" ext)
(* Warn about command-line keys that no [Env.bool] ever consulted. *)
let warn_unused () =
let keys = List.map fst Topkg.env in
let keys_used = List.map fst (Env.get ()) in
let unused = List.find_all (fun k -> not (List.mem k keys_used)) keys in
List.iter Topkg.warn_unused unused
(* [build_strings btool bdir ext_to_string mvs] renders [mvs] (sorted
   by field) into the .install file contents and the build command.
   Artefacts with extensions in [no_build] are installed optionally
   ("?" prefix) and not requested from the build tool. *)
let build_strings ?(exec_sep = " ") btool bdir ext_to_string mvs =
let no_build = [ ".cmti"; ".cmt" ] in
let install = Buffer.create 1871 in
let exec = Buffer.create 1871 in
let file_to_str (n, ext) = str "%s%s" n (ext_to_string ext) in
let rec add_mvs current = function
| (field, (src, dst)) :: mvs when field = current ->
let src = file_to_str src in
let dst = file_to_str dst in
if List.exists (Filename.check_suffix src) no_build then
Buffer.add_string install (str "\n \"?%s/%s\" {\"%s\"}" bdir src dst)
else begin
Buffer.add_string exec (str "%s%s" exec_sep src);
Buffer.add_string install (str "\n \"%s/%s\" {\"%s\"}" bdir src dst);
end;
add_mvs current mvs
| (((field, _) :: _) as mvs) ->
if current <> "" (* first *) then Buffer.add_string install " ]\n";
Buffer.add_string install (str "%s: [" field);
add_mvs field mvs
| [] -> ()
in
Buffer.add_string exec btool;
add_mvs "" mvs;
Buffer.add_string install " ]\n";
Buffer.contents install, Buffer.contents exec
let pr = Format.printf
(* `explain' output: shows environment, build invocation and the
   .install file that `build' would produce, without running anything. *)
let pr_explanation ccomp btool bdir pkg mvs =
let env = Env.get () in
let ext_to_string = Exts.ext_to_string ccomp in
let exec_sep = " \\\n " in
let install, exec = build_strings ~exec_sep btool bdir ext_to_string mvs in
pr "@[<v>";
pr "Package name: %s@," pkg;
pr "Build tool: %s@," btool;
pr "Build directory: %s@," bdir;
pr "Environment:@, ";
List.iter (fun (k,v) -> pr "%s=%b@, " k v) (List.sort compare env);
pr "@,Build invocation:@,";
pr " %s@,@," exec;
pr "Install file:@,";
pr "%s@," install;
pr "@]";
()
let pr_help () =
pr "Usage example:@\n %s" Sys.argv.(0);
List.iter (fun (k,v) -> pr " %s=%b" k v) (List.sort compare (Env.get ()));
pr "@."
(* Run the build tool, then write <pkg>.install; propagates the build
   tool's non-zero exit code. *)
let build ccomp btool bdir pkg mvs =
let ext_to_string = Exts.ext_to_string ccomp in
let install, exec = build_strings btool bdir ext_to_string mvs in
let e = Sys.command exec in
if e <> 0 then exit e else
let install_file = pkg ^ ".install" in
try
let oc = open_out install_file in
output_string oc install; flush oc; close_out oc
with Sys_error e -> Topkg.err_file install_file e
(* Generic field constructor: expands [src]/[dst] over [exts] and
   filters out any move whose source extension is in [drop_exts]. *)
let mvs ?(drop_exts = []) field ?(cond = true) ?(exts = []) ?dst src =
if not cond then [] else
let mv src dst = (field, (src, dst)) in
let expand exts s d = List.map (fun e -> mv (s, e) (d, e)) exts in
let dst = match dst with None -> Filename.basename src | Some dst -> dst in
let files =
if exts = [] then [mv (to_file src) (to_file dst)] else
expand exts src dst
in
let has_ext (_, ext) ext' = ext = ext' in
let keep (_, (src, _)) = not (List.exists (has_ext src) drop_exts) in
List.find_all keep files
(* "lib" drops native artefacts not supported by the environment. *)
let lib =
let drop_exts =
if Env.native && not Env.native_dynlink then Exts.ext ".cmxs" else
if Env.native then [] else
Exts.c_library @ Exts.exts [".cmx"; ".cmxa"; ".cmxs"]
in
mvs ~drop_exts "lib"
let share = mvs "share"
let share_root = mvs "share_root"
let etc = mvs "etc"
let toplevel = mvs "toplevel"
let doc = mvs "doc"
let misc = mvs "misc"
let stublibs = mvs "stublibs"
let man = mvs "man"
let bin_drops = if not Env.native then Exts.ext ".native" else []
(* Binary fields: with [auto], pick .native or .byte per Env.native and
   default the destination to the source basename. *)
let bin_mvs field ?(auto = false) ?cond ?(exts = Exts.exe) ?dst src =
let src, dst =
if not auto then src, dst else
let dst = match dst with
| None -> Some (Filename.basename src)
| Some _ as dst -> dst
in
let src = if Env.native then src ^ ".native" else src ^ ".byte" in
src, dst
in
mvs ~drop_exts:bin_drops field ?cond ~exts ?dst src
let bin = bin_mvs "bin"
let sbin = bin_mvs "sbin"
let libexec = bin_mvs "libexec"
(* ocamlc used for -config detection; HOST_XBIN overrides the path for
   the non-ocamlfind builders (cross-compilation setups). *)
let find_ocamlc = function
| `OCamlbuild _ -> "ocamlfind ocamlc"
| `OCamlbuild_no_ocamlfind _ | `Other _ ->
match try Some (Sys.getenv "HOST_XBIN") with Not_found -> None with
| Some path -> Filename.quote (Filename.concat path "ocamlc")
| None -> "ocamlc"
let get_ccomp builder =
let config = OCaml_config.read ~ocamlc:(find_ocamlc builder) in
OCaml_config.ccomp config
(* Entry point called by package description files: dispatches on the
   parsed subcommand. *)
let describe pkg ~builder mvs =
if Env.error () then (pr_help (); exit 1) else
let mvs = List.sort compare (List.flatten mvs) in
let btool, bdir = match builder with
| `OCamlbuild args ->
let args = "-use-ocamlfind" :: "-classic-display" :: args in
str "ocamlbuild %s" (String.concat " " args), "_build"
| `OCamlbuild_no_ocamlfind args ->
str "ocamlbuild %s" (String.concat " " args), "_build"
| `Other (btool, bdir) -> btool, bdir
in
let ccomp = get_ccomp builder in
match Topkg.cmd with
| `Explain -> pr_explanation ccomp btool bdir pkg mvs
| `Help -> pr_help ()
| `Build -> warn_unused (); build ccomp btool bdir pkg mvs
end
(*---------------------------------------------------------------------------
   Copyright (c) 2014 Daniel C. Bünzli.
   All rights reserved.

   Redistribution and use in source and binary forms, with or without
   modification, are permitted provided that the following conditions
   are met:

   1. Redistributions of source code must retain the above copyright
      notice, this list of conditions and the following disclaimer.

   2. Redistributions in binary form must reproduce the above
      copyright notice, this list of conditions and the following
      disclaimer in the documentation and/or other materials provided
      with the distribution.

   3. Neither the name of Daniel C. Bünzli nor the names of
      contributors may be used to endorse or promote products derived
      from this software without specific prior written permission.

   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  ---------------------------------------------------------------------------*)
| null | https://raw.githubusercontent.com/ocaml/uchar/f9988830581a1f233d32e79aaacf8af76ddb9613/pkg/topkg.ml | ocaml | Public api
* Build environment access
* [bool key] declares [key] as being a boolean key in the environment.
Specifing key=(true|false) on the command line becomes mandatory.
* [native] is [bool "native"].
* [native_dylink] is [bool "native-dynlink"]
* Exts defines sets of file extensions.
* The type for extensions.
* [interface] is [[".mli"; ".cmi"; ".cmti"]]
* [interface_opt] is [".cmx" :: interface]
* [c_library] is the extension for C libraries. This is determined
from [ocamlc -config].
* [c_dll_library] is the extension for C dynamic libraries. This
is determined from [ocamlc -config].
* [library] is [[".cma"; ".cmxa"; ".cmxs"] @ c_library]
* [exe] is the extension for C libraries. This is determined from
[ocamlc -config].
* [exts sl] is [sl] as a list of extensions.
* [ext s] is [s] as a list of extensions.
* Package description.
* The type for install moves.
* [describe name builder moves] describes a package named [name] with
builder [builder] and install moves [moves].
Implementation
Parses the command line. The actual cmd execution occurs in the call
to Pkg.describe.
not t.r.
String.cut ...
: Exts
first | ---------------------------------------------------------------------------
Copyright ( c ) 2014 . All rights reserved .
Distributed under the BSD3 license , see license at the end of the file .
% % NAME%% release % % ---------------------------------------------------------------------------
Copyright (c) 2014 Daniel C. Bünzli. All rights reserved.
Distributed under the BSD3 license, see license at the end of the file.
%%NAME%% release %%VERSION%%
---------------------------------------------------------------------------*)
module type Env = sig
val bool : string -> bool
val native : bool
val native_dynlink : bool
end
module type Exts = sig
type ext = [`Ext of string | `Obj | `Lib | `Dll | `Exe]
val interface : ext list
val interface_opt : ext list
val c_library : ext list
val c_dll_library : ext list
val library : ext list
val module_library : ext list
* [ ] is [ ( interface_opt @ library ) ] .
val exe : ext list
val exts : string list -> ext list
val ext : string -> ext list
end
module type Pkg = sig
type builder =
[ `OCamlbuild of string list
| `OCamlbuild_no_ocamlfind of string list
| `Other of string * string ]
* The type for build tools .
{ ul
{ - [ ` OCamlbuild args ] , [ ocamlbuild ] is invoked with ` args ` and
` -use - ocamlfind ` . }
{ - [ ` OCamlbuild_no_ocamlfind args ] , [ ocamlbuild ] is invoked with
[ args ] }
{ - [ ` Other ( tool , ) ] , tool [ tool ] is invoked that generates
its build artefacts in [ ] . } }
{ul
{- [`OCamlbuild args], [ocamlbuild] is invoked with `args` and
`-use-ocamlfind`.}
{- [`OCamlbuild_no_ocamlfind args], [ocamlbuild] is invoked with
[args]}
{- [`Other (tool, bdir)], tool [tool] is invoked that generates
its build artefacts in [bdir].}} *)
type moves
type field = ?cond:bool ->
?exts:[`Ext of string | `Obj | `Lib | `Dll | `Exe] list -> ?dst:string ->
string -> moves
* The type for field install functions . A call
[ field cond exts dst path ] generates install moves as follows :
{ ul
{ - If [ cond ] is [ false ] ( defaults to [ true ] ) , no move is generated . }
{ - If [ exts ] is present , generates a move for each path in
the list [ List.map ( fun e - > path ^ e ) exts ] . }
{ - If [ dst ] is present this path is used as the move destination
( allows to install in subdirectories ) . If absent [ dst ] is
[ Filename.basename path ] . }
[field cond exts dst path] generates install moves as follows:
{ul
{- If [cond] is [false] (defaults to [true]), no move is generated.}
{- If [exts] is present, generates a move for each path in
the list [List.map (fun e -> path ^ e) exts].}
{- If [dst] is present this path is used as the move destination
(allows to install in subdirectories). If absent [dst] is
[Filename.basename path].} *)
val lib : field
val bin : ?auto:bool -> field
* If [ auto ] is true ( defaults to false ) generates
[ path ^ " .native " ] if { ! Env.native } is [ true ] and
[ path ^ " .byte " ] if { ! Env.native } is [ false ] . If
[ auto ] is true it also adds { ! } to the destination .
[path ^ ".native"] if {!Env.native} is [true] and
[path ^ ".byte"] if {!Env.native} is [false]. If
[auto] is true it also adds {!Ext.exe} to the destination. *)
* See { ! } .
* See { ! } .
val toplevel : field
val share : field
val share_root : field
val etc : field
val doc : field
val misc : field
val stublibs : field
val man : field
val describe : string -> builder:builder -> moves list -> unit
val find_ocamlc : builder -> string
end
let str = Printf.sprintf
module String = struct
include String
String.trim is 4.00.0 only .
let trim s =
let max = String.length s - 1 in
let drop = function ' ' | '\t' | '\n' | '\r' -> true | _ -> false in
let rec left i = if i < max && drop s.[i] then left (i + 1) else i in
let rec right i = if i >= 0 && drop s.[i] then right (i - 1) else i in
let left, right = left 0, right max in
let len = right - left + 1 in
if len < 0 then "" else String.sub s left len
let cut ?(rev = false) ~at s =
let find_index = if rev then String.rindex else String.index in
match try Some (find_index s at) with Not_found -> None with
| None -> None
| Some i ->
Some (String.sub s 0 i, String.sub s (i+1) (String.length s - i - 1))
end
module OCaml_config : sig
val read : ocamlc:string -> (string * string) list
val ccomp : (string * string) list -> [ `Win_msvc | `Win_cc | `Other ]
end = struct
let tmp_file () =
let f = Filename.temp_file (Filename.basename Sys.argv.(0)) "topkg" in
let delete () = try Sys.remove f with Sys_error _ -> () in
at_exit delete; f
let read ~ocamlc = try
let tmpf = tmp_file () in
let cmd = str "%s -config > %s" ocamlc (Filename.quote tmpf) in
let ret = Sys.command cmd in
if ret <> 0 then failwith (str "exec `%s' exited with %d" cmd ret) else
let ic = open_in tmpf in
try
let rec loop acc = try match String.cut ~at:':' (input_line ic) with
| None -> loop acc
| Some (k, v) -> loop ((k, String.trim v) :: acc)
with
| End_of_file -> close_in ic; acc
in
loop []
with exn -> (try close_in ic with _ -> ()); raise exn
with
| Failure s | Sys_error s ->
Printf.eprintf "Warning: could not read OCaml configuration`: %s\n" s;
[]
let ccomp config =
try
let ccomp_type = List.assoc "ccomp_type" config in
let os_type = List.assoc "os_type" config in
match ccomp_type, os_type with
| "msvc", _ -> `Win_msvc
| "cc", "Win32" -> `Win_cc
| _, _ -> `Other
with Not_found ->
Printf.eprintf "Warning: could not determine the C toolchain\n";
`Other
end
module Topkg : sig
val cmd : [`Build | `Explain | `Help ]
val env : (string * bool) list
val err_parse : string -> unit
val err_mdef : string -> unit
val err_miss : string -> unit
val err_file : string -> string -> unit
val warn_unused : string -> unit
end = struct
let err ?(stop = true) fmt =
let k _ = if stop then exit 1 else () in
Format.kfprintf k Format.err_formatter ("%s: " ^^ fmt ^^ "@.") Sys.argv.(0)
let err_parse a = err "argument `%s' is not of the form key=(true|false)" a
let err_mdef a = err "bool `%s' is defined more than once" a
let err_file f e = err "%s: %s" f e
let err_miss a = err ~stop:false "argument `%s=(true|false)' is missing" a
let warn_unused k = err ~stop:false "warning: environment key `%s` unused" k
let cmd, env =
| arg :: args ->
begin try
let len = String.length arg in
let eq = String.index arg '=' in
let bool = bool_of_string (String.sub arg (eq + 1) (len - eq - 1)) in
let key = String.sub arg 0 eq in
if key = "" then raise Exit else
try ignore (List.assoc key acc); err_mdef key; [] with
| Not_found -> parse_env ((key, bool) :: acc) args
with
| Invalid_argument _ | Not_found | Exit -> err_parse arg; []
end
| [] -> acc
in
match List.tl (Array.to_list Sys.argv) with
| "explain" :: args -> `Explain, parse_env [] args
| ("help" | "-h" | "--help" | "-help") :: args -> `Help, parse_env [] args
| args -> `Build, parse_env [] args
end
module Env : sig
include Env
val get : unit -> (string * bool) list
val error : unit -> bool
end = struct
let error = ref false
let env = ref []
let get () = !env
let add_bool key b = env := (key, b) :: !env
let bool key =
let b = try List.assoc key Topkg.env with
| Not_found ->
if Topkg.cmd = `Build then (error := true; Topkg.err_miss key; true)
else true
in
add_bool key b; b
let native = bool "native"
let native_dynlink = bool "native-dynlink"
let error () = !error
end
type ext = [`Ext of string | `Obj | `Lib | `Dll | `Exe]
let interface = [`Ext ".mli"; `Ext ".cmi"; `Ext ".cmti"]
let interface_opt = `Ext ".cmx" :: interface
let c_library = [`Lib]
let c_dll_library = [`Dll]
let library = [`Ext ".cma"; `Ext ".cmxa"; `Ext ".cmxs"] @ c_library
let module_library = (interface_opt @ library)
let exe = [`Exe]
let ext e = [`Ext e]
let exts es = List.map (fun e -> `Ext e) es
let ext_to_string =
let r map = function
| `Ext s -> s
| e -> try List.assoc e map with Not_found -> assert false
in
function
| `Win_msvc -> r [`Obj, ".obj"; `Lib, ".lib"; `Dll, ".dll"; `Exe, ".exe"]
| `Win_cc -> r [`Obj, ".o"; `Lib, ".a"; `Dll, ".dll"; `Exe, ".exe"]
| `Other -> r [`Obj, ".o"; `Lib, ".a"; `Dll, ".so"; `Exe, "" ]
end
module Pkg : Pkg = struct
type builder =
[ `OCamlbuild of string list
| `OCamlbuild_no_ocamlfind of string list
| `Other of string * string ]
type file = string * Exts.ext
type moves = (string * (file * file)) list
type field =
?cond:bool -> ?exts:Exts.ext list -> ?dst:string -> string -> moves
let to_file s = match String.cut ~rev:true s ~at:'.' with
| None -> s, `Ext ""
| Some (name, ext) -> name, `Ext (str ".%s" ext)
let warn_unused () =
let keys = List.map fst Topkg.env in
let keys_used = List.map fst (Env.get ()) in
let unused = List.find_all (fun k -> not (List.mem k keys_used)) keys in
List.iter Topkg.warn_unused unused
let build_strings ?(exec_sep = " ") btool bdir ext_to_string mvs =
let no_build = [ ".cmti"; ".cmt" ] in
let install = Buffer.create 1871 in
let exec = Buffer.create 1871 in
let file_to_str (n, ext) = str "%s%s" n (ext_to_string ext) in
let rec add_mvs current = function
| (field, (src, dst)) :: mvs when field = current ->
let src = file_to_str src in
let dst = file_to_str dst in
if List.exists (Filename.check_suffix src) no_build then
Buffer.add_string install (str "\n \"?%s/%s\" {\"%s\"}" bdir src dst)
else begin
Buffer.add_string exec (str "%s%s" exec_sep src);
Buffer.add_string install (str "\n \"%s/%s\" {\"%s\"}" bdir src dst);
end;
add_mvs current mvs
| (((field, _) :: _) as mvs) ->
Buffer.add_string install (str "%s: [" field);
add_mvs field mvs
| [] -> ()
in
Buffer.add_string exec btool;
add_mvs "" mvs;
Buffer.add_string install " ]\n";
Buffer.contents install, Buffer.contents exec
let pr = Format.printf
let pr_explanation ccomp btool bdir pkg mvs =
let env = Env.get () in
let ext_to_string = Exts.ext_to_string ccomp in
let exec_sep = " \\\n " in
let install, exec = build_strings ~exec_sep btool bdir ext_to_string mvs in
pr "@[<v>";
pr "Package name: %s@," pkg;
pr "Build tool: %s@," btool;
pr "Build directory: %s@," bdir;
pr "Environment:@, ";
List.iter (fun (k,v) -> pr "%s=%b@, " k v) (List.sort compare env);
pr "@,Build invocation:@,";
pr " %s@,@," exec;
pr "Install file:@,";
pr "%s@," install;
pr "@]";
()
let pr_help () =
pr "Usage example:@\n %s" Sys.argv.(0);
List.iter (fun (k,v) -> pr " %s=%b" k v) (List.sort compare (Env.get ()));
pr "@."
let build ccomp btool bdir pkg mvs =
let ext_to_string = Exts.ext_to_string ccomp in
let install, exec = build_strings btool bdir ext_to_string mvs in
let e = Sys.command exec in
if e <> 0 then exit e else
let install_file = pkg ^ ".install" in
try
let oc = open_out install_file in
output_string oc install; flush oc; close_out oc
with Sys_error e -> Topkg.err_file install_file e
let mvs ?(drop_exts = []) field ?(cond = true) ?(exts = []) ?dst src =
if not cond then [] else
let mv src dst = (field, (src, dst)) in
let expand exts s d = List.map (fun e -> mv (s, e) (d, e)) exts in
let dst = match dst with None -> Filename.basename src | Some dst -> dst in
let files =
if exts = [] then [mv (to_file src) (to_file dst)] else
expand exts src dst
in
let has_ext (_, ext) ext' = ext = ext' in
let keep (_, (src, _)) = not (List.exists (has_ext src) drop_exts) in
List.find_all keep files
let lib =
let drop_exts =
if Env.native && not Env.native_dynlink then Exts.ext ".cmxs" else
if Env.native then [] else
Exts.c_library @ Exts.exts [".cmx"; ".cmxa"; ".cmxs"]
in
mvs ~drop_exts "lib"
let share = mvs "share"
let share_root = mvs "share_root"
let etc = mvs "etc"
let toplevel = mvs "toplevel"
let doc = mvs "doc"
let misc = mvs "misc"
let stublibs = mvs "stublibs"
let man = mvs "man"
let bin_drops = if not Env.native then Exts.ext ".native" else []
(* [bin_mvs field ?auto ?cond ?exts ?dst src] is [mvs] for executable
   fields. With [auto], the destination defaults to the basename of [src]
   and the source is suffixed with ".native" or ".byte" depending on
   whether a native compiler is available. *)
let bin_mvs field ?(auto = false) ?cond ?(exts = Exts.exe) ?dst src =
  let src, dst =
    if not auto then src, dst else
    let dst = match dst with
    | None -> Some (Filename.basename src)
    | Some _ as dst -> dst
    in
    let src = if Env.native then src ^ ".native" else src ^ ".byte" in
    src, dst
  in
  mvs ~drop_exts:bin_drops field ?cond ~exts ?dst src
(* Executable install fields. *)
let bin = bin_mvs "bin"
let sbin = bin_mvs "sbin"
let libexec = bin_mvs "libexec"
(* Locate the ocamlc used to query the compiler configuration.
   ocamlbuild-with-ocamlfind uses "ocamlfind ocamlc"; otherwise honour a
   HOST_XBIN cross-compilation prefix when present, falling back to plain
   "ocamlc" on PATH. *)
let find_ocamlc = function
| `OCamlbuild _ -> "ocamlfind ocamlc"
| `OCamlbuild_no_ocamlfind _ | `Other _ ->
    match try Some (Sys.getenv "HOST_XBIN") with Not_found -> None with
    | Some path -> Filename.quote (Filename.concat path "ocamlc")
    | None -> "ocamlc"
(* [get_ccomp builder] queries the C compiler configuration through the
   ocamlc selected for [builder]. *)
let get_ccomp builder =
  let config = OCaml_config.read ~ocamlc:(find_ocamlc builder) in
  OCaml_config.ccomp config
(* [describe pkg ~builder mvs] is the package-description entry point.
   It validates the environment, normalises the move list, derives the
   build command and build directory from [builder], then dispatches on
   the requested topkg command. *)
let describe pkg ~builder mvs =
  if Env.error () then (pr_help (); exit 1) else
  let mvs = List.sort compare (List.flatten mvs) in
  let btool, bdir = match builder with
  | `OCamlbuild args ->
      let args = "-use-ocamlfind" :: "-classic-display" :: args in
      str "ocamlbuild %s" (String.concat " " args), "_build"
  | `OCamlbuild_no_ocamlfind args ->
      str "ocamlbuild %s" (String.concat " " args), "_build"
  | `Other (btool, bdir) -> btool, bdir
  in
  let ccomp = get_ccomp builder in
  match Topkg.cmd with
  | `Explain -> pr_explanation ccomp btool bdir pkg mvs
  | `Help -> pr_help ()
  | `Build -> warn_unused (); build ccomp btool bdir pkg mvs
end
---------------------------------------------------------------------------
Copyright (c) 2014 Daniel C. Bünzli.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
   notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above
   copyright notice, this list of conditions and the following
   disclaimer in the documentation and/or other materials provided
   with the distribution.
3. Neither the name of Daniel C. Bünzli nor the names of
   contributors may be used to endorse or promote products derived
   from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
---------------------------------------------------------------------------
Copyright (c) 2014 Daniel C. Bünzli.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
3. Neither the name of Daniel C. Bünzli nor the names of
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
---------------------------------------------------------------------------*)
|
3a171c7e11a9769cb18d929ec118b19c2b1451e13c7f44f083e15fa49d2c903e | ijvcms/chuanqi_dev | map_20233.erl | -module(map_20233).
-export([
range/0,
data/0
]).
%% @doc Map dimensions as {Width, Height} in grid cells.
range() -> {72, 48}.
data() ->
{
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,2,2,2,2,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,2,2,2,2,2,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,2,2,2,2,2,2,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0,0,2,2,2,2,2,2,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,2,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,2,2,2,2,2,2,1,1,0,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,0,0,0,0,0,0,2,0,0,2,2,0,0,0,2,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,2,2,2,2,2,2,2,1,1,0,0,0,0,0,2,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,0,0,0,2,0,0,1,0,0,2,2,2,2,2,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,2,2,2,1,0,0,0,0,0,0,0,2,2,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0,0,0,2,2,2,2,2,2,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,1},
{1,1,1,1,1,1,1,0,0,0,0,1,1,1,1,2,2,2,2,2,0,0,0,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,1,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,1},
{1,1,1,1,1,0,0,0,0,0,0,0,0,0,1,1,1,2,2,2,2,2,2,2,2,2,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,2,2,2,2,2,2,0,0,0,0,0,0,0,0,2,2,1},
{1,1,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,1,2,2,2,2,2,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,1,1,1,1,2,2,2,2,2,0,0,0,0,2,2,2,2,1},
{1,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,2,2,2,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,2,1,1,1,1,1,1,2,2,2,2,2,2,2,2,1,1,1},
{1,1,2,2,2,2,0,0,0,1,1,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1},
{1,1,1,1,2,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,0,0,0,0,1,1,2,2,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,2,2,2,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,0,1,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,2,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0,0,1,1,1,1,1,1,1,1},
{1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,0,0,0,1,1,1,1,1,1},
{1,1,2,2,2,0,0,1,0,0,0,0,0,0,0,0,2,2,2,2,1,1,1,1,2,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,2,0,0,0,1,0,0,0,0,0,0,1,1,1,1},
{1,1,1,1,2,2,1,1,1,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,2,0,0,1,1,1,0,0,0,2,2,2,2,2,1},
{1,1,1,1,1,2,2,2,2,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,2,2,1,1,1},
{1,1,1,1,1,1,1,2,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,2,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,2,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,2,0,0,0,0,2,2,0,0,0,0,2,2,2,2,2,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,1,0,0,0,2,0,2,0,0,0,2,2,2,2,2,2,1,1,1,0,0,0,0,0,0,0,0,0,1,1,1,0,0,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,1,0,0,2,2,2,2,2,2,2,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,0,2,2,2,1,0,0,0,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,0,2,2,2,2,0,2,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,2,0,2,1,1,0,0,0,0,0,0,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,2,2,2,0,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,1,1,2,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,1,1,1,1,1,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,0,0,0,0,2,2,2,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1}
}.
| null | https://raw.githubusercontent.com/ijvcms/chuanqi_dev/7742184bded15f25be761c4f2d78834249d78097/server/trunk/server/src/map_data/map_20233.erl | erlang | -module(map_20233).
-export([
range/0,
data/0
]).
range() -> {72, 48}.
data() ->
{
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,2,2,2,2,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,2,2,2,2,2,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,2,2,2,2,2,2,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0,0,2,2,2,2,2,2,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,2,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,2,2,2,2,2,2,1,1,0,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,0,0,0,0,0,0,2,0,0,2,2,0,0,0,2,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,2,2,2,2,2,2,2,1,1,0,0,0,0,0,2,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,0,0,0,2,0,0,1,0,0,2,2,2,2,2,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,2,2,2,1,0,0,0,0,0,0,0,2,2,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0,0,0,2,2,2,2,2,2,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,1},
{1,1,1,1,1,1,1,0,0,0,0,1,1,1,1,2,2,2,2,2,0,0,0,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,1,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,1},
{1,1,1,1,1,0,0,0,0,0,0,0,0,0,1,1,1,2,2,2,2,2,2,2,2,2,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,2,2,2,2,2,2,0,0,0,0,0,0,0,0,2,2,1},
{1,1,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,1,2,2,2,2,2,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,1,1,1,1,2,2,2,2,2,0,0,0,0,2,2,2,2,1},
{1,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,2,2,2,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,2,1,1,1,1,1,1,2,2,2,2,2,2,2,2,1,1,1},
{1,1,2,2,2,2,0,0,0,1,1,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1},
{1,1,1,1,2,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,0,0,0,0,1,1,2,2,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,2,2,2,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,0,1,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,2,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0,0,1,1,1,1,1,1,1,1},
{1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,0,0,0,1,1,1,1,1,1},
{1,1,2,2,2,0,0,1,0,0,0,0,0,0,0,0,2,2,2,2,1,1,1,1,2,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,2,0,0,0,1,0,0,0,0,0,0,1,1,1,1},
{1,1,1,1,2,2,1,1,1,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,2,0,0,1,1,1,0,0,0,2,2,2,2,2,1},
{1,1,1,1,1,2,2,2,2,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,2,2,1,1,1},
{1,1,1,1,1,1,1,2,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,2,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,2,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,2,0,0,0,0,2,2,0,0,0,0,2,2,2,2,2,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,1,0,0,0,2,0,2,0,0,0,2,2,2,2,2,2,1,1,1,0,0,0,0,0,0,0,0,0,1,1,1,0,0,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,1,0,0,2,2,2,2,2,2,2,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,0,2,2,2,1,0,0,0,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,0,2,2,2,2,0,2,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,2,0,2,1,1,0,0,0,0,0,0,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,2,2,2,0,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,1,1,2,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,1,1,1,1,1,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,2,1,1,1,1,1,1,1,1,2,2,2,2,0,0,0,0,0,0,0,0,2,2,2,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1}
}.
| |
518577f39fb1abd3146403ab93d31f2c4595b0f8de9e4d2fa8083b54b9a0805d | godfat/sandbox | tjs2hs.hs |
import Control.Applicative hiding ((<|>))
import Text.ParserCombinators.Parsec
pInt : :
pInt = do
-- val <- many1 digit
return ( read )
-- pDouble :: Parser Double
-- pDouble = do
left
-- char '.'
right
-- return (read (show left ++ "." ++ show right))
-- | Parse one or more digits as an 'Int'.
pInt :: Parser Int
pInt = fmap read (many1 digit)
-- | Parse a decimal literal such as @3.14@.
--
-- The integer and fractional digit strings are kept verbatim, so
-- fractional parts with leading zeros (e.g. @1.05@) parse correctly.
-- The previous round-trip through 'pInt' and 'show' dropped leading
-- zeros, turning @1.05@ into @1.5@.
pDouble :: Parser Double
pDouble = combine <$> many1 digit <*> (char '.' *> many1 digit)
  where
    combine intPart fracPart = read (intPart ++ "." ++ fracPart)
-- | Parse a number: prefer a 'Double' literal, fall back to an integer
-- converted with 'fromIntegral'. ('<$>' replaces the noisier
-- @>>= return .@ formulation.)
pNum :: Parser Double
pNum = try pDouble <|> (fromIntegral <$> pInt)
-- | Parse a parenthesised sub-expression.
pGroup :: Parser Double
pGroup = between (char '(') (char ')') pExpression
-- | A factor is either a parenthesised group or a bare number.
pFactor :: Parser Double
pFactor = pGroup <|> pNum
-- | Parse a product/quotient chain of factors.
--
-- 'chainl1' makes '*' and '/' associate to the left, so @8/4/2@
-- evaluates to @1@; the previous right-recursive formulation parsed it
-- as @8/(4/2) = 4@.
pTerm :: Parser Double
pTerm = pFactor `chainl1` mulOp
  where
    mulOp = ((*) <$ char '*') <|> ((/) <$ char '/')
-- | Parse a sum/difference chain of terms.
--
-- 'chainl1' makes '+' and '-' associate to the left, so @8-3-2@
-- evaluates to @3@; the previous right-recursive formulation parsed it
-- as @8-(3-2) = 7@.
pExpression :: Parser Double
pExpression = pTerm `chainl1` addOp
  where
    addOp = ((+) <$ char '+') <|> ((-) <$ char '-')
-- | Parse a complete expression, requiring that all input is consumed.
pCalculate :: Parser Double
pCalculate = do
  value <- pExpression
  eof
  return value
-- | Monadic variant of 'pNum'.
--
-- NOTE(review): after the integer part, @char '.'@ consumes the dot, so
-- input like @"1."@ (a dot not followed by digits) fails with input
-- consumed instead of falling back to the integer branch.
pNum' :: Parser Double
pNum' = do
  left <- many1 digit
  (do
    char '.'
    right <- many1 digit
    return (read (left ++ "." ++ right))) <|> return (read left)
-- | Variant of 'pNum'' that wraps only the dot in 'try'.
--
-- NOTE(review): 'try' protects just @char '.'@; if the digits after a
-- consumed dot are missing, the whole parser still fails with input
-- consumed, so @"1."@ is not parsed as @1@ here either.
pNum'' :: Parser Double
pNum'' = do
  left <- many1 digit
  (try (char '.') >>
    do
      right <- many1 digit
      return (read (left ++ "." ++ right))) <|> return (read left)
| null | https://raw.githubusercontent.com/godfat/sandbox/eb6294238f92543339adfdfb4ba88586ba0e82b8/haskell/parsing/tjs2hs.hs | haskell | val <- many1 digit
pDouble :: Parser Double
pDouble = do
char '.'
return (read (show left ++ "." ++ show right)) |
import Control.Applicative hiding ((<|>))
import Text.ParserCombinators.Parsec
pInt : :
pInt = do
return ( read )
left
right
pInt :: Parser Int
pInt = read <$> many1 digit
pDouble :: Parser Double
pDouble = num <$> pInt <*> (char '.' *> pInt) where
num l r = read $ show l ++ "." ++ show r
pNum :: Parser Double
pNum = try pDouble <|> (pInt >>= return . fromIntegral)
pGroup :: Parser Double
pGroup = char '(' *> pExpression <* char ')'
pFactor :: Parser Double
pFactor = pGroup <|> pNum
pTerm :: Parser Double
pTerm = try ((*) <$> pFactor <*> (char '*' *> pTerm))
<|> try ((/) <$> pFactor <*> (char '/' *> pTerm))
<|> pFactor
pExpression :: Parser Double
pExpression = try ((+) <$> pTerm <*> (char '+' *> pExpression))
<|> try ((-) <$> pTerm <*> (char '-' *> pExpression))
<|> pTerm
pCalculate :: Parser Double
pCalculate = pExpression <* eof
pNum' :: Parser Double
pNum' = do
left <- many1 digit
(do
char '.'
right <- many1 digit
return (read (left ++ "." ++ right))) <|> return (read left)
pNum'' :: Parser Double
pNum'' = do
left <- many1 digit
(try (char '.') >>
do
right <- many1 digit
return (read (left ++ "." ++ right))) <|> return (read left)
|
6fe08bef0a86b9ac9598069c2c457edf5f425d75beb26914165fad432b9dd29a | onedata/op-worker | file_subscription.erl | %%%-------------------------------------------------------------------
@author
( C ) 2016 ACK CYFRONET AGH
This software is released under the MIT license
cited in ' LICENSE.txt ' .
%%% @end
%%%-------------------------------------------------------------------
%%% @doc File subscription model.
%%% @end
%%%-------------------------------------------------------------------
-module(file_subscription).
-author("Krzysztof Trzepla").
-include("modules/events/types.hrl").
-include("modules/datastore/datastore_models.hrl").
-include("modules/datastore/datastore_runner.hrl").
-include_lib("ctool/include/logging.hrl").
%% API
-export([save/1, get/1, exists/1, delete/2, update/2, create/1]).
%% datastore_model callbacks
-export([get_ctx/0]).
-type key() :: datastore:key().
-type record() :: #file_subscription{}.
-type doc() :: datastore_doc:doc(record()).
-type diff() :: datastore_doc:diff(record()).
-type pred() :: datastore_doc:pred(record()).
-define(CTX, #{
model => ?MODULE,
disc_driver => undefined
}).
%%%===================================================================
%%% API
%%%===================================================================
%%--------------------------------------------------------------------
%% @doc
%% Saves a file subscription document and returns its key.
%% @end
%%--------------------------------------------------------------------
-spec save(doc()) -> {ok, key()} | {error, term()}.
save(Doc) ->
    ?extract_key(datastore_model:save(?CTX, Doc)).
%%--------------------------------------------------------------------
%% @doc
%% Updates a file subscription document by applying Diff.
%% @end
%%--------------------------------------------------------------------
-spec update(key(), diff()) -> {ok, doc()} | {error, term()}.
update(Key, Diff) ->
    datastore_model:update(?CTX, Key, Diff).
%%--------------------------------------------------------------------
%% @doc
%% Creates a file subscription document and returns its key.
%% @end
%%--------------------------------------------------------------------
-spec create(doc()) -> {ok, key()} | {error, term()}.
create(Doc) ->
    ?extract_key(datastore_model:create(?CTX, Doc)).
%%--------------------------------------------------------------------
%% @doc
%% Returns a file subscription document by key.
%% @end
%%--------------------------------------------------------------------
-spec get(key()) -> {ok, doc()} | {error, term()}.
get(Key) ->
    datastore_model:get(?CTX, Key).
%%--------------------------------------------------------------------
%% @doc
%% Deletes a file subscription document when Pred is satisfied.
%% @end
%%--------------------------------------------------------------------
-spec delete(key(), pred()) -> ok | {error, term()}.
delete(Key, Pred) ->
    datastore_model:delete(?CTX, Key, Pred).
%%--------------------------------------------------------------------
%% @doc
%% Checks whether a file subscription document exists for Key.
%% @end
%%--------------------------------------------------------------------
-spec exists(key()) -> boolean().
exists(Key) ->
    {ok, Exists} = datastore_model:exists(?CTX, Key),
    Exists.
%%%===================================================================
%%% datastore_model callbacks
%%%===================================================================
%%--------------------------------------------------------------------
%% @doc
%% Returns model's context.
%% @end
%%--------------------------------------------------------------------
-spec get_ctx() -> datastore:ctx().
get_ctx() ->
?CTX. | null | https://raw.githubusercontent.com/onedata/op-worker/b09f05b6928121cec4d6b41ce8037fe056e6b4b3/src/modules/datastore/models/file/file_subscription.erl | erlang | -------------------------------------------------------------------
@end
-------------------------------------------------------------------
@doc File subscription model.
@end
-------------------------------------------------------------------
API
datastore_model callbacks
===================================================================
API
===================================================================
--------------------------------------------------------------------
@doc
Saves permission cache.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Updates permission cache.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Creates permission cache.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Returns permission cache.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Deletes permission cache.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Checks whether permission cache exists.
@end
--------------------------------------------------------------------
===================================================================
datastore_model callbacks
===================================================================
--------------------------------------------------------------------
@doc
Returns model's context.
@end
-------------------------------------------------------------------- | @author
( C ) 2016 ACK CYFRONET AGH
This software is released under the MIT license
cited in ' LICENSE.txt ' .
-module(file_subscription).
-author("Krzysztof Trzepla").
-include("modules/events/types.hrl").
-include("modules/datastore/datastore_models.hrl").
-include("modules/datastore/datastore_runner.hrl").
-include_lib("ctool/include/logging.hrl").
-export([save/1, get/1, exists/1, delete/2, update/2, create/1]).
-export([get_ctx/0]).
-type key() :: datastore:key().
-type record() :: #file_subscription{}.
-type doc() :: datastore_doc:doc(record()).
-type diff() :: datastore_doc:diff(record()).
-type pred() :: datastore_doc:pred(record()).
-define(CTX, #{
model => ?MODULE,
disc_driver => undefined
}).
-spec save(doc()) -> {ok, key()} | {error, term()}.
save(Doc) ->
?extract_key(datastore_model:save(?CTX, Doc)).
-spec update(key(), diff()) -> {ok, doc()} | {error, term()}.
update(Key, Diff) ->
datastore_model:update(?CTX, Key, Diff).
-spec create(doc()) -> {ok, key()} | {error, term()}.
create(Doc) ->
?extract_key(datastore_model:create(?CTX, Doc)).
-spec get(key()) -> {ok, doc()} | {error, term()}.
get(Key) ->
datastore_model:get(?CTX, Key).
-spec delete(key(), pred()) -> ok | {error, term()}.
delete(Key, Pred) ->
datastore_model:delete(?CTX, Key, Pred).
-spec exists(key()) -> boolean().
exists(Key) ->
{ok, Exists} = datastore_model:exists(?CTX, Key),
Exists.
-spec get_ctx() -> datastore:ctx().
get_ctx() ->
?CTX. |
750b7cb6b78905baa781e77e9d3799d642821731708a8f55c7bb74515cef422b | NorfairKing/sydtest | Around.hs | # LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
{-# LANGUAGE GADTs #-}
# LANGUAGE IncoherentInstances #
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
module Test.Syd.Def.Around where
import Control.Exception
import Control.Monad.Reader
import Control.Monad.Writer.Strict
import Data.Kind
import Test.QuickCheck.IO ()
import Test.Syd.Def.TestDefM
import Test.Syd.HList
import Test.Syd.Run
import Test.Syd.SpecDef
-- | Run a custom action before every spec item, to set up an inner resource 'inner'.
--
-- Note that this function turns off shrinking.
before ::
  -- | Set-up action that produces the inner resource for each test
  IO inner ->
  TestDefM outers inner result ->
  TestDefM outers () result
before setUp = beforeWith (\() -> setUp)
-- | Run a custom action before every spec item without setting up any inner resources.
--
-- Note that this function turns off shrinking.
before_ ::
  -- | Side effect to run before every test
  IO () ->
  TestDefM outers inner result ->
  TestDefM outers inner result
before_ sideEffect = beforeWith $ \resource -> sideEffect >> pure resource
| Run a custom action before every spec item , to set up an inner resource ' ' using the previously set up resource ' '
--
-- Note that this function turns off shrinking.
-- See
beforeWith ::
forall outers oldInner newInner result.
(oldInner -> IO newInner) ->
TestDefM outers newInner result ->
TestDefM outers oldInner result
beforeWith action = beforeWith' (\(_ :: HList outers) -> action)
| Run a custom action before every spec item , to set up an inner resource ' ' using the previously set up resource ' ' and potentially any of the outer resources
--
-- Note that this function turns off shrinking.
-- See
beforeWith' ::
HContains outers outer =>
(outer -> oldInner -> IO newInner) ->
TestDefM outers newInner result ->
TestDefM outers oldInner result
beforeWith' action = aroundWith' $ \func outer inner -> action outer inner >>= func outer
-- | Run a custom action after every spec item, using the inner resource 'c'.
--
-- Note that this function turns off shrinking.
after ::
  -- | Clean-up action run after every test, using the inner resource
  (inner -> IO ()) ->
  TestDefM outers inner result ->
  TestDefM outers inner result
after cleanUp = aroundWith $ \runTest resource ->
  runTest resource `finally` cleanUp resource
-- | Run a custom action after every spec item without using any inner resources.
--
-- Note that this function turns off shrinking.
after_ ::
  -- | Side effect to run after every test
  IO () ->
  TestDefM outers inner result ->
  TestDefM outers inner result
after_ sideEffect = after (const sideEffect)
-- | Run a custom action before and/or after every spec item, to provide access to an inner resource 'c'.
--
-- See the @FOOTGUN@ note in the docs for 'around_'.
--
-- Note that this function turns off shrinking.
around ::
  -- | Wrapper that supplies the inner resource around every test
  ((inner -> IO ()) -> IO ()) ->
  TestDefM outers inner result ->
  TestDefM outers () result
around provide = aroundWith $ \runTest () -> provide runTest
-- | Run a custom action before and/or after every spec item without accessing any inner resources.
--
-- It is important that the wrapper function you provide runs the given action _exactly once_.
--
-- == __FOOTGUN__
--
-- This combinator gives the programmer enough power to break the test
-- framework. A wrapper that never runs the action:
--
-- > spec :: Spec
-- > spec = do
-- >   let don'tDo :: IO () -> IO ()
-- >       don'tDo _ = pure ()
-- >   around_ don'tDo $ do
-- >     it "should pass" True
--
-- produces an error like this during execution:
--
-- > thread blocked indefinitely in an MVar operation
--
-- The same problem exists when using 'Test.Syd.Def.Around.aroundAll_',
-- and the same thing goes wrong if the action is run more than once:
--
-- > spec :: Spec
-- > spec = do
-- >   let doTwice :: IO () -> IO ()
-- >       doTwice f = f >> f
-- >   around_ doTwice $ do
-- >     it "should pass" True
--
--
-- Note: If you're interested in fixing this, talk to me, but only after GHC has gotten impredicative types because that will likely be a requirement.
--
-- Note that this function turns off shrinking.
around_ ::
  -- | Wrapper to put around every test
  (IO () -> IO ()) ->
  TestDefM outers inner result ->
  TestDefM outers inner result
around_ wrapper = aroundWith $ \runTest resource -> wrapper (runTest resource)
-- | Run a custom action before and/or after every spec item, to provide access to an inner resource 'c' while using the inner resource 'd'.
--
-- See the @FOOTGUN@ note in the docs for 'around_'.
--
-- Note that this function turns off shrinking.
-- See
aroundWith ::
  forall newInner oldInner outers result.
  ((newInner -> IO ()) -> (oldInner -> IO ())) ->
  TestDefM outers newInner result ->
  TestDefM outers oldInner result
-- Delegates to 'aroundWith'' while ignoring the outer-resource argument;
-- the lambda's type annotation disambiguates the 'HContains' instance.
aroundWith func =
  aroundWith' $
    \(takeAC :: HList outers -> newInner -> IO ()) -- Just to make sure the 'a' is not ambiguous.
      a
      d ->
        func (\c -> takeAC a c) d
-- | Run a custom action around every spec item, to provide access to an inner resource 'newInner' while using the inner resource 'oldInner' and any outer resource available.
--
-- Note that this function turns off shrinking.
-- See
aroundWith' ::
  forall newInner oldInner outer result (outers :: [Type]).
  HContains outers outer =>
  -- | The function that provides the new inner resource using the old resource.
  -- It can also use and modify the outer resource
  ((outer -> newInner -> IO ()) -> (outer -> oldInner -> IO ())) ->
  TestDefM outers newInner result ->
  TestDefM outers oldInner result
aroundWith' func (TestDefM rwst) =
  -- Wrapping interferes with shrinking, so shrinking is turned off for
  -- everything underneath this combinator (max shrinks set to 0).
  local (\tde -> tde {testDefEnvTestRunSettings = (testDefEnvTestRunSettings tde) {testRunSettingMaxShrinks = 0}}) $
    TestDefM $
      flip mapWriterT rwst $ \inner -> do
        (res, forest) <- inner
        -- a: outers
        -- c: newInner
        -- d: oldInner
        let modifyVal ::
              forall x.
              HContains x outer =>
              (ProgressReporter -> ((HList x -> newInner -> IO ()) -> IO ()) -> IO TestRunResult) ->
              ProgressReporter ->
              ((HList x -> oldInner -> IO ()) -> IO ()) ->
              IO TestRunResult
            -- Adapt a single test-runner value so that it accepts a supplier
            -- of the old inner resource instead of the new one.
            modifyVal takeSupplyXC progressReporter supplyXD =
              let supplyXC :: (HList x -> newInner -> IO ()) -> IO ()
                  supplyXC takeXC =
                    let takeXD :: HList x -> oldInner -> IO ()
                        takeXD x d =
                          -- The outer resources 'x' are captured here; 'func'
                          -- only receives the single element it asked for,
                          -- extracted via 'getElem'.
                          let takeAC _ c = takeXC x c
                           in func takeAC (getElem x) d
                     in supplyXD takeXD
               in takeSupplyXC progressReporter supplyXC
            -- For this function to work recursively, the first parameter of the input and the output types must be the same
            modifyTree ::
              forall x extra. HContains x outer => SpecDefTree x newInner extra -> SpecDefTree x oldInner extra
            -- Rewrite every node: only 'DefSpecifyNode' actually holds test
            -- values; all the other constructors are rebuilt as-is with their
            -- subforests rewritten recursively.
            modifyTree = \case
              DefDescribeNode t sdf -> DefDescribeNode t $ modifyForest sdf
              DefPendingNode t mr -> DefPendingNode t mr
              DefSpecifyNode t td e -> DefSpecifyNode t (modifyVal <$> td) e
              DefWrapNode f sdf -> DefWrapNode f $ modifyForest sdf
              DefBeforeAllNode f sdf -> DefBeforeAllNode f $ modifyForest sdf
              DefAroundAllNode f sdf -> DefAroundAllNode f $ modifyForest sdf
              DefAroundAllWithNode f sdf -> DefAroundAllWithNode f $ modifyForest sdf
              DefAfterAllNode f sdf -> DefAfterAllNode f $ modifyForest sdf
              DefParallelismNode f sdf -> DefParallelismNode f $ modifyForest sdf
              DefRandomisationNode f sdf -> DefRandomisationNode f $ modifyForest sdf
              DefRetriesNode f sdf -> DefRetriesNode f $ modifyForest sdf
              DefFlakinessNode f sdf -> DefFlakinessNode f $ modifyForest sdf
              DefExpectationNode f sdf -> DefExpectationNode f $ modifyForest sdf
            modifyForest ::
              forall x extra.
              HContains x outer =>
              SpecDefForest x newInner extra ->
              SpecDefForest x oldInner extra
            modifyForest = map modifyTree
        let forest' :: SpecDefForest outers oldInner ()
            forest' = modifyForest forest
        pure (res, forest')
| null | https://raw.githubusercontent.com/NorfairKing/sydtest/61f8cc4bc010140cca1613b9aee173c87e3cfe9d/sydtest/src/Test/Syd/Def/Around.hs | haskell | # LANGUAGE GADTs #
# LANGUAGE OverloadedStrings #
| Run a custom action before every spec item, to set up an inner resource 'inner'.
Note that this function turns off shrinking.
See
| The function to run before every test, to produce the inner resource
| Run a custom action before every spec item without setting up any inner resources.
Note that this function turns off shrinking.
See
| The function to run before every test
Note that this function turns off shrinking.
See
Note that this function turns off shrinking.
See
| Run a custom action after every spec item, using the inner resource 'c'.
Note that this function turns off shrinking.
See
| The function to run after every test, using the inner resource
| Run a custom action after every spec item without using any inner resources.
Note that this function turns off shrinking.
See
| The function to run after every test
| Run a custom action before and/or after every spec item, to provide access to an inner resource 'c'.
See the @FOOTGUN@ note in the docs for 'around_'.
Note that this function turns off shrinking.
See
| The function to provide the inner resource around every test
| Run a custom action before and/or after every spec item without accessing any inner resources.
It is important that the wrapper function that you provide runs the action that it gets _exactly once_.
== __FOOTGUN__
This combinator gives the programmer a lot of power.
In fact, it gives the programmer enough power to break the test framework.
Indeed, you can provide a wrapper function that just _doesn't_ run the function like this:
> spec :: Spec
> spec = do
> let don'tDo :: IO () -> IO ()
> don'tDo _ = pure ()
> around_ don'tDo $ do
> it "should pass" True
During execution, you'll then get an error like this:
The same problem exists when using 'Test.Syd.Def.Around.aroundAll_'.
The same thing will go wrong if you run the given action more than once like this:
> spec :: Spec
> spec = do
> let doTwice :: IO () -> IO ()
> doTwice f = f >> f
> around_ doTwice $ do
> it "should pass" True
Note that this function turns off shrinking.
See
| The function to wrap every test with
| Run a custom action before and/or after every spec item, to provide access to an inner resource 'c' while using the inner resource 'd'.
See the @FOOTGUN@ note in the docs for 'around_'.
Note that this function turns off shrinking.
See
Just to make sure the 'a' is not ambiguous.
Note that this function turns off shrinking.
See
| The function that provides the new inner resource using the old resource.
It can also use and modify the outer resource
a: outers | # LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE IncoherentInstances #
# LANGUAGE LambdaCase #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
module Test.Syd.Def.Around where
import Control.Exception
import Control.Monad.Reader
import Control.Monad.Writer.Strict
import Data.Kind
import Test.QuickCheck.IO ()
import Test.Syd.Def.TestDefM
import Test.Syd.HList
import Test.Syd.Run
import Test.Syd.SpecDef
before ::
IO inner ->
TestDefM outers inner result ->
TestDefM outers () result
before action = beforeWith $ \() -> action
before_ ::
IO () ->
TestDefM outers inner result ->
TestDefM outers inner result
before_ action = beforeWith $ \inner -> do
action
pure inner
| Run a custom action before every spec item , to set up an inner resource ' ' using the previously set up resource ' '
beforeWith ::
forall outers oldInner newInner result.
(oldInner -> IO newInner) ->
TestDefM outers newInner result ->
TestDefM outers oldInner result
beforeWith action = beforeWith' (\(_ :: HList outers) -> action)
| Run a custom action before every spec item , to set up an inner resource ' ' using the previously set up resource ' ' and potentially any of the outer resources
beforeWith' ::
HContains outers outer =>
(outer -> oldInner -> IO newInner) ->
TestDefM outers newInner result ->
TestDefM outers oldInner result
beforeWith' action = aroundWith' $ \func outer inner -> action outer inner >>= func outer
after ::
(inner -> IO ()) ->
TestDefM outers inner result ->
TestDefM outers inner result
after action = aroundWith $ \e x -> e x `finally` action x
after_ ::
IO () ->
TestDefM outers inner result ->
TestDefM outers inner result
after_ action = after $ \_ -> action
around ::
((inner -> IO ()) -> IO ()) ->
TestDefM outers inner result ->
TestDefM outers () result
around action = aroundWith $ \e () -> action e
> thread blocked indefinitely in an MVar operation
Note : If you 're interested in fixing this , talk to me , but only after GHC has gotten impredicative types because that will likely be a requirement .
around_ ::
(IO () -> IO ()) ->
TestDefM outers inner result ->
TestDefM outers inner result
around_ action = aroundWith $ \e a -> action (e a)
aroundWith ::
forall newInner oldInner outers result.
((newInner -> IO ()) -> (oldInner -> IO ())) ->
TestDefM outers newInner result ->
TestDefM outers oldInner result
aroundWith func =
aroundWith' $
a
d ->
func (\c -> takeAC a c) d
| Run a custom action around every spec item , to provide access to an inner resource ' ' while using the inner resource ' ' and any outer resource available .
aroundWith' ::
forall newInner oldInner outer result (outers :: [Type]).
HContains outers outer =>
((outer -> newInner -> IO ()) -> (outer -> oldInner -> IO ())) ->
TestDefM outers newInner result ->
TestDefM outers oldInner result
aroundWith' func (TestDefM rwst) =
local (\tde -> tde {testDefEnvTestRunSettings = (testDefEnvTestRunSettings tde) {testRunSettingMaxShrinks = 0}}) $
TestDefM $
flip mapWriterT rwst $ \inner -> do
(res, forest) <- inner
c :
d :
let modifyVal ::
forall x.
HContains x outer =>
(ProgressReporter -> ((HList x -> newInner -> IO ()) -> IO ()) -> IO TestRunResult) ->
ProgressReporter ->
((HList x -> oldInner -> IO ()) -> IO ()) ->
IO TestRunResult
modifyVal takeSupplyXC progressReporter supplyXD =
let supplyXC :: (HList x -> newInner -> IO ()) -> IO ()
supplyXC takeXC =
let takeXD :: HList x -> oldInner -> IO ()
takeXD x d =
let takeAC _ c = takeXC x c
in func takeAC (getElem x) d
in supplyXD takeXD
in takeSupplyXC progressReporter supplyXC
For this function to work recursively , the first parameter of the input and the output types must be the same
modifyTree ::
forall x extra. HContains x outer => SpecDefTree x newInner extra -> SpecDefTree x oldInner extra
modifyTree = \case
DefDescribeNode t sdf -> DefDescribeNode t $ modifyForest sdf
DefPendingNode t mr -> DefPendingNode t mr
DefSpecifyNode t td e -> DefSpecifyNode t (modifyVal <$> td) e
DefWrapNode f sdf -> DefWrapNode f $ modifyForest sdf
DefBeforeAllNode f sdf -> DefBeforeAllNode f $ modifyForest sdf
DefAroundAllNode f sdf -> DefAroundAllNode f $ modifyForest sdf
DefAroundAllWithNode f sdf -> DefAroundAllWithNode f $ modifyForest sdf
DefAfterAllNode f sdf -> DefAfterAllNode f $ modifyForest sdf
DefParallelismNode f sdf -> DefParallelismNode f $ modifyForest sdf
DefRandomisationNode f sdf -> DefRandomisationNode f $ modifyForest sdf
DefRetriesNode f sdf -> DefRetriesNode f $ modifyForest sdf
DefFlakinessNode f sdf -> DefFlakinessNode f $ modifyForest sdf
DefExpectationNode f sdf -> DefExpectationNode f $ modifyForest sdf
modifyForest ::
forall x extra.
HContains x outer =>
SpecDefForest x newInner extra ->
SpecDefForest x oldInner extra
modifyForest = map modifyTree
let forest' :: SpecDefForest outers oldInner ()
forest' = modifyForest forest
pure (res, forest')
|
827eed62319b2a71d288c8b2b7db00c7a56ea6eef67b297e3b6e90f2076c3338 | alanz/ghc-exactprint | OutOfHeap.hs | import qualified Data.Array.Unboxed as UA
import Data.Word
main :: IO ()
main = print (UA.listArray (1, 2^(20::Int)) (repeat 0)
:: UA.UArray Int Word64)
-- this unboxed array should at least take:
2 ^ 20 * 64 bits
= 8 * ( 2 ^ 20 bytes )
= 8 MiB ( in heap )
| null | https://raw.githubusercontent.com/alanz/ghc-exactprint/b6b75027811fa4c336b34122a7a7b1a8df462563/tests/examples/ghc80/OutOfHeap.hs | haskell | this unboxed array should at least take: | import qualified Data.Array.Unboxed as UA
import Data.Word
main :: IO ()
main = print (UA.listArray (1, 2^(20::Int)) (repeat 0)
:: UA.UArray Int Word64)
2 ^ 20 * 64 bits
= 8 * ( 2 ^ 20 bytes )
= 8 MiB ( in heap )
|
9f8cfb2e082619dd6fb7566224ace8f3127780f7eb410c755fc597a3e1f67893 | jaspervdj/advent-of-code | Bounded.hs | -- | Simple 2D grids backed by a vector.
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveTraversable #-}
# LANGUAGE RecordWildCards #
module AdventOfCode.Grid.Bounded
( G.Dir (..)
, G.turnLeft
, G.turnRight
, G.turnAround
, G.Pos
, G.origin
, G.move
, G.neighbours
, G.diagonal
, G.manhattan
, Grid (..)
, generate
, fromString
, mapWithKey
, lookup
, index
, (!)
, toString
, toList
) where
import qualified AdventOfCode.Grid as G
import AdventOfCode.V2 (V2 (..))
import Control.Monad (when)
import Data.Maybe (fromMaybe)
import qualified Data.Vector as V
import qualified Data.Vector.Extra as V (generate')
import Prelude hiding (lookup)
data Grid a = Grid
{ gridWidth :: {-# UNPACK #-} !Int
, gridHeight :: {-# UNPACK #-} !Int
, gridData :: {-# UNPACK #-} !(V.Vector a)
} deriving (Eq, Foldable, Functor, Show, Traversable)
emptyGrid :: Grid a
emptyGrid = Grid 0 0 V.empty
generate :: Int -> Int -> (G.Pos -> a) -> Grid a
generate width height f = Grid
{ gridWidth = width
, gridHeight = height
, gridData = V.generate' (width * height) $ \idx ->
let (y, x) = idx `divMod` width in f (V2 x y)
}
fromString :: String -> Either String (Grid Char)
fromString string = case lines string of
[] -> Right emptyGrid
(x : xs) ->
let row = V.fromList x
width = V.length row in
go width [row] xs
where
go width rows [] =
Right $ Grid width (length rows) (V.concat $ reverse rows)
go width rows (x : xs) = do
let row = V.fromList x
when (V.length row /= width) $ Left "row length mismatch"
go width (row : rows) xs
mapWithKey :: (G.Pos -> a -> b) -> Grid a -> Grid b
mapWithKey f Grid {..} = generate gridWidth gridHeight $ \(V2 x y) ->
f (V2 x y) $! V.unsafeIndex gridData (y * gridWidth + x)
{-# INLINABLE mapWithKey #-}
lookup :: G.Pos -> Grid a -> Maybe a
lookup (V2 x y) Grid {..}
| x < 0 || x >= gridWidth || y < 0 || y >= gridHeight = Nothing
| otherwise = Just $
V.unsafeIndex gridData (y * gridWidth + x)
index :: G.Pos -> Grid a -> a
index v g = fromMaybe
(error $ "AdventOfCode.Grid.Bounded.index: out of bounds: " ++ show v)
(lookup v g)
(!) :: Grid a -> G.Pos -> a
g ! v = index v g
toString :: Grid Char -> String
toString g@Grid {..} = unlines $ do
y <- [0 .. gridHeight - 1]
[[fromMaybe ' ' $ lookup (V2 x y) g | x <- [0 .. gridWidth - 1]]]
toList :: Grid a -> [(G.Pos, a)]
toList g@Grid {..} = do
y <- [0 .. gridHeight - 1]
x <- [0 .. gridWidth - 1]
pure (V2 x y, index (V2 x y) g)
| null | https://raw.githubusercontent.com/jaspervdj/advent-of-code/2795c41f55b6cb9dfa05d49d15c53fe2aeb9afcd/lib/hs/AdventOfCode/Grid/Bounded.hs | haskell | | Simple 2D grids backed by a vector.
# LANGUAGE DeriveFoldable #
# LANGUAGE DeriveFunctor #
# LANGUAGE DeriveTraversable #
# UNPACK #
# UNPACK #
# UNPACK #
# INLINABLE mapWithKey # | # LANGUAGE RecordWildCards #
module AdventOfCode.Grid.Bounded
( G.Dir (..)
, G.turnLeft
, G.turnRight
, G.turnAround
, G.Pos
, G.origin
, G.move
, G.neighbours
, G.diagonal
, G.manhattan
, Grid (..)
, generate
, fromString
, mapWithKey
, lookup
, index
, (!)
, toString
, toList
) where
import qualified AdventOfCode.Grid as G
import AdventOfCode.V2 (V2 (..))
import Control.Monad (when)
import Data.Maybe (fromMaybe)
import qualified Data.Vector as V
import qualified Data.Vector.Extra as V (generate')
import Prelude hiding (lookup)
data Grid a = Grid
} deriving (Eq, Foldable, Functor, Show, Traversable)
emptyGrid :: Grid a
emptyGrid = Grid 0 0 V.empty
generate :: Int -> Int -> (G.Pos -> a) -> Grid a
generate width height f = Grid
{ gridWidth = width
, gridHeight = height
, gridData = V.generate' (width * height) $ \idx ->
let (y, x) = idx `divMod` width in f (V2 x y)
}
fromString :: String -> Either String (Grid Char)
fromString string = case lines string of
[] -> Right emptyGrid
(x : xs) ->
let row = V.fromList x
width = V.length row in
go width [row] xs
where
go width rows [] =
Right $ Grid width (length rows) (V.concat $ reverse rows)
go width rows (x : xs) = do
let row = V.fromList x
when (V.length row /= width) $ Left "row length mismatch"
go width (row : rows) xs
mapWithKey :: (G.Pos -> a -> b) -> Grid a -> Grid b
mapWithKey f Grid {..} = generate gridWidth gridHeight $ \(V2 x y) ->
f (V2 x y) $! V.unsafeIndex gridData (y * gridWidth + x)
lookup :: G.Pos -> Grid a -> Maybe a
lookup (V2 x y) Grid {..}
| x < 0 || x >= gridWidth || y < 0 || y >= gridHeight = Nothing
| otherwise = Just $
V.unsafeIndex gridData (y * gridWidth + x)
index :: G.Pos -> Grid a -> a
index v g = fromMaybe
(error $ "AdventOfCode.Grid.Bounded.index: out of bounds: " ++ show v)
(lookup v g)
(!) :: Grid a -> G.Pos -> a
g ! v = index v g
toString :: Grid Char -> String
toString g@Grid {..} = unlines $ do
y <- [0 .. gridHeight - 1]
[[fromMaybe ' ' $ lookup (V2 x y) g | x <- [0 .. gridWidth - 1]]]
toList :: Grid a -> [(G.Pos, a)]
toList g@Grid {..} = do
y <- [0 .. gridHeight - 1]
x <- [0 .. gridWidth - 1]
pure (V2 x y, index (V2 x y) g)
|
ee105ecfb5a97fd9d029c1c7e3cd1a024d1319cfecac6062fb6b6f3f77daedc3 | herd/herdtools7 | symbValue.mli | (****************************************************************************)
(* the diy toolsuite *)
(* *)
, University College London , UK .
, INRIA Paris - Rocquencourt , France .
(* *)
Copyright 2010 - present Institut National de Recherche en Informatique et
(* en Automatique and the authors. All rights reserved. *)
(* *)
This software is governed by the CeCILL - B license under French law and
(* abiding by the rules of distribution of free software. You can use, *)
modify and/ or redistribute the software under the terms of the CeCILL - B
license as circulated by CEA , CNRS and INRIA at the following URL
" " . We also give a copy in LICENSE.txt .
(****************************************************************************)
(** Operations on symbolic values *)
module Make :
functor (Cst:Constant.S) ->
functor (ArchOp:ArchOp.S
with type scalar = Cst.Scalar.t
and type pteval = Cst.PteVal.t
and type instr = Cst.Instr.t) ->
sig
include Value.S
with module Cst = Cst and type arch_op1 = ArchOp.op1
end
| null | https://raw.githubusercontent.com/herd/herdtools7/b22ec02af1300a45e2b646cce4253ecd4fa7f250/lib/symbValue.mli | ocaml | **************************************************************************
the diy toolsuite
en Automatique and the authors. All rights reserved.
abiding by the rules of distribution of free software. You can use,
**************************************************************************
* Operations on symbolic values | , University College London , UK .
, INRIA Paris - Rocquencourt , France .
Copyright 2010 - present Institut National de Recherche en Informatique et
This software is governed by the CeCILL - B license under French law and
modify and/ or redistribute the software under the terms of the CeCILL - B
license as circulated by CEA , CNRS and INRIA at the following URL
" " . We also give a copy in LICENSE.txt .
module Make :
functor (Cst:Constant.S) ->
functor (ArchOp:ArchOp.S
with type scalar = Cst.Scalar.t
and type pteval = Cst.PteVal.t
and type instr = Cst.Instr.t) ->
sig
include Value.S
with module Cst = Cst and type arch_op1 = ArchOp.op1
end
|
70d76eabb2616413218d1969e486b5c9c84f02851858f9b64e65e8e62e42d21f | dstarcev/stepic-haskell | Task20.hs | module Module4.Task20 where
eitherToMaybe :: Either a b -> Maybe a
eitherToMaybe (Left a) = Just a
eitherToMaybe (Right _) = Nothing
| null | https://raw.githubusercontent.com/dstarcev/stepic-haskell/6a8cf4b3cc17333ac4175e825db57dbe151ebae0/src/Module4/Task20.hs | haskell | module Module4.Task20 where
eitherToMaybe :: Either a b -> Maybe a
eitherToMaybe (Left a) = Just a
eitherToMaybe (Right _) = Nothing
| |
85a23997470c840c20a84791b7136d781373e66ed09f3f7e70a2083d5c046255 | owlbarn/owl_opt | owl_opt.ml | * { 1 Single - precision module }
module S = struct
(** Single-precision vanilla gradient descent (see: {!module: Owl_opt.S.Gd.Make}). *)
module Gd = Gd_s
* Single - precision ( see : { ! module : Owl_opt . S.Adam . Make } ) .
module Adam = Adam_s
* Single - precision Rmsprop ( see : { ! module : Owl_opt . S.Rmsprop . Make } ) .
module Rmsprop = Rmsprop_s
end
(** {1 Double-precision module} *)
module D = struct
(** Double-precision vanilla gradient descent (see: {!module:Owl_opt.D.Gd.Make}). *)
module Gd = Gd_d
* Double - precision ( see : { ! module : Owl_opt . D.Adam . Make } ) .
module Adam = Adam_d
* Double - precision Rmsprop ( see : { ! module : Owl_opt . D.Rmsprop . Make } ) .
module Rmsprop = Rmsprop_d
end
(** {1 Prm module type} *)
module Prms = Prms
* { 1 Learning rate module }
module Lr = Lr
| null | https://raw.githubusercontent.com/owlbarn/owl_opt/c3b34072dddbce2d70e1698c5f1fd84d783f9cef/src/opt/owl_opt.ml | ocaml | * Single-precision vanilla gradient descent (see: {!module: Owl_opt.S.Gd.Make}).
* {1 Double-precision module}
* Double-precision vanilla gradient descent (see: {!module:Owl_opt.D.Gd.Make}).
* {1 Prm module type} | * { 1 Single - precision module }
module S = struct
module Gd = Gd_s
* Single - precision ( see : { ! module : Owl_opt . S.Adam . Make } ) .
module Adam = Adam_s
* Single - precision Rmsprop ( see : { ! module : Owl_opt . S.Rmsprop . Make } ) .
module Rmsprop = Rmsprop_s
end
module D = struct
module Gd = Gd_d
* Double - precision ( see : { ! module : Owl_opt . D.Adam . Make } ) .
module Adam = Adam_d
* Double - precision Rmsprop ( see : { ! module : Owl_opt . D.Rmsprop . Make } ) .
module Rmsprop = Rmsprop_d
end
module Prms = Prms
* { 1 Learning rate module }
module Lr = Lr
|
71b6996f94315c6423a54e9186e7679ec13b889ed4078fa492e52855328a1aa0 | diku-dk/futhark | TypesValues.hs | module Futhark.Internalise.TypesValues
( -- * Internalising types
internaliseReturnType,
internaliseLambdaReturnType,
internaliseEntryReturnType,
internaliseType,
internaliseParamTypes,
internaliseLoopParamType,
internalisePrimType,
internalisedTypeSize,
internaliseSumType,
-- * Internalising values
internalisePrimValue,
)
where
import Control.Monad.State
import Data.Bitraversable (bitraverse)
import Data.List (delete, find, foldl')
import Data.Map.Strict qualified as M
import Data.Maybe
import Futhark.IR.SOACS as I
import Futhark.Internalise.Monad
import Language.Futhark qualified as E
internaliseUniqueness :: E.Uniqueness -> I.Uniqueness
internaliseUniqueness E.Nonunique = I.Nonunique
internaliseUniqueness E.Unique = I.Unique
newtype TypeState = TypeState {typeCounter :: Int}
newtype InternaliseTypeM a
= InternaliseTypeM (State TypeState a)
deriving (Functor, Applicative, Monad, MonadState TypeState)
runInternaliseTypeM :: InternaliseTypeM a -> a
runInternaliseTypeM = runInternaliseTypeM' mempty
runInternaliseTypeM' :: [VName] -> InternaliseTypeM a -> a
runInternaliseTypeM' exts (InternaliseTypeM m) = evalState m $ TypeState (length exts)
internaliseParamTypes ::
[E.TypeBase E.Size ()] ->
InternaliseM [[I.TypeBase Shape Uniqueness]]
internaliseParamTypes ts =
mapM (mapM mkAccCerts) . runInternaliseTypeM $
mapM (fmap (map onType) . internaliseTypeM mempty) ts
where
onType = fromMaybe bad . hasStaticShape
bad = error $ "internaliseParamTypes: " ++ prettyString ts
We need to fix up the arrays for any Acc return values or loop
parameters . We look at the concrete types for this , since the Acc
parameter name in the second list will just be something we made up .
fixupKnownTypes :: [TypeBase shape1 u1] -> [TypeBase shape2 u2] -> [TypeBase shape2 u2]
fixupKnownTypes = zipWith fixup
where
fixup (Acc acc ispace ts _) (Acc _ _ _ u2) = Acc acc ispace ts u2
fixup _ t = t
-- Generate proper certificates for the placeholder accumulator
-- certificates produced by internaliseType (identified with tag 0).
-- Only needed when we cannot use 'fixupKnownTypes'.
mkAccCerts :: TypeBase shape u -> InternaliseM (TypeBase shape u)
mkAccCerts (Array pt shape u) =
pure $ Array pt shape u
mkAccCerts (Acc c shape ts u) =
Acc <$> c' <*> pure shape <*> pure ts <*> pure u
where
c'
| baseTag c == 0 = newVName "acc_cert"
| otherwise = pure c
mkAccCerts t = pure t
internaliseLoopParamType ::
E.TypeBase E.Size () ->
[TypeBase shape u] ->
InternaliseM [I.TypeBase Shape Uniqueness]
internaliseLoopParamType et ts =
fixupKnownTypes ts . concat <$> internaliseParamTypes [et]
internaliseReturnType ::
E.StructRetType ->
[TypeBase shape u] ->
[I.TypeBase ExtShape Uniqueness]
internaliseReturnType (E.RetType dims et) ts =
fixupKnownTypes ts $ runInternaliseTypeM' dims (internaliseTypeM exts et)
where
exts = M.fromList $ zip dims [0 ..]
internaliseLambdaReturnType ::
E.TypeBase E.Size () ->
[TypeBase shape u] ->
InternaliseM [I.TypeBase Shape NoUniqueness]
internaliseLambdaReturnType et ts =
map fromDecl <$> internaliseLoopParamType et ts
-- | As 'internaliseReturnType', but returns components of a top-level
-- tuple type piecemeal.
internaliseEntryReturnType ::
E.StructRetType ->
[[I.TypeBase ExtShape Uniqueness]]
internaliseEntryReturnType (E.RetType dims et) =
runInternaliseTypeM' dims . mapM (internaliseTypeM exts) $
case E.isTupleRecord et of
Just ets | not $ null ets -> ets
_ -> [et]
where
exts = M.fromList $ zip dims [0 ..]
internaliseType ::
E.TypeBase E.Size () ->
[I.TypeBase I.ExtShape Uniqueness]
internaliseType = runInternaliseTypeM . internaliseTypeM mempty
newId :: InternaliseTypeM Int
newId = do
i <- gets typeCounter
modify $ \s -> s {typeCounter = i + 1}
pure i
internaliseDim ::
M.Map VName Int ->
E.Size ->
InternaliseTypeM ExtSize
internaliseDim exts d =
case d of
E.AnySize _ -> Ext <$> newId
E.ConstSize n -> pure $ Free $ intConst I.Int64 $ toInteger n
E.NamedSize name -> pure $ namedDim name
where
namedDim (E.QualName _ name)
| Just x <- name `M.lookup` exts = I.Ext x
| otherwise = I.Free $ I.Var name
internaliseTypeM ::
M.Map VName Int ->
E.StructType ->
InternaliseTypeM [I.TypeBase ExtShape Uniqueness]
internaliseTypeM exts orig_t =
case orig_t of
E.Array _ u shape et -> do
dims <- internaliseShape shape
ets <- internaliseTypeM exts $ E.Scalar et
pure [I.arrayOf et' (Shape dims) $ internaliseUniqueness u | et' <- ets]
E.Scalar (E.Prim bt) ->
pure [I.Prim $ internalisePrimType bt]
E.Scalar (E.Record ets)
-- XXX: we map empty records to units, because otherwise
-- arrays of unit will lose their sizes.
| null ets -> pure [I.Prim I.Unit]
| otherwise ->
concat <$> mapM (internaliseTypeM exts . snd) (E.sortFields ets)
E.Scalar (E.TypeVar _ u tn [E.TypeArgType arr_t _])
| baseTag (E.qualLeaf tn) <= E.maxIntrinsicTag,
baseString (E.qualLeaf tn) == "acc" -> do
ts <- map (fromDecl . onAccType) <$> internaliseTypeM exts arr_t
let acc_param = VName "PLACEHOLDER" 0 -- See mkAccCerts.
acc_t = Acc acc_param (Shape [arraysSize 0 ts]) (map rowType ts) $ internaliseUniqueness u
pure [acc_t]
E.Scalar E.TypeVar {} ->
error $ "internaliseTypeM: cannot handle type variable: " ++ prettyString orig_t
E.Scalar E.Arrow {} ->
error $ "internaliseTypeM: cannot handle function type: " ++ prettyString orig_t
E.Scalar (E.Sum cs) -> do
(ts, _) <-
internaliseConstructors
<$> traverse (fmap concat . mapM (internaliseTypeM exts)) cs
pure $ I.Prim (I.IntType I.Int8) : ts
where
internaliseShape = mapM (internaliseDim exts) . E.shapeDims
onAccType = fromMaybe bad . hasStaticShape
bad = error $ "internaliseTypeM Acc: " ++ prettyString orig_t
internaliseConstructors ::
M.Map Name [I.TypeBase ExtShape Uniqueness] ->
( [I.TypeBase ExtShape Uniqueness],
M.Map Name (Int, [Int])
)
internaliseConstructors cs =
foldl' onConstructor mempty $ zip (E.sortConstrs cs) [0 ..]
where
onConstructor (ts, mapping) ((c, c_ts), i) =
let (_, js, new_ts) =
foldl' f (zip (map fromDecl ts) [0 ..], mempty, mempty) c_ts
in (ts ++ new_ts, M.insert c (i, js) mapping)
where
f (ts', js, new_ts) t
| Just (_, j) <- find ((== fromDecl t) . fst) ts' =
( delete (fromDecl t, j) ts',
js ++ [j],
new_ts
)
| otherwise =
( ts',
js ++ [length ts + length new_ts],
new_ts ++ [t]
)
internaliseSumType ::
M.Map Name [E.StructType] ->
InternaliseM
( [I.TypeBase ExtShape Uniqueness],
M.Map Name (Int, [Int])
)
internaliseSumType cs =
bitraverse (mapM mkAccCerts) pure . runInternaliseTypeM $
internaliseConstructors
<$> traverse (fmap concat . mapM (internaliseTypeM mempty)) cs
| How many core language values are needed to represent one source
-- language value of the given type?
internalisedTypeSize :: E.TypeBase E.Size als -> Int
-- A few special cases for performance.
internalisedTypeSize (E.Scalar (E.Prim _)) = 1
internalisedTypeSize (E.Array _ _ _ (E.Prim _)) = 1
internalisedTypeSize t = length $ internaliseType (t `E.setAliases` ())
-- | Convert an external primitive to an internal primitive.
internalisePrimType :: E.PrimType -> I.PrimType
internalisePrimType (E.Signed t) = I.IntType t
internalisePrimType (E.Unsigned t) = I.IntType t
internalisePrimType (E.FloatType t) = I.FloatType t
internalisePrimType E.Bool = I.Bool
-- | Convert an external primitive value to an internal primitive value.
internalisePrimValue :: E.PrimValue -> I.PrimValue
internalisePrimValue (E.SignedValue v) = I.IntValue v
internalisePrimValue (E.UnsignedValue v) = I.IntValue v
internalisePrimValue (E.FloatValue v) = I.FloatValue v
internalisePrimValue (E.BoolValue b) = I.BoolValue b
| null | https://raw.githubusercontent.com/diku-dk/futhark/98e4a75e4de7042afe030837084764bbf3c6c66e/src/Futhark/Internalise/TypesValues.hs | haskell | * Internalising types
* Internalising values
Generate proper certificates for the placeholder accumulator
certificates produced by internaliseType (identified with tag 0).
Only needed when we cannot use 'fixupKnownTypes'.
| As 'internaliseReturnType', but returns components of a top-level
tuple type piecemeal.
XXX: we map empty records to units, because otherwise
arrays of unit will lose their sizes.
See mkAccCerts.
language value of the given type?
A few special cases for performance.
| Convert an external primitive to an internal primitive.
| Convert an external primitive value to an internal primitive value. | module Futhark.Internalise.TypesValues
internaliseReturnType,
internaliseLambdaReturnType,
internaliseEntryReturnType,
internaliseType,
internaliseParamTypes,
internaliseLoopParamType,
internalisePrimType,
internalisedTypeSize,
internaliseSumType,
internalisePrimValue,
)
where
import Control.Monad.State
import Data.Bitraversable (bitraverse)
import Data.List (delete, find, foldl')
import Data.Map.Strict qualified as M
import Data.Maybe
import Futhark.IR.SOACS as I
import Futhark.Internalise.Monad
import Language.Futhark qualified as E
internaliseUniqueness :: E.Uniqueness -> I.Uniqueness
internaliseUniqueness E.Nonunique = I.Nonunique
internaliseUniqueness E.Unique = I.Unique
newtype TypeState = TypeState {typeCounter :: Int}
newtype InternaliseTypeM a
= InternaliseTypeM (State TypeState a)
deriving (Functor, Applicative, Monad, MonadState TypeState)
runInternaliseTypeM :: InternaliseTypeM a -> a
runInternaliseTypeM = runInternaliseTypeM' mempty
runInternaliseTypeM' :: [VName] -> InternaliseTypeM a -> a
runInternaliseTypeM' exts (InternaliseTypeM m) = evalState m $ TypeState (length exts)
internaliseParamTypes ::
[E.TypeBase E.Size ()] ->
InternaliseM [[I.TypeBase Shape Uniqueness]]
internaliseParamTypes ts =
mapM (mapM mkAccCerts) . runInternaliseTypeM $
mapM (fmap (map onType) . internaliseTypeM mempty) ts
where
onType = fromMaybe bad . hasStaticShape
bad = error $ "internaliseParamTypes: " ++ prettyString ts
We need to fix up the arrays for any Acc return values or loop
parameters . We look at the concrete types for this , since the Acc
parameter name in the second list will just be something we made up .
fixupKnownTypes :: [TypeBase shape1 u1] -> [TypeBase shape2 u2] -> [TypeBase shape2 u2]
fixupKnownTypes = zipWith fixup
where
fixup (Acc acc ispace ts _) (Acc _ _ _ u2) = Acc acc ispace ts u2
fixup _ t = t
mkAccCerts :: TypeBase shape u -> InternaliseM (TypeBase shape u)
mkAccCerts (Array pt shape u) =
pure $ Array pt shape u
mkAccCerts (Acc c shape ts u) =
Acc <$> c' <*> pure shape <*> pure ts <*> pure u
where
c'
| baseTag c == 0 = newVName "acc_cert"
| otherwise = pure c
mkAccCerts t = pure t
internaliseLoopParamType ::
E.TypeBase E.Size () ->
[TypeBase shape u] ->
InternaliseM [I.TypeBase Shape Uniqueness]
internaliseLoopParamType et ts =
fixupKnownTypes ts . concat <$> internaliseParamTypes [et]
internaliseReturnType ::
E.StructRetType ->
[TypeBase shape u] ->
[I.TypeBase ExtShape Uniqueness]
internaliseReturnType (E.RetType dims et) ts =
fixupKnownTypes ts $ runInternaliseTypeM' dims (internaliseTypeM exts et)
where
exts = M.fromList $ zip dims [0 ..]
internaliseLambdaReturnType ::
E.TypeBase E.Size () ->
[TypeBase shape u] ->
InternaliseM [I.TypeBase Shape NoUniqueness]
internaliseLambdaReturnType et ts =
map fromDecl <$> internaliseLoopParamType et ts
internaliseEntryReturnType ::
E.StructRetType ->
[[I.TypeBase ExtShape Uniqueness]]
internaliseEntryReturnType (E.RetType dims et) =
runInternaliseTypeM' dims . mapM (internaliseTypeM exts) $
case E.isTupleRecord et of
Just ets | not $ null ets -> ets
_ -> [et]
where
exts = M.fromList $ zip dims [0 ..]
internaliseType ::
E.TypeBase E.Size () ->
[I.TypeBase I.ExtShape Uniqueness]
internaliseType = runInternaliseTypeM . internaliseTypeM mempty
newId :: InternaliseTypeM Int
newId = do
i <- gets typeCounter
modify $ \s -> s {typeCounter = i + 1}
pure i
internaliseDim ::
M.Map VName Int ->
E.Size ->
InternaliseTypeM ExtSize
internaliseDim exts d =
case d of
E.AnySize _ -> Ext <$> newId
E.ConstSize n -> pure $ Free $ intConst I.Int64 $ toInteger n
E.NamedSize name -> pure $ namedDim name
where
namedDim (E.QualName _ name)
| Just x <- name `M.lookup` exts = I.Ext x
| otherwise = I.Free $ I.Var name
internaliseTypeM ::
M.Map VName Int ->
E.StructType ->
InternaliseTypeM [I.TypeBase ExtShape Uniqueness]
internaliseTypeM exts orig_t =
case orig_t of
E.Array _ u shape et -> do
dims <- internaliseShape shape
ets <- internaliseTypeM exts $ E.Scalar et
pure [I.arrayOf et' (Shape dims) $ internaliseUniqueness u | et' <- ets]
E.Scalar (E.Prim bt) ->
pure [I.Prim $ internalisePrimType bt]
E.Scalar (E.Record ets)
| null ets -> pure [I.Prim I.Unit]
| otherwise ->
concat <$> mapM (internaliseTypeM exts . snd) (E.sortFields ets)
E.Scalar (E.TypeVar _ u tn [E.TypeArgType arr_t _])
| baseTag (E.qualLeaf tn) <= E.maxIntrinsicTag,
baseString (E.qualLeaf tn) == "acc" -> do
ts <- map (fromDecl . onAccType) <$> internaliseTypeM exts arr_t
acc_t = Acc acc_param (Shape [arraysSize 0 ts]) (map rowType ts) $ internaliseUniqueness u
pure [acc_t]
E.Scalar E.TypeVar {} ->
error $ "internaliseTypeM: cannot handle type variable: " ++ prettyString orig_t
E.Scalar E.Arrow {} ->
error $ "internaliseTypeM: cannot handle function type: " ++ prettyString orig_t
E.Scalar (E.Sum cs) -> do
(ts, _) <-
internaliseConstructors
<$> traverse (fmap concat . mapM (internaliseTypeM exts)) cs
pure $ I.Prim (I.IntType I.Int8) : ts
where
internaliseShape = mapM (internaliseDim exts) . E.shapeDims
onAccType = fromMaybe bad . hasStaticShape
bad = error $ "internaliseTypeM Acc: " ++ prettyString orig_t
internaliseConstructors ::
M.Map Name [I.TypeBase ExtShape Uniqueness] ->
( [I.TypeBase ExtShape Uniqueness],
M.Map Name (Int, [Int])
)
internaliseConstructors cs =
foldl' onConstructor mempty $ zip (E.sortConstrs cs) [0 ..]
where
onConstructor (ts, mapping) ((c, c_ts), i) =
let (_, js, new_ts) =
foldl' f (zip (map fromDecl ts) [0 ..], mempty, mempty) c_ts
in (ts ++ new_ts, M.insert c (i, js) mapping)
where
f (ts', js, new_ts) t
| Just (_, j) <- find ((== fromDecl t) . fst) ts' =
( delete (fromDecl t, j) ts',
js ++ [j],
new_ts
)
| otherwise =
( ts',
js ++ [length ts + length new_ts],
new_ts ++ [t]
)
internaliseSumType ::
M.Map Name [E.StructType] ->
InternaliseM
( [I.TypeBase ExtShape Uniqueness],
M.Map Name (Int, [Int])
)
internaliseSumType cs =
bitraverse (mapM mkAccCerts) pure . runInternaliseTypeM $
internaliseConstructors
<$> traverse (fmap concat . mapM (internaliseTypeM mempty)) cs
| How many core language values are needed to represent one source
internalisedTypeSize :: E.TypeBase E.Size als -> Int
internalisedTypeSize (E.Scalar (E.Prim _)) = 1
internalisedTypeSize (E.Array _ _ _ (E.Prim _)) = 1
internalisedTypeSize t = length $ internaliseType (t `E.setAliases` ())
internalisePrimType :: E.PrimType -> I.PrimType
internalisePrimType (E.Signed t) = I.IntType t
internalisePrimType (E.Unsigned t) = I.IntType t
internalisePrimType (E.FloatType t) = I.FloatType t
internalisePrimType E.Bool = I.Bool
internalisePrimValue :: E.PrimValue -> I.PrimValue
internalisePrimValue (E.SignedValue v) = I.IntValue v
internalisePrimValue (E.UnsignedValue v) = I.IntValue v
internalisePrimValue (E.FloatValue v) = I.FloatValue v
internalisePrimValue (E.BoolValue b) = I.BoolValue b
|
3593df1bbc431be4e20bc84b76c06d3cc6dfd15f3ab17b095a8d440c6294eabb | plumatic/grab-bag | docs_test.clj | (ns domain.docs-test
(:use clojure.test plumbing.core plumbing.test domain.doc-test-utils
domain.docs)
(:require
[schema.core :as s]
[plumbing.io :as io]
[domain.docs.fitness-stats :as fitness-stats]
[domain.docs.products :as products]
[domain.docs.views-by-client :as views-by-client]
[domain.interests.indexer :as indexer]
[domain.interests.manager :as interests-manager]
[domain.metadata :as metadata]))
(deftest all-feed-interest-ids-test
(is-=-by set
(map #(apply interests-manager/interest-id %) [[:feed 42]])
(all-feed-interest-ids
(test-doc {:feed-ids [42]}))))
(deftest all-topic-interest-ids-test
(is-=-by set
(map #(apply interests-manager/interest-id %)
[[:topic 1]
[:topic 6]])
(all-topic-interest-ids
(test-doc {:topic-predictions [[1 1.0 1.0] [6 1.0 1.0]]}))))
(deftest test-new-roundtrips
(doseq [[label doc] {"external" (test-external-doc
{:external-shares {:twitter [1 2 3]}
:activity [3 4 {:user-id 5 :action :click :dwell-ms 1000}]
:feed-ids [8 10]
:comments [5]
:tags {:admin ["foo"]}})
"post" (test-post
{:submitter-id 3
:external-shares {:twitter [1 2 3]}
:activity [3 4]
:comments {5 true 6 {7 true 8 [9 10]}}})}]
(is-=-doc doc (-> doc write-doc read-doc))
(is-=-doc doc (-> doc io/to-data io/from-data))))
(deftest dwell-times-test
(is-= [1000 24] (dwell-times (test-external-doc
{:external-shares {:twitter [1 2 3]}
:activity [3 4 {:user-id 5 :action :click :dwell-ms 1000}
{:user-id 5 :action :click :dwell-ms 24}]
:feed-ids [8 10]
:comments [5]
:tags {:admin ["foo"]}}))))
;; Can we read super-old docs, retrofitted from client-docs way back when?
(deftest old-read-test
(let [d (test-external-doc
{:external-shares {:twitter [1 2 3]}
:activity [3 4]
:comments [5]
:topic-predictions nil})
old-data (-> d
write-doc
(dissoc :feed-ids :topic-predictions :experimental :views-by-client)
(assoc :cluster-features nil
:topics [["iPad Apps" 1.9773323452423037]
["Twitter" 1.9686608205344753]]
:ner []))
old-doc (read-doc old-data)]
(is (not (s/check domain.docs.Doc old-doc)))
(is-=-doc old-doc
(-> d
(assoc :feed-ids nil
:cluster-features {}
:topics [["iPad Apps" 1.9773323452423037 42]
["Twitter" 1.9686608205344753 42]])
(assoc-in [:type-info :ner] {:title-entities [] :text-entities []})))
(testing "fixing old images"
;; coercion tested by validation
(let [[s1 s2 s3] (for [i (range 3)] {:width (int i) :height (int i)})]
(doseq [[name [old new]]
{"empty" [nil nil]
"no-in-div" [[{:size s1 :url "u1"}
{:size s2 :url "u2"}]
[{:size s1 :url "u1" :in-div false}
{:size s2 :url "u2" :in-div true}]]
"with-in-div" [[{:size s1 :url "u1" :in-div true}
{:size s2 :url "u2" :in-div false}]
[{:size s1 :url "u1" :in-div true}
{:size s2 :url "u2" :in-div false}]]}]
(testing name
(is-= new
(:images (read-doc (assoc old-data :images old))))))))
(testing "fixing old non-image ints"
;; schema validation confirms conversion
(let [cluster-features {(int 1) 1.0 (int 2) 2.0}
layout {:iphone-retina
{:feed-portrait-full
{:image-key "a" :extension "b" :image-size [(int 2) (int 3)]}}}
doc (read-doc (merge old-data
{:cluster-features cluster-features
:layout layout}))]
(is-= cluster-features (:cluster-features doc))
(is-= layout (:layout (external-info doc)))
))
(testing "fixing html-des"
(is-= nil
(:html-des (external-info (read-doc (assoc (write-doc d) :html-des {:html "<a tag></a>"})))))
(let [h {:html "<a tag></a>" :chunk-offsets [[1 10] [11 200]]}]
(is-= h
(:html-des (external-info (read-doc (assoc (write-doc d) :html-des h)))))))
(testing "adding tags"
(is-= {} (:tags old-data))
(is-= {} (:tags (read-doc (dissoc old-data :tags)))))
(testing "adding metadata"
(is (-> old-data (dissoc :metadata) read-doc (contains? :metadata)))
(let [metadata (metadata/metadata {"meta" ["data"]} [])]
(is-= metadata (-> old-data (assoc :metadata metadata) read-doc :metadata))))))
(deftest read-old-products-test
(let [itunes-product {:product {:name "fish"
:type "animal"
:genres []
:price nil
:rating nil}
:url "fish.com"
:key "01234567"
:source :itunes
:referral-type :internal
:in-div true
:highlights ["gills" "feet"]}
legacy-itunes {:name "fish"
:type "animal"
:key "01234567"
:in-div true
:highlights ["gills" "feet"]
:url "fish.com"
:source :itunes}
legacy-amazon {:title "Dog"
:price "$0.30"
:url "dog.com"
:item-attributes {}
:type :internal}
empty-products (test-external-doc
{:products nil})
written-empty (write-doc empty-products)
with-products (test-external-doc
{:products [itunes-product]})
written-products (assoc (write-doc empty-products)
:products [legacy-itunes])]
(testing "Ignore nil :commerce"
(is-=-doc empty-products
(read-doc (assoc written-empty :commerce nil)))
(is-=-doc (assoc-in empty-products [:type-info :products] [itunes-product])
(read-doc (assoc written-products :commerce nil))))
(testing "Add non-nil :commerce to :products"
(is-= [(products/old-itunes->reference legacy-itunes)
(products/old-amazon->reference legacy-amazon)]
(-> written-empty
(assoc
:commerce legacy-amazon
:products [legacy-itunes])
read-doc
(safe-get-in [:type-info :products])))
(is-= [(products/old-amazon->reference legacy-amazon)]
(-> written-empty
(assoc :commerce legacy-amazon)
read-doc
(safe-get-in [:type-info :products]))))))
;; can we read docs with new keys, and just ignore them?
(deftest forward-compatible-test
(doseq [[t d] {"external" (test-external-doc
{:external-shares {:twitter [1 2 3]}
:activity [3 4]
:comments [5]})
"post" (test-post
{:submitter-id 10
:activity [3 4]
:comments [5]})}]
(testing t
(let [data (write-doc d)
d2 (read-doc (assoc data :an-extra-key "fooo"))]
(is (not (s/check domain.docs.Doc d2)))
(is-=-doc d d2)))))
(deftest type-info-validation
(is-=-by str
`{:type-info {:url (~'not (~'instance? String 1))}}
(s/check domain.docs.Doc
(assoc-in (test-external-doc {})
[:type-info :url] 1))))
(deftest write-doc-test
(let [d (test-external-doc {})
w (write-doc d)]
(testing "empty metadata is not written"
(is (contains? d :metadata))
(is-= (metadata/metadata {} []) (:metadata d))
(is (not (contains? w :metadata))))))
(deftest read-and-write-validation
(is (thrown? Exception (write-doc (assoc-in (test-external-doc {})
[:type-info :url] 1))))
(let [w (write-doc (test-external-doc {}))]
(is (thrown? Exception (read-doc (assoc w :url 1))))))
(deftest upgrade-feed-and-topic-id-test
(with-redefs [interests-manager/index!
(fn [i {:keys [type key]}]
(let [type (keyword type)]
(indexer/index-of
@#'interests-manager/+type-index+ type
(safe-get-in
{:topic {"foo" 99 "bar" 101}
:feed {"ponies.com" 1000}}
[type key]))))]
(is-= {:topic-predictions [99 101]
:feed-ids [1000]}
(-> (test-external-doc
{:topics ["foo" "baz" "bar"]
:domain {:name "ponies.com"}})
write-doc
(dissoc :topic-predictions :feed-ids)
(read-doc ::interest-manager)
(select-keys [:topic-predictions :feed-ids])
(update-in [:topic-predictions] (partial mapv :id))))))
(deftest keep-top-topics-test
(let [process (fn->> (keep-top-topics second)
(map first)
(apply str))]
(doseq [[start result] [[0.651 "mlkjihg"]
[0.45 "ml"]]
:let [topics (map list "abcdefghijklm" (range start 100 0.01))]
t [topics (reverse topics)]]
(is-= result (process t)))))
(deftest top-topic-interest-ids-test
(is-= [10]
(top-topic-interest-ids
(test-doc {:topic-predictions
[{:id 10 :score 0.8 :confidence 1.0}
{:id 5 :score 0.5 :confidence 1.0}]}))))
(deftest clone-writable-fields-test
(let [d (test-external-doc {})
od (-> d write-doc read-doc)
c (clone-writable-fields d)]
(is (= (dissoc d :ranking-features)) (dissoc c :ranking-features))
(fitness-stats/increment-view-count! (:fitness-stats c) 1)
(views-by-client/add-view! (:views-by-client c) :iphone 2)
(is (= (dissoc d :ranking-features) (dissoc od :ranking-features)))
(is (not= (dissoc d :ranking-features) (dissoc c :ranking-features)))
(is (not= (:fitness-stats d) (:fitness-stats c)))
(is (not= (:views-by-client d) (:views-by-client c)))))
(deftest reconcile-fitness-stats-with-views!-test
(let [d (test-external-doc {})
fitness-count 123
client-count 7]
(fitness-stats/increment-view-count! (:fitness-stats d) fitness-count)
(testing "when no client-views, fitness stats remain"
(is-= fitness-count
(-> d write-doc read-doc (safe-get :fitness-stats) fitness-stats/view-count)))
(testing "client-views override fitness-stats when present"
(let [clients [:iphone :android]]
(doseq [client clients]
(dotimes [n client-count]
(views-by-client/add-view! (:views-by-client d) client n)))
(is-= (* (count clients) client-count)
(-> d write-doc read-doc (safe-get :fitness-stats) fitness-stats/view-count))))))
(deftest core-title-test
(is-= "3sfhomesalespriceyouwontbelievetrustme"
(core-title
(test-external-doc
{:title "3 S.F. home sales: Price$ you won't believe - trust me!"}))))
(deftest core-url-test
(is-= "httpwwweastbayexpresscomoakland2015bestof"
(core-url ""))
(is-= "httpwwweastbayexpresscomoakland2015bestof"
(core-url "")))
| null | https://raw.githubusercontent.com/plumatic/grab-bag/a15e943322fbbf6f00790ce5614ba6f90de1a9b5/lib/domain/test/domain/docs_test.clj | clojure | Can we read super-old docs, retrofitted from client-docs way back when?
coercion tested by validation
schema validation confirms conversion
can we read docs with new keys, and just ignore them? | (ns domain.docs-test
(:use clojure.test plumbing.core plumbing.test domain.doc-test-utils
domain.docs)
(:require
[schema.core :as s]
[plumbing.io :as io]
[domain.docs.fitness-stats :as fitness-stats]
[domain.docs.products :as products]
[domain.docs.views-by-client :as views-by-client]
[domain.interests.indexer :as indexer]
[domain.interests.manager :as interests-manager]
[domain.metadata :as metadata]))
(deftest all-feed-interest-ids-test
(is-=-by set
(map #(apply interests-manager/interest-id %) [[:feed 42]])
(all-feed-interest-ids
(test-doc {:feed-ids [42]}))))
(deftest all-topic-interest-ids-test
(is-=-by set
(map #(apply interests-manager/interest-id %)
[[:topic 1]
[:topic 6]])
(all-topic-interest-ids
(test-doc {:topic-predictions [[1 1.0 1.0] [6 1.0 1.0]]}))))
(deftest test-new-roundtrips
(doseq [[label doc] {"external" (test-external-doc
{:external-shares {:twitter [1 2 3]}
:activity [3 4 {:user-id 5 :action :click :dwell-ms 1000}]
:feed-ids [8 10]
:comments [5]
:tags {:admin ["foo"]}})
"post" (test-post
{:submitter-id 3
:external-shares {:twitter [1 2 3]}
:activity [3 4]
:comments {5 true 6 {7 true 8 [9 10]}}})}]
(is-=-doc doc (-> doc write-doc read-doc))
(is-=-doc doc (-> doc io/to-data io/from-data))))
(deftest dwell-times-test
(is-= [1000 24] (dwell-times (test-external-doc
{:external-shares {:twitter [1 2 3]}
:activity [3 4 {:user-id 5 :action :click :dwell-ms 1000}
{:user-id 5 :action :click :dwell-ms 24}]
:feed-ids [8 10]
:comments [5]
:tags {:admin ["foo"]}}))))
(deftest old-read-test
(let [d (test-external-doc
{:external-shares {:twitter [1 2 3]}
:activity [3 4]
:comments [5]
:topic-predictions nil})
old-data (-> d
write-doc
(dissoc :feed-ids :topic-predictions :experimental :views-by-client)
(assoc :cluster-features nil
:topics [["iPad Apps" 1.9773323452423037]
["Twitter" 1.9686608205344753]]
:ner []))
old-doc (read-doc old-data)]
(is (not (s/check domain.docs.Doc old-doc)))
(is-=-doc old-doc
(-> d
(assoc :feed-ids nil
:cluster-features {}
:topics [["iPad Apps" 1.9773323452423037 42]
["Twitter" 1.9686608205344753 42]])
(assoc-in [:type-info :ner] {:title-entities [] :text-entities []})))
(testing "fixing old images"
(let [[s1 s2 s3] (for [i (range 3)] {:width (int i) :height (int i)})]
(doseq [[name [old new]]
{"empty" [nil nil]
"no-in-div" [[{:size s1 :url "u1"}
{:size s2 :url "u2"}]
[{:size s1 :url "u1" :in-div false}
{:size s2 :url "u2" :in-div true}]]
"with-in-div" [[{:size s1 :url "u1" :in-div true}
{:size s2 :url "u2" :in-div false}]
[{:size s1 :url "u1" :in-div true}
{:size s2 :url "u2" :in-div false}]]}]
(testing name
(is-= new
(:images (read-doc (assoc old-data :images old))))))))
(testing "fixing old non-image ints"
(let [cluster-features {(int 1) 1.0 (int 2) 2.0}
layout {:iphone-retina
{:feed-portrait-full
{:image-key "a" :extension "b" :image-size [(int 2) (int 3)]}}}
doc (read-doc (merge old-data
{:cluster-features cluster-features
:layout layout}))]
(is-= cluster-features (:cluster-features doc))
(is-= layout (:layout (external-info doc)))
))
(testing "fixing html-des"
(is-= nil
(:html-des (external-info (read-doc (assoc (write-doc d) :html-des {:html "<a tag></a>"})))))
(let [h {:html "<a tag></a>" :chunk-offsets [[1 10] [11 200]]}]
(is-= h
(:html-des (external-info (read-doc (assoc (write-doc d) :html-des h)))))))
(testing "adding tags"
(is-= {} (:tags old-data))
(is-= {} (:tags (read-doc (dissoc old-data :tags)))))
(testing "adding metadata"
(is (-> old-data (dissoc :metadata) read-doc (contains? :metadata)))
(let [metadata (metadata/metadata {"meta" ["data"]} [])]
(is-= metadata (-> old-data (assoc :metadata metadata) read-doc :metadata))))))
(deftest read-old-products-test
(let [itunes-product {:product {:name "fish"
:type "animal"
:genres []
:price nil
:rating nil}
:url "fish.com"
:key "01234567"
:source :itunes
:referral-type :internal
:in-div true
:highlights ["gills" "feet"]}
legacy-itunes {:name "fish"
:type "animal"
:key "01234567"
:in-div true
:highlights ["gills" "feet"]
:url "fish.com"
:source :itunes}
legacy-amazon {:title "Dog"
:price "$0.30"
:url "dog.com"
:item-attributes {}
:type :internal}
empty-products (test-external-doc
{:products nil})
written-empty (write-doc empty-products)
with-products (test-external-doc
{:products [itunes-product]})
written-products (assoc (write-doc empty-products)
:products [legacy-itunes])]
(testing "Ignore nil :commerce"
(is-=-doc empty-products
(read-doc (assoc written-empty :commerce nil)))
(is-=-doc (assoc-in empty-products [:type-info :products] [itunes-product])
(read-doc (assoc written-products :commerce nil))))
(testing "Add non-nil :commerce to :products"
(is-= [(products/old-itunes->reference legacy-itunes)
(products/old-amazon->reference legacy-amazon)]
(-> written-empty
(assoc
:commerce legacy-amazon
:products [legacy-itunes])
read-doc
(safe-get-in [:type-info :products])))
(is-= [(products/old-amazon->reference legacy-amazon)]
(-> written-empty
(assoc :commerce legacy-amazon)
read-doc
(safe-get-in [:type-info :products]))))))
(deftest forward-compatible-test
(doseq [[t d] {"external" (test-external-doc
{:external-shares {:twitter [1 2 3]}
:activity [3 4]
:comments [5]})
"post" (test-post
{:submitter-id 10
:activity [3 4]
:comments [5]})}]
(testing t
(let [data (write-doc d)
d2 (read-doc (assoc data :an-extra-key "fooo"))]
(is (not (s/check domain.docs.Doc d2)))
(is-=-doc d d2)))))
(deftest type-info-validation
(is-=-by str
`{:type-info {:url (~'not (~'instance? String 1))}}
(s/check domain.docs.Doc
(assoc-in (test-external-doc {})
[:type-info :url] 1))))
(deftest write-doc-test
(let [d (test-external-doc {})
w (write-doc d)]
(testing "empty metadata is not written"
(is (contains? d :metadata))
(is-= (metadata/metadata {} []) (:metadata d))
(is (not (contains? w :metadata))))))
(deftest read-and-write-validation
(is (thrown? Exception (write-doc (assoc-in (test-external-doc {})
[:type-info :url] 1))))
(let [w (write-doc (test-external-doc {}))]
(is (thrown? Exception (read-doc (assoc w :url 1))))))
(deftest upgrade-feed-and-topic-id-test
(with-redefs [interests-manager/index!
(fn [i {:keys [type key]}]
(let [type (keyword type)]
(indexer/index-of
@#'interests-manager/+type-index+ type
(safe-get-in
{:topic {"foo" 99 "bar" 101}
:feed {"ponies.com" 1000}}
[type key]))))]
(is-= {:topic-predictions [99 101]
:feed-ids [1000]}
(-> (test-external-doc
{:topics ["foo" "baz" "bar"]
:domain {:name "ponies.com"}})
write-doc
(dissoc :topic-predictions :feed-ids)
(read-doc ::interest-manager)
(select-keys [:topic-predictions :feed-ids])
(update-in [:topic-predictions] (partial mapv :id))))))
(deftest keep-top-topics-test
(let [process (fn->> (keep-top-topics second)
(map first)
(apply str))]
(doseq [[start result] [[0.651 "mlkjihg"]
[0.45 "ml"]]
:let [topics (map list "abcdefghijklm" (range start 100 0.01))]
t [topics (reverse topics)]]
(is-= result (process t)))))
(deftest top-topic-interest-ids-test
(is-= [10]
(top-topic-interest-ids
(test-doc {:topic-predictions
[{:id 10 :score 0.8 :confidence 1.0}
{:id 5 :score 0.5 :confidence 1.0}]}))))
(deftest clone-writable-fields-test
(let [d (test-external-doc {})
od (-> d write-doc read-doc)
c (clone-writable-fields d)]
(is (= (dissoc d :ranking-features)) (dissoc c :ranking-features))
(fitness-stats/increment-view-count! (:fitness-stats c) 1)
(views-by-client/add-view! (:views-by-client c) :iphone 2)
(is (= (dissoc d :ranking-features) (dissoc od :ranking-features)))
(is (not= (dissoc d :ranking-features) (dissoc c :ranking-features)))
(is (not= (:fitness-stats d) (:fitness-stats c)))
(is (not= (:views-by-client d) (:views-by-client c)))))
(deftest reconcile-fitness-stats-with-views!-test
(let [d (test-external-doc {})
fitness-count 123
client-count 7]
(fitness-stats/increment-view-count! (:fitness-stats d) fitness-count)
(testing "when no client-views, fitness stats remain"
(is-= fitness-count
(-> d write-doc read-doc (safe-get :fitness-stats) fitness-stats/view-count)))
(testing "client-views override fitness-stats when present"
(let [clients [:iphone :android]]
(doseq [client clients]
(dotimes [n client-count]
(views-by-client/add-view! (:views-by-client d) client n)))
(is-= (* (count clients) client-count)
(-> d write-doc read-doc (safe-get :fitness-stats) fitness-stats/view-count))))))
(deftest core-title-test
(is-= "3sfhomesalespriceyouwontbelievetrustme"
(core-title
(test-external-doc
{:title "3 S.F. home sales: Price$ you won't believe - trust me!"}))))
(deftest core-url-test
(is-= "httpwwweastbayexpresscomoakland2015bestof"
(core-url ""))
(is-= "httpwwweastbayexpresscomoakland2015bestof"
(core-url "")))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.