_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7 values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
56a63f7958062ee16131e69db35c452055c3f24b802bcd0089a74c68cbe57b5b | astro/hashvortex | nodespoofer.hs | # LANGUAGE TupleSections #
module Main where
import System.Environment
import Network.Socket (SockAddr(SockAddrInet), PortNumber(PortNum))
import Control.Monad
import Control.Monad.Reader
import Control.Applicative
import Data.Sequence (Seq, ViewL((:<), EmptyL), (|>))
import qualified Data.Sequence as Seq
import Data.Time.Clock.POSIX (getPOSIXTime)
import Data.IORef
import qualified Network.Libev as Ev
import qualified Data.ByteString.Lazy.Char8 as B8
import Foreign (nullFunPtr)
import Data.List (foldl')
import BEncoding (bdictLookup, BValue(BString, BDict, BList))
import qualified Node
import KRPC
import NodeId
import EventLog
import qualified MagnetGrep
-- Convenience alias for libev timestamps (seconds, fractional).
type Time = Ev.EvTimestamp

-- | A DHT peer: its node id together with its UDP address.
data Peer = Peer { peerNodeId :: NodeId,
                   peerAddr :: SockAddr }

-- Upper bound on the pending-query queue; peers beyond this are dropped.
queryQueueMax = 256

-- | Mutable application state: the FIFO of peers waiting to be queried.
data AppState = AppState { stQueryQueue :: Seq Peer }

-- | Read-only context threaded through the 'App' monad.
data AppContext = AppContext { ctxState :: IORef AppState,  -- shared mutable state
                               ctxNode :: IORef Node.Node,  -- the local DHT node
                               ctxTargets :: [SockAddr],    -- addresses advertised in FindNode replies
                               ctxEvLoop :: Ev.EvLoopPtr,   -- libev event loop
                               ctxLogger :: Logger,         -- query logger (EventLog)
                               ctxPort :: PortNumber        -- our own listening port
                             }

-- | Application monad: a reader over the context, on top of IO.
type App a = ReaderT AppContext IO a
-- | Read the current mutable application state out of the context.
getState :: App AppState
getState = asks ctxState >>= liftIO . readIORef
-- | Overwrite the mutable application state held in the context.
putState :: AppState -> App ()
putState app = do
  ref <- asks ctxState
  liftIO $ writeIORef ref app
-- | Current event-loop time (libev's cached timestamp for this loop).
now :: App Ev.EvTimestamp
now = do
  evLoop <- asks ctxEvLoop
  liftIO $ Ev.evNow evLoop
-- | Arm a libev timer that runs @handler@ after @delay@ seconds and then
-- every @repeat@ seconds.  For a one-shot timer (repeat <= 0) the timer and
-- its callback wrapper are freed when it fires, so the foreign resources do
-- not leak.
--
-- Fix: dataset extraction had stripped the comment marker on the line
-- reading \"stop first ?\", leaving bare tokens that broke the inner 'do'
-- block; restored as a comment inside an explicit 'do'.
setTimer :: Ev.EvTimestamp -> Ev.EvTimestamp -> App () -> App ()
setTimer delay repeat handler
    = do ctx <- ask
         let evLoop = ctxEvLoop ctx
             handler' = runReaderT handler ctx
         liftIO $ do evTimer <- Ev.mkEvTimer
                     -- The callback must free its own FunPtr, but that
                     -- pointer only exists after mkTimerCallback returns;
                     -- stash it in an IORef the callback reads back later.
                     evCbRef <- newIORef nullFunPtr
                     evCb <- Ev.mkTimerCallback $ \_evLoop evTimer' _evType ->
                         do when (repeat <= 0) $
                                do -- stop first?  One-shot: release the timer
                                   -- and the callback wrapper before running
                                   -- the handler.
                                   -- NOTE(review): freeing the FunPtr from
                                   -- within its own invocation relies on
                                   -- libev/GHC not touching it afterwards --
                                   -- confirm against Network.Libev docs.
                                   Ev.freeEvTimer evTimer'
                                   cb <- readIORef evCbRef
                                   Ev.freeTimerCallback cb
                            handler'
                     writeIORef evCbRef evCb
                     Ev.evTimerInit evTimer evCb delay repeat
                     Ev.evTimerStart evLoop evTimer
-- | Run the handler once after @delay@ seconds (repeat interval 0).
setTimeout delay
    = setTimer delay 0

-- | Run the handler every @interval@ seconds, starting after one interval.
setInterval interval
    = setTimer interval interval
-- Model
-- | Enqueue a peer for later querying, subject to three admission rules:
-- the queue is below 'queryQueueMax', the peer is not on our own port, and
-- the peer is not one of the configured target addresses.
appendPeer :: Peer -> App ()
appendPeer peer
    = do app <- getState
         myPort <- ctxPort <$> ask
         targets <- ctxTargets <$> ask
         let queue = stQueryQueue app
             -- Reject peers on our own port (likely ourselves) and any
             -- non-IPv4 address.
             portAllowed
                 = case peerAddr peer of
                     SockAddrInet peerPort _ -> peerPort /= myPort
                     _ -> False
             isTarget = peerAddr peer `elem` targets
         when (Seq.length queue < queryQueueMax &&
               portAllowed &&
               not isTarget) $
             let queue' = queue |> peer
             in queue' `seq`  -- force the new Seq to avoid a thunk chain
                putState $ app { stQueryQueue = queue' }
-- | Dequeue the next peer to query, or Nothing if the queue is empty.
popPeer :: App (Maybe Peer)
popPeer = do
  app <- getState
  case Seq.viewl (stQueryQueue app) of
    EmptyL ->
        return Nothing
    peer :< rest ->
        do putState app { stQueryQueue = rest }
           return (Just peer)
-- Query handling
-- Token handed out by get_peers and expected back in announce_peer.  A
-- constant token means announce_peer is effectively never validated.
token = B8.pack "a"

-- | Per-method query handlers.  Each replies with a node id that is a
-- random neighbor of the asker's own id, impersonating a nearby node.

-- Ping: remember the asker for later querying, answer with a spoofed id.
onQuery' addr (Ping nodeId)
    = do appendPeer $ Peer nodeId addr
         nodeId' <- liftIO $ makeRandomNeighbor nodeId
         return $ Right $
                BDict [(BString $ B8.pack "id",
                        BString $ nodeIdToBuf nodeId')]

-- FindNode: answer with up to 8 configured target addresses, each paired
-- with a fresh random neighbor of the searched-for id.
onQuery' addr (FindNode nodeId target)
    = do nodeId' <- liftIO $ makeRandomNeighbor nodeId
         targets <- ctxTargets <$> ask
         nodes <- encodeNodes <$>
                  mapM (\addr ->
                            do target' <- liftIO $ makeRandomNeighbor target
                               return (target', addr)
                       ) (take 8 targets)
         return $ Right $
                BDict [(BString $ B8.pack "id",
                        BString $ nodeIdToBuf nodeId'),
                       (BString $ B8.pack "nodes",
                        BString nodes)]

-- GetPeers: claim an empty peer list plus the constant token.
onQuery' addr (GetPeers nodeId infoHash)
    = do nodeId' <- liftIO $ makeRandomNeighbor nodeId
         return $ Right $
                BDict [(BString $ B8.pack "id",
                        BString $ nodeIdToBuf nodeId'),
                       (BString $ B8.pack "token",
                        BString token),
                       (BString $ B8.pack "values",
                        BList [])]

-- AnnouncePeer: acknowledge without storing anything.
onQuery' addr (AnnouncePeer nodeId infoHash port token)
    = do nodeId' <- liftIO $ makeRandomNeighbor nodeId
         return $ Right $
                BDict [(BString $ B8.pack "id",
                        BString $ nodeIdToBuf nodeId')]

-- Anything else: KRPC error 204 (method unknown).
onQuery' addr _
    = return $ Left $
      Error 204 $ B8.pack "Method Unknown"
-- | Log every incoming query, then dispatch it to the per-method handler.
onQuery addr bvalue q = do
  logger <- asks ctxLogger
  liftIO (logger q)
  onQuery' addr q
-- Reply handling
-- | Reply handler: harvest the compact "nodes" list from FindNode replies
-- and enqueue every advertised peer for querying.
--
-- Fix: the original used 'forM' and then discarded its result with
-- 'return ()'; 'forM_' expresses the same loop without building the list.
onReply addr bvalue reply
    = case reply `bdictLookup` "nodes" of
        Just (BString nodesBuf) ->
            forM_ (decodeNodes nodesBuf) $ \(nodeId, nodeAddr) ->
                appendPeer $ Peer nodeId nodeAddr
        _ ->
            return ()
-- Querying
-- | Periodic work item: send a FindNode to the next queued peer, or --
-- when the queue is empty -- bootstrap via the well-known router node.
query :: App ()
query = do mbPeer <- popPeer
           node <- ctxNode <$> ask >>=
                   liftIO . readIORef
           liftIO $
                case mbPeer of
                  Nothing ->
                      -- Bootstrap path.  NOTE(review): the head pattern
                      -- 'addr : _' crashes if name resolution returns no
                      -- addresses -- confirm that is acceptable here.
                      do addr : _ <- Node.getAddrs "router.bittorrent.com" "6881"
                         target <- makeRandomNodeId
                         target' <- makeRandomNeighbor target
                         let q = FindNode target target'
                         Node.sendQueryNoWait addr q node
                  Just peer ->
                      -- NOTE(review): here the neighbor of the peer's id is
                      -- the first FindNode argument and the random id the
                      -- second, mirroring the branch above -- verify the
                      -- intended sender/target order against KRPC.FindNode.
                      do target <- makeRandomNeighbor $ peerNodeId peer
                         target' <- makeRandomNodeId
                         let q = FindNode target target'
                         Node.sendQueryNoWait (peerAddr peer) q node
-- Main
-- | Resolve a list of (host, port) pairs, keeping the first address of
-- each resolution result.
makeTargets :: [(String, String)] -> IO [SockAddr]
makeTargets = mapM resolveFirst
  where resolveFirst (host, port) = head <$> Node.getAddrs host port
-- | Wire everything together: create the libev loop, logger, DHT node and
-- shared state, install the query/reply handlers, and run the loop forever.
runSpoofer port
    = do evLoop <- Ev.evRecommendedBackends >>=
                   Ev.evDefaultLoop
         log <- newLog evLoop "nodespoofer.data"
         node <- Node.new evLoop port
         let app = AppState { stQueryQueue = Seq.empty }
         appRef <- newIORef app
         nodeRef <- newIORef node
         -- NOTE(review): no targets are configured, so FindNode replies
         -- always encode an empty node list -- confirm this is intended.
         targets <- makeTargets []
         let ctx = AppContext { ctxState = appRef,
                                ctxNode = nodeRef,
                                ctxTargets = targets,
                                ctxEvLoop = evLoop,
                                ctxLogger = log,
                                ctxPort = PortNum $ fromIntegral port
                              }
             -- Run an App action against this fixed context.
             appCall :: App a -> IO a
             appCall f = runReaderT f ctx
             -- Adapt a 3-argument App handler to the IO callback shape
             -- expected by Node.set{Query,Reply}Handler.
             appCallback f a b q = appCall $ f a b q
         Node.setQueryHandler (appCallback onQuery) node
         Node.setReplyHandler (appCallback onReply) node
         appCall $ do
           -- Fire one query every 50 ms.
           setInterval 0.05 $ query
         Ev.evLoop evLoop 0

-- | Entry point: listen on UDP port 10000.
main = runSpoofer 10000
| null | https://raw.githubusercontent.com/astro/hashvortex/ccf32d13bd6057b442eb50c087c43c3870bb5be2/nodespoofer.hs | haskell | Query handling
Reply handling
Querying
Main | # LANGUAGE TupleSections #
module Main where
import System.Environment
import Network.Socket (SockAddr(SockAddrInet), PortNumber(PortNum))
import Control.Monad
import Control.Monad.Reader
import Control.Applicative
import Data.Sequence (Seq, ViewL((:<), EmptyL), (|>))
import qualified Data.Sequence as Seq
import Data.Time.Clock.POSIX (getPOSIXTime)
import Data.IORef
import qualified Network.Libev as Ev
import qualified Data.ByteString.Lazy.Char8 as B8
import Foreign (nullFunPtr)
import Data.List (foldl')
import BEncoding (bdictLookup, BValue(BString, BDict, BList))
import qualified Node
import KRPC
import NodeId
import EventLog
import qualified MagnetGrep
type Time = Ev.EvTimestamp
data Peer = Peer { peerNodeId :: NodeId,
peerAddr :: SockAddr }
queryQueueMax = 256
data AppState = AppState { stQueryQueue :: Seq Peer }
data AppContext = AppContext { ctxState :: IORef AppState,
ctxNode :: IORef Node.Node,
ctxTargets :: [SockAddr],
ctxEvLoop :: Ev.EvLoopPtr,
ctxLogger :: Logger,
ctxPort :: PortNumber
}
type App a = ReaderT AppContext IO a
getState :: App AppState
getState = ctxState <$> ask >>=
liftIO . readIORef
putState :: AppState -> App ()
putState app = do appRef <- ctxState <$> ask
liftIO $ writeIORef appRef app
now :: App Ev.EvTimestamp
now = ctxEvLoop <$> ask >>=
(liftIO . Ev.evNow)
setTimer :: Ev.EvTimestamp -> Ev.EvTimestamp -> App () -> App ()
setTimer delay repeat handler
= do ctx <- ask
let evLoop = ctxEvLoop ctx
handler' = runReaderT handler ctx
liftIO $ do evTimer <- Ev.mkEvTimer
evCbRef <- newIORef nullFunPtr
evCb <- Ev.mkTimerCallback $ \evLoop evTimer evType ->
do when (repeat <= 0) $
stop first ?
Ev.freeEvTimer evTimer
evCb <- readIORef evCbRef
Ev.freeTimerCallback evCb
handler'
writeIORef evCbRef evCb
Ev.evTimerInit evTimer evCb delay repeat
Ev.evTimerStart evLoop evTimer
setTimeout delay
= setTimer delay 0
setInterval interval
= setTimer interval interval
Model
appendPeer :: Peer -> App ()
appendPeer peer
= do app <- getState
myPort <- ctxPort <$> ask
targets <- ctxTargets <$> ask
let queue = stQueryQueue app
portAllowed
= case peerAddr peer of
SockAddrInet peerPort _ -> peerPort /= myPort
_ -> False
isTarget = peerAddr peer `elem` targets
when (Seq.length queue < queryQueueMax &&
portAllowed &&
not isTarget) $
let queue' = queue |> peer
in queue' `seq`
putState $ app { stQueryQueue = queue' }
popPeer :: App (Maybe Peer)
popPeer
= do app <- getState
case Seq.viewl $ stQueryQueue app of
peer :< queue ->
do putState $ app { stQueryQueue = queue }
return $ Just peer
_ ->
return Nothing
token = B8.pack "a"
onQuery' addr (Ping nodeId)
= do appendPeer $ Peer nodeId addr
nodeId' <- liftIO $ makeRandomNeighbor nodeId
return $ Right $
BDict [(BString $ B8.pack "id",
BString $ nodeIdToBuf nodeId')]
onQuery' addr (FindNode nodeId target)
= do nodeId' <- liftIO $ makeRandomNeighbor nodeId
targets <- ctxTargets <$> ask
nodes <- encodeNodes <$>
mapM (\addr ->
do target' <- liftIO $ makeRandomNeighbor target
return (target', addr)
) (take 8 targets)
return $ Right $
BDict [(BString $ B8.pack "id",
BString $ nodeIdToBuf nodeId'),
(BString $ B8.pack "nodes",
BString nodes)]
onQuery' addr (GetPeers nodeId infoHash)
= do nodeId' <- liftIO $ makeRandomNeighbor nodeId
return $ Right $
BDict [(BString $ B8.pack "id",
BString $ nodeIdToBuf nodeId'),
(BString $ B8.pack "token",
BString token),
(BString $ B8.pack "values",
BList [])]
onQuery' addr (AnnouncePeer nodeId infoHash port token)
= do nodeId' <- liftIO $ makeRandomNeighbor nodeId
return $ Right $
BDict [(BString $ B8.pack "id",
BString $ nodeIdToBuf nodeId')]
onQuery' addr _
= return $ Left $
Error 204 $ B8.pack "Method Unknown"
onQuery addr bvalue q
= do logger <- ctxLogger <$> ask
liftIO $ logger q
onQuery' addr q
onReply addr bvalue reply
= case reply `bdictLookup` "nodes" of
Just (BString nodesBuf) ->
do let nodes = decodeNodes nodesBuf
forM nodes $ \(nodeId, addr) ->
appendPeer $ Peer nodeId addr
return ()
_ ->
return ()
query :: App ()
query = do mbPeer <- popPeer
node <- ctxNode <$> ask >>=
liftIO . readIORef
liftIO $
case mbPeer of
Nothing ->
do addr : _ <- Node.getAddrs "router.bittorrent.com" "6881"
target <- makeRandomNodeId
target' <- makeRandomNeighbor target
let q = FindNode target target'
Node.sendQueryNoWait addr q node
Just peer ->
do target <- makeRandomNeighbor $ peerNodeId peer
target' <- makeRandomNodeId
let q = FindNode target target'
Node.sendQueryNoWait (peerAddr peer) q node
makeTargets :: [(String, String)] -> IO [SockAddr]
makeTargets hostsPorts
= forM hostsPorts $ \(host, port) ->
head <$> Node.getAddrs host port
runSpoofer port
= do evLoop <- Ev.evRecommendedBackends >>=
Ev.evDefaultLoop
log <- newLog evLoop "nodespoofer.data"
node <- Node.new evLoop port
let app = AppState { stQueryQueue = Seq.empty }
appRef <- newIORef app
nodeRef <- newIORef node
targets <- makeTargets []
let ctx = AppContext { ctxState = appRef,
ctxNode = nodeRef,
ctxTargets = targets,
ctxEvLoop = evLoop,
ctxLogger = log,
ctxPort = PortNum $ fromIntegral port
}
appCall :: App a -> IO a
appCall f = runReaderT f ctx
appCallback f a b q = appCall $ f a b q
Node.setQueryHandler (appCallback onQuery) node
Node.setReplyHandler (appCallback onReply) node
appCall $ do
setInterval 0.05 $ query
Ev.evLoop evLoop 0
main = runSpoofer 10000
|
5fa38698bf79992745966dc18731984ddefb520510a5aed8faceba35e2be91ea | skanev/playground | 27.scm | EOPL exercise 3.27
;
Add a new kind of procedure called a traceproc to the language . A traceproc
; works exactly like a proc, except that it prints a trace message on entry
; and exit.
(load-relative "cases/proc/env.scm")
; The parser
(define-datatype expression expression?
(const-exp
(num number?))
(diff-exp
(minuend expression?)
(subtrahend expression?))
(zero?-exp
(expr expression?))
(if-exp
(predicate expression?)
(consequent expression?)
(alternative expression?))
(var-exp
(var symbol?))
(let-exp
(var symbol?)
(value expression?)
(body expression?))
(proc-exp
(var symbol?)
(body expression?))
(traceproc-exp
(var symbol?)
(body expression?))
(call-exp
(rator expression?)
(rand expression?)))
(define scanner-spec
'((white-sp (whitespace) skip)
(comment ("%" (arbno (not #\newline))) skip)
(identifier (letter (arbno (or letter digit))) symbol)
(number (digit (arbno digit)) number)))
(define grammar
'((expression (number) const-exp)
(expression ("-" "(" expression "," expression ")") diff-exp)
(expression ("zero?" "(" expression ")") zero?-exp)
(expression ("if" expression "then" expression "else" expression) if-exp)
(expression (identifier) var-exp)
(expression ("proc" "(" identifier ")" expression) proc-exp)
(expression ("traceproc" "(" identifier ")" expression) traceproc-exp)
(expression ("let" identifier "=" expression "in" expression) let-exp)
(expression ("(" expression expression ")") call-exp)))
(define scan&parse
(sllgen:make-string-parser scanner-spec grammar))
; The evaluator
(define-datatype proc proc?
(procedure
(var symbol?)
(body expression?)
(saved-env environment?)
(trace? boolean?)))
(define (apply-procedure proc1 val)
(cases proc proc1
(procedure (var body saved-env trace?)
(when trace? (printf "enter: ~a = ~v\n" var val))
(let ((result (value-of body (extend-env var val saved-env))))
(when trace? (printf "exit: ~a\n" var))
result))))
(define-datatype expval expval?
(num-val
(num number?))
(bool-val
(bool boolean?))
(proc-val
(proc proc?)))
(define (expval->num val)
(cases expval val
(num-val (num) num)
(else (eopl:error 'expval->num "Invalid number: ~s" val))))
(define (expval->bool val)
(cases expval val
(bool-val (bool) bool)
(else (eopl:error 'expval->bool "Invalid boolean: ~s" val))))
(define (expval->proc val)
(cases expval val
(proc-val (proc) proc)
(else (eopl:error 'expval->proc "Invalid procedure: ~s" val))))
(define (value-of expr env)
(cases expression expr
(const-exp (num) (num-val num))
(var-exp (var) (apply-env env var))
(diff-exp (minuend subtrahend)
(let ((minuend-val (value-of minuend env))
(subtrahend-val (value-of subtrahend env)))
(let ((minuend-num (expval->num minuend-val))
(subtrahend-num (expval->num subtrahend-val)))
(num-val
(- minuend-num subtrahend-num)))))
(zero?-exp (arg)
(let ((value (value-of arg env)))
(let ((number (expval->num value)))
(if (zero? number)
(bool-val #t)
(bool-val #f)))))
(if-exp (predicate consequent alternative)
(let ((value (value-of predicate env)))
(if (expval->bool value)
(value-of consequent env)
(value-of alternative env))))
(let-exp (var value-exp body)
(let ((value (value-of value-exp env)))
(value-of body
(extend-env var value env))))
(proc-exp (var body)
(proc-val (procedure var body env #f)))
(traceproc-exp (var body)
(proc-val (procedure var body env #t)))
(call-exp (rator rand)
(let ((proc (expval->proc (value-of rator env)))
(arg (value-of rand env)))
(apply-procedure proc arg)))))
| null | https://raw.githubusercontent.com/skanev/playground/d88e53a7f277b35041c2f709771a0b96f993b310/scheme/eopl/03/27.scm | scheme |
works exactly like a proc, except that it prints a trace message on entry
and exit.
The parser
The evaluator | EOPL exercise 3.27
Add a new kind of procedure called a traceproc to the language . A traceproc
(load-relative "cases/proc/env.scm")
(define-datatype expression expression?
(const-exp
(num number?))
(diff-exp
(minuend expression?)
(subtrahend expression?))
(zero?-exp
(expr expression?))
(if-exp
(predicate expression?)
(consequent expression?)
(alternative expression?))
(var-exp
(var symbol?))
(let-exp
(var symbol?)
(value expression?)
(body expression?))
(proc-exp
(var symbol?)
(body expression?))
(traceproc-exp
(var symbol?)
(body expression?))
(call-exp
(rator expression?)
(rand expression?)))
(define scanner-spec
'((white-sp (whitespace) skip)
(comment ("%" (arbno (not #\newline))) skip)
(identifier (letter (arbno (or letter digit))) symbol)
(number (digit (arbno digit)) number)))
(define grammar
'((expression (number) const-exp)
(expression ("-" "(" expression "," expression ")") diff-exp)
(expression ("zero?" "(" expression ")") zero?-exp)
(expression ("if" expression "then" expression "else" expression) if-exp)
(expression (identifier) var-exp)
(expression ("proc" "(" identifier ")" expression) proc-exp)
(expression ("traceproc" "(" identifier ")" expression) traceproc-exp)
(expression ("let" identifier "=" expression "in" expression) let-exp)
(expression ("(" expression expression ")") call-exp)))
(define scan&parse
(sllgen:make-string-parser scanner-spec grammar))
(define-datatype proc proc?
(procedure
(var symbol?)
(body expression?)
(saved-env environment?)
(trace? boolean?)))
(define (apply-procedure proc1 val)
(cases proc proc1
(procedure (var body saved-env trace?)
(when trace? (printf "enter: ~a = ~v\n" var val))
(let ((result (value-of body (extend-env var val saved-env))))
(when trace? (printf "exit: ~a\n" var))
result))))
(define-datatype expval expval?
(num-val
(num number?))
(bool-val
(bool boolean?))
(proc-val
(proc proc?)))
(define (expval->num val)
(cases expval val
(num-val (num) num)
(else (eopl:error 'expval->num "Invalid number: ~s" val))))
(define (expval->bool val)
(cases expval val
(bool-val (bool) bool)
(else (eopl:error 'expval->bool "Invalid boolean: ~s" val))))
(define (expval->proc val)
(cases expval val
(proc-val (proc) proc)
(else (eopl:error 'expval->proc "Invalid procedure: ~s" val))))
(define (value-of expr env)
(cases expression expr
(const-exp (num) (num-val num))
(var-exp (var) (apply-env env var))
(diff-exp (minuend subtrahend)
(let ((minuend-val (value-of minuend env))
(subtrahend-val (value-of subtrahend env)))
(let ((minuend-num (expval->num minuend-val))
(subtrahend-num (expval->num subtrahend-val)))
(num-val
(- minuend-num subtrahend-num)))))
(zero?-exp (arg)
(let ((value (value-of arg env)))
(let ((number (expval->num value)))
(if (zero? number)
(bool-val #t)
(bool-val #f)))))
(if-exp (predicate consequent alternative)
(let ((value (value-of predicate env)))
(if (expval->bool value)
(value-of consequent env)
(value-of alternative env))))
(let-exp (var value-exp body)
(let ((value (value-of value-exp env)))
(value-of body
(extend-env var value env))))
(proc-exp (var body)
(proc-val (procedure var body env #f)))
(traceproc-exp (var body)
(proc-val (procedure var body env #t)))
(call-exp (rator rand)
(let ((proc (expval->proc (value-of rator env)))
(arg (value-of rand env)))
(apply-procedure proc arg)))))
|
b983c3fe56d17d7e6f331d239ef7b7e1fa9fe41fc4908625a124f3694a3b7896 | agentbellnorm/dativity | define.cljc | (ns dativity.define
(:require [ysera.test :refer [is= is is-not error?]]
[ysera.error :refer [error]]
[clojure.spec.alpha :as s]
[dativity.graph :as graph]))
(defn empty-process-model
[]
(graph/empty-graph))
(defn action
[name]
[name {:type :action}])
(defn data
[name]
[name {:type :data}])
(defn role
[name]
[name {:type :role}])
(defn action-produces
[action creates]
[action creates {:association :produces}])
(defn action-requires
[action prereq]
[action prereq {:association :requires}])
(defn action-requires-conditional
"condition fn can assume that the data exists"
[action prereq predicate data-parameter]
[action prereq {:association :requires-conditional
:condition predicate
:data-parameter data-parameter}])
(defn role-performs
[role action]
[role action {:association :performs}])
(defn add-entity-to-model
{:test (fn []
(is= 2 (-> (empty-process-model)
(add-entity-to-model (action :add-customer-information))
(add-entity-to-model (action :add-phone-number))
(graph/count-nodes))))}
[model node]
(graph/add-node-with-attrs model node))
(defn add-relationship-to-model
{:test (fn []
(let [graph (-> (empty-process-model)
(add-entity-to-model (action :thing-to-do))
(add-entity-to-model (data :thing-to-know))
(add-relationship-to-model (action-produces :thing-to-do :thing-to-know)))]
(is= 1 (graph/count-edges graph))
(is= 2 (graph/count-nodes graph))))}
[case relationship]
(graph/add-directed-edge case relationship))
;; below is code related to creating the model via create-model.
(defn contains-it?
[it coll]
(some #{it} coll))
(defn- error-when-missing
{:test (fn []
(is (error? (error-when-missing :a [] "error!!")))
(is (nil? (error-when-missing :a [:a] "error!!"))))}
[needle haystack err-msg]
(when-not (contains-it? needle haystack)
(error err-msg)))
(defn- validate-relationships
[{:keys [actions data roles action-produces action-requires action-requires-conditional role-performs]}]
(doseq [[action produces] action-produces]
(let [relationship-string (str "[" action " produces " produces "]: ")]
(error-when-missing action actions (str "Error when parsing relationship " relationship-string action " is not a defined action"))
(error-when-missing produces data (str "Error when parsing relationship " relationship-string data " is not a defined data"))))
(doseq [[action requires] action-requires]
(let [relationship-string (str "[" action " requires " requires "]: ")]
(error-when-missing action actions (str "Error when parsing relationship " relationship-string action " is not a defined action"))
(error-when-missing requires data (str "Error when parsing relationship " relationship-string requires " is not a defined data"))))
(doseq [[role performs] role-performs]
(let [relationship-string (str "[" role " performs " performs "]: ")]
(error-when-missing role roles (str "Error when parsing relationship " relationship-string role " is not a defined role"))
(error-when-missing performs actions (str "Error when parsing relationship " relationship-string performs " is not a defined action"))))
(doseq [{:keys [action requires condition-argument]} action-requires-conditional]
(let [relationship-string (str "[" action " conditionally requires " requires " depending on " condition-argument "]: ")]
(error-when-missing action actions (str "Error when parsing relationship " relationship-string action " is not a defined action"))
(error-when-missing requires data (str "Error when parsing relationship " relationship-string requires " is not a defined data"))
(error-when-missing condition-argument data (str "Error when parsing relationship " relationship-string condition-argument " is not a defined data"))))
true)
(s/def ::relationship (s/coll-of keyword? :kind vector? :count 2))
(s/def ::actions (s/coll-of keyword? :kind vector?))
(s/def ::data (s/coll-of keyword? :kind vector?))
(s/def ::roles (s/coll-of keyword? :kind vector?))
(s/def ::action-produces (s/coll-of ::relationship :kind vector?))
(s/def ::action-requires (s/coll-of ::relationship :kind vector?))
(s/def ::role-performs (s/coll-of ::relationship :kind vector?))
(s/def ::action keyword?)
(s/def ::requires keyword?)
(s/def ::condition fn?)
(s/def ::condition-argument keyword?)
(s/def ::action-requires-conditional-item (s/keys :req-un [::action
::requires
::condition
::condition-argument]))
(s/def ::action-requires-conditional (s/coll-of ::action-requires-conditional-item
:kind vector?
:distinct true))
(s/def ::model-input (s/keys :req-un [::actions
::data
::roles
::action-produces
::action-requires
::action-requires-conditional
::role-performs]))
(defn- validate-spec-and-rules
[input]
(when-not (s/valid? ::model-input input)
(error (s/explain-str ::model-input input)))
(validate-relationships input))
(defn create-model
"Creates a process model to be used by core functions.
Takes a map with a strict structure as input"
{:test (fn []
(is (create-model {:actions [:call-mom
:call-dad
:call-grandma]
:data [:mom-number
:mom-info
:dad-info]
:roles [:me]
:action-produces [[:call-mom :mom-info]
[:call-dad :dad-info]]
:action-requires [[:call-mom :mom-number]
[:call-dad :mom-info]]
:action-requires-conditional [{:action :call-grandma
:requires :dad-info
:condition (fn [mom-info]
(not (:grandma-number mom-info)))
:condition-argument :mom-info}]
:role-performs [[:me :call-dad]
[:me :call-mom]
[:me :call-grandma]]})))}
[arg-map]
{:pre [(validate-spec-and-rules arg-map)]}
(let [actions-arg (:actions arg-map)
data-arg (:data arg-map)
roles-arg (:roles arg-map)
action-produces-arg (:action-produces arg-map)
action-requires-arg (:action-requires arg-map)
action-requires-conditional-arg (:action-requires-conditional arg-map)
role-performs-arg (:role-performs arg-map)]
(as-> (empty-process-model) model
(reduce (fn [acc input-action]
(add-entity-to-model acc (action input-action))) model actions-arg)
(reduce (fn [acc input-data]
(add-entity-to-model acc (data input-data))) model data-arg)
(reduce (fn [acc input-role]
(add-entity-to-model acc (role input-role))) model roles-arg)
(reduce (fn [acc [action produces]]
(add-relationship-to-model acc (action-produces action produces))) model action-produces-arg)
(reduce (fn [acc [action requires]]
(add-relationship-to-model acc (action-requires action requires))) model action-requires-arg)
(reduce (fn [acc [role performs]]
(add-relationship-to-model acc (role-performs role performs))) model role-performs-arg)
(reduce (fn [acc {:keys [action requires condition condition-argument]}]
(add-relationship-to-model acc (action-requires-conditional action requires condition condition-argument))) model action-requires-conditional-arg))))
| null | https://raw.githubusercontent.com/agentbellnorm/dativity/8d03d0a1ee7bf48397d900faef6f119649d95c83/src/dativity/define.cljc | clojure | below is code related to creating the model via create-model. | (ns dativity.define
(:require [ysera.test :refer [is= is is-not error?]]
[ysera.error :refer [error]]
[clojure.spec.alpha :as s]
[dativity.graph :as graph]))
(defn empty-process-model
[]
(graph/empty-graph))
(defn action
[name]
[name {:type :action}])
(defn data
[name]
[name {:type :data}])
(defn role
[name]
[name {:type :role}])
(defn action-produces
[action creates]
[action creates {:association :produces}])
(defn action-requires
[action prereq]
[action prereq {:association :requires}])
(defn action-requires-conditional
"condition fn can assume that the data exists"
[action prereq predicate data-parameter]
[action prereq {:association :requires-conditional
:condition predicate
:data-parameter data-parameter}])
(defn role-performs
[role action]
[role action {:association :performs}])
(defn add-entity-to-model
{:test (fn []
(is= 2 (-> (empty-process-model)
(add-entity-to-model (action :add-customer-information))
(add-entity-to-model (action :add-phone-number))
(graph/count-nodes))))}
[model node]
(graph/add-node-with-attrs model node))
(defn add-relationship-to-model
{:test (fn []
(let [graph (-> (empty-process-model)
(add-entity-to-model (action :thing-to-do))
(add-entity-to-model (data :thing-to-know))
(add-relationship-to-model (action-produces :thing-to-do :thing-to-know)))]
(is= 1 (graph/count-edges graph))
(is= 2 (graph/count-nodes graph))))}
[case relationship]
(graph/add-directed-edge case relationship))
(defn contains-it?
[it coll]
(some #{it} coll))
(defn- error-when-missing
{:test (fn []
(is (error? (error-when-missing :a [] "error!!")))
(is (nil? (error-when-missing :a [:a] "error!!"))))}
[needle haystack err-msg]
(when-not (contains-it? needle haystack)
(error err-msg)))
(defn- validate-relationships
[{:keys [actions data roles action-produces action-requires action-requires-conditional role-performs]}]
(doseq [[action produces] action-produces]
(let [relationship-string (str "[" action " produces " produces "]: ")]
(error-when-missing action actions (str "Error when parsing relationship " relationship-string action " is not a defined action"))
(error-when-missing produces data (str "Error when parsing relationship " relationship-string data " is not a defined data"))))
(doseq [[action requires] action-requires]
(let [relationship-string (str "[" action " requires " requires "]: ")]
(error-when-missing action actions (str "Error when parsing relationship " relationship-string action " is not a defined action"))
(error-when-missing requires data (str "Error when parsing relationship " relationship-string requires " is not a defined data"))))
(doseq [[role performs] role-performs]
(let [relationship-string (str "[" role " performs " performs "]: ")]
(error-when-missing role roles (str "Error when parsing relationship " relationship-string role " is not a defined role"))
(error-when-missing performs actions (str "Error when parsing relationship " relationship-string performs " is not a defined action"))))
(doseq [{:keys [action requires condition-argument]} action-requires-conditional]
(let [relationship-string (str "[" action " conditionally requires " requires " depending on " condition-argument "]: ")]
(error-when-missing action actions (str "Error when parsing relationship " relationship-string action " is not a defined action"))
(error-when-missing requires data (str "Error when parsing relationship " relationship-string requires " is not a defined data"))
(error-when-missing condition-argument data (str "Error when parsing relationship " relationship-string condition-argument " is not a defined data"))))
true)
(s/def ::relationship (s/coll-of keyword? :kind vector? :count 2))
(s/def ::actions (s/coll-of keyword? :kind vector?))
(s/def ::data (s/coll-of keyword? :kind vector?))
(s/def ::roles (s/coll-of keyword? :kind vector?))
(s/def ::action-produces (s/coll-of ::relationship :kind vector?))
(s/def ::action-requires (s/coll-of ::relationship :kind vector?))
(s/def ::role-performs (s/coll-of ::relationship :kind vector?))
(s/def ::action keyword?)
(s/def ::requires keyword?)
(s/def ::condition fn?)
(s/def ::condition-argument keyword?)
(s/def ::action-requires-conditional-item (s/keys :req-un [::action
::requires
::condition
::condition-argument]))
(s/def ::action-requires-conditional (s/coll-of ::action-requires-conditional-item
:kind vector?
:distinct true))
(s/def ::model-input (s/keys :req-un [::actions
::data
::roles
::action-produces
::action-requires
::action-requires-conditional
::role-performs]))
(defn- validate-spec-and-rules
[input]
(when-not (s/valid? ::model-input input)
(error (s/explain-str ::model-input input)))
(validate-relationships input))
(defn create-model
"Creates a process model to be used by core functions.
Takes a map with a strict structure as input"
{:test (fn []
(is (create-model {:actions [:call-mom
:call-dad
:call-grandma]
:data [:mom-number
:mom-info
:dad-info]
:roles [:me]
:action-produces [[:call-mom :mom-info]
[:call-dad :dad-info]]
:action-requires [[:call-mom :mom-number]
[:call-dad :mom-info]]
:action-requires-conditional [{:action :call-grandma
:requires :dad-info
:condition (fn [mom-info]
(not (:grandma-number mom-info)))
:condition-argument :mom-info}]
:role-performs [[:me :call-dad]
[:me :call-mom]
[:me :call-grandma]]})))}
[arg-map]
{:pre [(validate-spec-and-rules arg-map)]}
(let [actions-arg (:actions arg-map)
data-arg (:data arg-map)
roles-arg (:roles arg-map)
action-produces-arg (:action-produces arg-map)
action-requires-arg (:action-requires arg-map)
action-requires-conditional-arg (:action-requires-conditional arg-map)
role-performs-arg (:role-performs arg-map)]
(as-> (empty-process-model) model
(reduce (fn [acc input-action]
(add-entity-to-model acc (action input-action))) model actions-arg)
(reduce (fn [acc input-data]
(add-entity-to-model acc (data input-data))) model data-arg)
(reduce (fn [acc input-role]
(add-entity-to-model acc (role input-role))) model roles-arg)
(reduce (fn [acc [action produces]]
(add-relationship-to-model acc (action-produces action produces))) model action-produces-arg)
(reduce (fn [acc [action requires]]
(add-relationship-to-model acc (action-requires action requires))) model action-requires-arg)
(reduce (fn [acc [role performs]]
(add-relationship-to-model acc (role-performs role performs))) model role-performs-arg)
(reduce (fn [acc {:keys [action requires condition condition-argument]}]
(add-relationship-to-model acc (action-requires-conditional action requires condition condition-argument))) model action-requires-conditional-arg))))
|
7dd80ae044ca5437f74b05d57f51746bc39b5c75420e3cc67d358e875ef45365 | clojure-interop/aws-api | project.clj | (defproject clojure-interop/com.amazonaws.services.simpleworkflow "1.0.0"
:description "Clojure to Java Interop Bindings for com.amazonaws.services.simpleworkflow"
:url "-interop/aws-api"
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.8.0"]]
:source-paths ["src"])
| null | https://raw.githubusercontent.com/clojure-interop/aws-api/59249b43d3bfaff0a79f5f4f8b7bc22518a3bf14/com.amazonaws.services.simpleworkflow/project.clj | clojure | (defproject clojure-interop/com.amazonaws.services.simpleworkflow "1.0.0"
:description "Clojure to Java Interop Bindings for com.amazonaws.services.simpleworkflow"
:url "-interop/aws-api"
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.8.0"]]
:source-paths ["src"])
| |
7d8a903a7700323d0e57606a6c5e422bca0e463caf682ef5ee4033f18b4dde4e | static-analysis-engineering/codehawk | jCHFunctionSummary.mli | = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Java Analyzer
Author : ------------------------------------------------------------------------------
The MIT License ( MIT )
Copyright ( c ) 2005 - 2020 Kestrel Technology LLC
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Java Analyzer
Author: Henny Sipma
------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) 2005-2020 Kestrel Technology LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================================= *)
chlib
open CHPretty
open CHNumerical
(* chutil *)
open CHXmlDocument
(* jchlib *)
open JCHBasicTypesAPI
(* jchpre *)
open JCHPreAPI
val no_taint_info: taint_int
val precondition_predicate_to_pretty: precondition_predicate_t -> pretty_t
val precondition_predicate_to_xml :
precondition_predicate_t -> method_signature_int -> xml_element_int
val make_postcondition:
?name:string -> bool -> postcondition_predicate_t -> postcondition_int
val sideeffect_to_pretty: sideeffect_t -> pretty_t
val write_xml_sideeffect:
xml_element_int -> sideeffect_t -> method_signature_int -> unit
val resource_type_to_string: resource_type_t -> string
val get_taint_element_class_dependencies: taint_element_t -> class_name_int list
val make_taint: taint_element_t list -> taint_int
val make_string_sink:
int -> string -> string -> class_name_int list -> string_sink_int
val make_resource_sink: int -> resource_type_t -> resource_sink_int
val make_exception_info:
?safe:precondition_predicate_t list ->
?unsafe:precondition_predicate_t list ->
?descr:string ->
class_name_int -> exception_info_int
val make_function_summary:
?is_static:bool ->
?is_final:bool ->
?is_abstract:bool ->
?is_inherited:bool ->
?is_default:bool ->
?is_valid:bool ->
?defining_method:class_method_signature_int option ->
?is_bridge:bool ->
?visibility:access_t ->
?exception_infos:exception_info_int list ->
?post:postcondition_int list ->
?sideeffects:sideeffect_t list ->
?taint:taint_int ->
?virtual_calls:class_method_signature_int list ->
?interface_calls:class_method_signature_int list ->
?resource_sinks:resource_sink_int list ->
?string_sinks:string_sink_int list ->
?pattern:bc_action_t option ->
?time_cost:jterm_range_int ->
?space_cost:jterm_range_int -> class_method_signature_int -> function_summary_int
| null | https://raw.githubusercontent.com/static-analysis-engineering/codehawk/98ced4d5e6d7989575092df232759afc2cb851f6/CodeHawk/CHJ/jchpre/jCHFunctionSummary.mli | ocaml | chutil
jchlib
jchpre | = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Java Analyzer
Author : ------------------------------------------------------------------------------
The MIT License ( MIT )
Copyright ( c ) 2005 - 2020 Kestrel Technology LLC
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Java Analyzer
Author: Henny Sipma
------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) 2005-2020 Kestrel Technology LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================================= *)
chlib
open CHPretty
open CHNumerical
open CHXmlDocument
open JCHBasicTypesAPI
open JCHPreAPI
val no_taint_info: taint_int
val precondition_predicate_to_pretty: precondition_predicate_t -> pretty_t
val precondition_predicate_to_xml :
precondition_predicate_t -> method_signature_int -> xml_element_int
val make_postcondition:
?name:string -> bool -> postcondition_predicate_t -> postcondition_int
val sideeffect_to_pretty: sideeffect_t -> pretty_t
val write_xml_sideeffect:
xml_element_int -> sideeffect_t -> method_signature_int -> unit
val resource_type_to_string: resource_type_t -> string
val get_taint_element_class_dependencies: taint_element_t -> class_name_int list
val make_taint: taint_element_t list -> taint_int
val make_string_sink:
int -> string -> string -> class_name_int list -> string_sink_int
val make_resource_sink: int -> resource_type_t -> resource_sink_int
val make_exception_info:
?safe:precondition_predicate_t list ->
?unsafe:precondition_predicate_t list ->
?descr:string ->
class_name_int -> exception_info_int
val make_function_summary:
?is_static:bool ->
?is_final:bool ->
?is_abstract:bool ->
?is_inherited:bool ->
?is_default:bool ->
?is_valid:bool ->
?defining_method:class_method_signature_int option ->
?is_bridge:bool ->
?visibility:access_t ->
?exception_infos:exception_info_int list ->
?post:postcondition_int list ->
?sideeffects:sideeffect_t list ->
?taint:taint_int ->
?virtual_calls:class_method_signature_int list ->
?interface_calls:class_method_signature_int list ->
?resource_sinks:resource_sink_int list ->
?string_sinks:string_sink_int list ->
?pattern:bc_action_t option ->
?time_cost:jterm_range_int ->
?space_cost:jterm_range_int -> class_method_signature_int -> function_summary_int
|
da9fce87ce520b17d04076928e7337d158ac3b8269f505ee5b26d99ca696a31c | bloomberg/blpapi-hs | NameImpl.hs | # LANGUAGE ForeignFunctionInterface #
|
Module : Finance . Blpapi . Impl . NameImpl
Description : FFI for Name
Copyright : Bloomberg Finance L.P.
License : MIT
Maintainer :
Stability : experimental
Portability : * nix , windows
Module : Finance.Blpapi.Impl.NameImpl
Description : FFI for Name
Copyright : Bloomberg Finance L.P.
License : MIT
Maintainer :
Stability : experimental
Portability : *nix, windows
-}
module Finance.Blpapi.Impl.NameImpl where
import Foreign hiding (unsafePerformIO)
import Foreign.C.String
import System.IO.Unsafe (unsafePerformIO)
newtype NameImpl = NameImpl (Ptr NameImpl)
foreign import ccall safe "blpapi_name.h blpapi_Name_create"
blpapi_Name_create :: CString -> IO (Ptr NameImpl)
foreign import ccall safe "blpapi_name.h blpapi_Name_destroy"
blpapi_Name_destroy:: Ptr NameImpl -> IO ()
foreign import ccall safe "blpapi_name.h blpapi_Name_string"
blpapi_Name_string:: Ptr NameImpl -> CString
foreign import ccall safe "blpapi_name.h blpapi_Name_findName"
blpapi_Name_findName:: CString -> IO (Ptr NameImpl)
nameImplToString :: Ptr NameImpl -> String
nameImplToString ptr = unsafePerformIO $ peekCString (blpapi_Name_string ptr)
| null | https://raw.githubusercontent.com/bloomberg/blpapi-hs/a4bdff86f3febcf8b06cbc70466c8abc177b973a/src/Finance/Blpapi/Impl/NameImpl.hs | haskell | # LANGUAGE ForeignFunctionInterface #
|
Module : Finance . Blpapi . Impl . NameImpl
Description : FFI for Name
Copyright : Bloomberg Finance L.P.
License : MIT
Maintainer :
Stability : experimental
Portability : * nix , windows
Module : Finance.Blpapi.Impl.NameImpl
Description : FFI for Name
Copyright : Bloomberg Finance L.P.
License : MIT
Maintainer :
Stability : experimental
Portability : *nix, windows
-}
module Finance.Blpapi.Impl.NameImpl where
import Foreign hiding (unsafePerformIO)
import Foreign.C.String
import System.IO.Unsafe (unsafePerformIO)
newtype NameImpl = NameImpl (Ptr NameImpl)
foreign import ccall safe "blpapi_name.h blpapi_Name_create"
blpapi_Name_create :: CString -> IO (Ptr NameImpl)
foreign import ccall safe "blpapi_name.h blpapi_Name_destroy"
blpapi_Name_destroy:: Ptr NameImpl -> IO ()
foreign import ccall safe "blpapi_name.h blpapi_Name_string"
blpapi_Name_string:: Ptr NameImpl -> CString
foreign import ccall safe "blpapi_name.h blpapi_Name_findName"
blpapi_Name_findName:: CString -> IO (Ptr NameImpl)
nameImplToString :: Ptr NameImpl -> String
nameImplToString ptr = unsafePerformIO $ peekCString (blpapi_Name_string ptr)
| |
4cd72d32bd93e47df877e9ddd7c95028820cc88340fa873bfe18f8b1650e9e1a | janestreet/memtrace_viewer_with_deps | time_range.ml | open! Core_kernel
type t =
{ lower_bound : Time_ns.Span.t option
; upper_bound : Time_ns.Span.t option
}
[@@deriving sexp, bin_io, equal]
let range lower_bound upper_bound = { lower_bound; upper_bound }
let all = { lower_bound = None; upper_bound = None }
let is_all = function
| { lower_bound = None; upper_bound = None } -> true
| _ -> false
;;
let covers { lower_bound; upper_bound } ~lower ~upper =
let covers_lower =
match lower_bound with
| None -> true
| Some lower_bound -> Time_ns.Span.(lower_bound <= lower)
in
let covers_upper =
match upper_bound with
| None -> true
| Some upper_bound -> Time_ns.Span.(upper_bound >= upper)
in
covers_lower && covers_upper
;;
let compare_point x { lower_bound; upper_bound } =
let in_bound f x bound =
match bound with
| None -> true
| Some bound -> f x bound
in
match x with
| x when not (in_bound Time_ns.Span.( >= ) x lower_bound) -> -1
| x when not (in_bound Time_ns.Span.( <= ) x upper_bound) -> 1
| _ -> 0
;;
| null | https://raw.githubusercontent.com/janestreet/memtrace_viewer_with_deps/5a9e1f927f5f8333e2d71c8d3ca03a45587422c4/common/time_range.ml | ocaml | open! Core_kernel
type t =
{ lower_bound : Time_ns.Span.t option
; upper_bound : Time_ns.Span.t option
}
[@@deriving sexp, bin_io, equal]
let range lower_bound upper_bound = { lower_bound; upper_bound }
let all = { lower_bound = None; upper_bound = None }
let is_all = function
| { lower_bound = None; upper_bound = None } -> true
| _ -> false
;;
let covers { lower_bound; upper_bound } ~lower ~upper =
let covers_lower =
match lower_bound with
| None -> true
| Some lower_bound -> Time_ns.Span.(lower_bound <= lower)
in
let covers_upper =
match upper_bound with
| None -> true
| Some upper_bound -> Time_ns.Span.(upper_bound >= upper)
in
covers_lower && covers_upper
;;
let compare_point x { lower_bound; upper_bound } =
let in_bound f x bound =
match bound with
| None -> true
| Some bound -> f x bound
in
match x with
| x when not (in_bound Time_ns.Span.( >= ) x lower_bound) -> -1
| x when not (in_bound Time_ns.Span.( <= ) x upper_bound) -> 1
| _ -> 0
;;
| |
bc7f41f7ddefd2887a9879e148d0d638efe53e8f6d1c6a017ddf2081a2c8979e | tomas-abrahamsson/tdiff | tdiff.erl | %%% A (simple) diff
Copyright ( C ) 2011
%%%
Author : < >
%%%
%%% This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Library General Public
License as published by the Free Software Foundation ; either
version 2 of the License , or ( at your option ) any later version .
%%%
%%% This library is distributed in the hope that it will be useful,
%%% but WITHOUT ANY WARRANTY; without even the implied warranty of
%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Library General Public License for more details .
%%%
You should have received a copy of the GNU Library General Public
%%% License along with this library; if not, write to the Free
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
%%%
-module(tdiff).
-export([diff/2, diff/3, patch/2]).
-export([diff_files/2, diff_files/3]).
-export([diff_binaries/2, diff_binaries/3, patch_binaries/2]).
-export([format_diff_lines/1]).
-export([print_diff_lines/1]).
-type filename() :: string().
-type options() :: [option()].
-type option() :: {algorithm_tracer, no_tracer | algorithm_tracer()}.
-type algorithm_tracer() :: fun(({d, d()} |
{dpath, dpath()} |
{exhausted_kdiagonals, d()} |
{final_edit_script, edit_script()}) -> _).
-type d() :: integer(). %% The diagonal number, offset in number of steps from
the diagonal through ( 0,0 ) .
-type dpath() :: dpath(term()).
-type dpath(Elem) :: {X::index(), Y::index(),
SX::[Elem]|oob, SY::[Elem]|oob,
[Elem]}.%% The X and Y are indices along x and y.
The SX and SY are accumulated old / new strings
%% The last is a list of elements in reverse
%% order.
-type index() :: non_neg_integer().
-type edit_script() :: edit_script(term()).
-type edit_script(Elem) :: [{eq, [Elem]} | {ins, [Elem]} | {del, [Elem]}].
-export_type([options/0, option/0]).
-export_type([edit_script/0, edit_script/1]).
-export_type([algorithm_tracer/0, d/0, dpath/0, dpath/1, index/0]).
@equiv diff_files(F1 , F2 , [ ] )
-spec diff_files(filename(), filename()) -> edit_script(Line::string()).
diff_files(F1, F2) -> diff_files(F1, F2, _Opts=[]).
@doc Read the two files into memory , split to lists of lines
%% and compute the edit-script (or diff) for these.
%% The result is a diff for a list of lines/strings.
-spec diff_files(filename(), filename(), options()) -> edit_script(Line) when
Line :: string().
diff_files(F1, F2, Opts) ->
{ok,B1} = file:read_file(F1),
{ok,B2} = file:read_file(F2),
diff_binaries(B1, B2, Opts).
@equiv diff_binaries(B1 , B2 , [ ] )
diff_binaries(B1, B2) -> diff_binaries(B1, B2, _Opts=[]).
@doc Split the two binaries into lists of lines ( lists of strings ) ,
%% and compute the edit-script (or diff) for these.
%% The result is a diff for a list of lines/strings,
%% not for a list of binaries.
-spec diff_binaries(binary(), binary(), options()) -> edit_script(Line) when
Line :: string().
diff_binaries(B1, B2, Opts) ->
diff(split_bin_to_lines(B1), split_bin_to_lines(B2), Opts).
split_bin_to_lines(B) -> sbtl(binary_to_list(B), "", []).
sbtl("\n" ++ Rest, L, Acc) -> sbtl(Rest, "", [lists:reverse("\n"++L) | Acc]);
sbtl([C|Rest], L, Acc) -> sbtl(Rest, [C|L], Acc);
sbtl("", "", Acc) -> lists:reverse(Acc);
sbtl("", L, Acc) -> lists:reverse([lists:reverse(L) | Acc]).
%% @doc Print an edit-script, or diff. See {@link format_diff_lines/1}
%% for info on the output.
-spec print_diff_lines(edit_script(char())) -> _.
print_diff_lines(Diff) -> io:format("~s~n", [format_diff_lines(Diff)]).
%% @doc Format an edit-script or diff of lines to text, so that it looks like
%% a diff. The result will look like this if printed:
%% <pre><![CDATA[
123,456
< old line 1
< old line 2
%% ---
> new line 1
678
> new line 2
%% ]]></pre>
-spec format_diff_lines(edit_script(char())) -> iodata().
format_diff_lines(Diff) -> fdl(Diff, 1,1).
fdl([{del,Ls1},{ins,Ls2}|T], X, Y) ->
Addr = io_lib:format("~sc~s~n", [fmt_addr(X,Ls1), fmt_addr(Y, Ls2)]),
Del = format_lines("< ", Ls1),
Sep = io_lib:format("---~n", []),
Ins = format_lines("> ", Ls2),
[Addr, Del, Sep, Ins | fdl(T, X+length(Ls1), Y+length(Ls2))];
fdl([{del,Ls}|T], X, Y) ->
Addr = io_lib:format("~w,~wd~w~n", [X,X+length(Ls), Y]),
Del = format_lines("< ", Ls),
[Addr, Del | fdl(T, X+length(Ls), Y)];
fdl([{ins,Ls}|T], X, Y) ->
Addr = io_lib:format("~wa~w,~w~n", [X,Y,Y+length(Ls)]),
Ins = format_lines("> ", Ls),
[Addr, Ins | fdl(T, X, Y+length(Ls))];
fdl([{eq,Ls}|T], X, Y) ->
fdl(T, X+length(Ls), Y+length(Ls));
fdl([], _X, _Y) ->
[].
fmt_addr(N, Ls) when length(Ls) == 1 -> f("~w", [N]);
fmt_addr(N, Ls) -> f("~w,~w", [N,N+length(Ls)-1]).
f(F,A) -> lists:flatten(io_lib:format(F,A)).
format_lines(Indicator, Lines) ->
lists:map(fun(Line) -> io_lib:format("~s~s", [Indicator, Line]) end,
Lines).
%% Algorithm: "An O(ND) Difference Algorithm and Its Variations"
by , 1986 .
%%
%% Some good info can also be found at /
%%
%% General principle of the algorithm:
%%
%% We are about to produce a diff (or editscript) on what differs (or
how to get from ) string to . We lay out a grid with the
symbols from Sx on the x - axis and the symbols from on the Y
axis . The first symbol of and is at ( 0,0 ) .
%%
( The Sx and are strings of symbols : lists of lines or lists of
%% characters, or lists of works, or whatever is suitable.)
%%
%% Example: Sx="aXcccXe", Sy="aYcccYe" ==> the following grid is formed:
%%
Sx
%% aXcccXe
%% Sy a\
%% Y
%% c \\\
%% c \\\
%% c \\\
%% Y
%% e \
%%
Our plan now is go from corner to corner : from ( 0,0 ) to ( 7,7 ) .
%% We can move diagonally whenever the character on the x-axis and the
%% character on the y-axis are identical. Those are symbolized by the
%% \-edges in the grid above.
%%
%% When it is not possible to go diagonally (because the characters on
%% the x- and y-axis are not identical), we have to go horizontally
and vertically . This corresponds to deleting characters from Sx and
inserting characters from .
%%
%% Definitions (from the "O(ND) ..." paper by E.Myers):
%%
%% * A D-path is a path with D non-diagonal edges (ie: edges that are
%% vertical and/or horizontal).
%% * K-diagonal: the diagonal such that K=X-Y
( Thus , the 0 - diagonal is the one starting at ( 0,0 ) , going
straight down - right . The 1 - diagonal is the one just to the right of
the 0 - diagonal : starting at ( 1,0 ) going straight down - right .
There are negative diagonals as well : the -1 - diagonal is the one starting
at ( 0,1 ) , and so on .
%% * Snake: a sequence of only-diagonal steps
%%
%% The algorithm loops over D and over the K-diagonals:
%% D = 0..(length(Sx)+length(Sy))
K = -D .. D in steps of 2
%% For every such K-diagonal, we choose between the (D-1)-paths
%% whose end-points are currently on the adjacent (K-1)- and
%% (K+1)-diagonals: we pick the one that have gone furthest along
%% its diagonal.
%%
%% This means taking that (D-1)-path and going right (if
%% we pick the (D-1)-path on the (K-1)-diagonal) or down (if we
%% pick the (D-1)-path on the (K+1)-diagonal), thus forming a
%% D-path from a (D-1)-path.
%%
%% After this, we try to extend the snake as far as possible along
%% the K-diagonal.
%%
%% Note that this means that when we choose between the
%% (D-1)-paths along the (K-1)- and (K+1)-diagonals, we choose
between two paths , whose snakes have been extended as far as
possible , ie : they are at a point where the characters and
%% Sy don't match.
%%
%% Note that with this algorithm, we always do comparions further
right into the strings Sx and . The algorithm never goes towards
the beginning of either Sx or do do further comparisons . This is
%% good, because this fits the way lists are built in functional
%% programming languages.
@equiv diff(Sx , , [ ] )
-spec diff(Old::[Elem], New::[Elem]) -> edit_script(Elem) when Elem::term().
diff(Sx, Sy) -> diff(Sx, Sy, _Opts=[]).
@doc Compute an edit - script between two sequences of elements ,
such as two strings , lists of lines , or lists of elements more generally .
%% The result is a list of operations add/del/eq that can transform
%% `Old' to `New'
%%
The algorithm is " An O(ND ) Difference Algorithm and Its Variations "
by , 1986 .
%%
%% Note: This implementation currently searches only forwards. For
large inputs ( such as thousands of elements ) that differ very much ,
%% this implementation will take unnecessarily long time, and may not
%% complete within reasonable time.
%%
%% @end
%% Todo for optimization to handle large inputs (see the paper for details)
%% * Search from both ends as described in the paper.
When passing half of distance , search from the end ( reversing
%% the strings). Stop again at half. If snakes don't meet,
%% pick the best (or all?) snakes from both ends, search
%% recursively from both ends within this space.
%% * Keep track of visited coordinates.
%% If already visited, consider the snake/diagonal dead and don't follow it.
-spec diff(Old::[Elem], New::[Elem], options()) -> edit_script(Elem) when
Elem::term().
diff(Sx, Sy, Opts) ->
SxLen = length(Sx),
SyLen = length(Sy),
DMax = SxLen + SyLen,
Tracer = proplists:get_value(algorithm_tracer, Opts, no_tracer),
EditScript = case try_dpaths(0, DMax, [{0, 0, Sx, Sy, []}], Tracer) of
no -> [{del,Sx},{ins,Sy}];
{ed,EditOpsR} -> edit_ops_to_edit_script(EditOpsR)
end,
t_final_script(Tracer, EditScript),
EditScript.
try_dpaths(D, DMax, D1Paths, Tracer) when D =< DMax ->
t_d(Tracer, D),
case try_kdiagonals(-D, D, D1Paths, [], Tracer) of
{ed, E} -> {ed, E};
{dpaths, DPaths} -> try_dpaths(D+1, DMax, DPaths, Tracer)
end;
try_dpaths(_, _DMax, _DPaths, _Tracer) ->
no.
try_kdiagonals(K, D, D1Paths, DPaths, Tracer) when K =< D ->
DPath = if D == 0 -> hd(D1Paths);
true -> pick_best_dpath(K, D, D1Paths)
end,
case follow_snake(DPath) of
{ed, E} ->
{ed, E};
{dpath, DPath2} when K =/= -D ->
t_dpath(Tracer, DPath2),
try_kdiagonals(K+2, D, tl(D1Paths), [DPath2 | DPaths], Tracer);
{dpath, DPath2} when K =:= -D ->
t_dpath(Tracer, DPath2),
try_kdiagonals(K+2, D, D1Paths, [DPath2 | DPaths], Tracer)
end;
try_kdiagonals(_, D, _, DPaths, Tracer) ->
t_exhausted_kdiagonals(Tracer, D),
{dpaths, lists:reverse(DPaths)}.
follow_snake({X, Y, [H|Tx], [H|Ty], Cs}) -> follow_snake({X+1,Y+1, Tx,Ty,
[{e,H} | Cs]});
follow_snake({_X,_Y,[], [], Cs}) -> {ed, Cs};
follow_snake({X, Y, [], Sy, Cs}) -> {dpath, {X, Y, [], Sy, Cs}};
follow_snake({X, Y, oob, Sy, Cs}) -> {dpath, {X, Y, oob, Sy, Cs}};
follow_snake({X, Y, Sx, [], Cs}) -> {dpath, {X, Y, Sx, [], Cs}};
follow_snake({X, Y, Sx, oob, Cs}) -> {dpath, {X, Y, Sx, oob, Cs}};
follow_snake({X, Y, Sx, Sy, Cs}) -> {dpath, {X, Y, Sx, Sy, Cs}}.
pick_best_dpath(K, D, DPs) -> pbd(K, D, DPs).
pbd( K, D, [DP|_]) when K==-D -> go_inc_y(DP);
pbd( K, D, [DP]) when K==D -> go_inc_x(DP);
pbd(_K,_D, [DP1,DP2|_]) -> pbd2(DP1,DP2).
pbd2({_,Y1,_,_,_}=DP1, {_,Y2,_,_,_}) when Y1 > Y2 -> go_inc_x(DP1);
pbd2(_DP1 , DP2) -> go_inc_y(DP2).
go_inc_y({X, Y, [H|Tx], Sy, Cs}) -> {X, Y+1, Tx, Sy, [{y,H}|Cs]};
go_inc_y({X, Y, [], Sy, Cs}) -> {X, Y+1, oob, Sy, Cs};
go_inc_y({X, Y, oob, Sy, Cs}) -> {X, Y+1, oob, Sy, Cs}.
go_inc_x({X, Y, Sx, [H|Ty], Cs}) -> {X+1, Y, Sx, Ty, [{x,H}|Cs]};
go_inc_x({X, Y, Sx, [], Cs}) -> {X+1, Y, Sx, oob, Cs};
go_inc_x({X, Y, Sx, oob, Cs}) -> {X+1, Y, Sx, oob, Cs}.
edit_ops_to_edit_script(EditOps) -> e2e(EditOps, _Acc=[]).
e2e([{x,C}|T], [{ins,R}|Acc]) -> e2e(T, [{ins,[C|R]}|Acc]);
e2e([{y,C}|T], [{del,R}|Acc]) -> e2e(T, [{del,[C|R]}|Acc]);
e2e([{e,C}|T], [{eq,R}|Acc]) -> e2e(T, [{eq, [C|R]}|Acc]);
e2e([{x,C}|T], Acc) -> e2e(T, [{ins,[C]}|Acc]);
e2e([{y,C}|T], Acc) -> e2e(T, [{del,[C]}|Acc]);
e2e([{e,C}|T], Acc) -> e2e(T, [{eq, [C]}|Acc]);
e2e([], Acc) -> Acc.
%% @doc Apply a patch, in the form of an edit-script, to a string or
%% list of lines (or list of elements more generally)
-spec patch([Elem], edit_script(Elem)) -> [Elem] when Elem::term().
patch(S, Diff) -> p2(S, Diff, []).
@doc Apply a patch to a binary . The binary is first split to list
%% of lines (list of strings), and the edit-script is expected to be
%% for lists of strings/lines. The result is a list of strings.
-spec patch_binaries(binary(), edit_script(Line)) -> [Line] when
Line::string().
patch_binaries(B, Diff) ->
patch(split_bin_to_lines(B), Diff).
p2(S, [{eq,T}|Rest], Acc) -> p2_eq(S, T, Rest, Acc);
p2(S, [{ins,T}|Rest], Acc) -> p2_ins(S, T, Rest, Acc);
p2(S, [{del,T}|Rest], Acc) -> p2_del(S, T, Rest, Acc);
p2([],[], Acc) -> lists:reverse(Acc).
p2_eq([H|S], [H|T], Rest, Acc) -> p2_eq(S, T, Rest, [H|Acc]);
p2_eq(S, [], Rest, Acc) -> p2(S, Rest, Acc).
p2_ins(S, [H|T], Rest, Acc) -> p2_ins(S, T, Rest, [H|Acc]);
p2_ins(S, [], Rest, Acc) -> p2(S, Rest, Acc).
p2_del([H|S], [H|T], Rest, Acc) -> p2_del(S, T, Rest, Acc);
p2_del(S, [], Rest, Acc) -> p2(S, Rest, Acc).
t_final_script(no_tracer, _) -> ok;
t_final_script(Tracer, EditScript) -> Tracer({final_edit_script, EditScript}).
t_d(no_tracer, _) -> ok;
t_d(Tracer, D) -> Tracer({d,D}).
t_dpath(no_tracer, _) -> ok;
t_dpath(Tracer, DPath) -> Tracer({dpath,DPath}).
t_exhausted_kdiagonals(no_tracer, _) -> ok;
t_exhausted_kdiagonals(Tracer, D) -> Tracer({exhausted_kdiagonals, D}).
| null | https://raw.githubusercontent.com/tomas-abrahamsson/tdiff/2125d01df7c2e4bd3f25b2b1c80a00e67b2ef450/src/tdiff.erl | erlang | A (simple) diff
This library is free software; you can redistribute it and/or
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
License along with this library; if not, write to the Free
The diagonal number, offset in number of steps from
The X and Y are indices along x and y.
The last is a list of elements in reverse
order.
and compute the edit-script (or diff) for these.
The result is a diff for a list of lines/strings.
and compute the edit-script (or diff) for these.
The result is a diff for a list of lines/strings,
not for a list of binaries.
@doc Print an edit-script, or diff. See {@link format_diff_lines/1}
for info on the output.
@doc Format an edit-script or diff of lines to text, so that it looks like
a diff. The result will look like this if printed:
<pre><![CDATA[
---
]]></pre>
Algorithm: "An O(ND) Difference Algorithm and Its Variations"
Some good info can also be found at /
General principle of the algorithm:
We are about to produce a diff (or editscript) on what differs (or
characters, or lists of works, or whatever is suitable.)
Example: Sx="aXcccXe", Sy="aYcccYe" ==> the following grid is formed:
aXcccXe
Sy a\
Y
c \\\
c \\\
c \\\
Y
e \
We can move diagonally whenever the character on the x-axis and the
character on the y-axis are identical. Those are symbolized by the
\-edges in the grid above.
When it is not possible to go diagonally (because the characters on
the x- and y-axis are not identical), we have to go horizontally
Definitions (from the "O(ND) ..." paper by E.Myers):
* A D-path is a path with D non-diagonal edges (ie: edges that are
vertical and/or horizontal).
* K-diagonal: the diagonal such that K=X-Y
* Snake: a sequence of only-diagonal steps
The algorithm loops over D and over the K-diagonals:
D = 0..(length(Sx)+length(Sy))
For every such K-diagonal, we choose between the (D-1)-paths
whose end-points are currently on the adjacent (K-1)- and
(K+1)-diagonals: we pick the one that have gone furthest along
its diagonal.
This means taking that (D-1)-path and going right (if
we pick the (D-1)-path on the (K-1)-diagonal) or down (if we
pick the (D-1)-path on the (K+1)-diagonal), thus forming a
D-path from a (D-1)-path.
After this, we try to extend the snake as far as possible along
the K-diagonal.
Note that this means that when we choose between the
(D-1)-paths along the (K-1)- and (K+1)-diagonals, we choose
Sy don't match.
Note that with this algorithm, we always do comparions further
good, because this fits the way lists are built in functional
programming languages.
The result is a list of operations add/del/eq that can transform
`Old' to `New'
Note: This implementation currently searches only forwards. For
this implementation will take unnecessarily long time, and may not
complete within reasonable time.
@end
Todo for optimization to handle large inputs (see the paper for details)
* Search from both ends as described in the paper.
the strings). Stop again at half. If snakes don't meet,
pick the best (or all?) snakes from both ends, search
recursively from both ends within this space.
* Keep track of visited coordinates.
If already visited, consider the snake/diagonal dead and don't follow it.
@doc Apply a patch, in the form of an edit-script, to a string or
list of lines (or list of elements more generally)
of lines (list of strings), and the edit-script is expected to be
for lists of strings/lines. The result is a list of strings. |
Copyright ( C ) 2011
Author : < >
modify it under the terms of the GNU Library General Public
License as published by the Free Software Foundation ; either
version 2 of the License , or ( at your option ) any later version .
Library General Public License for more details .
You should have received a copy of the GNU Library General Public
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
-module(tdiff).
-export([diff/2, diff/3, patch/2]).
-export([diff_files/2, diff_files/3]).
-export([diff_binaries/2, diff_binaries/3, patch_binaries/2]).
-export([format_diff_lines/1]).
-export([print_diff_lines/1]).
-type filename() :: string().
-type options() :: [option()].
-type option() :: {algorithm_tracer, no_tracer | algorithm_tracer()}.
-type algorithm_tracer() :: fun(({d, d()} |
{dpath, dpath()} |
{exhausted_kdiagonals, d()} |
{final_edit_script, edit_script()}) -> _).
the diagonal through ( 0,0 ) .
-type dpath() :: dpath(term()).
-type dpath(Elem) :: {X::index(), Y::index(),
SX::[Elem]|oob, SY::[Elem]|oob,
The SX and SY are accumulated old / new strings
-type index() :: non_neg_integer().
-type edit_script() :: edit_script(term()).
-type edit_script(Elem) :: [{eq, [Elem]} | {ins, [Elem]} | {del, [Elem]}].
-export_type([options/0, option/0]).
-export_type([edit_script/0, edit_script/1]).
-export_type([algorithm_tracer/0, d/0, dpath/0, dpath/1, index/0]).
@equiv diff_files(F1 , F2 , [ ] )
-spec diff_files(filename(), filename()) -> edit_script(Line::string()).
diff_files(F1, F2) -> diff_files(F1, F2, _Opts=[]).
@doc Read the two files into memory , split to lists of lines
-spec diff_files(filename(), filename(), options()) -> edit_script(Line) when
Line :: string().
diff_files(F1, F2, Opts) ->
{ok,B1} = file:read_file(F1),
{ok,B2} = file:read_file(F2),
diff_binaries(B1, B2, Opts).
@equiv diff_binaries(B1 , B2 , [ ] )
diff_binaries(B1, B2) -> diff_binaries(B1, B2, _Opts=[]).
@doc Split the two binaries into lists of lines ( lists of strings ) ,
-spec diff_binaries(binary(), binary(), options()) -> edit_script(Line) when
Line :: string().
diff_binaries(B1, B2, Opts) ->
diff(split_bin_to_lines(B1), split_bin_to_lines(B2), Opts).
split_bin_to_lines(B) -> sbtl(binary_to_list(B), "", []).
sbtl("\n" ++ Rest, L, Acc) -> sbtl(Rest, "", [lists:reverse("\n"++L) | Acc]);
sbtl([C|Rest], L, Acc) -> sbtl(Rest, [C|L], Acc);
sbtl("", "", Acc) -> lists:reverse(Acc);
sbtl("", L, Acc) -> lists:reverse([lists:reverse(L) | Acc]).
-spec print_diff_lines(edit_script(char())) -> _.
print_diff_lines(Diff) -> io:format("~s~n", [format_diff_lines(Diff)]).
123,456
< old line 1
< old line 2
> new line 1
678
> new line 2
-spec format_diff_lines(edit_script(char())) -> iodata().
format_diff_lines(Diff) -> fdl(Diff, 1,1).
fdl([{del,Ls1},{ins,Ls2}|T], X, Y) ->
Addr = io_lib:format("~sc~s~n", [fmt_addr(X,Ls1), fmt_addr(Y, Ls2)]),
Del = format_lines("< ", Ls1),
Sep = io_lib:format("---~n", []),
Ins = format_lines("> ", Ls2),
[Addr, Del, Sep, Ins | fdl(T, X+length(Ls1), Y+length(Ls2))];
fdl([{del,Ls}|T], X, Y) ->
Addr = io_lib:format("~w,~wd~w~n", [X,X+length(Ls), Y]),
Del = format_lines("< ", Ls),
[Addr, Del | fdl(T, X+length(Ls), Y)];
fdl([{ins,Ls}|T], X, Y) ->
Addr = io_lib:format("~wa~w,~w~n", [X,Y,Y+length(Ls)]),
Ins = format_lines("> ", Ls),
[Addr, Ins | fdl(T, X, Y+length(Ls))];
fdl([{eq,Ls}|T], X, Y) ->
fdl(T, X+length(Ls), Y+length(Ls));
fdl([], _X, _Y) ->
[].
fmt_addr(N, Ls) when length(Ls) == 1 -> f("~w", [N]);
fmt_addr(N, Ls) -> f("~w,~w", [N,N+length(Ls)-1]).
f(F,A) -> lists:flatten(io_lib:format(F,A)).
format_lines(Indicator, Lines) ->
lists:map(fun(Line) -> io_lib:format("~s~s", [Indicator, Line]) end,
Lines).
by , 1986 .
how to get from ) string to . We lay out a grid with the
symbols from Sx on the x - axis and the symbols from on the Y
axis . The first symbol of and is at ( 0,0 ) .
( The Sx and are strings of symbols : lists of lines or lists of
Sx
Our plan now is go from corner to corner : from ( 0,0 ) to ( 7,7 ) .
and vertically . This corresponds to deleting characters from Sx and
inserting characters from .
( Thus , the 0 - diagonal is the one starting at ( 0,0 ) , going
straight down - right . The 1 - diagonal is the one just to the right of
the 0 - diagonal : starting at ( 1,0 ) going straight down - right .
There are negative diagonals as well : the -1 - diagonal is the one starting
at ( 0,1 ) , and so on .
K = -D .. D in steps of 2
between two paths , whose snakes have been extended as far as
possible , ie : they are at a point where the characters and
right into the strings Sx and . The algorithm never goes towards
the beginning of either Sx or do do further comparisons . This is
@equiv diff(Sx , , [ ] )
-spec diff(Old::[Elem], New::[Elem]) -> edit_script(Elem) when Elem::term().
diff(Sx, Sy) -> diff(Sx, Sy, _Opts=[]).
@doc Compute an edit - script between two sequences of elements ,
such as two strings , lists of lines , or lists of elements more generally .
The algorithm is " An O(ND ) Difference Algorithm and Its Variations "
by , 1986 .
large inputs ( such as thousands of elements ) that differ very much ,
When passing half of distance , search from the end ( reversing
-spec diff(Old::[Elem], New::[Elem], options()) -> edit_script(Elem) when
Elem::term().
diff(Sx, Sy, Opts) ->
SxLen = length(Sx),
SyLen = length(Sy),
DMax = SxLen + SyLen,
Tracer = proplists:get_value(algorithm_tracer, Opts, no_tracer),
EditScript = case try_dpaths(0, DMax, [{0, 0, Sx, Sy, []}], Tracer) of
no -> [{del,Sx},{ins,Sy}];
{ed,EditOpsR} -> edit_ops_to_edit_script(EditOpsR)
end,
t_final_script(Tracer, EditScript),
EditScript.
try_dpaths(D, DMax, D1Paths, Tracer) when D =< DMax ->
t_d(Tracer, D),
case try_kdiagonals(-D, D, D1Paths, [], Tracer) of
{ed, E} -> {ed, E};
{dpaths, DPaths} -> try_dpaths(D+1, DMax, DPaths, Tracer)
end;
try_dpaths(_, _DMax, _DPaths, _Tracer) ->
no.
try_kdiagonals(K, D, D1Paths, DPaths, Tracer) when K =< D ->
DPath = if D == 0 -> hd(D1Paths);
true -> pick_best_dpath(K, D, D1Paths)
end,
case follow_snake(DPath) of
{ed, E} ->
{ed, E};
{dpath, DPath2} when K =/= -D ->
t_dpath(Tracer, DPath2),
try_kdiagonals(K+2, D, tl(D1Paths), [DPath2 | DPaths], Tracer);
{dpath, DPath2} when K =:= -D ->
t_dpath(Tracer, DPath2),
try_kdiagonals(K+2, D, D1Paths, [DPath2 | DPaths], Tracer)
end;
try_kdiagonals(_, D, _, DPaths, Tracer) ->
t_exhausted_kdiagonals(Tracer, D),
{dpaths, lists:reverse(DPaths)}.
follow_snake({X, Y, [H|Tx], [H|Ty], Cs}) -> follow_snake({X+1,Y+1, Tx,Ty,
[{e,H} | Cs]});
follow_snake({_X,_Y,[], [], Cs}) -> {ed, Cs};
follow_snake({X, Y, [], Sy, Cs}) -> {dpath, {X, Y, [], Sy, Cs}};
follow_snake({X, Y, oob, Sy, Cs}) -> {dpath, {X, Y, oob, Sy, Cs}};
follow_snake({X, Y, Sx, [], Cs}) -> {dpath, {X, Y, Sx, [], Cs}};
follow_snake({X, Y, Sx, oob, Cs}) -> {dpath, {X, Y, Sx, oob, Cs}};
follow_snake({X, Y, Sx, Sy, Cs}) -> {dpath, {X, Y, Sx, Sy, Cs}}.
pick_best_dpath(K, D, DPs) -> pbd(K, D, DPs).
pbd( K, D, [DP|_]) when K==-D -> go_inc_y(DP);
pbd( K, D, [DP]) when K==D -> go_inc_x(DP);
pbd(_K,_D, [DP1,DP2|_]) -> pbd2(DP1,DP2).
pbd2({_,Y1,_,_,_}=DP1, {_,Y2,_,_,_}) when Y1 > Y2 -> go_inc_x(DP1);
pbd2(_DP1 , DP2) -> go_inc_y(DP2).
go_inc_y({X, Y, [H|Tx], Sy, Cs}) -> {X, Y+1, Tx, Sy, [{y,H}|Cs]};
go_inc_y({X, Y, [], Sy, Cs}) -> {X, Y+1, oob, Sy, Cs};
go_inc_y({X, Y, oob, Sy, Cs}) -> {X, Y+1, oob, Sy, Cs}.
go_inc_x({X, Y, Sx, [H|Ty], Cs}) -> {X+1, Y, Sx, Ty, [{x,H}|Cs]};
go_inc_x({X, Y, Sx, [], Cs}) -> {X+1, Y, Sx, oob, Cs};
go_inc_x({X, Y, Sx, oob, Cs}) -> {X+1, Y, Sx, oob, Cs}.
edit_ops_to_edit_script(EditOps) -> e2e(EditOps, _Acc=[]).
e2e([{x,C}|T], [{ins,R}|Acc]) -> e2e(T, [{ins,[C|R]}|Acc]);
e2e([{y,C}|T], [{del,R}|Acc]) -> e2e(T, [{del,[C|R]}|Acc]);
e2e([{e,C}|T], [{eq,R}|Acc]) -> e2e(T, [{eq, [C|R]}|Acc]);
e2e([{x,C}|T], Acc) -> e2e(T, [{ins,[C]}|Acc]);
e2e([{y,C}|T], Acc) -> e2e(T, [{del,[C]}|Acc]);
e2e([{e,C}|T], Acc) -> e2e(T, [{eq, [C]}|Acc]);
e2e([], Acc) -> Acc.
-spec patch([Elem], edit_script(Elem)) -> [Elem] when Elem::term().
patch(S, Diff) -> p2(S, Diff, []).
@doc Apply a patch to a binary . The binary is first split to list
-spec patch_binaries(binary(), edit_script(Line)) -> [Line] when
Line::string().
patch_binaries(B, Diff) ->
patch(split_bin_to_lines(B), Diff).
p2(S, [{eq,T}|Rest], Acc) -> p2_eq(S, T, Rest, Acc);
p2(S, [{ins,T}|Rest], Acc) -> p2_ins(S, T, Rest, Acc);
p2(S, [{del,T}|Rest], Acc) -> p2_del(S, T, Rest, Acc);
p2([],[], Acc) -> lists:reverse(Acc).
p2_eq([H|S], [H|T], Rest, Acc) -> p2_eq(S, T, Rest, [H|Acc]);
p2_eq(S, [], Rest, Acc) -> p2(S, Rest, Acc).
p2_ins(S, [H|T], Rest, Acc) -> p2_ins(S, T, Rest, [H|Acc]);
p2_ins(S, [], Rest, Acc) -> p2(S, Rest, Acc).
p2_del([H|S], [H|T], Rest, Acc) -> p2_del(S, T, Rest, Acc);
p2_del(S, [], Rest, Acc) -> p2(S, Rest, Acc).
t_final_script(no_tracer, _) -> ok;
t_final_script(Tracer, EditScript) -> Tracer({final_edit_script, EditScript}).
t_d(no_tracer, _) -> ok;
t_d(Tracer, D) -> Tracer({d,D}).
t_dpath(no_tracer, _) -> ok;
t_dpath(Tracer, DPath) -> Tracer({dpath,DPath}).
t_exhausted_kdiagonals(no_tracer, _) -> ok;
t_exhausted_kdiagonals(Tracer, D) -> Tracer({exhausted_kdiagonals, D}).
|
2d0e12abe5940473a03b361a883ce9e0ce7dd93f18205269c3e1478efc77026f | mauricioabreu/lang-studies | 09-input-and-output.hs | import Data.List
import System.Random
import System.Environment.Blank (getArgs)
- Lets implement the UNIX echo command
- The program arguments are simply printed to the standard output .
- If the first argument is -n , this argument is not printed , and no trailing newline is printed
- Lets implement the UNIX echo command
- The program arguments are simply printed to the standard output.
- If the first argument is -n, this argument is not printed, and no trailing newline is printed
-}
main :: IO ()
main = do
args <- getArgs
if not (null args) && head args == "-n"
then putStr $ unwords (tail args)
else putStrLn $ unwords args
Write a lottery number picker
- This function should take a StdGen instance , and produce a list of six unique numbers between 1 and 49 , in numerical order
- This function should take a StdGen instance, and produce a list of six unique numbers between 1 and 49, in numerical order
-}
lottery :: StdGen -> [Int]
lottery gen = sort $ take 6 $ nub $ randomRs (1, 49) gen | null | https://raw.githubusercontent.com/mauricioabreu/lang-studies/db75f8688d4e939bf0c5db44b6f176e11e9fcb8f/learn-you-a-haskell-exercises/09-input-and-output.hs | haskell | import Data.List
import System.Random
import System.Environment.Blank (getArgs)
- Lets implement the UNIX echo command
- The program arguments are simply printed to the standard output .
- If the first argument is -n , this argument is not printed , and no trailing newline is printed
- Lets implement the UNIX echo command
- The program arguments are simply printed to the standard output.
- If the first argument is -n, this argument is not printed, and no trailing newline is printed
-}
main :: IO ()
main = do
args <- getArgs
if not (null args) && head args == "-n"
then putStr $ unwords (tail args)
else putStrLn $ unwords args
Write a lottery number picker
- This function should take a StdGen instance , and produce a list of six unique numbers between 1 and 49 , in numerical order
- This function should take a StdGen instance, and produce a list of six unique numbers between 1 and 49, in numerical order
-}
lottery :: StdGen -> [Int]
lottery gen = sort $ take 6 $ nub $ randomRs (1, 49) gen | |
2efd3ef47e866c3ac685f97ee33b22a427b1ed11aac8799f3ada54a092171aa6 | mrphlip/aoc | 14.hs | # OPTIONS_GHC -Wno - tabs #
import Data.List
import qualified Data.Set as S
import qualified Text.ParserCombinators.ReadP as P
import Control.Exception
import Utils
type Point = (Integer, Integer)
type Grid = S.Set Point
getInput :: IO [[Point]]
getInput = map parseLine <$> lines <$> readFile "14.txt"
parseLine :: String -> [Point]
parseLine = runReadP readPath
where
readInt = P.readS_to_P reads :: P.ReadP Integer
readPoint = do
a <- readInt
P.char ','
b <- readInt
return (a, b)
readPath = P.sepBy1 readPoint (P.string " -> ")
drawLines :: [[Point]] -> Grid
drawLines = S.fromList . concat . map drawLine
drawLine :: [Point] -> [Point]
drawLine ps = concat $ map (uncurry drawEdge) $ zip ps (tail ps)
drawEdge :: Point -> Point -> [Point]
drawEdge (x1, y1) (x2, y2)
| x1 == x2 = [(x1, y) | y <- [min y1 y2 .. max y1 y2]]
| y1 == y2 = [(x, y1) | x <- [min x1 x2 .. max x1 x2]]
findFloor :: Grid -> Integer
findFloor g = (+1) $ maximum $ map snd $ S.elems g
dropSand grid ( endpoint , did it stop before reaching the floor ? )
dropSand :: Grid -> Integer -> Point -> (Point, Bool)
dropSand grid floor (x, y)
| y >= floor = ((x, y), False)
| not $ (x, y + 1) `S.member` grid = dropSand grid floor (x, y + 1)
| not $ (x - 1, y + 1) `S.member` grid = dropSand grid floor (x - 1, y + 1)
| not $ (x + 1, y + 1) `S.member` grid = dropSand grid floor (x + 1, y + 1)
| otherwise = ((x, y), True)
pourSand :: Grid -> [(Grid, Bool)]
pourSand grid = iterate stepFunc (grid, True)
where
floor = findFloor grid
stepFunc (g, _) = let (p, stop) = dropSand g floor (500, 0) in (S.insert p g, stop)
partA :: Grid -> Integer
partA grid = subtract 1 $ genericLength $ takeWhile snd $ pourSand grid
partB :: Grid -> Integer
partB grid = genericLength $ takeWhile (not . S.member (500,0) . fst) $ pourSand grid
tests :: IO ()
tests = do
check $ partA testGrid == 24
check $ partB testGrid == 93
where
testData = map parseLine ["498,4 -> 498,6 -> 496,6", "503,4 -> 502,4 -> 502,9 -> 494,9"]
testGrid = drawLines testData
check True = return ()
check False = throwIO $ AssertionFailed "test failed"
main :: IO ()
main = do
tests
dat <- getInput
let grid = drawLines dat
print $ partA grid
print $ partB grid
| null | https://raw.githubusercontent.com/mrphlip/aoc/34474f9fa32e3976ba5886045b610054cd220afd/2022/14.hs | haskell | # OPTIONS_GHC -Wno - tabs #
import Data.List
import qualified Data.Set as S
import qualified Text.ParserCombinators.ReadP as P
import Control.Exception
import Utils
type Point = (Integer, Integer)
type Grid = S.Set Point
getInput :: IO [[Point]]
getInput = map parseLine <$> lines <$> readFile "14.txt"
parseLine :: String -> [Point]
parseLine = runReadP readPath
where
readInt = P.readS_to_P reads :: P.ReadP Integer
readPoint = do
a <- readInt
P.char ','
b <- readInt
return (a, b)
readPath = P.sepBy1 readPoint (P.string " -> ")
drawLines :: [[Point]] -> Grid
drawLines = S.fromList . concat . map drawLine
drawLine :: [Point] -> [Point]
drawLine ps = concat $ map (uncurry drawEdge) $ zip ps (tail ps)
drawEdge :: Point -> Point -> [Point]
drawEdge (x1, y1) (x2, y2)
| x1 == x2 = [(x1, y) | y <- [min y1 y2 .. max y1 y2]]
| y1 == y2 = [(x, y1) | x <- [min x1 x2 .. max x1 x2]]
findFloor :: Grid -> Integer
findFloor g = (+1) $ maximum $ map snd $ S.elems g
dropSand grid ( endpoint , did it stop before reaching the floor ? )
dropSand :: Grid -> Integer -> Point -> (Point, Bool)
dropSand grid floor (x, y)
| y >= floor = ((x, y), False)
| not $ (x, y + 1) `S.member` grid = dropSand grid floor (x, y + 1)
| not $ (x - 1, y + 1) `S.member` grid = dropSand grid floor (x - 1, y + 1)
| not $ (x + 1, y + 1) `S.member` grid = dropSand grid floor (x + 1, y + 1)
| otherwise = ((x, y), True)
pourSand :: Grid -> [(Grid, Bool)]
pourSand grid = iterate stepFunc (grid, True)
where
floor = findFloor grid
stepFunc (g, _) = let (p, stop) = dropSand g floor (500, 0) in (S.insert p g, stop)
partA :: Grid -> Integer
partA grid = subtract 1 $ genericLength $ takeWhile snd $ pourSand grid
partB :: Grid -> Integer
partB grid = genericLength $ takeWhile (not . S.member (500,0) . fst) $ pourSand grid
tests :: IO ()
tests = do
check $ partA testGrid == 24
check $ partB testGrid == 93
where
testData = map parseLine ["498,4 -> 498,6 -> 496,6", "503,4 -> 502,4 -> 502,9 -> 494,9"]
testGrid = drawLines testData
check True = return ()
check False = throwIO $ AssertionFailed "test failed"
main :: IO ()
main = do
tests
dat <- getInput
let grid = drawLines dat
print $ partA grid
print $ partB grid
| |
369ce5e5e54e202d84650febdda14d029b48e3d539a9f19751bad0b66330dfc9 | ff-notes/ron | Main.hs | import Control.Monad (when)
import Control.Monad.Logger (MonadLogger, runFileLoggingT)
import Data.Text (Text)
import RON.Store.Sqlite (runStore)
import RON.Store.Sqlite qualified as Store
import Text.Pretty.Simple (pPrint)
import UnliftIO (MonadUnliftIO, liftIO, newTChanIO)
import Database qualified
import Fork (forkLinked)
import NetNode qualified
import Options (Command (Post, RunNode, RunUI, Show), NodeOptions (..),
Options (..), UIOptions (..), parseOptions)
import Types (Env (..), Message (..))
import UI (initUI, runUI)
main :: IO ()
main = do
Options{database, cmd, logFile} <- parseOptions
runFileLoggingT logFile do
db <- Store.newHandle database
case cmd of
Show -> Database.loadAllMessages db >>= pPrint
Post username text -> do
messageRef <-
runStore db $ Database.newMessage Message{username, text}
liftIO $ putStrLn $ "created message: " <> show messageRef
RunNode nodeOptions -> runNode db nodeOptions
RunUI UIOptions{username} nodeOptions -> do
forkLinked $ runNode db nodeOptions
runUI' username db
runUI' :: (MonadLogger m, MonadUnliftIO m) => Text -> Store.Handle -> m ()
runUI' username db = do
onMessagePosted <- newTChanIO
onMessageListUpdated <- newTChanIO
let env = Env{username, onMessagePosted, onMessageListUpdated}
uiHandle <- initUI db env
forkLinked $ Database.databaseToUIUpdater db onMessageListUpdated
forkLinked $ Database.messagePoster onMessagePosted db
runUI uiHandle
runNode ::
(MonadFail m, MonadLogger m, MonadUnliftIO m) =>
Store.Handle -> NodeOptions -> m ()
runNode db options@NodeOptions{listenPorts, peers} = do
when (null listenPorts && null peers) $
fail
"The peer must connect to other peers or listen for connections. \
\Specify `--listen` or `--peer`."
NetNode.workers db options
| null | https://raw.githubusercontent.com/ff-notes/ron/c9abcd0a871849c701111aba074596d04ac994c7/demo/chat/Main.hs | haskell | listen` or `--peer`." | import Control.Monad (when)
import Control.Monad.Logger (MonadLogger, runFileLoggingT)
import Data.Text (Text)
import RON.Store.Sqlite (runStore)
import RON.Store.Sqlite qualified as Store
import Text.Pretty.Simple (pPrint)
import UnliftIO (MonadUnliftIO, liftIO, newTChanIO)
import Database qualified
import Fork (forkLinked)
import NetNode qualified
import Options (Command (Post, RunNode, RunUI, Show), NodeOptions (..),
Options (..), UIOptions (..), parseOptions)
import Types (Env (..), Message (..))
import UI (initUI, runUI)
main :: IO ()
main = do
Options{database, cmd, logFile} <- parseOptions
runFileLoggingT logFile do
db <- Store.newHandle database
case cmd of
Show -> Database.loadAllMessages db >>= pPrint
Post username text -> do
messageRef <-
runStore db $ Database.newMessage Message{username, text}
liftIO $ putStrLn $ "created message: " <> show messageRef
RunNode nodeOptions -> runNode db nodeOptions
RunUI UIOptions{username} nodeOptions -> do
forkLinked $ runNode db nodeOptions
runUI' username db
runUI' :: (MonadLogger m, MonadUnliftIO m) => Text -> Store.Handle -> m ()
runUI' username db = do
onMessagePosted <- newTChanIO
onMessageListUpdated <- newTChanIO
let env = Env{username, onMessagePosted, onMessageListUpdated}
uiHandle <- initUI db env
forkLinked $ Database.databaseToUIUpdater db onMessageListUpdated
forkLinked $ Database.messagePoster onMessagePosted db
runUI uiHandle
runNode ::
(MonadFail m, MonadLogger m, MonadUnliftIO m) =>
Store.Handle -> NodeOptions -> m ()
runNode db options@NodeOptions{listenPorts, peers} = do
when (null listenPorts && null peers) $
fail
"The peer must connect to other peers or listen for connections. \
NetNode.workers db options
|
83de83c4c2d375dd187d033766ea6d2cfea133600e561a384321c335840ad2ea | shayan-najd/NativeMetaprogramming | CustomTypeErrors05.hs | # LANGUAGE TypeInType , TypeFamilies , UndecidableInstances #
# LANGUAGE UndecidableInstances #
The " tricky case " in # 11391
module CustomTypeErrors05 where
import Data.Kind
import GHC.TypeLits (TypeError, ErrorMessage(..))
type family Resolve (t :: Type -> Type) :: Type -> Type where
Resolve _ = TypeError (Text "ERROR")
testNOTOK2 :: Resolve [] Int
testNOTOK2 = 1
| null | https://raw.githubusercontent.com/shayan-najd/NativeMetaprogramming/24e5f85990642d3f0b0044be4327b8f52fce2ba3/testsuite/tests/typecheck/should_fail/CustomTypeErrors05.hs | haskell | # LANGUAGE TypeInType , TypeFamilies , UndecidableInstances #
# LANGUAGE UndecidableInstances #
The " tricky case " in # 11391
module CustomTypeErrors05 where
import Data.Kind
import GHC.TypeLits (TypeError, ErrorMessage(..))
type family Resolve (t :: Type -> Type) :: Type -> Type where
Resolve _ = TypeError (Text "ERROR")
testNOTOK2 :: Resolve [] Int
testNOTOK2 = 1
| |
726530409af1886c9487992045f274d76421727fd3aecb75a9629d82f0bed1e6 | gnl/ghostwheel | dev_test.cljc | Copyright ( c ) . All rights reserved .
;; The use and distribution terms for this software are covered by the
Eclipse Public License 2.0 ( -2.0/ )
;; which can be found in the file LICENSE at the root of this distribution.
;; By using this software in any fashion, you are agreeing to be bound by
;; the terms of this license.
;; You must not remove this notice, or any other, from this software.
(ns ghostwheel.dev-test
(:require [clojure.spec.alpha :as s]
[clojure.spec.test.alpha :as st]
[clojure.spec.gen.alpha :as gen]
[ghostwheel.core :as g :refer [=> | <- >defn >defn- >fdef ?]]
[ghostwheel.test-utils :as tu
:refer [process-fdef extract-fdef threading-test deftest-permutations]]
[ghostwheel.threading-macros :refer [*-> *->> *as-> *cond-> *cond->> *some-> *some->>]]
#?@(:clj [[clojure.test :as t :refer [deftest testing is]]
[orchestra.spec.test :as ost]
[ghostwheel.test-utils-clj :refer [expand]]]
:cljs [[clojure.test :as t :refer-macros [deftest testing is]]
[orchestra-cljs.spec.test :as ost]
[ghostwheel.test-utils-cljs :refer-macros [expand]]
[ghostwheel.tracer]])))
;; TODO - test fx detection
(def arity-1-fdef
'(cljs.spec.alpha/fdef arity-1
:args (cljs.spec.alpha/and (cljs.spec.alpha/cat
:arg1 (s/tuple neg-int? pos-int?)
:cd (s/tuple nat-int? nat-int?)
:vw (s/map-of keyword? pos-int?)
:arg4 (s/map-of keyword? pos-int?)
:an-atom** any?)
(fn [{[a b] :arg1,
[c d :as cd] :cd,
{:keys [v w], :as vw} :vw,
{:keys [x y]} :arg4,
an-atom** :an-atom**}]
(< a b))
(fn [{[a b] :arg1,
[c d :as cd] :cd,
{:keys [v w], :as vw} :vw,
{:keys [x y]} :arg4,
an-atom** :an-atom**}]
(> x y))
(fn [{[a b] :arg1,
[c d :as cd] :cd,
{:keys [v w], :as vw} :vw,
{:keys [x y]} :arg4,
an-atom** :an-atom**}]
(= (count cd) (count vw) 2)))
:ret int?
:fn (cljs.spec.alpha/and (fn [{{[a b] :arg1,
[c d :as cd] :cd,
{:keys [v w], :as vw} :vw,
{:keys [x y]} :arg4,
an-atom** :an-atom**} :args,
ret__1 :ret}]
(< a ret__1))
(fn [{{[a b] :arg1,
[c d :as cd] :cd,
{:keys [v w], :as vw} :vw,
{:keys [x y]} :arg4,
an-atom** :an-atom**} :args,
ret__1 :ret}]
(> ret__1 (- a x))))))
(deftest-permutations arity-1
{::tu/args-ret-mappings {[[-2 2] [0 10] {:v 10 :w 30} {:x 40 :y 10} (atom 2)] 100}
::tu/expected-fdef arity-1-fdef}
[[a b] [c d :as cd] {:keys [v w] :as vw} {:keys [x y]} an-atom**]
[(s/tuple neg-int? pos-int?)
(s/tuple nat-int? nat-int?)
(s/map-of keyword? pos-int?)
(s/map-of keyword? pos-int?)
any?
| #(< a b) #(> x y) #(= (count cd) (count vw) 2)
=> int? | #(< a %) #(> % (- a x))]
(swap! an-atom** - 2)
(let [alpha a
bravo (->> b inc dec dec inc)
up #(let [num %] (inc num))]
(let [nukyular** (atom nil)
down #(let [num %] (dec num))]
(reset! nukyular** alpha)
(apply + @an-atom** (down @nukyular**) (up bravo) v w x y cd))))
(deftest >fdef-arity-1-test
(let [fdef (process-fdef
(expand
(>fdef arity-1
[[a b] [c d :as cd] {:keys [v w] :as vw} {:keys [x y]} an-atom**]
[(s/tuple neg-int? pos-int?)
(s/tuple nat-int? nat-int?)
(s/map-of keyword? pos-int?)
(s/map-of keyword? pos-int?)
any?
| #(< a b) #(> x y) #(= (count cd) (count vw) 2)
=> int? | #(< a %) #(> % (- a x))])))]
(is (= fdef arity-1-fdef))))
(deftest >fdef-nested-fspec-test
(let [nested-fspec-fdef
'(cljs.spec.alpha/fdef nested-fspec
:args (cljs.spec.alpha/cat
:f (cljs.spec.alpha/fspec :args (cljs.spec.alpha/cat :arg1 nat-int? :arg2 string?) :ret string?)
:coll (? seqable?))
:ret seq?)
fdef
(process-fdef
(expand
(>fdef nested-fspec
[f coll]
[[nat-int? string? => string?] (? seqable?) => seq?])))]
(is (= fdef nested-fspec-fdef))))
(deftest >fdef-nested-nilable-fspec-test
(let [nested-fspec-nilable-fdef
'(cljs.spec.alpha/fdef nested-nilable-fspec
:args (cljs.spec.alpha/cat
:f (cljs.spec.alpha/nilable (cljs.spec.alpha/fspec :args (cljs.spec.alpha/cat :arg1 nat-int? :arg2 string?) :ret string?))
:coll (? seqable?))
:ret seq?)
fdef
(process-fdef
(expand
(>fdef nested-nilable-fspec
[f coll]
[[? [nat-int? string? => string?]] (? seqable?) => seq?])))]
(is (= fdef nested-fspec-nilable-fdef))))
(deftest >fdef-nested-any-fspec-test
(let [nested-any-fspec-fdef
'(cljs.spec.alpha/fdef nested-any-fspec
:args (cljs.spec.alpha/cat
:f ifn?
:coll (? seqable?))
:ret seq?)
fdef
(process-fdef
(expand
(>fdef nested-any-fspec
[f coll]
[[nat-int? any? => any?] (? seqable?) => seq?])))]
(is (= fdef nested-any-fspec-fdef))))
(deftest >fdef-keyword-test
(let [keyword-fdef
'(cljs.spec.alpha/def :ghostwheel.dev-test/fdef-keyword
(cljs.spec.alpha/fspec :args (cljs.spec.alpha/cat :a int?) :ret int?))
fdef
(process-fdef
(expand
(>fdef ::fdef-keyword
[a]
[int? => int?])))]
(is (= fdef keyword-fdef))))
(deftest >fdef-empty-arg-test
(let [empty-arg-fdef
'(cljs.spec.alpha/fdef empty-arg-fdef :args (cljs.spec.alpha/cat) :ret int?)
fdef
(process-fdef
(expand
(>fdef empty-arg-fdef
[]
[=> int?])))]
(is (= fdef empty-arg-fdef))))
(deftest arity-1-nilspec-test
(>defn arity-1-nilspec
[a]
nil
(inc a))
TODO check that no fdef is defined here
(is (= (arity-1-nilspec 3) 4)))
(def arity-n-fdef-multiret
'(cljs.spec.alpha/fdef arity-n
:args (cljs.spec.alpha/or
:arity-1 (cljs.spec.alpha/cat :a int?)
:arity-2 (cljs.spec.alpha/and
(cljs.spec.alpha/cat :a nat-int? :b pos-int?)
(fn [{:keys [a b]}] (< a b)))
:arity-n (cljs.spec.alpha/and
(cljs.spec.alpha/cat :a nat-int? :b pos-int? :c nat-int? :more (s/* int?))
(fn [{:keys [a b c more]}] (< a b))
(fn [{:keys [a b c more]}] (< a b c))))
:fn (cljs.spec.alpha/and
(fn valid-multi-arity-ret?
[p1__1]
(case (-> p1__1 :args key)
:arity-1 (cljs.spec.alpha/valid? string? (:ret p1__1))
:arity-2 (cljs.spec.alpha/valid? int? (:ret p1__1))
:arity-n (cljs.spec.alpha/valid? int? (:ret p1__1))))
(fn valid-multi-arity-fn?
[p1__1]
(case (-> p1__1 :args key)
:arity-1 true
:arity-2 (cljs.spec.alpha/valid?
(fn [{[_ {:keys [a b]}] :args, ret__1 :ret}] (> ret__1 a))
p1__1)
:arity-n (cljs.spec.alpha/valid?
(cljs.spec.alpha/and
(fn [{[_ {:keys [a b c more]}] :args, ret__1 :ret}] (> ret__1 a))
(fn [{[_ {:keys [a b c more]}] :args, ret__1 :ret}] (> ret__1 (+ a c))))
p1__1))))))
(def arity-n-fdef-uniret
'(cljs.spec.alpha/fdef arity-n
:args (cljs.spec.alpha/or
:arity-1 (cljs.spec.alpha/cat :a int?)
:arity-2 (cljs.spec.alpha/and
(cljs.spec.alpha/cat :a nat-int? :b pos-int?)
(fn [{:keys [a b]}] (< a b)))
:arity-n (cljs.spec.alpha/and
(cljs.spec.alpha/cat :a nat-int? :b pos-int? :c nat-int? :arg4 (s/* any?))
(fn [{a :a, b :b, c :c, [nukyular**] :arg4}] (< a b))
(fn [{a :a, b :b, c :c, [nukyular**] :arg4}] (< a b c))))
:ret int?
:fn (fn valid-multi-arity-fn? [p1__1]
(case (-> p1__1 :args key)
:arity-1 true
:arity-2 (cljs.spec.alpha/valid?
(fn [{[_ {:keys [a b]}] :args, ret__1 :ret}] (> ret__1 a))
p1__1)
:arity-n (cljs.spec.alpha/valid?
(cljs.spec.alpha/and
(fn [{[_ {a :a, b :b, c :c, [nukyular**] :arg4}] :args, ret__1 :ret}]
(> ret__1 a))
(fn [{[_ {a :a, b :b, c :c, [nukyular**] :arg4}] :args, ret__1 :ret}]
(> ret__1 (+ a c))))
p1__1)))))
(deftest-permutations arity-n-multiret
{::tu/args-ret-mappings {[3] "4"
[3 5] 8
[3 5 7] 15}
::tu/expected-fdef arity-n-fdef-multiret}
([a]
[int? => string?]
(str (inc a)))
([a b]
[nat-int? pos-int? | #(< a b)
=> int? | #(> % a)]
(+ a b))
([a b c & more]
[nat-int? pos-int? nat-int? (s/* int?) | #(< a b) #(< a b c)
=> int? | #(> % a) #(> % (+ a c))]
(apply + a b c more)))
(deftest-permutations arity-n-uniret
{::tu/args-ret-mappings {[3] 4
[3 5] 8
[3 5 7 (atom 3)] 15}
::tu/expected-fdef arity-n-fdef-uniret}
([a]
[int? => int?]
(inc a))
([a b]
[nat-int? pos-int? | #(< a b)
=> int? | #(> % a)]
(+ a b))
([a b c & [nukyular**]]
[nat-int? pos-int? nat-int? (s/* any?) | #(< a b) #(< a b c)
=> int? | #(> % a) #(> % (+ a c))]
(swap! nukyular** - 3)
(+ a b c @nukyular**)))
(deftest arity-n-nilspec-test
(>defn arity-n-nilspec
([a]
nil
(inc a))
([a b]
nil
(+ a b)))
TODO check that no fdef is defined here
(is (= (arity-n-nilspec 3) 4))
(is (= (arity-n-nilspec 3 5) 8)))
(deftest >fdef-arity-n-uniret-test
(let [fdef (process-fdef
(expand
(>fdef arity-n
([a]
[int? => int?]
(inc a))
([a b]
[nat-int? pos-int? | #(< a b)
=> int? | #(> % a)]
(+ a b))
([a b c & [nukyular**]]
[nat-int? pos-int? nat-int? (s/* any?) | #(< a b) #(< a b c)
=> int? | #(> % a) #(> % (+ a c))]))))]
(is (= fdef arity-n-fdef-uniret))))
(deftest >fdef-arity-n-multiret-test
(let [fdef (process-fdef
(expand
(>fdef arity-n
([a]
[int? => string?])
([a b]
[nat-int? pos-int? | #(< a b)
=> int? | #(> % a)])
([a b c & more]
[nat-int? pos-int? nat-int? (s/* int?) | #(< a b) #(< a b c)
=> int? | #(> % a) #(> % (+ a c))]))))]
(is (= fdef arity-n-fdef-multiret))))
(comment
(deftest arity-1-stub-test
(>defn arity-1-stub
[a b]
[int? string? => string?])
(is (string? (arity-1-stub 1 "abc"))))
(deftest arity-n-stub-test
(>defn arity-n-stub
([a]
[int? => int?])
([a b]
[int? string? => string?]))
(is (int? (arity-n-stub 1)))
(is (string? (arity-n-stub 1 "abc")))))
(deftest *->-test
(is (threading-test -> *->
(+ 1 2)
inc
inc
dec
(+ 2)
(/ 4))))
(deftest *->>-test
(is (threading-test ->> *->>
(+ 1 2)
inc
inc
dec
(+ 2)
(/ 4))))
(deftest *as->-test
(is (threading-test as-> *as->
(+ 1 2) x
(inc x)
(inc x)
(dec x)
(+ 2 x)
(/ x 4))))
(deftest *cond->-test
(is (threading-test cond-> *cond->
(+ 1 2)
(> 1 2) inc
(< 0 10) inc
false dec
true (+ 2)
true (/ 4))))
(deftest *cond->>-test
(is (threading-test cond->> *cond->>
(+ 1 2)
(> 1 2) inc
(< 0 10) inc
false dec
true (+ 2)
true (/ 4))))
(deftest *some->-nil-test
(is (threading-test some-> *some->
{:a 123 :b 456}
(dissoc :b)
:b
inc
inc)))
(deftest *some->-test
(is (threading-test some-> *some->
{:a 123 :b 456}
:b
inc
inc)))
(deftest *some->>-nil-test
(is (threading-test some->> *some->>
:c
(conj [:a :b])
(remove #{:b})
(some #{:b})
(conj [1 2 3]))))
(deftest *some->>-test
(is (threading-test some->> *some->>
:c
(conj [:a :b])
(some #{:b})
(conj [1 2 3]))))
| null | https://raw.githubusercontent.com/gnl/ghostwheel/a85c3510178fc4fbcb95125b86116d698e2a232a/test/ghostwheel/dev_test.cljc | clojure | The use and distribution terms for this software are covered by the
which can be found in the file LICENSE at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software.
TODO - test fx detection | Copyright ( c ) . All rights reserved .
Eclipse Public License 2.0 ( -2.0/ )
(ns ghostwheel.dev-test
(:require [clojure.spec.alpha :as s]
[clojure.spec.test.alpha :as st]
[clojure.spec.gen.alpha :as gen]
[ghostwheel.core :as g :refer [=> | <- >defn >defn- >fdef ?]]
[ghostwheel.test-utils :as tu
:refer [process-fdef extract-fdef threading-test deftest-permutations]]
[ghostwheel.threading-macros :refer [*-> *->> *as-> *cond-> *cond->> *some-> *some->>]]
#?@(:clj [[clojure.test :as t :refer [deftest testing is]]
[orchestra.spec.test :as ost]
[ghostwheel.test-utils-clj :refer [expand]]]
:cljs [[clojure.test :as t :refer-macros [deftest testing is]]
[orchestra-cljs.spec.test :as ost]
[ghostwheel.test-utils-cljs :refer-macros [expand]]
[ghostwheel.tracer]])))
(def arity-1-fdef
'(cljs.spec.alpha/fdef arity-1
:args (cljs.spec.alpha/and (cljs.spec.alpha/cat
:arg1 (s/tuple neg-int? pos-int?)
:cd (s/tuple nat-int? nat-int?)
:vw (s/map-of keyword? pos-int?)
:arg4 (s/map-of keyword? pos-int?)
:an-atom** any?)
(fn [{[a b] :arg1,
[c d :as cd] :cd,
{:keys [v w], :as vw} :vw,
{:keys [x y]} :arg4,
an-atom** :an-atom**}]
(< a b))
(fn [{[a b] :arg1,
[c d :as cd] :cd,
{:keys [v w], :as vw} :vw,
{:keys [x y]} :arg4,
an-atom** :an-atom**}]
(> x y))
(fn [{[a b] :arg1,
[c d :as cd] :cd,
{:keys [v w], :as vw} :vw,
{:keys [x y]} :arg4,
an-atom** :an-atom**}]
(= (count cd) (count vw) 2)))
:ret int?
:fn (cljs.spec.alpha/and (fn [{{[a b] :arg1,
[c d :as cd] :cd,
{:keys [v w], :as vw} :vw,
{:keys [x y]} :arg4,
an-atom** :an-atom**} :args,
ret__1 :ret}]
(< a ret__1))
(fn [{{[a b] :arg1,
[c d :as cd] :cd,
{:keys [v w], :as vw} :vw,
{:keys [x y]} :arg4,
an-atom** :an-atom**} :args,
ret__1 :ret}]
(> ret__1 (- a x))))))
(deftest-permutations arity-1
{::tu/args-ret-mappings {[[-2 2] [0 10] {:v 10 :w 30} {:x 40 :y 10} (atom 2)] 100}
::tu/expected-fdef arity-1-fdef}
[[a b] [c d :as cd] {:keys [v w] :as vw} {:keys [x y]} an-atom**]
[(s/tuple neg-int? pos-int?)
(s/tuple nat-int? nat-int?)
(s/map-of keyword? pos-int?)
(s/map-of keyword? pos-int?)
any?
| #(< a b) #(> x y) #(= (count cd) (count vw) 2)
=> int? | #(< a %) #(> % (- a x))]
(swap! an-atom** - 2)
(let [alpha a
bravo (->> b inc dec dec inc)
up #(let [num %] (inc num))]
(let [nukyular** (atom nil)
down #(let [num %] (dec num))]
(reset! nukyular** alpha)
(apply + @an-atom** (down @nukyular**) (up bravo) v w x y cd))))
(deftest >fdef-arity-1-test
(let [fdef (process-fdef
(expand
(>fdef arity-1
[[a b] [c d :as cd] {:keys [v w] :as vw} {:keys [x y]} an-atom**]
[(s/tuple neg-int? pos-int?)
(s/tuple nat-int? nat-int?)
(s/map-of keyword? pos-int?)
(s/map-of keyword? pos-int?)
any?
| #(< a b) #(> x y) #(= (count cd) (count vw) 2)
=> int? | #(< a %) #(> % (- a x))])))]
(is (= fdef arity-1-fdef))))
(deftest >fdef-nested-fspec-test
(let [nested-fspec-fdef
'(cljs.spec.alpha/fdef nested-fspec
:args (cljs.spec.alpha/cat
:f (cljs.spec.alpha/fspec :args (cljs.spec.alpha/cat :arg1 nat-int? :arg2 string?) :ret string?)
:coll (? seqable?))
:ret seq?)
fdef
(process-fdef
(expand
(>fdef nested-fspec
[f coll]
[[nat-int? string? => string?] (? seqable?) => seq?])))]
(is (= fdef nested-fspec-fdef))))
(deftest >fdef-nested-nilable-fspec-test
(let [nested-fspec-nilable-fdef
'(cljs.spec.alpha/fdef nested-nilable-fspec
:args (cljs.spec.alpha/cat
:f (cljs.spec.alpha/nilable (cljs.spec.alpha/fspec :args (cljs.spec.alpha/cat :arg1 nat-int? :arg2 string?) :ret string?))
:coll (? seqable?))
:ret seq?)
fdef
(process-fdef
(expand
(>fdef nested-nilable-fspec
[f coll]
[[? [nat-int? string? => string?]] (? seqable?) => seq?])))]
(is (= fdef nested-fspec-nilable-fdef))))
(deftest >fdef-nested-any-fspec-test
(let [nested-any-fspec-fdef
'(cljs.spec.alpha/fdef nested-any-fspec
:args (cljs.spec.alpha/cat
:f ifn?
:coll (? seqable?))
:ret seq?)
fdef
(process-fdef
(expand
(>fdef nested-any-fspec
[f coll]
[[nat-int? any? => any?] (? seqable?) => seq?])))]
(is (= fdef nested-any-fspec-fdef))))
(deftest >fdef-keyword-test
(let [keyword-fdef
'(cljs.spec.alpha/def :ghostwheel.dev-test/fdef-keyword
(cljs.spec.alpha/fspec :args (cljs.spec.alpha/cat :a int?) :ret int?))
fdef
(process-fdef
(expand
(>fdef ::fdef-keyword
[a]
[int? => int?])))]
(is (= fdef keyword-fdef))))
(deftest >fdef-empty-arg-test
(let [empty-arg-fdef
'(cljs.spec.alpha/fdef empty-arg-fdef :args (cljs.spec.alpha/cat) :ret int?)
fdef
(process-fdef
(expand
(>fdef empty-arg-fdef
[]
[=> int?])))]
(is (= fdef empty-arg-fdef))))
(deftest arity-1-nilspec-test
(>defn arity-1-nilspec
[a]
nil
(inc a))
TODO check that no fdef is defined here
(is (= (arity-1-nilspec 3) 4)))
(def arity-n-fdef-multiret
'(cljs.spec.alpha/fdef arity-n
:args (cljs.spec.alpha/or
:arity-1 (cljs.spec.alpha/cat :a int?)
:arity-2 (cljs.spec.alpha/and
(cljs.spec.alpha/cat :a nat-int? :b pos-int?)
(fn [{:keys [a b]}] (< a b)))
:arity-n (cljs.spec.alpha/and
(cljs.spec.alpha/cat :a nat-int? :b pos-int? :c nat-int? :more (s/* int?))
(fn [{:keys [a b c more]}] (< a b))
(fn [{:keys [a b c more]}] (< a b c))))
:fn (cljs.spec.alpha/and
(fn valid-multi-arity-ret?
[p1__1]
(case (-> p1__1 :args key)
:arity-1 (cljs.spec.alpha/valid? string? (:ret p1__1))
:arity-2 (cljs.spec.alpha/valid? int? (:ret p1__1))
:arity-n (cljs.spec.alpha/valid? int? (:ret p1__1))))
(fn valid-multi-arity-fn?
[p1__1]
(case (-> p1__1 :args key)
:arity-1 true
:arity-2 (cljs.spec.alpha/valid?
(fn [{[_ {:keys [a b]}] :args, ret__1 :ret}] (> ret__1 a))
p1__1)
:arity-n (cljs.spec.alpha/valid?
(cljs.spec.alpha/and
(fn [{[_ {:keys [a b c more]}] :args, ret__1 :ret}] (> ret__1 a))
(fn [{[_ {:keys [a b c more]}] :args, ret__1 :ret}] (> ret__1 (+ a c))))
p1__1))))))
(def arity-n-fdef-uniret
'(cljs.spec.alpha/fdef arity-n
:args (cljs.spec.alpha/or
:arity-1 (cljs.spec.alpha/cat :a int?)
:arity-2 (cljs.spec.alpha/and
(cljs.spec.alpha/cat :a nat-int? :b pos-int?)
(fn [{:keys [a b]}] (< a b)))
:arity-n (cljs.spec.alpha/and
(cljs.spec.alpha/cat :a nat-int? :b pos-int? :c nat-int? :arg4 (s/* any?))
(fn [{a :a, b :b, c :c, [nukyular**] :arg4}] (< a b))
(fn [{a :a, b :b, c :c, [nukyular**] :arg4}] (< a b c))))
:ret int?
:fn (fn valid-multi-arity-fn? [p1__1]
(case (-> p1__1 :args key)
:arity-1 true
:arity-2 (cljs.spec.alpha/valid?
(fn [{[_ {:keys [a b]}] :args, ret__1 :ret}] (> ret__1 a))
p1__1)
:arity-n (cljs.spec.alpha/valid?
(cljs.spec.alpha/and
(fn [{[_ {a :a, b :b, c :c, [nukyular**] :arg4}] :args, ret__1 :ret}]
(> ret__1 a))
(fn [{[_ {a :a, b :b, c :c, [nukyular**] :arg4}] :args, ret__1 :ret}]
(> ret__1 (+ a c))))
p1__1)))))
(deftest-permutations arity-n-multiret
{::tu/args-ret-mappings {[3] "4"
[3 5] 8
[3 5 7] 15}
::tu/expected-fdef arity-n-fdef-multiret}
([a]
[int? => string?]
(str (inc a)))
([a b]
[nat-int? pos-int? | #(< a b)
=> int? | #(> % a)]
(+ a b))
([a b c & more]
[nat-int? pos-int? nat-int? (s/* int?) | #(< a b) #(< a b c)
=> int? | #(> % a) #(> % (+ a c))]
(apply + a b c more)))
(deftest-permutations arity-n-uniret
{::tu/args-ret-mappings {[3] 4
[3 5] 8
[3 5 7 (atom 3)] 15}
::tu/expected-fdef arity-n-fdef-uniret}
([a]
[int? => int?]
(inc a))
([a b]
[nat-int? pos-int? | #(< a b)
=> int? | #(> % a)]
(+ a b))
([a b c & [nukyular**]]
[nat-int? pos-int? nat-int? (s/* any?) | #(< a b) #(< a b c)
=> int? | #(> % a) #(> % (+ a c))]
(swap! nukyular** - 3)
(+ a b c @nukyular**)))
(deftest arity-n-nilspec-test
(>defn arity-n-nilspec
([a]
nil
(inc a))
([a b]
nil
(+ a b)))
TODO check that no fdef is defined here
(is (= (arity-n-nilspec 3) 4))
(is (= (arity-n-nilspec 3 5) 8)))
(deftest >fdef-arity-n-uniret-test
(let [fdef (process-fdef
(expand
(>fdef arity-n
([a]
[int? => int?]
(inc a))
([a b]
[nat-int? pos-int? | #(< a b)
=> int? | #(> % a)]
(+ a b))
([a b c & [nukyular**]]
[nat-int? pos-int? nat-int? (s/* any?) | #(< a b) #(< a b c)
=> int? | #(> % a) #(> % (+ a c))]))))]
(is (= fdef arity-n-fdef-uniret))))
(deftest >fdef-arity-n-multiret-test
(let [fdef (process-fdef
(expand
(>fdef arity-n
([a]
[int? => string?])
([a b]
[nat-int? pos-int? | #(< a b)
=> int? | #(> % a)])
([a b c & more]
[nat-int? pos-int? nat-int? (s/* int?) | #(< a b) #(< a b c)
=> int? | #(> % a) #(> % (+ a c))]))))]
(is (= fdef arity-n-fdef-multiret))))
(comment
(deftest arity-1-stub-test
(>defn arity-1-stub
[a b]
[int? string? => string?])
(is (string? (arity-1-stub 1 "abc"))))
(deftest arity-n-stub-test
(>defn arity-n-stub
([a]
[int? => int?])
([a b]
[int? string? => string?]))
(is (int? (arity-n-stub 1)))
(is (string? (arity-n-stub 1 "abc")))))
(deftest *->-test
(is (threading-test -> *->
(+ 1 2)
inc
inc
dec
(+ 2)
(/ 4))))
(deftest *->>-test
(is (threading-test ->> *->>
(+ 1 2)
inc
inc
dec
(+ 2)
(/ 4))))
(deftest *as->-test
(is (threading-test as-> *as->
(+ 1 2) x
(inc x)
(inc x)
(dec x)
(+ 2 x)
(/ x 4))))
(deftest *cond->-test
(is (threading-test cond-> *cond->
(+ 1 2)
(> 1 2) inc
(< 0 10) inc
false dec
true (+ 2)
true (/ 4))))
(deftest *cond->>-test
(is (threading-test cond->> *cond->>
(+ 1 2)
(> 1 2) inc
(< 0 10) inc
false dec
true (+ 2)
true (/ 4))))
(deftest *some->-nil-test
(is (threading-test some-> *some->
{:a 123 :b 456}
(dissoc :b)
:b
inc
inc)))
(deftest *some->-test
(is (threading-test some-> *some->
{:a 123 :b 456}
:b
inc
inc)))
(deftest *some->>-nil-test
(is (threading-test some->> *some->>
:c
(conj [:a :b])
(remove #{:b})
(some #{:b})
(conj [1 2 3]))))
(deftest *some->>-test
(is (threading-test some->> *some->>
:c
(conj [:a :b])
(some #{:b})
(conj [1 2 3]))))
|
b9a17a3554904a57e0cb80f8908e94ad3a4591dd1880bb496f96f4d8bf7f92ec | fortytools/holumbus | Static.hs | -- ----------------------------------------------------------------------------
|
Module : Hayoo . Search . Pages . Static
Copyright : Copyright ( C ) 2010
License : MIT
Maintainer : ( )
Stability : experimental
Portability : portable
Version : 0.1
The main Hayoo ! template .
Module : Hayoo.Search.Pages.Static
Copyright : Copyright (C) 2010 Timo B. Huebel
License : MIT
Maintainer : Timo B. Huebel ()
Stability : experimental
Portability: portable
Version : 0.1
The main Hayoo! template.
-}
-- ----------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
module Hayoo.Search.Pages.Static (help, about, api, examples) where
import Data.Text (Text)
import Text.XHtmlCombinators
import qualified Text.XHtmlCombinators.Attributes as A
examples :: XHtml FlowContent
examples = div' [A.id_ "result"] $ do
div' [A.id_ "status"] $ text "Enter some search terms above to start a search."
div' [A.id_ "words"] $ text " "
div' [A.id_ "documents"] $ div' [A.id_ "examples"] $ do
text "Hayoo! will search all packages from "
a' [A.href ""] $ text "Hackage"
text ", including all function and type definitions. Here are some example queries:"
div' [A.class_ "example"] $ p $ do
a' [A.attr "onclick" "replaceInQuery('','map'); return false;", A.href "hayoo.html?query=map&start=0"] $ text "map"
text " searches for everything that contains a word starting with \"map\" (case insensitive) in the function name, module name or description."
div' [A.class_ "example"] $ p $ do
a' [A.attr "onclick" "replaceInQuery('','name:map'); return false;", A.href "hayoo.html?query=name%3Amap&start=0"] $ text "name:map"
text " searches for everything where the function name starts with \"map\" (case insensitive)."
div' [A.class_ "example"] $ p $ do
a' [A.attr "onclick" "replaceInQuery('','map OR fold'); return false;", A.href "hayoo.html?query=map%20OR%20fold&start=0"] $ text "map OR fold"
text " searches for everything that contains a word starting with \"map\" or \"fold\" (case insensitive) in the function name, module name or description."
div' [A.class_ "example"] $ p $ do
a' [A.attr "onclick" "replaceInQuery('','map package:containers'); return false;", A.href "hayoo.html?query=map%20package%3Acontainers&start=0"] $ text "map package:containers"
text " searches for everything from package \"containers\" that contains a word starting with \"map\" (case insensitive) in the function name, module name or description."
div' [A.class_ "example"] $ p $ do
a' [A.attr "onclick" "replaceInQuery('','map hierarchy:Lazy'); return false;", A.href "hayoo.html?query=map%20hierarchy%3ALazy&start=0"] $ text "map hierarchy:Lazy"
text " searches for everything where \"Lazy\" appears somewhere in the full qualified module name \
\and that contains a word starting with \"map\" (case insensitive) in the function name, module name or description."
div' [A.class_ "example"] $ p $ do
a' [A.attr "onclick" "replaceInQuery('','(map OR fold) module:Data.Map'); return false;", A.href "hayoo.html?query=(map%20OR%20fold)%20module%3AData.Map&start=0"] $ text "(map OR fold) module:Data.Map"
text " searches for everything from module \"Data.Map\" that contains a word starting with \"map\" or \"fold\" (case insensitive) in the function name, module name or description."
div' [A.class_ "example"] $ p $ do
a' [A.attr "onclick" "replaceInQuery('','name:attr module:Text.XML'); return false;", A.href "hayoo.html?query=name%3Aattr%20module%3AText.XML&start=0"] $ text "name:attr module:Text.XML"
text " searches for everything from the whole module hierarchy \"Text.XML\" where the function name starts with \"attr\" (case insensitive)."
help :: XHtml FlowContent
help = div' [A.id_ "result"] $ do
div' [A.id_ "status"] $ text "Enter some search terms above to start a search."
div' [A.id_ "helptext", A.class_ "text"] $ do
h2 $ text "Basic Usage"
p $ do
text "By default, Hayoo! searches for function names, module names, signatures and function \
\descriptions. With every letter typed, Hayoo! will show the results it thinks are best matching \
\the query as well as some suggestions on how the words from the query could be completed. \
\Clicking one of these suggestions will replace the according word in the query."
p $ do
text "Hayoo! displays results as a list of functions, including full qualified module name and the \
\function signature. Clicking the function name will lead directly to the corresponding documentation \
\while clicking the module name will lead to the documentation of the module. Additionally, Hayoo! \
\shows the function description (if available) and provides a link leading directly to the source \
\of the function (if available). The description of the function can be expanded by clicking on \
\the small '+' sign."
p $ do
text "Along with the results, Hayoo! shows two lists on the right, containing the top fifteen \
\root-modules and packages. These are aggregated from the actual results. Clicking on each of \
\these will further restrict the current query to the respective module hierarchy or package. \
\On the left side, package search results are shown if the query matches the package information."
h2 $ text "Advanced Queries"
p $ do
text "If words are seperated by whitespace, Hayoo! will search for results containing both words. \
\Instead of using whitespace, the explicit "
span' [A.class_ "query"] $ text "AND"
text " operator can be used. Hayoo! also supports "
span' [A.class_ "query"] $ text "OR"
text " and "
span' [A.class_ "query"] $ text "NOT"
text " operators, although the "
span' [A.class_ "query"] $ text "NOT"
text " operator may only be used together with "
span' [A.class_ "query"] $ text "AND"
text ", e.g. "
span' [A.class_ "query"] $ text "map NOT fold"
text " or "
span' [A.class_ "query"] $ text "map AND NOT fold"
text ". Operator precedence can be influenced using round parentheses. Phrases can be searched \
\using double quotes, e.g. "
span' [A.class_ "query"] $ text "\"this is a phrase\""
text "."
p $ do
text "It is possible to restrict a search to certain packages or modules. The most simple way would \
\be to just include the package name in the search, e.g. "
span' [A.class_ "query"] $ text "map base"
text " will prefer hits from the base package. But the restriction can also be more explicit, like "
span' [A.class_ "query"] $ text "map package:base"
text " or like "
span' [A.class_ "query"] $ text "map module:data.list"
text ". It is also possible to specify several different modules or packages, like this: "
span' [A.class_ "query"] $ text "fold module:(data.list OR data.map)"
text ". This will return all hits for fold in the module hierarchies below Data.List and Data.Map."
p $ do
text "Hayoo! always performs fuzzy queries. This means, it tries to find something even if the \
\query contains spelling errors. For example, Hayoo! will still find \"fold\" if \"fodl\" is \
\being searched. If Hayoo! detects \">\" in the query string, it will only search for signatures. \
\A signature query may consist of explicit type names as well as type variables. For example, \
\searching for \"a > b\" will find signatures like \"Int > Bool\"."
h2 $ text "Scope"
p $ do
text "Currently, Hayoo! searches all packages available on "
a' [A.href ""] $ text "Hackage"
text ". Additionally, any Haskell documentation generated by Haddock can be included in Hayoo!. \
\Just send a message including an URI where the documentation can be found to "
a' [A.href "mailto:"] $ text ""
text "."
about :: XHtml FlowContent
about = div' [A.id_ "result"] $ do
div' [A.id_ "status"] $ text "Enter some search terms above to start a search."
div' [A.id_ "abouttext", A.class_ "text"] $ do
h2 $ text "About Hayoo!"
p $ do
text "Hayoo! is a search engine specialized on "
a' [A.href ""] $ text "Haskell"
text " API documentation. The goal of Hayoo! is to provide an interactive, easy-to-use search interface to \
\the documenation of various Haskell packages and libraries. Although the Hayoo! data is regularly updated, \
\we might miss a package or library. If you think there is some documentation for Haskell modules available \
\on the Internet which should be added to Hayoo!, just drop us a note at "
a' [A.href "mailto:"] $ text ""
text " and tell us the location where we can find the documentation."
h2 $ text "Background"
p $ do
text "Hayoo! is an example application of the "
a' [A.href "-wedel.de"] $ text "Holumbus"
text " framework and was heavily inspired by "
a' [A.href ""] $ text "Hoogle"
text ". The Holumbus library provides the search and indexing backend for Hayoo!. Holumbus and Hayoo! \
\have been developed by Sebastian M. Gauck and Timo B. Hübel at "
a' [A.href "-wedel.de"] $ text "FH Wedel University of Applied Sciences"
text ". The Holumbus framework provides the basic building blocks for creating highly customizable search \
\engines. To demonstrate the flexibility of the framework by a very special use case, the Hayoo! Haskell \
\API search was implemented using Holumbus."
p $ do
text "Currently, Hayoo! is still in beta stage. This means, it can become unavailable unexpectedly, as \
\we do some maintenance or add new features. Therefore you should not yet rely on Hayoo! as primary \
\ search engine for Haskell documentation."
p $ do
text "Hardware infrastructure for daily index updates is generously sponsored by "
a' [A.href ""] $ text "fortytools gmbh"
text ", your friendly Haskell web development company."
h2 $ text "Technical Information"
p $ do
text "Hayoo! is written entirely in Haskell and consists of two main parts: The indexer, which regularly \
\checks Hackage for package updates and builds the search index and the web frontend, which relies on \
\Apache, FastCGI and Hack for presenting search results to the user."
h2 $ text "Feedback"
p $ do
text "We would like to know what you think about Hayoo!, therefore you can reach us at "
a' [A.href "mailto:"] $ text ""
text " and tell us about bugs, suggestions or anything else related to Hayoo!."
div' [A.id_ "sponsors"] $ do
div' [A.id_ "hol"] $ do
Change here when img bug in - combinators is fixed
div' [A.id_ "ft"] $ do
a' [A.href ""] $ img' "" "" [A.src "hayoo/ft.png", A.alt "fortytools logo", A.class_ "logo"]
div' [A.id_ "fhw"] $ do
a' [A.href "-wedel.de"] $ img' "" "" [A.src "hayoo/fhw.gif", A.alt "FH-Wedel logo", A.class_ "logo"]
api :: XHtml FlowContent
api = div' [A.id_ "result"] $ do
div' [A.id_ "status"] $ text "Enter some search terms above to start a search."
div' [A.id_ "helptext", A.class_ "text"] $ do
h2 $ text "Hayoo! API"
p $ do
text "Hayoo! provides a JSON-based webservice API, which can be used to retrieve search results in a structured \
\format. This allows one to include Hayoo! search functionality in other applications. Arbitrary queries \
\can be submitted the same way as they would be entered them into the search box and results are returned \
\encoded in JSON format."
p $ do
text "You may use this service for whatever you like and without any limitations, although we would be \
\very happy to know about any application that uses the Hayoo! webservice API. Just drop us a line at"
a' [A.href "mailto:"] $ text ""
text "."
h2 $ text "Request URI"
p $ text "Direct your search request to the following URI:"
pre $ text "-wedel.de/hayoo/hayoo.json?query=YOUR_QUERY"
p $ do
text "Provide your query as argument to the "
code $ text "query"
text " URI parameter. Please note that you need to ensure proper URI encoding for the query argument. The syntax \
\for the query is the same as if it would be entered into the search box. A detailed explanation of the \
\syntax can be found "
a' [A.href "help.html"] $ text "here"
text "."
h2 $ text "Response"
p $ do
text "The response to a search request will be encoded in "
a' [A.href ""] $ text "JSON"
text " format and is structured as follows:"
pre $ do
code $ text "{\n\
\ \"message\":\"Found 12 results and 17 completions.\",\n\
\ \"hits\":12,\n\
\ \"functions\":[ {\n\
\ \"name\":\"map\",\n\
\ \"uri\":\"/...\",\n\
\ \"module\":\"Data.Map\",\n\
\ \"signature\":\"(a->b)->[a]->[b]\",\n\
\ \"package\":\"containers\"\n\
\ }, ... ],\n\
\ \"completions\":[ {\n\
\ \"word\":\"MapM\",\n\
\ \"count\":11\n\
\ }, ... ],\n\
\ \"modules\":[ {\n\
\ \"name\":\"Data\",\n\
\ \"count\":19\n\
\ } }, ... ],\n\
\ \"packages\":[ {\n\
\ \"name\":\"containers\",\n\
\ \"count\":13\n\
\ }, ... ]\n\
\}"
p $ do
(text "The ") >> (ct "message") >> (text " field will contain a descriptive status message about the result \
\or any errors encountered. The ") >> (ct "hits") >> (text " field will contain the total number of \
\functions found. In the ") >> (ct "functions") >> (text " field, an array containing all functions found \
\will be returned. For every function, a JSON object is included in the array.")
p $ do
(text "Each of these objects contains the function name, the URI pointing to the Haddock documentation, the module, \
\the signature and the package name in the ") >> (ct "name") >> (text ", ") >> (ct "uri") >> (text ", ")
>> (ct "module") >> (text ", ") >> (ct "signature") >> (text" and ") >> (ct "package") >> (text " fields, respectively.")
p $ do
(text "The ") >> (ct "completions") >> (text " contains all word completions (suggestions) resulting from the query \
\For every completion, a JSON object is included in the array, containing the suggested word and the total number \
\of occurrences of this word in the search result in the ") >> (ct "word") >> (text " and ") >> (ct "count")
>> (text " fields.")
p $ do
(text "The ") >> (ct "modules") >> (text " and ") >> (ct "packages") >> (text " fields contain arrays with JSON objects \
\denoting the occurrences of root modules and packages in the search result. For each element, the module/package \
\name is included in the ") >> (ct "name") >> (text " field and the number of occurrences in the ") >> (ct "count")
>> (text " field.")
ct :: (Functor t, Monad t, Inline c) => Text -> XHtmlT t c
ct = code . text
| null | https://raw.githubusercontent.com/fortytools/holumbus/4b2f7b832feab2715a4d48be0b07dca018eaa8e8/crawl2/examples/hayoo/Hayoo/Search/Pages/Static.hs | haskell | ----------------------------------------------------------------------------
----------------------------------------------------------------------------
# LANGUAGE OverloadedStrings # |
|
Module : Hayoo . Search . Pages . Static
Copyright : Copyright ( C ) 2010
License : MIT
Maintainer : ( )
Stability : experimental
Portability : portable
Version : 0.1
The main Hayoo ! template .
Module : Hayoo.Search.Pages.Static
Copyright : Copyright (C) 2010 Timo B. Huebel
License : MIT
Maintainer : Timo B. Huebel ()
Stability : experimental
Portability: portable
Version : 0.1
The main Hayoo! template.
-}
module Hayoo.Search.Pages.Static (help, about, api, examples) where
import Data.Text (Text)
import Text.XHtmlCombinators
import qualified Text.XHtmlCombinators.Attributes as A
examples :: XHtml FlowContent
examples = div' [A.id_ "result"] $ do
div' [A.id_ "status"] $ text "Enter some search terms above to start a search."
div' [A.id_ "words"] $ text " "
div' [A.id_ "documents"] $ div' [A.id_ "examples"] $ do
text "Hayoo! will search all packages from "
a' [A.href ""] $ text "Hackage"
text ", including all function and type definitions. Here are some example queries:"
div' [A.class_ "example"] $ p $ do
a' [A.attr "onclick" "replaceInQuery('','map'); return false;", A.href "hayoo.html?query=map&start=0"] $ text "map"
text " searches for everything that contains a word starting with \"map\" (case insensitive) in the function name, module name or description."
div' [A.class_ "example"] $ p $ do
a' [A.attr "onclick" "replaceInQuery('','name:map'); return false;", A.href "hayoo.html?query=name%3Amap&start=0"] $ text "name:map"
text " searches for everything where the function name starts with \"map\" (case insensitive)."
div' [A.class_ "example"] $ p $ do
a' [A.attr "onclick" "replaceInQuery('','map OR fold'); return false;", A.href "hayoo.html?query=map%20OR%20fold&start=0"] $ text "map OR fold"
text " searches for everything that contains a word starting with \"map\" or \"fold\" (case insensitive) in the function name, module name or description."
div' [A.class_ "example"] $ p $ do
a' [A.attr "onclick" "replaceInQuery('','map package:containers'); return false;", A.href "hayoo.html?query=map%20package%3Acontainers&start=0"] $ text "map package:containers"
text " searches for everything from package \"containers\" that contains a word starting with \"map\" (case insensitive) in the function name, module name or description."
div' [A.class_ "example"] $ p $ do
a' [A.attr "onclick" "replaceInQuery('','map hierarchy:Lazy'); return false;", A.href "hayoo.html?query=map%20hierarchy%3ALazy&start=0"] $ text "map hierarchy:Lazy"
text " searches for everything where \"Lazy\" appears somewhere in the full qualified module name \
\and that contains a word starting with \"map\" (case insensitive) in the function name, module name or description."
div' [A.class_ "example"] $ p $ do
a' [A.attr "onclick" "replaceInQuery('','(map OR fold) module:Data.Map'); return false;", A.href "hayoo.html?query=(map%20OR%20fold)%20module%3AData.Map&start=0"] $ text "(map OR fold) module:Data.Map"
text " searches for everything from module \"Data.Map\" that contains a word starting with \"map\" or \"fold\" (case insensitive) in the function name, module name or description."
div' [A.class_ "example"] $ p $ do
a' [A.attr "onclick" "replaceInQuery('','name:attr module:Text.XML'); return false;", A.href "hayoo.html?query=name%3Aattr%20module%3AText.XML&start=0"] $ text "name:attr module:Text.XML"
text " searches for everything from the whole module hierarchy \"Text.XML\" where the function name starts with \"attr\" (case insensitive)."
help :: XHtml FlowContent
help = div' [A.id_ "result"] $ do
div' [A.id_ "status"] $ text "Enter some search terms above to start a search."
div' [A.id_ "helptext", A.class_ "text"] $ do
h2 $ text "Basic Usage"
p $ do
text "By default, Hayoo! searches for function names, module names, signatures and function \
\descriptions. With every letter typed, Hayoo! will show the results it thinks are best matching \
\the query as well as some suggestions on how the words from the query could be completed. \
\Clicking one of these suggestions will replace the according word in the query."
p $ do
text "Hayoo! displays results as a list of functions, including full qualified module name and the \
\function signature. Clicking the function name will lead directly to the corresponding documentation \
\while clicking the module name will lead to the documentation of the module. Additionally, Hayoo! \
\shows the function description (if available) and provides a link leading directly to the source \
\of the function (if available). The description of the function can be expanded by clicking on \
\the small '+' sign."
p $ do
text "Along with the results, Hayoo! shows two lists on the right, containing the top fifteen \
\root-modules and packages. These are aggregated from the actual results. Clicking on each of \
\these will further restrict the current query to the respective module hierarchy or package. \
\On the left side, package search results are shown if the query matches the package information."
h2 $ text "Advanced Queries"
p $ do
text "If words are seperated by whitespace, Hayoo! will search for results containing both words. \
\Instead of using whitespace, the explicit "
span' [A.class_ "query"] $ text "AND"
text " operator can be used. Hayoo! also supports "
span' [A.class_ "query"] $ text "OR"
text " and "
span' [A.class_ "query"] $ text "NOT"
text " operators, although the "
span' [A.class_ "query"] $ text "NOT"
text " operator may only be used together with "
span' [A.class_ "query"] $ text "AND"
text ", e.g. "
span' [A.class_ "query"] $ text "map NOT fold"
text " or "
span' [A.class_ "query"] $ text "map AND NOT fold"
text ". Operator precedence can be influenced using round parentheses. Phrases can be searched \
\using double quotes, e.g. "
span' [A.class_ "query"] $ text "\"this is a phrase\""
text "."
p $ do
text "It is possible to restrict a search to certain packages or modules. The most simple way would \
\be to just include the package name in the search, e.g. "
span' [A.class_ "query"] $ text "map base"
text " will prefer hits from the base package. But the restriction can also be more explicit, like "
span' [A.class_ "query"] $ text "map package:base"
text " or like "
span' [A.class_ "query"] $ text "map module:data.list"
text ". It is also possible to specify several different modules or packages, like this: "
span' [A.class_ "query"] $ text "fold module:(data.list OR data.map)"
text ". This will return all hits for fold in the module hierarchies below Data.List and Data.Map."
p $ do
text "Hayoo! always performs fuzzy queries. This means, it tries to find something even if the \
\query contains spelling errors. For example, Hayoo! will still find \"fold\" if \"fodl\" is \
\being searched. If Hayoo! detects \">\" in the query string, it will only search for signatures. \
\A signature query may consist of explicit type names as well as type variables. For example, \
\searching for \"a > b\" will find signatures like \"Int > Bool\"."
h2 $ text "Scope"
p $ do
text "Currently, Hayoo! searches all packages available on "
a' [A.href ""] $ text "Hackage"
text ". Additionally, any Haskell documentation generated by Haddock can be included in Hayoo!. \
\Just send a message including an URI where the documentation can be found to "
a' [A.href "mailto:"] $ text ""
text "."
about :: XHtml FlowContent
about = div' [A.id_ "result"] $ do
div' [A.id_ "status"] $ text "Enter some search terms above to start a search."
div' [A.id_ "abouttext", A.class_ "text"] $ do
h2 $ text "About Hayoo!"
p $ do
text "Hayoo! is a search engine specialized on "
a' [A.href ""] $ text "Haskell"
text " API documentation. The goal of Hayoo! is to provide an interactive, easy-to-use search interface to \
\the documenation of various Haskell packages and libraries. Although the Hayoo! data is regularly updated, \
\we might miss a package or library. If you think there is some documentation for Haskell modules available \
\on the Internet which should be added to Hayoo!, just drop us a note at "
a' [A.href "mailto:"] $ text ""
text " and tell us the location where we can find the documentation."
h2 $ text "Background"
p $ do
text "Hayoo! is an example application of the "
a' [A.href "-wedel.de"] $ text "Holumbus"
text " framework and was heavily inspired by "
a' [A.href ""] $ text "Hoogle"
text ". The Holumbus library provides the search and indexing backend for Hayoo!. Holumbus and Hayoo! \
\have been developed by Sebastian M. Gauck and Timo B. Hübel at "
a' [A.href "-wedel.de"] $ text "FH Wedel University of Applied Sciences"
text ". The Holumbus framework provides the basic building blocks for creating highly customizable search \
\engines. To demonstrate the flexibility of the framework by a very special use case, the Hayoo! Haskell \
\API search was implemented using Holumbus."
p $ do
text "Currently, Hayoo! is still in beta stage. This means, it can become unavailable unexpectedly, as \
\we do some maintenance or add new features. Therefore you should not yet rely on Hayoo! as primary \
\ search engine for Haskell documentation."
p $ do
text "Hardware infrastructure for daily index updates is generously sponsored by "
a' [A.href ""] $ text "fortytools gmbh"
text ", your friendly Haskell web development company."
h2 $ text "Technical Information"
p $ do
text "Hayoo! is written entirely in Haskell and consists of two main parts: The indexer, which regularly \
\checks Hackage for package updates and builds the search index and the web frontend, which relies on \
\Apache, FastCGI and Hack for presenting search results to the user."
h2 $ text "Feedback"
p $ do
text "We would like to know what you think about Hayoo!, therefore you can reach us at "
a' [A.href "mailto:"] $ text ""
text " and tell us about bugs, suggestions or anything else related to Hayoo!."
div' [A.id_ "sponsors"] $ do
div' [A.id_ "hol"] $ do
Change here when img bug in - combinators is fixed
div' [A.id_ "ft"] $ do
a' [A.href ""] $ img' "" "" [A.src "hayoo/ft.png", A.alt "fortytools logo", A.class_ "logo"]
div' [A.id_ "fhw"] $ do
a' [A.href "-wedel.de"] $ img' "" "" [A.src "hayoo/fhw.gif", A.alt "FH-Wedel logo", A.class_ "logo"]
api :: XHtml FlowContent
api = div' [A.id_ "result"] $ do
div' [A.id_ "status"] $ text "Enter some search terms above to start a search."
div' [A.id_ "helptext", A.class_ "text"] $ do
h2 $ text "Hayoo! API"
p $ do
text "Hayoo! provides a JSON-based webservice API, which can be used to retrieve search results in a structured \
\format. This allows one to include Hayoo! search functionality in other applications. Arbitrary queries \
\can be submitted the same way as they would be entered them into the search box and results are returned \
\encoded in JSON format."
p $ do
text "You may use this service for whatever you like and without any limitations, although we would be \
\very happy to know about any application that uses the Hayoo! webservice API. Just drop us a line at"
a' [A.href "mailto:"] $ text ""
text "."
h2 $ text "Request URI"
p $ text "Direct your search request to the following URI:"
pre $ text "-wedel.de/hayoo/hayoo.json?query=YOUR_QUERY"
p $ do
text "Provide your query as argument to the "
code $ text "query"
text " URI parameter. Please note that you need to ensure proper URI encoding for the query argument. The syntax \
\for the query is the same as if it would be entered into the search box. A detailed explanation of the \
\syntax can be found "
a' [A.href "help.html"] $ text "here"
text "."
h2 $ text "Response"
p $ do
text "The response to a search request will be encoded in "
a' [A.href ""] $ text "JSON"
text " format and is structured as follows:"
pre $ do
code $ text "{\n\
\ \"message\":\"Found 12 results and 17 completions.\",\n\
\ \"hits\":12,\n\
\ \"functions\":[ {\n\
\ \"name\":\"map\",\n\
\ \"uri\":\"/...\",\n\
\ \"module\":\"Data.Map\",\n\
\ \"signature\":\"(a->b)->[a]->[b]\",\n\
\ \"package\":\"containers\"\n\
\ }, ... ],\n\
\ \"completions\":[ {\n\
\ \"word\":\"MapM\",\n\
\ \"count\":11\n\
\ }, ... ],\n\
\ \"modules\":[ {\n\
\ \"name\":\"Data\",\n\
\ \"count\":19\n\
\ } }, ... ],\n\
\ \"packages\":[ {\n\
\ \"name\":\"containers\",\n\
\ \"count\":13\n\
\ }, ... ]\n\
\}"
p $ do
(text "The ") >> (ct "message") >> (text " field will contain a descriptive status message about the result \
\or any errors encountered. The ") >> (ct "hits") >> (text " field will contain the total number of \
\functions found. In the ") >> (ct "functions") >> (text " field, an array containing all functions found \
\will be returned. For every function, a JSON object is included in the array.")
p $ do
(text "Each of these objects contains the function name, the URI pointing to the Haddock documentation, the module, \
\the signature and the package name in the ") >> (ct "name") >> (text ", ") >> (ct "uri") >> (text ", ")
>> (ct "module") >> (text ", ") >> (ct "signature") >> (text" and ") >> (ct "package") >> (text " fields, respectively.")
p $ do
(text "The ") >> (ct "completions") >> (text " contains all word completions (suggestions) resulting from the query \
\For every completion, a JSON object is included in the array, containing the suggested word and the total number \
\of occurrences of this word in the search result in the ") >> (ct "word") >> (text " and ") >> (ct "count")
>> (text " fields.")
p $ do
(text "The ") >> (ct "modules") >> (text " and ") >> (ct "packages") >> (text " fields contain arrays with JSON objects \
\denoting the occurrences of root modules and packages in the search result. For each element, the module/package \
\name is included in the ") >> (ct "name") >> (text " field and the number of occurrences in the ") >> (ct "count")
>> (text " field.")
ct :: (Functor t, Monad t, Inline c) => Text -> XHtmlT t c
ct = code . text
|
f6f442edd0a0d114289964c246dfb3cf77610a61f8fbb372a1829eb1eb662588 | rauschma/reasonml-demo-iterators | genMList.mli |
(* This file is free software, part of gen. See file "license" for more details. *)
* { 1 Efficient Mutable Lists }
Unrolled lists , append - only , used for storing the content of a generator .
Example :
{ [
let g = 1 -- 1000 ; ;
: int t = < fun >
let c = g | > MList.of_gen_lazy | > MList.to_clonable ; ;
c : int clonable = < obj >
c#next | > take 500 | > to_list ; ;
- : int list = [ 1 ; 2 ; 3 ; ..... ; 500 ]
let c ' = c#clone ; ;
c ' : int clonable = < obj >
c | > to_list ; ;
- : int list = [ 501 ; 502 ; .... ; 1000 ]
c'#gen | > to_list ; ; ( * c consumed , but not c '
Unrolled lists, append-only, used for storing the content of a generator.
Example:
{[
let g = 1 -- 1000 ;;
val g : int t = <fun>
let c = g |> MList.of_gen_lazy |> MList.to_clonable;;
val c : int clonable = <obj>
c#next |> take 500 |> to_list;;
- : int list = [1; 2; 3; .....; 500]
let c' = c#clone ;;
val c' : int clonable = <obj>
c |> to_list;;
- : int list = [501; 502; ....; 1000]
c'#gen |> to_list;; (* c consumed, but not c' *)
- : int list = [501; 502; ....; 1000]
c#gen |> to_list;;
- : int list = []
]}
@since 0.2.3 *)
type 'a gen = unit -> 'a option
type 'a clonable = <
gen : 'a gen; (** Generator of values tied to this copy *)
clone : 'a clonable; (** Clone the internal state *)
>
type 'a t
(** An internal append-only storage of elements of type 'a, produced from
a generator *)
val of_gen : 'a gen -> 'a t
* [ of_gen g ] consumes [ g ] to build a mlist
val of_gen_lazy : ?max_chunk_size:int -> ?caching:bool -> 'a gen -> 'a t
* [ ] makes a mlist that will read from [ g ] as required ,
until [ g ] is exhausted . Do not use [ g ] directly after this , or
some elements will be absent from the mlist !
@param caching if true or absent , values are read from the generator
by chunks of increasing size . If false , values are read one by one .
@param max_chunk_size if provided and [ caching = true ] ,
sets the ( maximal ) size of the internal chunks
until [g] is exhausted. Do not use [g] directly after this, or
some elements will be absent from the mlist!
@param caching if true or absent, values are read from the generator
by chunks of increasing size. If false, values are read one by one.
@param max_chunk_size if provided and [caching = true],
sets the (maximal) size of the internal chunks *)
val to_gen : 'a t -> 'a gen
* Iterate on the mlist . This function can be called many times without
any problem , the mlist is n't consumable !
any problem, the mlist isn't consumable! *)
val to_clonable : 'a t -> 'a clonable | null | https://raw.githubusercontent.com/rauschma/reasonml-demo-iterators/d69ff758cb0f159814d60ad76157dffba3e14634/src/lib/gen/genMList.mli | ocaml | This file is free software, part of gen. See file "license" for more details.
c consumed, but not c'
* Generator of values tied to this copy
* Clone the internal state
* An internal append-only storage of elements of type 'a, produced from
a generator |
* { 1 Efficient Mutable Lists }
Unrolled lists , append - only , used for storing the content of a generator .
Example :
{ [
let g = 1 -- 1000 ; ;
: int t = < fun >
let c = g | > MList.of_gen_lazy | > MList.to_clonable ; ;
c : int clonable = < obj >
c#next | > take 500 | > to_list ; ;
- : int list = [ 1 ; 2 ; 3 ; ..... ; 500 ]
let c ' = c#clone ; ;
c ' : int clonable = < obj >
c | > to_list ; ;
- : int list = [ 501 ; 502 ; .... ; 1000 ]
c'#gen | > to_list ; ; ( * c consumed , but not c '
Unrolled lists, append-only, used for storing the content of a generator.
Example:
{[
let g = 1 -- 1000 ;;
val g : int t = <fun>
let c = g |> MList.of_gen_lazy |> MList.to_clonable;;
val c : int clonable = <obj>
c#next |> take 500 |> to_list;;
- : int list = [1; 2; 3; .....; 500]
let c' = c#clone ;;
val c' : int clonable = <obj>
c |> to_list;;
- : int list = [501; 502; ....; 1000]
- : int list = [501; 502; ....; 1000]
c#gen |> to_list;;
- : int list = []
]}
@since 0.2.3 *)
type 'a gen = unit -> 'a option
type 'a clonable = <
>
type 'a t
val of_gen : 'a gen -> 'a t
* [ of_gen g ] consumes [ g ] to build a mlist
val of_gen_lazy : ?max_chunk_size:int -> ?caching:bool -> 'a gen -> 'a t
* [ ] makes a mlist that will read from [ g ] as required ,
until [ g ] is exhausted . Do not use [ g ] directly after this , or
some elements will be absent from the mlist !
@param caching if true or absent , values are read from the generator
by chunks of increasing size . If false , values are read one by one .
@param max_chunk_size if provided and [ caching = true ] ,
sets the ( maximal ) size of the internal chunks
until [g] is exhausted. Do not use [g] directly after this, or
some elements will be absent from the mlist!
@param caching if true or absent, values are read from the generator
by chunks of increasing size. If false, values are read one by one.
@param max_chunk_size if provided and [caching = true],
sets the (maximal) size of the internal chunks *)
val to_gen : 'a t -> 'a gen
* Iterate on the mlist . This function can be called many times without
any problem , the mlist is n't consumable !
any problem, the mlist isn't consumable! *)
val to_clonable : 'a t -> 'a clonable |
e071372c32a361d35840a2ab85bb415a921d3c5b767f773d3723b317f802835b | ocamllabs/ocaml-modular-implicits | t021-pushconst3.ml | let _ = () in 3;;
*
0
1 PUSHCONST3
2 POP 1
4 ATOM0
5 SETGLOBAL T021 - pushconst3
7 STOP
*
0 CONST0
1 PUSHCONST3
2 POP 1
4 ATOM0
5 SETGLOBAL T021-pushconst3
7 STOP
**)
| null | https://raw.githubusercontent.com/ocamllabs/ocaml-modular-implicits/92e45da5c8a4c2db8b2cd5be28a5bec2ac2181f1/testsuite/tests/tool-ocaml/t021-pushconst3.ml | ocaml | let _ = () in 3;;
*
0
1 PUSHCONST3
2 POP 1
4 ATOM0
5 SETGLOBAL T021 - pushconst3
7 STOP
*
0 CONST0
1 PUSHCONST3
2 POP 1
4 ATOM0
5 SETGLOBAL T021-pushconst3
7 STOP
**)
| |
8dfc6d4b46be72b3f9dac568d7a8115f9b8f32712782dfd59e09bd6d98cf82b0 | bomberstudios/mtasc | IO.mli |
* IO - Abstract input / output
* Copyright ( C ) 2003
*
* This library is free software ; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation ; either
* version 2.1 of the License , or ( at your option ) any later version ,
* with the special exception on linking described in file LICENSE .
*
* This library is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
* Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
* IO - Abstract input/output
* Copyright (C) 2003 Nicolas Cannasse
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version,
* with the special exception on linking described in file LICENSE.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
* High - order abstract I / O.
IO module simply deals with abstract inputs / outputs . It provides a
set of methods for working with these IO as well as several
constructors that enable to write to an underlying channel , buffer ,
or enum .
IO module simply deals with abstract inputs/outputs. It provides a
set of methods for working with these IO as well as several
constructors that enable to write to an underlying channel, buffer,
or enum.
*)
type input
(** The abstract input type. *)
type 'a output
(** The abstract output type, ['a] is the accumulator data, it is returned
when the [close_out] function is called. *)
exception No_more_input
* This exception is raised when reading on an input with the [ read ] or
[ nread ] functions while there is no available token to read .
[nread] functions while there is no available token to read. *)
exception Input_closed
(** This exception is raised when reading on a closed input. *)
exception Output_closed
(** This exception is raised when reading on a closed output. *)
* { 6 Standard API }
val read : input -> char
(** Read a single char from an input or raise [No_more_input] if
no input available. *)
val nread : input -> int -> string
(** [nread i n] reads a string of size up to [n] from an input.
The function will raise [No_more_input] if no input is available.
It will raise [Invalid_argument] if [n] < 0. *)
val really_nread : input -> int -> string
(** [really_nread i n] reads a string of exactly [n] characters
from the input. Raises [No_more_input] if at least [n] characters are
not available. Raises [Invalid_argument] if [n] < 0. *)
val input : input -> string -> int -> int -> int
(** [input i s p l] reads up to [l] characters from the given input, storing
them in string [s], starting at character number [p]. It returns the actual
number of characters read or raise [No_more_input] if no character can be
read. It will raise [Invalid_argument] if [p] and [l] do not designate a
valid substring of [s]. *)
val really_input : input -> string -> int -> int -> int
(** [really_input i s p l] reads exactly [l] characters from the given input,
storing them in the string [s], starting at position [p]. For consistency with
{!IO.input} it returns [l]. Raises [No_more_input] if at [l] characters are
not available. Raises [Invalid_argument] if [p] and [l] do not designate a
valid substring of [s]. *)
val close_in : input -> unit
(** Close the input. It can no longer be read from. *)
val write : 'a output -> char -> unit
(** Write a single char to an output. *)
val nwrite : 'a output -> string -> unit
(** Write a string to an output. *)
val output : 'a output -> string -> int -> int -> int
(** [output o s p l] writes up to [l] characters from string [s], starting at
offset [p]. It returns the number of characters written. It will raise
[Invalid_argument] if [p] and [l] do not designate a valid substring of [s]. *)
val really_output : 'a output -> string -> int -> int -> int
(** [really_output o s p l] writes exactly [l] characters from string [s] onto
the the output, starting with the character at offset [p]. For consistency with
{!IO.output} it returns [l]. Raises [Invalid_argument] if [p] and [l] do not
designate a valid substring of [s]. *)
val flush : 'a output -> unit
(** Flush an output. *)
val close_out : 'a output -> 'a
(** Close the output and return its accumulator data.
It can no longer be written. *)
* { 6 Creation of IO Inputs / Outputs }
val input_string : string -> input
(** Create an input that will read from a string. *)
val output_string : unit -> string output
(** Create an output that will write into a string in an efficient way.
When closed, the output returns all the data written into it. *)
val input_channel : in_channel -> input
(** Create an input that will read from a channel. *)
val output_channel : out_channel -> unit output
(** Create an output that will write into a channel. *)
val input_enum : char Enum.t -> input
(** Create an input that will read from an [enum]. *)
val output_enum : unit -> char Enum.t output
(** Create an output that will write into an [enum]. The
final enum is returned when the output is closed. *)
val create_in :
read:(unit -> char) ->
input:(string -> int -> int -> int) -> close:(unit -> unit) -> input
(** Fully create an input by giving all the needed functions. *)
val create_out :
write:(char -> unit) ->
output:(string -> int -> int -> int) ->
flush:(unit -> unit) -> close:(unit -> 'a) -> 'a output
(** Fully create an output by giving all the needed functions. *)
* { 6 Utilities }
val printf : 'a output -> ('b, unit, string, unit) format4 -> 'b
(** The printf function works for any output. *)
val read_all : input -> string
(** read all the contents of the input until [No_more_input] is raised. *)
val pipe : unit -> input * unit output
(** Create a pipe between an input and an ouput. Data written from
the output can be read from the input. *)
val pos_in : input -> input * (unit -> int)
(** Create an input that provide a count function of the number of bytes
read from it. *)
val pos_out : 'a output -> 'a output * (unit -> int)
(** Create an output that provide a count function of the number of bytes
written through it. *)
external cast_output : 'a output -> unit output = "%identity"
(** You can safely transform any output to an unit output in a safe way
by using this function. *)
* { 6 Binary files API }
Here is some API useful for working with binary files , in particular
binary files generated by C applications . By default , encoding of
multibyte integers is low - endian . The BigEndian module provide multibyte
operations with other encoding .
Here is some API useful for working with binary files, in particular
binary files generated by C applications. By default, encoding of
multibyte integers is low-endian. The BigEndian module provide multibyte
operations with other encoding.
*)
exception Overflow of string
(** Exception raised when a read or write operation cannot be completed. *)
val read_byte : input -> int
* Read an unsigned 8 - bit integer .
val read_signed_byte : input -> int
* Read an signed 8 - bit integer .
val read_ui16 : input -> int
* Read an unsigned 16 - bit word .
val read_i16 : input -> int
* Read a signed 16 - bit word .
val read_i32 : input -> int
* Read a signed 32 - bit integer . Raise [ Overflow ] if the
read integer can not be represented as a Caml 31 - bit integer .
read integer cannot be represented as a Caml 31-bit integer. *)
val read_real_i32 : input -> int32
* Read a signed 32 - bit integer as an OCaml int32 .
val read_i64 : input -> int64
* Read a signed 64 - bit integer as an OCaml int64 .
val read_double : input -> float
(** Read an IEEE double precision floating point value. *)
val read_string : input -> string
(** Read a null-terminated string. *)
val read_line : input -> string
(** Read a LF or CRLF terminated string. *)
val write_byte : 'a output -> int -> unit
* Write an unsigned 8 - bit byte .
val write_ui16 : 'a output -> int -> unit
* Write an unsigned 16 - bit word .
val write_i16 : 'a output -> int -> unit
* Write a signed 16 - bit word .
val write_i32 : 'a output -> int -> unit
* Write a signed 32 - bit integer .
val write_real_i32 : 'a output -> int32 -> unit
(** Write an OCaml int32. *)
val write_i64 : 'a output -> int64 -> unit
* Write an OCaml int64 .
val write_double : 'a output -> float -> unit
(** Write an IEEE double precision floating point value. *)
val write_string : 'a output -> string -> unit
(** Write a string and append an null character. *)
val write_line : 'a output -> string -> unit
(** Write a line and append a LF (it might be converted
to CRLF on some systems depending on the underlying IO). *)
(** Same as operations above, but use big-endian encoding *)
module BigEndian :
sig
val read_ui16 : input -> int
val read_i16 : input -> int
val read_i32 : input -> int
val read_real_i32 : input -> int32
val read_i64 : input -> int64
val read_double : input -> float
val write_ui16 : 'a output -> int -> unit
val write_i16 : 'a output -> int -> unit
val write_i32 : 'a output -> int -> unit
val write_real_i32 : 'a output -> int32 -> unit
val write_i64 : 'a output -> int64 -> unit
val write_double : 'a output -> float -> unit
end
* { 6 Bits API }
This enable you to read and write from an IO bit - by - bit or several bits
at the same time .
This enable you to read and write from an IO bit-by-bit or several bits
at the same time.
*)
type in_bits
type out_bits
exception Bits_error
val input_bits : input -> in_bits
(** Read bits from an input *)
val output_bits : 'a output -> out_bits
(** Write bits to an output *)
val read_bits : in_bits -> int -> int
* Read up to 31 bits , raise Bits_error if n < 0 or n > 31
val write_bits : out_bits -> nbits:int -> int -> unit
* Write up to 31 bits represented as a value , raise Bits_error if < 0
or > 31 or the value representation excess nbits .
or nbits > 31 or the value representation excess nbits. *)
val flush_bits : out_bits -> unit
* Flush remaining unwritten bits , adding up to 7 bits which values 0 .
val drop_bits : in_bits -> unit
* Drop up to 7 buffered bits and restart to next input character .
* { 6 Generic IO Object Wrappers }
Theses OO Wrappers have been written to provide easy support of ExtLib
IO by external librairies . If you want your library to support ExtLib
IO without actually requiring ExtLib to compile , you can should implement
the classes [ in_channel ] , [ out_channel ] , [ poly_in_channel ] and/or
[ poly_out_channel ] which are the common IO specifications established
for ExtLib , OCamlNet and Camomile .
( see -programming.de/tmp/IO-Classes.html for more details ) .
Theses OO Wrappers have been written to provide easy support of ExtLib
IO by external librairies. If you want your library to support ExtLib
IO without actually requiring ExtLib to compile, you can should implement
the classes [in_channel], [out_channel], [poly_in_channel] and/or
[poly_out_channel] which are the common IO specifications established
for ExtLib, OCamlNet and Camomile.
(see -programming.de/tmp/IO-Classes.html for more details).
*)
class in_channel : input ->
object
method input : string -> int -> int -> int
method close_in : unit -> unit
end
class out_channel : 'a output ->
object
method output : string -> int -> int -> int
method flush : unit -> unit
method close_out : unit -> unit
end
class in_chars : input ->
object
method get : unit -> char
method close_in : unit -> unit
end
class out_chars : 'a output ->
object
method put : char -> unit
method flush : unit -> unit
method close_out : unit -> unit
end
val from_in_channel : #in_channel -> input
val from_out_channel : #out_channel -> unit output
val from_in_chars : #in_chars -> input
val from_out_chars : #out_chars -> unit output
| null | https://raw.githubusercontent.com/bomberstudios/mtasc/d7c2441310248776aa89d60f9c8f98d539bfe8de/src/extlib-dev/IO.mli | ocaml | * The abstract input type.
* The abstract output type, ['a] is the accumulator data, it is returned
when the [close_out] function is called.
* This exception is raised when reading on a closed input.
* This exception is raised when reading on a closed output.
* Read a single char from an input or raise [No_more_input] if
no input available.
* [nread i n] reads a string of size up to [n] from an input.
The function will raise [No_more_input] if no input is available.
It will raise [Invalid_argument] if [n] < 0.
* [really_nread i n] reads a string of exactly [n] characters
from the input. Raises [No_more_input] if at least [n] characters are
not available. Raises [Invalid_argument] if [n] < 0.
* [input i s p l] reads up to [l] characters from the given input, storing
them in string [s], starting at character number [p]. It returns the actual
number of characters read or raise [No_more_input] if no character can be
read. It will raise [Invalid_argument] if [p] and [l] do not designate a
valid substring of [s].
* [really_input i s p l] reads exactly [l] characters from the given input,
storing them in the string [s], starting at position [p]. For consistency with
{!IO.input} it returns [l]. Raises [No_more_input] if at [l] characters are
not available. Raises [Invalid_argument] if [p] and [l] do not designate a
valid substring of [s].
* Close the input. It can no longer be read from.
* Write a single char to an output.
* Write a string to an output.
* [output o s p l] writes up to [l] characters from string [s], starting at
offset [p]. It returns the number of characters written. It will raise
[Invalid_argument] if [p] and [l] do not designate a valid substring of [s].
* [really_output o s p l] writes exactly [l] characters from string [s] onto
the the output, starting with the character at offset [p]. For consistency with
{!IO.output} it returns [l]. Raises [Invalid_argument] if [p] and [l] do not
designate a valid substring of [s].
* Flush an output.
* Close the output and return its accumulator data.
It can no longer be written.
* Create an input that will read from a string.
* Create an output that will write into a string in an efficient way.
When closed, the output returns all the data written into it.
* Create an input that will read from a channel.
* Create an output that will write into a channel.
* Create an input that will read from an [enum].
* Create an output that will write into an [enum]. The
final enum is returned when the output is closed.
* Fully create an input by giving all the needed functions.
* Fully create an output by giving all the needed functions.
* The printf function works for any output.
* read all the contents of the input until [No_more_input] is raised.
* Create a pipe between an input and an ouput. Data written from
the output can be read from the input.
* Create an input that provide a count function of the number of bytes
read from it.
* Create an output that provide a count function of the number of bytes
written through it.
* You can safely transform any output to an unit output in a safe way
by using this function.
* Exception raised when a read or write operation cannot be completed.
* Read an IEEE double precision floating point value.
* Read a null-terminated string.
* Read a LF or CRLF terminated string.
* Write an OCaml int32.
* Write an IEEE double precision floating point value.
* Write a string and append an null character.
* Write a line and append a LF (it might be converted
to CRLF on some systems depending on the underlying IO).
* Same as operations above, but use big-endian encoding
* Read bits from an input
* Write bits to an output |
* IO - Abstract input / output
* Copyright ( C ) 2003
*
* This library is free software ; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation ; either
* version 2.1 of the License , or ( at your option ) any later version ,
* with the special exception on linking described in file LICENSE .
*
* This library is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
* Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
* IO - Abstract input/output
* Copyright (C) 2003 Nicolas Cannasse
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version,
* with the special exception on linking described in file LICENSE.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
* High - order abstract I / O.
IO module simply deals with abstract inputs / outputs . It provides a
set of methods for working with these IO as well as several
constructors that enable to write to an underlying channel , buffer ,
or enum .
IO module simply deals with abstract inputs/outputs. It provides a
set of methods for working with these IO as well as several
constructors that enable to write to an underlying channel, buffer,
or enum.
*)
type input
type 'a output
exception No_more_input
* This exception is raised when reading on an input with the [ read ] or
[ nread ] functions while there is no available token to read .
[nread] functions while there is no available token to read. *)
exception Input_closed
exception Output_closed
* { 6 Standard API }
val read : input -> char
val nread : input -> int -> string
val really_nread : input -> int -> string
val input : input -> string -> int -> int -> int
val really_input : input -> string -> int -> int -> int
val close_in : input -> unit
val write : 'a output -> char -> unit
val nwrite : 'a output -> string -> unit
val output : 'a output -> string -> int -> int -> int
val really_output : 'a output -> string -> int -> int -> int
val flush : 'a output -> unit
val close_out : 'a output -> 'a
* { 6 Creation of IO Inputs / Outputs }
val input_string : string -> input
val output_string : unit -> string output
val input_channel : in_channel -> input
val output_channel : out_channel -> unit output
val input_enum : char Enum.t -> input
val output_enum : unit -> char Enum.t output
val create_in :
read:(unit -> char) ->
input:(string -> int -> int -> int) -> close:(unit -> unit) -> input
val create_out :
write:(char -> unit) ->
output:(string -> int -> int -> int) ->
flush:(unit -> unit) -> close:(unit -> 'a) -> 'a output
* { 6 Utilities }
val printf : 'a output -> ('b, unit, string, unit) format4 -> 'b
val read_all : input -> string
val pipe : unit -> input * unit output
val pos_in : input -> input * (unit -> int)
val pos_out : 'a output -> 'a output * (unit -> int)
external cast_output : 'a output -> unit output = "%identity"
* { 6 Binary files API }
Here is some API useful for working with binary files , in particular
binary files generated by C applications . By default , encoding of
multibyte integers is low - endian . The BigEndian module provide multibyte
operations with other encoding .
Here is some API useful for working with binary files, in particular
binary files generated by C applications. By default, encoding of
multibyte integers is low-endian. The BigEndian module provide multibyte
operations with other encoding.
*)
exception Overflow of string
val read_byte : input -> int
* Read an unsigned 8 - bit integer .
val read_signed_byte : input -> int
* Read an signed 8 - bit integer .
val read_ui16 : input -> int
* Read an unsigned 16 - bit word .
val read_i16 : input -> int
* Read a signed 16 - bit word .
val read_i32 : input -> int
* Read a signed 32 - bit integer . Raise [ Overflow ] if the
read integer can not be represented as a Caml 31 - bit integer .
read integer cannot be represented as a Caml 31-bit integer. *)
val read_real_i32 : input -> int32
* Read a signed 32 - bit integer as an OCaml int32 .
val read_i64 : input -> int64
* Read a signed 64 - bit integer as an OCaml int64 .
val read_double : input -> float
val read_string : input -> string
val read_line : input -> string
val write_byte : 'a output -> int -> unit
* Write an unsigned 8 - bit byte .
val write_ui16 : 'a output -> int -> unit
* Write an unsigned 16 - bit word .
val write_i16 : 'a output -> int -> unit
* Write a signed 16 - bit word .
val write_i32 : 'a output -> int -> unit
* Write a signed 32 - bit integer .
val write_real_i32 : 'a output -> int32 -> unit
val write_i64 : 'a output -> int64 -> unit
* Write an OCaml int64 .
val write_double : 'a output -> float -> unit
val write_string : 'a output -> string -> unit
val write_line : 'a output -> string -> unit
module BigEndian :
sig
val read_ui16 : input -> int
val read_i16 : input -> int
val read_i32 : input -> int
val read_real_i32 : input -> int32
val read_i64 : input -> int64
val read_double : input -> float
val write_ui16 : 'a output -> int -> unit
val write_i16 : 'a output -> int -> unit
val write_i32 : 'a output -> int -> unit
val write_real_i32 : 'a output -> int32 -> unit
val write_i64 : 'a output -> int64 -> unit
val write_double : 'a output -> float -> unit
end
* { 6 Bits API }
This enable you to read and write from an IO bit - by - bit or several bits
at the same time .
This enable you to read and write from an IO bit-by-bit or several bits
at the same time.
*)
type in_bits
type out_bits
exception Bits_error
val input_bits : input -> in_bits
val output_bits : 'a output -> out_bits
val read_bits : in_bits -> int -> int
* Read up to 31 bits , raise Bits_error if n < 0 or n > 31
val write_bits : out_bits -> nbits:int -> int -> unit
* Write up to 31 bits represented as a value , raise Bits_error if < 0
or > 31 or the value representation excess nbits .
or nbits > 31 or the value representation excess nbits. *)
val flush_bits : out_bits -> unit
* Flush remaining unwritten bits , adding up to 7 bits which values 0 .
val drop_bits : in_bits -> unit
* Drop up to 7 buffered bits and restart to next input character .
* { 6 Generic IO Object Wrappers }
Theses OO Wrappers have been written to provide easy support of ExtLib
IO by external librairies . If you want your library to support ExtLib
IO without actually requiring ExtLib to compile , you can should implement
the classes [ in_channel ] , [ out_channel ] , [ poly_in_channel ] and/or
[ poly_out_channel ] which are the common IO specifications established
for ExtLib , OCamlNet and Camomile .
( see -programming.de/tmp/IO-Classes.html for more details ) .
Theses OO Wrappers have been written to provide easy support of ExtLib
IO by external librairies. If you want your library to support ExtLib
IO without actually requiring ExtLib to compile, you can should implement
the classes [in_channel], [out_channel], [poly_in_channel] and/or
[poly_out_channel] which are the common IO specifications established
for ExtLib, OCamlNet and Camomile.
(see -programming.de/tmp/IO-Classes.html for more details).
*)
class in_channel : input ->
object
method input : string -> int -> int -> int
method close_in : unit -> unit
end
class out_channel : 'a output ->
object
method output : string -> int -> int -> int
method flush : unit -> unit
method close_out : unit -> unit
end
class in_chars : input ->
object
method get : unit -> char
method close_in : unit -> unit
end
class out_chars : 'a output ->
object
method put : char -> unit
method flush : unit -> unit
method close_out : unit -> unit
end
val from_in_channel : #in_channel -> input
val from_out_channel : #out_channel -> unit output
val from_in_chars : #in_chars -> input
val from_out_chars : #out_chars -> unit output
|
c2734376799780d32f9e6493a7c3bf1663cc99ab6a5342ee72290d2ba1278b1a | rd--/hsc3 | Enum.hs | -- | Data types for enumerated and non signal unit generator inputs.
module Sound.Sc3.Common.Enum where
-- * Loop
-- | Loop indicator input.
data Loop t =
NoLoop -- ^ 0
^ 1
| WithLoop t
deriving (Eq, Show)
-- | Apply /f/ at 'WithLoop'.
loop_map :: (t -> u) -> Loop t -> Loop u
loop_map f lp =
case lp of
NoLoop -> NoLoop
Loop -> Loop
WithLoop t -> WithLoop (f t)
-- | fmap is 'loop_map'
instance Functor Loop where
fmap = loop_map
-- | Resolve 'Loop'.
from_loop :: Num t => Loop t -> t
from_loop e =
case e of
NoLoop -> 0
Loop -> 1
WithLoop u -> u
-- * Interpolation
-- | Interpolation indicator input.
data Interpolation t =
NoInterpolation
| LinearInterpolation
| CubicInterpolation
| WithInterpolation t
deriving (Eq, Show)
-- | Resolve 'Interpolation'.
from_interpolation :: Num t => Interpolation t -> t
from_interpolation e =
case e of
NoInterpolation -> 1
LinearInterpolation -> 2
CubicInterpolation -> 4
WithInterpolation u -> u
-- * DoneAction
-- | Completion mode indicator input.
data DoneAction t
= DoNothing
| PauseSynth
| RemoveSynth
| RemoveGroup
| WithDoneAction t
deriving (Eq, Show)
-- | Apply /f/ at 'WithDoneAction'.
done_action_map :: (t -> u) -> DoneAction t -> DoneAction u
done_action_map f e =
case e of
DoNothing -> DoNothing
PauseSynth -> PauseSynth
RemoveSynth -> RemoveSynth
RemoveGroup -> RemoveGroup
WithDoneAction x -> WithDoneAction (f x)
-- | fmap is 'done_action_map'
instance Functor DoneAction where
fmap = done_action_map
-- | Resolve 'DoneAction'.
from_done_action :: Num t => DoneAction t -> t
from_done_action e =
case e of
DoNothing -> 0
PauseSynth -> 1
RemoveSynth -> 2
RemoveGroup -> 14
WithDoneAction x -> x
-- * Warp
-- | Warp interpolation indicator input.
data Warp t =
Linear
| Exponential
| WithWarp t
deriving (Eq, Show)
-- | Resolve 'Warp'.
--
> map from_warp [ Linear , Exponential ] = = [ 0,1 ]
from_warp :: Num t => Warp t -> t
from_warp e =
case e of
Linear -> 0
Exponential -> 1
WithWarp u -> u
| Apply /f/ at ' WithWarp '
warp_map :: (t -> u) -> Warp t -> Warp u
warp_map f e =
case e of
Linear -> Linear
Exponential -> Exponential
WithWarp u -> WithWarp (f u)
-- | fmap = 'warp_map'
instance Functor Warp where
fmap = warp_map
-- * Buffer
| Unification of integer and ' Ugen ' buffer identifiers .
data Buffer t =
Buffer_Id Int
| Buffer t
deriving (Eq, Show)
| null | https://raw.githubusercontent.com/rd--/hsc3/7dc748106639999947548d0b3205a468cfc55fed/Sound/Sc3/Common/Enum.hs | haskell | | Data types for enumerated and non signal unit generator inputs.
* Loop
| Loop indicator input.
^ 0
| Apply /f/ at 'WithLoop'.
| fmap is 'loop_map'
| Resolve 'Loop'.
* Interpolation
| Interpolation indicator input.
| Resolve 'Interpolation'.
* DoneAction
| Completion mode indicator input.
| Apply /f/ at 'WithDoneAction'.
| fmap is 'done_action_map'
| Resolve 'DoneAction'.
* Warp
| Warp interpolation indicator input.
| Resolve 'Warp'.
| fmap = 'warp_map'
* Buffer | module Sound.Sc3.Common.Enum where
data Loop t =
^ 1
| WithLoop t
deriving (Eq, Show)
loop_map :: (t -> u) -> Loop t -> Loop u
loop_map f lp =
case lp of
NoLoop -> NoLoop
Loop -> Loop
WithLoop t -> WithLoop (f t)
instance Functor Loop where
fmap = loop_map
from_loop :: Num t => Loop t -> t
from_loop e =
case e of
NoLoop -> 0
Loop -> 1
WithLoop u -> u
data Interpolation t =
NoInterpolation
| LinearInterpolation
| CubicInterpolation
| WithInterpolation t
deriving (Eq, Show)
from_interpolation :: Num t => Interpolation t -> t
from_interpolation e =
case e of
NoInterpolation -> 1
LinearInterpolation -> 2
CubicInterpolation -> 4
WithInterpolation u -> u
data DoneAction t
= DoNothing
| PauseSynth
| RemoveSynth
| RemoveGroup
| WithDoneAction t
deriving (Eq, Show)
done_action_map :: (t -> u) -> DoneAction t -> DoneAction u
done_action_map f e =
case e of
DoNothing -> DoNothing
PauseSynth -> PauseSynth
RemoveSynth -> RemoveSynth
RemoveGroup -> RemoveGroup
WithDoneAction x -> WithDoneAction (f x)
instance Functor DoneAction where
fmap = done_action_map
from_done_action :: Num t => DoneAction t -> t
from_done_action e =
case e of
DoNothing -> 0
PauseSynth -> 1
RemoveSynth -> 2
RemoveGroup -> 14
WithDoneAction x -> x
data Warp t =
Linear
| Exponential
| WithWarp t
deriving (Eq, Show)
> map from_warp [ Linear , Exponential ] = = [ 0,1 ]
from_warp :: Num t => Warp t -> t
from_warp e =
case e of
Linear -> 0
Exponential -> 1
WithWarp u -> u
| Apply /f/ at ' WithWarp '
warp_map :: (t -> u) -> Warp t -> Warp u
warp_map f e =
case e of
Linear -> Linear
Exponential -> Exponential
WithWarp u -> WithWarp (f u)
instance Functor Warp where
fmap = warp_map
| Unification of integer and ' Ugen ' buffer identifiers .
data Buffer t =
Buffer_Id Int
| Buffer t
deriving (Eq, Show)
|
de7e5384b0dbc39ca7d6917ae2bbc0230a65bc6c1e573461a8af2fdf0da564d6 | data61/Mirza | Types.hs | {-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE KindSignatures #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE OverloadedLists #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE StandaloneDeriving #
{-# LANGUAGE TemplateHaskell #-}
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
# OPTIONS_GHC -Wno - orphans #
# OPTIONS_GHC -fno - warn - orphans #
module Mirza.Common.Types
( EmailAddress, emailToText, Password(..) , UserId(..)
, ORKeyId(..)
, HealthResponse(..)
, EnvType(..)
, AppM(..)
, runAppM
, DB(..)
, runDb
, pg
, Member
, HasLogging
, AsSqlError(..)
, HasConnPool(..)
, HasEnvType(..)
, HasKatipContext(..)
, HasKatipLogEnv(..)
, HasORClientEnv(..)
, HasDB
, AsServantError (..)
, DBConstraint
, ask, asks
, MonadError
, throwing, throwing_
, MonadIO, liftIO
, PrimaryKeyType
, orKeyIdType
, runClientFunc
) where
import qualified Database.Beam as B
import Database.Beam.Backend.SQL (FromBackendRow,
HasSqlValueSyntax)
import qualified Database.Beam.Backend.SQL as BSQL
import Database.Beam.Postgres (Pg, Postgres,
runBeamPostgres,
runBeamPostgresDebug)
import Database.Beam.Postgres.Syntax (pgUuidType)
import Database.Beam.Query.DataTypes (DataType (..))
import Database.PostgreSQL.Simple (Connection, SqlError)
import qualified Database.PostgreSQL.Simple as DB
import Database.PostgreSQL.Simple.FromField (FromField, fromField)
import Database.PostgreSQL.Simple.ToField (ToField, toField)
import Data.Proxy (Proxy (..))
import qualified Control.Exception as Exc
import qualified Control.Exception as E
import Control.Monad.Except (ExceptT (..), MonadError,
runExceptT, throwError)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Reader (MonadReader, ReaderT,
ask, asks, local,
runReaderT)
import Control.Monad.Trans (lift)
import Data.Pool as Pool
import Crypto.JOSE (JWK, JWS, JWSHeader,
Signature)
import Crypto.JOSE.Types (Base64Octets)
import qualified Data.ByteString as BS
import Data.Text (Text)
import Data.Text.Encoding as T
import Text.Email.Validate (EmailAddress,
toByteString, validate)
import Data.Aeson
import Data.Aeson.Types
import Control.Lens
import Control.Monad.Error.Lens
import GHC.Exts (Constraint)
import GHC.Generics (Generic)
import Katip as K
import Katip.Monadic (askLoggerIO)
import Data.Swagger
import Servant (FromHttpApiData (..),
ToHttpApiData (..))
import Servant.Client (ClientEnv (..), ClientM,
ServantError (..),
runClientM)
import Data.UUID (UUID)
type PrimaryKeyType = UUID
-- *****************************************************************************
-- Orphan Instances
-- *****************************************************************************
instance ToJSON EmailAddress where
toJSON = toJSON . T.decodeUtf8 . toByteString
instance FromJSON EmailAddress where
parseJSON = withText "EmailAddress" $ \t -> case validate (T.encodeUtf8 t) of
Left err -> fail err
Right e -> pure e
instance ToSchema EmailAddress where
declareNamedSchema _ = declareNamedSchema (Proxy :: Proxy Text)
<&> name ?~ "Email address"
<&> schema . description ?~ "An RFC 5322 compliant email address"
emailToText :: EmailAddress -> Text
emailToText = decodeUtf8 . toByteString
-- *****************************************************************************
-- User Types
-- *****************************************************************************
-- TODO: Handwrite these instances to comply with their defined syntax
For example , emails have their own format , as do
newtype UserId = UserId {getUserId :: PrimaryKeyType}
deriving (Eq, Show, Generic, Read, Ord)
instance FromJSON UserId where
parseJSON = fmap UserId . parseJSON
instance ToJSON UserId where
toJSON = toJSON . getUserId
instance ToSchema UserId
instance ToParamSchema UserId
deriving instance FromHttpApiData UserId
deriving instance ToHttpApiData UserId
| Do NOT derive an ` Eq ` instance for Password . We do not want a literal
-- equality check for password
newtype Password = Password BS.ByteString
instance Show Password where
show _ = "Password <redacted>"
newtype ORKeyId = ORKeyId {getORKeyId :: UUID}
deriving (Show, Eq, Generic, Read)
instance FromJSON ORKeyId where
parseJSON = fmap ORKeyId . parseJSON
instance ToJSON ORKeyId where
toJSON = toJSON . getORKeyId
instance ToSchema ORKeyId
instance ToParamSchema ORKeyId
instance FromHttpApiData ORKeyId where
parseUrlPiece t = fmap ORKeyId (parseUrlPiece t)
deriving instance ToHttpApiData ORKeyId
instance FromField ORKeyId where
fromField field mbs = ORKeyId <$> fromField field mbs
instance ToField ORKeyId where
toField = toField . getORKeyId
instance HasSqlValueSyntax be UUID => HasSqlValueSyntax be ORKeyId where
sqlValueSyntax (ORKeyId uuid) = BSQL.sqlValueSyntax uuid
instance (BSQL.BeamSqlBackend be, FromBackendRow be UUID)
=> FromBackendRow be ORKeyId where
fromBackendRow = ORKeyId <$> BSQL.fromBackendRow
valuesNeeded proxyBE _proxyKID = BSQL.valuesNeeded proxyBE (Proxy :: Proxy UUID)
orKeyIdType :: B.DataType Postgres ORKeyId
orKeyIdType = DataType pgUuidType
data EnvType = Prod | Dev
deriving (Show, Eq, Read)
-- | The class of contexts which include an 'EnvType'
$(makeClassy ''EnvType)
-- runReaderT :: r -> m a
-- ReaderT r m a
type Handler a = ExceptT ServantErr IO a
-- newtype ExceptT e m a :: * -> (* -> *) -> * -> *
newtype AppM context err a = AppM
{ getAppM :: ReaderT context (ExceptT err IO) a
} deriving
( Functor
, Applicative
, Monad
, MonadReader context
, MonadIO
, MonadError err
)
| The DB monad is used to connect to the Beam backend . The only way to run
-- something of type DB a is to use 'runDb', which ensures the action is run in
a Postgres transaction , and that exceptions and errors thrown inside the DB a
-- cause the transaction to be rolled back and the error rethrown.
newtype DB context error a = DB (ReaderT (Connection,context) (ExceptT error Pg) a)
deriving
( Functor
, Applicative
, Monad
, MonadReader (Connection,context)
, MonadError error
, MonadIO -- Need to figure out if we actually want this
)
-- Health Types:
successHealthResponseText :: Text
successHealthResponseText = "Status OK"
data HealthResponse = HealthResponse
deriving (Show, Eq, Read, Generic)
instance ToSchema HealthResponse
instance ToJSON HealthResponse where
toJSON _ = toJSON successHealthResponseText
instance FromJSON HealthResponse where
parseJSON (String value)
| value == successHealthResponseText = pure HealthResponse
| otherwise = fail "Invalid health response string."
parseJSON value = typeMismatch "HealthResponse" value
-- =============================================================================
-- Classes and utilities for working with Constraints
-- =============================================================================
-- | Helper to make constraints on functions cleaner:
--
bazQuery : : ( Member context ' [ HasEnvType , HasConnPool , HasLogging ]
-- , Member err '[AsORError, AsORKeyError])
-- => Foo
-- -> DB context err Bar
type family Member (e :: *) (cs :: [* -> Constraint]) :: Constraint where
Member e '[] = ()
Member e (c ': cs) = (c e, Member e cs)
-- | The class of contexts which have a database pool:
-- @
-- pool <- view connPool
Pool.withResource pool $ \conn - > ..
-- @
class HasConnPool a where
connPool :: Lens' a (Pool Connection)
| The class of error types which can contain a ` SqlError ` . _ See
' . SupplyChain . BeamQueries.insertUser ' for a good example of how to catch
-- errors using this class._
class AsSqlError a where
_SqlError :: Prism' a SqlError
instance AsSqlError SqlError where
_SqlError = id
-- Logging classes
-- ===============
-- | Convenience class for contexts which can be used for logging
-- @
-- foo :: Member context '[HasLogging] => Foo -> DB context err Bar
-- @
class (HasKatipContext context, HasKatipLogEnv context)
=> HasLogging context where
instance (HasKatipContext context, HasKatipLogEnv context)
=> HasLogging context
class HasKatipLogEnv a where
katipLogEnv :: Lens' a K.LogEnv
class HasKatipContext a where
katipContexts :: Lens' a K.LogContexts
katipNamespace :: Lens' a K.Namespace
instance HasKatipLogEnv context => Katip (AppM context err) where
getLogEnv = view katipLogEnv
localLogEnv f = local (over katipLogEnv f)
instance (HasKatipContext context, HasKatipLogEnv context)
=> KatipContext (AppM context err) where
getKatipContext = view katipContexts
getKatipNamespace = view katipNamespace
localKatipContext f = local (over katipContexts f)
localKatipNamespace f = local (over katipNamespace f)
instance HasKatipLogEnv context => Katip (DB context err) where
getLogEnv = view (_2 . katipLogEnv)
localLogEnv f = local (over (_2 . katipLogEnv) f)
instance (HasKatipContext context, HasKatipLogEnv context)
=> KatipContext (DB context err) where
getKatipContext = view (_2 . katipContexts)
getKatipNamespace = view (_2 . katipNamespace)
localKatipContext f = local (over (_2 . katipContexts) f)
localKatipNamespace f = local (over (_2 . katipNamespace) f)
class HasORClientEnv a where
clientEnv :: Lens' a ClientEnv
class AsServantError a where
_ServantError :: Prism' a ServantError
Useage of this type is deprecated prefer HasDb .
TODO : Remove DBConstraint once SCS is converted to use Member notation .
type DBConstraint context err =
( HasEnvType context
, HasConnPool context
, HasKatipContext context
, HasKatipLogEnv context
, AsSqlError err)
| Convenience class for contexts which require DB .
class (HasEnvType context, HasConnPool context, HasLogging context)
=> HasDB context where
instance (HasEnvType context, HasConnPool context, HasLogging context)
=> HasDB context
-- | Run a DB action within a transaction. See the documentation for
' withTransaction ' . SqlError exceptions will be caught and lifted into the
-- AppM MonadError instance, as will all app errors thrown in the DB a action,
-- and in either case the database transaction is rolled back.
--
Exceptions which are thrown which are not SqlErrors will be caught by Servant
and cause 500 errors ( these are not exceptions we 'll generally know how to
-- deal with).
runDb :: (HasDB context
, Member err '[AsSqlError])
=> DB context err a -> AppM context err a
runDb (DB act) = katipAddNamespace "runDb" $ do
env <- ask
e <- view envType
lggr <- askLoggerIO
let dbf = case e of
Prod -> runBeamPostgres
_ -> runBeamPostgresDebug (lggr DebugS . logStr)
res <- liftIO $ Pool.withResource (env ^. connPool) $ \conn ->
Exc.try
. withTransaction conn
. dbf conn
. runExceptT
. runReaderT act $ (conn,env)
: : ( Either SqlError ( Either AppError a ) )
either (throwing _SqlError)
(either throwError pure)
res
-- | As "Database.PostgreSQL.Simple.Transaction".'DB.withTransaction',
-- but aborts the transaction if a 'Left' is returned.
-- TODO: Add NFData constraint to avoid async exceptions.
withTransaction :: Connection -> IO (Either e a) -> IO (Either e a)
withTransaction conn act = E.mask $ \restore -> do
DB.begin conn
r <- restore (act >>= E.evaluate) `E.onException` DB.rollback conn
case r of
Left _ -> DB.rollback conn
Right _ -> DB.commit conn
pure r
pg :: Pg a -> DB context err a
pg = DB . lift . lift
runAppM :: context -> AppM context err a -> IO (Either err a)
runAppM env aM = runExceptT $ (runReaderT . getAppM) aM env
runClientFunc :: (AsServantError err, HasORClientEnv context)
=> ClientM a
-> AppM context err a
runClientFunc func = do
cEnv <- view clientEnv
either (throwing _ServantError) pure =<< liftIO (runClientM func cEnv)
TODO : Orphan for JWK
instance ToSchema JWK where
declareNamedSchema _ = do
strSchema <- declareSchemaRef (Proxy :: Proxy String)
pure $ NamedSchema (Just "JWK") $ mempty
& type_ .~ SwaggerObject
& properties .~
[ ("kty",strSchema)
, ("n",strSchema)
, ("e",strSchema)
]
instance ToSchema (JWS Identity () JWSHeader) where
declareNamedSchema _ =
pure $ NamedSchema (Just "JWS") mempty
instance ToSchema (Signature () JWSHeader) where
declareNamedSchema _ =
pure $ NamedSchema (Just "JWS Signature") mempty
instance ToSchema Base64Octets where
declareNamedSchema _ =
pure $ NamedSchema (Just "Base64 Encoded Bytes") $ mempty
& type_ .~ SwaggerString
| null | https://raw.githubusercontent.com/data61/Mirza/24e5ccddfc307cceebcc5ce26d35e91020b8ee10/projects/mirza-common-haskell/src/Mirza/Common/Types.hs | haskell | # LANGUAGE ConstraintKinds #
# LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
# LANGUAGE OverloadedStrings #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
*****************************************************************************
Orphan Instances
*****************************************************************************
*****************************************************************************
User Types
*****************************************************************************
TODO: Handwrite these instances to comply with their defined syntax
equality check for password
| The class of contexts which include an 'EnvType'
runReaderT :: r -> m a
ReaderT r m a
newtype ExceptT e m a :: * -> (* -> *) -> * -> *
something of type DB a is to use 'runDb', which ensures the action is run in
cause the transaction to be rolled back and the error rethrown.
Need to figure out if we actually want this
Health Types:
=============================================================================
Classes and utilities for working with Constraints
=============================================================================
| Helper to make constraints on functions cleaner:
, Member err '[AsORError, AsORKeyError])
=> Foo
-> DB context err Bar
| The class of contexts which have a database pool:
@
pool <- view connPool
@
errors using this class._
Logging classes
===============
| Convenience class for contexts which can be used for logging
@
foo :: Member context '[HasLogging] => Foo -> DB context err Bar
@
| Run a DB action within a transaction. See the documentation for
AppM MonadError instance, as will all app errors thrown in the DB a action,
and in either case the database transaction is rolled back.
deal with).
| As "Database.PostgreSQL.Simple.Transaction".'DB.withTransaction',
but aborts the transaction if a 'Left' is returned.
TODO: Add NFData constraint to avoid async exceptions. | # LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE KindSignatures #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE OverloadedLists #
# LANGUAGE StandaloneDeriving #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
# OPTIONS_GHC -Wno - orphans #
# OPTIONS_GHC -fno - warn - orphans #
module Mirza.Common.Types
( EmailAddress, emailToText, Password(..) , UserId(..)
, ORKeyId(..)
, HealthResponse(..)
, EnvType(..)
, AppM(..)
, runAppM
, DB(..)
, runDb
, pg
, Member
, HasLogging
, AsSqlError(..)
, HasConnPool(..)
, HasEnvType(..)
, HasKatipContext(..)
, HasKatipLogEnv(..)
, HasORClientEnv(..)
, HasDB
, AsServantError (..)
, DBConstraint
, ask, asks
, MonadError
, throwing, throwing_
, MonadIO, liftIO
, PrimaryKeyType
, orKeyIdType
, runClientFunc
) where
import qualified Database.Beam as B
import Database.Beam.Backend.SQL (FromBackendRow,
HasSqlValueSyntax)
import qualified Database.Beam.Backend.SQL as BSQL
import Database.Beam.Postgres (Pg, Postgres,
runBeamPostgres,
runBeamPostgresDebug)
import Database.Beam.Postgres.Syntax (pgUuidType)
import Database.Beam.Query.DataTypes (DataType (..))
import Database.PostgreSQL.Simple (Connection, SqlError)
import qualified Database.PostgreSQL.Simple as DB
import Database.PostgreSQL.Simple.FromField (FromField, fromField)
import Database.PostgreSQL.Simple.ToField (ToField, toField)
import Data.Proxy (Proxy (..))
import qualified Control.Exception as Exc
import qualified Control.Exception as E
import Control.Monad.Except (ExceptT (..), MonadError,
runExceptT, throwError)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Reader (MonadReader, ReaderT,
ask, asks, local,
runReaderT)
import Control.Monad.Trans (lift)
import Data.Pool as Pool
import Crypto.JOSE (JWK, JWS, JWSHeader,
Signature)
import Crypto.JOSE.Types (Base64Octets)
import qualified Data.ByteString as BS
import Data.Text (Text)
import Data.Text.Encoding as T
import Text.Email.Validate (EmailAddress,
toByteString, validate)
import Data.Aeson
import Data.Aeson.Types
import Control.Lens
import Control.Monad.Error.Lens
import GHC.Exts (Constraint)
import GHC.Generics (Generic)
import Katip as K
import Katip.Monadic (askLoggerIO)
import Data.Swagger
import Servant (FromHttpApiData (..),
ToHttpApiData (..))
import Servant.Client (ClientEnv (..), ClientM,
ServantError (..),
runClientM)
import Data.UUID (UUID)
type PrimaryKeyType = UUID
instance ToJSON EmailAddress where
toJSON = toJSON . T.decodeUtf8 . toByteString
instance FromJSON EmailAddress where
parseJSON = withText "EmailAddress" $ \t -> case validate (T.encodeUtf8 t) of
Left err -> fail err
Right e -> pure e
instance ToSchema EmailAddress where
declareNamedSchema _ = declareNamedSchema (Proxy :: Proxy Text)
<&> name ?~ "Email address"
<&> schema . description ?~ "An RFC 5322 compliant email address"
emailToText :: EmailAddress -> Text
emailToText = decodeUtf8 . toByteString
For example , emails have their own format , as do
newtype UserId = UserId {getUserId :: PrimaryKeyType}
deriving (Eq, Show, Generic, Read, Ord)
instance FromJSON UserId where
parseJSON = fmap UserId . parseJSON
instance ToJSON UserId where
toJSON = toJSON . getUserId
instance ToSchema UserId
instance ToParamSchema UserId
deriving instance FromHttpApiData UserId
deriving instance ToHttpApiData UserId
| Do NOT derive an ` Eq ` instance for Password . We do not want a literal
newtype Password = Password BS.ByteString
instance Show Password where
show _ = "Password <redacted>"
newtype ORKeyId = ORKeyId {getORKeyId :: UUID}
deriving (Show, Eq, Generic, Read)
instance FromJSON ORKeyId where
parseJSON = fmap ORKeyId . parseJSON
instance ToJSON ORKeyId where
toJSON = toJSON . getORKeyId
instance ToSchema ORKeyId
instance ToParamSchema ORKeyId
instance FromHttpApiData ORKeyId where
parseUrlPiece t = fmap ORKeyId (parseUrlPiece t)
deriving instance ToHttpApiData ORKeyId
instance FromField ORKeyId where
fromField field mbs = ORKeyId <$> fromField field mbs
instance ToField ORKeyId where
toField = toField . getORKeyId
instance HasSqlValueSyntax be UUID => HasSqlValueSyntax be ORKeyId where
sqlValueSyntax (ORKeyId uuid) = BSQL.sqlValueSyntax uuid
instance (BSQL.BeamSqlBackend be, FromBackendRow be UUID)
=> FromBackendRow be ORKeyId where
fromBackendRow = ORKeyId <$> BSQL.fromBackendRow
valuesNeeded proxyBE _proxyKID = BSQL.valuesNeeded proxyBE (Proxy :: Proxy UUID)
orKeyIdType :: B.DataType Postgres ORKeyId
orKeyIdType = DataType pgUuidType
data EnvType = Prod | Dev
deriving (Show, Eq, Read)
$(makeClassy ''EnvType)
type Handler a = ExceptT ServantErr IO a
newtype AppM context err a = AppM
{ getAppM :: ReaderT context (ExceptT err IO) a
} deriving
( Functor
, Applicative
, Monad
, MonadReader context
, MonadIO
, MonadError err
)
| The DB monad is used to connect to the Beam backend . The only way to run
a Postgres transaction , and that exceptions and errors thrown inside the DB a
newtype DB context error a = DB (ReaderT (Connection,context) (ExceptT error Pg) a)
deriving
( Functor
, Applicative
, Monad
, MonadReader (Connection,context)
, MonadError error
)
successHealthResponseText :: Text
successHealthResponseText = "Status OK"
data HealthResponse = HealthResponse
deriving (Show, Eq, Read, Generic)
instance ToSchema HealthResponse
instance ToJSON HealthResponse where
toJSON _ = toJSON successHealthResponseText
instance FromJSON HealthResponse where
parseJSON (String value)
| value == successHealthResponseText = pure HealthResponse
| otherwise = fail "Invalid health response string."
parseJSON value = typeMismatch "HealthResponse" value
bazQuery : : ( Member context ' [ HasEnvType , HasConnPool , HasLogging ]
type family Member (e :: *) (cs :: [* -> Constraint]) :: Constraint where
Member e '[] = ()
Member e (c ': cs) = (c e, Member e cs)
Pool.withResource pool $ \conn - > ..
class HasConnPool a where
connPool :: Lens' a (Pool Connection)
| The class of error types which can contain a ` SqlError ` . _ See
' . SupplyChain . BeamQueries.insertUser ' for a good example of how to catch
class AsSqlError a where
_SqlError :: Prism' a SqlError
instance AsSqlError SqlError where
_SqlError = id
class (HasKatipContext context, HasKatipLogEnv context)
=> HasLogging context where
instance (HasKatipContext context, HasKatipLogEnv context)
=> HasLogging context
class HasKatipLogEnv a where
katipLogEnv :: Lens' a K.LogEnv
class HasKatipContext a where
katipContexts :: Lens' a K.LogContexts
katipNamespace :: Lens' a K.Namespace
instance HasKatipLogEnv context => Katip (AppM context err) where
getLogEnv = view katipLogEnv
localLogEnv f = local (over katipLogEnv f)
instance (HasKatipContext context, HasKatipLogEnv context)
=> KatipContext (AppM context err) where
getKatipContext = view katipContexts
getKatipNamespace = view katipNamespace
localKatipContext f = local (over katipContexts f)
localKatipNamespace f = local (over katipNamespace f)
instance HasKatipLogEnv context => Katip (DB context err) where
getLogEnv = view (_2 . katipLogEnv)
localLogEnv f = local (over (_2 . katipLogEnv) f)
instance (HasKatipContext context, HasKatipLogEnv context)
=> KatipContext (DB context err) where
getKatipContext = view (_2 . katipContexts)
getKatipNamespace = view (_2 . katipNamespace)
localKatipContext f = local (over (_2 . katipContexts) f)
localKatipNamespace f = local (over (_2 . katipNamespace) f)
class HasORClientEnv a where
clientEnv :: Lens' a ClientEnv
class AsServantError a where
_ServantError :: Prism' a ServantError
Useage of this type is deprecated prefer HasDb .
TODO : Remove DBConstraint once SCS is converted to use Member notation .
type DBConstraint context err =
( HasEnvType context
, HasConnPool context
, HasKatipContext context
, HasKatipLogEnv context
, AsSqlError err)
| Convenience class for contexts which require DB .
class (HasEnvType context, HasConnPool context, HasLogging context)
=> HasDB context where
instance (HasEnvType context, HasConnPool context, HasLogging context)
=> HasDB context
' withTransaction ' . SqlError exceptions will be caught and lifted into the
Exceptions which are thrown which are not SqlErrors will be caught by Servant
and cause 500 errors ( these are not exceptions we 'll generally know how to
runDb :: (HasDB context
, Member err '[AsSqlError])
=> DB context err a -> AppM context err a
runDb (DB act) = katipAddNamespace "runDb" $ do
env <- ask
e <- view envType
lggr <- askLoggerIO
let dbf = case e of
Prod -> runBeamPostgres
_ -> runBeamPostgresDebug (lggr DebugS . logStr)
res <- liftIO $ Pool.withResource (env ^. connPool) $ \conn ->
Exc.try
. withTransaction conn
. dbf conn
. runExceptT
. runReaderT act $ (conn,env)
: : ( Either SqlError ( Either AppError a ) )
either (throwing _SqlError)
(either throwError pure)
res
withTransaction :: Connection -> IO (Either e a) -> IO (Either e a)
withTransaction conn act = E.mask $ \restore -> do
DB.begin conn
r <- restore (act >>= E.evaluate) `E.onException` DB.rollback conn
case r of
Left _ -> DB.rollback conn
Right _ -> DB.commit conn
pure r
pg :: Pg a -> DB context err a
pg = DB . lift . lift
runAppM :: context -> AppM context err a -> IO (Either err a)
runAppM env aM = runExceptT $ (runReaderT . getAppM) aM env
runClientFunc :: (AsServantError err, HasORClientEnv context)
=> ClientM a
-> AppM context err a
runClientFunc func = do
cEnv <- view clientEnv
either (throwing _ServantError) pure =<< liftIO (runClientM func cEnv)
TODO : Orphan for JWK
instance ToSchema JWK where
declareNamedSchema _ = do
strSchema <- declareSchemaRef (Proxy :: Proxy String)
pure $ NamedSchema (Just "JWK") $ mempty
& type_ .~ SwaggerObject
& properties .~
[ ("kty",strSchema)
, ("n",strSchema)
, ("e",strSchema)
]
instance ToSchema (JWS Identity () JWSHeader) where
declareNamedSchema _ =
pure $ NamedSchema (Just "JWS") mempty
instance ToSchema (Signature () JWSHeader) where
declareNamedSchema _ =
pure $ NamedSchema (Just "JWS Signature") mempty
instance ToSchema Base64Octets where
declareNamedSchema _ =
pure $ NamedSchema (Just "Base64 Encoded Bytes") $ mempty
& type_ .~ SwaggerString
|
500b78994a1fd13e8f06d0a0ee75363b0fcc1f5e373f317177933af9bb168887 | AvisoNovate/rook | form_endpoints.clj | (ns sample.form-endpoints
(:require [ring.util.response :refer [response]]
[io.aviso.rook.interceptors :refer [keywordized-form]]))
(defn post-new-widget
{:rook-route [:post ""]
;; This is something I like; form parsing and all that ONLY occurs if this is the selected
;; endpoint. Otherwise, the :body is never accessed. And for stateless and
;; configuration-free interceptors like
this one , we do n't even need to use the extra machinery provides .
:interceptors [keywordized-form]}
[^:form-param widget-name
^:form-param supplier-id]
(response {:widget-name widget-name
:supplier-id supplier-id}))
| null | https://raw.githubusercontent.com/AvisoNovate/rook/a752ce97f39a5c52301dd1866195f463817a1ed7/spec/sample/form_endpoints.clj | clojure | This is something I like; form parsing and all that ONLY occurs if this is the selected
endpoint. Otherwise, the :body is never accessed. And for stateless and
configuration-free interceptors like | (ns sample.form-endpoints
(:require [ring.util.response :refer [response]]
[io.aviso.rook.interceptors :refer [keywordized-form]]))
(defn post-new-widget
{:rook-route [:post ""]
this one , we do n't even need to use the extra machinery provides .
:interceptors [keywordized-form]}
[^:form-param widget-name
^:form-param supplier-id]
(response {:widget-name widget-name
:supplier-id supplier-id}))
|
1c4b46bcf9a554b9ab9e5ec0f2535f5f13440943865ce2f20283da688b0db288 | snmsts/cl-langserver | slynk-retro.lisp | (defpackage :ls-retro
(:use :cl :ls-base :ls-api))
(in-package :ls-retro)
(defun ensure-slynk-package-nicknames (&rest ignored)
"Nickname all SLYNK-* package to SWANK-*"
(declare (ignore ignored))
(loop for package in (list-all-packages)
for package-name = (package-name package)
when (search "SLYNK" package-name :test #'char-equal)
do (rename-package package
package-name
(remove-duplicates
(cons
(format nil "SWANK~a"
(subseq package-name 5))
(package-nicknames package))
:test #'string-equal))))
(setq ls-rpc:*translating-swank-to-slynk* nil)
(push #'ensure-slynk-package-nicknames
ls-api:*slynk-require-hook*)
(ensure-slynk-package-nicknames)
(provide :ls-retro)
| null | https://raw.githubusercontent.com/snmsts/cl-langserver/3b1246a5d0bd58459e7a64708f820bf718cf7175/src/contrib/slynk-retro.lisp | lisp | (defpackage :ls-retro
(:use :cl :ls-base :ls-api))
(in-package :ls-retro)
(defun ensure-slynk-package-nicknames (&rest ignored)
"Nickname all SLYNK-* package to SWANK-*"
(declare (ignore ignored))
(loop for package in (list-all-packages)
for package-name = (package-name package)
when (search "SLYNK" package-name :test #'char-equal)
do (rename-package package
package-name
(remove-duplicates
(cons
(format nil "SWANK~a"
(subseq package-name 5))
(package-nicknames package))
:test #'string-equal))))
(setq ls-rpc:*translating-swank-to-slynk* nil)
(push #'ensure-slynk-package-nicknames
ls-api:*slynk-require-hook*)
(ensure-slynk-package-nicknames)
(provide :ls-retro)
| |
8414dc580633f2041fef4a9ead17ea189e30f29b24b1a23bf89317eb81db2e2c | vouillon/osm | lru_cache.mli | OSM tools
* Copyright ( C ) 2013
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , with linking exception ;
* either version 2.1 of the License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
* Copyright (C) 2013 Jérôme Vouillon
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, with linking exception;
* either version 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*)
type t
val make : int -> t
val funct : t -> (int -> 'a) -> int -> 'a
| null | https://raw.githubusercontent.com/vouillon/osm/a42d1bcc82a4ad73c26c81ac7a75f9f1c7470344/generic/lru_cache.mli | ocaml | OSM tools
* Copyright ( C ) 2013
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , with linking exception ;
* either version 2.1 of the License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
* Copyright (C) 2013 Jérôme Vouillon
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, with linking exception;
* either version 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*)
type t
val make : int -> t
val funct : t -> (int -> 'a) -> int -> 'a
| |
53c461eb6a76e6f77203806390eee142e3e77017a99baf6983a9d35c21039bdc | vbrankov/hdf5-ocaml | c_string.mli | open Bigarray
type t
val null : t
external of_string : string -> t = "hdf5_c_string_of_string"
external to_string : t -> string = "hdf5_c_string_to_string"
external to_bigstring : t -> (char, int8_unsigned_elt, c_layout) Array1.t
= "hdf5_c_string_to_bigstring"
external free : t -> unit = "free"
| null | https://raw.githubusercontent.com/vbrankov/hdf5-ocaml/7abc763189767cd6c92620f29ce98f6ee23ba88f/src/raw/c_string.mli | ocaml | open Bigarray
type t
val null : t
external of_string : string -> t = "hdf5_c_string_of_string"
external to_string : t -> string = "hdf5_c_string_to_string"
external to_bigstring : t -> (char, int8_unsigned_elt, c_layout) Array1.t
= "hdf5_c_string_to_bigstring"
external free : t -> unit = "free"
| |
79db06e748ede26550132dcc15639a816174e6315122d57616127cab6b75d85c | kiselgra/c-mera | c.misc.05.unary.lisp | (include <stdio.h>)
(function main () -> int
(decl ((int x = 10))
(printf "%d\\n" +x)
(printf "%d\\n" -x))
(return 0))
# # 10
# # -10
| null | https://raw.githubusercontent.com/kiselgra/c-mera/d06ed96d50a40a3fefe188202c8c535d6784f392/tests/c.misc.05.unary.lisp | lisp | (include <stdio.h>)
(function main () -> int
(decl ((int x = 10))
(printf "%d\\n" +x)
(printf "%d\\n" -x))
(return 0))
# # 10
# # -10
| |
56e80723eb820cd3fd0f28aa1a32fac168d4b38715f41956bb3edbd5c507aea0 | green-coder/girouette | default_api.cljc | (ns girouette.tw.default-api
(:require
[girouette.tw.core :as gtw]
[girouette.version :as version]
[girouette.tw.common :as common]
[girouette.tw.color :as color]
[girouette.tw.layout :as layout]
[girouette.tw.flexbox :as flexbox]
[girouette.tw.grid :as grid]
[girouette.tw.box-alignment :as box-alignment]
[girouette.tw.spacing :as spacing]
[girouette.tw.sizing :as sizing]
[girouette.tw.typography :as typography]
[girouette.tw.background :as background]
[girouette.tw.border :as border]
[girouette.tw.effect :as effect]
[girouette.tw.filter :as filter]
[girouette.tw.table :as table]
[girouette.tw.animation :as animation]
[girouette.tw.transform :as transform]
[girouette.tw.interactivity :as interactivity]
[girouette.tw.svg :as svg]
[girouette.tw.accessibility :as accessibility]))
(def all-tw-components
[common/components
layout/components
flexbox/components
grid/components
box-alignment/components
spacing/components
sizing/components
typography/components
background/components
border/components
effect/components
filter/components
table/components
animation/components
transform/components
interactivity/components
svg/components
accessibility/components])
The API built using the Tailwind v2 components .
(let [{:keys [parser class-name->garden]} (-> all-tw-components
(version/filter-components-by-version [:tw 2])
(gtw/make-api {:color-map color/tw-v2-colors
:font-family-map typography/tw-v2-font-family-map}))]
(def tw-v2-parser parser)
(def tw-v2-class-name->garden class-name->garden))
The API built using the Tailwind v3 components .
(let [{:keys [parser class-name->garden]} (-> all-tw-components
(version/filter-components-by-version [:tw 3])
(gtw/make-api {:color-map color/tw-v3-unified-colors-extended
:font-family-map typography/tw-v2-font-family-map}))]
(def tw-v3-parser parser)
(def tw-v3-class-name->garden class-name->garden))
;; Feel free to fork the snippet above and add your own components,
as that 's what was made for : customization .
| null | https://raw.githubusercontent.com/green-coder/girouette/34f8cabdd605e93bd2ced4e5be29f611557d4b76/lib/girouette/src/girouette/tw/default_api.cljc | clojure | Feel free to fork the snippet above and add your own components, | (ns girouette.tw.default-api
(:require
[girouette.tw.core :as gtw]
[girouette.version :as version]
[girouette.tw.common :as common]
[girouette.tw.color :as color]
[girouette.tw.layout :as layout]
[girouette.tw.flexbox :as flexbox]
[girouette.tw.grid :as grid]
[girouette.tw.box-alignment :as box-alignment]
[girouette.tw.spacing :as spacing]
[girouette.tw.sizing :as sizing]
[girouette.tw.typography :as typography]
[girouette.tw.background :as background]
[girouette.tw.border :as border]
[girouette.tw.effect :as effect]
[girouette.tw.filter :as filter]
[girouette.tw.table :as table]
[girouette.tw.animation :as animation]
[girouette.tw.transform :as transform]
[girouette.tw.interactivity :as interactivity]
[girouette.tw.svg :as svg]
[girouette.tw.accessibility :as accessibility]))
(def all-tw-components
[common/components
layout/components
flexbox/components
grid/components
box-alignment/components
spacing/components
sizing/components
typography/components
background/components
border/components
effect/components
filter/components
table/components
animation/components
transform/components
interactivity/components
svg/components
accessibility/components])
The API built using the Tailwind v2 components .
(let [{:keys [parser class-name->garden]} (-> all-tw-components
(version/filter-components-by-version [:tw 2])
(gtw/make-api {:color-map color/tw-v2-colors
:font-family-map typography/tw-v2-font-family-map}))]
(def tw-v2-parser parser)
(def tw-v2-class-name->garden class-name->garden))
The API built using the Tailwind v3 components .
(let [{:keys [parser class-name->garden]} (-> all-tw-components
(version/filter-components-by-version [:tw 3])
(gtw/make-api {:color-map color/tw-v3-unified-colors-extended
:font-family-map typography/tw-v2-font-family-map}))]
(def tw-v3-parser parser)
(def tw-v3-class-name->garden class-name->garden))
as that 's what was made for : customization .
|
201637a0bfdb91b7b4a85830646b9c21e7141f3cdc92177c4cc3bc9e86d21872 | VisionsGlobalEmpowerment/webchange | cinema.clj | (ns webchange.templates.library.cinema
(:require
[webchange.templates.core :as core]
[webchange.templates.utils.common :as common]
[webchange.templates.utils.dialog :as dialog]))
(def template-options
[{:type "note"
:text "Introduce and show a letter introduction video on a screen for students to learn a new letter."}
{:type "select-video"
:key "video-src"
:label "Choose Letter for Video"
:placeholder "Choose"}])
(def m {:id 43
:name "cinema"
:tags ["Direct Instruction - Educational Video"]
:description "Simple video"
:options {:chanting-video-src {:label "Video"
:type "string"}}
:actions {:template-options {:title "Template Options"
:options template-options}}})
(def t {:assets [{:url "/raw/img/cinema/background.jpg", :size 10, :type "image"}
{:url "/raw/img/cinema/screen-off.png", :size 10, :type "image"}
{:url "/raw/img/ui/play_button/play_button.png", :size 10, :type "image"}],
:objects
{:background {:type "background", :src "/raw/img/cinema/background.jpg"},
:letter-video
{:type "video",
:x 342,
:y 111,
:width 1236,
:height 674,
:visible false,
:editable? {:select true
:show-in-tree? true}},
:play-button
{:type "button",
:x 883,
:y 347,
:actions {:click {:id "play-video", :on "click", :type "action"}},
:font-size 76,
:img "/raw/img/ui/play_button/play_button.png"
:filters [{:name "brightness" :value 0}
{:name "glow" :outer-strength 0 :color 0xffd700}]
:transition "play-button"},
:screen-overlay
{:type "image",
:x 342,
:y 109,
:width 1238,
:height 678,
:src "/raw/img/cinema/screen-off.png",
:visible true}},
:scene-objects [["background"] ["letter-video" "screen-overlay" "play-button"]],
:actions
{:finish-activity {:type "finish-activity"},
:play-video {:type "sequence-data",
:data [{:type "action" :id "stop-timeout"}
{:type "remove-flows", :flow-tag "instruction"}
{:type "set-attribute" :attr-name "visible", :attr-value false :target "play-button"}
{:type "set-attribute" :attr-name "visible", :attr-value false :target "screen-overlay"}
{:type "set-attribute" :attr-name "visible", :attr-value true :target "letter-video"}
{:type "play-video",
:target "letter-video",
:from-var [{:var-name "video-src" :action-property "src"}]}
{:type "set-attribute" :attr-name "visible", :attr-value false :target "letter-video"}
{:type "set-attribute" :attr-name "visible", :attr-value true :target "screen-overlay"}
{:id "dialog-finish-activity", :type "action"}
{:id "finish-activity", :type "action"}]},
:dialog-finish-activity (dialog/default "Finish activity")
:dialog-intro (-> (dialog/default "Intro")
(assoc :available-activities ["highlight-play"]))
:dialog-timeout-instructions (-> (dialog/default "Timeout instructions")
(assoc :available-activities ["highlight-play"]))
:init-concept {:type "set-variable" :var-name "video-src" :var-value nil}
:start-scene {:type "sequence-data",
:data [{:type "start-activity"},
{:type "action" :id "init-concept"}
{:type "action" :id "dialog-intro"}
{:type "action" :id "start-timeout"}],
:description "Initial action"
:tags ["instruction"]},
:stop-activity {:type "stop-activity"}
:start-timeout {:type "start-timeout-counter",
:id "inactive-counter",
:action "continue-try",
:autostart true
:interval 30000}
:stop-timeout {:type "remove-interval"
:id "inactive-counter"}
:continue-try {:type "sequence",
:data ["start-timeout"
"dialog-timeout-instructions"]},
:highlight-play {:type "transition"
:transition-id "play-button"
:return-immediately true
:from {:brightness 0 :glow 0}
:to {:brightness 1 :glow 10 :yoyo true :duration 0.5 :repeat 5}}},
:triggers {:back {:on "back", :action "stop-activity"}, :start {:on "start", :action "start-scene"}},
:metadata {:prev "map", :autostart true}})
(defn- set-video
[t src]
(assoc-in t [:actions :init-concept :var-value] src))
(defn create-activity
[args]
(-> (common/init-metadata m t args)
(set-video (:video-src args))
(assoc-in [:metadata :saved-props :template-options] args)))
(defn- update-activity
[old-data args]
(-> old-data
(set-video (:video-src args))
(assoc-in [:metadata :saved-props :template-options] args)))
(core/register-template m create-activity update-activity)
| null | https://raw.githubusercontent.com/VisionsGlobalEmpowerment/webchange/118ba5ee407ba1261bac40a6ba5729ccda6e8150/src/clj/webchange/templates/library/cinema.clj | clojure | (ns webchange.templates.library.cinema
(:require
[webchange.templates.core :as core]
[webchange.templates.utils.common :as common]
[webchange.templates.utils.dialog :as dialog]))
(def template-options
[{:type "note"
:text "Introduce and show a letter introduction video on a screen for students to learn a new letter."}
{:type "select-video"
:key "video-src"
:label "Choose Letter for Video"
:placeholder "Choose"}])
(def m {:id 43
:name "cinema"
:tags ["Direct Instruction - Educational Video"]
:description "Simple video"
:options {:chanting-video-src {:label "Video"
:type "string"}}
:actions {:template-options {:title "Template Options"
:options template-options}}})
(def t {:assets [{:url "/raw/img/cinema/background.jpg", :size 10, :type "image"}
{:url "/raw/img/cinema/screen-off.png", :size 10, :type "image"}
{:url "/raw/img/ui/play_button/play_button.png", :size 10, :type "image"}],
:objects
{:background {:type "background", :src "/raw/img/cinema/background.jpg"},
:letter-video
{:type "video",
:x 342,
:y 111,
:width 1236,
:height 674,
:visible false,
:editable? {:select true
:show-in-tree? true}},
:play-button
{:type "button",
:x 883,
:y 347,
:actions {:click {:id "play-video", :on "click", :type "action"}},
:font-size 76,
:img "/raw/img/ui/play_button/play_button.png"
:filters [{:name "brightness" :value 0}
{:name "glow" :outer-strength 0 :color 0xffd700}]
:transition "play-button"},
:screen-overlay
{:type "image",
:x 342,
:y 109,
:width 1238,
:height 678,
:src "/raw/img/cinema/screen-off.png",
:visible true}},
:scene-objects [["background"] ["letter-video" "screen-overlay" "play-button"]],
:actions
{:finish-activity {:type "finish-activity"},
:play-video {:type "sequence-data",
:data [{:type "action" :id "stop-timeout"}
{:type "remove-flows", :flow-tag "instruction"}
{:type "set-attribute" :attr-name "visible", :attr-value false :target "play-button"}
{:type "set-attribute" :attr-name "visible", :attr-value false :target "screen-overlay"}
{:type "set-attribute" :attr-name "visible", :attr-value true :target "letter-video"}
{:type "play-video",
:target "letter-video",
:from-var [{:var-name "video-src" :action-property "src"}]}
{:type "set-attribute" :attr-name "visible", :attr-value false :target "letter-video"}
{:type "set-attribute" :attr-name "visible", :attr-value true :target "screen-overlay"}
{:id "dialog-finish-activity", :type "action"}
{:id "finish-activity", :type "action"}]},
:dialog-finish-activity (dialog/default "Finish activity")
:dialog-intro (-> (dialog/default "Intro")
(assoc :available-activities ["highlight-play"]))
:dialog-timeout-instructions (-> (dialog/default "Timeout instructions")
(assoc :available-activities ["highlight-play"]))
:init-concept {:type "set-variable" :var-name "video-src" :var-value nil}
:start-scene {:type "sequence-data",
:data [{:type "start-activity"},
{:type "action" :id "init-concept"}
{:type "action" :id "dialog-intro"}
{:type "action" :id "start-timeout"}],
:description "Initial action"
:tags ["instruction"]},
:stop-activity {:type "stop-activity"}
:start-timeout {:type "start-timeout-counter",
:id "inactive-counter",
:action "continue-try",
:autostart true
:interval 30000}
:stop-timeout {:type "remove-interval"
:id "inactive-counter"}
:continue-try {:type "sequence",
:data ["start-timeout"
"dialog-timeout-instructions"]},
:highlight-play {:type "transition"
:transition-id "play-button"
:return-immediately true
:from {:brightness 0 :glow 0}
:to {:brightness 1 :glow 10 :yoyo true :duration 0.5 :repeat 5}}},
:triggers {:back {:on "back", :action "stop-activity"}, :start {:on "start", :action "start-scene"}},
:metadata {:prev "map", :autostart true}})
(defn- set-video
[t src]
(assoc-in t [:actions :init-concept :var-value] src))
(defn create-activity
[args]
(-> (common/init-metadata m t args)
(set-video (:video-src args))
(assoc-in [:metadata :saved-props :template-options] args)))
(defn- update-activity
[old-data args]
(-> old-data
(set-video (:video-src args))
(assoc-in [:metadata :saved-props :template-options] args)))
(core/register-template m create-activity update-activity)
| |
b72906ccecd69f234753d7298d12cc6b0625c9749a1cbaaf9a5d10621f5478ff | mithrandi/isaacranks | Instrument.hs | module Instrument
( requestDuration
, instrumentApp
, observeDurationL
, observeHandler
, observeHandlerL
, timeAction
) where
import Data.Ratio ((%))
import qualified Data.Text as T
import qualified Data.Text.Encoding as E
import Data.Text.Encoding.Error
import Import
import qualified Network.HTTP.Types as HTTP
import qualified Network.Wai as Wai
import qualified Prometheus as Prom
import qualified System.Clock as Clock
instance Prom . MonadMonitor ( HandlerFor site ) where
-- doIO = liftIO
-- | Core information about HTTP requests:
--
-- Labels:
-- * handler: the name of the application
-- * method: the HTTP method requested
-- * status_code: the HTTP response code
--
-- Actual metric is the latency of the request.
type RequestDuration = Prom.Vector Prom.Label3 Prom.Histogram
requestDuration :: IO RequestDuration
requestDuration =
Prom.register $ Prom.vector ("handler", "method", "status_code") $ Prom.histogram info Prom.defaultBuckets
where
info =
Prom.Info
"http_request_duration_seconds"
"The HTTP request latencies in seconds."
| Instrument a WAI app with the default WAI metrics .
instrumentApp
:: RequestDuration -- ^ The metric to instrument
-> Text -- ^ The label used to identify this app
-> Wai.Application -- ^ The app to instrument
-> Wai.Application -- ^ The instrumented app
instrumentApp metric handler app req resp = do
start <- Clock.getTime Clock.Monotonic
app
req
(\res -> do
recordResult start (HTTP.statusCode (Wai.responseStatus res))
resp res) `onException`
recordResult start 500
where
recordResult start statusCode = do
end <- Clock.getTime Clock.Monotonic
let latency = fromRational . (/ 1000000000) . toRational . Clock.toNanoSecs $
end `Clock.diffTimeSpec` start
Prom.withLabel metric (handler, method, T.pack status) (`Prom.observe` latency)
where
method = E.decodeUtf8With lenientDecode (Wai.requestMethod req)
status = show statusCode
observeDuration ::
(MonadIO m, Prom.Observer metric) =>
metric -> m a -> m a
observeDuration metric io = do
(result, duration) <- timeAction io
liftIO $ Prom.observe metric duration
return result
observeDurationL ::
(MonadIO m, Prom.Observer metric, Prom.Label l) =>
Prom.Vector l metric -> l -> m a -> m a
observeDurationL metric label io = do
(result, duration) <- timeAction io
liftIO $ Prom.withLabel metric label (`Prom.observe` duration)
return result
-- | Lifted version of 'Prometheus.timeAction'
timeAction :: MonadIO m => m a -> m (a, Double)
timeAction io = do
start <- liftIO $ Clock.getTime Clock.Monotonic
result <- io
end <- liftIO $ Clock.getTime Clock.Monotonic
let duration = Clock.toNanoSecs (end `Clock.diffTimeSpec` start) % 1000000000
return (result, fromRational duration)
observeHandler ::
Prom.Observer metric =>
(AppMetrics -> metric) -> HandlerFor App a -> HandlerFor App a
observeHandler m h = getsYesod (m . appMetrics) >>= (`observeDuration` h)
observeHandlerL ::
(Prom.Observer metric, Prom.Label l) =>
(AppMetrics -> Prom.Vector l metric) -> l -> HandlerFor App a -> HandlerFor App a
observeHandlerL m label h = do
metric <- getsYesod (m . appMetrics)
observeDurationL metric label h
| null | https://raw.githubusercontent.com/mithrandi/isaacranks/7943ea00ef3d3f415cae61e33d9f16f234de895f/Instrument.hs | haskell | doIO = liftIO
| Core information about HTTP requests:
Labels:
* handler: the name of the application
* method: the HTTP method requested
* status_code: the HTTP response code
Actual metric is the latency of the request.
^ The metric to instrument
^ The label used to identify this app
^ The app to instrument
^ The instrumented app
| Lifted version of 'Prometheus.timeAction' | module Instrument
( requestDuration
, instrumentApp
, observeDurationL
, observeHandler
, observeHandlerL
, timeAction
) where
import Data.Ratio ((%))
import qualified Data.Text as T
import qualified Data.Text.Encoding as E
import Data.Text.Encoding.Error
import Import
import qualified Network.HTTP.Types as HTTP
import qualified Network.Wai as Wai
import qualified Prometheus as Prom
import qualified System.Clock as Clock
instance Prom . MonadMonitor ( HandlerFor site ) where
type RequestDuration = Prom.Vector Prom.Label3 Prom.Histogram
requestDuration :: IO RequestDuration
requestDuration =
Prom.register $ Prom.vector ("handler", "method", "status_code") $ Prom.histogram info Prom.defaultBuckets
where
info =
Prom.Info
"http_request_duration_seconds"
"The HTTP request latencies in seconds."
| Instrument a WAI app with the default WAI metrics .
instrumentApp
instrumentApp metric handler app req resp = do
start <- Clock.getTime Clock.Monotonic
app
req
(\res -> do
recordResult start (HTTP.statusCode (Wai.responseStatus res))
resp res) `onException`
recordResult start 500
where
recordResult start statusCode = do
end <- Clock.getTime Clock.Monotonic
let latency = fromRational . (/ 1000000000) . toRational . Clock.toNanoSecs $
end `Clock.diffTimeSpec` start
Prom.withLabel metric (handler, method, T.pack status) (`Prom.observe` latency)
where
method = E.decodeUtf8With lenientDecode (Wai.requestMethod req)
status = show statusCode
observeDuration ::
(MonadIO m, Prom.Observer metric) =>
metric -> m a -> m a
observeDuration metric io = do
(result, duration) <- timeAction io
liftIO $ Prom.observe metric duration
return result
observeDurationL ::
(MonadIO m, Prom.Observer metric, Prom.Label l) =>
Prom.Vector l metric -> l -> m a -> m a
observeDurationL metric label io = do
(result, duration) <- timeAction io
liftIO $ Prom.withLabel metric label (`Prom.observe` duration)
return result
timeAction :: MonadIO m => m a -> m (a, Double)
timeAction io = do
start <- liftIO $ Clock.getTime Clock.Monotonic
result <- io
end <- liftIO $ Clock.getTime Clock.Monotonic
let duration = Clock.toNanoSecs (end `Clock.diffTimeSpec` start) % 1000000000
return (result, fromRational duration)
observeHandler ::
Prom.Observer metric =>
(AppMetrics -> metric) -> HandlerFor App a -> HandlerFor App a
observeHandler m h = getsYesod (m . appMetrics) >>= (`observeDuration` h)
observeHandlerL ::
(Prom.Observer metric, Prom.Label l) =>
(AppMetrics -> Prom.Vector l metric) -> l -> HandlerFor App a -> HandlerFor App a
observeHandlerL m label h = do
metric <- getsYesod (m . appMetrics)
observeDurationL metric label h
|
93d9e92ad2ff00a3a588659e8136ff24e30968a81a766d5b4ab33ebaeb025bea | LaurentMazare/ocaml-torch | alexnet.ml | AlexNet model .
*)
open Base
open Torch
let sub = Var_store.sub
let conv2d = Layer.conv2d_
let features vs =
let conv1 = conv2d (sub vs "0") ~ksize:11 ~padding:2 ~stride:4 ~input_dim:3 64 in
let conv2 = conv2d (sub vs "3") ~ksize:5 ~padding:1 ~stride:2 ~input_dim:64 192 in
let conv3 = conv2d (sub vs "6") ~ksize:3 ~padding:1 ~stride:1 ~input_dim:192 384 in
let conv4 = conv2d (sub vs "8") ~ksize:3 ~padding:1 ~stride:1 ~input_dim:384 256 in
let conv5 = conv2d (sub vs "10") ~ksize:3 ~padding:1 ~stride:1 ~input_dim:256 256 in
Layer.of_fn (fun xs ->
Layer.forward conv1 xs
|> Tensor.relu
|> Tensor.max_pool2d ~ksize:(3, 3) ~stride:(2, 2)
|> Layer.forward conv2
|> Tensor.relu
|> Tensor.max_pool2d ~ksize:(3, 3) ~stride:(2, 2)
|> Layer.forward conv3
|> Tensor.relu
|> Layer.forward conv4
|> Tensor.relu
|> Layer.forward conv5
|> Tensor.relu
|> Tensor.max_pool2d ~ksize:(3, 3) ~stride:(2, 2))
let classifier ?num_classes vs =
let linear1 = Layer.linear (sub vs "1") ~input_dim:(256 * 6 * 6) 4096 in
let linear2 = Layer.linear (sub vs "4") ~input_dim:4096 4096 in
let linear_or_id =
match num_classes with
| Some num_classes -> Layer.linear (sub vs "6") ~input_dim:4096 num_classes
| None -> Layer.id
in
Layer.of_fn_ (fun xs ~is_training ->
Tensor.dropout xs ~p:0.5 ~is_training
|> Layer.forward linear1
|> Tensor.relu
|> Tensor.dropout ~p:0.5 ~is_training
|> Layer.forward linear2
|> Tensor.relu
|> Layer.forward linear_or_id)
let alexnet ?num_classes vs =
let features = features (sub vs "features") in
let classifier = classifier ?num_classes (sub vs "classifier") in
Layer.of_fn_ (fun xs ~is_training ->
let batch_size = Tensor.shape xs |> List.hd_exn in
Layer.forward features xs
|> Tensor.adaptive_avg_pool2d ~output_size:[ 6; 6 ]
|> Tensor.view ~size:[ batch_size; -1 ]
|> Layer.forward_ classifier ~is_training)
| null | https://raw.githubusercontent.com/LaurentMazare/ocaml-torch/a82b906a22c7c23138af16fab497a08e5167d249/src/vision/alexnet.ml | ocaml | AlexNet model .
*)
open Base
open Torch
let sub = Var_store.sub
let conv2d = Layer.conv2d_
let features vs =
let conv1 = conv2d (sub vs "0") ~ksize:11 ~padding:2 ~stride:4 ~input_dim:3 64 in
let conv2 = conv2d (sub vs "3") ~ksize:5 ~padding:1 ~stride:2 ~input_dim:64 192 in
let conv3 = conv2d (sub vs "6") ~ksize:3 ~padding:1 ~stride:1 ~input_dim:192 384 in
let conv4 = conv2d (sub vs "8") ~ksize:3 ~padding:1 ~stride:1 ~input_dim:384 256 in
let conv5 = conv2d (sub vs "10") ~ksize:3 ~padding:1 ~stride:1 ~input_dim:256 256 in
Layer.of_fn (fun xs ->
Layer.forward conv1 xs
|> Tensor.relu
|> Tensor.max_pool2d ~ksize:(3, 3) ~stride:(2, 2)
|> Layer.forward conv2
|> Tensor.relu
|> Tensor.max_pool2d ~ksize:(3, 3) ~stride:(2, 2)
|> Layer.forward conv3
|> Tensor.relu
|> Layer.forward conv4
|> Tensor.relu
|> Layer.forward conv5
|> Tensor.relu
|> Tensor.max_pool2d ~ksize:(3, 3) ~stride:(2, 2))
let classifier ?num_classes vs =
let linear1 = Layer.linear (sub vs "1") ~input_dim:(256 * 6 * 6) 4096 in
let linear2 = Layer.linear (sub vs "4") ~input_dim:4096 4096 in
let linear_or_id =
match num_classes with
| Some num_classes -> Layer.linear (sub vs "6") ~input_dim:4096 num_classes
| None -> Layer.id
in
Layer.of_fn_ (fun xs ~is_training ->
Tensor.dropout xs ~p:0.5 ~is_training
|> Layer.forward linear1
|> Tensor.relu
|> Tensor.dropout ~p:0.5 ~is_training
|> Layer.forward linear2
|> Tensor.relu
|> Layer.forward linear_or_id)
let alexnet ?num_classes vs =
let features = features (sub vs "features") in
let classifier = classifier ?num_classes (sub vs "classifier") in
Layer.of_fn_ (fun xs ~is_training ->
let batch_size = Tensor.shape xs |> List.hd_exn in
Layer.forward features xs
|> Tensor.adaptive_avg_pool2d ~output_size:[ 6; 6 ]
|> Tensor.view ~size:[ batch_size; -1 ]
|> Layer.forward_ classifier ~is_training)
| |
aee3882c23075ed3e00aa742658a48b96a6a7f003d92b6201123429854f3375a | solita/mnt-teet | meeting_queries.clj | (ns teet.meeting.meeting-queries
(:require [teet.project.project-db :as project-db]
[teet.db-api.core :refer [defquery]]
[teet.meta.meta-query :as meta-query]
[teet.meeting.meeting-db :as meeting-db]
[teet.user.user-model :as user-model]
[datomic.client.api :as d]
[clojure.walk :as walk]
[teet.util.date :as du]
[teet.util.string :as string]
[teet.link.link-db :as link-db]
[teet.util.datomic :as datomic-util]
[teet.integration.postgrest :as postgrest]
[teet.environment :as environment]
[teet.meeting.meeting-model :as meeting-model]
[ring.util.io :as ring-io]
[teet.comment.comment-db :as comment-db]
[teet.pdf.pdf-export :as pdf-export]
[teet.meeting.meeting-pdf :as meeting-pdf]
[teet.log :as log]
[teet.entity.entity-db :as entity-db]
[teet.db-api.db-api-large-text :as db-api-large-text]))
(defn project-related-unit-ids
[db api-context project-eid]
(let [units (:thk.project/related-cadastral-units (datomic-util/entity db project-eid))]
{:cadastral-unit (set units)
:estate (->> (postgrest/rpc api-context
:select_feature_properties
{:ids units
:properties ["KINNISTU"]})
vals
(mapv :KINNISTU)
set)}))
(defn project-upcoming-meetings
[db project-eid]
(d/q '[:find (pull ?m [* :activity/_meetings])
:where
[?p :thk.project/lifecycles ?l]
[?l :thk.lifecycle/activities ?a]
[?a :activity/meetings ?m]
[?m :meeting/start ?start]
[(.after ?start ?today )]
[(missing? $ ?m :meta/deleted?)]
:in $ ?p ?today]
db
project-eid
(du/start-of-today)))
(defn activity-past-meetings
[db activity-eid]
(->> (d/q '[:find (pull ?m [* :activity/_meetings])
:where
[?a :activity/meetings ?m]
[?m :meeting/start ?start]
[(.before ?start ?today)]
[(missing? $ ?m :meta/deleted?)]
:in $ ?a ?today]
db
activity-eid
(du/start-of-today))
(mapv first)
(sort-by :meeting/start #(.after %1 %2))))
(defn project-past-meetings
[db project-id]
(->> (d/q '[:find (pull ?m [* {:activity/_meetings [:activity/name :db/id]}])
:in $ ?project ?today
:where
[?project :thk.project/lifecycles ?l]
[?l :thk.lifecycle/activities ?a]
[?a :activity/meetings ?m]
[?m :meeting/start ?start]
[(.before ?start ?today)]
[(missing? $ ?m :meta/deleted?)]]
db
project-id
(du/start-of-today))
(mapv first)
(mapv #(assoc % :meeting/activity-name (get-in % [:activity/_meetings 0 :activity/name]))) ;; This is done to have the activity name in easier place for frontend
(sort-by :meeting/start)
reverse))
(defn matching-decision-ids
[search-term meetings]
(set
(for [m meetings
a (:meeting/agenda m)
d (:meeting.agenda/decisions a)
:let [candidate-text (str (:meeting/title m)
" "
(:meeting/number m)
" "
(:meeting/location m)
" "
(:meeting.agenda/topic a)
" "
(:meeting.decision/body d)
" "
(:meeting.decision/number d))]
:when (string/contains-words? candidate-text search-term)]
(:db/id d))))
(defn filter-decisions
[decision-ids meetings]
(for [m meetings
:when (some decision-ids
(map :db/id
(mapcat
:meeting.agenda/decisions
(:meeting/agenda m))))]
(assoc m :meeting/agenda
(for [a (:meeting/agenda m)
:when (some decision-ids
(map :db/id
(:meeting.agenda/decisions a)))]
(assoc a :meeting.agenda/decisions
(for [d (:meeting.agenda/decisions a)
:when (decision-ids (:db/id d))]
d))))))
(defn activity-decisions
[db user activity-id search-term]
(let [meetings
(db-api-large-text/with-large-text
meeting-model/rich-text-fields
(link-db/fetch-links
db user
(project-related-unit-ids db (environment/api-context) (project-db/activity-project-id db activity-id))
#(contains? % :meeting.decision/body)
(meta-query/without-deleted
db
(->> (d/q '[:find
(pull ?m [* :activity/_meetings
{:meeting/agenda
[* {:meeting.agenda/decisions
[:db/id :meeting.decision/body
:meeting.decision/number
{:file/_attached-to
[:db/id :file/name
:file/upload-complete?
:meta/created-at
{:meta/creator [:user/given-name :user/family-name]}]}]}]}
{:review/_of
[{:review/reviewer [:user/given-name
:user/family-name]}]}])
(max ?cr)
:where
[?a :activity/meetings ?m]
[?m :meeting/agenda ?ag]
[?m :meeting/locked? true]
[?ag :meeting.agenda/decisions ?d]
[?r :review/of ?m]
[?r :meta/created-at ?cr]
:in $ ?a]
db activity-id)
(map #(assoc (first %) :meeting/locked-at (second %)))
(sort-by :meeting/start)
reverse))))
decision-ids (matching-decision-ids search-term meetings)
meetings-without-incomplete-uploads (meeting-db/without-incomplete-uploads meetings)]
(filter-decisions decision-ids meetings-without-incomplete-uploads)))
(defn project-decisions
[db user project-id search-term]
(let [meetings (db-api-large-text/with-large-text
meeting-model/rich-text-fields
(link-db/fetch-links
db user
(project-related-unit-ids db (environment/api-context) project-id)
#(contains? % :meeting.decision/body)
(meta-query/without-deleted
db
(->> (d/q '[:find
(pull ?m [* {:activity/_meetings [:activity/name
:db/id]}
{:meeting/agenda
[* {:meeting.agenda/decisions
[:db/id :meeting.decision/body
:meeting.decision/number
{:file/_attached-to
[:db/id :file/name
:file/upload-complete?
:meta/created-at
{:meta/creator [:user/given-name :user/family-name]}]}]}]}
{:review/_of
[{:review/reviewer [:user/given-name
:user/family-name]}]}])
(max ?cr)
:where
[?p :thk.project/lifecycles ?l]
[?l :thk.lifecycle/activities ?a]
[?a :activity/meetings ?m]
[?m :meeting/agenda ?ag]
[?m :meeting/locked? true]
[?ag :meeting.agenda/decisions ?d]
[?r :review/of ?m]
[?r :meta/created-at ?cr]
:in $ ?p]
db project-id)
(map #(assoc (first %) :meeting/locked-at (second %)))
(sort-by :meeting/start)
reverse))))
decision-ids (matching-decision-ids search-term meetings)]
(filter-decisions decision-ids meetings)))
(defn fetch-project-meetings
[db eid]
(let [activity-meetings (group-by
#(-> %
:activity/_meetings
first
:db/id)
(mapv first
(project-upcoming-meetings db eid)))]
(walk/postwalk
(fn [e]
(if-let [activity-meeting (and (map? e) (get activity-meetings (:db/id e)))]
(assoc e :activity/meetings activity-meeting)
e))
(project-db/project-by-id db eid {}))))
(defn fetch-meeting-title
[db meeting-id]
( d/pull db [:meeting/title :meeting/number] meeting-id))
(defquery :meeting/project-with-meetings
{:doc "Fetch project data with project meetings"
:context {db :db
user :user}
:args {:thk.project/keys [id]}
:project-id [:thk.project/id id]}
(meta-query/without-deleted
db
(fetch-project-meetings db [:thk.project/id id])))
(def attachments {:file/_attached-to
[:db/id :file/name :file/upload-complete?
:meta/created-at
{:meta/creator [:user/given-name :user/family-name]}]})
(defn fetch-meeting* [db user meeting-id activity-id]
(let [meeting
(d/pull
db
`[:db/id
:meeting/locked?
:meeting/title :meeting/location
:meeting/start :meeting/end
:meeting/notifications-sent-at
:meeting/number :meta/created-at :meta/modified-at
{:meeting/organizer ~user-model/user-listing-attributes}
{:meeting/agenda [:db/id
:meeting.agenda/topic
:meeting.agenda/body
:meta/created-at :meta/modified-at
{:meeting.agenda/decisions
[:db/id :meeting.decision/body
:meta/created-at :meta/modified-at
:meeting.decision/number
~attachments]}
{:meeting.agenda/responsible ~user-model/user-listing-attributes}
~attachments]}
{:review/_of [:db/id
:review/comment
:review/decision
:meta/created-at
{:review/reviewer ~user-model/user-listing-attributes}]}
{:participation/_in
[:db/id
:participation/absent?
:participation/role
:meta/created-at :meta/modified-at
{:participation/participant ~user-model/user-listing-attributes}]}]
(meeting-db/activity-meeting-id db activity-id meeting-id))]
(merge
meeting
(comment-db/comment-count-of-entity-by-status
db user meeting-id :meeting)
(entity-db/entity-seen db user meeting-id))))
(defquery :meeting/fetch-meeting
{:doc "Fetch a single meeting info and project info"
:context {:keys [db user]}
:args {:keys [activity-id meeting-id]}
:project-id (project-db/activity-project-id db activity-id)}
(meeting-db/without-incomplete-uploads
(db-api-large-text/with-large-text
meeting-model/rich-text-fields
(let [valid-external-ids (project-related-unit-ids db (environment/api-context) (project-db/activity-project-id db activity-id))]
(link-db/fetch-links
db user
valid-external-ids
#(or (contains? % :meeting.agenda/topic)
(contains? % :meeting.decision/body))
(meta-query/without-deleted
db
{:project (fetch-project-meetings db (project-db/activity-project-id db activity-id)) ;; This ends up pulling duplicate information, could be refactored
:meeting (fetch-meeting* db user meeting-id activity-id)}
(fn [entity]
(contains? entity :link/to))))))))
(defquery :meeting/activity-meeting-history
{:doc "Fetch past meetings for an activity"
:context {:keys [db user]}
:args {:keys [activity-id]}
:project-id (project-db/activity-project-id db activity-id)}
(activity-past-meetings db activity-id))
(defquery :meeting/activity-decision-history
{:doc "Fetch all the decisions for activity matching the given string"
:context {:keys [db user]}
:args {:keys [activity-id
search-term]}
:project-id (project-db/activity-project-id db activity-id)}
(activity-decisions db user activity-id search-term))
(defquery :meeting/project-meeting-history
{:doc "Fetch all the meetings from the history of the project"
:context {:keys [db user]}
:args {:keys [project-id]}
:project-id project-id}
(project-past-meetings db project-id))
(defquery :meeting/project-decision-history
{:doc "Fetch all decisions for project matching the given string"
:context {:keys [db user]}
:args {:keys [project-id
search-term]}
:project-id project-id}
(project-decisions db user project-id search-term))
(defquery :meeting/download-pdf
{:doc "Download meeting minutes as PDF"
:context {:keys [db user]}
:args {id :db/id
language :language}
:project-id (project-db/meeting-project-id db id)}
^{:format :raw}
{:status 200
:headers {"Content-Disposition" (str "inline; filename=meeting_" (meeting-model/meeting-title (fetch-meeting-title db id)) ".pdf")
"Content-Type" "application/pdf"}
:body (ring-io/piped-input-stream
(fn [ostream]
(try
(pdf-export/hiccup->pdf
(meeting-pdf/meeting-pdf db user language id)
ostream)
(catch Exception e
(log/error e "Exception while generating meeting PDF")))))})
| null | https://raw.githubusercontent.com/solita/mnt-teet/2dcdad70d4dcf33eef56650f8f6d3fa9f7756cec/app/backend/src/clj/teet/meeting/meeting_queries.clj | clojure | This is done to have the activity name in easier place for frontend
This ends up pulling duplicate information, could be refactored | (ns teet.meeting.meeting-queries
(:require [teet.project.project-db :as project-db]
[teet.db-api.core :refer [defquery]]
[teet.meta.meta-query :as meta-query]
[teet.meeting.meeting-db :as meeting-db]
[teet.user.user-model :as user-model]
[datomic.client.api :as d]
[clojure.walk :as walk]
[teet.util.date :as du]
[teet.util.string :as string]
[teet.link.link-db :as link-db]
[teet.util.datomic :as datomic-util]
[teet.integration.postgrest :as postgrest]
[teet.environment :as environment]
[teet.meeting.meeting-model :as meeting-model]
[ring.util.io :as ring-io]
[teet.comment.comment-db :as comment-db]
[teet.pdf.pdf-export :as pdf-export]
[teet.meeting.meeting-pdf :as meeting-pdf]
[teet.log :as log]
[teet.entity.entity-db :as entity-db]
[teet.db-api.db-api-large-text :as db-api-large-text]))
(defn project-related-unit-ids
[db api-context project-eid]
(let [units (:thk.project/related-cadastral-units (datomic-util/entity db project-eid))]
{:cadastral-unit (set units)
:estate (->> (postgrest/rpc api-context
:select_feature_properties
{:ids units
:properties ["KINNISTU"]})
vals
(mapv :KINNISTU)
set)}))
(defn project-upcoming-meetings
[db project-eid]
(d/q '[:find (pull ?m [* :activity/_meetings])
:where
[?p :thk.project/lifecycles ?l]
[?l :thk.lifecycle/activities ?a]
[?a :activity/meetings ?m]
[?m :meeting/start ?start]
[(.after ?start ?today )]
[(missing? $ ?m :meta/deleted?)]
:in $ ?p ?today]
db
project-eid
(du/start-of-today)))
(defn activity-past-meetings
[db activity-eid]
(->> (d/q '[:find (pull ?m [* :activity/_meetings])
:where
[?a :activity/meetings ?m]
[?m :meeting/start ?start]
[(.before ?start ?today)]
[(missing? $ ?m :meta/deleted?)]
:in $ ?a ?today]
db
activity-eid
(du/start-of-today))
(mapv first)
(sort-by :meeting/start #(.after %1 %2))))
(defn project-past-meetings
[db project-id]
(->> (d/q '[:find (pull ?m [* {:activity/_meetings [:activity/name :db/id]}])
:in $ ?project ?today
:where
[?project :thk.project/lifecycles ?l]
[?l :thk.lifecycle/activities ?a]
[?a :activity/meetings ?m]
[?m :meeting/start ?start]
[(.before ?start ?today)]
[(missing? $ ?m :meta/deleted?)]]
db
project-id
(du/start-of-today))
(mapv first)
(sort-by :meeting/start)
reverse))
(defn matching-decision-ids
[search-term meetings]
(set
(for [m meetings
a (:meeting/agenda m)
d (:meeting.agenda/decisions a)
:let [candidate-text (str (:meeting/title m)
" "
(:meeting/number m)
" "
(:meeting/location m)
" "
(:meeting.agenda/topic a)
" "
(:meeting.decision/body d)
" "
(:meeting.decision/number d))]
:when (string/contains-words? candidate-text search-term)]
(:db/id d))))
(defn filter-decisions
[decision-ids meetings]
(for [m meetings
:when (some decision-ids
(map :db/id
(mapcat
:meeting.agenda/decisions
(:meeting/agenda m))))]
(assoc m :meeting/agenda
(for [a (:meeting/agenda m)
:when (some decision-ids
(map :db/id
(:meeting.agenda/decisions a)))]
(assoc a :meeting.agenda/decisions
(for [d (:meeting.agenda/decisions a)
:when (decision-ids (:db/id d))]
d))))))
(defn activity-decisions
[db user activity-id search-term]
(let [meetings
(db-api-large-text/with-large-text
meeting-model/rich-text-fields
(link-db/fetch-links
db user
(project-related-unit-ids db (environment/api-context) (project-db/activity-project-id db activity-id))
#(contains? % :meeting.decision/body)
(meta-query/without-deleted
db
(->> (d/q '[:find
(pull ?m [* :activity/_meetings
{:meeting/agenda
[* {:meeting.agenda/decisions
[:db/id :meeting.decision/body
:meeting.decision/number
{:file/_attached-to
[:db/id :file/name
:file/upload-complete?
:meta/created-at
{:meta/creator [:user/given-name :user/family-name]}]}]}]}
{:review/_of
[{:review/reviewer [:user/given-name
:user/family-name]}]}])
(max ?cr)
:where
[?a :activity/meetings ?m]
[?m :meeting/agenda ?ag]
[?m :meeting/locked? true]
[?ag :meeting.agenda/decisions ?d]
[?r :review/of ?m]
[?r :meta/created-at ?cr]
:in $ ?a]
db activity-id)
(map #(assoc (first %) :meeting/locked-at (second %)))
(sort-by :meeting/start)
reverse))))
decision-ids (matching-decision-ids search-term meetings)
meetings-without-incomplete-uploads (meeting-db/without-incomplete-uploads meetings)]
(filter-decisions decision-ids meetings-without-incomplete-uploads)))
(defn project-decisions
[db user project-id search-term]
(let [meetings (db-api-large-text/with-large-text
meeting-model/rich-text-fields
(link-db/fetch-links
db user
(project-related-unit-ids db (environment/api-context) project-id)
#(contains? % :meeting.decision/body)
(meta-query/without-deleted
db
(->> (d/q '[:find
(pull ?m [* {:activity/_meetings [:activity/name
:db/id]}
{:meeting/agenda
[* {:meeting.agenda/decisions
[:db/id :meeting.decision/body
:meeting.decision/number
{:file/_attached-to
[:db/id :file/name
:file/upload-complete?
:meta/created-at
{:meta/creator [:user/given-name :user/family-name]}]}]}]}
{:review/_of
[{:review/reviewer [:user/given-name
:user/family-name]}]}])
(max ?cr)
:where
[?p :thk.project/lifecycles ?l]
[?l :thk.lifecycle/activities ?a]
[?a :activity/meetings ?m]
[?m :meeting/agenda ?ag]
[?m :meeting/locked? true]
[?ag :meeting.agenda/decisions ?d]
[?r :review/of ?m]
[?r :meta/created-at ?cr]
:in $ ?p]
db project-id)
(map #(assoc (first %) :meeting/locked-at (second %)))
(sort-by :meeting/start)
reverse))))
decision-ids (matching-decision-ids search-term meetings)]
(filter-decisions decision-ids meetings)))
(defn fetch-project-meetings
[db eid]
(let [activity-meetings (group-by
#(-> %
:activity/_meetings
first
:db/id)
(mapv first
(project-upcoming-meetings db eid)))]
(walk/postwalk
(fn [e]
(if-let [activity-meeting (and (map? e) (get activity-meetings (:db/id e)))]
(assoc e :activity/meetings activity-meeting)
e))
(project-db/project-by-id db eid {}))))
(defn fetch-meeting-title
[db meeting-id]
( d/pull db [:meeting/title :meeting/number] meeting-id))
(defquery :meeting/project-with-meetings
{:doc "Fetch project data with project meetings"
:context {db :db
user :user}
:args {:thk.project/keys [id]}
:project-id [:thk.project/id id]}
(meta-query/without-deleted
db
(fetch-project-meetings db [:thk.project/id id])))
(def attachments {:file/_attached-to
[:db/id :file/name :file/upload-complete?
:meta/created-at
{:meta/creator [:user/given-name :user/family-name]}]})
(defn fetch-meeting* [db user meeting-id activity-id]
(let [meeting
(d/pull
db
`[:db/id
:meeting/locked?
:meeting/title :meeting/location
:meeting/start :meeting/end
:meeting/notifications-sent-at
:meeting/number :meta/created-at :meta/modified-at
{:meeting/organizer ~user-model/user-listing-attributes}
{:meeting/agenda [:db/id
:meeting.agenda/topic
:meeting.agenda/body
:meta/created-at :meta/modified-at
{:meeting.agenda/decisions
[:db/id :meeting.decision/body
:meta/created-at :meta/modified-at
:meeting.decision/number
~attachments]}
{:meeting.agenda/responsible ~user-model/user-listing-attributes}
~attachments]}
{:review/_of [:db/id
:review/comment
:review/decision
:meta/created-at
{:review/reviewer ~user-model/user-listing-attributes}]}
{:participation/_in
[:db/id
:participation/absent?
:participation/role
:meta/created-at :meta/modified-at
{:participation/participant ~user-model/user-listing-attributes}]}]
(meeting-db/activity-meeting-id db activity-id meeting-id))]
(merge
meeting
(comment-db/comment-count-of-entity-by-status
db user meeting-id :meeting)
(entity-db/entity-seen db user meeting-id))))
(defquery :meeting/fetch-meeting
{:doc "Fetch a single meeting info and project info"
:context {:keys [db user]}
:args {:keys [activity-id meeting-id]}
:project-id (project-db/activity-project-id db activity-id)}
(meeting-db/without-incomplete-uploads
(db-api-large-text/with-large-text
meeting-model/rich-text-fields
(let [valid-external-ids (project-related-unit-ids db (environment/api-context) (project-db/activity-project-id db activity-id))]
(link-db/fetch-links
db user
valid-external-ids
#(or (contains? % :meeting.agenda/topic)
(contains? % :meeting.decision/body))
(meta-query/without-deleted
db
:meeting (fetch-meeting* db user meeting-id activity-id)}
(fn [entity]
(contains? entity :link/to))))))))
(defquery :meeting/activity-meeting-history
{:doc "Fetch past meetings for an activity"
:context {:keys [db user]}
:args {:keys [activity-id]}
:project-id (project-db/activity-project-id db activity-id)}
(activity-past-meetings db activity-id))
(defquery :meeting/activity-decision-history
{:doc "Fetch all the decisions for activity matching the given string"
:context {:keys [db user]}
:args {:keys [activity-id
search-term]}
:project-id (project-db/activity-project-id db activity-id)}
(activity-decisions db user activity-id search-term))
(defquery :meeting/project-meeting-history
{:doc "Fetch all the meetings from the history of the project"
:context {:keys [db user]}
:args {:keys [project-id]}
:project-id project-id}
(project-past-meetings db project-id))
(defquery :meeting/project-decision-history
{:doc "Fetch all decisions for project matching the given string"
:context {:keys [db user]}
:args {:keys [project-id
search-term]}
:project-id project-id}
(project-decisions db user project-id search-term))
(defquery :meeting/download-pdf
{:doc "Download meeting minutes as PDF"
:context {:keys [db user]}
:args {id :db/id
language :language}
:project-id (project-db/meeting-project-id db id)}
^{:format :raw}
{:status 200
:headers {"Content-Disposition" (str "inline; filename=meeting_" (meeting-model/meeting-title (fetch-meeting-title db id)) ".pdf")
"Content-Type" "application/pdf"}
:body (ring-io/piped-input-stream
(fn [ostream]
(try
(pdf-export/hiccup->pdf
(meeting-pdf/meeting-pdf db user language id)
ostream)
(catch Exception e
(log/error e "Exception while generating meeting PDF")))))})
|
8baf57ec5d7f6d978217c8905927c827225679922e7fd275d1d66d8ecae4552c | anishathalye/knox | circuit-lang.rkt | #lang rosette/safe
(require
(only-in rosutil/addressable-struct addressable-struct)
"../result.rkt"
"../circuit.rkt"
(for-syntax racket/base racket/syntax syntax/parse))
(provide
(except-out (all-from-out rosette/safe) struct #%module-begin)
(rename-out [addressable-struct struct]
[$#%module-begin #%module-begin])
(all-from-out "../result.rkt"))
(define-syntax ($#%module-begin stx)
(syntax-parse stx
[(_
#:circuit import-path
#:reset reset-input-name reset-input-signal:boolean
#:persistent [persistent-input ...]
#:init-zeroed [init-zeroed-field ...])
#:with circuit (format-id stx "circuit")
#:with metadata (format-id stx "metadata")
#'(#%module-begin
(require (only-in import-path metadata))
(define circuit
(make-circuit
metadata
'reset-input-name
reset-input-signal
(list 'persistent-input ...)
(list 'init-zeroed-field ...)))
(provide circuit))]
[(_ body ...) ; fallback, useful in e.g. submodules (like a test module)
#'(#%module-begin body ...)]))
| null | https://raw.githubusercontent.com/anishathalye/knox/161cda3e5274cc69012830f477749954ddcf736d/knox/circuit/circuit-lang.rkt | racket | fallback, useful in e.g. submodules (like a test module) | #lang rosette/safe
(require
(only-in rosutil/addressable-struct addressable-struct)
"../result.rkt"
"../circuit.rkt"
(for-syntax racket/base racket/syntax syntax/parse))
(provide
(except-out (all-from-out rosette/safe) struct #%module-begin)
(rename-out [addressable-struct struct]
[$#%module-begin #%module-begin])
(all-from-out "../result.rkt"))
(define-syntax ($#%module-begin stx)
(syntax-parse stx
[(_
#:circuit import-path
#:reset reset-input-name reset-input-signal:boolean
#:persistent [persistent-input ...]
#:init-zeroed [init-zeroed-field ...])
#:with circuit (format-id stx "circuit")
#:with metadata (format-id stx "metadata")
#'(#%module-begin
(require (only-in import-path metadata))
(define circuit
(make-circuit
metadata
'reset-input-name
reset-input-signal
(list 'persistent-input ...)
(list 'init-zeroed-field ...)))
(provide circuit))]
#'(#%module-begin body ...)]))
|
ebc1d8509b05e422d9c060c323b5b44c1edb21d3bf856fa2cb5e1a65f8054783 | FreeProving/free-compiler | List.hs | -- This example contains definitions for commonly used list functions from
-- the @Data.List@ module.
module Data.List where
-------------------------------------------------------------------------------
-- Basic functions --
-------------------------------------------------------------------------------
| Append two lists , i.e. ,
--
-- > [x1, ..., xm] ++ [y1, ..., yn] == [x1, ..., xm, y1, ..., yn]
-- > [x1, ..., xm] ++ [y1, ...] == [x1, ..., xm, y1, ...]
append :: [a] -> [a] -> [a]
append xs ys = case xs of
[] -> ys
x : xs' -> x : append xs' ys
infixr 5 `append`
| Extract the first element of a list , which must be non - empty .
head :: [a] -> a
head xs = case xs of
[] -> error "head: empty list"
x : xs' -> x
-- | Extract the elements after the 'head' of a list, which must be non-empty.
tail :: [a] -> [a]
tail xs = case xs of
[] -> error "tail: empty list"
x : xs' -> xs'
-- | Test whether the list is empty.
null :: [a] -> Bool
null xs = case xs of
[] -> True
x : xs' -> False
-- | Returns the length of a list.
length :: [a] -> Integer
length xs = case xs of
[] -> 0
x : xs' -> 1 + length xs'
-------------------------------------------------------------------------------
-- List transformations --
-------------------------------------------------------------------------------
| @'map ' f xs@ is the list obtained by applying @f@ to each
-- element of @xs@, i.e.,
--
-- > map f [x1, x2, ..., xn] == [f x1, f x2, ..., f xn]
map :: (a -> b) -> [a] -> [b]
map f xs = case xs of
[] -> []
x : xs' -> f x : map f xs'
-- | @'reverse' xs@ returns the elements of @xs@ in reverse order.
reverse :: [a] -> [a]
reverse = reverse' []
-- | Version of 'reverse' with accumulator.
reverse' :: [a] -> [a] -> [a]
reverse' acc xs = case xs of
[] -> acc
x : xs' -> reverse' (x : acc) xs'
-- | The 'intersperse' function takes an element and a list and
-- intersperses that element between the elements of the list.
-- For example,
--
-- >>> intersperse ',' "abcde"
-- "a,b,c,d,e"
intersperse :: a -> [a] -> [a]
intersperse sep xs = case xs of
[] -> []
y : ys -> y : case ys of
[] -> []
z : zs -> sep : intersperse sep ys
-------------------------------------------------------------------------------
-- Reducing lists (folds) --
-------------------------------------------------------------------------------
-- | Left-associative fold of a structure.
--
-- In the case of lists, 'foldl', when applied to a binary operator, a
-- starting value (typically the left-identity of the operator), and a
-- list, reduces the list using the binary operator, from left to right:
--
-- > foldl f z [x1, x2, ..., xn] == (...((z `f` x1) `f` x2) `f`...) `f` xn
foldl :: (b -> a -> b) -> b -> [a] -> b
foldl f e xs = case xs of
[] -> e
x : xs' -> foldl f (f e x) xs'
-- | Right-associative fold of a structure.
--
-- In the case of lists, 'foldr', when applied to a binary operator, a
-- starting value (typically the right-identity of the operator), and a
-- list, reduces the list using the binary operator, from right to left:
--
-- > foldr f z [x1, x2, ..., xn] == x1 `f` (x2 `f` ... (xn `f` z)...)
foldr :: (a -> b -> b) -> b -> [a] -> b
foldr f e xs = case xs of
[] -> e
x : xs' -> x `f` foldr f e xs'
-- | A variant of 'foldr' that has no base case, and thus may only be applied
-- to non-empty structures.
foldr1 :: (a -> a -> a) -> [a] -> a
foldr1 f xs = case xs of
[] -> error "foldr1: empty list"
x : xs' -> foldr f x xs'
-------------------------------------------------------------------------------
-- Special folds --
-------------------------------------------------------------------------------
-- | The concatenation of all the elements of a list of lists.
concat :: [[a]] -> [a]
concat = foldr append []
| ' and ' returns the conjunction of a list of ' 's .
and :: [Bool] -> Bool
and = foldr (&&) True
| ' or ' returns the disjunction of a container of ' 's .
or :: [Bool] -> Bool
or = foldr (||) False
-- | The 'sum' function computes the sum of the numbers of a list.
sum :: [Integer] -> Integer
sum = foldr (+) 0
-- | The 'product' function computes the product of the numbers of a list.
product :: [Integer] -> Integer
product = foldr (*) 1
-- | The largest element of a non-empty list.
maximum :: [Integer] -> Integer
maximum = foldr1 (\a b -> if a >= b then a else b)
-- | The least element of a non-empty list.
minimum :: [Integer] -> Integer
minimum = foldr1 (\a b -> if a <= b then a else b)
-------------------------------------------------------------------------------
-- Zipping and unzipping lists --
-------------------------------------------------------------------------------
| ' zip ' takes two lists and returns a list of corresponding pairs .
--
> zip [ 1 , 2 ] [ ' a ' , ' b ' ] = [ ( 1 , ' a ' ) , ( 2 , ' b ' ) ]
--
If one input list is short , excess elements of the longer list are
-- discarded:
--
> zip [ 1 ] [ ' a ' , ' b ' ] = [ ( 1 , ' a ' ) ]
> zip [ 1 , 2 ] [ ' a ' ] = [ ( 1 , ' a ' ) ]
zip :: [a] -> [b] -> [(a, b)]
zip xs ys = case xs of
[] -> []
x : xs' -> case ys of
[] -> []
y : ys' -> (x, y) : (zip xs' ys')
| ' unzip ' transforms a list of pairs into a list of first components and a
list of second components .
unzip :: [(a, b)] -> ([a], [b])
unzip xys = case xys of
[] -> ([], [])
xy : xys' -> case xy of
(x, y) -> case unzip xys' of
(xs, ys) -> (x : xs, y : ys)
| null | https://raw.githubusercontent.com/FreeProving/free-compiler/6931b9ca652a185a92dd824373f092823aea4ea9/example/Data/List.hs | haskell | This example contains definitions for commonly used list functions from
the @Data.List@ module.
-----------------------------------------------------------------------------
Basic functions --
-----------------------------------------------------------------------------
> [x1, ..., xm] ++ [y1, ..., yn] == [x1, ..., xm, y1, ..., yn]
> [x1, ..., xm] ++ [y1, ...] == [x1, ..., xm, y1, ...]
| Extract the elements after the 'head' of a list, which must be non-empty.
| Test whether the list is empty.
| Returns the length of a list.
-----------------------------------------------------------------------------
List transformations --
-----------------------------------------------------------------------------
element of @xs@, i.e.,
> map f [x1, x2, ..., xn] == [f x1, f x2, ..., f xn]
| @'reverse' xs@ returns the elements of @xs@ in reverse order.
| Version of 'reverse' with accumulator.
| The 'intersperse' function takes an element and a list and
intersperses that element between the elements of the list.
For example,
>>> intersperse ',' "abcde"
"a,b,c,d,e"
-----------------------------------------------------------------------------
Reducing lists (folds) --
-----------------------------------------------------------------------------
| Left-associative fold of a structure.
In the case of lists, 'foldl', when applied to a binary operator, a
starting value (typically the left-identity of the operator), and a
list, reduces the list using the binary operator, from left to right:
> foldl f z [x1, x2, ..., xn] == (...((z `f` x1) `f` x2) `f`...) `f` xn
| Right-associative fold of a structure.
In the case of lists, 'foldr', when applied to a binary operator, a
starting value (typically the right-identity of the operator), and a
list, reduces the list using the binary operator, from right to left:
> foldr f z [x1, x2, ..., xn] == x1 `f` (x2 `f` ... (xn `f` z)...)
| A variant of 'foldr' that has no base case, and thus may only be applied
to non-empty structures.
-----------------------------------------------------------------------------
Special folds --
-----------------------------------------------------------------------------
| The concatenation of all the elements of a list of lists.
| The 'sum' function computes the sum of the numbers of a list.
| The 'product' function computes the product of the numbers of a list.
| The largest element of a non-empty list.
| The least element of a non-empty list.
-----------------------------------------------------------------------------
Zipping and unzipping lists --
-----------------------------------------------------------------------------
discarded:
| module Data.List where
| Append two lists , i.e. ,
append :: [a] -> [a] -> [a]
append xs ys = case xs of
[] -> ys
x : xs' -> x : append xs' ys
infixr 5 `append`
| Extract the first element of a list , which must be non - empty .
head :: [a] -> a
head xs = case xs of
[] -> error "head: empty list"
x : xs' -> x
tail :: [a] -> [a]
tail xs = case xs of
[] -> error "tail: empty list"
x : xs' -> xs'
null :: [a] -> Bool
null xs = case xs of
[] -> True
x : xs' -> False
length :: [a] -> Integer
length xs = case xs of
[] -> 0
x : xs' -> 1 + length xs'
| @'map ' f xs@ is the list obtained by applying @f@ to each
map :: (a -> b) -> [a] -> [b]
map f xs = case xs of
[] -> []
x : xs' -> f x : map f xs'
reverse :: [a] -> [a]
reverse = reverse' []
reverse' :: [a] -> [a] -> [a]
reverse' acc xs = case xs of
[] -> acc
x : xs' -> reverse' (x : acc) xs'
intersperse :: a -> [a] -> [a]
intersperse sep xs = case xs of
[] -> []
y : ys -> y : case ys of
[] -> []
z : zs -> sep : intersperse sep ys
foldl :: (b -> a -> b) -> b -> [a] -> b
foldl f e xs = case xs of
[] -> e
x : xs' -> foldl f (f e x) xs'
foldr :: (a -> b -> b) -> b -> [a] -> b
foldr f e xs = case xs of
[] -> e
x : xs' -> x `f` foldr f e xs'
foldr1 :: (a -> a -> a) -> [a] -> a
foldr1 f xs = case xs of
[] -> error "foldr1: empty list"
x : xs' -> foldr f x xs'
concat :: [[a]] -> [a]
concat = foldr append []
| ' and ' returns the conjunction of a list of ' 's .
and :: [Bool] -> Bool
and = foldr (&&) True
| ' or ' returns the disjunction of a container of ' 's .
or :: [Bool] -> Bool
or = foldr (||) False
sum :: [Integer] -> Integer
sum = foldr (+) 0
product :: [Integer] -> Integer
product = foldr (*) 1
maximum :: [Integer] -> Integer
maximum = foldr1 (\a b -> if a >= b then a else b)
minimum :: [Integer] -> Integer
minimum = foldr1 (\a b -> if a <= b then a else b)
| ' zip ' takes two lists and returns a list of corresponding pairs .
> zip [ 1 , 2 ] [ ' a ' , ' b ' ] = [ ( 1 , ' a ' ) , ( 2 , ' b ' ) ]
If one input list is short , excess elements of the longer list are
> zip [ 1 ] [ ' a ' , ' b ' ] = [ ( 1 , ' a ' ) ]
> zip [ 1 , 2 ] [ ' a ' ] = [ ( 1 , ' a ' ) ]
zip :: [a] -> [b] -> [(a, b)]
zip xs ys = case xs of
[] -> []
x : xs' -> case ys of
[] -> []
y : ys' -> (x, y) : (zip xs' ys')
| ' unzip ' transforms a list of pairs into a list of first components and a
list of second components .
unzip :: [(a, b)] -> ([a], [b])
unzip xys = case xys of
[] -> ([], [])
xy : xys' -> case xy of
(x, y) -> case unzip xys' of
(xs, ys) -> (x : xs, y : ys)
|
9f17d135d3a6b2628e1b261c5eb9814af178358fd4e9ed1284ce8f03ae423b01 | seanirby/koeeoadi | title.cljs | (ns koeeoadi.components.title
(:require [om.next :as om :refer-macros [defui]]
[om.dom :as dom]))
(defn title [comp]
(let [{:keys [show-help]} (om/get-state comp)]
(dom/div #js {:className "widget widget-active"
:id "app-info"}
(dom/h2 #js {:id "title"} "KOEEOADI!")
(dom/p nil "A theme creator for Emacs and Vim")
(dom/div #js {:className "row"}
(dom/div #js {:className "column one-third"}
(dom/a #js {:href ""
:target "_blank"} "Twitter"))
(dom/div #js {:className "column one-third"}
(dom/a #js {:href ""
:target "_blank"} "Github"))
(dom/div #js {:className "column one-third"}
(dom/a #js {:href "#"
:onClick #(om/update-state! comp assoc :show-help true)} "Help"))))))
| null | https://raw.githubusercontent.com/seanirby/koeeoadi/481dc31e023e0a54ee5248bd2ef06a56e7d1d64d/src/cljs/koeeoadi/components/title.cljs | clojure | (ns koeeoadi.components.title
(:require [om.next :as om :refer-macros [defui]]
[om.dom :as dom]))
(defn title [comp]
(let [{:keys [show-help]} (om/get-state comp)]
(dom/div #js {:className "widget widget-active"
:id "app-info"}
(dom/h2 #js {:id "title"} "KOEEOADI!")
(dom/p nil "A theme creator for Emacs and Vim")
(dom/div #js {:className "row"}
(dom/div #js {:className "column one-third"}
(dom/a #js {:href ""
:target "_blank"} "Twitter"))
(dom/div #js {:className "column one-third"}
(dom/a #js {:href ""
:target "_blank"} "Github"))
(dom/div #js {:className "column one-third"}
(dom/a #js {:href "#"
:onClick #(om/update-state! comp assoc :show-help true)} "Help"))))))
| |
844693af279a32cf8385492d7654c00f38395b74f4ca7cacba48e417ed752e63 | davebryson/beepbeep | beepbeep_session_server.erl | @author [ ]
@copyright 2008 - 2009
%% @hidden
%%-------------------------------------------------------------------
%% Description : Maintains session information for the client. All data is stored
%% on the server. Only a unique session id is exchanged with the client.
%% Inspired by the Yaws Session Server.
%%
%%-------------------------------------------------------------------
-module(beepbeep_session_server).
-author('Dave Bryson <>').
-behaviour(gen_server).
-export([start/0,new_session/1,get_session_data/1,set_session_data/3,delete_session/1,remove_session_data/2]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-record(beep_session, {sid,data,ttl}).
%%% API
start() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
init([]) ->
ets:new(?MODULE,[set,named_table,{keypos, 2}]),
{A1, A2, A3} = now(),
random:seed(A1,A2,A3),
{ok, undefined}.
new_session(Data) ->
gen_server:call(?MODULE,{new_session,Data}).
get_session_data(Sid) ->
gen_server:call(?MODULE,{get_session_data,Sid}).
set_session_data(Sid,Key,Value) ->
gen_server:call(?MODULE,{set_session_data,Sid,Key,Value}).
delete_session(Sid) ->
gen_server:call(?MODULE,{delete_session,Sid}).
remove_session_data(Sid,Key) ->
gen_server:call(?MODULE,{remove_session_data,Sid,Key}).
%%% Callbacks
handle_call({new_session,Cookie}, _From, _State) ->
NewId = case Cookie of
undefined ->
make_session();
Any ->
case ets:member(?MODULE, Any) of
true -> Any;
false -> make_session()
end
end,
{reply,NewId,undefined};
handle_call({get_session_data,Sid},_From,_State) ->
Data = case ets:lookup(?MODULE, Sid) of
[S] ->
S#beep_session.data;
[] ->
[]
end,
{reply,Data,undefined};
handle_call({set_session_data,Sid,Key,Value},_From,_State) ->
Data = case ets:lookup(?MODULE,Sid) of
[S] ->
S#beep_session.data;
[] -> []
end,
Data1 = case proplists:is_defined(Key,Data) of
true ->
Rest = proplists:delete(Key,Data),
[{Key,Value}|Rest];
false ->
[{Key,Value}|Data]
end,
ets:insert(?MODULE,#beep_session{sid=Sid,data=Data1,ttl=0}),
{reply,ok,undefined};
handle_call({delete_session,Sid},_From,_State) ->
ets:delete(?MODULE,Sid),
{reply,ok,undefined};
handle_call({remove_session_data,Sid,Key},_From,_State) ->
Data = case ets:lookup(?MODULE,Sid) of
[S] ->
S#beep_session.data;
[] -> []
end,
Data1 = case proplists:is_defined(Key,Data) of
true ->
proplists:delete(Key,Data);
false ->
Data
end,
ets:insert(?MODULE,#beep_session{sid=Sid,data=Data1,ttl=0}),
{reply,ok,undefined}.
handle_cast(_Msg, State) ->
{noreply, State}.
handle_info(_Info, State) ->
{noreply, State}.
terminate(_Reason, _State) ->
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%--------------------------------------------------------------------
Internal functions
%%--------------------------------------------------------------------
make_session() ->
Data = crypto:rand_bytes(2048),
Sha_list = binary_to_list(crypto:sha(Data)),
Id = lists:flatten(list_to_hex(Sha_list)),
Session = #beep_session{sid=Id,data=[],ttl=0},
ets:insert(?MODULE,Session),
Id.
Convert Integer from the SHA to Hex
list_to_hex(L)->
lists:map(fun(X) -> int_to_hex(X) end, L).
int_to_hex(N) when N < 256 ->
[hex(N div 16), hex(N rem 16)].
hex(N) when N < 10 ->
$0+N;
hex(N) when N >= 10, N < 16 ->
$a + (N-10).
| null | https://raw.githubusercontent.com/davebryson/beepbeep/62db46d268c6cb6ad86345562b3c77f8ff070b27/src/beepbeep_session_server.erl | erlang | @hidden
-------------------------------------------------------------------
Description : Maintains session information for the client. All data is stored
on the server. Only a unique session id is exchanged with the client.
Inspired by the Yaws Session Server.
-------------------------------------------------------------------
API
Callbacks
--------------------------------------------------------------------
-------------------------------------------------------------------- | @author [ ]
@copyright 2008 - 2009
-module(beepbeep_session_server).
-author('Dave Bryson <>').
-behaviour(gen_server).
-export([start/0,new_session/1,get_session_data/1,set_session_data/3,delete_session/1,remove_session_data/2]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-record(beep_session, {sid,data,ttl}).
start() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
init([]) ->
ets:new(?MODULE,[set,named_table,{keypos, 2}]),
{A1, A2, A3} = now(),
random:seed(A1,A2,A3),
{ok, undefined}.
new_session(Data) ->
gen_server:call(?MODULE,{new_session,Data}).
get_session_data(Sid) ->
gen_server:call(?MODULE,{get_session_data,Sid}).
set_session_data(Sid,Key,Value) ->
gen_server:call(?MODULE,{set_session_data,Sid,Key,Value}).
delete_session(Sid) ->
gen_server:call(?MODULE,{delete_session,Sid}).
remove_session_data(Sid,Key) ->
gen_server:call(?MODULE,{remove_session_data,Sid,Key}).
handle_call({new_session,Cookie}, _From, _State) ->
NewId = case Cookie of
undefined ->
make_session();
Any ->
case ets:member(?MODULE, Any) of
true -> Any;
false -> make_session()
end
end,
{reply,NewId,undefined};
handle_call({get_session_data,Sid},_From,_State) ->
Data = case ets:lookup(?MODULE, Sid) of
[S] ->
S#beep_session.data;
[] ->
[]
end,
{reply,Data,undefined};
handle_call({set_session_data,Sid,Key,Value},_From,_State) ->
Data = case ets:lookup(?MODULE,Sid) of
[S] ->
S#beep_session.data;
[] -> []
end,
Data1 = case proplists:is_defined(Key,Data) of
true ->
Rest = proplists:delete(Key,Data),
[{Key,Value}|Rest];
false ->
[{Key,Value}|Data]
end,
ets:insert(?MODULE,#beep_session{sid=Sid,data=Data1,ttl=0}),
{reply,ok,undefined};
handle_call({delete_session,Sid},_From,_State) ->
ets:delete(?MODULE,Sid),
{reply,ok,undefined};
handle_call({remove_session_data,Sid,Key},_From,_State) ->
Data = case ets:lookup(?MODULE,Sid) of
[S] ->
S#beep_session.data;
[] -> []
end,
Data1 = case proplists:is_defined(Key,Data) of
true ->
proplists:delete(Key,Data);
false ->
Data
end,
ets:insert(?MODULE,#beep_session{sid=Sid,data=Data1,ttl=0}),
{reply,ok,undefined}.
handle_cast(_Msg, State) ->
{noreply, State}.
handle_info(_Info, State) ->
{noreply, State}.
terminate(_Reason, _State) ->
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
Internal functions
make_session() ->
Data = crypto:rand_bytes(2048),
Sha_list = binary_to_list(crypto:sha(Data)),
Id = lists:flatten(list_to_hex(Sha_list)),
Session = #beep_session{sid=Id,data=[],ttl=0},
ets:insert(?MODULE,Session),
Id.
Convert Integer from the SHA to Hex
list_to_hex(L)->
lists:map(fun(X) -> int_to_hex(X) end, L).
int_to_hex(N) when N < 256 ->
[hex(N div 16), hex(N rem 16)].
hex(N) when N < 10 ->
$0+N;
hex(N) when N >= 10, N < 16 ->
$a + (N-10).
|
2803764c586b5243ea13fc819f4239191e01e33879152c4237872103135d9141 | DKurilo/hackerrank | solution.hs | # LANGUAGE FlexibleInstances , UndecidableInstances #
# LANGUAGE DuplicateRecordFields , UnicodeSyntax #
module Main where
import Prelude.Unicode
import Control.Monad
import Debug.Trace
import System.Environment
import System.IO
import System.IO.Unsafe
-- Complete the stepPerms function below.
stepPerms ∷ Int → Int
stepPerms n = steps n `mod` 10000000007
steps ∷ Int → Int
steps = (map sts [0..] !!)
where sts 0 = 1
sts 1 = 1
sts 2 = 2
sts k = steps (k-3) + steps (k-2) + steps (k-1)
main ∷ IO()
main = do
stdout ← getEnv "OUTPUT_PATH"
fptr ← openFile stdout WriteMode
s ← readLn ∷ IO Int
forM_ [1..s] $ \s_itr → do
n ← readLn ∷ IO Int
let res = stepPerms n
-- hPutStrLn fptr $ show res
putStrLn $ show res
hFlush fptr
hClose fptr
| null | https://raw.githubusercontent.com/DKurilo/hackerrank/37063170567b397b25a2b7123bc9c1299d34814a/ctci-recursive-staircase/solution.hs | haskell | Complete the stepPerms function below.
hPutStrLn fptr $ show res | # LANGUAGE FlexibleInstances , UndecidableInstances #
# LANGUAGE DuplicateRecordFields , UnicodeSyntax #
module Main where
import Prelude.Unicode
import Control.Monad
import Debug.Trace
import System.Environment
import System.IO
import System.IO.Unsafe
stepPerms ∷ Int → Int
stepPerms n = steps n `mod` 10000000007
steps ∷ Int → Int
steps = (map sts [0..] !!)
where sts 0 = 1
sts 1 = 1
sts 2 = 2
sts k = steps (k-3) + steps (k-2) + steps (k-1)
main ∷ IO()
main = do
stdout ← getEnv "OUTPUT_PATH"
fptr ← openFile stdout WriteMode
s ← readLn ∷ IO Int
forM_ [1..s] $ \s_itr → do
n ← readLn ∷ IO Int
let res = stepPerms n
putStrLn $ show res
hFlush fptr
hClose fptr
|
645f1cd17ca87ffab443e1e76ae516a9758c9bfd6f4fcba89e0b75559192c3d1 | jnavila/plotkicadsch | imageDiff.ml | open! StdLabels
open Lwt.Infix
let doc = "use compare (ImageMagick) between bitmaps"
type pctx = SvgPainter.t
module SVG = Kicadsch.MakeSchPainter (SvgPainter)
module SP = struct
include SVG
type painterContext = SvgPainter.t
end
module S = SP
let display_diff ~from_ctx ~to_ctx filename ~keep =
let from_filename = SysAbst.build_tmp_svg_name ~keep "from_" filename in
let to_filename = SysAbst.build_tmp_svg_name ~keep "to_" filename in
let both_files =
List.map
~f:(fun (svg_name, context) ->
Lwt_io.with_file ~mode:Lwt_io.Output svg_name (fun o ->
Lwt_io.write o (SvgPainter.write context) ) )
[(from_filename, from_ctx); (to_filename, to_ctx)]
in
let both = Lwt.join both_files in
let compare_them =
both
>>= fun _ ->
SysAbst.exec "git-imgdiff" [|from_filename; to_filename|]
>|= let open UnixLabels in
function
| WEXITED ret ->
if Int.equal ret 0 then true else false
| WSIGNALED _ ->
false
| WSTOPPED _ ->
false
in
let%lwt ret =
try%lwt compare_them with
| GitFs.InternalGitError s ->
Lwt_io.printf "%s\n" s >|= fun () -> false
| _ ->
Lwt_io.printf "unknown error\n" >|= fun () -> false
in
Lwt.join
@@ List.map
~f:(SysAbst.finalize_tmp_file ~keep)
[from_filename; to_filename]
>|= fun _ -> ret
| null | https://raw.githubusercontent.com/jnavila/plotkicadsch/a8afb216bf04aeaeb9088ef264407b094553d145/plotkicadsch/src/imageDiff.ml | ocaml | open! StdLabels
open Lwt.Infix
let doc = "use compare (ImageMagick) between bitmaps"
type pctx = SvgPainter.t
module SVG = Kicadsch.MakeSchPainter (SvgPainter)
module SP = struct
include SVG
type painterContext = SvgPainter.t
end
module S = SP
let display_diff ~from_ctx ~to_ctx filename ~keep =
let from_filename = SysAbst.build_tmp_svg_name ~keep "from_" filename in
let to_filename = SysAbst.build_tmp_svg_name ~keep "to_" filename in
let both_files =
List.map
~f:(fun (svg_name, context) ->
Lwt_io.with_file ~mode:Lwt_io.Output svg_name (fun o ->
Lwt_io.write o (SvgPainter.write context) ) )
[(from_filename, from_ctx); (to_filename, to_ctx)]
in
let both = Lwt.join both_files in
let compare_them =
both
>>= fun _ ->
SysAbst.exec "git-imgdiff" [|from_filename; to_filename|]
>|= let open UnixLabels in
function
| WEXITED ret ->
if Int.equal ret 0 then true else false
| WSIGNALED _ ->
false
| WSTOPPED _ ->
false
in
let%lwt ret =
try%lwt compare_them with
| GitFs.InternalGitError s ->
Lwt_io.printf "%s\n" s >|= fun () -> false
| _ ->
Lwt_io.printf "unknown error\n" >|= fun () -> false
in
Lwt.join
@@ List.map
~f:(SysAbst.finalize_tmp_file ~keep)
[from_filename; to_filename]
>|= fun _ -> ret
| |
0b38a0293b8fd11526b24e08246e882e8f55a6a0d55b776ac4e7bc19a803cf99 | rowangithub/DOrder | includemod.mli | (***********************************************************************)
(* *)
(* Objective Caml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
(* *)
(***********************************************************************)
$ I d : includemod.mli 10447 2010 - 05 - 21 03:36:52Z garrigue $
(* Inclusion checks for the module language *)
open Types
open Typedtree
open Format
val modtypes: Env.t -> module_type -> module_type -> module_coercion
val signatures: Env.t -> signature -> signature -> module_coercion
val compunit: string -> signature -> string -> signature -> module_coercion
val type_declarations:
Env.t -> Ident.t -> type_declaration -> type_declaration -> unit
type error =
Missing_field of Ident.t
| Value_descriptions of Ident.t * value_description * value_description
| Type_declarations of Ident.t * type_declaration
* type_declaration * Includecore.type_mismatch list
| Exception_declarations of
Ident.t * exception_declaration * exception_declaration
| Module_types of module_type * module_type
| Modtype_infos of Ident.t * modtype_declaration * modtype_declaration
| Modtype_permutation
| Interface_mismatch of string * string
| Class_type_declarations of
Ident.t * cltype_declaration * cltype_declaration *
Ctype.class_match_failure list
| Class_declarations of
Ident.t * class_declaration * class_declaration *
Ctype.class_match_failure list
| Unbound_modtype_path of Path.t
exception Error of error list
val report_error: formatter -> error list -> unit
| null | https://raw.githubusercontent.com/rowangithub/DOrder/e0d5efeb8853d2a51cc4796d7db0f8be3185d7df/typing/includemod.mli | ocaml | *********************************************************************
Objective Caml
*********************************************************************
Inclusion checks for the module language | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
$ I d : includemod.mli 10447 2010 - 05 - 21 03:36:52Z garrigue $
open Types
open Typedtree
open Format
val modtypes: Env.t -> module_type -> module_type -> module_coercion
val signatures: Env.t -> signature -> signature -> module_coercion
val compunit: string -> signature -> string -> signature -> module_coercion
val type_declarations:
Env.t -> Ident.t -> type_declaration -> type_declaration -> unit
type error =
Missing_field of Ident.t
| Value_descriptions of Ident.t * value_description * value_description
| Type_declarations of Ident.t * type_declaration
* type_declaration * Includecore.type_mismatch list
| Exception_declarations of
Ident.t * exception_declaration * exception_declaration
| Module_types of module_type * module_type
| Modtype_infos of Ident.t * modtype_declaration * modtype_declaration
| Modtype_permutation
| Interface_mismatch of string * string
| Class_type_declarations of
Ident.t * cltype_declaration * cltype_declaration *
Ctype.class_match_failure list
| Class_declarations of
Ident.t * class_declaration * class_declaration *
Ctype.class_match_failure list
| Unbound_modtype_path of Path.t
exception Error of error list
val report_error: formatter -> error list -> unit
|
6c7ad9bc6ae7bee9df6315ed9679d3da68dc825753e38874b6a610aacb984f01 | noloop/cacau | package.lisp | (defpackage #:noloop.cacau
(:use #:common-lisp)
(:nicknames #:cacau)
(:import-from #:eventbus
#:make-eventbus
#:once
#:on
#:emit)
(:import-from #:assertion-error
#:assertion-error
#:assertion-error-actual
#:assertion-error-expected
#:assertion-error-message
#:assertion-error-result
#:assertion-error-stack
#:get-stack-trace)
(:export #:make-runner
#:suite-root
#:add-child
#:create-test
#:create-suite
#:on-runner
#:once-runner
#:create-before-all
#:create-after-all
#:create-before-each
#:create-after-each
#:run-runner
#:result
#:run
#:cacau-runner
#:reset-runner
#:cl-debugger
#:common-create-before-all
#:common-create-after-all
#:common-create-before-each
#:common-create-after-each
#:common-create-suite
#:common-create-test
#:before-all
#:after-all
#:before-each
#:after-each
#:context
#:it
#:suite
#:test
#:suite-setup
#:suite-teardown
#:test-setup
#:test-teardown
#:defsuite
#:deftest
#:defbefore-all
#:defafter-all
#:defbefore-each
#:defafter-each
#:in-plan
#:defbefore-plan
#:defafter-plan
#:defbefore-t
#:defafter-t
#:deft))
| null | https://raw.githubusercontent.com/noloop/cacau/ba0fb36a284ded884f1dab0bd3f0f41ec14e3038/src/package.lisp | lisp | (defpackage #:noloop.cacau
(:use #:common-lisp)
(:nicknames #:cacau)
(:import-from #:eventbus
#:make-eventbus
#:once
#:on
#:emit)
(:import-from #:assertion-error
#:assertion-error
#:assertion-error-actual
#:assertion-error-expected
#:assertion-error-message
#:assertion-error-result
#:assertion-error-stack
#:get-stack-trace)
(:export #:make-runner
#:suite-root
#:add-child
#:create-test
#:create-suite
#:on-runner
#:once-runner
#:create-before-all
#:create-after-all
#:create-before-each
#:create-after-each
#:run-runner
#:result
#:run
#:cacau-runner
#:reset-runner
#:cl-debugger
#:common-create-before-all
#:common-create-after-all
#:common-create-before-each
#:common-create-after-each
#:common-create-suite
#:common-create-test
#:before-all
#:after-all
#:before-each
#:after-each
#:context
#:it
#:suite
#:test
#:suite-setup
#:suite-teardown
#:test-setup
#:test-teardown
#:defsuite
#:deftest
#:defbefore-all
#:defafter-all
#:defbefore-each
#:defafter-each
#:in-plan
#:defbefore-plan
#:defafter-plan
#:defbefore-t
#:defafter-t
#:deft))
| |
a61469c2362f4ced761fe2fd8b2a74585e5a2512287aeb9d718e6434d345e1bb | mdesharnais/mini-ml | Spec.hs | # LANGUAGE OverloadedLists #
import qualified Compiler
import qualified Data.Char
import qualified Expr
import qualified Data.Set as Set
import qualified Interpreter
import qualified Lexer
import qualified Parser
import qualified Type
import qualified TypeContext as TyCtxt
import qualified TypeInference as TyInferance
import qualified TypeSubstitution as Subst
import Data.Bifunctor(bimap)
import Expr(Expr(..))
import Interpreter(Value(..))
import Test.HUnit
import Type
import TypeContext(Context)
litBool = [
("true", LitBool () True),
("false", LitBool () False)
]
litInt min max =
let
impl n xs =
if n >= max then xs else impl (n + 1) ((show n, LitInt () n) : xs)
in
impl min []
variables = [
("a", Var () "a"),
("ab", Var () "ab"),
("ab1", Var () "ab1"),
("ab12", Var () "ab12"),
("ab121", Var () "ab121"),
("ab121b", Var () "ab121b"),
("ab121ba", Var () "ab121ba")
]
functions = [
("let min = fun x -> fun y -> if x < y then x else y in min 3 5",
Let () ("min", ())
(Abs () "x" (Abs () "y"
(If () (OpLT () (Var () "x") (Var () "y"))
(Var () "x")
(Var () "y"))))
(App () (App () (Var () "min") (LitInt () 3)) (LitInt () 5))),
("1 * 2 < 3 * 4",
OpLT ()
(OpMul () (LitInt () 1) (LitInt () 2))
(OpMul () (LitInt () 3) (LitInt () 4)))
]
testCases =
litBool ++
litInt 0 101 ++
variables ++
functions
testEquivalences = [
("a * b * c", "(a * b) * c"),
("a + b * c", "a + (b * c)"),
("f x y z", "((f x) y) z"),
("f x + f y", "(f x) + (f y)"),
("a * b < c * d", "(a * b) < (c * d)"),
("extern f 5", "(extern f) 5"),
("let min = fun x -> fun y -> if x < y then x else y in min 2 3",
"let min = (fun x -> (fun y -> (if (x < y) then x else y))) in ((min 2) 3)")
]
testInference :: [(Context, String, Expr TypeSchema Type)]
testInference =
let int = LitInt TInt in
let bool = LitBool TBool in [
(TyCtxt.empty, "true", bool True),
(TyCtxt.empty, "false", bool False),
(TyCtxt.empty, "1", int 1),
(TyCtxt.empty, "12", int 12),
(TyCtxt.empty, "123", int 123),
(TyCtxt.empty, "3 - 2", OpSub TInt (int 3) (int 2)),
(TyCtxt.empty, "3 + 2", OpAdd TInt (int 3) (int 2)),
(TyCtxt.empty, "3 * 2", OpMul TInt (int 3) (int 2)),
(TyCtxt.empty, "3 / 2", OpDiv TInt (int 3) (int 2)),
(TyCtxt.empty, "3 < 2", OpLT TBool (int 3) (int 2)),
(TyCtxt.empty, "3 = 2", OpEQ TBool (int 3) (int 2)),
(TyCtxt.empty, "if true then 0 else 1",
If TInt (bool True) (int 0) (int 1)),
(TyCtxt.empty, "extern f",
ExternVar (TFun "x0" TInt TInt) "f"),
(TyCtxt.empty, "fun x -> x",
Abs (TFun "x1" (TVar "x0") (TVar "x0")) "x" (Var (TVar "x0") "x")),
(TyCtxt.empty, "fun x -> fun y -> x",
Abs (TFun "x3" (TVar "x0") (TFun "x2" (TVar "x1") (TVar "x0"))) "x"
(Abs (TFun "x2" (TVar "x1") (TVar "x0")) "y"
(Var (TVar "x0") "x"))),
(TyCtxt.empty, "fun x -> fun y -> y",
Abs (TFun "x3" (TVar "x0") (TFun "x2" (TVar "x1") (TVar "x1"))) "x"
(Abs (TFun "x2" (TVar "x1") (TVar "x1")) "y"
(Var (TVar "x1") "y"))),
(TyCtxt.empty, "fun x -> true",
Abs (TFun "x1" (TVar "x0") TBool) "x" (bool True)),
(TyCtxt.empty, "let x = true in 3",
Let TInt ("x", TSType TBool) (bool True) (int 3)),
(TyCtxt.empty, "let min = fun x -> fun y -> if x < y then x else y in min 2 3",
Let TInt ("min", (TSType (TFun "x3" TInt (TFun "x2" TInt TInt))))
(Abs (TFun "x3" TInt (TFun "x2" TInt TInt)) "x"
(Abs (TFun "x2" TInt TInt) "y"
(If TInt (OpLT TBool (Var TInt "x") (Var TInt "y"))
(Var TInt "x")
(Var TInt "y"))))
(App TInt
(App (TFun "x2" TInt TInt)
(Var (TFun "x3" TInt (TFun "x2" TInt TInt)) "min")
(int 2))
(int 3))),
(TyCtxt.empty, "let rec f = fun x -> x in f 1",
Let TInt ("f", TSForall "x0" (TSType (TFun "x2" (TVar "x0") (TVar "x0"))))
(AbsRec (TFun "x2" (TVar "x0") (TVar "x0")) "f" "x" (Var (TVar "x0") "x"))
(App TInt (Var (TFun "x2" TInt TInt) "f") (int 1))),
(TyCtxt.empty, "let rec f = fun x -> fun y -> x in f 1 2",
Let TInt ("f",TSForall "x3" (TSForall "x0"
(TSType (TFun "x2" (TVar "x0") (TFun "x4" (TVar "x3") (TVar "x0"))))))
(AbsRec (TFun "x2" (TVar "x0") (TFun "x4" (TVar "x3") (TVar "x0"))) "f" "x"
(Abs (TFun "x4" (TVar "x3") (TVar "x0")) "y" (Var (TVar "x0") "x")))
(App TInt
(App (TFun "x4" TInt TInt)
(Var (TFun "x2" TInt (TFun "x4" TInt TInt)) "f")
(LitInt TInt 1))
(LitInt TInt 2))),
(TyCtxt.empty, "let rec sum = fun n -> if n = 0 then 0 else n + sum (n - 1) in sum 3",
Let TInt ("sum", TSType (TFun "x2" TInt TInt))
(AbsRec (TFun "x2" TInt TInt) "sum" "n"
(If TInt (OpEQ TBool (Var TInt "n") (int 0))
(int 0)
(OpAdd TInt
(Var TInt "n")
(App TInt
(Var (TFun "x2" TInt TInt) "sum")
(OpSub TInt (Var TInt "n") (int 1))))))
(App TInt (Var (TFun "x2" TInt TInt) "sum") (int 3))),
(TyCtxt.empty, "let rec mult = fun x -> fun y -> if y < 1 then 0 else x * mult x (y - 1) in mult 3 5",
Let TInt ("mult", TSType (TFun "x2" TInt (TFun "x8" TInt TInt)))
(AbsRec (TFun "x2" TInt (TFun "x8" TInt TInt)) "mult" "x"
(Abs (TFun "x8" TInt TInt) "y"
(If TInt (OpLT TBool (Var TInt "y") (int 1))
(int 0)
(OpMul TInt
(Var TInt "x")
(App TInt
(App (TFun "x8" TInt TInt)
(Var (TFun "x2" TInt (TFun "x8" TInt TInt)) "mult")
(Var TInt "x"))
(OpSub TInt (Var TInt "y") (int 1)))))))
(App TInt
(App (TFun "x8" TInt TInt)
(Var (TFun "x2" TInt (TFun "x8" TInt TInt)) "mult")
(int 3))
(int 5))),
(TyCtxt.empty, "let f = fun x -> fun y -> if true then x else y in f 2 3",
Let TInt ("f", TSForall "x1"
(TSType (TFun "x3" (TVar "x1") (TFun "x2" (TVar "x1") (TVar "x1")))))
(Abs (TFun "x3" (TVar "x1") (TFun "x2" (TVar "x1") (TVar "x1"))) "x"
(Abs (TFun "x2" (TVar "x1") (TVar "x1")) "y"
(If (TVar "x1") (bool True)
(Var (TVar "x1") "x")
(Var (TVar "x1") "y"))))
(App TInt
(App (TFun "x2" TInt TInt)
(Var (TFun "x3" TInt (TFun "x2" TInt TInt)) "f")
(int 2))
(int 3))),
(TyCtxt.empty, "let f = fun b -> fun x -> fun y -> if b then x else y in f true 2 3",
Let TInt ("f", (TSForall "x2" (TSType
(TFun "x5" TBool (TFun "x4" (TVar "x2") (TFun "x3" (TVar "x2") (TVar "x2")))))))
(Abs (TFun "x5" TBool (TFun "x4" (TVar "x2") (TFun "x3" (TVar "x2") (TVar "x2")))) "b"
(Abs (TFun "x4" (TVar "x2") (TFun "x3" (TVar "x2") (TVar "x2"))) "x"
(Abs (TFun "x3" (TVar "x2") (TVar "x2")) "y"
(If (TVar "x2") (Var TBool "b")
(Var (TVar "x2") "x")
(Var (TVar "x2") "y")))))
(App TInt
(App (TFun "x3" TInt TInt)
(App (TFun "x4" TInt (TFun "x3" TInt TInt))
(Var (TFun "x5" TBool (TFun "x4" TInt (TFun "x3" TInt TInt))) "f")
(bool True))
(int 2))
(int 3))),
(TyCtxt.empty, "let i = fun x -> x in if i true then i 1 else i 2",
Let TInt ("i", TSForall "x0" (TSType (TFun "x1" (TVar "x0") (TVar "x0"))))
(Abs (TFun "x1" (TVar "x0") (TVar "x0")) "x"
(Var (TVar "x0") "x"))
(If TInt (App TBool (Var (TFun "x1" TBool TBool) "i") (bool True))
(App TInt (Var (TFun "x1" TInt TInt) "i") (int 1))
(App TInt (Var (TFun "x1" TInt TInt) "i") (int 2)))),
(TyCtxt.empty, "let foo = fun b -> if b then true else false in foo true",
Let TBool ("foo", (TSType (TFun "x1" TBool TBool)))
(Abs (TFun "x1" TBool TBool) "b"
(If TBool (Var TBool "b")
(bool True)
(bool False)))
(App TBool (Var (TFun "x1" TBool TBool) "foo") (bool True))),
(TyCtxt.empty, "let rec f = fun x -> x in if f true then f 3 else f 4",
(Let TInt ("f", TSForall "x0" (TSType (TFun "x2" (TVar "x0") (TVar "x0"))))
(AbsRec(TFun "x2" (TVar "x0") (TVar "x0")) "f" "x" (Var (TVar "x0") "x"))
(If TInt (App TBool (Var (TFun "x2" TBool TBool) "f") (bool True))
(App TInt (Var (TFun "x2" TInt TInt) "f") (int 3))
(App TInt (Var (TFun "x2" TInt TInt) "f") (int 4))))),
(TyCtxt.empty,
"let not = fun b -> if b then b else false in " ++
"let rec foo = fun b -> fun x -> fun y -> " ++
"if b then x else foo (not b) y x in " ++
"foo false 1 1",
Let TInt ("not", TSType (TFun "x1" TBool TBool))
(Abs (TFun "x1" TBool TBool) "b"
(If TBool (Var TBool "b")
(Var TBool "b")
(bool False)))
(Let TInt ("foo", TSForall "x13" (TSType
(TFun "x4" TBool
(TFun "x16" (TVar "x13")
(TFun "x15" (TVar "x13") (TVar "x13"))))))
(AbsRec (TFun "x4" TBool
(TFun "x16" (TVar "x13")
(TFun "x15" (TVar "x13") (TVar "x13")))) "foo" "b"
(Abs (TFun "x16" (TVar "x13") (TFun "x15" (TVar "x13") (TVar "x13"))) "x"
(Abs (TFun "x15" (TVar "x13") (TVar "x13")) "y"
(If (TVar "x13") (Var TBool "b")
(Var (TVar "x13") "x")
(App (TVar "x13")
(App (TFun "x15" (TVar "x13") (TVar "x13"))
(App (TFun "x16" (TVar "x13") (TFun "x15" (TVar "x13") (TVar "x13")))
(Var (TFun "x4" TBool (TFun "x16" (TVar "x13")
(TFun "x15" (TVar "x13") (TVar "x13")))) "foo")
(App TBool
(Var (TFun "x1" TBool TBool) "not")
(Var TBool "b")))
(Var (TVar "x13") "y"))
(Var (TVar "x13") "x"))))))
(App TInt
(App (TFun "x15" TInt TInt)
(App (TFun "x16" TInt (TFun "x15" TInt TInt))
(Var (TFun "x4" TBool (TFun "x16" TInt (TFun "x15" TInt TInt))) "foo")
(bool False))
(int 1))
(int 1)))),
(TyCtxt.empty, "fun fix -> fun f -> f (fun y -> fix f y)",
Abs
(TFun "x11"
(TFun "x4"
(TFun "x9" (TFun "x7" (TVar "x2") (TVar "x5")) (TVar "x8"))
(TFun "x6" (TVar "x2") (TVar "x5")))
(TFun "x10"
(TFun "x9" (TFun "x7" (TVar "x2") (TVar "x5")) (TVar "x8"))
(TVar "x8")))
"fix"
(Abs
(TFun "x10"
(TFun "x9" (TFun "x7" (TVar "x2") (TVar "x5")) (TVar "x8"))
(TVar "x8"))
"f"
(App (TVar "x8")
(Var (TFun "x9" (TFun "x7" (TVar "x2") (TVar "x5")) (TVar "x8")) "f")
(Abs (TFun "x7" (TVar "x2") (TVar "x5")) "y"
(App (TVar "x5")
(App (TFun "x6" (TVar "x2") (TVar "x5"))
(Var
(TFun "x4"
(TFun "x9" (TFun "x7" (TVar "x2") (TVar "x5")) (TVar "x8"))
(TFun "x6" (TVar "x2") (TVar "x5"))) "fix")
(Var (TFun "x9" (TFun "x7" (TVar "x2") (TVar "x5")) (TVar "x8")) "f"))
(Var (TVar "x2") "y")))))
),
(TyCtxt.empty, "let rec fix = fun f -> f (fun y -> fix f y) in fix",
Let
(TFun "x2"
(TFun "x10"
(TFun "x8" (TVar "x12") (TVar "x11"))
(TFun "x7" (TVar "x12") (TVar "x11")))
(TFun "x7" (TVar "x12") (TVar "x11")))
("fix",
TSForall "x6" (TSForall "x3" (TSType (TFun "x2"
(TFun "x10"
(TFun "x8" (TVar "x3") (TVar "x6"))
(TFun "x7" (TVar "x3") (TVar "x6")))
(TFun "x7" (TVar "x3") (TVar "x6"))))))
(AbsRec
(TFun "x2"
(TFun "x10"
(TFun "x8" (TVar "x3") (TVar "x6"))
(TFun "x7" (TVar "x3") (TVar "x6")))
(TFun "x7" (TVar "x3") (TVar "x6"))) "fix" "f"
(App (TFun "x7" (TVar "x3") (TVar "x6"))
(Var
(TFun "x10"
(TFun "x8" (TVar "x3") (TVar "x6"))
(TFun "x7" (TVar "x3") (TVar "x6"))) "f")
(Abs (TFun "x8" (TVar "x3") (TVar "x6")) "y"
(App (TVar "x6")
(App (TFun "x7" (TVar "x3") (TVar "x6"))
(Var
(TFun "x2"
(TFun "x10"
(TFun "x8" (TVar "x3") (TVar "x6"))
(TFun "x7" (TVar "x3") (TVar "x6")))
(TFun "x7" (TVar "x3") (TVar "x6")))
"fix")
(Var
(TFun "x10"
(TFun "x8" (TVar "x3") (TVar "x6"))
(TFun "x7" (TVar "x3") (TVar "x6")))
"f"))
(Var (TVar "x3") "y")))))
(Var
(TFun "x2"
(TFun "x10"
(TFun "x8" (TVar "x12") (TVar "x11"))
(TFun "x7" (TVar "x12") (TVar "x11")))
(TFun "x7" (TVar "x12") (TVar "x11")))
"fix")),
(TyCtxt.empty,
"fun f -> f (fun x -> f (fun y -> y))",
Abs (TFun "x9" (TFun "x5" (TFun "x3" (TVar "x2") (TVar "x2")) (TVar "x2")) (TVar "x2")) "f"
(App (TVar "x2")
(Var (TFun "x5" (TFun "x3" (TVar "x2") (TVar "x2")) (TVar "x2")) "f")
(Abs (TFun "x3" (TVar "x2") (TVar "x2")) "x"
(App (TVar "x2")
(Var (TFun "x5" (TFun "x3" (TVar "x2") (TVar "x2")) (TVar "x2")) "f")
(Abs (TFun "x3" (TVar "x2") (TVar "x2")) "y"
(Var (TVar "x2") "y")))))),
(TyCtxt.empty,
"fun f -> f (fun x -> f (fun y -> x))",
Abs (TFun "x9" (TFun "x5" (TFun "x3" (TVar "x2") (TVar "x2")) (TVar "x2")) (TVar "x2")) "f"
(App (TVar "x2")
(Var (TFun "x5" (TFun "x3" (TVar "x2") (TVar "x2")) (TVar "x2")) "f")
(Abs (TFun "x3" (TVar "x2") (TVar "x2")) "x"
(App (TVar "x2")
(Var (TFun "x5" (TFun "x3" (TVar "x2") (TVar "x2")) (TVar "x2")) "f")
(Abs (TFun "x3" (TVar "x2") (TVar "x2")) "y"
(Var (TVar "x2") "x")))))),
(TyCtxt.singleton ("x", TInt), "x",
Var TInt "x"),
(TyCtxt.singleton ("f", TFun "" TInt TInt), "f",
Var (TFun "" TInt TInt) "f"),
(TyCtxt.singleton ("f", TFun "" TInt TInt), "f 3",
App TInt (Var (TFun "" TInt TInt) "f") (int 3)),
(TyCtxt.singleton ("x", TVar "x0"), "x - 1",
OpSub TInt (Var TInt "x") (int 1)),
(TyCtxt.fromListTy [("x", TVar "x0"), ("y", TVar "x1")], "x y",
App (TVar "x2")
(Var (TFun "x3" (TVar "x1") (TVar "x2")) "x")
(Var (TVar "x1") "y")),
(TyCtxt.empty,
"let n = 10 in " ++
"let foo = fun g -> g 10 in " ++
"foo (fun x -> x + n) + foo (fun x -> x)",
Let TInt ("n",TSType TInt) (int 10)
(Let TInt ("foo",TSForall "x1"
(TSType (TFun "x3" (TFun "x2" TInt (TVar "x1")) (TVar "x1"))))
(Abs (TFun "x3" (TFun "x2" TInt (TVar "x1")) (TVar "x1")) "g"
(App (TVar "x1")
(Var (TFun "x2" TInt (TVar "x1")) "g") (int 10)))
(OpAdd TInt
(App TInt (Var (TFun "x3" (TFun "x2" TInt TInt) TInt) "foo")
(Abs (TFun "x2" TInt TInt) "x"
(OpAdd TInt (Var TInt "x") (Var TInt "n"))))
(App TInt
(Var (TFun "x3" (TFun "x2" TInt TInt) TInt) "foo")
(Abs (TFun "x2" TInt TInt) "x" (Var TInt "x"))))))
]
-- | Fixtures for the second type-inference pass ('TyInferance.infer2'),
-- which additionally annotates arrow types with allocation flags
-- ('AFun' for plain functions, 'AClo' for environment-capturing
-- closures).  Each pair is (source program, expected annotated AST).
-- NOTE(review): the local 'bool' binding is unused in this list —
-- candidate for removal (it is only used in 'testInference' above).
testInference2 :: [(String, TyExpr2)]
testInference2 =
let int = LitInt TInt in
let bool = LitBool TBool in [
("fun x -> x",
Abs (TFun [AFun] (TVar "x0") (TVar "x0")) "x"
(Var (TVar "x0") "x")),
("fun x -> fun y -> y",
Abs (TFun [AFun] (TVar "x0") (TFun [AFun] (TVar "x1") (TVar "x1"))) "x"
(Abs (TFun [AFun] (TVar "x1") (TVar "x1")) "y"
(Var (TVar "x1") "y"))),
("fun x -> fun y -> x",
Abs (TFun [AFun] (TVar "x0") (TFun [AClo] (TVar "x1") (TVar "x0"))) "x"
(Abs (TFun [AClo] (TVar "x1") (TVar "x0")) "y"
(Var (TVar "x0") "x"))),
("let f = fun x -> x + 10 in f 10 ",
Let TInt ("f",TSType (TFun [AFun] TInt TInt))
(Abs (TFun [AFun] TInt TInt) "x"
(OpAdd TInt (Var TInt "x") (LitInt TInt 10)))
(App TInt
(Var (TFun [AFun] TInt TInt) "f")
(LitInt TInt 10))),
("let n = 10 in fun x -> x + n",
Let (TFun [AClo] TInt TInt) ("n",TSType TInt)
(LitInt TInt 10)
(Abs (TFun [AClo] TInt TInt) "x"
(OpAdd TInt (Var TInt "x") (Var TInt "n")))),
("let n = 10 in let f = fun x -> x + n in f 10 ",
Let TInt ("n",TSType TInt) (LitInt TInt 10)
(Let TInt ("f",TSType (TFun [AClo] TInt TInt))
(Abs (TFun [AClo] TInt TInt) "x"
(OpAdd TInt (Var TInt "x") (Var TInt "n")))
(App TInt (Var (TFun [AClo] TInt TInt) "f") (LitInt TInt 10)))),
("fun g -> g 10",
Abs (TFun [AFun] (TFun [] TInt (TVar "x1")) (TVar "x1")) "g"
(App (TVar "x1") (Var (TFun [] TInt (TVar "x1")) "g") (LitInt TInt 10))),
("let n = 10 in " ++
"let f = fun x -> x + 10 in " ++
"let c = fun x -> x + n in " ++
"let foo = fun g -> g 10 in " ++
"foo f",
Let TInt ("n",TSType TInt) (LitInt TInt 10)
(Let TInt ("f",TSType (TFun [AFun] TInt TInt))
(Abs (TFun [AFun] TInt TInt) "x"
(OpAdd TInt (Var TInt "x") (LitInt TInt 10)))
(Let TInt ("c",TSType (TFun [AClo] TInt TInt))
(Abs (TFun [AClo] TInt TInt) "x"
(OpAdd TInt (Var TInt "x") (Var TInt "n")))
(Let TInt ("foo",TSForall "x5" (TSType
(TFun [AFun] (TFun [AFun] TInt (TVar "x5")) (TVar "x5"))))
(Abs (TFun [AFun] (TFun [AFun] TInt (TVar "x5")) (TVar "x5")) "g"
(App (TVar "x5")
(Var (TFun [AFun] TInt (TVar "x5")) "g")
(LitInt TInt 10)))
(App TInt
(Var (TFun [AFun] (TFun [AFun] TInt TInt) TInt) "foo")
(Var (TFun [AFun] TInt TInt) "f")))))),
("let n = 10 in " ++
"let f = fun x -> x + 10 in " ++
"let c = fun x -> x + n in " ++
"let foo = fun g -> g 10 in " ++
"foo c",
Let TInt ("n",TSType TInt) (LitInt TInt 10)
(Let TInt ("f",TSType (TFun [AFun] TInt TInt))
(Abs (TFun [AFun] TInt TInt) "x"
(OpAdd TInt (Var TInt "x") (LitInt TInt 10)))
(Let TInt ("c",TSType (TFun [AClo] TInt TInt))
(Abs (TFun [AClo] TInt TInt) "x"
(OpAdd TInt (Var TInt "x") (Var TInt "n")))
(Let TInt ("foo",TSForall "x5" (TSType
(TFun [AFun] (TFun [AClo] TInt (TVar "x5")) (TVar "x5"))))
(Abs (TFun [AFun] (TFun [AClo] TInt (TVar "x5")) (TVar "x5")) "g"
(App (TVar "x5")
(Var (TFun [AClo] TInt (TVar "x5")) "g")
(LitInt TInt 10)))
(App TInt
(Var (TFun [AFun] (TFun [AClo] TInt TInt) TInt) "foo")
(Var (TFun [AClo] TInt TInt) "c")))))),
("let n = 10 in " ++
"let foo = fun g -> g 10 in " ++
"foo (fun x -> x + n) + foo (fun x -> x)",
Let TInt ("n",TSType TInt) (int 10)
(Let TInt ("foo",TSForall "x1"
(TSType (TFun [AFun] (TFun [AFun,AClo] TInt (TVar "x1")) (TVar "x1"))))
(Abs (TFun [AFun] (TFun [AFun,AClo] TInt (TVar "x1")) (TVar "x1")) "g"
(App (TVar "x1")
(Var (TFun [AFun,AClo] TInt (TVar "x1")) "g") (int 10)))
(OpAdd TInt
(App TInt
(Var (TFun [AFun] (TFun [AFun,AClo] TInt TInt) TInt) "foo")
(Abs (TFun [AFun,AClo] TInt TInt) "x"
(OpAdd TInt (Var TInt "x") (Var TInt "n"))))
(App TInt
(Var (TFun [AFun] (TFun [AFun,AClo] TInt TInt) TInt) "foo")
(Abs (TFun [AFun,AClo] TInt TInt) "x" (Var TInt "x")))))),
("let n = 10 in " ++
"let foo = fun g -> g 10 in " ++
"foo (fun x -> x) + foo (fun x -> x + n)",
Let TInt ("n",TSType TInt) (int 10)
(Let TInt ("foo",TSForall "x1"
(TSType (TFun [AFun] (TFun [AFun,AClo] TInt (TVar "x1")) (TVar "x1"))))
(Abs (TFun [AFun] (TFun [AFun,AClo] TInt (TVar "x1")) (TVar "x1")) "g"
(App (TVar "x1")
(Var (TFun [AFun,AClo] TInt (TVar "x1")) "g") (int 10)))
(OpAdd TInt
(App TInt
(Var (TFun [AFun] (TFun [AFun,AClo] TInt TInt) TInt) "foo")
(Abs (TFun [AFun,AClo] TInt TInt) "x" (Var TInt "x")))
(App TInt
(Var (TFun [AFun] (TFun [AFun,AClo] TInt TInt) TInt) "foo")
(Abs (TFun [AFun,AClo] TInt TInt) "x"
(OpAdd TInt (Var TInt "x") (Var TInt "n"))))))),
("let n = 10 in " ++
"let f = fun x -> x + 10 in " ++
"let g = fun x -> x + n in " ++
"let foo = fun g -> g 10 in " ++
"(foo f) + (foo g)",
Let TInt ("n",TSType TInt) (int 10)
(Let TInt ("f",TSType (TFun [AFun,AClo] TInt TInt))
(Abs (TFun [AFun,AClo] TInt TInt) "x"
(OpAdd TInt (Var TInt "x") (int 10)))
(Let TInt ("g",TSType (TFun [AFun,AClo] TInt TInt))
(Abs (TFun [AFun, AClo] TInt TInt) "x"
(OpAdd TInt (Var TInt "x") (Var TInt "n")))
(Let TInt ("foo",TSForall "x5" (TSType
(TFun [AFun] (TFun [AFun,AClo] TInt (TVar "x5")) (TVar "x5"))))
(Abs (TFun [AFun] (TFun [AFun,AClo] TInt (TVar "x5")) (TVar "x5")) "g"
(App (TVar "x5")
(Var (TFun [AFun,AClo] TInt (TVar "x5")) "g")
(LitInt TInt 10)))
(OpAdd TInt
(App TInt
(Var (TFun [AFun] (TFun [AFun,AClo] TInt TInt) TInt) "foo")
(Var (TFun [AFun,AClo] TInt TInt) "f"))
(App TInt
(Var (TFun [AFun] (TFun [AFun,AClo] TInt TInt) TInt) "foo")
(Var (TFun [AFun,AClo] TInt TInt) "g")))))))
]
-- | Interpreter fixtures: (source program, expected 'Value').
-- Covers arithmetic operator precedence, integer comparison,
-- let-bound polymorphic identity, and a recursive sum.
interpretationTests = [
("4 + 2", ConstInt 6),
("4 - 2", ConstInt 2),
("4 * 2", ConstInt 8),
("4 / 2", ConstInt 2),
("6 + 4 / 2", ConstInt 8),
("2 * 3 + 4 / 2", ConstInt 8),
("2 < 4", ConstBool True),
("4 < 2", ConstBool False),
("let i = fun x -> x in i 0", ConstInt 0),
("let i = fun x -> x in if i true then i 1 else i 2", ConstInt 1),
("let rec sum = fun n -> if n = 0 then 0 else n + sum (n - 1) in sum 3", ConstInt 6)
]
-- | A-normal-form fixtures: (source program, expected pretty-printed
-- normal form produced by 'Compiler.toNormalForm').  The expected
-- strings show every intermediate result bound to a fresh xN variable.
normalFormTests = [
("1", "1"),
("fun x -> x",
"let x0 = (fun x -> x) in\n" ++
"x0"),
("1 + 2",
"let x0 = 1 + 2 in\nx0"),
("1 + 2 + 3",
"let x0 = 1 + 2 in\nlet x1 = x0 + 3 in\nx1"),
("1 + 2 + 3 + 4",
"let x0 = 1 + 2 in\nlet x1 = x0 + 3 in\nlet x2 = x1 + 4 in\nx2"),
("(fun x -> x) true",
"let x0 = (fun x -> x) in\n" ++
"let x1 = x0 true in\n" ++
"x1"),
("let f = fun x -> fun y -> fun z -> x in f 1 2 3",
"let x0 = (fun x -> " ++
"let x1 = (fun y -> " ++
"let x2 = (fun z -> x) in\nx2) in\nx1) in\n" ++
"let x3 = x0 1 in\n" ++
"let x4 = x3 2 in\n" ++
"let x5 = x4 3 in\n" ++
"x5"),
("(fun x -> x) (fun x -> x) true",
"let x0 = (fun x -> x) in\n" ++
"let x1 = (fun x -> x) in\n" ++
"let x2 = x0 x1 in\n" ++
"let x3 = x2 true in\n" ++
"x3"),
("let a = 1 in let b = 2 in a * b",
"let x0 = 1 * 2 in\nx0"),
("let f = fun x -> x in f 1",
"let x0 = (fun x -> x) in\n" ++
"let x1 = x0 1 in\n" ++
"x1"),
("let f = fun x -> x in f 1 + f 2",
"let x0 = (fun x -> x) in\n" ++
"let x1 = x0 1 in\n" ++
"let x2 = x0 2 in\n" ++
"let x3 = x1 + x2 in\n" ++
"x3"),
("let a = 1 in let b = 2 in 3 + a * b",
"let x0 = 1 * 2 in\nlet x1 = 3 + x0 in\nx1"),
("if true then 1 else 2",
"let x0 = if true then 1 else 2 in\nx0"),
("let f = fun x -> x in if true then f 1 else f 2",
"let x0 = (fun x -> x) in\n" ++
"let x1 = " ++
"if true then " ++
"let x2 = x0 1 in\nx2 " ++
"else " ++
"let x3 = x0 2 in\nx3 in\n" ++
"x1"),
("let f = fun x -> if x then 1 else 2 in f true",
"let x0 = (fun x -> " ++
"let x1 = if x then 1 else 2 in\n" ++
"x1) in\n" ++
"let x2 = x0 true in\n" ++
"x2"),
("let rec f = fun x -> fun y -> f y x in f 1 2",
"let rec x0 = (fun x -> " ++
"let x1 = (fun y -> " ++
"let x2 = x0 y in\n" ++
"let x3 = x2 x in\n" ++
"x3) in\n" ++
"x1) in\n" ++
"let x4 = x0 1 in\n" ++
"let x5 = x4 2 in\n" ++
"x5"),
("let rec sum = fun n -> if n = 0 then 0 else n + sum (n - 1) in sum 3",
"let rec x0 = (fun n -> " ++
"let x1 = n = 0 in\n" ++
"let x2 = if x1 then 0 else " ++
"let x3 = n - 1 in\n" ++
"let x4 = x0 x3 in\n" ++
"let x5 = n + x4 in\n" ++
"x5 in\n" ++
"x2) in\n" ++
"let x6 = x0 3 in\n" ++
"x6"),
("let x = 5 in let f = fun y -> x + y in f 3",
"let x0 = (fun y -> " ++
"let x1 = 5 + y in\n" ++
"x1) in\n" ++
"let x2 = x0 3 in\n" ++
"x2")
]
-- | Free-variable fixtures: (typing context, source program, expected
-- free variable names reported by 'Compiler.fv' on the normal form).
-- Variables bound in the context but used freely in the program (e.g.
-- "y", "sum") must be reported; locally bound ones must not.
fvTests = [
(TyCtxt.empty, "fun x -> x", []),
(TyCtxt.singleton ("y", TInt), "fun x -> y", ["y"]),
(TyCtxt.singleton ("y", TInt), "fun x -> x + y", ["y"]),
(TyCtxt.empty, "let x = 2 + 3 in x", []),
(TyCtxt.empty, "let x = 5 in let f = fun y -> x + y in f 3", []),
(TyCtxt.singleton ("sum", TFun "" TInt TInt), "fun n -> if n = 0 then 0 else n + sum (n - 1)", ["sum"])
]
-- | Closure-conversion fixtures: (source program, expected printed
-- output of 'Compiler.toClosure' on the normal form).  Captured
-- variables appear in the closure's environment list and are read
-- back as env.N; recursive closures reference themselves as env.self.
closureTests = [
("let n = 1 * 5 in " ++
"let f = fun x -> fun y -> x + y + n in " ++
"f 1 2",
"let x0 = 1 * 5 in\n" ++
"let x1 = Closure (fun env -> fun x -> " ++
"let x2 = Closure (fun env -> fun y -> " ++
"let x3 = env.0 + y in\n" ++
"let x4 = x3 + env.1 in\n" ++
"x4, [x,env.0]) in\n" ++
"x2, [x0]) in\n" ++
"let x5 = x1 1 in\n" ++
"let x6 = x5 2 in\n" ++
"x6"),
("let x = 5 in let f = fun y -> x + y in f 3",
"let x0 = Closure (fun env -> fun y -> " ++
"let x1 = 5 + y in\n" ++
"x1, []) in\n" ++
"let x2 = x0 3 in\n" ++
"x2"),
("let rec f = fun x -> fun y -> f y x in f 1 2",
"let x0 = Closure (fun env -> fun x -> " ++
"let x1 = Closure (fun env -> fun y -> " ++
"let x2 = env.0 y in\n" ++
"let x3 = x2 env.1 in\n" ++
"x3, [env.self,x]) in\n" ++
"x1, []) in\n" ++
"let x4 = x0 1 in\n" ++
"let x5 = x4 2 in\n" ++
"x5"),
("let x = 5 + 3 in let f = fun y -> x + y in f 3",
"let x0 = 5 + 3 in\n" ++
"let x1 = Closure (fun env -> fun y -> " ++
"let x2 = env.0 + y in\n" ++
"x2, [x0]) in\n" ++
"let x3 = x1 3 in\n" ++
"x3"),
("let rec sum = fun n -> if n = 0 then 0 else n + sum (n - 1) in sum 3",
"let x0 = Closure (fun env -> fun n -> " ++
"let x1 = n = 0 in\n" ++
"let x2 = if x1 then 0 else " ++
"let x3 = n - 1 in\n" ++
"let x4 = env.self x3 in\n" ++
"let x5 = n + x4 in\n" ++
"x5 in\n" ++
"x2, []) in\n" ++
"let x6 = x0 3 in\n" ++
"x6")
]
-- | Check that lexing then parsing a program yields the expected AST.
testCompilation :: (String, Expr () ()) -> Test
testCompilation (prog, expected) =
  let actual = Parser.parse (Lexer.alexScanTokens prog)
      label  = "program is '" ++ prog ++ "'"
  in TestLabel label (TestCase (assertEqual prog expected actual))
-- | Check that two syntactically different programs parse to the same
-- AST.  Used to pin down the grammar's precedence and associativity.
testComparaison :: (String, String) -> Test
testComparaison (prog1, prog2) =
  -- Label grammar fixed: "program are" -> "programs are".
  TestLabel ("programs are '" ++ prog1 ++ "' and '" ++ prog2 ++ "'") $
  TestCase $
  assertEqual prog1
              (Parser.parse (Lexer.alexScanTokens prog1))
              (Parser.parse (Lexer.alexScanTokens prog2))
-- | Check that type inference under 'ctxt' annotates the parsed
-- program with exactly the expected typed AST.
testTypeInference :: (Context, String, Expr TypeSchema Type) -> Test
testTypeInference (ctxt, prog, expr) =
  let term = Parser.parse (Lexer.alexScanTokens prog)
  in TestLabel ("program '" ++ prog ++ "' has type '" ++
                show (Expr.getType expr) ++ "'") $
     TestCase $
     case TyInferance.infer ctxt term of
       -- Only the annotated expression is checked; the substitution
       -- and constraints are intentionally ignored (underscored to
       -- silence unused-binding warnings).
       Right (_subst, _cs, expr') -> assertEqual "" expr expr'
       Left msg -> assertFailure msg
-- | Check the second inference pass ('infer2') against an expected
-- annotated AST, starting from an empty typing context.
testTypeInference2 :: (String, TyExpr2) -> Test
testTypeInference2 (prog, expr) =
  let term  = Parser.parse (Lexer.alexScanTokens prog)
      label = "program '" ++ prog ++ "' has type '" ++
              show (Expr.getType expr) ++ "'"
  in TestLabel label $ TestCase $
     either assertFailure (assertEqual "" expr)
            (TyInferance.infer2 TyCtxt.empty term)
-- | Check that the program evaluates, in an empty environment, to the
-- expected value.
testInterpreter :: (String, Value () ()) -> Test
testInterpreter (prog, val) =
  let term = Parser.parse (Lexer.alexScanTokens prog)
  -- Label grammar fixed: "evaluate to" -> "evaluates to".
  in TestLabel ("program '" ++ prog ++ "' evaluates to '" ++ show val ++ "'") $
     TestCase $
     case Interpreter.eval [] term of
       Just v  -> assertEqual "" val v
       Nothing -> assertFailure "evaluation went wrong"
-- | Check the A-normal-form translation of a type-checked program
-- against its expected pretty-printed form.
testNormalForm :: (String, String) -> Test
testNormalForm (prog, nf) =
  TestLabel prog $ TestCase $
  case TyInferance.infer2 TyCtxt.empty (Parser.parse (Lexer.alexScanTokens prog)) of
    Left msg   -> assertFailure msg
    Right expr -> assertEqual "" nf (show (Compiler.toNormalForm expr))
-- | Check the free-variable names 'Compiler.fv' reports for the
-- normal form of a program type-checked under 'ctxt'.
testFreeVariables :: (Context, String, [String]) -> Test
testFreeVariables (ctxt, prog, fvs) =
  TestLabel prog $ TestCase $ check (TyInferance.infer2 ctxt term)
  where
    term = Parser.parse (Lexer.alexScanTokens prog)
    -- Inference failure fails the test; otherwise compare variable names.
    check (Left msg)   = assertFailure msg
    check (Right expr) =
      assertEqual "" fvs (map fst (Compiler.fv (Compiler.toNormalForm expr)))
-- | Check closure conversion: normal form followed by
-- 'Compiler.toClosure', compared by its printed representation.
testClosure :: (String, String) -> Test
testClosure (prog, nfc) =
  TestLabel prog $ TestCase $
  case TyInferance.infer2 TyCtxt.empty term of
    Left msg -> assertFailure msg
    Right expr ->
      assertEqual "" nfc
                  (show (Compiler.toClosure (Compiler.toNormalForm expr)))
  where
    term = Parser.parse (Lexer.alexScanTokens prog)
-- | Top-level suite grouping every fixture list with its runner.
-- (Added the missing type signature and dropped the redundant '$'
-- before the list literal.)
tests :: Test
tests =
  TestList [
    TestLabel "testing (Parser.parse . Lexer.alexScanTokens)" $
      TestList (map testCompilation testCases),
    TestLabel "testing (parse prog1 == parse prog2)" $
      TestList (map testComparaison testEquivalences),
    TestLabel "testing (infer (parse prog))" $
      TestList (map testTypeInference testInference),
    TestLabel "testing (infer2 (parse prog))" $
      TestList (map testTypeInference2 testInference2),
    TestLabel "testing (eval [] (parse prog))" $
      TestList (map testInterpreter interpretationTests),
    TestLabel "testing (toNormalForm (parse prog))" $
      TestList (map testNormalForm normalFormTests),
    TestLabel "Compiler.fv" $
      TestList (map testFreeVariables fvTests),
    TestLabel "Compiler.toClosure" $
      TestList (map testClosure closureTests)
  ]
-- | Run the whole suite, discarding the returned summary counts.
main :: IO ()
main = do
  _ <- runTestTT tests
  return ()
| null | https://raw.githubusercontent.com/mdesharnais/mini-ml/304017aab02c04ed4fbd9420405d3a0483dcba37/test/Spec.hs | haskell | (show subst) | # LANGUAGE OverloadedLists #
import qualified Compiler
import qualified Data.Char
import qualified Expr
import qualified Data.Set as Set
import qualified Interpreter
import qualified Lexer
import qualified Parser
import qualified Type
import qualified TypeContext as TyCtxt
import qualified TypeInference as TyInferance
import qualified TypeSubstitution as Subst
import Data.Bifunctor(bimap)
import Expr(Expr(..))
import Interpreter(Value(..))
import Test.HUnit
import Type
import TypeContext(Context)
litBool = [
("true", LitBool () True),
("false", LitBool () False)
]
litInt min max =
let
impl n xs =
if n >= max then xs else impl (n + 1) ((show n, LitInt () n) : xs)
in
impl min []
variables = [
("a", Var () "a"),
("ab", Var () "ab"),
("ab1", Var () "ab1"),
("ab12", Var () "ab12"),
("ab121", Var () "ab121"),
("ab121b", Var () "ab121b"),
("ab121ba", Var () "ab121ba")
]
functions = [
("let min = fun x -> fun y -> if x < y then x else y in min 3 5",
Let () ("min", ())
(Abs () "x" (Abs () "y"
(If () (OpLT () (Var () "x") (Var () "y"))
(Var () "x")
(Var () "y"))))
(App () (App () (Var () "min") (LitInt () 3)) (LitInt () 5))),
("1 * 2 < 3 * 4",
OpLT ()
(OpMul () (LitInt () 1) (LitInt () 2))
(OpMul () (LitInt () 3) (LitInt () 4)))
]
testCases =
litBool ++
litInt 0 101 ++
variables ++
functions
testEquivalences = [
("a * b * c", "(a * b) * c"),
("a + b * c", "a + (b * c)"),
("f x y z", "((f x) y) z"),
("f x + f y", "(f x) + (f y)"),
("a * b < c * d", "(a * b) < (c * d)"),
("extern f 5", "(extern f) 5"),
("let min = fun x -> fun y -> if x < y then x else y in min 2 3",
"let min = (fun x -> (fun y -> (if (x < y) then x else y))) in ((min 2) 3)")
]
testInference :: [(Context, String, Expr TypeSchema Type)]
testInference =
let int = LitInt TInt in
let bool = LitBool TBool in [
(TyCtxt.empty, "true", bool True),
(TyCtxt.empty, "false", bool False),
(TyCtxt.empty, "1", int 1),
(TyCtxt.empty, "12", int 12),
(TyCtxt.empty, "123", int 123),
(TyCtxt.empty, "3 - 2", OpSub TInt (int 3) (int 2)),
(TyCtxt.empty, "3 + 2", OpAdd TInt (int 3) (int 2)),
(TyCtxt.empty, "3 * 2", OpMul TInt (int 3) (int 2)),
(TyCtxt.empty, "3 / 2", OpDiv TInt (int 3) (int 2)),
(TyCtxt.empty, "3 < 2", OpLT TBool (int 3) (int 2)),
(TyCtxt.empty, "3 = 2", OpEQ TBool (int 3) (int 2)),
(TyCtxt.empty, "if true then 0 else 1",
If TInt (bool True) (int 0) (int 1)),
(TyCtxt.empty, "extern f",
ExternVar (TFun "x0" TInt TInt) "f"),
(TyCtxt.empty, "fun x -> x",
Abs (TFun "x1" (TVar "x0") (TVar "x0")) "x" (Var (TVar "x0") "x")),
(TyCtxt.empty, "fun x -> fun y -> x",
Abs (TFun "x3" (TVar "x0") (TFun "x2" (TVar "x1") (TVar "x0"))) "x"
(Abs (TFun "x2" (TVar "x1") (TVar "x0")) "y"
(Var (TVar "x0") "x"))),
(TyCtxt.empty, "fun x -> fun y -> y",
Abs (TFun "x3" (TVar "x0") (TFun "x2" (TVar "x1") (TVar "x1"))) "x"
(Abs (TFun "x2" (TVar "x1") (TVar "x1")) "y"
(Var (TVar "x1") "y"))),
(TyCtxt.empty, "fun x -> true",
Abs (TFun "x1" (TVar "x0") TBool) "x" (bool True)),
(TyCtxt.empty, "let x = true in 3",
Let TInt ("x", TSType TBool) (bool True) (int 3)),
(TyCtxt.empty, "let min = fun x -> fun y -> if x < y then x else y in min 2 3",
Let TInt ("min", (TSType (TFun "x3" TInt (TFun "x2" TInt TInt))))
(Abs (TFun "x3" TInt (TFun "x2" TInt TInt)) "x"
(Abs (TFun "x2" TInt TInt) "y"
(If TInt (OpLT TBool (Var TInt "x") (Var TInt "y"))
(Var TInt "x")
(Var TInt "y"))))
(App TInt
(App (TFun "x2" TInt TInt)
(Var (TFun "x3" TInt (TFun "x2" TInt TInt)) "min")
(int 2))
(int 3))),
(TyCtxt.empty, "let rec f = fun x -> x in f 1",
Let TInt ("f", TSForall "x0" (TSType (TFun "x2" (TVar "x0") (TVar "x0"))))
(AbsRec (TFun "x2" (TVar "x0") (TVar "x0")) "f" "x" (Var (TVar "x0") "x"))
(App TInt (Var (TFun "x2" TInt TInt) "f") (int 1))),
(TyCtxt.empty, "let rec f = fun x -> fun y -> x in f 1 2",
Let TInt ("f",TSForall "x3" (TSForall "x0"
(TSType (TFun "x2" (TVar "x0") (TFun "x4" (TVar "x3") (TVar "x0"))))))
(AbsRec (TFun "x2" (TVar "x0") (TFun "x4" (TVar "x3") (TVar "x0"))) "f" "x"
(Abs (TFun "x4" (TVar "x3") (TVar "x0")) "y" (Var (TVar "x0") "x")))
(App TInt
(App (TFun "x4" TInt TInt)
(Var (TFun "x2" TInt (TFun "x4" TInt TInt)) "f")
(LitInt TInt 1))
(LitInt TInt 2))),
(TyCtxt.empty, "let rec sum = fun n -> if n = 0 then 0 else n + sum (n - 1) in sum 3",
Let TInt ("sum", TSType (TFun "x2" TInt TInt))
(AbsRec (TFun "x2" TInt TInt) "sum" "n"
(If TInt (OpEQ TBool (Var TInt "n") (int 0))
(int 0)
(OpAdd TInt
(Var TInt "n")
(App TInt
(Var (TFun "x2" TInt TInt) "sum")
(OpSub TInt (Var TInt "n") (int 1))))))
(App TInt (Var (TFun "x2" TInt TInt) "sum") (int 3))),
(TyCtxt.empty, "let rec mult = fun x -> fun y -> if y < 1 then 0 else x * mult x (y - 1) in mult 3 5",
Let TInt ("mult", TSType (TFun "x2" TInt (TFun "x8" TInt TInt)))
(AbsRec (TFun "x2" TInt (TFun "x8" TInt TInt)) "mult" "x"
(Abs (TFun "x8" TInt TInt) "y"
(If TInt (OpLT TBool (Var TInt "y") (int 1))
(int 0)
(OpMul TInt
(Var TInt "x")
(App TInt
(App (TFun "x8" TInt TInt)
(Var (TFun "x2" TInt (TFun "x8" TInt TInt)) "mult")
(Var TInt "x"))
(OpSub TInt (Var TInt "y") (int 1)))))))
(App TInt
(App (TFun "x8" TInt TInt)
(Var (TFun "x2" TInt (TFun "x8" TInt TInt)) "mult")
(int 3))
(int 5))),
(TyCtxt.empty, "let f = fun x -> fun y -> if true then x else y in f 2 3",
Let TInt ("f", TSForall "x1"
(TSType (TFun "x3" (TVar "x1") (TFun "x2" (TVar "x1") (TVar "x1")))))
(Abs (TFun "x3" (TVar "x1") (TFun "x2" (TVar "x1") (TVar "x1"))) "x"
(Abs (TFun "x2" (TVar "x1") (TVar "x1")) "y"
(If (TVar "x1") (bool True)
(Var (TVar "x1") "x")
(Var (TVar "x1") "y"))))
(App TInt
(App (TFun "x2" TInt TInt)
(Var (TFun "x3" TInt (TFun "x2" TInt TInt)) "f")
(int 2))
(int 3))),
(TyCtxt.empty, "let f = fun b -> fun x -> fun y -> if b then x else y in f true 2 3",
Let TInt ("f", (TSForall "x2" (TSType
(TFun "x5" TBool (TFun "x4" (TVar "x2") (TFun "x3" (TVar "x2") (TVar "x2")))))))
(Abs (TFun "x5" TBool (TFun "x4" (TVar "x2") (TFun "x3" (TVar "x2") (TVar "x2")))) "b"
(Abs (TFun "x4" (TVar "x2") (TFun "x3" (TVar "x2") (TVar "x2"))) "x"
(Abs (TFun "x3" (TVar "x2") (TVar "x2")) "y"
(If (TVar "x2") (Var TBool "b")
(Var (TVar "x2") "x")
(Var (TVar "x2") "y")))))
(App TInt
(App (TFun "x3" TInt TInt)
(App (TFun "x4" TInt (TFun "x3" TInt TInt))
(Var (TFun "x5" TBool (TFun "x4" TInt (TFun "x3" TInt TInt))) "f")
(bool True))
(int 2))
(int 3))),
(TyCtxt.empty, "let i = fun x -> x in if i true then i 1 else i 2",
Let TInt ("i", TSForall "x0" (TSType (TFun "x1" (TVar "x0") (TVar "x0"))))
(Abs (TFun "x1" (TVar "x0") (TVar "x0")) "x"
(Var (TVar "x0") "x"))
(If TInt (App TBool (Var (TFun "x1" TBool TBool) "i") (bool True))
(App TInt (Var (TFun "x1" TInt TInt) "i") (int 1))
(App TInt (Var (TFun "x1" TInt TInt) "i") (int 2)))),
(TyCtxt.empty, "let foo = fun b -> if b then true else false in foo true",
Let TBool ("foo", (TSType (TFun "x1" TBool TBool)))
(Abs (TFun "x1" TBool TBool) "b"
(If TBool (Var TBool "b")
(bool True)
(bool False)))
(App TBool (Var (TFun "x1" TBool TBool) "foo") (bool True))),
(TyCtxt.empty, "let rec f = fun x -> x in if f true then f 3 else f 4",
(Let TInt ("f", TSForall "x0" (TSType (TFun "x2" (TVar "x0") (TVar "x0"))))
(AbsRec(TFun "x2" (TVar "x0") (TVar "x0")) "f" "x" (Var (TVar "x0") "x"))
(If TInt (App TBool (Var (TFun "x2" TBool TBool) "f") (bool True))
(App TInt (Var (TFun "x2" TInt TInt) "f") (int 3))
(App TInt (Var (TFun "x2" TInt TInt) "f") (int 4))))),
(TyCtxt.empty,
"let not = fun b -> if b then b else false in " ++
"let rec foo = fun b -> fun x -> fun y -> " ++
"if b then x else foo (not b) y x in " ++
"foo false 1 1",
Let TInt ("not", TSType (TFun "x1" TBool TBool))
(Abs (TFun "x1" TBool TBool) "b"
(If TBool (Var TBool "b")
(Var TBool "b")
(bool False)))
(Let TInt ("foo", TSForall "x13" (TSType
(TFun "x4" TBool
(TFun "x16" (TVar "x13")
(TFun "x15" (TVar "x13") (TVar "x13"))))))
(AbsRec (TFun "x4" TBool
(TFun "x16" (TVar "x13")
(TFun "x15" (TVar "x13") (TVar "x13")))) "foo" "b"
(Abs (TFun "x16" (TVar "x13") (TFun "x15" (TVar "x13") (TVar "x13"))) "x"
(Abs (TFun "x15" (TVar "x13") (TVar "x13")) "y"
(If (TVar "x13") (Var TBool "b")
(Var (TVar "x13") "x")
(App (TVar "x13")
(App (TFun "x15" (TVar "x13") (TVar "x13"))
(App (TFun "x16" (TVar "x13") (TFun "x15" (TVar "x13") (TVar "x13")))
(Var (TFun "x4" TBool (TFun "x16" (TVar "x13")
(TFun "x15" (TVar "x13") (TVar "x13")))) "foo")
(App TBool
(Var (TFun "x1" TBool TBool) "not")
(Var TBool "b")))
(Var (TVar "x13") "y"))
(Var (TVar "x13") "x"))))))
(App TInt
(App (TFun "x15" TInt TInt)
(App (TFun "x16" TInt (TFun "x15" TInt TInt))
(Var (TFun "x4" TBool (TFun "x16" TInt (TFun "x15" TInt TInt))) "foo")
(bool False))
(int 1))
(int 1)))),
(TyCtxt.empty, "fun fix -> fun f -> f (fun y -> fix f y)",
Abs
(TFun "x11"
(TFun "x4"
(TFun "x9" (TFun "x7" (TVar "x2") (TVar "x5")) (TVar "x8"))
(TFun "x6" (TVar "x2") (TVar "x5")))
(TFun "x10"
(TFun "x9" (TFun "x7" (TVar "x2") (TVar "x5")) (TVar "x8"))
(TVar "x8")))
"fix"
(Abs
(TFun "x10"
(TFun "x9" (TFun "x7" (TVar "x2") (TVar "x5")) (TVar "x8"))
(TVar "x8"))
"f"
(App (TVar "x8")
(Var (TFun "x9" (TFun "x7" (TVar "x2") (TVar "x5")) (TVar "x8")) "f")
(Abs (TFun "x7" (TVar "x2") (TVar "x5")) "y"
(App (TVar "x5")
(App (TFun "x6" (TVar "x2") (TVar "x5"))
(Var
(TFun "x4"
(TFun "x9" (TFun "x7" (TVar "x2") (TVar "x5")) (TVar "x8"))
(TFun "x6" (TVar "x2") (TVar "x5"))) "fix")
(Var (TFun "x9" (TFun "x7" (TVar "x2") (TVar "x5")) (TVar "x8")) "f"))
(Var (TVar "x2") "y")))))
),
(TyCtxt.empty, "let rec fix = fun f -> f (fun y -> fix f y) in fix",
Let
(TFun "x2"
(TFun "x10"
(TFun "x8" (TVar "x12") (TVar "x11"))
(TFun "x7" (TVar "x12") (TVar "x11")))
(TFun "x7" (TVar "x12") (TVar "x11")))
("fix",
TSForall "x6" (TSForall "x3" (TSType (TFun "x2"
(TFun "x10"
(TFun "x8" (TVar "x3") (TVar "x6"))
(TFun "x7" (TVar "x3") (TVar "x6")))
(TFun "x7" (TVar "x3") (TVar "x6"))))))
(AbsRec
(TFun "x2"
(TFun "x10"
(TFun "x8" (TVar "x3") (TVar "x6"))
(TFun "x7" (TVar "x3") (TVar "x6")))
(TFun "x7" (TVar "x3") (TVar "x6"))) "fix" "f"
(App (TFun "x7" (TVar "x3") (TVar "x6"))
(Var
(TFun "x10"
(TFun "x8" (TVar "x3") (TVar "x6"))
(TFun "x7" (TVar "x3") (TVar "x6"))) "f")
(Abs (TFun "x8" (TVar "x3") (TVar "x6")) "y"
(App (TVar "x6")
(App (TFun "x7" (TVar "x3") (TVar "x6"))
(Var
(TFun "x2"
(TFun "x10"
(TFun "x8" (TVar "x3") (TVar "x6"))
(TFun "x7" (TVar "x3") (TVar "x6")))
(TFun "x7" (TVar "x3") (TVar "x6")))
"fix")
(Var
(TFun "x10"
(TFun "x8" (TVar "x3") (TVar "x6"))
(TFun "x7" (TVar "x3") (TVar "x6")))
"f"))
(Var (TVar "x3") "y")))))
(Var
(TFun "x2"
(TFun "x10"
(TFun "x8" (TVar "x12") (TVar "x11"))
(TFun "x7" (TVar "x12") (TVar "x11")))
(TFun "x7" (TVar "x12") (TVar "x11")))
"fix")),
(TyCtxt.empty,
"fun f -> f (fun x -> f (fun y -> y))",
Abs (TFun "x9" (TFun "x5" (TFun "x3" (TVar "x2") (TVar "x2")) (TVar "x2")) (TVar "x2")) "f"
(App (TVar "x2")
(Var (TFun "x5" (TFun "x3" (TVar "x2") (TVar "x2")) (TVar "x2")) "f")
(Abs (TFun "x3" (TVar "x2") (TVar "x2")) "x"
(App (TVar "x2")
(Var (TFun "x5" (TFun "x3" (TVar "x2") (TVar "x2")) (TVar "x2")) "f")
(Abs (TFun "x3" (TVar "x2") (TVar "x2")) "y"
(Var (TVar "x2") "y")))))),
(TyCtxt.empty,
"fun f -> f (fun x -> f (fun y -> x))",
Abs (TFun "x9" (TFun "x5" (TFun "x3" (TVar "x2") (TVar "x2")) (TVar "x2")) (TVar "x2")) "f"
(App (TVar "x2")
(Var (TFun "x5" (TFun "x3" (TVar "x2") (TVar "x2")) (TVar "x2")) "f")
(Abs (TFun "x3" (TVar "x2") (TVar "x2")) "x"
(App (TVar "x2")
(Var (TFun "x5" (TFun "x3" (TVar "x2") (TVar "x2")) (TVar "x2")) "f")
(Abs (TFun "x3" (TVar "x2") (TVar "x2")) "y"
(Var (TVar "x2") "x")))))),
(TyCtxt.singleton ("x", TInt), "x",
Var TInt "x"),
(TyCtxt.singleton ("f", TFun "" TInt TInt), "f",
Var (TFun "" TInt TInt) "f"),
(TyCtxt.singleton ("f", TFun "" TInt TInt), "f 3",
App TInt (Var (TFun "" TInt TInt) "f") (int 3)),
(TyCtxt.singleton ("x", TVar "x0"), "x - 1",
OpSub TInt (Var TInt "x") (int 1)),
(TyCtxt.fromListTy [("x", TVar "x0"), ("y", TVar "x1")], "x y",
App (TVar "x2")
(Var (TFun "x3" (TVar "x1") (TVar "x2")) "x")
(Var (TVar "x1") "y")),
(TyCtxt.empty,
"let n = 10 in " ++
"let foo = fun g -> g 10 in " ++
"foo (fun x -> x + n) + foo (fun x -> x)",
Let TInt ("n",TSType TInt) (int 10)
(Let TInt ("foo",TSForall "x1"
(TSType (TFun "x3" (TFun "x2" TInt (TVar "x1")) (TVar "x1"))))
(Abs (TFun "x3" (TFun "x2" TInt (TVar "x1")) (TVar "x1")) "g"
(App (TVar "x1")
(Var (TFun "x2" TInt (TVar "x1")) "g") (int 10)))
(OpAdd TInt
(App TInt (Var (TFun "x3" (TFun "x2" TInt TInt) TInt) "foo")
(Abs (TFun "x2" TInt TInt) "x"
(OpAdd TInt (Var TInt "x") (Var TInt "n"))))
(App TInt
(Var (TFun "x3" (TFun "x2" TInt TInt) TInt) "foo")
(Abs (TFun "x2" TInt TInt) "x" (Var TInt "x"))))))
]
testInference2 :: [(String, TyExpr2)]
testInference2 =
let int = LitInt TInt in
let bool = LitBool TBool in [
("fun x -> x",
Abs (TFun [AFun] (TVar "x0") (TVar "x0")) "x"
(Var (TVar "x0") "x")),
("fun x -> fun y -> y",
Abs (TFun [AFun] (TVar "x0") (TFun [AFun] (TVar "x1") (TVar "x1"))) "x"
(Abs (TFun [AFun] (TVar "x1") (TVar "x1")) "y"
(Var (TVar "x1") "y"))),
("fun x -> fun y -> x",
Abs (TFun [AFun] (TVar "x0") (TFun [AClo] (TVar "x1") (TVar "x0"))) "x"
(Abs (TFun [AClo] (TVar "x1") (TVar "x0")) "y"
(Var (TVar "x0") "x"))),
("let f = fun x -> x + 10 in f 10 ",
Let TInt ("f",TSType (TFun [AFun] TInt TInt))
(Abs (TFun [AFun] TInt TInt) "x"
(OpAdd TInt (Var TInt "x") (LitInt TInt 10)))
(App TInt
(Var (TFun [AFun] TInt TInt) "f")
(LitInt TInt 10))),
("let n = 10 in fun x -> x + n",
Let (TFun [AClo] TInt TInt) ("n",TSType TInt)
(LitInt TInt 10)
(Abs (TFun [AClo] TInt TInt) "x"
(OpAdd TInt (Var TInt "x") (Var TInt "n")))),
("let n = 10 in let f = fun x -> x + n in f 10 ",
Let TInt ("n",TSType TInt) (LitInt TInt 10)
(Let TInt ("f",TSType (TFun [AClo] TInt TInt))
(Abs (TFun [AClo] TInt TInt) "x"
(OpAdd TInt (Var TInt "x") (Var TInt "n")))
(App TInt (Var (TFun [AClo] TInt TInt) "f") (LitInt TInt 10)))),
("fun g -> g 10",
Abs (TFun [AFun] (TFun [] TInt (TVar "x1")) (TVar "x1")) "g"
(App (TVar "x1") (Var (TFun [] TInt (TVar "x1")) "g") (LitInt TInt 10))),
("let n = 10 in " ++
"let f = fun x -> x + 10 in " ++
"let c = fun x -> x + n in " ++
"let foo = fun g -> g 10 in " ++
"foo f",
Let TInt ("n",TSType TInt) (LitInt TInt 10)
(Let TInt ("f",TSType (TFun [AFun] TInt TInt))
(Abs (TFun [AFun] TInt TInt) "x"
(OpAdd TInt (Var TInt "x") (LitInt TInt 10)))
(Let TInt ("c",TSType (TFun [AClo] TInt TInt))
(Abs (TFun [AClo] TInt TInt) "x"
(OpAdd TInt (Var TInt "x") (Var TInt "n")))
(Let TInt ("foo",TSForall "x5" (TSType
(TFun [AFun] (TFun [AFun] TInt (TVar "x5")) (TVar "x5"))))
(Abs (TFun [AFun] (TFun [AFun] TInt (TVar "x5")) (TVar "x5")) "g"
(App (TVar "x5")
(Var (TFun [AFun] TInt (TVar "x5")) "g")
(LitInt TInt 10)))
(App TInt
(Var (TFun [AFun] (TFun [AFun] TInt TInt) TInt) "foo")
(Var (TFun [AFun] TInt TInt) "f")))))),
("let n = 10 in " ++
"let f = fun x -> x + 10 in " ++
"let c = fun x -> x + n in " ++
"let foo = fun g -> g 10 in " ++
"foo c",
Let TInt ("n",TSType TInt) (LitInt TInt 10)
(Let TInt ("f",TSType (TFun [AFun] TInt TInt))
(Abs (TFun [AFun] TInt TInt) "x"
(OpAdd TInt (Var TInt "x") (LitInt TInt 10)))
(Let TInt ("c",TSType (TFun [AClo] TInt TInt))
(Abs (TFun [AClo] TInt TInt) "x"
(OpAdd TInt (Var TInt "x") (Var TInt "n")))
(Let TInt ("foo",TSForall "x5" (TSType
(TFun [AFun] (TFun [AClo] TInt (TVar "x5")) (TVar "x5"))))
(Abs (TFun [AFun] (TFun [AClo] TInt (TVar "x5")) (TVar "x5")) "g"
(App (TVar "x5")
(Var (TFun [AClo] TInt (TVar "x5")) "g")
(LitInt TInt 10)))
(App TInt
(Var (TFun [AFun] (TFun [AClo] TInt TInt) TInt) "foo")
(Var (TFun [AClo] TInt TInt) "c")))))),
("let n = 10 in " ++
"let foo = fun g -> g 10 in " ++
"foo (fun x -> x + n) + foo (fun x -> x)",
Let TInt ("n",TSType TInt) (int 10)
(Let TInt ("foo",TSForall "x1"
(TSType (TFun [AFun] (TFun [AFun,AClo] TInt (TVar "x1")) (TVar "x1"))))
(Abs (TFun [AFun] (TFun [AFun,AClo] TInt (TVar "x1")) (TVar "x1")) "g"
(App (TVar "x1")
(Var (TFun [AFun,AClo] TInt (TVar "x1")) "g") (int 10)))
(OpAdd TInt
(App TInt
(Var (TFun [AFun] (TFun [AFun,AClo] TInt TInt) TInt) "foo")
(Abs (TFun [AFun,AClo] TInt TInt) "x"
(OpAdd TInt (Var TInt "x") (Var TInt "n"))))
(App TInt
(Var (TFun [AFun] (TFun [AFun,AClo] TInt TInt) TInt) "foo")
(Abs (TFun [AFun,AClo] TInt TInt) "x" (Var TInt "x")))))),
("let n = 10 in " ++
"let foo = fun g -> g 10 in " ++
"foo (fun x -> x) + foo (fun x -> x + n)",
Let TInt ("n",TSType TInt) (int 10)
(Let TInt ("foo",TSForall "x1"
(TSType (TFun [AFun] (TFun [AFun,AClo] TInt (TVar "x1")) (TVar "x1"))))
(Abs (TFun [AFun] (TFun [AFun,AClo] TInt (TVar "x1")) (TVar "x1")) "g"
(App (TVar "x1")
(Var (TFun [AFun,AClo] TInt (TVar "x1")) "g") (int 10)))
(OpAdd TInt
(App TInt
(Var (TFun [AFun] (TFun [AFun,AClo] TInt TInt) TInt) "foo")
(Abs (TFun [AFun,AClo] TInt TInt) "x" (Var TInt "x")))
(App TInt
(Var (TFun [AFun] (TFun [AFun,AClo] TInt TInt) TInt) "foo")
(Abs (TFun [AFun,AClo] TInt TInt) "x"
(OpAdd TInt (Var TInt "x") (Var TInt "n"))))))),
("let n = 10 in " ++
"let f = fun x -> x + 10 in " ++
"let g = fun x -> x + n in " ++
"let foo = fun g -> g 10 in " ++
"(foo f) + (foo g)",
Let TInt ("n",TSType TInt) (int 10)
(Let TInt ("f",TSType (TFun [AFun,AClo] TInt TInt))
(Abs (TFun [AFun,AClo] TInt TInt) "x"
(OpAdd TInt (Var TInt "x") (int 10)))
(Let TInt ("g",TSType (TFun [AFun,AClo] TInt TInt))
(Abs (TFun [AFun, AClo] TInt TInt) "x"
(OpAdd TInt (Var TInt "x") (Var TInt "n")))
(Let TInt ("foo",TSForall "x5" (TSType
(TFun [AFun] (TFun [AFun,AClo] TInt (TVar "x5")) (TVar "x5"))))
(Abs (TFun [AFun] (TFun [AFun,AClo] TInt (TVar "x5")) (TVar "x5")) "g"
(App (TVar "x5")
(Var (TFun [AFun,AClo] TInt (TVar "x5")) "g")
(LitInt TInt 10)))
(OpAdd TInt
(App TInt
(Var (TFun [AFun] (TFun [AFun,AClo] TInt TInt) TInt) "foo")
(Var (TFun [AFun,AClo] TInt TInt) "f"))
(App TInt
(Var (TFun [AFun] (TFun [AFun,AClo] TInt TInt) TInt) "foo")
(Var (TFun [AFun,AClo] TInt TInt) "g")))))))
]
interpretationTests = [
("4 + 2", ConstInt 6),
("4 - 2", ConstInt 2),
("4 * 2", ConstInt 8),
("4 / 2", ConstInt 2),
("6 + 4 / 2", ConstInt 8),
("2 * 3 + 4 / 2", ConstInt 8),
("2 < 4", ConstBool True),
("4 < 2", ConstBool False),
("let i = fun x -> x in i 0", ConstInt 0),
("let i = fun x -> x in if i true then i 1 else i 2", ConstInt 1),
("let rec sum = fun n -> if n = 0 then 0 else n + sum (n - 1) in sum 3", ConstInt 6)
]
normalFormTests = [
("1", "1"),
("fun x -> x",
"let x0 = (fun x -> x) in\n" ++
"x0"),
("1 + 2",
"let x0 = 1 + 2 in\nx0"),
("1 + 2 + 3",
"let x0 = 1 + 2 in\nlet x1 = x0 + 3 in\nx1"),
("1 + 2 + 3 + 4",
"let x0 = 1 + 2 in\nlet x1 = x0 + 3 in\nlet x2 = x1 + 4 in\nx2"),
("(fun x -> x) true",
"let x0 = (fun x -> x) in\n" ++
"let x1 = x0 true in\n" ++
"x1"),
("let f = fun x -> fun y -> fun z -> x in f 1 2 3",
"let x0 = (fun x -> " ++
"let x1 = (fun y -> " ++
"let x2 = (fun z -> x) in\nx2) in\nx1) in\n" ++
"let x3 = x0 1 in\n" ++
"let x4 = x3 2 in\n" ++
"let x5 = x4 3 in\n" ++
"x5"),
("(fun x -> x) (fun x -> x) true",
"let x0 = (fun x -> x) in\n" ++
"let x1 = (fun x -> x) in\n" ++
"let x2 = x0 x1 in\n" ++
"let x3 = x2 true in\n" ++
"x3"),
("let a = 1 in let b = 2 in a * b",
"let x0 = 1 * 2 in\nx0"),
("let f = fun x -> x in f 1",
"let x0 = (fun x -> x) in\n" ++
"let x1 = x0 1 in\n" ++
"x1"),
("let f = fun x -> x in f 1 + f 2",
"let x0 = (fun x -> x) in\n" ++
"let x1 = x0 1 in\n" ++
"let x2 = x0 2 in\n" ++
"let x3 = x1 + x2 in\n" ++
"x3"),
("let a = 1 in let b = 2 in 3 + a * b",
"let x0 = 1 * 2 in\nlet x1 = 3 + x0 in\nx1"),
("if true then 1 else 2",
"let x0 = if true then 1 else 2 in\nx0"),
("let f = fun x -> x in if true then f 1 else f 2",
"let x0 = (fun x -> x) in\n" ++
"let x1 = " ++
"if true then " ++
"let x2 = x0 1 in\nx2 " ++
"else " ++
"let x3 = x0 2 in\nx3 in\n" ++
"x1"),
("let f = fun x -> if x then 1 else 2 in f true",
"let x0 = (fun x -> " ++
"let x1 = if x then 1 else 2 in\n" ++
"x1) in\n" ++
"let x2 = x0 true in\n" ++
"x2"),
("let rec f = fun x -> fun y -> f y x in f 1 2",
"let rec x0 = (fun x -> " ++
"let x1 = (fun y -> " ++
"let x2 = x0 y in\n" ++
"let x3 = x2 x in\n" ++
"x3) in\n" ++
"x1) in\n" ++
"let x4 = x0 1 in\n" ++
"let x5 = x4 2 in\n" ++
"x5"),
("let rec sum = fun n -> if n = 0 then 0 else n + sum (n - 1) in sum 3",
"let rec x0 = (fun n -> " ++
"let x1 = n = 0 in\n" ++
"let x2 = if x1 then 0 else " ++
"let x3 = n - 1 in\n" ++
"let x4 = x0 x3 in\n" ++
"let x5 = n + x4 in\n" ++
"x5 in\n" ++
"x2) in\n" ++
"let x6 = x0 3 in\n" ++
"x6"),
("let x = 5 in let f = fun y -> x + y in f 3",
"let x0 = (fun y -> " ++
"let x1 = 5 + y in\n" ++
"x1) in\n" ++
"let x2 = x0 3 in\n" ++
"x2")
]
fvTests = [
(TyCtxt.empty, "fun x -> x", []),
(TyCtxt.singleton ("y", TInt), "fun x -> y", ["y"]),
(TyCtxt.singleton ("y", TInt), "fun x -> x + y", ["y"]),
(TyCtxt.empty, "let x = 2 + 3 in x", []),
(TyCtxt.empty, "let x = 5 in let f = fun y -> x + y in f 3", []),
(TyCtxt.singleton ("sum", TFun "" TInt TInt), "fun n -> if n = 0 then 0 else n + sum (n - 1)", ["sum"])
]
closureTests = [
("let n = 1 * 5 in " ++
"let f = fun x -> fun y -> x + y + n in " ++
"f 1 2",
"let x0 = 1 * 5 in\n" ++
"let x1 = Closure (fun env -> fun x -> " ++
"let x2 = Closure (fun env -> fun y -> " ++
"let x3 = env.0 + y in\n" ++
"let x4 = x3 + env.1 in\n" ++
"x4, [x,env.0]) in\n" ++
"x2, [x0]) in\n" ++
"let x5 = x1 1 in\n" ++
"let x6 = x5 2 in\n" ++
"x6"),
("let x = 5 in let f = fun y -> x + y in f 3",
"let x0 = Closure (fun env -> fun y -> " ++
"let x1 = 5 + y in\n" ++
"x1, []) in\n" ++
"let x2 = x0 3 in\n" ++
"x2"),
("let rec f = fun x -> fun y -> f y x in f 1 2",
"let x0 = Closure (fun env -> fun x -> " ++
"let x1 = Closure (fun env -> fun y -> " ++
"let x2 = env.0 y in\n" ++
"let x3 = x2 env.1 in\n" ++
"x3, [env.self,x]) in\n" ++
"x1, []) in\n" ++
"let x4 = x0 1 in\n" ++
"let x5 = x4 2 in\n" ++
"x5"),
("let x = 5 + 3 in let f = fun y -> x + y in f 3",
"let x0 = 5 + 3 in\n" ++
"let x1 = Closure (fun env -> fun y -> " ++
"let x2 = env.0 + y in\n" ++
"x2, [x0]) in\n" ++
"let x3 = x1 3 in\n" ++
"x3"),
("let rec sum = fun n -> if n = 0 then 0 else n + sum (n - 1) in sum 3",
"let x0 = Closure (fun env -> fun n -> " ++
"let x1 = n = 0 in\n" ++
"let x2 = if x1 then 0 else " ++
"let x3 = n - 1 in\n" ++
"let x4 = env.self x3 in\n" ++
"let x5 = n + x4 in\n" ++
"x5 in\n" ++
"x2, []) in\n" ++
"let x6 = x0 3 in\n" ++
"x6")
]
testCompilation :: (String, Expr () ()) -> Test
testCompilation (prog, expected) =
TestLabel ("program is '" ++ prog ++ "'") $
TestCase $
assertEqual prog expected (Parser.parse (Lexer.alexScanTokens prog))
testComparaison :: (String, String) -> Test
testComparaison (prog1, prog2) =
TestLabel ("program are '" ++ prog1 ++ "' and '" ++ prog2 ++ "'") $
TestCase $
assertEqual prog1
(Parser.parse (Lexer.alexScanTokens prog1))
(Parser.parse (Lexer.alexScanTokens prog2))
-- | Build a test asserting that type inference over @prog@ (in context
-- @ctxt@) produces the expected typed expression.
testTypeInference :: (Context, String, Expr TypeSchema Type) -> Test
testTypeInference (ctxt, prog, expr) =
  let term = Parser.parse (Lexer.alexScanTokens prog)
  in TestLabel ("program '" ++ prog ++ "' has type '" ++
                show (Expr.getType expr) ++ "'") $
     TestCase $
     case TyInferance.infer ctxt term of
       -- NOTE(review): the body of this branch was missing in the original
       -- (the case arm had no right-hand side, which does not parse).
       -- Restored by analogy with 'testTypeInference2': compare the typed
       -- expression only; the substitution and constraints are not checked.
       Right (_subst, _cs, expr') -> assertEqual "" expr expr'
       Left msg -> assertFailure msg
-- | Build a test asserting that the second inference algorithm, run in the
-- empty context, produces the expected typed expression.
testTypeInference2 :: (String, TyExpr2) -> Test
testTypeInference2 (prog, expr) =
  TestLabel ("program '" ++ prog ++ "' has type '" ++
             show (Expr.getType expr) ++ "'") $
  TestCase $
  -- An inference error fails the test with its message; otherwise compare ASTs.
  either assertFailure (assertEqual "" expr) inferred
  where
    inferred = TyInferance.infer2 TyCtxt.empty
                 (Parser.parse (Lexer.alexScanTokens prog))
-- | Build a test asserting that evaluating @prog@ in an empty environment
-- yields the expected value.
testInterpreter :: (String, Value () ()) -> Test
testInterpreter (prog, val) =
  TestLabel ("program '" ++ prog ++ "' evaluate to '" ++ show val ++ "'") $
  TestCase $
  -- 'Nothing' means the interpreter got stuck; otherwise compare values.
  maybe (assertFailure "evaluation went wrong")
        (assertEqual "" val)
        (Interpreter.eval [] (Parser.parse (Lexer.alexScanTokens prog)))
-- | Build a test asserting that the normal form of the type-checked program
-- pretty-prints to the expected string.
testNormalForm :: (String, String) -> Test
testNormalForm (prog, nf) =
  TestLabel prog $ TestCase $
  either assertFailure
         (assertEqual "" nf . show . Compiler.toNormalForm)
         (TyInferance.infer2 TyCtxt.empty term)
  where
    term = Parser.parse (Lexer.alexScanTokens prog)
-- | Build a test asserting that the free variables of the program's normal
-- form (names only, in order) match the expected list.
testFreeVariables :: (Context, String, [String]) -> Test
testFreeVariables (ctxt, prog, fvs) =
  TestLabel prog $ TestCase $
  either assertFailure checkFvs (TyInferance.infer2 ctxt term)
  where
    term = Parser.parse (Lexer.alexScanTokens prog)
    -- 'Compiler.fv' returns (name, info) pairs; only the names are compared.
    checkFvs expr =
      assertEqual "" fvs (map fst (Compiler.fv (Compiler.toNormalForm expr)))
-- | Build a test asserting that closure conversion of the program's normal
-- form pretty-prints to the expected string.
testClosure :: (String, String) -> Test
testClosure (prog, nfc) =
  TestLabel prog $ TestCase $
  case TyInferance.infer2 TyCtxt.empty term of
    Left msg   -> assertFailure msg
    Right expr ->
      -- Pipeline: typed AST -> normal form -> closure-converted form.
      assertEqual "" nfc
        (show (Compiler.toClosure (Compiler.toNormalForm expr)))
  where
    term = Parser.parse (Lexer.alexScanTokens prog)
-- | The complete suite: one labelled group per compiler stage under test.
tests =
  TestList
    [ group "testing (Parser.parse . Lexer.alexScanTokens)" testCompilation testCases
    , group "testing (parse prog1 == parse prog2)" testComparaison testEquivalences
    , group "testing (infer (parse prog))" testTypeInference testInference
    , group "testing (infer2 (parse prog))" testTypeInference2 testInference2
    , group "testing (eval [] (parse prog))" testInterpreter interpretationTests
    , group "testing (toNormalForm (parse prog))" testNormalForm normalFormTests
    , group "Compiler.fv" testFreeVariables fvTests
    , group "Compiler.toClosure" testClosure closureTests
    ]
  where
    -- Turn a list of cases into one labelled group of tests.
    group lbl mkTest cases = TestLabel lbl (TestList (map mkTest cases))
-- | Run the whole suite, discarding the summary counts returned by HUnit.
main :: IO ()
main = do
  _ <- runTestTT tests
  return ()
|
b5e5be7fb96376cab95b4078c0ea769c5973194e9f49a5a52b578c66515bb644 | darrenldl/ProVerif-ATP | pilexer.ml | # 28 "pilexer.mll"
open Parsing_helper
open Piparser
(* Build a hash table with initial bucket hint [size], pre-populated with
   every (key, value) pair of [init]. *)
let create_hashtable size init =
  let tbl = Hashtbl.create size in
  let insert (k, v) = Hashtbl.add tbl k v in
  List.iter insert init;
  tbl
(* Untyped front-end *)
(* Keyword table for the pi-calculus front-end: maps each source keyword to
   its parser token.  The argument 11 is only the initial bucket hint of the
   hash table, not the number of entries. *)
let keyword_table =
  create_hashtable 11
  [ "data", DATA;
  "param", PARAM;
  "private", PRIVATE;
(* Common keywords *)
  "new", NEW;
  "out", OUT;
  "in", IN;
  "if", IF;
  "then", THEN;
  "else", ELSE;
  "fun", FUN;
  "equation", EQUATION;
  "reduc", REDUCTION;
  "pred", PREDICATE;
  "process", PROCESS;
  "let", LET;
  "query", QUERY;
  "putbegin", PUTBEGIN;
  "noninterf", NONINTERF;
  "event", EVENT;
  "not", NOT;
  "elimtrue", ELIMTRUE;
  "free", FREE;
  "clauses", CLAUSES;
  "suchthat", SUCHTHAT;
  "nounif", NOUNIF;
  "phase", PHASE;
  "sync", BARRIER;
  "among", AMONG;
  "weaksecret", WEAKSECRET;
  "choice", CHOICE;
  "diff", CHOICE; (* "diff" is accepted as an alias of "choice": both map to the same token *)
  "otherwise", OTHERWISE;
  "can", CANTEXT;
  "fail", FAIL;
  "where", WHERE]
# 54 "pilexer.ml"
let __ocaml_lex_tables = {
Lexing.lex_base =
"\000\000\229\255\230\255\078\000\000\000\236\255\237\255\238\255\
\239\255\240\255\002\000\242\255\243\255\244\255\245\255\246\255\
\247\255\249\255\001\000\077\000\141\000\094\001\005\000\001\000\
\255\255\252\255\249\001\250\255\002\000\231\255\235\255\232\255\
\030\000\032\000\234\255\233\255\185\000\252\255\253\255\005\000\
\254\255\054\000\255\255";
Lexing.lex_backtrk =
"\255\255\255\255\255\255\026\000\026\000\255\255\255\255\255\255\
\255\255\255\255\014\000\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\007\000\004\000\026\000\002\000\001\000\000\000\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\001\000\
\255\255\003\000\255\255";
Lexing.lex_default =
"\001\000\000\000\000\000\255\255\255\255\000\000\000\000\000\000\
\000\000\000\000\255\255\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\255\255\255\255\026\000\255\255\255\255\255\255\
\000\000\000\000\026\000\000\000\255\255\000\000\000\000\000\000\
\255\255\255\255\000\000\000\000\037\000\000\000\000\000\255\255\
\000\000\255\255\000\000";
Lexing.lex_trans =
"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\022\000\024\000\024\000\022\000\023\000\022\000\040\000\
\000\000\022\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\022\000\011\000\020\000\000\000\000\000\022\000\005\000\000\000\
\018\000\016\000\007\000\027\000\017\000\004\000\008\000\009\000\
\019\000\019\000\019\000\019\000\019\000\019\000\019\000\019\000\
\019\000\019\000\006\000\012\000\003\000\010\000\030\000\028\000\
\029\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\015\000\035\000\014\000\034\000\042\000\
\000\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\033\000\013\000\019\000\019\000\019\000\
\019\000\019\000\019\000\019\000\019\000\019\000\019\000\000\000\
\000\000\000\000\000\000\032\000\031\000\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\000\000\000\000\025\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\040\000\000\000\000\000\039\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\041\000\000\000\000\000\000\000\000\000\
\000\000\255\255\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\255\255\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\255\255\021\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\255\255\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\038\000\000\000\000\000\000\000\021\000\000\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\000\000\000\000\025\000\000\000\000\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\000\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\255\255\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\255\255\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\255\255\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\255\255";
Lexing.lex_check =
"\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\000\000\000\000\023\000\000\000\000\000\022\000\039\000\
\255\255\022\000\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\000\000\000\000\000\000\255\255\255\255\022\000\000\000\255\255\
\000\000\000\000\000\000\018\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\004\000\010\000\
\028\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\032\000\000\000\033\000\041\000\
\255\255\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\003\000\000\000\019\000\019\000\019\000\
\019\000\019\000\019\000\019\000\019\000\019\000\019\000\255\255\
\255\255\255\255\255\255\003\000\003\000\020\000\020\000\020\000\
\020\000\020\000\020\000\020\000\020\000\020\000\020\000\020\000\
\020\000\020\000\020\000\020\000\020\000\020\000\020\000\020\000\
\020\000\020\000\020\000\020\000\020\000\020\000\020\000\020\000\
\020\000\020\000\020\000\020\000\020\000\255\255\255\255\020\000\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\036\000\255\255\255\255\036\000\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\036\000\255\255\255\255\255\255\255\255\
\255\255\020\000\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\000\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\020\000\020\000\020\000\020\000\
\020\000\020\000\020\000\020\000\020\000\020\000\020\000\020\000\
\020\000\020\000\020\000\020\000\020\000\020\000\020\000\020\000\
\020\000\020\000\020\000\020\000\020\000\020\000\020\000\020\000\
\020\000\020\000\020\000\020\000\020\000\020\000\020\000\020\000\
\020\000\020\000\020\000\020\000\020\000\020\000\020\000\020\000\
\020\000\020\000\020\000\020\000\020\000\020\000\020\000\020\000\
\020\000\020\000\020\000\020\000\020\000\020\000\020\000\020\000\
\020\000\020\000\020\000\020\000\020\000\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\020\000\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\020\000\021\000\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\020\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\036\000\255\255\255\255\255\255\021\000\255\255\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\255\255\255\255\026\000\255\255\255\255\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\255\255\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\026\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\026\000\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\026\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\026\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\026\000";
Lexing.lex_base_code =
"";
Lexing.lex_backtrk_code =
"";
Lexing.lex_default_code =
"";
Lexing.lex_trans_code =
"";
Lexing.lex_check_code =
"";
Lexing.lex_code =
"";
}
let rec token lexbuf =
__ocaml_lex_token_rec lexbuf 0
and __ocaml_lex_token_rec lexbuf __ocaml_lex_state =
match Lexing.engine __ocaml_lex_tables __ocaml_lex_state lexbuf with
| 0 ->
# 82 "pilexer.mll"
( Lexing.new_line lexbuf; token lexbuf )
# 292 "pilexer.ml"
| 1 ->
# 84 "pilexer.mll"
( token lexbuf )
# 297 "pilexer.ml"
| 2 ->
# 86 "pilexer.mll"
( let s = Lexing.lexeme lexbuf in
try
Hashtbl.find keyword_table s
with
Not_found ->
IDENT (s, extent lexbuf)
)
# 308 "pilexer.ml"
| 3 ->
# 94 "pilexer.mll"
( let s = Lexing.lexeme lexbuf in
STRING (String.sub s 1 (String.length s - 2), extent lexbuf)
)
# 315 "pilexer.ml"
| 4 ->
# 98 "pilexer.mll"
(
try
INT (int_of_string(Lexing.lexeme lexbuf))
with Failure _ ->
input_error "Incorrect integer" (extent lexbuf)
)
# 325 "pilexer.ml"
| 5 ->
# 104 "pilexer.mll"
(
comment lexbuf;
token lexbuf
)
# 333 "pilexer.ml"
| 6 ->
# 108 "pilexer.mll"
( COMMA )
# 338 "pilexer.ml"
| 7 ->
# 109 "pilexer.mll"
( LPAREN )
# 343 "pilexer.ml"
| 8 ->
# 110 "pilexer.mll"
( RPAREN )
# 348 "pilexer.ml"
| 9 ->
# 111 "pilexer.mll"
( LBRACKET )
# 353 "pilexer.ml"
| 10 ->
# 112 "pilexer.mll"
( RBRACKET )
# 358 "pilexer.ml"
| 11 ->
# 113 "pilexer.mll"
( BAR )
# 363 "pilexer.ml"
| 12 ->
# 114 "pilexer.mll"
( SEMI )
# 368 "pilexer.ml"
| 13 ->
# 115 "pilexer.mll"
( REPL )
# 373 "pilexer.ml"
| 14 ->
# 116 "pilexer.mll"
( EQUAL )
# 378 "pilexer.ml"
| 15 ->
# 117 "pilexer.mll"
( SLASH )
# 383 "pilexer.ml"
| 16 ->
# 118 "pilexer.mll"
( DOT )
# 388 "pilexer.ml"
| 17 ->
# 119 "pilexer.mll"
( STAR )
# 393 "pilexer.ml"
| 18 ->
# 120 "pilexer.mll"
( COLON )
# 398 "pilexer.ml"
| 19 ->
# 121 "pilexer.mll"
( WEDGE )
# 403 "pilexer.ml"
| 20 ->
# 122 "pilexer.mll"
( RED )
# 408 "pilexer.ml"
| 21 ->
# 123 "pilexer.mll"
( EQUIV )
# 413 "pilexer.ml"
| 22 ->
# 124 "pilexer.mll"
( EQUIVEQ )
# 418 "pilexer.ml"
| 23 ->
# 125 "pilexer.mll"
( DIFF )
# 423 "pilexer.ml"
| 24 ->
# 126 "pilexer.mll"
( BEFORE )
# 428 "pilexer.ml"
| 25 ->
# 127 "pilexer.mll"
( EOF )
# 433 "pilexer.ml"
| 26 ->
# 128 "pilexer.mll"
( input_error "Illegal character" (extent lexbuf) )
# 438 "pilexer.ml"
| __ocaml_lex_state -> lexbuf.Lexing.refill_buff lexbuf;
__ocaml_lex_token_rec lexbuf __ocaml_lex_state
and comment lexbuf =
__ocaml_lex_comment_rec lexbuf 36
and __ocaml_lex_comment_rec lexbuf __ocaml_lex_state =
match Lexing.engine __ocaml_lex_tables __ocaml_lex_state lexbuf with
| 0 ->
# 131 "pilexer.mll"
( )
# 450 "pilexer.ml"
| 1 ->
# 133 "pilexer.mll"
( Lexing.new_line lexbuf; comment lexbuf )
# 455 "pilexer.ml"
| 2 ->
# 134 "pilexer.mll"
( )
# 460 "pilexer.ml"
| 3 ->
# 135 "pilexer.mll"
( comment lexbuf )
# 465 "pilexer.ml"
| __ocaml_lex_state -> lexbuf.Lexing.refill_buff lexbuf;
__ocaml_lex_comment_rec lexbuf __ocaml_lex_state
;;
| null | https://raw.githubusercontent.com/darrenldl/ProVerif-ATP/7af6cfb9e0550ecdb072c471e15b8f22b07408bd/proverif2.00/src/pilexer.ml | ocaml | Untyped front-end
Common keywords | # 28 "pilexer.mll"
open Parsing_helper
open Piparser
let create_hashtable size init =
let tbl = Hashtbl.create size in
List.iter (fun (key,data) -> Hashtbl.add tbl key data) init;
tbl
let keyword_table =
create_hashtable 11
[ "data", DATA;
"param", PARAM;
"private", PRIVATE;
"new", NEW;
"out", OUT;
"in", IN;
"if", IF;
"then", THEN;
"else", ELSE;
"fun", FUN;
"equation", EQUATION;
"reduc", REDUCTION;
"pred", PREDICATE;
"process", PROCESS;
"let", LET;
"query", QUERY;
"putbegin", PUTBEGIN;
"noninterf", NONINTERF;
"event", EVENT;
"not", NOT;
"elimtrue", ELIMTRUE;
"free", FREE;
"clauses", CLAUSES;
"suchthat", SUCHTHAT;
"nounif", NOUNIF;
"phase", PHASE;
"sync", BARRIER;
"among", AMONG;
"weaksecret", WEAKSECRET;
"choice", CHOICE;
"diff", CHOICE;
"otherwise", OTHERWISE;
"can", CANTEXT;
"fail", FAIL;
"where", WHERE]
# 54 "pilexer.ml"
let __ocaml_lex_tables = {
Lexing.lex_base =
"\000\000\229\255\230\255\078\000\000\000\236\255\237\255\238\255\
\239\255\240\255\002\000\242\255\243\255\244\255\245\255\246\255\
\247\255\249\255\001\000\077\000\141\000\094\001\005\000\001\000\
\255\255\252\255\249\001\250\255\002\000\231\255\235\255\232\255\
\030\000\032\000\234\255\233\255\185\000\252\255\253\255\005\000\
\254\255\054\000\255\255";
Lexing.lex_backtrk =
"\255\255\255\255\255\255\026\000\026\000\255\255\255\255\255\255\
\255\255\255\255\014\000\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\007\000\004\000\026\000\002\000\001\000\000\000\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\001\000\
\255\255\003\000\255\255";
Lexing.lex_default =
"\001\000\000\000\000\000\255\255\255\255\000\000\000\000\000\000\
\000\000\000\000\255\255\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\255\255\255\255\026\000\255\255\255\255\255\255\
\000\000\000\000\026\000\000\000\255\255\000\000\000\000\000\000\
\255\255\255\255\000\000\000\000\037\000\000\000\000\000\255\255\
\000\000\255\255\000\000";
Lexing.lex_trans =
"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\022\000\024\000\024\000\022\000\023\000\022\000\040\000\
\000\000\022\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\022\000\011\000\020\000\000\000\000\000\022\000\005\000\000\000\
\018\000\016\000\007\000\027\000\017\000\004\000\008\000\009\000\
\019\000\019\000\019\000\019\000\019\000\019\000\019\000\019\000\
\019\000\019\000\006\000\012\000\003\000\010\000\030\000\028\000\
\029\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\015\000\035\000\014\000\034\000\042\000\
\000\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\033\000\013\000\019\000\019\000\019\000\
\019\000\019\000\019\000\019\000\019\000\019\000\019\000\000\000\
\000\000\000\000\000\000\032\000\031\000\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\000\000\000\000\025\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\040\000\000\000\000\000\039\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\041\000\000\000\000\000\000\000\000\000\
\000\000\255\255\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\255\255\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\255\255\021\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\255\255\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\038\000\000\000\000\000\000\000\021\000\000\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\000\000\000\000\025\000\000\000\000\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\000\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\255\255\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\255\255\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\255\255\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\255\255";
Lexing.lex_check =
"\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\000\000\000\000\023\000\000\000\000\000\022\000\039\000\
\255\255\022\000\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\000\000\000\000\000\000\255\255\255\255\022\000\000\000\255\255\
\000\000\000\000\000\000\018\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\004\000\010\000\
\028\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\032\000\000\000\033\000\041\000\
\255\255\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\003\000\000\000\019\000\019\000\019\000\
\019\000\019\000\019\000\019\000\019\000\019\000\019\000\255\255\
\255\255\255\255\255\255\003\000\003\000\020\000\020\000\020\000\
\020\000\020\000\020\000\020\000\020\000\020\000\020\000\020\000\
\020\000\020\000\020\000\020\000\020\000\020\000\020\000\020\000\
\020\000\020\000\020\000\020\000\020\000\020\000\020\000\020\000\
\020\000\020\000\020\000\020\000\020\000\255\255\255\255\020\000\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\036\000\255\255\255\255\036\000\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\036\000\255\255\255\255\255\255\255\255\
\255\255\020\000\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\000\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\020\000\020\000\020\000\020\000\
\020\000\020\000\020\000\020\000\020\000\020\000\020\000\020\000\
\020\000\020\000\020\000\020\000\020\000\020\000\020\000\020\000\
\020\000\020\000\020\000\020\000\020\000\020\000\020\000\020\000\
\020\000\020\000\020\000\020\000\020\000\020\000\020\000\020\000\
\020\000\020\000\020\000\020\000\020\000\020\000\020\000\020\000\
\020\000\020\000\020\000\020\000\020\000\020\000\020\000\020\000\
\020\000\020\000\020\000\020\000\020\000\020\000\020\000\020\000\
\020\000\020\000\020\000\020\000\020\000\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\020\000\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\020\000\021\000\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\020\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\036\000\255\255\255\255\255\255\021\000\255\255\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\255\255\255\255\026\000\255\255\255\255\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\255\255\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\026\000\021\000\021\000\
\021\000\021\000\021\000\021\000\021\000\021\000\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\026\000\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\026\000\026\000\026\000\026\000\026\000\026\000\026\000\
\026\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\026\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\026\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\026\000";
Lexing.lex_base_code =
"";
Lexing.lex_backtrk_code =
"";
Lexing.lex_default_code =
"";
Lexing.lex_trans_code =
"";
Lexing.lex_check_code =
"";
Lexing.lex_code =
"";
}
let rec token lexbuf =
__ocaml_lex_token_rec lexbuf 0
and __ocaml_lex_token_rec lexbuf __ocaml_lex_state =
match Lexing.engine __ocaml_lex_tables __ocaml_lex_state lexbuf with
| 0 ->
# 82 "pilexer.mll"
( Lexing.new_line lexbuf; token lexbuf )
# 292 "pilexer.ml"
| 1 ->
# 84 "pilexer.mll"
( token lexbuf )
# 297 "pilexer.ml"
| 2 ->
# 86 "pilexer.mll"
( let s = Lexing.lexeme lexbuf in
try
Hashtbl.find keyword_table s
with
Not_found ->
IDENT (s, extent lexbuf)
)
# 308 "pilexer.ml"
| 3 ->
# 94 "pilexer.mll"
( let s = Lexing.lexeme lexbuf in
STRING (String.sub s 1 (String.length s - 2), extent lexbuf)
)
# 315 "pilexer.ml"
| 4 ->
# 98 "pilexer.mll"
(
try
INT (int_of_string(Lexing.lexeme lexbuf))
with Failure _ ->
input_error "Incorrect integer" (extent lexbuf)
)
# 325 "pilexer.ml"
| 5 ->
# 104 "pilexer.mll"
(
comment lexbuf;
token lexbuf
)
# 333 "pilexer.ml"
| 6 ->
# 108 "pilexer.mll"
( COMMA )
# 338 "pilexer.ml"
| 7 ->
# 109 "pilexer.mll"
( LPAREN )
# 343 "pilexer.ml"
| 8 ->
# 110 "pilexer.mll"
( RPAREN )
# 348 "pilexer.ml"
| 9 ->
# 111 "pilexer.mll"
( LBRACKET )
# 353 "pilexer.ml"
| 10 ->
# 112 "pilexer.mll"
( RBRACKET )
# 358 "pilexer.ml"
| 11 ->
# 113 "pilexer.mll"
( BAR )
# 363 "pilexer.ml"
| 12 ->
# 114 "pilexer.mll"
( SEMI )
# 368 "pilexer.ml"
| 13 ->
# 115 "pilexer.mll"
( REPL )
# 373 "pilexer.ml"
| 14 ->
# 116 "pilexer.mll"
( EQUAL )
# 378 "pilexer.ml"
| 15 ->
# 117 "pilexer.mll"
( SLASH )
# 383 "pilexer.ml"
| 16 ->
# 118 "pilexer.mll"
( DOT )
# 388 "pilexer.ml"
| 17 ->
# 119 "pilexer.mll"
( STAR )
# 393 "pilexer.ml"
| 18 ->
# 120 "pilexer.mll"
( COLON )
# 398 "pilexer.ml"
| 19 ->
# 121 "pilexer.mll"
( WEDGE )
# 403 "pilexer.ml"
| 20 ->
# 122 "pilexer.mll"
( RED )
# 408 "pilexer.ml"
| 21 ->
# 123 "pilexer.mll"
( EQUIV )
# 413 "pilexer.ml"
| 22 ->
# 124 "pilexer.mll"
( EQUIVEQ )
# 418 "pilexer.ml"
| 23 ->
# 125 "pilexer.mll"
( DIFF )
# 423 "pilexer.ml"
| 24 ->
# 126 "pilexer.mll"
( BEFORE )
# 428 "pilexer.ml"
| 25 ->
# 127 "pilexer.mll"
( EOF )
# 433 "pilexer.ml"
| 26 ->
# 128 "pilexer.mll"
( input_error "Illegal character" (extent lexbuf) )
# 438 "pilexer.ml"
| __ocaml_lex_state -> lexbuf.Lexing.refill_buff lexbuf;
__ocaml_lex_token_rec lexbuf __ocaml_lex_state
and comment lexbuf =
__ocaml_lex_comment_rec lexbuf 36
and __ocaml_lex_comment_rec lexbuf __ocaml_lex_state =
match Lexing.engine __ocaml_lex_tables __ocaml_lex_state lexbuf with
| 0 ->
# 131 "pilexer.mll"
( )
# 450 "pilexer.ml"
| 1 ->
# 133 "pilexer.mll"
( Lexing.new_line lexbuf; comment lexbuf )
# 455 "pilexer.ml"
| 2 ->
# 134 "pilexer.mll"
( )
# 460 "pilexer.ml"
| 3 ->
# 135 "pilexer.mll"
( comment lexbuf )
# 465 "pilexer.ml"
| __ocaml_lex_state -> lexbuf.Lexing.refill_buff lexbuf;
__ocaml_lex_comment_rec lexbuf __ocaml_lex_state
;;
|
5f0efb79c1f0da922158e03442047035a6d7ac667872d8b1c33debf6b6a943a5 | shayan-najd/NativeMetaprogramming | T9858d.hs | # LANGUAGE DataKinds #
module Main where
import Data.Typeable
data A = A
main = print $ typeRep (Proxy :: Proxy A) == typeRep (Proxy :: Proxy 'A)
| null | https://raw.githubusercontent.com/shayan-najd/NativeMetaprogramming/24e5f85990642d3f0b0044be4327b8f52fce2ba3/testsuite/tests/typecheck/should_run/T9858d.hs | haskell | # LANGUAGE DataKinds #
module Main where
import Data.Typeable
data A = A
main = print $ typeRep (Proxy :: Proxy A) == typeRep (Proxy :: Proxy 'A)
| |
0c0caa6de710780d64a30e77b1a8227c836f8858f1c829d06563b555fd0eec68 | mfoemmel/erlang-otp | io_lib_pretty.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 1996 - 2009 . All Rights Reserved .
%%
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at /.
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
%%
-module(io_lib_pretty).
Pretty printing Erlang terms
%%%
%%% In this module "print" means the formatted printing while "write"
means just writing out onto one line .
-export([print/1,print/2,print/3,print/4,print/5,print/6]).
%%%
%%% Exported functions
%%%
%% print(Term) -> [Chars]
print(Term , Column , , Depth ) - > [ Chars ]
%% Depth = -1 gives unlimited print depth. Use io_lib:write for atomic terms.
print(Term) ->
print(Term, 1, 80, -1).
print(Term , RecDefFun ) - > [ Chars ]
print(Term , Depth , RecDefFun ) - > [ Chars ]
RecDefFun = fun(Tag , ) - > [ FieldTag ] | no
%% Used by the shell for printing records.
print(Term, RecDefFun) ->
print(Term, -1, RecDefFun).
print(Term, Depth, RecDefFun) ->
print(Term, 1, 80, Depth, RecDefFun).
print(Term, Col, Ll, D) ->
print(Term, Col, Ll, D, _M=-1, no_fun).
print(Term, Col, Ll, D, RecDefFun) ->
print(Term, Col, Ll, D, _M=-1, RecDefFun).
print(_, _, _, 0, _M, _RF) -> "...";
print(Term, Col, Ll, D, M, RecDefFun) when Col =< 0 ->
print(Term, 1, Ll, D, M, RecDefFun);
print(Term, Col, Ll, D, M0, RecDefFun) when is_tuple(Term);
is_list(Term) ->
If = {_S, Len} = print_length(Term, D, RecDefFun),
M = max_cs(M0, Len),
if
Len < Ll - Col, Len =< M ->
write(If);
true ->
TInd = while_fail([-1, 4],
fun(I) -> cind(If, Col, Ll, M, I, 0, 0) end,
1),
pp(If, Col, Ll, M, TInd, indent(Col), 0, 0)
end;
print(<<_/bitstring>>=Term, Col, Ll, D, M0, RecDefFun) ->
If = {_S, Len} = print_length(Term, D, RecDefFun),
M = max_cs(M0, Len),
if
Len < Ll - Col, Len =< M ->
write(If);
true ->
TInd = while_fail([-1, 4],
fun(I) -> cind(If, Col, Ll, M, I, 0, 0) end,
1),
pp(If, Col, Ll, M, TInd, indent(Col), 0, 0)
end;
print(Term, _Col, _Ll, _D, _M, _RF) ->
io_lib:write(Term).
%%%
%%% Local functions
%%%
max_cs(M, Len) when M < 0 ->
Len;
max_cs(M, _Len) ->
M.
-define(ATM(T), is_list(element(1, T))).
-define(ATM_FLD(Field), ?ATM(element(4, element(1, Field)))).
pp({_S, Len} = If, Col, Ll, M, _TInd, _Ind, LD, W)
when Len < Ll - Col - LD, Len + W + LD =< M ->
write(If);
pp({{list,L}, _Len}, Col, Ll, M, TInd, Ind, LD, W) ->
[$[, pp_list(L, Col + 1, Ll, M, TInd, indent(1, Ind), LD, $|, W + 1), $]];
pp({{tuple,true,L}, _Len}, Col, Ll, M, TInd, Ind, LD, W) ->
[${, pp_tag_tuple(L, Col, Ll, M, TInd, Ind, LD, W + 1), $}];
pp({{tuple,false,L}, _Len}, Col, Ll, M, TInd, Ind, LD, W) ->
[${, pp_list(L, Col + 1, Ll, M, TInd, indent(1, Ind), LD, $,, W + 1), $}];
pp({{record,[{Name,NLen} | L]}, _Len}, Col, Ll, M, TInd, Ind, LD, W) ->
[Name, ${, pp_record(L, NLen, Col, Ll, M, TInd, Ind, LD, W + NLen+1), $}];
pp({{bin,S}, _Len}, Col, Ll, M, _TInd, Ind, LD, W) ->
pp_binary(S, Col + 2, Ll, M, indent(2, Ind), LD, W);
pp({S, _Len}, _Col, _Ll, _M, _TInd, _Ind, _LD, _W) ->
S.
%% Print a tagged tuple by indenting the rest of the elements
differently to the tag . Tuple has size > = 2 .
pp_tag_tuple([{Tag,Tlen} | L], Col, Ll, M, TInd, Ind, LD, W) ->
TagInd = Tlen + 2,
Tcol = Col + TagInd,
S = $,,
if
TInd > 0, TagInd > TInd ->
Col1 = Col + TInd,
Indent = indent(TInd, Ind),
[Tag|pp_tail(L, Col1, Tcol, Ll, M, TInd, Indent, LD, S, W+Tlen)];
true ->
Indent = indent(TagInd, Ind),
[Tag, S | pp_list(L, Tcol, Ll, M, TInd, Indent, LD, S, W+Tlen+1)]
end.
pp_record([], _Nlen, _Col, _Ll, _M, _TInd, _Ind, _LD, _W) ->
"";
pp_record({dots, _}, _Nlen, _Col, _Ll, _M, _TInd, _Ind, _LD, _W) ->
"...";
pp_record([F | Fs], Nlen, Col0, Ll, M, TInd, Ind0, LD, W0) ->
Nind = Nlen + 1,
{Col, Ind, S, W} = rec_indent(Nind, TInd, Col0, Ind0, W0),
{FS, FW} = pp_field(F, Col, Ll, M, TInd, Ind, last_depth(Fs, LD), W),
[S, FS | pp_fields_tail(Fs, Col, Col + FW, Ll, M, TInd, Ind, LD, W + FW)].
pp_fields_tail([], _Col0, _Col, _Ll, _M, _TInd, _Ind, _LD, _W) ->
"";
pp_fields_tail({dots, _}, _Col0, _Col, _M, _Ll, _TInd, _Ind, _LD, _W) ->
",...";
pp_fields_tail([{_, Len}=F | Fs], Col0, Col, Ll, M, TInd, Ind, LD, W) ->
LD1 = last_depth(Fs, LD),
ELen = 1 + Len,
if
LD1 =:= 0, ELen + 1 < Ll - Col, W + ELen + 1 =< M, ?ATM_FLD(F);
LD1 > 0, ELen < Ll - Col - LD1, W + ELen + LD1 =< M, ?ATM_FLD(F) ->
[$,, write_field(F) |
pp_fields_tail(Fs, Col0, Col+ELen, Ll, M, TInd, Ind, LD, W+ELen)];
true ->
{FS, FW} = pp_field(F, Col0, Ll, M, TInd, Ind, LD1, 0),
[$,, $\n, Ind, FS |
pp_fields_tail(Fs, Col0, Col0 + FW, Ll, M, TInd, Ind, LD, FW)]
end.
pp_field({_, Len}=Fl, Col, Ll, M, _TInd, _Ind, LD, W)
when Len < Ll - Col - LD, Len + W + LD =< M ->
{write_field(Fl), if
?ATM_FLD(Fl) ->
Len;
true ->
force nl
end};
pp_field({{field, Name, NameL, F}, _Len}, Col0, Ll, M, TInd, Ind0, LD, W0) ->
{Col, Ind, S, W} = rec_indent(NameL, TInd, Col0, Ind0, W0 + NameL),
force nl
rec_indent(RInd, TInd, Col0, Ind0, W0) ->
Nl = (TInd > 0) and (RInd > TInd),
DCol = case Nl of
true -> TInd;
false -> RInd
end,
Col = Col0 + DCol,
Ind = indent(DCol, Ind0),
S = case Nl of
true -> [$\n | Ind];
false -> ""
end,
W = case Nl of
true -> 0;
false -> W0
end,
{Col, Ind, S, W}.
pp_list({dots, _}, _Col0, _Ll, _M, _TInd, _Ind, _LD, _S, _W) ->
"...";
pp_list([E | Es], Col0, Ll, M, TInd, Ind, LD, S, W) ->
{ES, WE} = pp_element(E, Col0, Ll, M, TInd, Ind, last_depth(Es, LD), W),
[ES | pp_tail(Es, Col0, Col0 + WE, Ll, M, TInd, Ind, LD, S, W + WE)].
pp_tail([], _Col0, _Col, _Ll, _M, _TInd, _Ind, _LD, _S, _W) ->
"";
pp_tail([{_, Len}=E | Es], Col0, Col, Ll, M, TInd, Ind, LD, S, W) ->
LD1 = last_depth(Es, LD),
ELen = 1 + Len,
if
LD1 =:= 0, ELen + 1 < Ll - Col, W + ELen + 1 =< M, ?ATM(E);
LD1 > 0, ELen < Ll - Col - LD1, W + ELen + LD1 =< M, ?ATM(E) ->
[$,, write(E) |
pp_tail(Es, Col0, Col + ELen, Ll, M, TInd, Ind, LD, S, W+ELen)];
true ->
{ES, WE} = pp_element(E, Col0, Ll, M, TInd, Ind, LD1, 0),
[$,, $\n, Ind, ES |
pp_tail(Es, Col0, Col0 + WE, Ll, M, TInd, Ind, LD, S, WE)]
end;
pp_tail({dots, _}, _Col0, _Col, _Ll, _M, _TInd, _Ind, _LD, S, _W) ->
[S | "..."];
pp_tail({_, Len}=E, _Col0, Col, Ll, M, _TInd, _Ind, LD, S, W)
when Len + 1 < Ll - Col - (LD + 1),
Len + 1 + W + (LD + 1) =< M,
?ATM(E) ->
[S | write(E)];
pp_tail(E, Col0, _Col, Ll, M, TInd, Ind, LD, S, _W) ->
[S, $\n, Ind | pp(E, Col0, Ll, M, TInd, Ind, LD + 1, 0)].
pp_element({_, Len}=E, Col, Ll, M, _TInd, _Ind, LD, W)
when Len < Ll - Col - LD, Len + W + LD =< M, ?ATM(E) ->
{write(E), Len};
pp_element(E, Col, Ll, M, TInd, Ind, LD, W) ->
force nl
%% Reuse the list created by io_lib:write_binary()...
pp_binary([LT,LT,S,GT,GT], Col, Ll, M, Ind, LD, W) ->
N = erlang:max(8, erlang:min(Ll - Col, M - 4 - W) - LD),
[LT,LT,pp_binary(S, N, N, Ind),GT,GT].
pp_binary([BS, $, | S], N, N0, Ind) ->
Len = length(BS) + 1,
case N - Len of
N1 when N1 < 0 ->
[$\n, Ind, BS, $, | pp_binary(S, N0 - Len, N0, Ind)];
N1 ->
[BS, $, | pp_binary(S, N1, N0, Ind)]
end;
pp_binary([BS1, $:, BS2]=S, N, _N0, Ind)
when length(BS1) + length(BS2) + 1 > N ->
[$\n, Ind, S];
pp_binary(S, N, _N0, Ind) ->
case iolist_size(S) > N of
true ->
[$\n, Ind, S];
false ->
S
end.
write({{tuple, _IsTagged, L}, _}) ->
[${, write_list(L, $,), $}];
write({{list, L}, _}) ->
[$[, write_list(L, $|), $]];
write({{record, [{Name,_} | L]}, _}) ->
[Name, ${, write_fields(L), $}];
write({{bin, S}, _}) ->
S;
write({S, _}) ->
S.
write_fields([]) ->
"";
write_fields({dots, _}) ->
"...";
write_fields([F | Fs]) ->
[write_field(F) | write_fields_tail(Fs)].
write_fields_tail([]) ->
"";
write_fields_tail({dots, _}) ->
",...";
write_fields_tail([F | Fs]) ->
[$,, write_field(F) | write_fields_tail(Fs)].
write_field({{field, Name, _NameL, F}, _}) ->
[Name, " = " | write(F)].
write_list({dots, _}, _S) ->
"...";
write_list([E | Es], S) ->
[write(E) | write_tail(Es, S)].
write_tail([], _S) ->
[];
write_tail([E | Es], S) ->
[$,, write(E) | write_tail(Es, S)];
write_tail({dots, _}, S) ->
[S | "..."];
write_tail(E, S) ->
[S | write(E)].
%% The depth (D) is used for extracting and counting the characters to
%% print. The structure is kept so that the returned intermediate
%% format can be formatted. The separators (list, tuple, record) are
%% counted but need to be added later.
%% D =/= 0
print_length([], _D, _RF) ->
{"[]", 2};
print_length({}, _D, _RF) ->
{"{}", 2};
print_length(List, D, RF) when is_list(List) ->
case printable_list(List, D) of
true ->
S = io_lib:write_string(List, $"), %"
{S, length(S)};
%% Truncated lists could break some existing code.
% {true, Prefix} ->
% S = io_lib:write_string(Prefix, $"), %"
{ [ S | " ... " ] , 3 + length(S ) } ;
false ->
print_length_list(List, D, RF)
end;
print_length(Fun, _D, _RF) when is_function(Fun) ->
S = io_lib:write(Fun),
{S, iolist_size(S)};
print_length(R, D, RF) when is_atom(element(1, R)),
is_function(RF) ->
case RF(element(1, R), tuple_size(R) - 1) of
no ->
print_length_tuple(R, D, RF);
RDefs ->
print_length_record(R, D, RF, RDefs)
end;
print_length(Tuple, D, RF) when is_tuple(Tuple) ->
print_length_tuple(Tuple, D, RF);
print_length(<<>>, _D, _RF) ->
{"<<>>", 4};
print_length(<<_/bitstring>>, 1, _RF) ->
{"<<...>>", 7};
print_length(<<_/bitstring>>=Bin, D, _RF) ->
case bit_size(Bin) rem 8 of
0 ->
D1 = D - 1,
case printable_bin(Bin, D1) of
List when is_list(List) ->
S = io_lib:write_string(List, $"),
{[$<,$<,S,$>,$>], 4 + length(S)};
{true, Prefix} ->
S = io_lib:write_string(Prefix, $"),
{[$<,$<, S | "...>>"], 4 + length(S)};
false ->
S = io_lib:write(Bin, D),
{{bin,S}, iolist_size(S)}
end;
_ ->
S = io_lib:write(Bin, D),
{{bin,S}, iolist_size(S)}
end;
print_length(Term, _D, _RF) ->
S = io_lib:write(Term),
{S, iolist_size(S)}.
print_length_tuple(_Tuple, 1, _RF) ->
{"{...}", 5};
print_length_tuple(Tuple, D, RF) ->
L = print_length_list1(tuple_to_list(Tuple), D, RF),
IsTagged = is_atom(element(1, Tuple)) and (tuple_size(Tuple) > 1),
{{tuple,IsTagged,L}, list_length(L, 2)}.
print_length_record(_Tuple, 1, _RF, _RDefs) ->
{"{...}", 5};
print_length_record(Tuple, D, RF, RDefs) ->
Name = [$# | io_lib:write_atom(element(1, Tuple))],
NameL = length(Name),
L = print_length_fields(RDefs, D - 1, tl(tuple_to_list(Tuple)), RF),
{{record, [{Name,NameL} | L]}, list_length(L, NameL + 2)}.
print_length_fields([], _D, [], _RF) ->
[];
print_length_fields(_, 1, _, _RF) ->
{dots, 3};
print_length_fields([Def | Defs], D, [E | Es], RF) ->
[print_length_field(Def, D - 1, E, RF) |
print_length_fields(Defs, D - 1, Es, RF)].
print_length_field(Def, D, E, RF) ->
Name = io_lib:write_atom(Def),
{S, L} = print_length(E, D, RF),
NameL = length(Name) + 3,
{{field, Name, NameL, {S, L}}, NameL + L}.
print_length_list(List, D, RF) ->
L = print_length_list1(List, D, RF),
{{list, L}, list_length(L, 2)}.
print_length_list1([], _D, _RF) ->
[];
print_length_list1(_, 1, _RF) ->
{dots, 3};
print_length_list1([E | Es], D, RF) ->
[print_length(E, D - 1, RF) | print_length_list1(Es, D - 1, RF)];
print_length_list1(E, D, RF) ->
print_length(E, D - 1, RF).
list_length([], Acc) ->
Acc;
list_length([{_, Len} | Es], Acc) ->
list_length_tail(Es, Acc + Len);
list_length({_, Len}, Acc) ->
Acc + Len.
list_length_tail([], Acc) ->
Acc;
list_length_tail([{_,Len} | Es], Acc) ->
list_length_tail(Es, Acc + 1 + Len);
list_length_tail({_, Len}, Acc) ->
Acc + 1 + Len.
? CHARS printable characters has depth 1 .
-define(CHARS, 4).
printable_list(L, D) when D < 0 ->
io_lib:printable_list(L);
printable_list(_L, 1) ->
false;
printable_list(L, _D) ->
io_lib:printable_list(L).
%% Truncated lists could break some existing code.
printable_list(L , D ) - >
% Len = ?CHARS * (D - 1),
case printable_list1(L , ) of
% all ->
% true;
N when is_integer(N ) , > = D - 1 - >
% {L1, _} = lists:split(Len - N, L),
% {true, L1};
% N when is_integer(N) ->
% false
% end.
printable_bin(Bin, D) when D >= 0, ?CHARS * D =< byte_size(Bin) ->
printable_bin(Bin, erlang:min(?CHARS * D, byte_size(Bin)), D);
printable_bin(Bin, D) ->
printable_bin(Bin, byte_size(Bin), D).
printable_bin(Bin, Len, D) ->
N = erlang:min(20, Len),
L = binary_to_list(Bin, 1, N),
case printable_list1(L, N) of
all when N =:= byte_size(Bin) ->
L;
all when N =:= Len -> % N < byte_size(Bin)
{true, L};
all ->
case printable_bin1(Bin, 1 + N, Len - N) of
0 when byte_size(Bin) =:= Len ->
binary_to_list(Bin);
NC when D > 0, Len - NC >= D ->
{true, binary_to_list(Bin, 1, Len - NC)};
NC when is_integer(NC) ->
false
end;
NC when is_integer(NC), D > 0, N - NC >= D ->
{true, binary_to_list(Bin, 1, N - NC)};
NC when is_integer(NC) ->
false
end.
printable_bin1(_Bin, _Start, 0) ->
0;
printable_bin1(Bin, Start, Len) ->
N = erlang:min(10000, Len),
L = binary_to_list(Bin, Start, Start + N - 1),
case printable_list1(L, N) of
all ->
printable_bin1(Bin, Start + N, Len - N);
NC when is_integer(NC) ->
Len - (N - NC)
end.
%% -> all | integer() >=0. Adopted from io_lib.erl.
% printable_list1([_ | _], 0) -> 0;
printable_list1([C | Cs], N) when is_integer(C), C >= $\s, C =< $~ ->
printable_list1(Cs, N - 1);
printable_list1([C | Cs], N) when is_integer(C), C >= $\240, C =< $\377 ->
printable_list1(Cs, N - 1);
printable_list1([$\n | Cs], N) -> printable_list1(Cs, N - 1);
printable_list1([$\r | Cs], N) -> printable_list1(Cs, N - 1);
printable_list1([$\t | Cs], N) -> printable_list1(Cs, N - 1);
printable_list1([$\v | Cs], N) -> printable_list1(Cs, N - 1);
printable_list1([$\b | Cs], N) -> printable_list1(Cs, N - 1);
printable_list1([$\f | Cs], N) -> printable_list1(Cs, N - 1);
printable_list1([$\e | Cs], N) -> printable_list1(Cs, N - 1);
printable_list1([], _) -> all;
printable_list1(_, N) -> N.
Throw ' no_good ' if the indentation exceeds half the line length
%% unless there is room for M characters on the line.
cind({_S, Len}, Col, Ll, M, Ind, LD, W) when Len < Ll - Col - LD,
Len + W + LD =< M ->
Ind;
cind({{list,L}, _Len}, Col, Ll, M, Ind, LD, W) ->
cind_list(L, Col + 1, Ll, M, Ind, LD, W + 1);
cind({{tuple,true,L}, _Len}, Col, Ll, M, Ind, LD, W) ->
cind_tag_tuple(L, Col, Ll, M, Ind, LD, W + 1);
cind({{tuple,false,L}, _Len}, Col, Ll, M, Ind, LD, W) ->
cind_list(L, Col + 1, Ll, M, Ind, LD, W + 1);
cind({{record,[{_Name,NLen} | L]}, _Len}, Col, Ll, M, Ind, LD, W) ->
cind_record(L, NLen, Col, Ll, M, Ind, LD, W + NLen + 1);
cind({{bin,_S}, _Len}, _Col, _Ll, _M, Ind, _LD, _W) ->
Ind;
cind({_S, _Len}, _Col, _Ll, _M, Ind, _LD, _W) ->
Ind.
cind_tag_tuple([{_Tag,Tlen} | L], Col, Ll, M, Ind, LD, W) ->
TagInd = Tlen + 2,
Tcol = Col + TagInd,
if
Ind > 0, TagInd > Ind ->
Col1 = Col + Ind,
if
M + Col1 =< Ll; Col1 =< Ll div 2 ->
cind_tail(L, Col1, Tcol, Ll, M, Ind, LD, W + Tlen);
true ->
throw(no_good)
end;
M + Tcol < Ll; Tcol < Ll div 2 ->
cind_list(L, Tcol, Ll, M, Ind, LD, W + Tlen + 1);
true ->
throw(no_good)
end.
cind_record([F | Fs], Nlen, Col0, Ll, M, Ind, LD, W0) ->
Nind = Nlen + 1,
{Col, W} = cind_rec(Nind, Col0, Ll, M, Ind, W0),
FW = cind_field(F, Col, Ll, M, Ind, last_depth(Fs, LD), W),
cind_fields_tail(Fs, Col, Col + FW, Ll, M, Ind, LD, W + FW);
cind_record(_, _Nlen, _Col, _Ll, _M, Ind, _LD, _W) ->
Ind.
cind_fields_tail([{_, Len}=F | Fs], Col0, Col, Ll, M, Ind, LD, W) ->
LD1 = last_depth(Fs, LD),
ELen = 1 + Len,
if
LD1 =:= 0, ELen + 1 < Ll - Col, W + ELen + 1 =< M, ?ATM_FLD(F);
LD1 > 0, ELen < Ll - Col - LD1, W + ELen + LD1 =< M, ?ATM_FLD(F) ->
cind_fields_tail(Fs, Col0, Col + ELen, Ll, M, Ind, LD, W + ELen);
true ->
FW = cind_field(F, Col0, Ll, M, Ind, LD1, 0),
cind_fields_tail(Fs, Col0, Col + FW, Ll, M, Ind, LD, FW)
end;
cind_fields_tail(_, _Col0, _Col, _Ll, _M, Ind, _LD, _W) ->
Ind.
cind_field({{field, _N, _NL, _F}, Len}=Fl, Col, Ll, M, _Ind, LD, W)
when Len < Ll - Col - LD, Len + W + LD =< M ->
if
?ATM_FLD(Fl) ->
Len;
true ->
Ll
end;
cind_field({{field, _Name, NameL, F}, _Len}, Col0, Ll, M, Ind, LD, W0) ->
{Col, W} = cind_rec(NameL, Col0, Ll, M, Ind, W0 + NameL),
cind(F, Col, Ll, M, Ind, LD, W),
Ll.
cind_rec(RInd, Col0, Ll, M, Ind, W0) ->
Nl = (Ind > 0) and (RInd > Ind),
DCol = case Nl of
true -> Ind;
false -> RInd
end,
Col = Col0 + DCol,
if
M + Col =< Ll; Col =< Ll div 2 ->
W = case Nl of
true -> 0;
false -> W0
end,
{Col, W};
true ->
throw(no_good)
end.
cind_list({dots, _}, _Col0, _Ll, _M, Ind, _LD, _W) ->
Ind;
cind_list([E | Es], Col0, Ll, M, Ind, LD, W) ->
WE = cind_element(E, Col0, Ll, M, Ind, last_depth(Es, LD), W),
cind_tail(Es, Col0, Col0 + WE, Ll, M, Ind, LD, W + WE).
cind_tail([], _Col0, _Col, _Ll, _M, Ind, _LD, _W) ->
Ind;
cind_tail([{_, Len}=E | Es], Col0, Col, Ll, M, Ind, LD, W) ->
LD1 = last_depth(Es, LD),
ELen = 1 + Len,
if
LD1 =:= 0, ELen + 1 < Ll - Col, W + ELen + 1 =< M, ?ATM(E);
LD1 > 0, ELen < Ll - Col - LD1, W + ELen + LD1 =< M, ?ATM(E) ->
cind_tail(Es, Col0, Col + ELen, Ll, M, Ind, LD, W + ELen);
true ->
WE = cind_element(E, Col0, Ll, M, Ind, LD1, 0),
cind_tail(Es, Col0, Col0 + WE, Ll, M, Ind, LD, WE)
end;
cind_tail({dots, _}, _Col0, _Col, _Ll, _M, Ind, _LD, _W) ->
Ind;
cind_tail({_, Len}=E, _Col0, Col, Ll, M, Ind, LD, W)
when Len + 1 < Ll - Col - (LD + 1),
Len + 1 + W + (LD + 1) =< M,
?ATM(E) ->
Ind;
cind_tail(E, _Col0, Col, Ll, M, Ind, LD, _W) ->
cind(E, Col, Ll, M, Ind, LD + 1, 0).
cind_element({_, Len}=E, Col, Ll, M, _Ind, LD, W)
when Len < Ll - Col - LD, Len + W + LD =< M, ?ATM(E) ->
Len;
cind_element(E, Col, Ll, M, Ind, LD, W) ->
cind(E, Col, Ll, M, Ind, LD, W),
Ll.
last_depth([_ | _], _LD) ->
0;
last_depth(_, LD) ->
LD + 1.
while_fail([], _F, V) ->
V;
while_fail([A | As], F, V) ->
try F(A) catch _ -> while_fail(As, F, V) end.
indent(N) when is_integer(N), N > 0 ->
chars($\s, N-1).
indent(1, Ind) -> % Optimization of common case
[$\s | Ind];
indent(4, Ind) -> % Optimization of common case
S2 = [$\s, $\s],
[S2, S2 | Ind];
indent(N, Ind) when is_integer(N), N > 0 ->
[chars($\s, N) | Ind].
%% A deep version of string:chars/2
chars(_C, 0) ->
[];
chars(C, 2) ->
[C, C];
chars(C, 3) ->
[C, C, C];
chars(C, N) when (N band 1) =:= 0 ->
S = chars(C, N bsr 1),
[S | S];
chars(C, N) ->
S = chars(C, N bsr 1),
[C, S | S].
| null | https://raw.githubusercontent.com/mfoemmel/erlang-otp/9c6fdd21e4e6573ca6f567053ff3ac454d742bc2/lib/stdlib/src/io_lib_pretty.erl | erlang |
%CopyrightBegin%
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
%CopyrightEnd%
In this module "print" means the formatted printing while "write"
Exported functions
print(Term) -> [Chars]
Depth = -1 gives unlimited print depth. Use io_lib:write for atomic terms.
Used by the shell for printing records.
Local functions
Print a tagged tuple by indenting the rest of the elements
Reuse the list created by io_lib:write_binary()...
The depth (D) is used for extracting and counting the characters to
print. The structure is kept so that the returned intermediate
format can be formatted. The separators (list, tuple, record) are
counted but need to be added later.
D =/= 0
Truncated lists could break some existing code.
{true, Prefix} ->
S = io_lib:write_string(Prefix, $"), %"
Truncated lists could break some existing code.
Len = ?CHARS * (D - 1),
all ->
true;
{L1, _} = lists:split(Len - N, L),
{true, L1};
N when is_integer(N) ->
false
end.
N < byte_size(Bin)
-> all | integer() >=0. Adopted from io_lib.erl.
printable_list1([_ | _], 0) -> 0;
unless there is room for M characters on the line.
Optimization of common case
Optimization of common case
A deep version of string:chars/2 | Copyright Ericsson AB 1996 - 2009 . All Rights Reserved .
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
-module(io_lib_pretty).
Pretty printing Erlang terms
means just writing out onto one line .
-export([print/1,print/2,print/3,print/4,print/5,print/6]).
print(Term , Column , , Depth ) - > [ Chars ]
print(Term) ->
print(Term, 1, 80, -1).
print(Term , RecDefFun ) - > [ Chars ]
print(Term , Depth , RecDefFun ) - > [ Chars ]
RecDefFun = fun(Tag , ) - > [ FieldTag ] | no
print(Term, RecDefFun) ->
print(Term, -1, RecDefFun).
print(Term, Depth, RecDefFun) ->
print(Term, 1, 80, Depth, RecDefFun).
print(Term, Col, Ll, D) ->
print(Term, Col, Ll, D, _M=-1, no_fun).
print(Term, Col, Ll, D, RecDefFun) ->
print(Term, Col, Ll, D, _M=-1, RecDefFun).
print(_, _, _, 0, _M, _RF) -> "...";
print(Term, Col, Ll, D, M, RecDefFun) when Col =< 0 ->
print(Term, 1, Ll, D, M, RecDefFun);
print(Term, Col, Ll, D, M0, RecDefFun) when is_tuple(Term);
is_list(Term) ->
If = {_S, Len} = print_length(Term, D, RecDefFun),
M = max_cs(M0, Len),
if
Len < Ll - Col, Len =< M ->
write(If);
true ->
TInd = while_fail([-1, 4],
fun(I) -> cind(If, Col, Ll, M, I, 0, 0) end,
1),
pp(If, Col, Ll, M, TInd, indent(Col), 0, 0)
end;
print(<<_/bitstring>>=Term, Col, Ll, D, M0, RecDefFun) ->
If = {_S, Len} = print_length(Term, D, RecDefFun),
M = max_cs(M0, Len),
if
Len < Ll - Col, Len =< M ->
write(If);
true ->
TInd = while_fail([-1, 4],
fun(I) -> cind(If, Col, Ll, M, I, 0, 0) end,
1),
pp(If, Col, Ll, M, TInd, indent(Col), 0, 0)
end;
print(Term, _Col, _Ll, _D, _M, _RF) ->
io_lib:write(Term).
max_cs(M, Len) when M < 0 ->
Len;
max_cs(M, _Len) ->
M.
-define(ATM(T), is_list(element(1, T))).
-define(ATM_FLD(Field), ?ATM(element(4, element(1, Field)))).
pp({_S, Len} = If, Col, Ll, M, _TInd, _Ind, LD, W)
when Len < Ll - Col - LD, Len + W + LD =< M ->
write(If);
pp({{list,L}, _Len}, Col, Ll, M, TInd, Ind, LD, W) ->
[$[, pp_list(L, Col + 1, Ll, M, TInd, indent(1, Ind), LD, $|, W + 1), $]];
pp({{tuple,true,L}, _Len}, Col, Ll, M, TInd, Ind, LD, W) ->
[${, pp_tag_tuple(L, Col, Ll, M, TInd, Ind, LD, W + 1), $}];
pp({{tuple,false,L}, _Len}, Col, Ll, M, TInd, Ind, LD, W) ->
[${, pp_list(L, Col + 1, Ll, M, TInd, indent(1, Ind), LD, $,, W + 1), $}];
pp({{record,[{Name,NLen} | L]}, _Len}, Col, Ll, M, TInd, Ind, LD, W) ->
[Name, ${, pp_record(L, NLen, Col, Ll, M, TInd, Ind, LD, W + NLen+1), $}];
pp({{bin,S}, _Len}, Col, Ll, M, _TInd, Ind, LD, W) ->
pp_binary(S, Col + 2, Ll, M, indent(2, Ind), LD, W);
pp({S, _Len}, _Col, _Ll, _M, _TInd, _Ind, _LD, _W) ->
S.
differently to the tag . Tuple has size > = 2 .
pp_tag_tuple([{Tag,Tlen} | L], Col, Ll, M, TInd, Ind, LD, W) ->
TagInd = Tlen + 2,
Tcol = Col + TagInd,
S = $,,
if
TInd > 0, TagInd > TInd ->
Col1 = Col + TInd,
Indent = indent(TInd, Ind),
[Tag|pp_tail(L, Col1, Tcol, Ll, M, TInd, Indent, LD, S, W+Tlen)];
true ->
Indent = indent(TagInd, Ind),
[Tag, S | pp_list(L, Tcol, Ll, M, TInd, Indent, LD, S, W+Tlen+1)]
end.
pp_record([], _Nlen, _Col, _Ll, _M, _TInd, _Ind, _LD, _W) ->
"";
pp_record({dots, _}, _Nlen, _Col, _Ll, _M, _TInd, _Ind, _LD, _W) ->
"...";
pp_record([F | Fs], Nlen, Col0, Ll, M, TInd, Ind0, LD, W0) ->
Nind = Nlen + 1,
{Col, Ind, S, W} = rec_indent(Nind, TInd, Col0, Ind0, W0),
{FS, FW} = pp_field(F, Col, Ll, M, TInd, Ind, last_depth(Fs, LD), W),
[S, FS | pp_fields_tail(Fs, Col, Col + FW, Ll, M, TInd, Ind, LD, W + FW)].
pp_fields_tail([], _Col0, _Col, _Ll, _M, _TInd, _Ind, _LD, _W) ->
"";
pp_fields_tail({dots, _}, _Col0, _Col, _M, _Ll, _TInd, _Ind, _LD, _W) ->
",...";
pp_fields_tail([{_, Len}=F | Fs], Col0, Col, Ll, M, TInd, Ind, LD, W) ->
LD1 = last_depth(Fs, LD),
ELen = 1 + Len,
if
LD1 =:= 0, ELen + 1 < Ll - Col, W + ELen + 1 =< M, ?ATM_FLD(F);
LD1 > 0, ELen < Ll - Col - LD1, W + ELen + LD1 =< M, ?ATM_FLD(F) ->
[$,, write_field(F) |
pp_fields_tail(Fs, Col0, Col+ELen, Ll, M, TInd, Ind, LD, W+ELen)];
true ->
{FS, FW} = pp_field(F, Col0, Ll, M, TInd, Ind, LD1, 0),
[$,, $\n, Ind, FS |
pp_fields_tail(Fs, Col0, Col0 + FW, Ll, M, TInd, Ind, LD, FW)]
end.
pp_field({_, Len}=Fl, Col, Ll, M, _TInd, _Ind, LD, W)
when Len < Ll - Col - LD, Len + W + LD =< M ->
{write_field(Fl), if
?ATM_FLD(Fl) ->
Len;
true ->
force nl
end};
pp_field({{field, Name, NameL, F}, _Len}, Col0, Ll, M, TInd, Ind0, LD, W0) ->
{Col, Ind, S, W} = rec_indent(NameL, TInd, Col0, Ind0, W0 + NameL),
force nl
rec_indent(RInd, TInd, Col0, Ind0, W0) ->
Nl = (TInd > 0) and (RInd > TInd),
DCol = case Nl of
true -> TInd;
false -> RInd
end,
Col = Col0 + DCol,
Ind = indent(DCol, Ind0),
S = case Nl of
true -> [$\n | Ind];
false -> ""
end,
W = case Nl of
true -> 0;
false -> W0
end,
{Col, Ind, S, W}.
pp_list({dots, _}, _Col0, _Ll, _M, _TInd, _Ind, _LD, _S, _W) ->
"...";
pp_list([E | Es], Col0, Ll, M, TInd, Ind, LD, S, W) ->
{ES, WE} = pp_element(E, Col0, Ll, M, TInd, Ind, last_depth(Es, LD), W),
[ES | pp_tail(Es, Col0, Col0 + WE, Ll, M, TInd, Ind, LD, S, W + WE)].
pp_tail([], _Col0, _Col, _Ll, _M, _TInd, _Ind, _LD, _S, _W) ->
"";
pp_tail([{_, Len}=E | Es], Col0, Col, Ll, M, TInd, Ind, LD, S, W) ->
LD1 = last_depth(Es, LD),
ELen = 1 + Len,
if
LD1 =:= 0, ELen + 1 < Ll - Col, W + ELen + 1 =< M, ?ATM(E);
LD1 > 0, ELen < Ll - Col - LD1, W + ELen + LD1 =< M, ?ATM(E) ->
[$,, write(E) |
pp_tail(Es, Col0, Col + ELen, Ll, M, TInd, Ind, LD, S, W+ELen)];
true ->
{ES, WE} = pp_element(E, Col0, Ll, M, TInd, Ind, LD1, 0),
[$,, $\n, Ind, ES |
pp_tail(Es, Col0, Col0 + WE, Ll, M, TInd, Ind, LD, S, WE)]
end;
pp_tail({dots, _}, _Col0, _Col, _Ll, _M, _TInd, _Ind, _LD, S, _W) ->
[S | "..."];
pp_tail({_, Len}=E, _Col0, Col, Ll, M, _TInd, _Ind, LD, S, W)
when Len + 1 < Ll - Col - (LD + 1),
Len + 1 + W + (LD + 1) =< M,
?ATM(E) ->
[S | write(E)];
pp_tail(E, Col0, _Col, Ll, M, TInd, Ind, LD, S, _W) ->
[S, $\n, Ind | pp(E, Col0, Ll, M, TInd, Ind, LD + 1, 0)].
pp_element({_, Len}=E, Col, Ll, M, _TInd, _Ind, LD, W)
when Len < Ll - Col - LD, Len + W + LD =< M, ?ATM(E) ->
{write(E), Len};
pp_element(E, Col, Ll, M, TInd, Ind, LD, W) ->
force nl
pp_binary([LT,LT,S,GT,GT], Col, Ll, M, Ind, LD, W) ->
N = erlang:max(8, erlang:min(Ll - Col, M - 4 - W) - LD),
[LT,LT,pp_binary(S, N, N, Ind),GT,GT].
pp_binary([BS, $, | S], N, N0, Ind) ->
Len = length(BS) + 1,
case N - Len of
N1 when N1 < 0 ->
[$\n, Ind, BS, $, | pp_binary(S, N0 - Len, N0, Ind)];
N1 ->
[BS, $, | pp_binary(S, N1, N0, Ind)]
end;
pp_binary([BS1, $:, BS2]=S, N, _N0, Ind)
when length(BS1) + length(BS2) + 1 > N ->
[$\n, Ind, S];
pp_binary(S, N, _N0, Ind) ->
case iolist_size(S) > N of
true ->
[$\n, Ind, S];
false ->
S
end.
write({{tuple, _IsTagged, L}, _}) ->
[${, write_list(L, $,), $}];
write({{list, L}, _}) ->
[$[, write_list(L, $|), $]];
write({{record, [{Name,_} | L]}, _}) ->
[Name, ${, write_fields(L), $}];
write({{bin, S}, _}) ->
S;
write({S, _}) ->
S.
write_fields([]) ->
"";
write_fields({dots, _}) ->
"...";
write_fields([F | Fs]) ->
[write_field(F) | write_fields_tail(Fs)].
write_fields_tail([]) ->
"";
write_fields_tail({dots, _}) ->
",...";
write_fields_tail([F | Fs]) ->
[$,, write_field(F) | write_fields_tail(Fs)].
write_field({{field, Name, _NameL, F}, _}) ->
[Name, " = " | write(F)].
write_list({dots, _}, _S) ->
"...";
write_list([E | Es], S) ->
[write(E) | write_tail(Es, S)].
write_tail([], _S) ->
[];
write_tail([E | Es], S) ->
[$,, write(E) | write_tail(Es, S)];
write_tail({dots, _}, S) ->
[S | "..."];
write_tail(E, S) ->
[S | write(E)].
%% Build the intermediate representation {Rep, Length} of Term down to
%% depth D.  Rep is either printable iodata or one of the tagged forms
%% ({list,_}, {tuple,IsTagged,_}, {record,_}, {bin,_}, {dots,_}) that
%% the write_*/cind_* passes consume; Length is the length of the flat
%% printout.  RF is the record-definition lookup fun (returns 'no' or a
%% list of field names).
%%
%% FIX: the {true, Prefix} branch of the printable-list case had lost
%% its clause head and the binding of S (extraction damage); restored
%% per the corresponding OTP io_lib_pretty code.
print_length([], _D, _RF) ->
    {"[]", 2};
print_length({}, _D, _RF) ->
    {"{}", 2};
print_length(List, D, RF) when is_list(List) ->
    case printable_list(List, D) of
        true ->
            %% Print as a double-quoted string.
            S = io_lib:write_string(List, $"),   %"
            {S, length(S)};
        {true, Prefix} ->
            %% Depth-truncated: print the printable prefix plus "...".
            S = io_lib:write_string(Prefix, $"), %"
            {[S | "..."], 3 + length(S)};
        false ->
            print_length_list(List, D, RF)
    end;
print_length(Fun, _D, _RF) when is_function(Fun) ->
    S = io_lib:write(Fun),
    {S, iolist_size(S)};
print_length(R, D, RF) when is_atom(element(1, R)),
                            is_function(RF) ->
    %% Tuple tagged with an atom: possibly a known record.
    case RF(element(1, R), tuple_size(R) - 1) of
        no ->
            print_length_tuple(R, D, RF);
        RDefs ->
            print_length_record(R, D, RF, RDefs)
    end;
print_length(Tuple, D, RF) when is_tuple(Tuple) ->
    print_length_tuple(Tuple, D, RF);
print_length(<<>>, _D, _RF) ->
    {"<<>>", 4};
print_length(<<_/bitstring>>, 1, _RF) ->
    {"<<...>>", 7};
print_length(<<_/bitstring>>=Bin, D, _RF) ->
    case bit_size(Bin) rem 8 of
        0 ->
            %% Whole bytes: may be printable as <<"...">>.
            D1 = D - 1,
            case printable_bin(Bin, D1) of
                List when is_list(List) ->
                    S = io_lib:write_string(List, $"),
                    {[$<,$<,S,$>,$>], 4 + length(S)};
                {true, Prefix} ->
                    S = io_lib:write_string(Prefix, $"),
                    {[$<,$<, S | "...>>"], 4 + length(S)};
                false ->
                    S = io_lib:write(Bin, D),
                    {{bin,S}, iolist_size(S)}
            end;
        _ ->
            %% Bitstring with a partial byte: never printable as text.
            S = io_lib:write(Bin, D),
            {{bin,S}, iolist_size(S)}
    end;
print_length(Term, _D, _RF) ->
    S = io_lib:write(Term),
    {S, iolist_size(S)}.
%% Intermediate representation of a tuple; depth 1 prints as "{...}".
%% IsTagged marks tuples whose first element is an atom (selects the
%% "tag tuple" indentation style in the cind pass).
print_length_tuple(_Tuple, 1, _RF) ->
    {"{...}", 5};
print_length_tuple(Tuple, D, RF) ->
    L = print_length_list1(tuple_to_list(Tuple), D, RF),
    IsTagged = is_atom(element(1, Tuple)) and (tuple_size(Tuple) > 1),
    %% The extra 2 accounts for the enclosing braces.
    {{tuple,IsTagged,L}, list_length(L, 2)}.
%% Intermediate representation of a record (#name{...}); RDefs holds
%% the field names supplied by the record-definition fun.
print_length_record(_Tuple, 1, _RF, _RDefs) ->
    {"{...}", 5};
print_length_record(Tuple, D, RF, RDefs) ->
    Name = [$# | io_lib:write_atom(element(1, Tuple))],
    NameL = length(Name),
    L = print_length_fields(RDefs, D - 1, tl(tuple_to_list(Tuple)), RF),
    %% NameL + 2 accounts for the record name plus the braces.
    {{record, [{Name,NameL} | L]}, list_length(L, NameL + 2)}.
%% Pair the record field names with the corresponding tuple values,
%% decrementing the depth per field; a spent depth yields {dots, 3}.
print_length_fields([], _D, [], _RF) ->
    [];
print_length_fields(_, 1, _, _RF) ->
    {dots, 3};
print_length_fields([Def | Defs], D, [E | Es], RF) ->
    [print_length_field(Def, D - 1, E, RF) |
     print_length_fields(Defs, D - 1, Es, RF)].
%% One record field; NameL includes the " = " separator (3 characters).
print_length_field(Def, D, E, RF) ->
    Name = io_lib:write_atom(Def),
    {S, L} = print_length(E, D, RF),
    NameL = length(Name) + 3,
    {{field, Name, NameL, {S, L}}, NameL + L}.
%% Intermediate representation of a (possibly improper) list; the
%% extra 2 accounts for the enclosing brackets.
print_length_list(List, D, RF) ->
    L = print_length_list1(List, D, RF),
    {{list, L}, list_length(L, 2)}.
%% Elements of a list (also used for tuple contents); depth exhaustion
%% yields {dots, 3}, and the final clause handles an improper tail.
print_length_list1([], _D, _RF) ->
    [];
print_length_list1(_, 1, _RF) ->
    {dots, 3};
print_length_list1([E | Es], D, RF) ->
    [print_length(E, D - 1, RF) | print_length_list1(Es, D - 1, RF)];
print_length_list1(E, D, RF) ->
    %% Improper tail.
    print_length(E, D - 1, RF).
%% Total printed length of an element list, starting from Acc (which
%% carries the bracket/name overhead); a bare {_, Len} is an improper
%% tail or a {dots, _} marker.
list_length([], Acc) ->
    Acc;
list_length([{_, Len} | Es], Acc) ->
    list_length_tail(Es, Acc + Len);
list_length({_, Len}, Acc) ->
    Acc + Len.
%% Length of the remaining elements: each one costs one separator
%% character plus its own printed length; an improper tail (the bare
%% {_, Len} case) is charged the same way.
list_length_tail(Rest, Acc) ->
    case Rest of
        [] ->
            Acc;
        [{_, Len} | Es] ->
            list_length_tail(Es, Acc + 1 + Len);
        {_, Len} ->
            Acc + 1 + Len
    end.
%% ?CHARS printable characters has depth 1.
-define(CHARS, 4).
%% Should a list be printed as a string?  A negative depth means
%% unlimited depth; depth 1 never prints as a string (only "...").
printable_list(L, D) when D < 0 ->
    io_lib:printable_list(L);
printable_list(_L, 1) ->
    false;
printable_list(L, _D) ->
    io_lib:printable_list(L).
%% Garbled remnant of an alternative printable_list/2 clause (recovered
%% here only as a comment so the module stays compilable):
%%   printable_list(L, D) ->
%%       case printable_list1(L, ...) of
%%           N when is_integer(N), ... >= D - 1 -> ...
%% Should a binary be printed as a string?  When the depth D limits the
%% output, at most ?CHARS * D bytes are considered.
printable_bin(Bin, D) when D >= 0, ?CHARS * D =< byte_size(Bin) ->
    printable_bin(Bin, erlang:min(?CHARS * D, byte_size(Bin)), D);
printable_bin(Bin, D) ->
    printable_bin(Bin, byte_size(Bin), D).
%% Check up to Len bytes of Bin for printability.  Returns the whole
%% character list (fully printable), {true, Prefix} (a printable prefix
%% long enough for depth D), or false.  The first (at most) 20 bytes
%% are checked eagerly; the rest in chunks via printable_bin1/3.
%%
%% FIX: the clause head "all when N =:= Len ->" was missing before the
%% "{true, L};" result (extraction damage); restored per the
%% corresponding OTP io_lib_pretty code.
printable_bin(Bin, Len, D) ->
    N = erlang:min(20, Len),
    L = binary_to_list(Bin, 1, N),
    case printable_list1(L, N) of
        all when N =:= byte_size(Bin) ->
            L;
        all when N =:= Len -> % N < byte_size(Bin)
            {true, L};
        all ->
            case printable_bin1(Bin, 1 + N, Len - N) of
                0 when byte_size(Bin) =:= Len ->
                    binary_to_list(Bin);
                NC when D > 0, Len - NC >= D ->
                    {true, binary_to_list(Bin, 1, Len - NC)};
                NC when is_integer(NC) ->
                    false
            end;
        NC when is_integer(NC), D > 0, N - NC >= D ->
            {true, binary_to_list(Bin, 1, N - NC)};
        NC when is_integer(NC) ->
            false
    end.
%% Scan Len bytes of Bin from Start in chunks of at most 10000 bytes;
%% returns 0 when everything checked was printable, otherwise the
%% number of bytes left unchecked when scanning stopped.
printable_bin1(_Bin, _Start, 0) ->
    0;
printable_bin1(Bin, Start, Len) ->
    N = erlang:min(10000, Len),
    L = binary_to_list(Bin, Start, Start + N - 1),
    case printable_list1(L, N) of
        all ->
            printable_bin1(Bin, Start + N, Len - N);
        NC when is_integer(NC) ->
            Len - (N - NC)
    end.
%% Check at most N list elements for printability (printable Latin-1
%% ranges plus common escape characters).  Returns 'all' when the whole
%% list is printable, otherwise the remaining count N when a
%% non-printable element (or a non-character) is encountered.
printable_list1([C | Cs], N) when is_integer(C), C >= $\s, C =< $~ ->
    printable_list1(Cs, N - 1);
printable_list1([C | Cs], N) when is_integer(C), C >= $\240, C =< $\377 ->
    printable_list1(Cs, N - 1);
printable_list1([$\n | Cs], N) -> printable_list1(Cs, N - 1);
printable_list1([$\r | Cs], N) -> printable_list1(Cs, N - 1);
printable_list1([$\t | Cs], N) -> printable_list1(Cs, N - 1);
printable_list1([$\v | Cs], N) -> printable_list1(Cs, N - 1);
printable_list1([$\b | Cs], N) -> printable_list1(Cs, N - 1);
printable_list1([$\f | Cs], N) -> printable_list1(Cs, N - 1);
printable_list1([$\e | Cs], N) -> printable_list1(Cs, N - 1);
printable_list1([], _) -> all;
printable_list1(_, N) -> N.
%% Throw 'no_good' if the indentation exceeds half the line length.
%% Compute the indentation to use for a term's intermediate
%% representation.  Representations that fit on the current line, and
%% atomic ones, keep the candidate Ind; structured ones recurse into
%% the cind_* helpers, which throw 'no_good' when the layout cannot
%% fit (see the comment above).
cind({_S, Len}, Col, Ll, M, Ind, LD, W) when Len < Ll - Col - LD,
                                             Len + W + LD =< M ->
    Ind;
cind({{list,L}, _Len}, Col, Ll, M, Ind, LD, W) ->
    cind_list(L, Col + 1, Ll, M, Ind, LD, W + 1);
cind({{tuple,true,L}, _Len}, Col, Ll, M, Ind, LD, W) ->
    cind_tag_tuple(L, Col, Ll, M, Ind, LD, W + 1);
cind({{tuple,false,L}, _Len}, Col, Ll, M, Ind, LD, W) ->
    cind_list(L, Col + 1, Ll, M, Ind, LD, W + 1);
cind({{record,[{_Name,NLen} | L]}, _Len}, Col, Ll, M, Ind, LD, W) ->
    cind_record(L, NLen, Col, Ll, M, Ind, LD, W + NLen + 1);
cind({{bin,_S}, _Len}, _Col, _Ll, _M, Ind, _LD, _W) ->
    Ind;
cind({_S, _Len}, _Col, _Ll, _M, Ind, _LD, _W) ->
    Ind.
%% Indentation check for {Tag, ...} tuples: either break after the tag
%% and indent by Ind (when the tag is wider than Ind), or line the
%% remaining elements up after the tag.  Throws 'no_good' when neither
%% placement fits.
cind_tag_tuple([{_Tag,Tlen} | L], Col, Ll, M, Ind, LD, W) ->
    TagInd = Tlen + 2,
    Tcol = Col + TagInd,
    if
        Ind > 0, TagInd > Ind ->
            Col1 = Col + Ind,
            if
                M + Col1 =< Ll; Col1 =< Ll div 2 ->
                    cind_tail(L, Col1, Tcol, Ll, M, Ind, LD, W + Tlen);
                true ->
                    throw(no_good)
            end;
        M + Tcol < Ll; Tcol < Ll div 2 ->
            cind_list(L, Tcol, Ll, M, Ind, LD, W + Tlen + 1);
        true ->
            throw(no_good)
    end.
%% Indentation check for a record's fields; Nlen is the width of the
%% "#name{" prefix.  An empty/dots field list keeps the candidate Ind.
cind_record([F | Fs], Nlen, Col0, Ll, M, Ind, LD, W0) ->
    Nind = Nlen + 1,
    {Col, W} = cind_rec(Nind, Col0, Ll, M, Ind, W0),
    FW = cind_field(F, Col, Ll, M, Ind, last_depth(Fs, LD), W),
    cind_fields_tail(Fs, Col, Col + FW, Ll, M, Ind, LD, W + FW);
cind_record(_, _Nlen, _Col, _Ll, _M, Ind, _LD, _W) ->
    Ind.
%% Walk the remaining record fields: atomic fields (?ATM_FLD) that fit
%% stay on the current line, anything else restarts at column Col0.
cind_fields_tail([{_, Len}=F | Fs], Col0, Col, Ll, M, Ind, LD, W) ->
    LD1 = last_depth(Fs, LD),
    ELen = 1 + Len,
    if
        LD1 =:= 0, ELen + 1 < Ll - Col, W + ELen + 1 =< M, ?ATM_FLD(F);
        LD1 > 0, ELen < Ll - Col - LD1, W + ELen + LD1 =< M, ?ATM_FLD(F) ->
            cind_fields_tail(Fs, Col0, Col + ELen, Ll, M, Ind, LD, W + ELen);
        true ->
            FW = cind_field(F, Col0, Ll, M, Ind, LD1, 0),
            cind_fields_tail(Fs, Col0, Col + FW, Ll, M, Ind, LD, FW)
    end;
cind_fields_tail(_, _Col0, _Col, _Ll, _M, Ind, _LD, _W) ->
    Ind.
%% Width contributed by one record field on the current line; Ll is
%% returned as a "does not fit on one line" sentinel for non-atomic
%% fields (cind/7 on the value is still run for its 'no_good' checks).
cind_field({{field, _N, _NL, _F}, Len}=Fl, Col, Ll, M, _Ind, LD, W)
  when Len < Ll - Col - LD, Len + W + LD =< M ->
    if
        ?ATM_FLD(Fl) ->
            Len;
        true ->
            Ll
    end;
cind_field({{field, _Name, NameL, F}, _Len}, Col0, Ll, M, Ind, LD, W0) ->
    {Col, W} = cind_rec(NameL, Col0, Ll, M, Ind, W0 + NameL),
    cind(F, Col, Ll, M, Ind, LD, W),
    Ll.
%% Choose the column (and carried width) after a record/field header:
%% either break onto a new line indented by Ind (when the header is
%% wider than Ind) or continue right after the header.  Throws
%% 'no_good' when the chosen column does not fit.
cind_rec(RInd, Col0, Ll, M, Ind, W0) ->
    Nl = (Ind > 0) and (RInd > Ind),
    DCol = case Nl of
               true -> Ind;
               false -> RInd
           end,
    Col = Col0 + DCol,
    if
        M + Col =< Ll; Col =< Ll div 2 ->
            W = case Nl of
                    true -> 0;   % fresh line, nothing consumed yet
                    false -> W0
                end,
            {Col, W};
        true ->
            throw(no_good)
    end.
%% Indentation check for list/tuple element sequences; a pure dots
%% marker keeps the candidate Ind.
cind_list({dots, _}, _Col0, _Ll, _M, Ind, _LD, _W) ->
    Ind;
cind_list([E | Es], Col0, Ll, M, Ind, LD, W) ->
    WE = cind_element(E, Col0, Ll, M, Ind, last_depth(Es, LD), W),
    cind_tail(Es, Col0, Col0 + WE, Ll, M, Ind, LD, W + WE).
%% Walk the remaining elements (including a possible improper tail):
%% atomic elements (?ATM) that fit extend the current line, others
%% restart at column Col0; the improper-tail clauses account for one
%% extra separator character (LD + 1).
cind_tail([], _Col0, _Col, _Ll, _M, Ind, _LD, _W) ->
    Ind;
cind_tail([{_, Len}=E | Es], Col0, Col, Ll, M, Ind, LD, W) ->
    LD1 = last_depth(Es, LD),
    ELen = 1 + Len,
    if
        LD1 =:= 0, ELen + 1 < Ll - Col, W + ELen + 1 =< M, ?ATM(E);
        LD1 > 0, ELen < Ll - Col - LD1, W + ELen + LD1 =< M, ?ATM(E) ->
            cind_tail(Es, Col0, Col + ELen, Ll, M, Ind, LD, W + ELen);
        true ->
            WE = cind_element(E, Col0, Ll, M, Ind, LD1, 0),
            cind_tail(Es, Col0, Col0 + WE, Ll, M, Ind, LD, WE)
    end;
cind_tail({dots, _}, _Col0, _Col, _Ll, _M, Ind, _LD, _W) ->
    Ind;
cind_tail({_, Len}=E, _Col0, Col, Ll, M, Ind, LD, W)
  when Len + 1 < Ll - Col - (LD + 1),
       Len + 1 + W + (LD + 1) =< M,
       ?ATM(E) ->
    Ind;
cind_tail(E, _Col0, Col, Ll, M, Ind, LD, _W) ->
    cind(E, Col, Ll, M, Ind, LD + 1, 0).
%% Width of one element on the current line; Ll acts as the "too wide"
%% sentinel for non-atomic representations (whose cind/7 check is still
%% run so 'no_good' can propagate).
cind_element({_, Len}=E, Col, Ll, M, _Ind, LD, W)
  when Len < Ll - Col - LD, Len + W + LD =< M, ?ATM(E) ->
    Len;
cind_element(E, Col, Ll, M, Ind, LD, W) ->
    cind(E, Col, Ll, M, Ind, LD, W),
    Ll.
%% Extra closing-delimiter depth: only the last element of a sequence
%% (empty or improper tail ahead) pays for the delimiters that close
%% after it; with more elements pending the cost is zero.
last_depth(Es, LD) ->
    case Es of
        [_ | _] -> 0;
        _ -> LD + 1
    end.
%% Apply F to each alternative until one completes without throwing;
%% returns V when every attempt throws.  (Catches throws only, e.g.
%% the 'no_good' thrown by the cind helpers.)
while_fail([], _F, V) ->
    V;
while_fail([A | As], F, V) ->
    try F(A) catch _ -> while_fail(As, F, V) end.
%% Indentation as iodata: indent/1 builds a fresh run of spaces,
%% indent/2 prepends N spaces onto an existing tail.
%%
%% FIX: the clause heads of the two indent/2 special cases were missing
%% (extraction damage); restored per the corresponding OTP
%% io_lib_pretty code.
indent(N) when is_integer(N), N > 0 ->
    chars($\s, N-1).

indent(1, Ind) -> % Optimization of common case
    [$\s | Ind];
indent(4, Ind) -> % Optimization of common case
    S2 = [$\s, $\s],
    [S2, S2 | Ind];
indent(N, Ind) when is_integer(N), N > 0 ->
    [chars($\s, N) | Ind].
%% Build a deep list of N copies of C using only O(log N) distinct
%% cons cells by sharing the halved sublist ([S | S]); small N are
%% special-cased.  (N = 1 falls through to the last clause: [C | []].)
chars(_C, 0) ->
    [];
chars(C, 2) ->
    [C, C];
chars(C, 3) ->
    [C, C, C];
chars(C, N) when (N band 1) =:= 0 ->
    S = chars(C, N bsr 1),
    [S | S];
chars(C, N) ->
    S = chars(C, N bsr 1),
    [C, S | S].
%% =====================================================================
%% (file boundary) The following is edoc.erl, from richcarl/edoc.
%% =====================================================================
%% Licensed under the Apache License, Version 2.0 (the "License"); you may
%% not use this file except in compliance with the License. You may obtain
%% a copy of the License at <http://www.apache.org/licenses/LICENSE-2.0>
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% Alternatively, you may use this file under the terms of the GNU Lesser
%% General Public License (the "LGPL") as published by the Free Software
%% Foundation; either version 2.1, or (at your option) any later version.
%% If you wish to allow use of your version of this file only under the
%% terms of the LGPL, you should delete the provisions above and replace
%% them with the notice and other provisions required by the LGPL; see
%% <http://www.gnu.org/licenses/>. If you do not delete the provisions
%% above, a recipient may use your version of this file under the terms of
%% either the Apache License or the LGPL.
%%
%% @copyright 2001-2007 Richard Carlsson
%% @author Richard Carlsson <carlsson.richard@gmail.com>
%% @version {@version}
%% @end
%% =====================================================================
%% TODO: check weirdness in name generation for f(TypeName, ...) -> ...
%% TODO: option for ignoring functions matching some pattern ('..._test_'/0)
%% TODO: @private_type tag, opaque unless generating private docs?
%% TODO: document the record type syntax
%% TODO: some 'skip' option for ignoring particular modules/packages?
%% TODO: intermediate-level packages: document even if no local sources.
%% TODO: multiline comment support (needs modified comment representation)
%% TODO: config-file for default settings
%% TODO: config: locations of all local docdirs; generate local doc-index page
%% TODO: config: URL:s of offline packages/apps
%% TODO: config: default stylesheet
%% TODO: config: default header/footer, etc.
%% TODO: offline linkage
%% TODO: including source code, explicitly and/or automatically
%% @doc EDoc - the Erlang program documentation generator.
%%
%% This module provides the main user interface to EDoc.
%% <ul>
%% <li><a href="overview-summary.html">EDoc User Manual</a></li>
%% <li><a href="overview-summary.html#Running_EDoc">Running EDoc</a></li>
%% </ul>
-module(edoc).
-export([packages/1, packages/2, files/1, files/2,
application/1, application/2, application/3,
toc/1, toc/2, toc/3,
run/3,
file/1, file/2,
read/1, read/2,
layout/1, layout/2,
get_doc/1, get_doc/2, get_doc/3,
read_comments/1, read_comments/2,
read_source/1, read_source/2]).
-compile({no_auto_import,[error/1]}).
-include("edoc.hrl").
%% @spec (Name::filename()) -> ok
%% @equiv file(Name, [])
%% @deprecated See {@link file/2} for details.
%% Single-argument convenience wrapper; see file/2.
file(Name) ->
    file(Name, []).
%% @spec file(filename(), proplist()) -> ok
%%
%% @type filename() = //kernel/file:filename()
%% @type proplist() = [term()]
%%
%% @deprecated This is part of the old interface to EDoc and is mainly
%% kept for backwards compatibility. The preferred way of generating
%% documentation is through one of the functions {@link application/2},
%% {@link packages/2} and {@link files/2}.
%%
%% @doc Reads a source code file and outputs formatted documentation to
%% a corresponding file.
%%
%% Options:
%% <dl>
%% <dt>{@type {dir, filename()@}}
%% </dt>
%% <dd>Specifies the output directory for the created file. (By
%% default, the output is written to the directory of the source
%% file.)
%% </dd>
%% <dt>{@type {source_suffix, string()@}}
%% </dt>
%% <dd>Specifies the expected suffix of the input file. The default
%% value is `".erl"'.
%% </dd>
%% <dt>{@type {file_suffix, string()@}}
%% </dt>
%% <dd>Specifies the suffix for the created file. The default value is
%% `".html"'.
%% </dd>
%% </dl>
%%
%% See {@link get_doc/2} and {@link layout/2} for further
%% options.
%%
%% For running EDoc from a Makefile or similar, see
%% {@link edoc_run:file/1}.
%%
%% @see read/2
%% NEW-OPTIONS: source_suffix, file_suffix, dir
%% INHERIT-OPTIONS: read/2
%% Generate documentation text for a single source file and write it to
%% "<basename><file_suffix>" in the 'dir' option's directory (default:
%% the source file's own directory).  See the doc comment above.
file(Name, Options) ->
    SrcSuffix = proplists:get_value(source_suffix, Options,
                                    ?DEFAULT_SOURCE_SUFFIX),
    OutSuffix = proplists:get_value(file_suffix, Options,
                                    ?DEFAULT_FILE_SUFFIX),
    OutDir = proplists:get_value(dir, Options, filename:dirname(Name)),
    Base = filename:basename(Name, SrcSuffix),
    Text = read(Name, Options),
    Encoding = [{encoding, edoc_lib:read_encoding(Name, [])}],
    edoc_lib:write_file(Text, OutDir, Base ++ OutSuffix, '', Encoding).
%% TODO: better documentation of files/1/2, packages/1/2, application/1/2/3
%% @spec (Files::[filename() | {package(), [filename()]}]) -> ok
%% @equiv packages(Packages, [])
%% Convenience wrapper; see files/2.
files(Files) ->
    files(Files, []).
%% @spec (Files::[filename() | {package(), [filename()]}],
%% Options::proplist()) -> ok
%% @doc Runs EDoc on a given set of source files. See {@link run/3} for
%% details, including options.
%% @equiv run([], Files, Options)
%% Document an explicit set of files (no packages); see run/3.
files(Files, Options) ->
    run([], Files, Options).
%% @spec (Packages::[package()]) -> ok
%% @equiv packages(Packages, [])
%% Convenience wrapper; see packages/2.
packages(Packages) ->
    packages(Packages, []).
%% @spec (Packages::[package()], Options::proplist()) -> ok
%% @type package() = atom() | string()
%%
%% @doc Runs EDoc on a set of packages. The `source_path' option is used
%% to locate the files; see {@link run/3} for details, including
%% options. This function automatically appends the current directory to
%% the source path.
%%
%% @equiv run(Packages, [], Options)
%% Document a set of packages; the current directory is always appended
%% to the source path (see the doc comment above).
packages(Packages, Options) ->
    run(Packages, [], Options ++ [{source_path, [?CURRENT_DIR]}]).
%% @spec (Application::atom()) -> ok
%% @equiv application(Application, [])
%% Convenience wrapper; see application/2.
application(App) ->
    application(App, []).
%% @spec (Application::atom(), Options::proplist()) -> ok
%% @doc Run EDoc on an application in its default app-directory. See
%% {@link application/3} for details.
%% @see application/1
%% Resolve the application's lib directory via code:lib_dir/1; exits
%% with 'error' after reporting when the application cannot be found.
application(App, Options) when is_atom(App) ->
    case code:lib_dir(App) of
        Dir when is_list(Dir) ->
            application(App, Dir, Options);
        _ ->
            edoc_report:report("cannot find application directory for '~s'.",
                               [App]),
            exit(error)
    end.
%% @spec (Application::atom(), Dir::filename(), Options::proplist())
%% -> ok
%% @doc Run EDoc on an application located in the specified directory.
%% Tries to automatically set up good defaults. Unless the user
%% specifies otherwise:
%% <ul>
%% <li>The `doc' subdirectory will be used as the target directory, if
%% it exists; otherwise the application directory is used.
%% </li>
%% <li>The source code is assumed to be located in the `src'
%% subdirectory, if it exists, or otherwise in the application
%% directory itself.
%% </li>
%% <li>The {@link run/3. `subpackages'} option is turned on. All found
%% source files will be processed.
%% </li>
%% <li>The `include' subdirectory is automatically added to the
%% include path. (Only important if {@link read_source/2.
%% preprocessing} is turned on.)
%% </li>
%% </ul>
%%
%% See {@link run/3} for details, including options.
%%
%% @see application/2
%% Set up conventional application defaults (src/, doc/, include/,
%% overview file, title, subpackages) before delegating to run/3.
%% User-supplied options come first in Opts, so they take precedence
%% over these defaults.
application(App, Dir, Options) when is_atom(App) ->
    Src = edoc_lib:try_subdir(Dir, ?SOURCE_DIR),
    Overview = filename:join(edoc_lib:try_subdir(Dir, ?EDOC_DIR),
                             ?OVERVIEW_FILE),
    Opts = Options ++ [{source_path, [Src]},
                       subpackages,
                       {title, io_lib:fwrite("The ~s application", [App])},
                       {overview, Overview},
                       {dir, filename:join(Dir, ?EDOC_DIR)},
                       {includes, [filename:join(Dir, "include")]}],
    Opts1 = set_app_default(App, Dir, Opts),
    %% Recursively document all subpackages of '' - i.e., everything.
    run([''], [], [{application, App} | Opts1]).
%% Try to set up a default application base URI in a smart way if the
%% user has not specified it explicitly.
%% Provide an 'app_default' option when the user did not: if the
%% application directory is named after the application itself, its
%% parent directory is used as the base; otherwise fall back to
%% ?APP_DEFAULT.
set_app_default(App, Dir0, Opts) ->
    case proplists:get_value(app_default, Opts) of
        undefined ->
            Name = atom_to_list(App),
            AbsDir = edoc_lib:simplify_path(filename:absname(Dir0)),
            Default = case filename:basename(AbsDir) of
                          Name ->
                              filename:dirname(AbsDir);
                          _ ->
                              ?APP_DEFAULT
                      end,
            [{app_default, Default} | Opts];
        _Defined ->
            Opts
    end.
%% If no source files are found for a (specified) package, no package
%% documentation will be generated either (even if there is a
%% package-documentation file). This is the way it should be. For
%% specified files, use empty package (unless otherwise specified). The
%% assumed package is always used for creating the output. If the actual
%% module or package of the source differs from the assumption gathered
%% from the path and file name, a warning should be issued (since links
%% are likely to be incorrect).
%% Options that are on by default (negatable via opt_negations/0).
opt_defaults() ->
    [packages].
%% Mapping from 'no_*' option aliases to their {Option, false} form,
%% consumed by proplists:substitute_negations/2 in expand_opts/1.
opt_negations() ->
    [{no_preprocess, preprocess},
     {no_subpackages, subpackages},
     {no_report_missing_types, report_missing_types},
     {no_packages, packages}].
%% @spec run(Packages::[package()],
%% Files::[filename() | {package(), [filename()]}],
%% Options::proplist()) -> ok
%% @doc Runs EDoc on a given set of source files and/or packages. Note
%% that the doclet plugin module has its own particular options; see the
%% `doclet' option below.
%%
%% Also see {@link layout/2} for layout-related options, and
%% {@link get_doc/2} for options related to reading source
%% files.
%%
%% Options:
%% <dl>
%% <dt>{@type {app_default, string()@}}
%% </dt>
%% <dd>Specifies the default base URI for unknown applications.
%% </dd>
%% <dt>{@type {application, App::atom()@}}
%% </dt>
%% <dd>Specifies that the generated documentation describes the
%% application `App'. This mainly affects generated references.
%% </dd>
%% <dt>{@type {dir, filename()@}}
%% </dt>
%% <dd>Specifies the target directory for the generated documentation.
%% </dd>
%% <dt>{@type {doc_path, [string()]@}}
%% </dt>
%% <dd>Specifies a list of URI:s pointing to directories that contain
%% EDoc-generated documentation. URI without a `scheme://' part are
%% taken as relative to `file://'. (Note that such paths must use
%% `/' as separator, regardless of the host operating system.)
%% </dd>
%% <dt>{@type {doclet, Module::atom()@}}
%% </dt>
%% <dd>Specifies a callback module to be used for creating the
%% documentation. The module must export a function `run(Cmd, Ctxt)'.
%% The default doclet module is {@link edoc_doclet}; see {@link
%% edoc_doclet:run/2} for doclet-specific options.
%% </dd>
%% <dt>{@type {exclude_packages, [package()]@}}
%% </dt>
%% <dd>Lists packages to be excluded from the documentation. Typically
%% used in conjunction with the `subpackages' option.
%% </dd>
%% <dt>{@type {file_suffix, string()@}}
%% </dt>
%% <dd>Specifies the suffix used for output files. The default value is
%% `".html"'. Note that this also affects generated references.
%% </dd>
%% <dt>{@type {new, boolean()@}}
%% </dt>
%% <dd>If the value is `true', any existing `edoc-info' file in the
%% target directory will be ignored and overwritten. The default
%% value is `false'.
%% </dd>
%% <dt>{@type {packages, boolean()@}}
%% </dt>
%% <dd>If the value is `true', it it assumed that packages (module
%% namespaces) are being used, and that the source code directory
%% structure reflects this. The default value is `true'. (Usually,
%% this does the right thing even if all the modules belong to the
%% top-level "empty" package.) `no_packages' is an alias for
%% `{packages, false}'. See the `subpackages' option below for
%% further details.
%%
%% If the source code is organized in a hierarchy of
%% subdirectories although it does not use packages, use
%% `no_packages' together with the recursive-search `subpackages'
%% option (on by default) to automatically generate documentation
%% for all the modules.
%% </dd>
%% <dt>{@type {source_path, [filename()]@}}
%% </dt>
%% <dd>Specifies a list of file system paths used to locate the source
%% code for packages.
%% </dd>
%% <dt>{@type {source_suffix, string()@}}
%% </dt>
%% <dd>Specifies the expected suffix of input files. The default
%% value is `".erl"'.
%% </dd>
%% <dt>{@type {subpackages, boolean()@}}
%% </dt>
%% <dd>If the value is `true', all subpackages of specified packages
%% will also be included in the documentation. The default value is
%% `false'. `no_subpackages' is an alias for `{subpackages,
%% false}'. See also the `exclude_packages' option.
%%
%% Subpackage source files are found by recursively searching
%% for source code files in subdirectories of the known source code
%% root directories. (Also see the `source_path' option.) Directory
%% names must begin with a lowercase letter and contain only
%% alphanumeric characters and underscore, or they will be ignored.
%% (For example, a subdirectory named `test-files' will not be
%% searched.)
%% </dd>
%% </dl>
%%
%% @see files/2
%% @see packages/2
%% @see application/2
%% NEW-OPTIONS: source_path, application
%% INHERIT-OPTIONS: init_context/1
%% INHERIT-OPTIONS: expand_sources/2
%% INHERIT-OPTIONS: target_dir_info/5
%% INHERIT-OPTIONS: edoc_lib:find_sources/3
%% INHERIT-OPTIONS: edoc_lib:run_doclet/2
%% INHERIT-OPTIONS: edoc_lib:get_doc_env/4
%% Main entry point: expand the options, gather sources from packages
%% and explicit files, de-duplicate packages/modules, merge in any
%% previously recorded target-directory info, build the doclet
%% environment, and hand a #doclet_gen{} command to the doclet.
run(Packages, Files, Opts0) ->
    Opts = expand_opts(Opts0),
    Ctxt = init_context(Opts),
    Dir = Ctxt#context.dir,
    Path = proplists:append_values(source_path, Opts),
    Ss = sources(Path, Packages, Opts),
    {Ss1, Ms} = expand_sources(expand_files(Files) ++ Ss, Opts),
    Ps = [P || {_, P, _, _} <- Ss1],
    App = proplists:get_value(application, Opts, ?NO_APP),
    {App1, Ps1, Ms1} = target_dir_info(Dir, App, Ps, Ms, Opts),
    %% The "empty package" is never included in the list of packages.
    Ps2 = edoc_lib:unique(lists:sort(Ps1)) -- [''],
    Ms2 = edoc_lib:unique(lists:sort(Ms1)),
    Fs = package_files(Path, Ps2),
    Env = edoc_lib:get_doc_env(App1, Ps2, Ms2, Opts),
    Ctxt1 = Ctxt#context{env = Env},
    Cmd = #doclet_gen{sources = Ss1,
                      app = App1,
                      packages = Ps2,
                      modules = Ms2,
                      filemap = Fs
                     },
    F = fun (M) ->
                M:run(Cmd, Ctxt1)
        end,
    edoc_lib:run_doclet(F, Opts).
%% Normalize user options: append the defaults, then rewrite the
%% 'no_*' aliases into their {Option, false} form.
expand_opts(UserOpts) ->
    AllOpts = UserOpts ++ opt_defaults(),
    proplists:substitute_negations(opt_negations(), AllOpts).
%% NEW-OPTIONS: dir
%% DEFER-OPTIONS: run/3
%% Build the initial #context{} record from the expanded options.
init_context(Opts) ->
    #context{dir = proplists:get_value(dir, Opts, ?CURRENT_DIR),
             opts = Opts
            }.
%% INHERIT-OPTIONS: edoc_lib:find_sources/3
%% Collect source files for all packages.  (Note: each package's
%% results are prepended, so the accumulated list is in reverse
%% package order.)
sources(Path, Packages, Opts) ->
    lists:foldl(fun (P, Xs) ->
                        edoc_lib:find_sources(Path, P, Opts) ++ Xs
                end,
                [], Packages).
%% Return a lookup fun mapping each package to its package
%% documentation file ("" when none was found); the dict is built
%% eagerly, once, before the fun is returned.
package_files(Path, Packages) ->
    Name = ?PACKAGE_FILE, % this is hard-coded for now
    D = lists:foldl(fun (P, D) ->
                            F = edoc_lib:find_file(Path, P, Name),
                            dict:store(P, F, D)
                    end,
                    dict:new(), Packages),
    fun (P) ->
            case dict:find(P, D) of
                {ok, F} -> F;
                error -> ""
            end
    end.
%% Expand user-specified sets of files.
%% Normalize user-specified files into {Package, BaseName, Dir}
%% triples; bare file names get the empty package ''.
expand_files(Specs) ->
    lists:flatmap(fun expand_file_spec/1, Specs).

%% One spec: either {Package, [File]} or a bare file name.
expand_file_spec({P, Files}) ->
    [{P, filename:basename(F), filename:dirname(F)} || F <- Files];
expand_file_spec(F) ->
    [{'', filename:basename(F), filename:dirname(F)}].
%% Create the (assumed) full module names. Keep only the first source
%% for each module, but preserve the order of the list.
%% NEW-OPTIONS: source_suffix, packages
%% DEFER-OPTIONS: run/3
%% Drop package prefixes when the 'packages' option is off, then
%% derive module names from the file names (see expand_sources/5).
expand_sources(Ss, Opts) ->
    Suffix = proplists:get_value(source_suffix, Opts,
                                 ?DEFAULT_SOURCE_SUFFIX),
    Ss1 = case proplists:get_bool(packages, Opts) of
              true -> Ss;
              false -> [{'',F,D} || {_P,F,D} <- Ss]
          end,
    expand_sources(Ss1, Suffix, sets:new(), [], []).
%% Accumulate {Module, Package, File, Dir} entries and module names;
%% only the first source seen for a module is kept, and list order is
%% preserved (via the final reversals).
%% NOTE(review): only the empty package '' is matched here; entries
%% with a non-'' package would not match any clause -- confirm how the
%% 'packages' option interacts with this.
expand_sources([{'', F, D} | Fs], Suffix, S, As, Ms) ->
    M = list_to_atom(filename:rootname(F, Suffix)),
    case sets:is_element(M, S) of
        true ->
            expand_sources(Fs, Suffix, S, As, Ms);
        false ->
            S1 = sets:add_element(M, S),
            expand_sources(Fs, Suffix, S1, [{M, '', F, D} | As],
                           [M | Ms])
    end;
expand_sources([], _Suffix, _S, As, Ms) ->
    {lists:reverse(As), lists:reverse(Ms)}.
%% NEW-OPTIONS: new
%% Unless the 'new' option is set, merge in the application, package
%% and module info recorded in the target directory's existing
%% 'edoc-info' file; an explicitly given App always wins.
target_dir_info(Dir, App, Ps, Ms, Opts) ->
    case proplists:get_bool(new, Opts) of
        true ->
            {App, Ps, Ms};
        false ->
            {App1, Ps1, Ms1} = edoc_lib:read_info_file(Dir),
            {if App == ?NO_APP -> App1;
                true -> App
             end,
             Ps ++ Ps1,
             Ms ++ Ms1}
    end.
%% @hidden Not official yet
%% Convenience wrapper; see toc/2.
toc(Dir) ->
    toc(Dir, []).
%% @equiv toc(Dir, Paths, [])
%% @hidden Not official yet
%% NEW-OPTIONS: doc_path
%% Collect documentation directories from the 'doc_path' option plus
%% the locally installed doc dirs, then build the table of contents.
toc(Dir, Opts) ->
    Paths = proplists:append_values(doc_path, Opts)
        ++ edoc_lib:find_doc_dirs(),
    toc(Dir, Paths, Opts).
%% @doc Create a meta-level table of contents.
%% @hidden Not official yet
%% INHERIT-OPTIONS: init_context/1
%% INHERIT-OPTIONS: edoc_lib:run_doclet/2
%% INHERIT-OPTIONS: edoc_lib:get_doc_env/4
%% Build a meta-level table of contents by handing a #doclet_toc{}
%% command to the doclet.
toc(Dir, Paths, Opts0) ->
    Opts = expand_opts(Opts0 ++ [{dir, Dir}]),
    Ctxt = init_context(Opts),
    Env = edoc_lib:get_doc_env('', [], [], Opts),
    Ctxt1 = Ctxt#context{env = Env},
    F = fun (M) ->
                M:run(#doclet_toc{paths=Paths}, Ctxt1)
        end,
    edoc_lib:run_doclet(F, Opts).
%% @spec read(File::filename()) -> string()
%% @equiv read(File, [])
%% Convenience wrapper; see read/2.
read(File) ->
    read(File, []).
%% @spec read(File::filename(), Options::proplist()) -> string()
%%
%% @doc Reads and processes a source file and returns the resulting
%% EDoc-text as a string. See {@link get_doc/2} and {@link layout/2} for
%% options.
%%
%% @see file/2
%% INHERIT-OPTIONS: get_doc/2, layout/2
%% Extract documentation data from the file and render it with the
%% configured layout.
read(File, Opts) ->
    {_ModuleName, Doc} = get_doc(File, Opts),
    layout(Doc, Opts).
%% @spec (Doc::edoc_module()) -> string()
%% @equiv layout(Doc, [])
%% Convenience wrapper; see layout/2.
layout(Doc) ->
    layout(Doc, []).
%% @spec (Doc::edoc_module(), Options::proplist()) -> string()
%%
%% @doc Transforms EDoc module documentation data to text. The default
%% layout creates an HTML document.
%%
%% Options:
%% <dl>
%% <dt>{@type {layout, Module::atom()@}}
%% </dt>
%% <dd>Specifies a callback module to be used for formatting. The
%% module must export a function `module(Doc, Options)'. The
%% default callback module is {@link edoc_layout}; see {@link
%% edoc_layout:module/2} for layout-specific options.
%% </dd>
%% </dl>
%%
%% @see layout/1
%% @see run/3
%% @see read/2
%% @see file/2
%% INHERIT-OPTIONS: edoc_lib:run_layout/2
%% Delegate rendering to the layout callback module (edoc_layout by
%% default); see the doc comment above for the options.
layout(Doc, Opts) ->
    F = fun (M) ->
                M:module(Doc, Opts)
        end,
    edoc_lib:run_layout(F, Opts).
%% @spec (File) -> [comment()]
%% @type comment() = {Line, Column, Indentation, Text}
%% where
%% Line = integer(),
%% Column = integer(),
%% Indentation = integer(),
%% Text = [string()]
%% @equiv read_comments(File, [])
%% Convenience wrapper; see read_comments/2.
read_comments(File) ->
    read_comments(File, []).
%% @spec read_comments(File::filename(), Options::proplist()) ->
%% [comment()]
%%
%% @doc Extracts comments from an Erlang source code file. See the
%% module {@link //syntax_tools/erl_comment_scan} for details on the
%% representation of comments. Currently, no options are avaliable.
%% Currently just delegates to erl_comment_scan; options are unused.
read_comments(File, _Opts) ->
    erl_comment_scan:file(File).
%% @spec (File) -> [syntaxTree()]
%% @equiv read_source(File, [])
%% Convenience wrapper; see read_source/2.
read_source(Name) ->
    read_source(Name, []).
%% @spec read_source(File::filename(), Options::proplist()) ->
%% [syntaxTree()]
%%
%% @type syntaxTree() = //syntax_tools/erl_syntax:syntaxTree()
%%
%% @doc Reads an Erlang source file and returns the list of "source code
%% form" syntax trees.
%%
%% Options:
%% <dl>
%% <dt>{@type {preprocess, boolean()@}}
%% </dt>
%% <dd>If the value is `true', the source file will be read via the
%% Erlang preprocessor (`epp'). The default value is `false'.
%% `no_preprocess' is an alias for `{preprocess, false}'.
%%
%% Normally, preprocessing is not necessary for EDoc to work, but
%% if a file contains too exotic definitions or uses of macros, it
%% will not be possible to read it without preprocessing. <em>Note:
%% comments in included files will not be available to EDoc, even
%% with this option enabled.</em>
%% </dd>
%% <dt>{@type {includes, Path::[string()]@}}
%% </dt>
%% <dd>Specifies a list of directory names to be searched for include
%% files, if the `preprocess' option is turned on. Also used with
%% the `@headerfile' tag. The default value is the empty list. The
%% directory of the source file is always automatically appended to
%% the search path.
%% </dd>
%% <dt>{@type {macros, [{atom(), term()@}]@}}
%% </dt>
%% <dd>Specifies a list of pre-defined Erlang preprocessor (`epp')
%% macro definitions, used if the `preprocess' option is turned on.
%% The default value is the empty list.</dd>
%% </dl>
%% <dt>{@type {report_missing_types, boolean()@}}
%% </dt>
%% <dd>If the value is `true', warnings are issued for missing types.
%% The default value is `false'.
%% `no_report_missing_types' is an alias for
%% `{report_missing_types, false}'.
%% </dd>
%%
%% @see get_doc/2
%% @see //syntax_tools/erl_syntax
%% NEW-OPTIONS: [no_]preprocess (preprocess -> includes, macros)
%% Read and parse a source file, exiting with a report when the file
%% cannot be read; parse errors within the forms are handled later by
%% check_forms/2.
read_source(Name, Opts0) ->
    Opts = expand_opts(Opts0),
    case read_source_1(Name, Opts) of
        {ok, Forms} ->
            check_forms(Forms, Name),
            Forms;
        {error, R} ->
            edoc_report:error({"error reading file '~ts'.",
                               [edoc_lib:filename(Name)]}),
            exit({error, R})
    end.
%% Choose between the epp preprocessor and the more tolerant
%% epp_dodger parser, depending on the 'preprocess' option.
read_source_1(Name, Opts) ->
    case proplists:get_bool(preprocess, Opts) of
        true ->
            read_source_2(Name, Opts);
        false ->
            epp_dodger:quick_parse_file(Name, Opts ++ [{no_fail, false}])
    end.
%% Read the source through epp with the configured include path and
%% predefined macros; the source file's own directory is always
%% searched for includes.
%%
%% FIX: the commented-out "epp:parse_file(...)" line had lost its
%% comment prefix (extraction damage) and was a syntax error; restored
%% as a comment.
read_source_2(Name, Opts) ->
    Includes = proplists:append_values(includes, Opts)
        ++ [filename:dirname(Name)],
    Macros = proplists:append_values(macros, Opts),
    %% epp:parse_file(Name, Includes, Macros).
    parse_file(Name, Includes, Macros).
%% The code below has been copied from epp.erl.
%%
%% Copy the line of the last token to the last token that will be
%% part of the parse tree.
%%
%% The last line is used in edoc_extract:find_type_docs() to determine
%% if a type declaration is followed by a comment.
%% <example>
%% -type t() :: [
%% {tag, integer()}
%% ].
%% %% Protocol options.
%% </example>
%% The line of the dot token will be copied to the integer token.
%% Try parsing as UTF-8 first, falling back to Latin-1 when invalid
%% Unicode is reported (see find_invalid_unicode/1).
parse_file(Name, Includes, Macros) ->
    case parse_file(utf8, Name, Includes, Macros) of
        invalid_unicode ->
            parse_file(latin1, Name, Includes, Macros);
        Ret ->
            Ret
    end.
%% Open an epp server with the given default encoding and read all
%% forms; reports 'invalid_unicode' so the caller can retry with
%% Latin-1 (unless the file's declared encoding was already utf8).
parse_file(DefEncoding, Name, Includes, Macros) ->
    Options = [{name, Name},
               {includes, Includes},
               {macros, Macros},
               {default_encoding, DefEncoding}],
    case epp:open([extra | Options]) of
        {ok, Epp, Extra} ->
            try parse_file(Epp) of
                Forms ->
                    Encoding = proplists:get_value(encoding, Extra),
                    case find_invalid_unicode(Forms) of
                        invalid_unicode when Encoding =/= utf8 ->
                            invalid_unicode;
                        _ ->
                            {ok, Forms}
                    end
            after _ = epp:close(Epp)
            end;
        Error ->
            Error
    end.
%% Scan the form list for the io-server's invalid-unicode error marker;
%% returns 'invalid_unicode' when found, 'none' otherwise.
find_invalid_unicode([{error, {_Line, file_io_server, invalid_unicode}} | _]) ->
    invalid_unicode;
find_invalid_unicode([_Form | Rest]) ->
    find_invalid_unicode(Rest);
find_invalid_unicode([]) ->
    none.
%% Read all forms from the epp server.  Record attributes with typed
%% fields are normalized: the record is kept with plain fields and an
%% extra pseudo type attribute {{record, Name}, Fields, []} is emitted
%% so the type information is preserved for EDoc.
parse_file(Epp) ->
    case scan_and_parse(Epp) of
        {ok, Form} ->
            case Form of
                {attribute,La,record,{Record, Fields}} ->
                    case epp:normalize_typed_record_fields(Fields) of
                        {typed, NewFields} ->
                            [{attribute, La, record, {Record, NewFields}},
                             {attribute, La, type,
                              {{record, Record}, Fields, []}}
                             | parse_file(Epp)];
                        not_typed ->
                            [Form | parse_file(Epp)]
                    end;
                _ ->
                    [Form | parse_file(Epp)]
            end;
        {error, E} ->
            [{error, E} | parse_file(Epp)];
        {eof, Location} ->
            [{eof, Location}]
    end.
%% Scan one form from epp, normalize the last token's line (see
%% fix_last_line/1 and the comment block above), then parse it.
scan_and_parse(Epp) ->
    case epp:scan_erl_form(Epp) of
        {ok, Toks0} ->
            Toks = fix_last_line(Toks0),
            case erl_parse:parse_form(Toks) of
                {ok, Form} ->
                    {ok, Form};
                Else ->
                    Else
            end;
        Else ->
            Else
    end.
%% Copy the line number of the very last token (the dot) onto the last
%% symbol-carrying token; see the comment block above for why.
fix_last_line(Toks0) ->
    Toks1 = lists:reverse(Toks0),
    {line, LastLine} = erl_scan:token_info(hd(Toks1), line),
    fll(Toks1, LastLine, []).
%% Walk the reversed token list until the first 3-tuple (symbol) token
%% and overwrite its line attribute with LastLine; the remaining tokens
%% are restored to their original order.
fll([{Category, Attributes0, Symbol} | L], LastLine, Ts) ->
    F = fun(_OldLine) -> LastLine end,
    Attributes = erl_scan:set_attribute(line, Attributes0, F),
    lists:reverse(L, [{Category, Attributes, Symbol} | Ts]);
fll([T | L], LastLine, Ts) ->
    fll(L, LastLine, [T | Ts]);
fll(L, _LastLine, Ts) ->
    lists:reverse(L, Ts).
%% Abort (exit 'error') with a report if the form list contains any
%% error markers from parsing.
check_forms(Fs, Name) ->
    Fun = fun (F) ->
                  case erl_syntax:type(F) of
                      error_marker ->
                          case erl_syntax:error_marker_info(F) of
                              {L, M, D} ->
                                  edoc_report:error(L, Name, {format_error, M, D});
                              Other ->
                                  edoc_report:report(Name, "unknown error in "
                                                     "source code: ~w.", [Other])
                          end,
                          exit(error);
                      _ ->
                          ok
                  end
          end,
    lists:foreach(Fun, Fs).
%% @spec (File::filename()) -> {ModuleName, edoc_module()}
%% @equiv get_doc(File, [])
%% Convenience wrapper; see get_doc/2.
get_doc(File) ->
    get_doc(File, []).
%% @spec (File::filename(), Options::proplist()) ->
%% {ModuleName, edoc_module()}
%% ModuleName = atom()
%%
%% @type edoc_module(). The EDoc documentation data for a module,
%% expressed as an XML document in {@link //xmerl. XMerL} format. See
%% the file <a href="../priv/edoc.dtd">`edoc.dtd'</a> for details.
%%
%% @doc Reads a source code file and extracts EDoc documentation data.
%% Note that without an environment parameter (see {@link get_doc/3}),
%% hypertext links may not be correct.
%%
%% Options:
%% <dl>
%% <dt>{@type {def, Macros@}}
%% </dt>
%% <dd><ul>
%% <li>`Macros' = {@type Macro | [Macro]}</li>
%% <li>`Macro' = {@type {Name::atom(), Text::string()@}}</li>
%% </ul>
Specifies a set of EDoc macro definitions . See
%% <a href="overview-summary.html#Macro_expansion">Inline macro expansion</a>
%% for details.
%% </dd>
%% <dt>{@type {hidden, boolean()@}}
%% </dt>
%% <dd>If the value is `true', documentation of hidden functions will
%% also be included. The default value is `false'.
%% </dd>
%% <dt>{@type {private, boolean()@}}
%% </dt>
%% <dd>If the value is `true', documentation of private functions will
%% also be included. The default value is `false'.
%% </dd>
%% <dt>{@type {todo, boolean()@}}
%% </dt>
%% <dd>If the value is `true', To-Do notes written using `@todo' or
%% `@TODO' tags will be included in the documentation. The default
%% value is `false'.
%% </dd>
%% </dl>
%%
%% See {@link read_source/2}, {@link read_comments/2} and {@link
%% edoc_lib:get_doc_env/4} for further options.
%%
%% @see get_doc/3
%% @see run/3
%% @see edoc_extract:source/5
%% @see read/2
%% @see layout/2
%% INHERIT-OPTIONS: get_doc/3
%% INHERIT-OPTIONS: edoc_lib:get_doc_env/4
%% Build a default documentation environment from Opts and delegate
%% to get_doc/3.
get_doc(File, Opts) ->
    Env = edoc_lib:get_doc_env(Opts),
    get_doc(File, Env, Opts).
%% @spec (File::filename(), Env::edoc_lib:edoc_env(),
%%        Options::proplist()) -> {ModuleName, edoc_module()}
%%   ModuleName = atom()
%%
%% @doc Like {@link get_doc/2}, but for a given environment
%% parameter. `Env' is an environment created by {@link
%% edoc_lib:get_doc_env/4}.
%% INHERIT-OPTIONS: read_source/2, read_comments/2, edoc_extract:source/5
%% DEFER-OPTIONS: get_doc/2
%% Extract EDoc data from File using a caller-supplied environment.
get_doc(File, Env, Opts) ->
    edoc_extract:source(File, Env, Opts).
| null | https://raw.githubusercontent.com/richcarl/edoc/1d816832ea6d4a8666c059aa9b84298a26a3265a/src/edoc.erl | erlang | =====================================================================
not use this file except in compliance with the License. You may obtain
a copy of the License at <-2.0>
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Alternatively, you may use this file under the terms of the GNU Lesser
If you wish to allow use of your version of this file only under the
terms of the LGPL, you should delete the provisions above and replace
them with the notice and other provisions required by the LGPL; see
</>. If you do not delete the provisions
above, a recipient may use your version of this file under the terms of
@version {@version}
@end
=====================================================================
TODO: option for ignoring functions matching some pattern ('..._test_'/0)
TODO: @private_type tag, opaque unless generating private docs?
TODO: some 'skip' option for ignoring particular modules/packages?
TODO: intermediate-level packages: document even if no local sources.
TODO: config-file for default settings
TODO: config: URL:s of offline packages/apps
TODO: config: default stylesheet
TODO: config: default header/footer, etc.
TODO: offline linkage
TODO: including source code, explicitly and/or automatically
<ul>
<li><a href="overview-summary.html">EDoc User Manual</a></li>
<li><a href="overview-summary.html#Running_EDoc">Running EDoc</a></li>
</ul>
@spec (Name::filename()) -> ok
@equiv file(Name, [])
@deprecated See {@link file/2} for details.
@type filename() = //kernel/file:filename()
@type proplist() = [term()]
kept for backwards compatibility. The preferred way of generating
{@link packages/2} and {@link files/2}.
@doc Reads a source code file and outputs formatted documentation to
a corresponding file.
Options:
<dl>
</dt>
<dd>Specifies the output directory for the created file. (By
default, the output is written to the directory of the source
file.)
</dd>
</dt>
<dd>Specifies the expected suffix of the input file. The default
value is `".erl"'.
</dd>
<dt>{@type {file_suffix, string()@}}
</dt>
<dd>Specifies the suffix for the created file. The default value is
`".html"'.
</dd>
</dl>
See {@link get_doc/2} and {@link layout/2} for further
options.
{@link edoc_run:file/1}.
@see read/2
INHERIT-OPTIONS: read/2
@spec (Files::[filename() | {package(), [filename()]}]) -> ok
@equiv packages(Packages, [])
@spec (Files::[filename() | {package(), [filename()]}],
Options::proplist()) -> ok
@doc Runs EDoc on a given set of source files. See {@link run/3} for
details, including options.
@equiv run([], Files, Options)
@equiv packages(Packages, [])
@type package() = atom() | string()
@doc Runs EDoc on a set of packages. The `source_path' option is used
to locate the files; see {@link run/3} for details, including
options. This function automatically appends the current directory to
the source path.
@equiv run(Packages, [], Options)
@spec (Application::atom()) -> ok
@equiv application(Application, [])
@doc Run EDoc on an application in its default app-directory. See
{@link application/3} for details.
-> ok
@doc Run EDoc on an application located in the specified directory.
Tries to automatically set up good defaults. Unless the user
specifies otherwise:
<ul>
<li>The `doc' subdirectory will be used as the target directory, if
it exists; otherwise the application directory is used.
</li>
<li>The source code is assumed to be located in the `src'
subdirectory, if it exists, or otherwise in the application
directory itself.
</li>
source files will be processed.
</li>
<li>The `include' subdirectory is automatically added to the
preprocessing} is turned on.)
</li>
</ul>
See {@link run/3} for details, including options.
@see application/2
Recursively document all subpackages of '' - i.e., everything.
user has not specified it explicitly.
If no source files are found for a (specified) package, no package
documentation will be generated either (even if there is a
package-documentation file). This is the way it should be. For
specified files, use empty package (unless otherwise specified). The
assumed package is always used for creating the output. If the actual
module or package of the source differs from the assumption gathered
from the path and file name, a warning should be issued (since links
are likely to be incorrect).
@spec run(Packages::[package()],
Files::[filename() | {package(), [filename()]}],
@doc Runs EDoc on a given set of source files and/or packages. Note
`doclet' option below.
Also see {@link layout/2} for layout-related options, and
{@link get_doc/2} for options related to reading source
files.
Options:
<dl>
<dt>{@type {app_default, string()@}}
</dt>
</dd>
<dt>{@type {application, App::atom()@}}
</dt>
<dd>Specifies that the generated documentation describes the
application `App'. This mainly affects generated references.
</dd>
</dt>
<dd>Specifies the target directory for the generated documentation.
</dd>
<dt>{@type {doc_path, [string()]@}}
</dt>
taken as relative to `file://'. (Note that such paths must use
`/' as separator, regardless of the host operating system.)
</dd>
<dt>{@type {doclet, Module::atom()@}}
</dt>
<dd>Specifies a callback module to be used for creating the
The default doclet module is {@link edoc_doclet}; see {@link
edoc_doclet:run/2} for doclet-specific options.
</dd>
<dt>{@type {exclude_packages, [package()]@}}
</dt>
<dd>Lists packages to be excluded from the documentation. Typically
</dd>
<dt>{@type {file_suffix, string()@}}
</dt>
<dd>Specifies the suffix used for output files. The default value is
`".html"'. Note that this also affects generated references.
</dd>
<dt>{@type {new, boolean()@}}
</dt>
<dd>If the value is `true', any existing `edoc-info' file in the
target directory will be ignored and overwritten. The default
value is `false'.
</dd>
<dt>{@type {packages, boolean()@}}
</dt>
<dd>If the value is `true', it it assumed that packages (module
namespaces) are being used, and that the source code directory
structure reflects this. The default value is `true'. (Usually,
this does the right thing even if all the modules belong to the
top-level "empty" package.) `no_packages' is an alias for
`{packages, false}'. See the `subpackages' option below for
further details.
If the source code is organized in a hierarchy of
subdirectories although it does not use packages, use
option (on by default) to automatically generate documentation
for all the modules.
</dd>
</dt>
<dd>Specifies a list of file system paths used to locate the source
code for packages.
</dd>
</dt>
<dd>Specifies the expected suffix of input files. The default
value is `".erl"'.
</dd>
<dt>{@type {subpackages, boolean()@}}
</dt>
will also be included in the documentation. The default value is
`false'. `no_subpackages' is an alias for `{subpackages,
false}'. See also the `exclude_packages' option.
for source code files in subdirectories of the known source code
root directories. (Also see the `source_path' option.) Directory
names must begin with a lowercase letter and contain only
alphanumeric characters and underscore, or they will be ignored.
(For example, a subdirectory named `test-files' will not be
searched.)
</dd>
</dl>
@see files/2
@see packages/2
@see application/2
NEW-OPTIONS: source_path, application
INHERIT-OPTIONS: init_context/1
INHERIT-OPTIONS: expand_sources/2
INHERIT-OPTIONS: target_dir_info/5
INHERIT-OPTIONS: edoc_lib:find_sources/3
INHERIT-OPTIONS: edoc_lib:run_doclet/2
INHERIT-OPTIONS: edoc_lib:get_doc_env/4
The "empty package" is never included in the list of packages.
NEW-OPTIONS: dir
DEFER-OPTIONS: run/3
INHERIT-OPTIONS: edoc_lib:find_sources/3
this is hard-coded for now
Expand user-specified sets of files.
for each module, but preserve the order of the list.
NEW-OPTIONS: source_suffix, packages
DEFER-OPTIONS: run/3
NEW-OPTIONS: new
@hidden Not official yet
@equiv toc(Dir, Paths, [])
@hidden Not official yet
NEW-OPTIONS: doc_path
@doc Create a meta-level table of contents.
@hidden Not official yet
INHERIT-OPTIONS: init_context/1
INHERIT-OPTIONS: edoc_lib:run_doclet/2
INHERIT-OPTIONS: edoc_lib:get_doc_env/4
@equiv read(File, [])
@doc Reads and processes a source file and returns the resulting
options.
@equiv layout(Doc, [])
layout creates an HTML document.
Options:
<dl>
<dt>{@type {layout, Module::atom()@}}
</dt>
<dd>Specifies a callback module to be used for formatting. The
module must export a function `module(Doc, Options)'. The
default callback module is {@link edoc_layout}; see {@link
edoc_layout:module/2} for layout-specific options.
</dd>
</dl>
@see layout/1
@see run/3
@see read/2
INHERIT-OPTIONS: edoc_lib:run_layout/2
@spec (File) -> [comment()]
@type comment() = {Line, Column, Indentation, Text}
where
Line = integer(),
Column = integer(),
Indentation = integer(),
Text = [string()]
@equiv read_comments(File, [])
[comment()]
module {@link //syntax_tools/erl_comment_scan} for details on the
representation of comments. Currently, no options are avaliable.
@spec (File) -> [syntaxTree()]
@equiv read_source(File, [])
[syntaxTree()]
@type syntaxTree() = //syntax_tools/erl_syntax:syntaxTree()
form" syntax trees.
Options:
<dl>
<dt>{@type {preprocess, boolean()@}}
</dt>
<dd>If the value is `true', the source file will be read via the
`no_preprocess' is an alias for `{preprocess, false}'.
if a file contains too exotic definitions or uses of macros, it
will not be possible to read it without preprocessing. <em>Note:
with this option enabled.</em>
</dd>
<dt>{@type {includes, Path::[string()]@}}
</dt>
<dd>Specifies a list of directory names to be searched for include
files, if the `preprocess' option is turned on. Also used with
directory of the source file is always automatically appended to
the search path.
</dd>
<dt>{@type {macros, [{atom(), term()@}]@}}
</dt>
macro definitions, used if the `preprocess' option is turned on.
The default value is the empty list.</dd>
</dl>
<dt>{@type {report_missing_types, boolean()@}}
</dt>
<dd>If the value is `true', warnings are issued for missing types.
The default value is `false'.
`no_report_missing_types' is an alias for
`{report_missing_types, false}'.
</dd>
@see get_doc/2
@see //syntax_tools/erl_syntax
NEW-OPTIONS: [no_]preprocess (preprocess -> includes, macros)
The code below has been copied from epp.erl.
Copy the line of the last token to the last token that will be
part of the parse tree.
The last line is used in edoc_extract:find_type_docs() to determine
if a type declaration is followed by a comment.
<example>
-type t() :: [
{tag, integer()}
].
%% Protocol options.
</example>
The line of the dot token will be copied to the integer token.
@equiv get_doc(File, [])
the file <a href="../priv/edoc.dtd">`edoc.dtd'</a> for details.
Note that without an environment parameter (see {@link get_doc/3}),
hypertext links may not be correct.
Options:
<dl>
<dt>{@type {def, Macros@}}
</dt>
<dd><ul>
<li>`Macros' = {@type Macro | [Macro]}</li>
<li>`Macro' = {@type {Name::atom(), Text::string()@}}</li>
</ul>
<a href="overview-summary.html#Macro_expansion">Inline macro expansion</a>
for details.
</dd>
<dt>{@type {hidden, boolean()@}}
</dt>
<dd>If the value is `true', documentation of hidden functions will
also be included. The default value is `false'.
</dd>
<dt>{@type {private, boolean()@}}
</dt>
<dd>If the value is `true', documentation of private functions will
also be included. The default value is `false'.
</dd>
</dt>
<dd>If the value is `true', To-Do notes written using `@todo' or
value is `false'.
</dd>
</dl>
See {@link read_source/2}, {@link read_comments/2} and {@link
edoc_lib:get_doc_env/4} for further options.
@see get_doc/3
@see run/3
@see edoc_extract:source/5
@see read/2
@see layout/2
INHERIT-OPTIONS: get_doc/3
INHERIT-OPTIONS: edoc_lib:get_doc_env/4
@doc Like {@link get_doc/2}, but for a given environment
parameter. `Env' is an environment created by {@link
edoc_lib:get_doc_env/4}.
DEFER-OPTIONS: get_doc/2 | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may
distributed under the License is distributed on an " AS IS " BASIS ,
General Public License ( the " LGPL " ) as published by the Free Software
Foundation ; either version 2.1 , or ( at your option ) any later version .
either the Apache License or the LGPL .
2001 - 2007
@author < >
TODO : check weirdness in name generation for f(TypeName , ... ) - > ...
TODO : document the record type syntax
TODO : multiline comment support ( needs modified comment representation )
TODO : config : locations of all local docdirs ; generate local doc - index page
@doc EDoc - the Erlang program documentation generator .
This module provides the main user interface to EDoc .
-module(edoc).
-export([packages/1, packages/2, files/1, files/2,
application/1, application/2, application/3,
toc/1, toc/2, toc/3,
run/3,
file/1, file/2,
read/1, read/2,
layout/1, layout/2,
get_doc/1, get_doc/2, get_doc/3,
read_comments/1, read_comments/2,
read_source/1, read_source/2]).
-compile({no_auto_import,[error/1]}).
-include("edoc.hrl").
%% @equiv file(Name, [])
file(Name) ->
    file(Name, []).
%% @spec file(filename(), proplist()) -> ok
%% @deprecated This is part of the old interface to EDoc and is mainly
%% documentation is through one of the functions {@link application/2},
%% <dt>{@type {dir, filename()@}}
%% <dt>{@type {source_suffix, string()@}}
%% For running EDoc from a Makefile or similar, see
%% NEW-OPTIONS: source_suffix, file_suffix, dir
%% Format one source file and write the result into the `dir' option
%% (default: the source file's own directory). The output file name
%% is the source base name with `source_suffix' replaced by
%% `file_suffix'.
file(Name, Options) ->
    Text = read(Name, Options),
    SrcSuffix = proplists:get_value(source_suffix, Options,
                                    ?DEFAULT_SOURCE_SUFFIX),
    BaseName = filename:basename(Name, SrcSuffix),
    Suffix = proplists:get_value(file_suffix, Options,
                                 ?DEFAULT_FILE_SUFFIX),
    Dir = proplists:get_value(dir, Options, filename:dirname(Name)),
    %% Preserve the source file's declared encoding in the output.
    Encoding = [{encoding, edoc_lib:read_encoding(Name, [])}],
    edoc_lib:write_file(Text, Dir, BaseName ++ Suffix, '', Encoding).
TODO : better documentation of files/1/2 , packages/1/2 , application/1/2/3
%% @equiv files(Files, [])
files(Files) ->
    files(Files, []).
%% Run EDoc on an explicit list of files; see run/3 for options.
files(Files, Options) ->
    run([], Files, Options).
( Packages::[package ( ) ] ) - > ok
%% @equiv packages(Packages, [])
packages(Packages) ->
    packages(Packages, []).
( Packages::[package ( ) ] , Options::proplist ( ) ) - > ok
%% Run EDoc on a set of packages, appending the current directory to
%% the source path; see run/3 for options.
packages(Packages, Options) ->
    run(Packages, [], Options ++ [{source_path, [?CURRENT_DIR]}]).
%% @equiv application(App, [])
application(App) ->
    application(App, []).
@spec ( Application::atom ( ) , Options::proplist ( ) ) - > ok
@see application/1
%% Run EDoc on application App in the directory reported by
%% code:lib_dir/1; report and exit if it cannot be located.
application(App, Options) when is_atom(App) ->
    case code:lib_dir(App) of
        Dir when is_list(Dir) ->
            application(App, Dir, Options);
        _ ->
            edoc_report:report("cannot find application directory for '~s'.",
                               [App]),
            exit(error)
    end.
@spec ( Application::atom ( ) , Dir::filename ( ) , Options::proplist ( ) )
< li > The { @link run/3 . ` subpackages ' } option is turned on . All found
include path . ( Only important if { @link read_source/2 .
%% Run EDoc on an application rooted at Dir with sensible defaults:
%% sources under the ?SOURCE_DIR subdirectory (if present), overview
%% file and output under the ?EDOC_DIR subdirectory, Dir/include on
%% the include path, subpackages enabled, everything attributed to
%% App. User-supplied Options take precedence (they come first).
application(App, Dir, Options) when is_atom(App) ->
    Src = edoc_lib:try_subdir(Dir, ?SOURCE_DIR),
    Overview = filename:join(edoc_lib:try_subdir(Dir, ?EDOC_DIR),
                             ?OVERVIEW_FILE),
    Opts = Options ++ [{source_path, [Src]},
                       subpackages,
                       {title, io_lib:fwrite("The ~s application", [App])},
                       {overview, Overview},
                       {dir, filename:join(Dir, ?EDOC_DIR)},
                       {includes, [filename:join(Dir, "include")]}],
    Opts1 = set_app_default(App, Dir, Opts),
    %% Document all subpackages of '' - i.e., everything found.
    run([''], [], [{application, App} | Opts1]).
Try to set up a default application base URI in a smart way if the
%% Provide a default `app_default' base URI when the user has not set
%% one: if Dir's basename equals the application name, use its parent
%% directory, otherwise fall back to ?APP_DEFAULT.
set_app_default(App, Dir0, Opts) ->
    case proplists:get_value(app_default, Opts) of
        undefined ->
            AppName = atom_to_list(App),
            Dir = edoc_lib:simplify_path(filename:absname(Dir0)),
            AppDir = case filename:basename(Dir) of
                         AppName ->
                             filename:dirname(Dir);
                         _ ->
                             ?APP_DEFAULT
                     end,
            [{app_default, AppDir} | Opts];
        _ ->
            %% Explicit user value; leave the options untouched.
            Opts
    end.
%% Options that are enabled by default.
opt_defaults() ->
    [packages].
%% Mapping from `no_X' aliases to the boolean option they negate;
%% used with proplists:substitute_negations/2 in expand_opts/1.
opt_negations() ->
    [{no_preprocess, preprocess},
     {no_subpackages, subpackages},
     {no_report_missing_types, report_missing_types},
     {no_packages, packages}].
Options::proplist ( ) ) - > ok
that the doclet plugin module has its own particular options ; see the
< dd > Specifies the default base URI for unknown applications .
< , filename()@ } }
< dd > Specifies a list of URI : s pointing to directories that contain
EDoc - generated documentation . URI without a ` scheme:// ' part are
documentation . The module must export a function ` run(Cmd , ) ' .
used in conjunction with the ` subpackages ' option .
` no_packages ' together with the recursive - search ` subpackages '
< dt>{@type { source_path , [ filename()]@ } }
< dt>{@type { source_suffix , string()@ } }
< dd > If the value is ` true ' , all subpackages of specified packages
Subpackage source files are found by recursively searching
%% Main entry point: document the given packages and/or files.
%% Collects sources from the source path, expands them to module
%% names, merges info from any existing target directory, then hands
%% a #doclet_gen{} command to the configured doclet.
run(Packages, Files, Opts0) ->
    Opts = expand_opts(Opts0),
    Ctxt = init_context(Opts),
    Dir = Ctxt#context.dir,
    Path = proplists:append_values(source_path, Opts),
    Ss = sources(Path, Packages, Opts),
    {Ss1, Ms} = expand_sources(expand_files(Files) ++ Ss, Opts),
    Ps = [P || {_, P, _, _} <- Ss1],
    App = proplists:get_value(application, Opts, ?NO_APP),
    {App1, Ps1, Ms1} = target_dir_info(Dir, App, Ps, Ms, Opts),
    %% The "empty package" is never included in the package list.
    Ps2 = edoc_lib:unique(lists:sort(Ps1)) -- [''],
    Ms2 = edoc_lib:unique(lists:sort(Ms1)),
    Fs = package_files(Path, Ps2),
    Env = edoc_lib:get_doc_env(App1, Ps2, Ms2, Opts),
    Ctxt1 = Ctxt#context{env = Env},
    Cmd = #doclet_gen{sources = Ss1,
                      app = App1,
                      packages = Ps2,
                      modules = Ms2,
                      filemap = Fs
                     },
    F = fun (M) ->
                M:run(Cmd, Ctxt1)
        end,
    edoc_lib:run_doclet(F, Opts).
%% Normalize options: append the defaults, then resolve `no_X'
%% aliases into {X, false} pairs.
expand_opts(Opts0) ->
    proplists:substitute_negations(opt_negations(),
                                   Opts0 ++ opt_defaults()).
%% Build the initial #context{} record from the options.
init_context(Opts) ->
    #context{dir = proplists:get_value(dir, Opts, ?CURRENT_DIR),
             opts = Opts
            }.
%% Collect source file descriptors for every package found on Path.
sources(Path, Packages, Opts) ->
    lists:foldl(fun (P, Xs) ->
                        edoc_lib:find_sources(Path, P, Opts) ++ Xs
                end,
                [], Packages).
%% Build a lookup fun mapping a package name to its package
%% documentation file found on the source path, or "" if none.
%% Fix: `Name' was an unbound variable in this revision; use the
%% ?PACKAGE_FILE macro (from the included edoc.hrl) directly.
package_files(Path, Packages) ->
    D = lists:foldl(fun (P, D) ->
                            F = edoc_lib:find_file(Path, P, ?PACKAGE_FILE),
                            dict:store(P, F, D)
                    end,
                    dict:new(), Packages),
    fun (P) ->
            case dict:find(P, D) of
                {ok, F} -> F;
                error -> ""
            end
    end.
%% Normalize user-specified file specs into {Package, BaseName, Dir}
%% triples. A bare file name gets the empty package ''; a
%% {Package, Files} pair expands to one triple per file.
expand_files(Specs) ->
    lists:flatmap(
      fun ({Pkg, Names}) ->
              [{Pkg, filename:basename(N), filename:dirname(N)}
               || N <- Names];
          (Name) ->
              [{'', filename:basename(Name), filename:dirname(Name)}]
      end, Specs).
Create the ( assumed ) full module names . Keep only the first source
%% Create the (assumed) full module names for the given sources,
%% keeping only the first source file found for each module while
%% preserving list order.
expand_sources(Ss, Opts) ->
    Suffix = proplists:get_value(source_suffix, Opts,
                                 ?DEFAULT_SOURCE_SUFFIX),
    Ss1 = case proplists:get_bool(packages, Opts) of
              true -> Ss;
              false -> [{'',F,D} || {_P,F,D} <- Ss]
          end,
    expand_sources(Ss1, Suffix, sets:new(), [], []).
%% Accumulator loop: S is the set of modules already seen, As the
%% expanded source tuples, Ms the module names (built reversed).
expand_sources([{'', F, D} | Fs], Suffix, S, As, Ms) ->
    M = list_to_atom(filename:rootname(F, Suffix)),
    case sets:is_element(M, S) of
        true ->
            %% Duplicate module: keep the first occurrence only.
            expand_sources(Fs, Suffix, S, As, Ms);
        false ->
            S1 = sets:add_element(M, S),
            expand_sources(Fs, Suffix, S1, [{M, '', F, D} | As],
                           [M | Ms])
    end;
expand_sources([], _Suffix, _S, As, Ms) ->
    {lists:reverse(As), lists:reverse(Ms)}.
%% Merge information from an existing target directory (as read by
%% edoc_lib:read_info_file/1) into the given application, packages
%% and modules — unless the `new' option says to ignore any existing
%% info file.
target_dir_info(Dir, App, Ps, Ms, Opts) ->
    case proplists:get_bool(new, Opts) of
        true ->
            {App, Ps, Ms};
        false ->
            {OldApp, OldPs, OldMs} = edoc_lib:read_info_file(Dir),
            MergedApp = case App == ?NO_APP of
                            true  -> OldApp;
                            false -> App
                        end,
            {MergedApp, Ps ++ OldPs, Ms ++ OldMs}
    end.
%% @equiv toc(Dir, [])
toc(Dir) ->
    toc(Dir, []).
%% Create a meta-level table of contents from the `doc_path' option
%% plus any documentation directories found on the system.
toc(Dir, Opts) ->
    Paths = proplists:append_values(doc_path, Opts)
        ++ edoc_lib:find_doc_dirs(),
    toc(Dir, Paths, Opts).
%% Create the table of contents for the given paths by handing a
%% #doclet_toc{} command to the configured doclet.
toc(Dir, Paths, Opts0) ->
    Opts = expand_opts(Opts0 ++ [{dir, Dir}]),
    Ctxt = init_context(Opts),
    Env = edoc_lib:get_doc_env('', [], [], Opts),
    Ctxt1 = Ctxt#context{env = Env},
    F = fun (M) ->
                M:run(#doclet_toc{paths=Paths}, Ctxt1)
        end,
    edoc_lib:run_doclet(F, Opts).
%% @spec read(File::filename()) -> string()
%% @equiv read(File, [])
read(File) ->
    read(File, []).
read(File::filename ( ) , Options::proplist ( ) ) - > string ( )
EDoc - text as a string . See { @link get_doc/2 } and { @link layout/2 } for
@see file/2
INHERIT - OPTIONS : get_doc/2 ,
%% Read and process a source file, returning the laid-out
%% documentation text as a string.
read(File, Opts) ->
    {_ModuleName, Doc} = get_doc(File, Opts),
    layout(Doc, Opts).
%% @spec (Doc::edoc_module()) -> string()
%% @equiv layout(Doc, [])
layout(Doc) ->
    layout(Doc, []).
( ) , Options::proplist ( ) ) - > string ( )
@doc Transforms EDoc module documentation data to text . The default
@see file/2
%% Transform EDoc module documentation data to text by running the
%% configured layout module's module/2 callback.
layout(Doc, Opts) ->
    F = fun (M) ->
                M:module(Doc, Opts)
        end,
    edoc_lib:run_layout(F, Opts).
%% @equiv read_comments(File, [])
read_comments(File) ->
    read_comments(File, []).
read_comments(File::filename ( ) , Options::proplist ( ) ) - >
@doc Extracts comments from an Erlang source code file . See the
%% Extract comments from an Erlang source file; the options are
%% currently unused.
read_comments(File, _Opts) ->
    erl_comment_scan:file(File).
%% @equiv read_source(Name, [])
read_source(Name) ->
    read_source(Name, []).
read_source(File::filename ( ) , Options::proplist ( ) ) - >
@doc Reads an Erlang source file and returns the list of " source code
Erlang preprocessor ( ` epp ' ) . The default value is ` false ' .
Normally , preprocessing is not necessary for EDoc to work , but
comments in included files will not be available to EDoc , even
the ` @headerfile ' tag . The default value is the empty list . The
< dd > Specifies a list of pre - defined Erlang preprocessor ( ` epp ' )
%% Read "source code form" syntax trees from an Erlang source file,
%% optionally via the preprocessor (see read_source_1/2). Reports
%% and exits on read failure; error markers in the result cause
%% check_forms/2 to abort as well.
read_source(Name, Opts0) ->
    Opts = expand_opts(Opts0),
    case read_source_1(Name, Opts) of
        {ok, Forms} ->
            check_forms(Forms, Name),
            Forms;
        {error, R} ->
            edoc_report:error({"error reading file '~ts'.",
                               [edoc_lib:filename(Name)]}),
            exit({error, R})
    end.
%% Choose the reader: real preprocessing through epp when the
%% `preprocess' option is set, otherwise the more forgiving
%% epp_dodger quick parser.
read_source_1(Name, Opts) ->
    case proplists:get_bool(preprocess, Opts) of
        true ->
            read_source_2(Name, Opts);
        false ->
            epp_dodger:quick_parse_file(Name, Opts ++ [{no_fail, false}])
    end.
%% Gather include directories (the source file's own directory is
%% always appended) and macro definitions, then preprocess Name.
%% Fix: the line between Macros and the call was a stripped comment
%% ("epp:parse_file(Name, Includes, Macros).") left as invalid
%% Erlang inside the function body; restored as a comment.
read_source_2(Name, Opts) ->
    Includes = proplists:append_values(includes, Opts)
        ++ [filename:dirname(Name)],
    Macros = proplists:append_values(macros, Opts),
    %% epp:parse_file(Name, Includes, Macros)
    parse_file(Name, Includes, Macros).
%% Try preprocessing with utf8 as the default source encoding; fall
%% back to latin1 if the file turned out not to be valid Unicode.
parse_file(Name, Includes, Macros) ->
    case parse_file(utf8, Name, Includes, Macros) of
        invalid_unicode ->
            parse_file(latin1, Name, Includes, Macros);
        Ret ->
            Ret
    end.
%% Preprocess Name with epp, using DefEncoding as the default source
%% encoding. Returns {ok, Forms}, the atom `invalid_unicode' when
%% decoding failed and the detected encoding was not utf8 (so the
%% caller can retry with latin1), or the error from epp:open/1.
parse_file(DefEncoding, Name, Includes, Macros) ->
    Options = [{name, Name},
               {includes, Includes},
               {macros, Macros},
               {default_encoding, DefEncoding}],
    case epp:open([extra | Options]) of
        {ok, Epp, Extra} ->
            try parse_file(Epp) of
                Forms ->
                    Encoding = proplists:get_value(encoding, Extra),
                    case find_invalid_unicode(Forms) of
                        invalid_unicode when Encoding =/= utf8 ->
                            invalid_unicode;
                        _ ->
                            {ok, Forms}
                    end
            %% Always release the epp server, even on exceptions.
            after _ = epp:close(Epp)
            end;
        Error ->
            Error
    end.
%% Scan a form list for the error marker that the file I/O server
%% emits on invalid Unicode input; return `invalid_unicode' on the
%% first match, or `none' if the list holds no such error.
find_invalid_unicode([{error, {_Line, file_io_server, invalid_unicode}} | _]) ->
    invalid_unicode;
find_invalid_unicode([_Form | Rest]) ->
    find_invalid_unicode(Rest);
find_invalid_unicode([]) ->
    none.
%% Read all remaining forms from an open `epp' handle, recursively,
%% until eof. A typed record declaration is split into a plain record
%% attribute (type info stripped) plus a synthesized `type' attribute
%% for the record, so later passes see both representations.
parse_file(Epp) ->
    case scan_and_parse(Epp) of
        {ok, Form} ->
            case Form of
                {attribute,La,record,{Record, Fields}} ->
                    case epp:normalize_typed_record_fields(Fields) of
                        {typed, NewFields} ->
                            %% Untyped record + separate record type form.
                            [{attribute, La, record, {Record, NewFields}},
                             {attribute, La, type,
                              {{record, Record}, Fields, []}}
                             | parse_file(Epp)];
                        not_typed ->
                            [Form | parse_file(Epp)]
                    end;
                _ ->
                    [Form | parse_file(Epp)]
            end;
        {error, E} ->
            %% Keep the error in the form list; callers report it later.
            [{error, E} | parse_file(Epp)];
        {eof, Location} ->
            [{eof, Location}]
    end.
%% Scan the next form from Epp and parse it. Scan errors and eof are
%% passed through unchanged; on success the tokens are normalized by
%% fix_last_line/1 before parsing.
scan_and_parse(Epp) ->
    case epp:scan_erl_form(Epp) of
        {ok, Toks0} ->
            Toks = fix_last_line(Toks0),
            case erl_parse:parse_form(Toks) of
                {ok, Form} ->
                    {ok, Form};
                Else ->
                    Else
            end;
        Else ->
            Else
    end.
%% Copy the line of the last token (the dot) onto the last token that
%% will be part of the parse tree, so downstream code can tell whether
%% a declaration is followed by a comment on the same line.
fix_last_line(Toks0) ->
    Toks1 = lists:reverse(Toks0),
    {line, LastLine} = erl_scan:token_info(hd(Toks1), line),
    fll(Toks1, LastLine, []).
%% Walk the reversed token list; rewrite the line attribute of the
%% first 3-tuple token found, then restore the original order.
fll([{Category, Attributes0, Symbol} | L], LastLine, Ts) ->
    F = fun(_OldLine) -> LastLine end,
    Attributes = erl_scan:set_attribute(line, Attributes0, F),
    lists:reverse(L, [{Category, Attributes, Symbol} | Ts]);
fll([T | L], LastLine, Ts) ->
    fll(L, LastLine, [T | Ts]);
fll(L, _LastLine, Ts) ->
    lists:reverse(L, Ts).
%% Abort with exit(error) if any of the given syntax trees is an
%% error marker, reporting each one through edoc_report first.
check_forms(Fs, Name) ->
    Fun = fun (F) ->
                  case erl_syntax:type(F) of
                      error_marker ->
                          case erl_syntax:error_marker_info(F) of
                              {L, M, D} ->
                                  edoc_report:error(L, Name, {format_error, M, D});
                              Other ->
                                  edoc_report:report(Name, "unknown error in "
                                                     "source code: ~w.", [Other])
                          end,
                          exit(error);
                      _ ->
                          ok
                  end
          end,
    lists:foreach(Fun, Fs).
( ) ) - > { ModuleName , edoc_module ( ) }
%% @equiv get_doc(File, [])
get_doc(File) ->
    get_doc(File, []).
( ) , Options::proplist ( ) ) - >
{ ModuleName , edoc_module ( ) }
ModuleName = atom ( )
@type edoc_module ( ) . The EDoc documentation data for a module ,
expressed as an XML document in { @link //xmerl . XMerL } format . See
@doc Reads a source code file and extracts EDoc documentation data .
Specifies a set of EDoc macro definitions . See
< } }
` @TODO ' tags will be included in the documentation . The default
%% Build a default documentation environment from Opts and delegate
%% to get_doc/3.
get_doc(File, Opts) ->
    Env = edoc_lib:get_doc_env(Opts),
    get_doc(File, Env, Opts).
( ) , Env::edoc_lib : edoc_env ( ) ,
Options::proplist ( ) ) - > { ModuleName , edoc_module ( ) }
ModuleName = atom ( )
INHERIT - OPTIONS : read_source/2 , read_comments/2 , edoc_extract : source/5
%% Extract EDoc data from File using a caller-supplied environment.
get_doc(File, Env, Opts) ->
    edoc_extract:source(File, Env, Opts).
|
1aad8df69471a3d8ec06e408a6c444ae8095dd2a266eb08017a4775d15a34d51 | cedlemo/OCaml-GI-ctypes-bindings-generator | Size_group.ml | open Ctypes
open Foreign
(* The size group is handled as an opaque C pointer. *)
type t = unit ptr
let t_typ : t typ = ptr void
(* Binds the C function "gtk_size_group_new". *)
let create =
  foreign "gtk_size_group_new" (Size_group_mode.t_view @-> returning (ptr t_typ))
(* Binds "gtk_size_group_add_widget". *)
let add_widget =
  foreign "gtk_size_group_add_widget" (t_typ @-> ptr Widget.t_typ @-> returning (void))
(* Binds "gtk_size_group_get_ignore_hidden"; returns a bool. *)
let get_ignore_hidden =
  foreign "gtk_size_group_get_ignore_hidden" (t_typ @-> returning (bool))
(* Binds "gtk_size_group_get_mode". *)
let get_mode =
  foreign "gtk_size_group_get_mode" (t_typ @-> returning (Size_group_mode.t_view))
(* Binds "gtk_size_group_get_widgets"; returns a GSList pointer. *)
let get_widgets =
  foreign "gtk_size_group_get_widgets" (t_typ @-> returning (ptr SList.t_typ))
(* Binds "gtk_size_group_remove_widget". *)
let remove_widget =
  foreign "gtk_size_group_remove_widget" (t_typ @-> ptr Widget.t_typ @-> returning (void))
(* Binds "gtk_size_group_set_ignore_hidden". *)
let set_ignore_hidden =
  foreign "gtk_size_group_set_ignore_hidden" (t_typ @-> bool @-> returning (void))
(* Binds "gtk_size_group_set_mode". *)
let set_mode =
  foreign "gtk_size_group_set_mode" (t_typ @-> Size_group_mode.t_view @-> returning (void))
| null | https://raw.githubusercontent.com/cedlemo/OCaml-GI-ctypes-bindings-generator/21a4d449f9dbd6785131979b91aa76877bad2615/tools/Gtk3/Size_group.ml | ocaml | open Ctypes
open Foreign
(* Opaque C pointer for the GtkSizeGroup object. *)
type t = unit ptr
let t_typ : t typ = ptr void
(* Each binding below wraps the C symbol named in its string literal. *)
let create =
  foreign "gtk_size_group_new" (Size_group_mode.t_view @-> returning (ptr t_typ))
let add_widget =
  foreign "gtk_size_group_add_widget" (t_typ @-> ptr Widget.t_typ @-> returning (void))
let get_ignore_hidden =
  foreign "gtk_size_group_get_ignore_hidden" (t_typ @-> returning (bool))
let get_mode =
  foreign "gtk_size_group_get_mode" (t_typ @-> returning (Size_group_mode.t_view))
let get_widgets =
  foreign "gtk_size_group_get_widgets" (t_typ @-> returning (ptr SList.t_typ))
let remove_widget =
  foreign "gtk_size_group_remove_widget" (t_typ @-> ptr Widget.t_typ @-> returning (void))
let set_ignore_hidden =
  foreign "gtk_size_group_set_ignore_hidden" (t_typ @-> bool @-> returning (void))
let set_mode =
  foreign "gtk_size_group_set_mode" (t_typ @-> Size_group_mode.t_view @-> returning (void))
| |
d3a36c63ad9035e6dbc1bd4285e5bbc7a59969ab9272554a55f985c3f85f9d0e | yrashk/erlang | mnesia_dumper.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 1996 - 2009 . All Rights Reserved .
%%
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at /.
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
%%
%%
-module(mnesia_dumper).
The arg may be one of the following :
%% scan_decisions Initial scan for decisions
%% startup Initial dump during startup
%% schema_prepare Dump initiated during schema transaction preparation
%% schema_update Dump initiated during schema transaction commit
%% fast_schema_update A schema_update, but ignores the log file
%% user Dump initiated by user
%% write_threshold Automatic dump caused by too many log writes
%% time_threshold Automatic dump caused by timeout
%% Public interface
-export([
get_log_writes/0,
incr_log_writes/0,
raw_dump_table/2,
raw_named_dump_table/2,
start_regulator/0,
opt_dump_log/1,
update/3
]).
%% Internal stuff
-export([regulator_init/1]).
-include("mnesia.hrl").
-include_lib("kernel/include/file.hrl").
-import(mnesia_lib, [fatal/2, dbg_out/2]).
-define(REGULATOR_NAME, mnesia_dumper_load_regulator).
-define(DumpToEtsMultiplier, 4).
%% Total number of transaction log writes so far: the persisted
%% counter plus the portion of the current threshold window that
%% has already been consumed.
get_log_writes() ->
    Threshold = mnesia_monitor:get_env(dump_log_write_threshold),
    Base = mnesia_lib:read_counter(trans_log_writes),
    Remaining = mnesia_lib:read_counter(trans_log_writes_left),
    Base + (Threshold - Remaining).
%% Record one transaction log write.  When the countdown reaches
%% zero the write threshold has been hit and an automatic dump is
%% requested via adjust_log_writes/1.
incr_log_writes() ->
    case mnesia_lib:incr_counter(trans_log_writes_left, -1) of
        Remaining when Remaining > 0 ->
            ignore;
        _ ->
            adjust_log_writes(true)
    end.
%% Reset the write-threshold countdown and fold the consumed part of
%% the window into the persistent trans_log_writes counter.  Guarded
%% by a node-local global lock so concurrent writers do not both
%% trigger a dump; if DoCast is true an asynchronous dump request is
%% sent to the controller as well.
adjust_log_writes(DoCast) ->
    Token = {mnesia_adjust_log_writes, self()},
    case global:set_lock(Token, [node()], 1) of
	false ->
	    ignore; %% Somebody else is sending a dump request
	true ->
	    case DoCast of
		false ->
		    ignore;
		true ->
		    mnesia_controller:async_dump_log(write_threshold)
	    end,
	    Max = mnesia_monitor:get_env(dump_log_write_threshold),
	    Left = mnesia_lib:read_counter(trans_log_writes_left),
	    %% Don't care if we lost a few writes
	    mnesia_lib:set_counter(trans_log_writes_left, Max),
	    Diff = Max - Left,
	    mnesia_lib:incr_counter(trans_log_writes, Diff),
	    global:del_lock(Token, [node()])
    end.
%% Returns 'ok' or exits
%%
%% Dump the transaction log if needed.  If the load regulator process
%% is running (see start_regulator/0) its pid is passed along so the
%% dump can be throttled; otherwise the atom 'nopid' is used.
opt_dump_log(InitBy) ->
    Reg = case whereis(?REGULATOR_NAME) of
	      undefined ->
		  nopid;
	      %% is_pid/1 replaces the long-deprecated pid/1 guard test
	      Pid when is_pid(Pid) ->
		  Pid
	  end,
    perform_dump(InitBy, Reg).
%% Scan for decisions
%% First clause: only harvest transaction decisions from both log
%% files, without propagating any data to the DAT files.
perform_dump(InitBy, Regulator) when InitBy == scan_decisions ->
    ?eval_debug_fun({?MODULE, perform_dump}, [InitBy]),
    dbg_out("Transaction log dump initiated by ~w~n", [InitBy]),
    scan_decisions(mnesia_log:previous_log_file(), InitBy, Regulator),
    scan_decisions(mnesia_log:latest_log_file(), InitBy, Regulator);
%% Propagate the log into the DAT-files
%% Second clause: the real dump.  Prepares the log, replays it into
%% the table files, and finally confirms the dump so the replayed
%% portion of the log can be discarded.
perform_dump(InitBy, Regulator) ->
    ?eval_debug_fun({?MODULE, perform_dump}, [InitBy]),
    LogState = mnesia_log:prepare_log_dump(InitBy),
    dbg_out("Transaction log dump initiated by ~w: ~w~n",
	    [InitBy, LogState]),
    adjust_log_writes(false),
    case LogState of
	already_dumped ->
	    mnesia_recover:allow_garb(),
	    dumped;
	{needs_dump, Diff} ->
	    U = mnesia_monitor:get_env(dump_log_update_in_place),
	    Cont = mnesia_log:init_log_dump(),
	    mnesia_recover:sync(),
	    case catch do_perform_dump(Cont, U, InitBy, Regulator, undefined) of
		ok ->
		    ?eval_debug_fun({?MODULE, post_dump}, [InitBy]),
		    case mnesia_monitor:use_dir() of
			true ->
			    mnesia_recover:dump_decision_tab();
			false ->
			    mnesia_log:purge_some_logs()
		    end,
		    mnesia_recover:allow_garb(),
		    %% And now to the crucial point...
		    mnesia_log:confirm_log_dump(Diff);
		{error, Reason} ->
		    {error, Reason};
		{'EXIT', {Desc, Reason}} ->
		    case mnesia_monitor:get_env(auto_repair) of
			true ->
			    mnesia_lib:important(Desc, Reason),
			    %% Ignore rest of the log
			    mnesia_log:confirm_log_dump(Diff);
			false ->
			    fatal(Desc, Reason)
		    end
	    end;
	{error, Reason} ->
	    {error, {"Cannot prepare log dump", Reason}}
    end.
%% Open the log file Fname read-only and replay it purely to collect
%% transaction decisions (InitBy == scan_decisions makes insert_rec/4
%% skip data propagation).  Missing files are silently accepted.
scan_decisions(Fname, InitBy, Regulator) ->
    Exists = mnesia_lib:exists(Fname),
    case Exists of
	false ->
	    ok;
	true ->
	    Header = mnesia_log:trans_log_header(),
	    Name = previous_log,
	    mnesia_log:open_log(Name, Header, Fname, Exists,
				mnesia_monitor:get_env(auto_repair), read_only),
	    Cont = start,
	    Res = (catch do_perform_dump(Cont, false, InitBy, Regulator, undefined)),
	    mnesia_log:close_log(Name),
	    case Res of
		ok -> ok;
		{'EXIT', Reason} -> {error, Reason}
	    end
    end.
%% Replay the log chunk by chunk until eof, threading the log-format
%% version (picked up from log_header records) through the recursion.
%% On any error the opened table files are closed/cleaned before
%% exiting.  The process-dictionary key mnesia_dumper_dets (per-table
%% update bookkeeping, see dets_updated/2) is erased when done.
do_perform_dump(Cont, InPlace, InitBy, Regulator, OldVersion) ->
    case mnesia_log:chunk_log(Cont) of
	{C2, Recs} ->
	    case catch insert_recs(Recs, InPlace, InitBy, Regulator, OldVersion) of
		{'EXIT', R} ->
		    Reason = {"Transaction log dump error: ~p~n", [R]},
		    close_files(InPlace, {error, Reason}, InitBy),
		    exit(Reason);
		Version ->
		    do_perform_dump(C2, InPlace, InitBy, Regulator, Version)
	    end;
	eof ->
	    close_files(InPlace, ok, InitBy),
	    erase(mnesia_dumper_dets),
	    ok
    end.
%% Apply a list of log records, pacing each one through the load
%% regulator.  When a log_header record is seen, the remaining
%% records are interpreted according to its log_version; otherwise
%% the current version LogV is carried forward.  Returns the last
%% seen version.
insert_recs([Rec | Recs], InPlace, InitBy, Regulator, LogV) ->
    regulate(Regulator),
    %% is_record/2 replaces the long-deprecated record/2 guard test
    case insert_rec(Rec, InPlace, InitBy, LogV) of
	LogH when is_record(LogH, log_header) ->
	    insert_recs(Recs, InPlace, InitBy, Regulator, LogH#log_header.log_version);
	_ ->
	    insert_recs(Recs, InPlace, InitBy, Regulator, LogV)
    end;
insert_recs([], _InPlace, _InitBy, _Regulator, Version) ->
    Version.
%% Handle one log record.
%%  - scan_decisions dumps only note decision records; data is skipped.
%%  - commit records wait for the transaction outcome and are either
%%    propagated (committed) or undone (aborted).
%%  - log_header records are validated against the known log versions
%%    and returned so insert_recs/5 can switch interpretation.
%% All record/2 guard tests below were modernized to is_record/2
%% (record/2 in guards has been deprecated for a long time).
insert_rec(Rec, _InPlace, scan_decisions, _LogV) ->
    if
	is_record(Rec, commit) ->
	    ignore;
	is_record(Rec, log_header) ->
	    ignore;
	true ->
	    mnesia_recover:note_log_decision(Rec, scan_decisions)
    end;
insert_rec(Rec, InPlace, InitBy, LogV) when is_record(Rec, commit) ->
    %% Determine the Outcome of the transaction and recover it
    D = Rec#commit.decision,
    case mnesia_recover:wait_for_decision(D, InitBy) of
	{Tid, committed} ->
	    do_insert_rec(Tid, Rec, InPlace, InitBy, LogV);
	{Tid, aborted} ->
	    mnesia_schema:undo_prepare_commit(Tid, Rec)
    end;
insert_rec(H, _InPlace, _InitBy, _LogV) when is_record(H, log_header) ->
    CurrentVersion = mnesia_log:version(),
    if
	H#log_header.log_kind /= trans_log ->
	    exit({"Bad kind of transaction log", H});
	H#log_header.log_version == CurrentVersion ->
	    ok;
	H#log_header.log_version == "4.2" ->
	    ok;
	H#log_header.log_version == "4.1" ->
	    ok;
	H#log_header.log_version == "4.0" ->
	    ok;
	true ->
	    fatal("Bad version of transaction log: ~p~n", [H])
    end,
    H;
insert_rec(_Rec, _InPlace, _InitBy, _LogV) ->
    ok.
%% Propagate one committed transaction record.  Schema operations go
%% first (opening the schema file if the schema itself lives on disc),
%% then disc_copies data; disc_only_copies data is replayed only at
%% startup because at runtime the dets files were already written.
do_insert_rec(Tid, Rec, InPlace, InitBy, LogV) ->
    case Rec#commit.schema_ops of
	[] ->
	    ignore;
	SchemaOps ->
	    case val({schema, storage_type}) of
		ram_copies ->
		    insert_ops(Tid, schema_ops, SchemaOps, InPlace, InitBy, LogV);
		Storage ->
		    true = open_files(schema, Storage, InPlace, InitBy),
		    insert_ops(Tid, schema_ops, SchemaOps, InPlace, InitBy, LogV)
	    end
    end,
    D = Rec#commit.disc_copies,
    insert_ops(Tid, disc_copies, D, InPlace, InitBy, LogV),
    case InitBy of
	startup ->
	    DO = Rec#commit.disc_only_copies,
	    insert_ops(Tid, disc_only_copies, DO, InPlace, InitBy, LogV);
	_ ->
	    ignore
    end.
%% Entry point used by schema transactions: apply SchemaOps and then
%% release the schema commit lock.  With no operations there is
%% nothing to do.
update(_Tid, [], _DumperMode) ->
    dumped;
update(Tid, SchemaOps, DumperMode) ->
    Result = perform_update(Tid, SchemaOps, DumperMode, mnesia_monitor:use_dir()),
    mnesia_controller:release_schema_commit_lock(),
    Result.
%% With DumperMode == mandatory and a disc-based node, fall back to a
%% full transaction log dump; otherwise apply only the given schema
%% operations directly (fast_schema_update), bypassing the log.
perform_update(_Tid, _SchemaOps, mandatory, true) ->
    %% Force a dump of the transaction log in order to let the
    %% dumper perform needed updates
    InitBy = schema_update,
    ?eval_debug_fun({?MODULE, dump_schema_op}, [InitBy]),
    opt_dump_log(InitBy);
perform_update(Tid, SchemaOps, _DumperMode, _UseDir) ->
    %% No need for a full transaction log dump.
    %% Ignore the log file and only perform
    %% the corresponding updates.
    InitBy = fast_schema_update,
    InPlace = mnesia_monitor:get_env(dump_log_update_in_place),
    ?eval_debug_fun({?MODULE, dump_schema_op}, [InitBy]),
    case catch insert_ops(Tid, schema_ops, SchemaOps, InPlace, InitBy,
			  mnesia_log:version()) of
	{'EXIT', Reason} ->
	    Error = {error, {"Schema update error", Reason}},
	    close_files(InPlace, Error, InitBy),
	    fatal("Schema update error ~p ~p", [Reason, SchemaOps]);
	_ ->
	    ?eval_debug_fun({?MODULE, post_dump}, [InitBy]),
	    close_files(InPlace, ok, InitBy),
	    ok
    end.
%% Apply a list of operations.  NOTE the version dependence: logs of
%% version >= "4.3" store ops in execution order and are applied
%% head-first, while older logs stored them reversed, so the tail is
%% applied before the head.  Do not "simplify" the clause order.
insert_ops(_Tid, _Storage, [], _InPlace, _InitBy, _) -> ok;
insert_ops(Tid, Storage, [Op], InPlace, InitBy, Ver)  when Ver >= "4.3"->
    insert_op(Tid, Storage, Op, InPlace, InitBy),
    ok;
insert_ops(Tid, Storage, [Op | Ops], InPlace, InitBy, Ver)  when Ver >= "4.3"->
    insert_op(Tid, Storage, Op, InPlace, InitBy),
    insert_ops(Tid, Storage, Ops, InPlace, InitBy, Ver);
insert_ops(Tid, Storage, [Op | Ops], InPlace, InitBy, Ver) when Ver < "4.3" ->
    insert_ops(Tid, Storage, Ops, InPlace, InitBy, Ver),
    insert_op(Tid, Storage, Op, InPlace, InitBy).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Normal ops
%% Write one operation to the disc backend of Tab: the DCL log for
%% disc_copies tables, or the dets file otherwise (schema included).
%% A table whose files cannot/should not be opened (see open_files/4,
%% e.g. already_dumped) is silently skipped.
disc_insert(_Tid, Storage, Tab, Key, Val, Op, InPlace, InitBy) ->
    case open_files(Tab, Storage, InPlace, InitBy) of
	true ->
	    case Storage of
		disc_copies when Tab /= schema ->
		    mnesia_log:append({?MODULE,Tab}, {{Tab, Key}, Val, Op}),
		    ok;
		_ ->
		    dets_insert(Op,Tab,Key,Val)
	    end;
	false ->
	    ignore
    end.
%% To fix update_counter so that it behaves better.
%% i.e. if nothing have changed in tab except update_counter
%% trust that the value in the dets file is correct.
%% Otherwise we will get a double increment.
%% This is perfect but update_counter is a dirty op.
%% Apply a single logged operation to the dets file backing Tab.
%% Plain writes/deletes also record the touched key (dets_updated/2)
%% so that a later update_counter for an untouched key can trust the
%% value already present in the dets file and avoid a double
%% increment (update_counter is a dirty op; see the comment above).
dets_insert(Op,Tab,Key,Val) ->
    case Op of
	write ->
	    dets_updated(Tab,Key),
	    ok = dets:insert(Tab, Val);
	delete ->
	    dets_updated(Tab,Key),
	    ok = dets:delete(Tab, Key);
	update_counter ->
	    case dets_incr_counter(Tab,Key) of
		true ->
		    {RecName, Incr} = Val,
		    %% The counter record may be missing or malformed;
		    %% fall back to (re)initializing it.
		    %% is_integer/1 replaces the deprecated integer/1 guard.
		    case catch dets:update_counter(Tab, Key, Incr) of
			CounterVal when is_integer(CounterVal) ->
			    ok;
			_ when Incr < 0 ->
			    Zero = {RecName, Key, 0},
			    ok = dets:insert(Tab, Zero);
			_ ->
			    Init = {RecName, Key, Incr},
			    ok = dets:insert(Tab, Init)
		    end;
		false -> ok
	    end;
	delete_object ->
	    dets_updated(Tab,Key),
	    ok = dets:delete_object(Tab, Val);
	clear_table ->
	    dets_cleared(Tab),
	    ok = dets:match_delete(Tab, '_')
    end.
%% Remember that Key in Tab has been explicitly written/deleted during
%% this dump.  Bookkeeping lives in the process dictionary under
%% mnesia_dumper_dets as a gb_tree of Tab -> gb_set of keys (or the
%% atom 'cleared' once the whole table was cleared, which subsumes
%% individual keys).
dets_updated(Tab, Key) ->
    Tree0 = case get(mnesia_dumper_dets) of
		undefined -> gb_trees:empty();
		T -> T
	    end,
    case gb_trees:lookup(Tab, Tree0) of
	{value, cleared} ->
	    ignore;
	{value, Keys} ->
	    put(mnesia_dumper_dets,
		gb_trees:update(Tab, gb_sets:add(Key, Keys), Tree0));
	none ->
	    put(mnesia_dumper_dets,
		gb_trees:insert(Tab, gb_sets:singleton(Key), Tree0))
    end.
%% Decide whether an update_counter op may be applied to the dets
%% file: only if this dump already touched the key (or cleared the
%% whole table).  Otherwise the on-disc value is already up to date
%% and incrementing again would double-count.
dets_incr_counter(Tab, Key) ->
    case get(mnesia_dumper_dets) of
	undefined ->
	    false;
	Updates ->
	    case gb_trees:lookup(Tab, Updates) of
		{value, cleared} -> true;
		{value, Keys}    -> gb_sets:is_member(Key, Keys);
		none             -> false
	    end
    end.
%% Mark Tab as fully cleared in the per-dump bookkeeping tree; this
%% subsumes any previously recorded per-key updates for the table.
dets_cleared(Tab) ->
    Tree0 = case get(mnesia_dumper_dets) of
		undefined -> gb_trees:empty();
		T -> T
	    end,
    case gb_trees:lookup(Tab, Tree0) of
	{value, cleared} ->
	    ignore;
	_ ->
	    put(mnesia_dumper_dets, gb_trees:enter(Tab, cleared, Tree0))
    end.
%% Apply one data operation, possibly a list of values, to the proper
%% destinations.  At startup everything goes only to disc; at runtime
%% the ram copy and snmp index are updated too, while disc_only tables
%% skip the explicit disc write because dets was already written by
%% the transaction itself.
insert(Tid, Storage, Tab, Key, [Val | Tail], Op, InPlace, InitBy) ->
    insert(Tid, Storage, Tab, Key, Val, Op, InPlace, InitBy),
    insert(Tid, Storage, Tab, Key, Tail, Op, InPlace, InitBy);
insert(_Tid, _Storage, _Tab, _Key, [], _Op, _InPlace, _InitBy) ->
    ok;
insert(Tid, Storage, Tab, Key, Val, Op, InPlace, InitBy) ->
    Item = {{Tab, Key}, Val, Op},
    case InitBy of
	startup ->
	    disc_insert(Tid, Storage, Tab, Key, Val, Op, InPlace, InitBy);
	_ when Storage == ram_copies ->
	    mnesia_tm:do_update_op(Tid, Storage, Item),
	    Snmp = mnesia_tm:prepare_snmp(Tab, Key, [Item]),
	    mnesia_tm:do_snmp(Tid, Snmp);
	_ when Storage == disc_copies ->
	    disc_insert(Tid, Storage, Tab, Key, Val, Op, InPlace, InitBy),
	    mnesia_tm:do_update_op(Tid, Storage, Item),
	    Snmp = mnesia_tm:prepare_snmp(Tab, Key, [Item]),
	    mnesia_tm:do_snmp(Tid, Snmp);
	_ when Storage == disc_only_copies ->
	    mnesia_tm:do_update_op(Tid, Storage, Item),
	    Snmp = mnesia_tm:prepare_snmp(Tab, Key, [Item]),
	    mnesia_tm:do_snmp(Tid, Snmp);
	_ when Storage == unknown ->
	    ignore
    end.
%% Remove the on-disc files of Tab: the .DAT dets file for
%% disc_only_copies and schema, otherwise the .DCL/.DCD log files
%% (closing any dumper-opened log first).  No-op on diskless nodes.
disc_delete_table(Tab, Storage) ->
    case mnesia_monitor:use_dir() of
	true ->
	    if
		Storage == disc_only_copies; Tab == schema ->
		    mnesia_monitor:unsafe_close_dets(Tab),
		    Dat = mnesia_lib:tab2dat(Tab),
		    file:delete(Dat);
		true ->
		    DclFile = mnesia_lib:tab2dcl(Tab),
		    case get({?MODULE,Tab}) of
			{opened_dumper, dcl} ->
			    del_opened_tab(Tab),
			    mnesia_log:unsafe_close_log(Tab);
			_ ->
			    ok
		    end,
		    file:delete(DclFile),
		    DcdFile = mnesia_lib:tab2dcd(Tab),
		    file:delete(DcdFile),
		    ok
	    end,
	    erase({?MODULE, Tab});
	false ->
	    ignore
    end.
%% Drop the transient disc-based index tables; only disc_only_copies
%% tables have them.  (Function name keeps the historical spelling.)
disc_delete_indecies(Tab, Cs, disc_only_copies) ->
    mnesia_index:del_transient(Tab, Cs#cstruct.index, disc_only_copies);
disc_delete_indecies(_Tab, _Cs, _Storage) ->
    ignore.
%% Plain data record replayed from the log at startup.
insert_op(Tid, Storage, {{Tab, Key}, Val, Op}, InPlace, InitBy) ->
    %% Propagate to disc only
    disc_insert(Tid, Storage, Tab, Key, Val, Op, InPlace, InitBy);

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% NOTE that all operations below will only
%% be performed if the dump is initiated by
%% startup or fast_schema_update
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
insert_op(_Tid, schema_ops, _OP, _InPlace, Initby)
  when Initby /= startup,
       Initby /= fast_schema_update,
       Initby /= schema_update ->
    ignore;

%% Wrapped data record carried inside a schema op list.
insert_op(Tid, _, {op, rec, Storage, Item}, InPlace, InitBy) ->
    {{Tab, Key}, ValList, Op} = Item,
    insert(Tid, Storage, Tab, Key, ValList, Op, InPlace, InitBy);
%% Change the storage type of a replica.  On the local node the table
%% files are converted between the .DMP/.DAT/.DCD/.DCL representations
%% and disc indexes are (re)built or dropped as required; finally the
%% updated cstruct is written to the schema.
insert_op(Tid, _, {op, change_table_copy_type, N, FromS, ToS, TabDef}, InPlace, InitBy) ->
    Cs = mnesia_schema:list2cs(TabDef),
    Val = mnesia_schema:insert_cstruct(Tid, Cs, true), % Update ram only
    {schema, Tab, _} = Val,
    case lists:member(N, val({current, db_nodes})) of
	true when InitBy /= startup ->
	    mnesia_controller:add_active_replica(Tab, N, Cs);
	_ ->
	    ignore
    end,
    if
	N == node() ->
	    Dmp = mnesia_lib:tab2dmp(Tab),
	    Dat = mnesia_lib:tab2dat(Tab),
	    Dcd = mnesia_lib:tab2dcd(Tab),
	    Dcl = mnesia_lib:tab2dcl(Tab),
	    case {FromS, ToS} of
		{ram_copies, disc_copies} when Tab == schema ->
		    ok = ensure_rename(Dmp, Dat);
		{ram_copies, disc_copies} ->
		    file:delete(Dcl),
		    ok = ensure_rename(Dmp, Dcd);
		{disc_copies, ram_copies} when Tab == schema ->
		    mnesia_lib:set(use_dir, false),
		    mnesia_monitor:unsafe_close_dets(Tab),
		    file:delete(Dat);
		{disc_copies, ram_copies} ->
		    file:delete(Dcl),
		    file:delete(Dcd);
		{ram_copies, disc_only_copies} ->
		    ok = ensure_rename(Dmp, Dat),
		    true = open_files(Tab, disc_only_copies, InPlace, InitBy),
		    %% ram_delete_table must be done before init_indecies,
		    %% it uses info which is reset in init_indecies,
		    %% it doesn't matter, because init_indecies don't use
		    %% the ram replica of the table when creating the disc
		    %% index; Could be improved :)
		    mnesia_schema:ram_delete_table(Tab, FromS),
		    PosList = Cs#cstruct.index,
		    mnesia_index:init_indecies(Tab, disc_only_copies, PosList);
		{disc_only_copies, ram_copies} ->
		    mnesia_monitor:unsafe_close_dets(Tab),
		    disc_delete_indecies(Tab, Cs, disc_only_copies),
		    case InitBy of
			startup ->
			    ignore;
			_ ->
			    mnesia_controller:get_disc_copy(Tab)
		    end,
		    disc_delete_table(Tab, disc_only_copies);
		{disc_copies, disc_only_copies} ->
		    ok = ensure_rename(Dmp, Dat),
		    true = open_files(Tab, disc_only_copies, InPlace, InitBy),
		    mnesia_schema:ram_delete_table(Tab, FromS),
		    PosList = Cs#cstruct.index,
		    mnesia_index:init_indecies(Tab, disc_only_copies, PosList),
		    file:delete(Dcl),
		    file:delete(Dcd);
		{disc_only_copies, disc_copies} ->
		    mnesia_monitor:unsafe_close_dets(Tab),
		    disc_delete_indecies(Tab, Cs, disc_only_copies),
		    case InitBy of
			startup ->
			    ignore;
			_ ->
			    mnesia_log:ets2dcd(Tab),
			    mnesia_controller:get_disc_copy(Tab),
			    disc_delete_table(Tab, disc_only_copies)
		    end
	    end;
	true ->
	    ignore
    end,
    S = val({schema, storage_type}),
    disc_insert(Tid, S, schema, Tab, Val, write, InPlace, InitBy);
%% Table transform: only needs the new cstruct written; for
%% disc_copies the DCL log is force-opened so following data ops land
%% in it.
insert_op(Tid, _, {op, transform, _Fun, TabDef}, InPlace, InitBy) ->
    Cs = mnesia_schema:list2cs(TabDef),
    case mnesia_lib:cs_to_storage_type(node(), Cs) of
	disc_copies ->
	    open_dcl(Cs#cstruct.name);
	_ ->
	    ignore
    end,
    insert_cstruct(Tid, Cs, true, InPlace, InitBy);

%%% Operations below this are handled without using the log.

%% Backup restore with recreate: wipe every trace of the old table
%% (files, ram copy, checkpoint info) and create fresh empty storage,
%% then fall through to the create_table clause.
insert_op(Tid, _, {op, restore_recreate, TabDef}, InPlace, InitBy) ->
    Cs = mnesia_schema:list2cs(TabDef),
    Tab = Cs#cstruct.name,
    Type = Cs#cstruct.type,
    Storage = mnesia_lib:cs_to_storage_type(node(), Cs),
    %% Delete all possibly existing files and tables
    disc_delete_table(Tab, Storage),
    disc_delete_indecies(Tab, Cs, Storage),
    case InitBy of
	startup ->
	    ignore;
	_ ->
	    case ?catch_val({Tab, cstruct}) of
		{'EXIT', _} -> ignore;
		_ ->
		    mnesia_schema:ram_delete_table(Tab, Storage),
		    mnesia_checkpoint:tm_del_copy(Tab, node())
	    end
    end,
    %% And create new ones..
    if
	(InitBy == startup) or (Storage == unknown) ->
	    ignore;
	Storage == ram_copies ->
	    Args = [{keypos, 2}, public, named_table, Type],
	    mnesia_monitor:mktab(Tab, Args);
	Storage == disc_copies ->
	    Args = [{keypos, 2}, public, named_table, Type],
	    mnesia_monitor:mktab(Tab, Args),
	    File = mnesia_lib:tab2dcd(Tab),
	    FArg = [{file, File}, {name, {mnesia,create}},
		    {repair, false}, {mode, read_write}],
	    {ok, Log} = mnesia_monitor:open_log(FArg),
	    mnesia_monitor:unsafe_close_log(Log);
	Storage == disc_only_copies ->
	    File = mnesia_lib:tab2dat(Tab),
	    file:delete(File),
	    Args = [{file, mnesia_lib:tab2dat(Tab)},
		    {type, mnesia_lib:disk_type(Tab, Type)},
		    {keypos, 2},
		    {repair, mnesia_monitor:get_env(auto_repair)}],
	    mnesia_monitor:open_dets(Tab, Args)
    end,
    insert_op(Tid, ignore, {op, create_table, TabDef}, InPlace, InitBy);
%% Create a table.  At startup only the on-disc file skeleton is
%% ensured (an empty .DCD log or dets file); at runtime the replica
%% set, whereabouts and the actual table are set up as well.
insert_op(Tid, _, {op, create_table, TabDef}, InPlace, InitBy) ->
    Cs = mnesia_schema:list2cs(TabDef),
    insert_cstruct(Tid, Cs, false, InPlace, InitBy),
    Tab = Cs#cstruct.name,
    Storage = mnesia_lib:cs_to_storage_type(node(), Cs),
    case InitBy of
	startup ->
	    case Storage of
		unknown ->
		    ignore;
		ram_copies ->
		    ignore;
		disc_copies ->
		    Dcd = mnesia_lib:tab2dcd(Tab),
		    case mnesia_lib:exists(Dcd) of
			true -> ignore;
			false ->
			    mnesia_log:open_log(temp,
						mnesia_log:dcl_log_header(),
						Dcd,
						false,
						false,
						read_write),
			    mnesia_log:unsafe_close_log(temp)
		    end;
		_ ->
		    Args = [{file, mnesia_lib:tab2dat(Tab)},
			    {type, mnesia_lib:disk_type(Tab, Cs#cstruct.type)},
			    {keypos, 2},
			    {repair, mnesia_monitor:get_env(auto_repair)}],
		    case mnesia_monitor:open_dets(Tab, Args) of
			{ok, _} ->
			    mnesia_monitor:unsafe_close_dets(Tab);
			{error, Error} ->
			    exit({"Failed to create dets table", Error})
		    end
	    end;
	_ ->
	    Copies = mnesia_lib:copy_holders(Cs),
	    Active = mnesia_lib:intersect(Copies, val({current, db_nodes})),
	    [mnesia_controller:add_active_replica(Tab, N, Cs) || N <- Active],
	    case Storage of
		unknown ->
		    mnesia_lib:unset({Tab, create_table}),
		    case Cs#cstruct.local_content of
			true ->
			    ignore;
			false ->
			    mnesia_lib:set_remote_where_to_read(Tab)
		    end;
		_ ->
		    case Cs#cstruct.local_content of
			true ->
			    mnesia_lib:set_local_content_whereabouts(Tab);
			false ->
			    mnesia_lib:set({Tab, where_to_read}, node())
		    end,
		    case Storage of
			ram_copies ->
			    ignore;
			_ ->
			    %% Indecies are still created by loader
			    disc_delete_indecies(Tab, Cs, Storage)
			    %% disc_delete_table(Tab, Storage)
		    end,
		    %% Update whereabouts and create table
		    mnesia_controller:create_table(Tab),
		    mnesia_lib:unset({Tab, create_table})
	    end
    end;
%% Finish a table dump: promote the .DMP file to .DCD, or remove both
%% files when the dumped table was empty.
insert_op(_Tid, _, {op, dump_table, Size, TabDef}, _InPlace, _InitBy) ->
    case Size of
	unknown ->
	    ignore;
	_ ->
	    Cs = mnesia_schema:list2cs(TabDef),
	    Tab = Cs#cstruct.name,
	    Dmp = mnesia_lib:tab2dmp(Tab),
	    Dat = mnesia_lib:tab2dcd(Tab),
	    case Size of
		0 ->
		    %% Assume that table files already are closed
		    file:delete(Dmp),
		    file:delete(Dat);
		_ ->
		    ok = ensure_rename(Dmp, Dat)
	    end
    end;

%% Delete a table: drop files, indexes, ram copy and checkpoint info,
%% then remove the cstruct from the schema.
insert_op(Tid, _, {op, delete_table, TabDef}, InPlace, InitBy) ->
    Cs = mnesia_schema:list2cs(TabDef),
    Tab = Cs#cstruct.name,
    case mnesia_lib:cs_to_storage_type(node(), Cs) of
	unknown ->
	    ignore;
	Storage ->
	    disc_delete_table(Tab, Storage),
	    disc_delete_indecies(Tab, Cs, Storage),
	    case InitBy of
		startup ->
		    ignore;
		_ ->
		    mnesia_schema:ram_delete_table(Tab, Storage),
		    mnesia_checkpoint:tm_del_copy(Tab, node())
	    end
    end,
    delete_cstruct(Tid, Cs, InPlace, InitBy);

%% Clear all records of a table.
insert_op(Tid, _, {op, clear_table, TabDef}, InPlace, InitBy) ->
    Cs = mnesia_schema:list2cs(TabDef),
    Tab = Cs#cstruct.name,
    case mnesia_lib:cs_to_storage_type(node(), Cs) of
	unknown ->
	    ignore;
	Storage ->
	    Oid = '_', %%val({Tab, wild_pattern}),
	    if Storage == disc_copies ->
		    open_dcl(Cs#cstruct.name);
	       true ->
		    ignore
	    end,
	    %% Need to catch this, it crashes on ram_copies if
	    %% the op comes before table is loaded at startup.
	    catch insert(Tid, Storage, Tab, '_', Oid, clear_table, InPlace, InitBy)
    end;
%% Merge a schema received from another node.  If the merge changes
%% this node's schema storage to disc_copies (diskless node turning
%% disc-based), the mnesia directory and logs are initialized on the
%% fly and the schema is dumped to disc before updating
%% where_to_commit.
insert_op(Tid, _, {op, merge_schema, TabDef}, InPlace, InitBy) ->
    Cs = mnesia_schema:list2cs(TabDef),
    case Cs#cstruct.name of
	schema ->
	    %% If we bootstrap an empty (diskless) mnesia from another node
	    %% we might have changed the storage_type of schema.
	    %% I think this is a good place to do it.
	    Update = fun(NS = {Node,Storage}) ->
			     case mnesia_lib:cs_to_storage_type(Node, Cs) of
				 Storage -> NS;
				 disc_copies when Node == node() ->
				     Dir = mnesia_lib:dir(),
				     ok = mnesia_schema:opt_create_dir(true, Dir),
				     mnesia_schema:purge_dir(Dir, []),
				     mnesia_log:purge_all_logs(),
				     mnesia_lib:set(use_dir, true),
				     mnesia_log:init(),
				     Ns = val({current, db_nodes}),
				     F = fun(U) -> mnesia_recover:log_mnesia_up(U) end,
				     lists:foreach(F, Ns),
				     raw_named_dump_table(schema, dat),
				     temp_set_master_nodes(),
				     {Node,disc_copies};
				 CSstorage ->
				     {Node,CSstorage}
			     end
		     end,
	    W2C0 = val({schema, where_to_commit}),
	    W2C = case W2C0 of
		      {blocked, List} ->
			  {blocked,lists:map(Update,List)};
		      List ->
			  lists:map(Update,List)
		  end,
	    if W2C == W2C0 -> ignore;
	       true -> mnesia_lib:set({schema, where_to_commit}, W2C)
	    end;
	_ ->
	    ignore
    end,
    insert_cstruct(Tid, Cs, false, InPlace, InitBy);
%% Remove one replica of a table.  When the removed replica is the
%% local one, all local traces (files, indexes, ram copy, whereabouts)
%% are dropped; otherwise only redirect reads away from the removed
%% node if necessary.
insert_op(Tid, _, {op, del_table_copy, Storage, Node, TabDef}, InPlace, InitBy) ->
    Cs = mnesia_schema:list2cs(TabDef),
    Tab = Cs#cstruct.name,
    if
	Tab == schema, Storage == ram_copies ->
	    insert_cstruct(Tid, Cs, true, InPlace, InitBy);
	Tab /= schema ->
	    mnesia_controller:del_active_replica(Tab, Node),
	    mnesia_lib:del({Tab, Storage}, Node),
	    if
		Node == node() ->
		    case Cs#cstruct.local_content of
			true -> mnesia_lib:set({Tab, where_to_read}, nowhere);
			false -> mnesia_lib:set_remote_where_to_read(Tab)
		    end,
		    mnesia_lib:del({schema, local_tables}, Tab),
		    mnesia_lib:set({Tab, storage_type}, unknown),
		    insert_cstruct(Tid, Cs, true, InPlace, InitBy),
		    disc_delete_table(Tab, Storage),
		    disc_delete_indecies(Tab, Cs, Storage),
		    mnesia_schema:ram_delete_table(Tab, Storage),
		    mnesia_checkpoint:tm_del_copy(Tab, Node);
		true ->
		    case val({Tab, where_to_read}) of
			Node ->
			    mnesia_lib:set_remote_where_to_read(Tab);
			_ ->
			    ignore
		    end,
		    insert_cstruct(Tid, Cs, true, InPlace, InitBy)
	    end
    end;
%% The remaining schema ops mostly just persist the updated cstruct;
%% a few also maintain snmp hooks or secondary indexes.
insert_op(Tid, _, {op, add_table_copy, _Storage, _Node, TabDef}, InPlace, InitBy) ->
    %% During prepare commit, the files were created
    %% and the replica was announced
    Cs = mnesia_schema:list2cs(TabDef),
    insert_cstruct(Tid, Cs, true, InPlace, InitBy);
insert_op(Tid, _, {op, add_snmp, _Us, TabDef}, InPlace, InitBy) ->
    Cs = mnesia_schema:list2cs(TabDef),
    insert_cstruct(Tid, Cs, true, InPlace, InitBy);
%% Tear down the snmp index table when snmp support is removed.
insert_op(Tid, _, {op, del_snmp, TabDef}, InPlace, InitBy) ->
    Cs = mnesia_schema:list2cs(TabDef),
    Tab = Cs#cstruct.name,
    Storage = mnesia_lib:cs_to_storage_type(node(), Cs),
    if
	InitBy /= startup,
	Storage /= unknown ->
	    case ?catch_val({Tab, {index, snmp}}) of
		{'EXIT', _} ->
		    ignore;
		Stab ->
		    mnesia_snmp_hook:delete_table(Tab, Stab),
		    mnesia_lib:unset({Tab, {index, snmp}})
	    end;
	true ->
	    ignore
    end,
    insert_cstruct(Tid, Cs, true, InPlace, InitBy);
%% Build a new secondary index (disc index files need the table files
%% to be opened first at startup).
insert_op(Tid, _, {op, add_index, Pos, TabDef}, InPlace, InitBy) ->
    Cs = mnesia_schema:list2cs(TabDef),
    Tab = insert_cstruct(Tid, Cs, true, InPlace, InitBy),
    Storage = mnesia_lib:cs_to_storage_type(node(), Cs),
    case InitBy of
	startup when Storage == disc_only_copies ->
	    true = open_files(Tab, Storage, InPlace, InitBy),
	    mnesia_index:init_indecies(Tab, Storage, [Pos]);
	startup ->
	    ignore;
	_ ->
	    mnesia_index:init_indecies(Tab, Storage, [Pos])
    end;
%% Drop a secondary index.
insert_op(Tid, _, {op, del_index, Pos, TabDef}, InPlace, InitBy) ->
    Cs = mnesia_schema:list2cs(TabDef),
    Tab = Cs#cstruct.name,
    Storage = mnesia_lib:cs_to_storage_type(node(), Cs),
    case InitBy of
	startup when Storage == disc_only_copies ->
	    mnesia_index:del_index_table(Tab, Storage, Pos);
	startup ->
	    ignore;
	_ ->
	    mnesia_index:del_index_table(Tab, Storage, Pos)
    end,
    insert_cstruct(Tid, Cs, true, InPlace, InitBy);
insert_op(Tid, _, {op, change_table_access_mode,TabDef, _OldAccess, _Access}, InPlace, InitBy) ->
    Cs = mnesia_schema:list2cs(TabDef),
    case InitBy of
	startup -> ignore;
	_ -> mnesia_controller:change_table_access_mode(Cs)
    end,
    insert_cstruct(Tid, Cs, true, InPlace, InitBy);
insert_op(Tid, _, {op, change_table_load_order, TabDef, _OldLevel, _Level}, InPlace, InitBy) ->
    Cs = mnesia_schema:list2cs(TabDef),
    insert_cstruct(Tid, Cs, true, InPlace, InitBy);
insert_op(Tid, _, {op, delete_property, TabDef, PropKey}, InPlace, InitBy) ->
    Cs = mnesia_schema:list2cs(TabDef),
    Tab = Cs#cstruct.name,
    mnesia_lib:unset({Tab, user_property, PropKey}),
    insert_cstruct(Tid, Cs, true, InPlace, InitBy);
insert_op(Tid, _, {op, write_property, TabDef, _Prop}, InPlace, InitBy) ->
    Cs = mnesia_schema:list2cs(TabDef),
    insert_cstruct(Tid, Cs, true, InPlace, InitBy);
insert_op(Tid, _, {op, change_table_frag, _Change, TabDef}, InPlace, InitBy) ->
    Cs = mnesia_schema:list2cs(TabDef),
    insert_cstruct(Tid, Cs, true, InPlace, InitBy).
%% Ensure the disc file of Tab is open for this dump.  Tracks state in
%% the process dictionary under {?MODULE, Tab}: {opened_dumper, dcl|dat}
%% or already_dumped.  Returns true when the table may receive writes,
%% false otherwise (ram/unknown storage, vanished table, or already
%% dumped via ets2dcd).
open_files(Tab, Storage, UpdateInPlace, InitBy)
  when Storage /= unknown, Storage /= ram_copies ->
    case get({?MODULE, Tab}) of
	undefined ->
	    case ?catch_val({Tab, setorbag}) of
		{'EXIT', _} ->
		    false;
		Type ->
		    case Storage of
			disc_copies when Tab /= schema ->
			    Bool = open_disc_copies(Tab, InitBy),
			    Bool;
			_ ->
			    Fname = prepare_open(Tab, UpdateInPlace),
			    Args = [{file, Fname},
				    {keypos, 2},
				    {repair, mnesia_monitor:get_env(auto_repair)},
				    {type, mnesia_lib:disk_type(Tab, Type)}],
			    {ok, _} = mnesia_monitor:open_dets(Tab, Args),
			    put({?MODULE, Tab}, {opened_dumper, dat}),
			    true
		    end
	    end;
	already_dumped ->
	    false;
	{opened_dumper, _} ->
	    true
    end;
open_files(_Tab, _Storage, _UpdateInPlace, _InitBy) ->
    false.
%% For a disc_copies table, decide between appending to the .DCL log
%% or dumping the whole ets table to a fresh .DCD file.  The .DCD
%% route is taken when the .DCL has grown beyond dc_dump_limit (or
%% ?DumpToEtsMultiplier) times the .DCD size — except at startup,
%% where the ram copy is not loaded yet and appending is the only
%% option.
open_disc_copies(Tab, InitBy) ->
    DclF = mnesia_lib:tab2dcl(Tab),
    DumpEts =
	case file:read_file_info(DclF) of
	    {error, enoent} ->
		false;
	    {ok, DclInfo} ->
		DcdF =  mnesia_lib:tab2dcd(Tab),
		case file:read_file_info(DcdF) of
		    {error, Reason} ->
			mnesia_lib:dbg_out("File ~p info_error ~p ~n",
					   [DcdF, Reason]),
			true;
		    {ok, DcdInfo} ->
			Mul = case ?catch_val(dc_dump_limit) of
				  {'EXIT', _} -> ?DumpToEtsMultiplier;
				  Val -> Val
			      end,
			DcdInfo#file_info.size =< (DclInfo#file_info.size * Mul)
		end
	end,
    if
	DumpEts == false; InitBy == startup ->
	    mnesia_log:open_log({?MODULE,Tab},
				mnesia_log:dcl_log_header(),
				DclF,
				mnesia_lib:exists(DclF),
				mnesia_monitor:get_env(auto_repair),
				read_write),
	    put({?MODULE, Tab}, {opened_dumper, dcl}),
	    true;
	true ->
	    mnesia_log:ets2dcd(Tab),
	    put({?MODULE, Tab}, already_dumped),
	    false
    end.
%% Always opens the dcl file for writing, overriding the already_dumped
%% mechanism; used for schema transactions.
%% Force the .DCL log of Tab open for writing, even if this dump has
%% already marked the table as already_dumped; used by schema
%% transactions (transform/clear) that must append further ops.
open_dcl(Tab) ->
    case get({?MODULE, Tab}) of
	{opened_dumper, _} ->
	    true;
	_ -> %% undefined or already_dumped
	    DclF = mnesia_lib:tab2dcl(Tab),
	    mnesia_log:open_log({?MODULE,Tab},
				mnesia_log:dcl_log_header(),
				DclF,
				mnesia_lib:exists(DclF),
				mnesia_monitor:get_env(auto_repair),
				read_write),
	    put({?MODULE, Tab}, {opened_dumper, dcl}),
	    true
    end.
%% Pick the dets file to write into: the .DAT file itself when
%% updating in place, otherwise a .TMP copy of it (swapped in later by
%% do_close/5 on success).
prepare_open(Tab, UpdateInPlace) ->
    Dat =  mnesia_lib:tab2dat(Tab),
    case UpdateInPlace of
	true ->
	    Dat;
	false ->
	    Tmp = mnesia_lib:tab2tmp(Tab),
	    case catch mnesia_lib:copy_file(Dat, Tmp) of
		ok ->
		    Tmp;
		Error ->
		    fatal("Cannot copy dets file ~p to ~p: ~p~n",
			  [Dat, Tmp, Error])
	    end
    end.
%% Forget the open-file bookkeeping for Tab (see open_files/4).
del_opened_tab(Tab) ->
    erase({?MODULE, Tab}).
%% Close every table file this dump opened, walking the process
%% dictionary for {?MODULE, Tab} entries.  disc_only_copies tables
%% stay open at runtime (closed only at startup); DCL logs are simply
%% closed; dets files get the full tmp-swap/cleanup in do_close/5.
close_files(UpdateInPlace, Outcome, InitBy) -> % Update in place
    close_files(UpdateInPlace, Outcome, InitBy, get()).

close_files(InPlace, Outcome, InitBy, [{{?MODULE, Tab}, already_dumped} | Tail]) ->
    erase({?MODULE, Tab}),
    close_files(InPlace, Outcome, InitBy, Tail);
close_files(InPlace, Outcome, InitBy, [{{?MODULE, Tab}, {opened_dumper, Type}} | Tail]) ->
    erase({?MODULE, Tab}),
    case val({Tab, storage_type}) of
	disc_only_copies when InitBy /= startup ->
	    ignore;
	disc_copies when Tab /= schema ->
	    mnesia_log:close_log({?MODULE,Tab});
	Storage ->
	    do_close(InPlace, Outcome, Tab, Type, Storage)
    end,
    close_files(InPlace, Outcome, InitBy, Tail);
close_files(InPlace, Outcome, InitBy, [_ | Tail]) ->
    close_files(InPlace, Outcome, InitBy, Tail);
close_files(_, _, _InitBy, []) ->
    ok.
%% If storage is unknown during close clean up files, this can happen if timing
%% is right and dirty_write conflicts with schema operations.
do_close(_, _, Tab, dcl, unknown) ->
    mnesia_log:close_log({?MODULE,Tab}),
    file:delete(mnesia_lib:tab2dcl(Tab));
do_close(_, _, Tab, dcl, _) ->  %% To be safe, can it happen?
    mnesia_log:close_log({?MODULE,Tab});
%% dets case: on success without in-place updates, promote the .TMP
%% copy to .DAT; otherwise clean up whichever file is now stale.
do_close(InPlace, Outcome, Tab, dat, Storage) ->
    mnesia_monitor:close_dets(Tab),
    if
	Storage == unknown, InPlace == true  ->
	    file:delete(mnesia_lib:tab2dat(Tab));
	InPlace == true ->
	    %% Update in place
	    ok;
	Outcome == ok, Storage /= unknown ->
	    %% Success: swap tmp files with dat files
	    TabDat = mnesia_lib:tab2dat(Tab),
	    ok = file:rename(mnesia_lib:tab2tmp(Tab), TabDat);
	true ->
	    file:delete(mnesia_lib:tab2tmp(Tab))
    end.
%% Rename From to To, treating "From is gone but To already exists" as
%% an earlier successful rename.  Only when neither file exists is the
%% rename reported as failed.
ensure_rename(From, To) ->
    FromExists = mnesia_lib:exists(From),
    if
	FromExists ->
	    file:rename(From, To);
	true ->
	    case mnesia_lib:exists(To) of
		true -> ok;
		false -> {error, {rename_failed, From, To}}
	    end
    end.
%% Store the cstruct in ram via mnesia_schema and persist the
%% resulting schema item on disc.  Returns the table name.
insert_cstruct(Tid, Cs, KeepWhereabouts, InPlace, InitBy) ->
    {schema, Tab, _} = Item = mnesia_schema:insert_cstruct(Tid, Cs, KeepWhereabouts),
    SchemaStorage = val({schema, storage_type}),
    disc_insert(Tid, SchemaStorage, schema, Tab, Item, write, InPlace, InitBy),
    Tab.
%% Remove the cstruct from ram via mnesia_schema and record the
%% deletion in the on-disc schema.  Returns the table name.
delete_cstruct(Tid, Cs, InPlace, InitBy) ->
    {schema, Tab, _} = Item = mnesia_schema:delete_cstruct(Tid, Cs),
    SchemaStorage = val({schema, storage_type}),
    disc_insert(Tid, SchemaStorage, schema, Tab, Item, delete, InPlace, InitBy),
    Tab.
%% For every local table, temporarily register all remote replica
%% holders as master nodes so the imminent table loading prefers them.
temp_set_master_nodes() ->
    Tabs = val({schema, local_tables}),
    Masters = [{Tab, (val({Tab, disc_copies}) ++
		      val({Tab, ram_copies}) ++
		      val({Tab, disc_only_copies})) -- [node()]}
	       || Tab <- Tabs],
    %% UseDir = false since we don't want to remember these
    %% masternodes, and IsRunning = yes since we want this to be
    %% known during table loading (we are running really soon anyway).
    mnesia_recover:log_master_nodes(Masters, false, yes),
    ok.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Raw dump of table. Dumper must have unique access to the ets table.

%% Dump the ets table Tab into a dets file (.DAT or .DMP, per Ftype)
%% via a .TMP file that is renamed into place only on success.  The
%% table is locked and fixed for the duration of the dump.
raw_named_dump_table(Tab, Ftype) ->
    case mnesia_monitor:use_dir() of
	true ->
	    mnesia_lib:lock_table(Tab),
	    TmpFname = mnesia_lib:tab2tmp(Tab),
	    Fname =
		case Ftype of
		    dat -> mnesia_lib:tab2dat(Tab);
		    dmp -> mnesia_lib:tab2dmp(Tab)
		end,
	    file:delete(TmpFname),
	    file:delete(Fname),
	    TabSize = ?ets_info(Tab, size),
	    TabRef = Tab,
	    DiskType = mnesia_lib:disk_type(Tab),
	    Args = [{file, TmpFname},
		    {keypos, 2},
		    %% {ram_file, true},
		    {estimated_no_objects, TabSize + 256},
		    {repair, mnesia_monitor:get_env(auto_repair)},
		    {type, DiskType}],
	    case mnesia_lib:dets_sync_open(TabRef, Args) of
		{ok, TabRef} ->
		    Storage = ram_copies,
		    mnesia_lib:db_fixtable(Storage, Tab, true),
		    case catch raw_dump_table(TabRef, Tab) of
			{'EXIT', Reason} ->
			    mnesia_lib:db_fixtable(Storage, Tab, false),
			    mnesia_lib:dets_sync_close(Tab),
			    file:delete(TmpFname),
			    mnesia_lib:unlock_table(Tab),
			    exit({"Dump of table to disc failed", Reason});
			ok ->
			    mnesia_lib:db_fixtable(Storage, Tab, false),
			    mnesia_lib:dets_sync_close(Tab),
			    mnesia_lib:unlock_table(Tab),
			    ok = file:rename(TmpFname, Fname)
		    end;
		{error, Reason} ->
		    mnesia_lib:unlock_table(Tab),
		    exit({"Open of file before dump to disc failed", Reason})
	    end;
	false ->
	    exit({has_no_disc, node()})
    end.
%% Bulk-copy all objects from the ets table into the open dets table.
raw_dump_table(DetsRef, EtsRef) ->
    dets:from_ets(DetsRef, EtsRef).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Load regulator
%%
%% This is a poor man's substitute for a fair scheduler algorithm
%% in the Erlang emulator. The mnesia_dumper process performs many
%% costly BIF invocations and must pay for this. But since the
%% emulator does not handle this properly, we must compensate with
%% some form of self-imposed load regulation in order not to steal
%% all computation power in the Erlang emulator and make other
%% processes starve. Hopefully this is a temporary solution.
%% Start the load-regulator process, if regulation is enabled.
%% Returns the regulator pid, or the atom 'nopid' when the
%% dump_log_load_regulation environment flag is false.  A start
%% failure is fatal for mnesia.
start_regulator() ->
    case mnesia_monitor:get_env(dump_log_load_regulation) of
        false ->
            nopid;
        true ->
            N = ?REGULATOR_NAME,
            case mnesia_monitor:start_proc(N, ?MODULE, regulator_init, [self()]) of
                {ok, Pid} ->
                    Pid;
                {error, Reason} ->
                    fatal("Failed to start ~n: ~p~n", [N, Reason])
            end
    end.
%% Body of the regulator process (spawned via mnesia_monitor:start_proc/4).
%% Registers under ?REGULATOR_NAME, acks the parent, and enters the
%% service loop.
regulator_init(Parent) ->
    %% No need for trapping exits.
    %% Running at low priority is what produces the regulation effect:
    %% callers waiting on our reply implicitly yield to other processes.
    process_flag(priority, low),
    register(?REGULATOR_NAME, self()),
    proc_lib:init_ack(Parent, {ok, self()}),
    regulator_loop().
%% Serve regulation round-trips until told to stop.  Because this
%% process runs at low priority, every request/reply exchange yields
%% scheduling time to other processes, throttling the dumper.
regulator_loop() ->
    receive
        {stop, Sender} ->
            Sender ! {stopped, self()},
            exit(normal);
        {regulate, Sender} ->
            Sender ! {regulated, self()},
            regulator_loop()
    end.
%% Synchronous rendezvous with the regulator process; a no-op when
%% regulation is disabled (RegulatorPid == nopid).  Waiting for the
%% low-priority regulator's reply gives other processes CPU time.
regulate(nopid) ->
    ok;
regulate(RegulatorPid) ->
    RegulatorPid ! {regulate, self()},
    receive
        {regulated, RegulatorPid} -> ok
    end.
%% Read a cached mnesia value, delegating to mnesia_lib:other_val/2
%% when the lookup fails (e.g. while mnesia is starting or stopping).
val(Var) ->
    case ?catch_val(Var) of
        {'EXIT', Reason} -> mnesia_lib:other_val(Var, Reason);
        Value -> Value
    end.
| null | https://raw.githubusercontent.com/yrashk/erlang/e1282325ed75e52a98d58f5bd9fb0fa27896173f/lib/mnesia/src/mnesia_dumper.erl | erlang |
%CopyrightBegin%
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
%CopyrightEnd%
scan_decisions Initial scan for decisions
startup Initial dump during startup
schema_prepare Dump initiated during schema transaction preparation
schema_update Dump initiated during schema transaction commit
fast_schema_update A schema_update, but ignores the log file
user Dump initiated by user
write_threshold Automatic dump caused by too many log writes
time_threshold Automatic dump caused by timeout
Public interface
Internal stuff
Somebody else is sending a dump request
Don't care if we lost a few writes
Returns 'ok' or exits
Scan for decisions
Propagate the log into the DAT-files
And now to the crucial point...
Ignore rest of the log
Determine the Outcome of the transaction and recover it
Force a dump of the transaction log in order to let the
dumper perform needed updates
No need for a full transaction log dump.
Ignore the log file and perform only perform
the corresponding updates.
Normal ops
To fix update_counter so that it behaves better.
i.e. if nothing have changed in tab except update_counter
trust that the value in the dets file is correct.
Otherwise we will get a double increment.
This is perfect but update_counter is a dirty op.
Propagate to disc only
NOTE that all operations below will only
be performed if the dump is initiated by
startup or fast_schema_update
Update ram only
ram_delete_table must be done before init_indecies,
it uses info which is reset in init_indecies,
it doesn't matter, because init_indecies don't use
the ram replica of the table when creating the disc
index; Could be improved :)
Operations below this are handled without using the logg.
Delete all possibly existing files and tables
And create new ones..
Indecies are still created by loader
disc_delete_table(Tab, Storage)
Update whereabouts and create table
Assume that table files already are closed
val({Tab, wild_pattern}),
Need to catch this, it crashes on ram_copies if
the op comes before table is loaded at startup.
If we bootstrap an empty (diskless) mnesia from another node
we might have changed the storage_type of schema.
I think this is a good place to do it.
During prepare commit, the files was created
and the replica was announced
Always opens the dcl file for writing overriding already_dumped
undefined or already_dumped
Update in place
If storage is unknown during close clean up files, this can happen if timing
is right and dirty_write conflicts with schema operations.
To be safe, can it happen?
Update in place
masternodes and we are running (really soon anyway) since we want this
to be known during table loading.
{ram_file, true},
Load regulator
This is a poor mans substitute for a fair scheduler algorithm
Emulator does not handle this properly we must compensate for
this with some form of load regulation of ourselves in order to
other processes starve. Hopefully this is a temporary solution.
No need for trapping exits.
Using low priority causes the regulation | Copyright Ericsson AB 1996 - 2009 . All Rights Reserved .
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
-module(mnesia_dumper).
%% The InitBy arg may be one of the following:
-export([
get_log_writes/0,
incr_log_writes/0,
raw_dump_table/2,
raw_named_dump_table/2,
start_regulator/0,
opt_dump_log/1,
update/3
]).
-export([regulator_init/1]).
-include("mnesia.hrl").
-include_lib("kernel/include/file.hrl").
-import(mnesia_lib, [fatal/2, dbg_out/2]).
-define(REGULATOR_NAME, mnesia_dumper_load_regulator).
-define(DumpToEtsMultiplier, 4).
%% Total number of transaction log writes so far: the committed
%% counter plus however much of the current write budget (the
%% countdown towards the next automatic dump) has been consumed.
get_log_writes() ->
    Threshold = mnesia_monitor:get_env(dump_log_write_threshold),
    Committed = mnesia_lib:read_counter(trans_log_writes),
    Remaining = mnesia_lib:read_counter(trans_log_writes_left),
    Committed + (Threshold - Remaining).
%% Consume one unit of the log-write budget.  When the budget is
%% exhausted, fold it back into the counters and request an async
%% log dump via adjust_log_writes/1.
incr_log_writes() ->
    case mnesia_lib:incr_counter(trans_log_writes_left, -1) of
        Left when Left > 0 ->
            ignore;
        _ ->
            adjust_log_writes(true)
    end.
%% Reset the 'trans_log_writes_left' budget to the configured threshold
%% and fold the consumed part into the cumulative 'trans_log_writes'
%% counter.  A global lock (single retry) ensures only one process at a
%% time performs the adjustment.  When DoCast is true, an asynchronous
%% log dump is also requested.
%%
%% NOTE(review): the 'false ->' branch body ('ignore;') was missing in
%% this copy (dropped together with its trailing comment), which made
%% the function a syntax error; restored below.
adjust_log_writes(DoCast) ->
    Token = {mnesia_adjust_log_writes, self()},
    case global:set_lock(Token, [node()], 1) of
        false ->
            %% Somebody else is sending a dump request; nothing to do.
            ignore;
        true ->
            case DoCast of
                false ->
                    ignore;
                true ->
                    mnesia_controller:async_dump_log(write_threshold)
            end,
            Max = mnesia_monitor:get_env(dump_log_write_threshold),
            Left = mnesia_lib:read_counter(trans_log_writes_left),
            %% Don't care if we lose a few writes between these two ops.
            mnesia_lib:set_counter(trans_log_writes_left, Max),
            Diff = Max - Left,
            mnesia_lib:incr_counter(trans_log_writes, Diff),
            global:del_lock(Token, [node()])
    end.
%% Dump the transaction log if needed, routing the work through the
%% load-regulator process when one is running (nopid otherwise).
opt_dump_log(InitBy) ->
    Reg = case whereis(?REGULATOR_NAME) of
              undefined ->
                  nopid;
              Pid when pid(Pid) ->
                  Pid
          end,
    perform_dump(InitBy, Reg).
perform_dump(InitBy, Regulator) when InitBy == scan_decisions ->
?eval_debug_fun({?MODULE, perform_dump}, [InitBy]),
dbg_out("Transaction log dump initiated by ~w~n", [InitBy]),
scan_decisions(mnesia_log:previous_log_file(), InitBy, Regulator),
scan_decisions(mnesia_log:latest_log_file(), InitBy, Regulator);
perform_dump(InitBy, Regulator) ->
?eval_debug_fun({?MODULE, perform_dump}, [InitBy]),
LogState = mnesia_log:prepare_log_dump(InitBy),
dbg_out("Transaction log dump initiated by ~w: ~w~n",
[InitBy, LogState]),
adjust_log_writes(false),
case LogState of
already_dumped ->
mnesia_recover:allow_garb(),
dumped;
{needs_dump, Diff} ->
U = mnesia_monitor:get_env(dump_log_update_in_place),
Cont = mnesia_log:init_log_dump(),
mnesia_recover:sync(),
case catch do_perform_dump(Cont, U, InitBy, Regulator, undefined) of
ok ->
?eval_debug_fun({?MODULE, post_dump}, [InitBy]),
case mnesia_monitor:use_dir() of
true ->
mnesia_recover:dump_decision_tab();
false ->
mnesia_log:purge_some_logs()
end,
mnesia_recover:allow_garb(),
mnesia_log:confirm_log_dump(Diff);
{error, Reason} ->
{error, Reason};
{'EXIT', {Desc, Reason}} ->
case mnesia_monitor:get_env(auto_repair) of
true ->
mnesia_lib:important(Desc, Reason),
mnesia_log:confirm_log_dump(Diff);
false ->
fatal(Desc, Reason)
end
end;
{error, Reason} ->
{error, {"Cannot prepare log dump", Reason}}
end.
scan_decisions(Fname, InitBy, Regulator) ->
Exists = mnesia_lib:exists(Fname),
case Exists of
false ->
ok;
true ->
Header = mnesia_log:trans_log_header(),
Name = previous_log,
mnesia_log:open_log(Name, Header, Fname, Exists,
mnesia_monitor:get_env(auto_repair), read_only),
Cont = start,
Res = (catch do_perform_dump(Cont, false, InitBy, Regulator, undefined)),
mnesia_log:close_log(Name),
case Res of
ok -> ok;
{'EXIT', Reason} -> {error, Reason}
end
end.
do_perform_dump(Cont, InPlace, InitBy, Regulator, OldVersion) ->
case mnesia_log:chunk_log(Cont) of
{C2, Recs} ->
case catch insert_recs(Recs, InPlace, InitBy, Regulator, OldVersion) of
{'EXIT', R} ->
Reason = {"Transaction log dump error: ~p~n", [R]},
close_files(InPlace, {error, Reason}, InitBy),
exit(Reason);
Version ->
do_perform_dump(C2, InPlace, InitBy, Regulator, Version)
end;
eof ->
close_files(InPlace, ok, InitBy),
erase(mnesia_dumper_dets),
ok
end.
insert_recs([Rec | Recs], InPlace, InitBy, Regulator, LogV) ->
regulate(Regulator),
case insert_rec(Rec, InPlace, InitBy, LogV) of
LogH when record(LogH, log_header) ->
insert_recs(Recs, InPlace, InitBy, Regulator, LogH#log_header.log_version);
_ ->
insert_recs(Recs, InPlace, InitBy, Regulator, LogV)
end;
insert_recs([], _InPlace, _InitBy, _Regulator, Version) ->
Version.
insert_rec(Rec, _InPlace, scan_decisions, _LogV) ->
if
record(Rec, commit) ->
ignore;
record(Rec, log_header) ->
ignore;
true ->
mnesia_recover:note_log_decision(Rec, scan_decisions)
end;
insert_rec(Rec, InPlace, InitBy, LogV) when record(Rec, commit) ->
D = Rec#commit.decision,
case mnesia_recover:wait_for_decision(D, InitBy) of
{Tid, committed} ->
do_insert_rec(Tid, Rec, InPlace, InitBy, LogV);
{Tid, aborted} ->
mnesia_schema:undo_prepare_commit(Tid, Rec)
end;
insert_rec(H, _InPlace, _InitBy, _LogV) when record(H, log_header) ->
CurrentVersion = mnesia_log:version(),
if
H#log_header.log_kind /= trans_log ->
exit({"Bad kind of transaction log", H});
H#log_header.log_version == CurrentVersion ->
ok;
H#log_header.log_version == "4.2" ->
ok;
H#log_header.log_version == "4.1" ->
ok;
H#log_header.log_version == "4.0" ->
ok;
true ->
fatal("Bad version of transaction log: ~p~n", [H])
end,
H;
insert_rec(_Rec, _InPlace, _InitBy, _LogV) ->
ok.
do_insert_rec(Tid, Rec, InPlace, InitBy, LogV) ->
case Rec#commit.schema_ops of
[] ->
ignore;
SchemaOps ->
case val({schema, storage_type}) of
ram_copies ->
insert_ops(Tid, schema_ops, SchemaOps, InPlace, InitBy, LogV);
Storage ->
true = open_files(schema, Storage, InPlace, InitBy),
insert_ops(Tid, schema_ops, SchemaOps, InPlace, InitBy, LogV)
end
end,
D = Rec#commit.disc_copies,
insert_ops(Tid, disc_copies, D, InPlace, InitBy, LogV),
case InitBy of
startup ->
DO = Rec#commit.disc_only_copies,
insert_ops(Tid, disc_only_copies, DO, InPlace, InitBy, LogV);
_ ->
ignore
end.
%% Entry point for schema-transaction commits: dump/apply the schema
%% ops, then release the schema commit lock regardless of outcome.
update(_Tid, [], _DumperMode) ->
    dumped;
update(Tid, SchemaOps, DumperMode) ->
    UseDir = mnesia_monitor:use_dir(),
    Res = perform_update(Tid, SchemaOps, DumperMode, UseDir),
    mnesia_controller:release_schema_commit_lock(),
    Res.
%% With DumperMode == mandatory on a disc node, force a dump of the
%% transaction log in order to let the dumper perform the needed
%% updates.  In every other case a full log dump is unnecessary: only
%% the given schema ops are applied (fast_schema_update), and the
%% opened files are closed afterwards.
perform_update(_Tid, _SchemaOps, mandatory, true) ->
    InitBy = schema_update,
    ?eval_debug_fun({?MODULE, dump_schema_op}, [InitBy]),
    opt_dump_log(InitBy);
perform_update(Tid, SchemaOps, _DumperMode, _UseDir) ->
    InitBy = fast_schema_update,
    InPlace = mnesia_monitor:get_env(dump_log_update_in_place),
    ?eval_debug_fun({?MODULE, dump_schema_op}, [InitBy]),
    case catch insert_ops(Tid, schema_ops, SchemaOps, InPlace, InitBy,
                          mnesia_log:version()) of
        {'EXIT', Reason} ->
            Error = {error, {"Schema update error", Reason}},
            close_files(InPlace, Error, InitBy),
            fatal("Schema update error ~p ~p", [Reason, SchemaOps]);
        _ ->
            ?eval_debug_fun({?MODULE, post_dump}, [InitBy]),
            close_files(InPlace, ok, InitBy),
            ok
    end.
%% Apply a list of commit ops against Storage.  Log versions >= "4.3"
%% store ops in execution order and are applied head first; older logs
%% stored them reversed, so for Ver < "4.3" the tail is applied before
%% the head.
insert_ops(_Tid, _Storage, [], _InPlace, _InitBy, _) -> ok;
insert_ops(Tid, Storage, [Op], InPlace, InitBy, Ver) when Ver >= "4.3"->
    insert_op(Tid, Storage, Op, InPlace, InitBy),
    ok;
insert_ops(Tid, Storage, [Op | Ops], InPlace, InitBy, Ver) when Ver >= "4.3"->
    insert_op(Tid, Storage, Op, InPlace, InitBy),
    insert_ops(Tid, Storage, Ops, InPlace, InitBy, Ver);
insert_ops(Tid, Storage, [Op | Ops], InPlace, InitBy, Ver) when Ver < "4.3" ->
    insert_ops(Tid, Storage, Ops, InPlace, InitBy, Ver),
    insert_op(Tid, Storage, Op, InPlace, InitBy).
disc_insert(_Tid, Storage, Tab, Key, Val, Op, InPlace, InitBy) ->
case open_files(Tab, Storage, InPlace, InitBy) of
true ->
case Storage of
disc_copies when Tab /= schema ->
mnesia_log:append({?MODULE,Tab}, {{Tab, Key}, Val, Op}),
ok;
_ ->
dets_insert(Op,Tab,Key,Val)
end;
false ->
ignore
end.
dets_insert(Op,Tab,Key,Val) ->
case Op of
write ->
dets_updated(Tab,Key),
ok = dets:insert(Tab, Val);
delete ->
dets_updated(Tab,Key),
ok = dets:delete(Tab, Key);
update_counter ->
case dets_incr_counter(Tab,Key) of
true ->
{RecName, Incr} = Val,
case catch dets:update_counter(Tab, Key, Incr) of
CounterVal when integer(CounterVal) ->
ok;
_ when Incr < 0 ->
Zero = {RecName, Key, 0},
ok = dets:insert(Tab, Zero);
_ ->
Init = {RecName, Key, Incr},
ok = dets:insert(Tab, Init)
end;
false -> ok
end;
delete_object ->
dets_updated(Tab,Key),
ok = dets:delete_object(Tab, Val);
clear_table ->
dets_cleared(Tab),
ok = dets:match_delete(Tab, '_')
end.
dets_updated(Tab,Key) ->
case get(mnesia_dumper_dets) of
undefined ->
Empty = gb_trees:empty(),
Tree = gb_trees:insert(Tab, gb_sets:singleton(Key), Empty),
put(mnesia_dumper_dets, Tree);
Tree ->
case gb_trees:lookup(Tab,Tree) of
{value, cleared} -> ignore;
{value, Set} ->
T = gb_trees:update(Tab, gb_sets:add(Key, Set), Tree),
put(mnesia_dumper_dets, T);
none ->
T = gb_trees:insert(Tab, gb_sets:singleton(Key), Tree),
put(mnesia_dumper_dets, T)
end
end.
dets_incr_counter(Tab,Key) ->
case get(mnesia_dumper_dets) of
undefined -> false;
Tree ->
case gb_trees:lookup(Tab,Tree) of
{value, cleared} -> true;
{value, Set} -> gb_sets:is_member(Key, Set);
none -> false
end
end.
dets_cleared(Tab) ->
case get(mnesia_dumper_dets) of
undefined ->
Empty = gb_trees:empty(),
Tree = gb_trees:insert(Tab, cleared, Empty),
put(mnesia_dumper_dets, Tree);
Tree ->
case gb_trees:lookup(Tab,Tree) of
{value, cleared} -> ignore;
_ ->
T = gb_trees:enter(Tab, cleared, Tree),
put(mnesia_dumper_dets, T)
end
end.
insert(Tid, Storage, Tab, Key, [Val | Tail], Op, InPlace, InitBy) ->
insert(Tid, Storage, Tab, Key, Val, Op, InPlace, InitBy),
insert(Tid, Storage, Tab, Key, Tail, Op, InPlace, InitBy);
insert(_Tid, _Storage, _Tab, _Key, [], _Op, _InPlace, _InitBy) ->
ok;
insert(Tid, Storage, Tab, Key, Val, Op, InPlace, InitBy) ->
Item = {{Tab, Key}, Val, Op},
case InitBy of
startup ->
disc_insert(Tid, Storage, Tab, Key, Val, Op, InPlace, InitBy);
_ when Storage == ram_copies ->
mnesia_tm:do_update_op(Tid, Storage, Item),
Snmp = mnesia_tm:prepare_snmp(Tab, Key, [Item]),
mnesia_tm:do_snmp(Tid, Snmp);
_ when Storage == disc_copies ->
disc_insert(Tid, Storage, Tab, Key, Val, Op, InPlace, InitBy),
mnesia_tm:do_update_op(Tid, Storage, Item),
Snmp = mnesia_tm:prepare_snmp(Tab, Key, [Item]),
mnesia_tm:do_snmp(Tid, Snmp);
_ when Storage == disc_only_copies ->
mnesia_tm:do_update_op(Tid, Storage, Item),
Snmp = mnesia_tm:prepare_snmp(Tab, Key, [Item]),
mnesia_tm:do_snmp(Tid, Snmp);
_ when Storage == unknown ->
ignore
end.
disc_delete_table(Tab, Storage) ->
case mnesia_monitor:use_dir() of
true ->
if
Storage == disc_only_copies; Tab == schema ->
mnesia_monitor:unsafe_close_dets(Tab),
Dat = mnesia_lib:tab2dat(Tab),
file:delete(Dat);
true ->
DclFile = mnesia_lib:tab2dcl(Tab),
case get({?MODULE,Tab}) of
{opened_dumper, dcl} ->
del_opened_tab(Tab),
mnesia_log:unsafe_close_log(Tab);
_ ->
ok
end,
file:delete(DclFile),
DcdFile = mnesia_lib:tab2dcd(Tab),
file:delete(DcdFile),
ok
end,
erase({?MODULE, Tab});
false ->
ignore
end.
%% Delete the transient on-disc index tables for Tab.  Only
%% disc_only_copies tables keep indexes on disc, so everything else is
%% a no-op.  (The "indecies" spelling is kept for compatibility with
%% the rest of the module.)
disc_delete_indecies(_Tab, _Cs, Storage) when Storage /= disc_only_copies ->
    ignore;
disc_delete_indecies(Tab, Cs, disc_only_copies) ->
    Indecies = Cs#cstruct.index,
    mnesia_index:del_transient(Tab, Indecies, disc_only_copies).
insert_op(Tid, Storage, {{Tab, Key}, Val, Op}, InPlace, InitBy) ->
disc_insert(Tid, Storage, Tab, Key, Val, Op, InPlace, InitBy);
insert_op(_Tid, schema_ops, _OP, _InPlace, Initby)
when Initby /= startup,
Initby /= fast_schema_update,
Initby /= schema_update ->
ignore;
insert_op(Tid, _, {op, rec, Storage, Item}, InPlace, InitBy) ->
{{Tab, Key}, ValList, Op} = Item,
insert(Tid, Storage, Tab, Key, ValList, Op, InPlace, InitBy);
%% Schema op: move a table replica between ram_copies / disc_copies /
%% disc_only_copies on node N.  On the local node this renames/deletes
%% the on-disc files (.DMP/.DAT/.DCD/.DCL) as appropriate and rebuilds
%% indexes where needed; finally the updated cstruct is written to the
%% schema on disc.
%%
%% NOTE(review): the binding of Val below was missing in this copy
%% (dropped together with its trailing comment), leaving Val unbound
%% in '{schema, Tab, _} = Val' and in the final disc_insert; restored.
insert_op(Tid, _, {op, change_table_copy_type, N, FromS, ToS, TabDef}, InPlace, InitBy) ->
    Cs = mnesia_schema:list2cs(TabDef),
    %% Update ram only here; the disc copy of the cstruct is written
    %% at the end of this clause.
    Val = mnesia_schema:insert_cstruct(Tid, Cs, true),
    {schema, Tab, _} = Val,
    case lists:member(N, val({current, db_nodes})) of
        true when InitBy /= startup ->
            mnesia_controller:add_active_replica(Tab, N, Cs);
        _ ->
            ignore
    end,
    if
        N == node() ->
            Dmp = mnesia_lib:tab2dmp(Tab),
            Dat = mnesia_lib:tab2dat(Tab),
            Dcd = mnesia_lib:tab2dcd(Tab),
            Dcl = mnesia_lib:tab2dcl(Tab),
            case {FromS, ToS} of
                {ram_copies, disc_copies} when Tab == schema ->
                    ok = ensure_rename(Dmp, Dat);
                {ram_copies, disc_copies} ->
                    file:delete(Dcl),
                    ok = ensure_rename(Dmp, Dcd);
                {disc_copies, ram_copies} when Tab == schema ->
                    mnesia_lib:set(use_dir, false),
                    mnesia_monitor:unsafe_close_dets(Tab),
                    file:delete(Dat);
                {disc_copies, ram_copies} ->
                    file:delete(Dcl),
                    file:delete(Dcd);
                {ram_copies, disc_only_copies} ->
                    ok = ensure_rename(Dmp, Dat),
                    true = open_files(Tab, disc_only_copies, InPlace, InitBy),
                    %% ram_delete_table must be done before init_indecies;
                    %% the disc index is built from the dets file, not the
                    %% ram replica.
                    mnesia_schema:ram_delete_table(Tab, FromS),
                    PosList = Cs#cstruct.index,
                    mnesia_index:init_indecies(Tab, disc_only_copies, PosList);
                {disc_only_copies, ram_copies} ->
                    mnesia_monitor:unsafe_close_dets(Tab),
                    disc_delete_indecies(Tab, Cs, disc_only_copies),
                    case InitBy of
                        startup ->
                            ignore;
                        _ ->
                            mnesia_controller:get_disc_copy(Tab)
                    end,
                    disc_delete_table(Tab, disc_only_copies);
                {disc_copies, disc_only_copies} ->
                    ok = ensure_rename(Dmp, Dat),
                    true = open_files(Tab, disc_only_copies, InPlace, InitBy),
                    mnesia_schema:ram_delete_table(Tab, FromS),
                    PosList = Cs#cstruct.index,
                    mnesia_index:init_indecies(Tab, disc_only_copies, PosList),
                    file:delete(Dcl),
                    file:delete(Dcd);
                {disc_only_copies, disc_copies} ->
                    mnesia_monitor:unsafe_close_dets(Tab),
                    disc_delete_indecies(Tab, Cs, disc_only_copies),
                    case InitBy of
                        startup ->
                            ignore;
                        _ ->
                            mnesia_log:ets2dcd(Tab),
                            mnesia_controller:get_disc_copy(Tab),
                            disc_delete_table(Tab, disc_only_copies)
                    end
            end;
        true ->
            ignore
    end,
    S = val({schema, storage_type}),
    disc_insert(Tid, S, schema, Tab, Val, write, InPlace, InitBy);
insert_op(Tid, _, {op, transform, _Fun, TabDef}, InPlace, InitBy) ->
Cs = mnesia_schema:list2cs(TabDef),
case mnesia_lib:cs_to_storage_type(node(), Cs) of
disc_copies ->
open_dcl(Cs#cstruct.name);
_ ->
ignore
end,
insert_cstruct(Tid, Cs, true, InPlace, InitBy);
insert_op(Tid, _, {op, restore_recreate, TabDef}, InPlace, InitBy) ->
Cs = mnesia_schema:list2cs(TabDef),
Tab = Cs#cstruct.name,
Type = Cs#cstruct.type,
Storage = mnesia_lib:cs_to_storage_type(node(), Cs),
disc_delete_table(Tab, Storage),
disc_delete_indecies(Tab, Cs, Storage),
case InitBy of
startup ->
ignore;
_ ->
case ?catch_val({Tab, cstruct}) of
{'EXIT', _} -> ignore;
_ ->
mnesia_schema:ram_delete_table(Tab, Storage),
mnesia_checkpoint:tm_del_copy(Tab, node())
end
end,
if
(InitBy == startup) or (Storage == unknown) ->
ignore;
Storage == ram_copies ->
Args = [{keypos, 2}, public, named_table, Type],
mnesia_monitor:mktab(Tab, Args);
Storage == disc_copies ->
Args = [{keypos, 2}, public, named_table, Type],
mnesia_monitor:mktab(Tab, Args),
File = mnesia_lib:tab2dcd(Tab),
FArg = [{file, File}, {name, {mnesia,create}},
{repair, false}, {mode, read_write}],
{ok, Log} = mnesia_monitor:open_log(FArg),
mnesia_monitor:unsafe_close_log(Log);
Storage == disc_only_copies ->
File = mnesia_lib:tab2dat(Tab),
file:delete(File),
Args = [{file, mnesia_lib:tab2dat(Tab)},
{type, mnesia_lib:disk_type(Tab, Type)},
{keypos, 2},
{repair, mnesia_monitor:get_env(auto_repair)}],
mnesia_monitor:open_dets(Tab, Args)
end,
insert_op(Tid, ignore, {op, create_table, TabDef}, InPlace, InitBy);
insert_op(Tid, _, {op, create_table, TabDef}, InPlace, InitBy) ->
Cs = mnesia_schema:list2cs(TabDef),
insert_cstruct(Tid, Cs, false, InPlace, InitBy),
Tab = Cs#cstruct.name,
Storage = mnesia_lib:cs_to_storage_type(node(), Cs),
case InitBy of
startup ->
case Storage of
unknown ->
ignore;
ram_copies ->
ignore;
disc_copies ->
Dcd = mnesia_lib:tab2dcd(Tab),
case mnesia_lib:exists(Dcd) of
true -> ignore;
false ->
mnesia_log:open_log(temp,
mnesia_log:dcl_log_header(),
Dcd,
false,
false,
read_write),
mnesia_log:unsafe_close_log(temp)
end;
_ ->
Args = [{file, mnesia_lib:tab2dat(Tab)},
{type, mnesia_lib:disk_type(Tab, Cs#cstruct.type)},
{keypos, 2},
{repair, mnesia_monitor:get_env(auto_repair)}],
case mnesia_monitor:open_dets(Tab, Args) of
{ok, _} ->
mnesia_monitor:unsafe_close_dets(Tab);
{error, Error} ->
exit({"Failed to create dets table", Error})
end
end;
_ ->
Copies = mnesia_lib:copy_holders(Cs),
Active = mnesia_lib:intersect(Copies, val({current, db_nodes})),
[mnesia_controller:add_active_replica(Tab, N, Cs) || N <- Active],
case Storage of
unknown ->
mnesia_lib:unset({Tab, create_table}),
case Cs#cstruct.local_content of
true ->
ignore;
false ->
mnesia_lib:set_remote_where_to_read(Tab)
end;
_ ->
case Cs#cstruct.local_content of
true ->
mnesia_lib:set_local_content_whereabouts(Tab);
false ->
mnesia_lib:set({Tab, where_to_read}, node())
end,
case Storage of
ram_copies ->
ignore;
_ ->
disc_delete_indecies(Tab, Cs, Storage)
end,
mnesia_controller:create_table(Tab),
mnesia_lib:unset({Tab, create_table})
end
end;
insert_op(_Tid, _, {op, dump_table, Size, TabDef}, _InPlace, _InitBy) ->
case Size of
unknown ->
ignore;
_ ->
Cs = mnesia_schema:list2cs(TabDef),
Tab = Cs#cstruct.name,
Dmp = mnesia_lib:tab2dmp(Tab),
Dat = mnesia_lib:tab2dcd(Tab),
case Size of
0 ->
file:delete(Dmp),
file:delete(Dat);
_ ->
ok = ensure_rename(Dmp, Dat)
end
end;
insert_op(Tid, _, {op, delete_table, TabDef}, InPlace, InitBy) ->
Cs = mnesia_schema:list2cs(TabDef),
Tab = Cs#cstruct.name,
case mnesia_lib:cs_to_storage_type(node(), Cs) of
unknown ->
ignore;
Storage ->
disc_delete_table(Tab, Storage),
disc_delete_indecies(Tab, Cs, Storage),
case InitBy of
startup ->
ignore;
_ ->
mnesia_schema:ram_delete_table(Tab, Storage),
mnesia_checkpoint:tm_del_copy(Tab, node())
end
end,
delete_cstruct(Tid, Cs, InPlace, InitBy);
%% Schema op: clear all objects of a table.  For disc_copies the dcl
%% log must be (re)opened first so the clear is recorded on disc.
%%
%% NOTE(review): the binding of Oid below was missing in this copy
%% (dropped together with its trailing comment), leaving Oid unbound
%% in the insert call; restored.
insert_op(Tid, _, {op, clear_table, TabDef}, InPlace, InitBy) ->
    Cs = mnesia_schema:list2cs(TabDef),
    Tab = Cs#cstruct.name,
    case mnesia_lib:cs_to_storage_type(node(), Cs) of
        unknown ->
            ignore;
        Storage ->
            Oid = '_', %% val({Tab, wild_pattern}),
            if Storage == disc_copies ->
                    open_dcl(Cs#cstruct.name);
               true ->
                    ignore
            end,
            catch insert(Tid, Storage, Tab, '_', Oid, clear_table, InPlace, InitBy)
    end;
insert_op(Tid, _, {op, merge_schema, TabDef}, InPlace, InitBy) ->
Cs = mnesia_schema:list2cs(TabDef),
case Cs#cstruct.name of
schema ->
Update = fun(NS = {Node,Storage}) ->
case mnesia_lib:cs_to_storage_type(Node, Cs) of
Storage -> NS;
disc_copies when Node == node() ->
Dir = mnesia_lib:dir(),
ok = mnesia_schema:opt_create_dir(true, Dir),
mnesia_schema:purge_dir(Dir, []),
mnesia_log:purge_all_logs(),
mnesia_lib:set(use_dir, true),
mnesia_log:init(),
Ns = val({current, db_nodes}),
F = fun(U) -> mnesia_recover:log_mnesia_up(U) end,
lists:foreach(F, Ns),
raw_named_dump_table(schema, dat),
temp_set_master_nodes(),
{Node,disc_copies};
CSstorage ->
{Node,CSstorage}
end
end,
W2C0 = val({schema, where_to_commit}),
W2C = case W2C0 of
{blocked, List} ->
{blocked,lists:map(Update,List)};
List ->
lists:map(Update,List)
end,
if W2C == W2C0 -> ignore;
true -> mnesia_lib:set({schema, where_to_commit}, W2C)
end;
_ ->
ignore
end,
insert_cstruct(Tid, Cs, false, InPlace, InitBy);
insert_op(Tid, _, {op, del_table_copy, Storage, Node, TabDef}, InPlace, InitBy) ->
Cs = mnesia_schema:list2cs(TabDef),
Tab = Cs#cstruct.name,
if
Tab == schema, Storage == ram_copies ->
insert_cstruct(Tid, Cs, true, InPlace, InitBy);
Tab /= schema ->
mnesia_controller:del_active_replica(Tab, Node),
mnesia_lib:del({Tab, Storage}, Node),
if
Node == node() ->
case Cs#cstruct.local_content of
true -> mnesia_lib:set({Tab, where_to_read}, nowhere);
false -> mnesia_lib:set_remote_where_to_read(Tab)
end,
mnesia_lib:del({schema, local_tables}, Tab),
mnesia_lib:set({Tab, storage_type}, unknown),
insert_cstruct(Tid, Cs, true, InPlace, InitBy),
disc_delete_table(Tab, Storage),
disc_delete_indecies(Tab, Cs, Storage),
mnesia_schema:ram_delete_table(Tab, Storage),
mnesia_checkpoint:tm_del_copy(Tab, Node);
true ->
case val({Tab, where_to_read}) of
Node ->
mnesia_lib:set_remote_where_to_read(Tab);
_ ->
ignore
end,
insert_cstruct(Tid, Cs, true, InPlace, InitBy)
end
end;
insert_op(Tid, _, {op, add_table_copy, _Storage, _Node, TabDef}, InPlace, InitBy) ->
Cs = mnesia_schema:list2cs(TabDef),
insert_cstruct(Tid, Cs, true, InPlace, InitBy);
insert_op(Tid, _, {op, add_snmp, _Us, TabDef}, InPlace, InitBy) ->
Cs = mnesia_schema:list2cs(TabDef),
insert_cstruct(Tid, Cs, true, InPlace, InitBy);
insert_op(Tid, _, {op, del_snmp, TabDef}, InPlace, InitBy) ->
Cs = mnesia_schema:list2cs(TabDef),
Tab = Cs#cstruct.name,
Storage = mnesia_lib:cs_to_storage_type(node(), Cs),
if
InitBy /= startup,
Storage /= unknown ->
case ?catch_val({Tab, {index, snmp}}) of
{'EXIT', _} ->
ignore;
Stab ->
mnesia_snmp_hook:delete_table(Tab, Stab),
mnesia_lib:unset({Tab, {index, snmp}})
end;
true ->
ignore
end,
insert_cstruct(Tid, Cs, true, InPlace, InitBy);
insert_op(Tid, _, {op, add_index, Pos, TabDef}, InPlace, InitBy) ->
Cs = mnesia_schema:list2cs(TabDef),
Tab = insert_cstruct(Tid, Cs, true, InPlace, InitBy),
Storage = mnesia_lib:cs_to_storage_type(node(), Cs),
case InitBy of
startup when Storage == disc_only_copies ->
true = open_files(Tab, Storage, InPlace, InitBy),
mnesia_index:init_indecies(Tab, Storage, [Pos]);
startup ->
ignore;
_ ->
mnesia_index:init_indecies(Tab, Storage, [Pos])
end;
insert_op(Tid, _, {op, del_index, Pos, TabDef}, InPlace, InitBy) ->
Cs = mnesia_schema:list2cs(TabDef),
Tab = Cs#cstruct.name,
Storage = mnesia_lib:cs_to_storage_type(node(), Cs),
case InitBy of
startup when Storage == disc_only_copies ->
mnesia_index:del_index_table(Tab, Storage, Pos);
startup ->
ignore;
_ ->
mnesia_index:del_index_table(Tab, Storage, Pos)
end,
insert_cstruct(Tid, Cs, true, InPlace, InitBy);
insert_op(Tid, _, {op, change_table_access_mode,TabDef, _OldAccess, _Access}, InPlace, InitBy) ->
Cs = mnesia_schema:list2cs(TabDef),
case InitBy of
startup -> ignore;
_ -> mnesia_controller:change_table_access_mode(Cs)
end,
insert_cstruct(Tid, Cs, true, InPlace, InitBy);
insert_op(Tid, _, {op, change_table_load_order, TabDef, _OldLevel, _Level}, InPlace, InitBy) ->
Cs = mnesia_schema:list2cs(TabDef),
insert_cstruct(Tid, Cs, true, InPlace, InitBy);
insert_op(Tid, _, {op, delete_property, TabDef, PropKey}, InPlace, InitBy) ->
Cs = mnesia_schema:list2cs(TabDef),
Tab = Cs#cstruct.name,
mnesia_lib:unset({Tab, user_property, PropKey}),
insert_cstruct(Tid, Cs, true, InPlace, InitBy);
insert_op(Tid, _, {op, write_property, TabDef, _Prop}, InPlace, InitBy) ->
Cs = mnesia_schema:list2cs(TabDef),
insert_cstruct(Tid, Cs, true, InPlace, InitBy);
insert_op(Tid, _, {op, change_table_frag, _Change, TabDef}, InPlace, InitBy) ->
Cs = mnesia_schema:list2cs(TabDef),
insert_cstruct(Tid, Cs, true, InPlace, InitBy).
open_files(Tab, Storage, UpdateInPlace, InitBy)
when Storage /= unknown, Storage /= ram_copies ->
case get({?MODULE, Tab}) of
undefined ->
case ?catch_val({Tab, setorbag}) of
{'EXIT', _} ->
false;
Type ->
case Storage of
disc_copies when Tab /= schema ->
Bool = open_disc_copies(Tab, InitBy),
Bool;
_ ->
Fname = prepare_open(Tab, UpdateInPlace),
Args = [{file, Fname},
{keypos, 2},
{repair, mnesia_monitor:get_env(auto_repair)},
{type, mnesia_lib:disk_type(Tab, Type)}],
{ok, _} = mnesia_monitor:open_dets(Tab, Args),
put({?MODULE, Tab}, {opened_dumper, dat}),
true
end
end;
already_dumped ->
false;
{opened_dumper, _} ->
true
end;
open_files(_Tab, _Storage, _UpdateInPlace, _InitBy) ->
false.
open_disc_copies(Tab, InitBy) ->
DclF = mnesia_lib:tab2dcl(Tab),
DumpEts =
case file:read_file_info(DclF) of
{error, enoent} ->
false;
{ok, DclInfo} ->
DcdF = mnesia_lib:tab2dcd(Tab),
case file:read_file_info(DcdF) of
{error, Reason} ->
mnesia_lib:dbg_out("File ~p info_error ~p ~n",
[DcdF, Reason]),
true;
{ok, DcdInfo} ->
Mul = case ?catch_val(dc_dump_limit) of
{'EXIT', _} -> ?DumpToEtsMultiplier;
Val -> Val
end,
DcdInfo#file_info.size =< (DclInfo#file_info.size * Mul)
end
end,
if
DumpEts == false; InitBy == startup ->
mnesia_log:open_log({?MODULE,Tab},
mnesia_log:dcl_log_header(),
DclF,
mnesia_lib:exists(DclF),
mnesia_monitor:get_env(auto_repair),
read_write),
put({?MODULE, Tab}, {opened_dumper, dcl}),
true;
true ->
mnesia_log:ets2dcd(Tab),
put({?MODULE, Tab}, already_dumped),
false
end.
%% Bypasses the already_dumped mechanism; used for schema transactions.
%% Ensure the dcl (disc change log) file for Tab is open for writing,
%% even when the table was previously marked already_dumped.  Used by
%% schema transactions (transform / clear_table) that must append to
%% the log.
%%
%% NOTE(review): the catch-all clause head ('_ ->') was missing in this
%% copy (dropped together with its trailing comment), which made the
%% case expression a syntax error; restored.
open_dcl(Tab) ->
    case get({?MODULE, Tab}) of
        {opened_dumper, _} ->
            true;
        _ -> %% undefined or already_dumped
            DclF = mnesia_lib:tab2dcl(Tab),
            mnesia_log:open_log({?MODULE,Tab},
                                mnesia_log:dcl_log_header(),
                                DclF,
                                mnesia_lib:exists(DclF),
                                mnesia_monitor:get_env(auto_repair),
                                read_write),
            put({?MODULE, Tab}, {opened_dumper, dcl}),
            true
    end.
%% Choose the dets file name to write to.  In-place updates go straight
%% to the .DAT file; otherwise the .DAT file is first copied to a .TMP
%% sibling, which do_close/5 later swaps in on success (or deletes on
%% failure).  A failed copy is fatal.
prepare_open(Tab, UpdateInPlace) ->
    Dat = mnesia_lib:tab2dat(Tab),
    case UpdateInPlace of
        true ->
            Dat;
        false ->
            Tmp = mnesia_lib:tab2tmp(Tab),
            case catch mnesia_lib:copy_file(Dat, Tmp) of
                ok ->
                    Tmp;
                Error ->
                    fatal("Cannot copy dets file ~p to ~p: ~p~n",
                          [Dat, Tmp, Error])
            end
    end.
%% Forget the open-file bookkeeping entry for Tab (process dictionary).
del_opened_tab(Tab) ->
    erase({?MODULE, Tab}).
%% Close every file this dumper pass opened.  The bookkeeping lives in
%% the process dictionary, so seed the worker with get().
%%
%% NOTE(review): the function head was missing in this copy (dropped
%% together with its trailing comment), leaving a dangling call
%% expression; restored.
close_files(UpdateInPlace, Outcome, InitBy) -> %% Update in place
    close_files(UpdateInPlace, Outcome, InitBy, get()).
%% Worker for close_files/3: scan the process-dictionary entries keyed
%% {?MODULE, Tab} and close each opened dump target, dispatching on the
%% table's current storage type.
close_files(InPlace, Outcome, InitBy, [{{?MODULE, Tab}, already_dumped} | Tail]) ->
    erase({?MODULE, Tab}),
    close_files(InPlace, Outcome, InitBy, Tail);
close_files(InPlace, Outcome, InitBy, [{{?MODULE, Tab}, {opened_dumper, Type}} | Tail]) ->
    erase({?MODULE, Tab}),
    case val({Tab, storage_type}) of
        disc_only_copies when InitBy /= startup ->
            %% Keep the dets file open between dumps while running.
            ignore;
        disc_copies when Tab /= schema ->
            mnesia_log:close_log({?MODULE,Tab});
        Storage ->
            do_close(InPlace, Outcome, Tab, Type, Storage)
    end,
    close_files(InPlace, Outcome, InitBy, Tail);
close_files(InPlace, Outcome, InitBy, [_ | Tail]) ->
    %% Unrelated process-dictionary entry; skip it.
    close_files(InPlace, Outcome, InitBy, Tail);
close_files(_, _, _InitBy, []) ->
    ok.
%% Close one table's dump target and commit or discard the result.
%% dcl logs are closed directly (and deleted when the table's storage
%% has become unknown); dat (dets) targets may have been written to a
%% .TMP sibling, which is swapped in on success or deleted on failure.
%%
%% NOTE(review): the second dcl clause head was missing in this copy
%% (dropped together with its trailing comment), leaving an orphaned
%% body; restored.  The "Success: ..." comment below had also lost its
%% '%%' prefix; restored as a comment.
do_close(_, _, Tab, dcl, unknown) ->
    mnesia_log:close_log({?MODULE,Tab}),
    file:delete(mnesia_lib:tab2dcl(Tab));
do_close(_, _, Tab, dcl, _) -> %% To be safe, can it happen?
    mnesia_log:close_log({?MODULE,Tab});
do_close(InPlace, Outcome, Tab, dat, Storage) ->
    mnesia_monitor:close_dets(Tab),
    if
        Storage == unknown, InPlace == true ->
            file:delete(mnesia_lib:tab2dat(Tab));
        InPlace == true ->
            %% Update in place; nothing to swap.
            ok;
        Outcome == ok, Storage /= unknown ->
            %% Success: swap tmp files with dat files.
            TabDat = mnesia_lib:tab2dat(Tab),
            ok = file:rename(mnesia_lib:tab2tmp(Tab), TabDat);
        true ->
            file:delete(mnesia_lib:tab2tmp(Tab))
    end.
%% Rename From to To when From exists.  If From is already gone,
%% succeed only when To is present (a previous rename evidently
%% completed); otherwise report {error, {rename_failed, From, To}}.
ensure_rename(From, To) ->
    FromExists = mnesia_lib:exists(From),
    if
        FromExists ->
            file:rename(From, To);
        true ->
            case mnesia_lib:exists(To) of
                true -> ok;
                false -> {error, {rename_failed, From, To}}
            end
    end.
insert_cstruct(Tid, Cs, KeepWhereabouts, InPlace, InitBy) ->
Val = mnesia_schema:insert_cstruct(Tid, Cs, KeepWhereabouts),
{schema, Tab, _} = Val,
S = val({schema, storage_type}),
disc_insert(Tid, S, schema, Tab, Val, write, InPlace, InitBy),
Tab.
delete_cstruct(Tid, Cs, InPlace, InitBy) ->
Val = mnesia_schema:delete_cstruct(Tid, Cs),
{schema, Tab, _} = Val,
S = val({schema, storage_type}),
disc_insert(Tid, S, schema, Tab, Val, delete, InPlace, InitBy),
Tab.
%% Temporarily record the remote replica holders of every local table
%% as master nodes, so they are known during the table loading that
%% follows (used when this node just turned into a disc node during a
%% schema merge).
%%
%% NOTE(review): the "UseDir = false ..." line below had lost its '%%'
%% prefix in this copy, turning the comment into invalid code; restored
%% as a comment.
temp_set_master_nodes() ->
    Tabs = val({schema, local_tables}),
    Masters = [{Tab, (val({Tab, disc_copies}) ++
                      val({Tab, ram_copies}) ++
                      val({Tab, disc_only_copies})) -- [node()]}
               || Tab <- Tabs],
    %% UseDir = false since we don't want to remember these
    %% masternodes, and we are running (really soon anyway) since we
    %% want this to be known during table loading.
    mnesia_recover:log_master_nodes(Masters, false, yes),
    ok.
%% Raw dump of table. Dumper must have unique access to the ets table.
raw_named_dump_table(Tab, Ftype) ->
case mnesia_monitor:use_dir() of
true ->
mnesia_lib:lock_table(Tab),
TmpFname = mnesia_lib:tab2tmp(Tab),
Fname =
case Ftype of
dat -> mnesia_lib:tab2dat(Tab);
dmp -> mnesia_lib:tab2dmp(Tab)
end,
file:delete(TmpFname),
file:delete(Fname),
TabSize = ?ets_info(Tab, size),
TabRef = Tab,
DiskType = mnesia_lib:disk_type(Tab),
Args = [{file, TmpFname},
{keypos, 2},
{estimated_no_objects, TabSize + 256},
{repair, mnesia_monitor:get_env(auto_repair)},
{type, DiskType}],
case mnesia_lib:dets_sync_open(TabRef, Args) of
{ok, TabRef} ->
Storage = ram_copies,
mnesia_lib:db_fixtable(Storage, Tab, true),
case catch raw_dump_table(TabRef, Tab) of
{'EXIT', Reason} ->
mnesia_lib:db_fixtable(Storage, Tab, false),
mnesia_lib:dets_sync_close(Tab),
file:delete(TmpFname),
mnesia_lib:unlock_table(Tab),
exit({"Dump of table to disc failed", Reason});
ok ->
mnesia_lib:db_fixtable(Storage, Tab, false),
mnesia_lib:dets_sync_close(Tab),
mnesia_lib:unlock_table(Tab),
ok = file:rename(TmpFname, Fname)
end;
{error, Reason} ->
mnesia_lib:unlock_table(Tab),
exit({"Open of file before dump to disc failed", Reason})
end;
false ->
exit({has_no_disc, node()})
end.
%% Copy every object of the ets table into the (already open) dets file.
raw_dump_table(DetsRef, EtsRef) ->
    dets:from_ets(DetsRef, EtsRef).
%% in the Erlang emulator. The mnesia_dumper process performs many
%% costly BIF invocations and must pay for this. But since the
%% not steal all computation power in the Erlang Emulator and make
start_regulator() ->
case mnesia_monitor:get_env(dump_log_load_regulation) of
false ->
nopid;
true ->
N = ?REGULATOR_NAME,
case mnesia_monitor:start_proc(N, ?MODULE, regulator_init, [self()]) of
{ok, Pid} ->
Pid;
{error, Reason} ->
fatal("Failed to start ~n: ~p~n", [N, Reason])
end
end.
regulator_init(Parent) ->
process_flag(priority, low),
register(?REGULATOR_NAME, self()),
proc_lib:init_ack(Parent, {ok, self()}),
regulator_loop().
regulator_loop() ->
receive
{regulate, From} ->
From ! {regulated, self()},
regulator_loop();
{stop, From} ->
From ! {stopped, self()},
exit(normal)
end.
regulate(nopid) ->
ok;
regulate(RegulatorPid) ->
RegulatorPid ! {regulate, self()},
receive
{regulated, RegulatorPid} -> ok
end.
%% Read a cached mnesia value, delegating to mnesia_lib:other_val/2
%% when the lookup fails (e.g. while mnesia is starting or stopping).
val(Var) ->
    case ?catch_val(Var) of
        {'EXIT', Reason} -> mnesia_lib:other_val(Var, Reason);
        Value -> Value
    end.
|
aea4f2041ac6b242827d6e5e6106897b561cf02ee3e60ef6d4b2f9070befad4c | well-typed/large-records | Generics_SOP.hs | {-# LANGUAGE DataKinds #-}
# LANGUAGE FlexibleContexts #
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE ScopedTypeVariables #-}
# LANGUAGE TypeApplications #
{-# LANGUAGE TypeFamilies #-}
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
# OPTIONS_GHC -Wno - orphans #
| Generic instance for HList using SOP generics
module Experiment.Generics_SOP (gtoJSON) where
import Data.Aeson
import Data.Kind
import Data.SOP
import Generics.SOP (Generic(..), HasDatatypeInfo(..))
import Generics.SOP.Type.Metadata
import qualified Generics.SOP.JSON as SOP
import qualified Generics.SOP.Metadata as SOP
import Bench.HList
import Infra.ShowType
-- | Serialize any generically-representable value to an Aeson 'Value'
-- via SOP generics, using the default JSON options.
gtoJSON :: (HasDatatypeInfo a, All2 ToJSON (Code a)) => a -> Value
gtoJSON = SOP.gtoJSON SOP.defaultJsonOptions
{-------------------------------------------------------------------------------
Computing metadata
-------------------------------------------------------------------------------}
-- | Type-level description of an @HList xs@ as a single-constructor
-- record ADT.  The module and type names are fixed placeholders; the
-- field and strictness metadata are computed from the index list @xs@.
type family ComputeDatatypeInfo (xs :: [Type]) :: DatatypeInfo where
  ComputeDatatypeInfo xs =
    'ADT "SomeModule" "Record"
      '[ 'Record "MkRecord" (ComputeFieldInfo xs) ]
      '[ ComputeStrictnessInfo xs ]
-- | One 'StrictnessInfo' entry per element of @xs@, all set to the
-- default (lazy, unannotated) strictness.
type family ComputeStrictnessInfo (xs :: [Type]) :: [StrictnessInfo] where
  ComputeStrictnessInfo '[]       = '[]
  ComputeStrictnessInfo (_ ': xs) = DefaultStrictnessInfo
                                 ': ComputeStrictnessInfo xs
-- | Strictness of a field declared without annotations: no UNPACK
-- pragma, no strictness annotation, decided lazy by the compiler.
type DefaultStrictnessInfo =
  'StrictnessInfo
    'SOP.NoSourceUnpackedness
    'SOP.NoSourceStrictness
    'SOP.DecidedLazy
-- | One 'FieldInfo' per element of @xs@, naming each field after the
-- rendered type of that element (via 'ShowType').
type family ComputeFieldInfo (xs :: [Type]) :: [FieldInfo] where
  ComputeFieldInfo '[]       = '[]
  ComputeFieldInfo (x ': xs) = 'FieldInfo (ShowType x) ': ComputeFieldInfo xs
{-------------------------------------------------------------------------------
Generic instance proper
-------------------------------------------------------------------------------}
-- | An @HList xs@ is generically a single-constructor product whose
-- components are exactly @xs@; conversion is a direct translation to
-- and from SOP's n-ary product.
instance SListI xs => Generic (HList xs) where
  type Code (HList xs) = '[xs]
  from = SOP . Z . hlistToNP
  to = hlistFromNP . unZ . unSOP
-- | Metadata instance: demotes the type-level description computed by
-- 'ComputeDatatypeInfo' to a value-level 'SOP.DatatypeInfo'.
instance ( DemoteFieldInfos (ComputeFieldInfo xs) xs
         , DemoteStrictnessInfos (ComputeStrictnessInfo xs) xs
         ) => HasDatatypeInfo (HList xs) where
  type DatatypeInfoOf (HList xs) = ComputeDatatypeInfo xs
  datatypeInfo _ = demoteDatatypeInfo (Proxy @(ComputeDatatypeInfo xs))
| null | https://raw.githubusercontent.com/well-typed/large-records/551f265845fbe56346988a6b484dca40ef380609/large-records-benchmarks/bench/experiments/Experiment/Generics_SOP.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE KindSignatures #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
------------------------------------------------------------------------------
Computing metadata
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Generic instance proper
------------------------------------------------------------------------------ | # LANGUAGE FlexibleContexts #
# LANGUAGE TypeApplications #
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
# OPTIONS_GHC -Wno - orphans #
| Generic instance for HList using SOP generics
module Experiment.Generics_SOP (gtoJSON) where
import Data.Aeson
import Data.Kind
import Data.SOP
import Generics.SOP (Generic(..), HasDatatypeInfo(..))
import Generics.SOP.Type.Metadata
import qualified Generics.SOP.JSON as SOP
import qualified Generics.SOP.Metadata as SOP
import Bench.HList
import Infra.ShowType
gtoJSON :: (HasDatatypeInfo a, All2 ToJSON (Code a)) => a -> Value
gtoJSON = SOP.gtoJSON SOP.defaultJsonOptions
type family ComputeDatatypeInfo (xs :: [Type]) :: DatatypeInfo where
ComputeDatatypeInfo xs =
'ADT "SomeModule" "Record"
'[ 'Record "MkRecord" (ComputeFieldInfo xs) ]
'[ ComputeStrictnessInfo xs ]
type family ComputeStrictnessInfo (xs :: [Type]) :: [StrictnessInfo] where
ComputeStrictnessInfo '[] = '[]
ComputeStrictnessInfo (_ ': xs) = DefaultStrictnessInfo
': ComputeStrictnessInfo xs
type DefaultStrictnessInfo =
'StrictnessInfo
'SOP.NoSourceUnpackedness
'SOP.NoSourceStrictness
'SOP.DecidedLazy
type family ComputeFieldInfo (xs :: [Type]) :: [FieldInfo] where
ComputeFieldInfo '[] = '[]
ComputeFieldInfo (x ': xs) = 'FieldInfo (ShowType x) ': ComputeFieldInfo xs
instance SListI xs => Generic (HList xs) where
type Code (HList xs) = '[xs]
from = SOP . Z . hlistToNP
to = hlistFromNP . unZ . unSOP
instance ( DemoteFieldInfos (ComputeFieldInfo xs) xs
, DemoteStrictnessInfos (ComputeStrictnessInfo xs) xs
) => HasDatatypeInfo (HList xs) where
type DatatypeInfoOf (HList xs) = ComputeDatatypeInfo xs
datatypeInfo _ = demoteDatatypeInfo (Proxy @(ComputeDatatypeInfo xs))
|
e69e78ca226692c66cc0b83ec223b4eb6428032461d3213768095d44c872e60d | ghc/ghc | FromCmm.hs | # LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
# LANGUAGE DataKinds #
module GHC.Wasm.ControlFlow.FromCmm
( structuredControl
)
where
import GHC.Prelude hiding (succ)
import Data.Function
import Data.List (sortBy)
import qualified Data.Tree as Tree
import GHC.Cmm
import GHC.Cmm.Dataflow.Block
import GHC.Cmm.Dataflow.Collections
import GHC.Cmm.Dominators
import GHC.Cmm.Dataflow.Graph
import GHC.Cmm.Dataflow.Label
import GHC.Cmm.Switch
import GHC.CmmToAsm.Wasm.Types
import GHC.Platform
import GHC.Utils.Misc
import GHC.Utils.Panic
import GHC.Utils.Outputable ( Outputable, text, (<+>), ppr
, pprWithCommas
)
import GHC.Wasm.ControlFlow
{-|
Module      : GHC.Wasm.ControlFlow.FromCmm
Description : Translation of (reducible) Cmm control flow to WebAssembly

Code in this module can translate any _reducible_ Cmm control-flow
graph to the structured control flow that is required by WebAssembly.
The algorithm is subtle and is described in detail in a draft paper
to be found at </~nr/pubs/relooper.pdf>.
-}
------------------- Abstraction of Cmm control flow -----------------------
-- | Abstracts the kind of control flow we understand how to convert.
-- A block can be left in one of four ways:
--
-- * Unconditionally
--
-- * Conditionally on a predicate of type `e`
--
-- * To a location determined by the value of a scrutinee of type `e`
--
-- * Not at all.
data ControlFlow e = Unconditional Label
                   | Conditional e Label Label
                   | Switch { _scrutinee :: e
                            , _range :: BrTableInterval
                              -- restored field: the branch-table
                              -- targets, indexed from 0 (it was lost in
                              -- extraction; 'flowLeaving' constructs
                              -- 'Switch' with four arguments)
                            , _targets :: [Maybe Label] -- from 0
                            , _defaultTarget :: Maybe Label
                            }
                   | TailCall e
-- | Classify the control flow leaving a block by inspecting its last
-- node.  For a 'CmmSwitch', the scrutinee is rebased by the table
-- offset and widened to word size so it can feed a Wasm @br_table@.
-- Calls with a continuation are not expected here, hence the panic in
-- the catch-all case.
flowLeaving :: Platform -> CmmBlock -> ControlFlow CmmExpr
flowLeaving platform b =
  case lastNode b of
    CmmBranch l -> Unconditional l
    CmmCondBranch c t f _ -> Conditional c t f
    CmmSwitch e targets ->
      let (offset, target_labels) = switchTargetsToTable targets
          (lo, hi) = switchTargetsRange targets
          default_label = switchTargetsDefault targets
          -- rebase and widen the scrutinee to index the table from 0
          scrutinee = smartExtend platform $ smartPlus platform e offset
          range = inclusiveInterval (lo+toInteger offset) (hi+toInteger offset)
      in Switch scrutinee range target_labels default_label
    CmmCall { cml_cont = Nothing, cml_target = e } -> TailCall e
    _ -> panic "flowLeaving: unreachable"
----------------------- Evaluation contexts ------------------------------
-- | The syntactic constructs in which Wasm code may be contained.
-- A list of these constructs represents an evaluation context,
-- which is used to determined what level of `br` instruction
-- reaches a given label.
data ContainingSyntax
    = BlockFollowedBy Label  -- ^ branching to the label exits the block
    | LoopHeadedBy Label     -- ^ the label names the loop header; branching to it continues the loop
    | IfThenElse (Maybe Label) -- ^ Carries the label that follows `if...end`, if any
-- | Does the given label name this enclosing syntactic frame?
-- Used by 'index' to compute the @br@ depth of a destination label.
matchesFrame :: Label -> ContainingSyntax -> Bool
matchesFrame label frame =
  case frame of
    BlockFollowedBy l   -> label == l
    LoopHeadedBy l      -> label == l
    IfThenElse (Just l) -> label == l
    _                   -> False
-- | An evaluation context: the stack of enclosing syntactic frames
-- (innermost first), plus possibly a label reachable by falling
-- through the current hole.
data Context = Context { enclosing :: [ContainingSyntax]
                       , fallthrough :: Maybe Label  -- the label can
                                     -- be reached just by "falling through"
                                     -- the hole
                       }
-- | Render a context for diagnostics: the enclosing frames, plus the
-- fallthrough label when one is set.
instance Outputable Context where
  ppr c | Just l <- fallthrough c =
              pprWithCommas ppr (enclosing c) <+> text "fallthrough to" <+> ppr l
        | otherwise = pprWithCommas ppr (enclosing c)
-- | The initial context: no enclosing frames, no fallthrough label.
emptyContext :: Context
emptyContext = Context [] Nothing

-- | Push a syntactic frame onto a context (innermost first).
inside :: ContainingSyntax -> Context -> Context
-- | Record that a label is reachable from the hole by falling through.
withFallthrough :: Context -> Label -> Context

inside frame c = c { enclosing = frame : enclosing c }
withFallthrough c l = c { fallthrough = Just l }
-- | Straight-line Cmm code: an open/open block of nodes.
type CmmActions = Block CmmNode O O

-- | Shorthand for a Wasm function type (input and output stacks).
type FT pre post = WasmFunctionType pre post

returns :: FT '[] '[ 'I32]
doesn'tReturn :: FT '[] '[]

-- Function type of code that leaves one i32 on the stack, and of code
-- that leaves the stack unchanged, respectively.
returns = WasmFunctionType TypeListNil (TypeListCons TagI32 TypeListNil)
doesn'tReturn = WasmFunctionType TypeListNil TypeListNil

-- | True when the function type's result stack is empty.
emptyPost :: FT pre post -> Bool
emptyPost (WasmFunctionType _ TypeListNil) = True
emptyPost _ = False
----------------------- Translation ------------------------------
-- | Convert a Cmm CFG to WebAssembly's structured control flow.
structuredControl :: forall expr stmt m .
                     Applicative m
                  => Platform -- ^ needed for offset calculation
                  -> (Label -> CmmExpr -> m expr) -- ^ translator for expressions
                  -> (Label -> CmmActions -> m stmt) -- ^ translator for straight-line code
                  -> CmmGraph -- ^ CFG to be translated
                     -- (the `-> CmmGraph` arrow was lost in extraction;
                     -- restored — the body consumes @g@ via g_graph/g_entry)
                  -> m (WasmControl stmt expr '[] '[ 'I32])
structuredControl platform txExpr txBlock g =
   doTree returns dominatorTree emptyContext
 where
   gwd :: GraphWithDominators CmmNode
   gwd = graphWithDominators g

   dominatorTree :: Tree.Tree CmmBlock -- Dominator tree in which children are sorted
                                       -- with highest reverse-postorder number first
   dominatorTree = fmap blockLabeled $ sortTree $ gwdDominatorTree gwd

   -- Translate, respectively: a whole dominator subtree; one node plus
   -- its selected children; and a branch — all in a given context.
   doTree     :: FT '[] post -> Tree.Tree CmmBlock -> Context -> m (WasmControl stmt expr '[] post)
   nodeWithin :: forall post .
                 FT '[] post -> CmmBlock -> [Tree.Tree CmmBlock] -> Maybe Label
                             -> Context -> m (WasmControl stmt expr '[] post)
   doBranch   :: FT '[] post -> Label -> Label -> Context -> m (WasmControl stmt expr '[] post)

   doTree fty (Tree.Node x children) context =
       let codeForX = nodeWithin fty x selectedChildren Nothing
       in  if isLoopHeader x then
             WasmLoop fty <$> codeForX loopContext
           else
             codeForX context
     where selectedChildren = case lastNode x of
                                CmmSwitch {} -> children
                                   -- N.B. Unlike `if`, translation of Switch uses only labels.
                                _ -> filter hasMergeRoot children
           loopContext = LoopHeadedBy (entryLabel x) `inside` context
           hasMergeRoot = isMergeNode . Tree.rootLabel

   nodeWithin fty x (y_n:ys) (Just zlabel) context =
       WasmBlock fty <$> nodeWithin fty x (y_n:ys) Nothing context'
     where context' = BlockFollowedBy zlabel `inside` context
   nodeWithin fty x (y_n:ys) Nothing context =
       nodeWithin doesn'tReturn x ys (Just ylabel) (context `withFallthrough` ylabel) <<>>
       doTree fty y_n context
     where ylabel = treeEntryLabel y_n
   nodeWithin fty x [] (Just zlabel) context
     | not (generatesIf x) =
         WasmBlock fty <$> nodeWithin fty x [] Nothing context'
     where context' = BlockFollowedBy zlabel `inside` context
   nodeWithin fty x [] maybeMarks context =
       translationOfX context
     where xlabel = entryLabel x

           translationOfX :: Context -> m (WasmControl stmt expr '[] post)
           translationOfX context =
             (WasmActions <$> txBlock xlabel (nodeBody x)) <<>>
             case flowLeaving platform x of
               Unconditional l -> doBranch fty xlabel l context
               Conditional e t f ->
                 WasmIf fty
                        <$> txExpr xlabel e
                        <*> doBranch fty xlabel t (IfThenElse maybeMarks `inside` context)
                        <*> doBranch fty xlabel f (IfThenElse maybeMarks `inside` context)
               TailCall e -> WasmTailCall <$> txExpr xlabel e
               Switch e range targets default' ->
                   WasmBrTable <$>  txExpr xlabel e
                               <$~> range
                               <$~> map switchIndex targets
                               <$~> switchIndex default'
                 where switchIndex :: Maybe Label -> Int
                       -- restored equation (lost in extraction, only its
                       -- comment survived); needed for totality:
                       switchIndex Nothing = 0 -- arbitrary; GHC won't go here
                       switchIndex (Just lbl) = index lbl (enclosing context)

   doBranch fty from to context
      | to `elem` fallthrough context && emptyPost fty = pure WasmFallthrough
          -- optimization: `br` is not needed, but it typechecks
          -- only if nothing is expected to be left on the stack
      | isBackward from to = pure $ WasmBr i -- continue
      | isMergeLabel to = pure $ WasmBr i -- exit
      | otherwise = doTree fty (subtreeAt to) context -- inline the code here
     where i = index to (enclosing context)

   generatesIf :: CmmBlock -> Bool
   generatesIf x = case flowLeaving platform x of Conditional {} -> True
                                                  _ -> False

   ---- everything else is utility functions

   treeEntryLabel :: Tree.Tree CmmBlock -> Label
   treeEntryLabel = entryLabel . Tree.rootLabel

   sortTree :: Tree.Tree Label -> Tree.Tree Label
   -- Sort highest rpnum first
   sortTree (Tree.Node label children) =
      Tree.Node label $ sortBy (flip compare `on` (rpnum . Tree.rootLabel)) $
                        map sortTree children

   subtreeAt :: Label -> Tree.Tree CmmBlock
   blockLabeled :: Label -> CmmBlock
   rpnum :: Label -> RPNum -- reverse postorder number of the labeled block
   isMergeLabel :: Label -> Bool
   isMergeNode :: CmmBlock -> Bool
   isLoopHeader :: CmmBlock -> Bool -- identify loop headers
     -- all nodes whose immediate dominator is the given block.
     -- They are produced with the largest RP number first,
     -- so the largest RP number is pushed on the context first.
   dominates :: Label -> Label -> Bool
     -- Domination relation (not just immediate domination)

   GMany NothingO blockmap NothingO = g_graph g

   blockLabeled l = findLabelIn l blockmap

   rpblocks :: [CmmBlock]
   rpblocks = revPostorderFrom blockmap (g_entry g)

   foldEdges :: forall a . (Label -> Label -> a -> a) -> a -> a
   foldEdges f a =
     foldl (\a (from, to) -> f from to a)
           a
           [(entryLabel from, to) | from <- rpblocks, to <- successors from]

   isMergeLabel l = setMember l mergeBlockLabels
   isMergeNode = isMergeLabel . entryLabel

   isBackward :: Label -> Label -> Bool
   isBackward from to = rpnum to <= rpnum from -- self-edge counts as a backward edge

   subtreeAt label = findLabelIn label subtrees
   subtrees :: LabelMap (Tree.Tree CmmBlock)
   subtrees = addSubtree mapEmpty dominatorTree
     where addSubtree map t@(Tree.Node root children) =
               foldl addSubtree (mapInsert (entryLabel root) t map) children

   mergeBlockLabels :: LabelSet
   -- N.B. A block is a merge node if it is where control flow merges.
   -- That means it is entered by multiple control-flow edges, _except_
   -- back edges don't count.  There must be multiple paths that enter the
   -- block _without_ passing through the block itself.
   mergeBlockLabels =
     setFromList [entryLabel n | n <- rpblocks, big (forwardPreds (entryLabel n))]
    where big [] = False
          big [_] = False
          big (_ : _ : _) = True

          forwardPreds :: Label -> [Label] -- reachable predecessors of reachable blocks,
                                           -- via forward edges only
          forwardPreds = \l -> mapFindWithDefault [] l predmap
              where predmap :: LabelMap [Label]
                    predmap = foldEdges addForwardEdge mapEmpty
                    addForwardEdge from to pm
                        | isBackward from to = pm
                        | otherwise = addToList (from :) to pm

   isLoopHeader = isHeaderLabel . entryLabel
   isHeaderLabel = (`setMember` headers) -- loop headers
      where headers :: LabelSet
            headers = foldMap headersPointedTo blockmap
            headersPointedTo block =
                setFromList [label | label <- successors block,
                                              dominates label (entryLabel block)]

   index :: Label -> [ContainingSyntax] -> Int
   index _ [] = panic "destination label not in evaluation context"
   index label (frame : context)
       | label `matchesFrame` frame = 0
       | otherwise = 1 + index label context

   rpnum = gwdRPNumber gwd
   dominates lbl blockname =
       lbl == blockname || dominatorsMember lbl (gwdDominatorsOf gwd blockname)

   nodeBody :: CmmBlock -> CmmActions
   nodeBody (BlockCC _first middle _last) = middle
-- | A CmmSwitch scrutinee may have any width, but a br_table operand
-- must be exactly word sized, hence the extension here. (#22871)
smartExtend :: Platform -> CmmExpr -> CmmExpr
smartExtend p e | w0 == w1 = e   -- already word sized: no conversion
                | otherwise = CmmMachOp (MO_UU_Conv w0 w1) [e]
  where
    w0 = cmmExprWidth p e  -- width of the scrutinee expression
    w1 = wordWidth p       -- target word width
-- | Add a constant offset to a Cmm expression at the expression's own
-- width; adding zero is the identity and builds no new node.
smartPlus :: Platform -> CmmExpr -> Int -> CmmExpr
smartPlus platform e k
  | k == 0    = e
  | otherwise = CmmMachOp (MO_Add width) [e, CmmLit (CmmInt (toInteger k) width)]
  where
    width = cmmExprWidth platform e
-- | Update the list stored at a key by applying @consx@ to it,
-- treating a missing entry as the empty list.
addToList :: (IsMap map) => ([a] -> [a]) -> KeyOf map -> map [a] -> map [a]
addToList consx = mapAlter (Just . consx . maybe [] id)
------------------------------------------------------------------
--- everything below here is for diagnostics in case of panic
-- | Diagnostic rendering of an enclosing frame (used in panic output).
instance Outputable ContainingSyntax where
  ppr (BlockFollowedBy l) = text "node" <+> ppr l
  ppr (LoopHeadedBy l) = text "loop" <+> ppr l
  ppr (IfThenElse l) = text "if-then-else" <+> ppr l
-- | Total lookup in a 'LabelMap' that panics (with the offending
-- label) when the label is absent; the default is only forced on a
-- miss, so the panic fires lazily.
findLabelIn :: HasDebugCallStack => Label -> LabelMap a -> a
findLabelIn lbl =
  mapFindWithDefault (pprPanic "label not found in control-flow graph" (ppr lbl)) lbl
infixl 4 <$~>
-- | Apply a wrapped function to a pure argument: a flipped cousin of
-- '<*>' whose right operand needs no wrapping.
(<$~>) :: Functor m => m (a -> b) -> a -> m b
mf <$~> x = fmap (\f -> f x) mf
-- | Concatenate two effectful Wasm code fragments: run both
-- translations and join the resulting (stack-type-aligned) code.
(<<>>) :: forall m s e pre mid post
        . Applicative m
       => m (WasmControl s e pre mid)
       -> m (WasmControl s e mid post)
       -> m (WasmControl s e pre post)
ma <<>> mb = (<>) <$> ma <*> mb
| null | https://raw.githubusercontent.com/ghc/ghc/354aa47d313113855aff9e5c5476fcb56f80e3bf/compiler/GHC/Wasm/ControlFlow/FromCmm.hs | haskell | ----------------- Abstraction of Cmm control flow -----------------------
| Abstracts the kind of control flow we understand how to convert.
* Unconditionally
* Conditionally on a predicate of type `e`
* To a location determined by the value of a scrutinee of type `e`
* Not at all.
--------------------- Evaluation contexts ------------------------------
| The syntactic constructs in which Wasm code may be contained.
A list of these constructs represents an evaluation context,
which is used to determined what level of `br` instruction
reaches a given label.
^ Carries the label that follows `if...end`, if any
the label can
be reached just by "falling through"
the hole
--------------------- Translation ------------------------------
^ needed for offset calculation
^ translator for expressions
^ translator for straight-line code
Dominator tree in which children are sorted
optimization: `br` is not needed, but it typechecks
only if nothing is expected to be left on the stack
continue
exit
inline the code here
-- everything else is utility functions
reverse postorder number of the labeled block
identify loop headers
all nodes whose immediate dominator is the given block.
self-edge counts as a backward edge
N.B. A block is a merge node if it is where control flow merges.
That means it is entered by multiple control-flow edges, _except_
back edges don't count. There must be multiple paths that enter the
block _without_ passing through the block itself.
reachable predecessors of reachable blocks,
via forward edges only
loop headers
----------------------------------------------------------------
- everything below here is for diagnostics in case of panic | # LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
# LANGUAGE DataKinds #
module GHC.Wasm.ControlFlow.FromCmm
( structuredControl
)
where
import GHC.Prelude hiding (succ)
import Data.Function
import Data.List (sortBy)
import qualified Data.Tree as Tree
import GHC.Cmm
import GHC.Cmm.Dataflow.Block
import GHC.Cmm.Dataflow.Collections
import GHC.Cmm.Dominators
import GHC.Cmm.Dataflow.Graph
import GHC.Cmm.Dataflow.Label
import GHC.Cmm.Switch
import GHC.CmmToAsm.Wasm.Types
import GHC.Platform
import GHC.Utils.Misc
import GHC.Utils.Panic
import GHC.Utils.Outputable ( Outputable, text, (<+>), ppr
, pprWithCommas
)
import GHC.Wasm.ControlFlow
|
Module : GHC.Wasm . ControlFlow . FromCmm
Description : Translation of ( reducible ) Cmm control flow to WebAssembly
Code in this module can translate any _ reducible _ Cmm control - flow
graph to the structured control flow that is required by WebAssembly .
The algorithm is subtle and is described in detail in a draft paper
to be found at /~nr/pubs/relooper.pdf .
Module : GHC.Wasm.ControlFlow.FromCmm
Description : Translation of (reducible) Cmm control flow to WebAssembly
Code in this module can translate any _reducible_ Cmm control-flow
graph to the structured control flow that is required by WebAssembly.
The algorithm is subtle and is described in detail in a draft paper
to be found at /~nr/pubs/relooper.pdf.
-}
A block can be left in one of four ways :
data ControlFlow e = Unconditional Label
| Conditional e Label Label
| Switch { _scrutinee :: e
, _range :: BrTableInterval
from 0
, _defaultTarget :: Maybe Label
}
| TailCall e
flowLeaving :: Platform -> CmmBlock -> ControlFlow CmmExpr
flowLeaving platform b =
case lastNode b of
CmmBranch l -> Unconditional l
CmmCondBranch c t f _ -> Conditional c t f
CmmSwitch e targets ->
let (offset, target_labels) = switchTargetsToTable targets
(lo, hi) = switchTargetsRange targets
default_label = switchTargetsDefault targets
scrutinee = smartExtend platform $ smartPlus platform e offset
range = inclusiveInterval (lo+toInteger offset) (hi+toInteger offset)
in Switch scrutinee range target_labels default_label
CmmCall { cml_cont = Nothing, cml_target = e } -> TailCall e
_ -> panic "flowLeaving: unreachable"
data ContainingSyntax
= BlockFollowedBy Label
| LoopHeadedBy Label
matchesFrame :: Label -> ContainingSyntax -> Bool
matchesFrame label (BlockFollowedBy l) = label == l
matchesFrame label (LoopHeadedBy l) = label == l
matchesFrame label (IfThenElse (Just l)) = label == l
matchesFrame _ _ = False
data Context = Context { enclosing :: [ContainingSyntax]
}
instance Outputable Context where
ppr c | Just l <- fallthrough c =
pprWithCommas ppr (enclosing c) <+> text "fallthrough to" <+> ppr l
| otherwise = pprWithCommas ppr (enclosing c)
emptyContext :: Context
emptyContext = Context [] Nothing
inside :: ContainingSyntax -> Context -> Context
withFallthrough :: Context -> Label -> Context
inside frame c = c { enclosing = frame : enclosing c }
withFallthrough c l = c { fallthrough = Just l }
type CmmActions = Block CmmNode O O
type FT pre post = WasmFunctionType pre post
returns :: FT '[] '[ 'I32]
doesn'tReturn :: FT '[] '[]
returns = WasmFunctionType TypeListNil (TypeListCons TagI32 TypeListNil)
doesn'tReturn = WasmFunctionType TypeListNil TypeListNil
emptyPost :: FT pre post -> Bool
emptyPost (WasmFunctionType _ TypeListNil) = True
emptyPost _ = False
| Convert a Cmm CFG to WebAssembly 's structured control flow .
structuredControl :: forall expr stmt m .
Applicative m
^ CFG to be translated
-> m (WasmControl stmt expr '[] '[ 'I32])
structuredControl platform txExpr txBlock g =
doTree returns dominatorTree emptyContext
where
gwd :: GraphWithDominators CmmNode
gwd = graphWithDominators g
with highest reverse - postorder number first
dominatorTree = fmap blockLabeled $ sortTree $ gwdDominatorTree gwd
doTree :: FT '[] post -> Tree.Tree CmmBlock -> Context -> m (WasmControl stmt expr '[] post)
nodeWithin :: forall post .
FT '[] post -> CmmBlock -> [Tree.Tree CmmBlock] -> Maybe Label
-> Context -> m (WasmControl stmt expr '[] post)
doBranch :: FT '[] post -> Label -> Label -> Context -> m (WasmControl stmt expr '[] post)
doTree fty (Tree.Node x children) context =
let codeForX = nodeWithin fty x selectedChildren Nothing
in if isLoopHeader x then
WasmLoop fty <$> codeForX loopContext
else
codeForX context
where selectedChildren = case lastNode x of
CmmSwitch {} -> children
N.B. Unlike ` if ` , translation of Switch uses only labels .
_ -> filter hasMergeRoot children
loopContext = LoopHeadedBy (entryLabel x) `inside` context
hasMergeRoot = isMergeNode . Tree.rootLabel
nodeWithin fty x (y_n:ys) (Just zlabel) context =
WasmBlock fty <$> nodeWithin fty x (y_n:ys) Nothing context'
where context' = BlockFollowedBy zlabel `inside` context
nodeWithin fty x (y_n:ys) Nothing context =
nodeWithin doesn'tReturn x ys (Just ylabel) (context `withFallthrough` ylabel) <<>>
doTree fty y_n context
where ylabel = treeEntryLabel y_n
nodeWithin fty x [] (Just zlabel) context
| not (generatesIf x) =
WasmBlock fty <$> nodeWithin fty x [] Nothing context'
where context' = BlockFollowedBy zlabel `inside` context
nodeWithin fty x [] maybeMarks context =
translationOfX context
where xlabel = entryLabel x
translationOfX :: Context -> m (WasmControl stmt expr '[] post)
translationOfX context =
(WasmActions <$> txBlock xlabel (nodeBody x)) <<>>
case flowLeaving platform x of
Unconditional l -> doBranch fty xlabel l context
Conditional e t f ->
WasmIf fty
<$> txExpr xlabel e
<*> doBranch fty xlabel t (IfThenElse maybeMarks `inside` context)
<*> doBranch fty xlabel f (IfThenElse maybeMarks `inside` context)
TailCall e -> WasmTailCall <$> txExpr xlabel e
Switch e range targets default' ->
WasmBrTable <$> txExpr xlabel e
<$~> range
<$~> map switchIndex targets
<$~> switchIndex default'
where switchIndex :: Maybe Label -> Int
arbitrary ; GHC wo n't go here
switchIndex (Just lbl) = index lbl (enclosing context)
doBranch fty from to context
| to `elem` fallthrough context && emptyPost fty = pure WasmFallthrough
where i = index to (enclosing context)
generatesIf :: CmmBlock -> Bool
generatesIf x = case flowLeaving platform x of Conditional {} -> True
_ -> False
treeEntryLabel :: Tree.Tree CmmBlock -> Label
treeEntryLabel = entryLabel . Tree.rootLabel
sortTree :: Tree.Tree Label -> Tree.Tree Label
Sort highest rpnum first
sortTree (Tree.Node label children) =
Tree.Node label $ sortBy (flip compare `on` (rpnum . Tree.rootLabel)) $
map sortTree children
subtreeAt :: Label -> Tree.Tree CmmBlock
blockLabeled :: Label -> CmmBlock
isMergeLabel :: Label -> Bool
isMergeNode :: CmmBlock -> Bool
They are produced with the largest RP number first ,
so the largest RP number is pushed on the context first .
dominates :: Label -> Label -> Bool
Domination relation ( not just immediate domination )
blockmap :: LabelMap CmmBlock
GMany NothingO blockmap NothingO = g_graph g
blockLabeled l = findLabelIn l blockmap
rpblocks :: [CmmBlock]
rpblocks = revPostorderFrom blockmap (g_entry g)
foldEdges :: forall a . (Label -> Label -> a -> a) -> a -> a
foldEdges f a =
foldl (\a (from, to) -> f from to a)
a
[(entryLabel from, to) | from <- rpblocks, to <- successors from]
isMergeLabel l = setMember l mergeBlockLabels
isMergeNode = isMergeLabel . entryLabel
isBackward :: Label -> Label -> Bool
subtreeAt label = findLabelIn label subtrees
subtrees :: LabelMap (Tree.Tree CmmBlock)
subtrees = addSubtree mapEmpty dominatorTree
where addSubtree map t@(Tree.Node root children) =
foldl addSubtree (mapInsert (entryLabel root) t map) children
mergeBlockLabels :: LabelSet
mergeBlockLabels =
setFromList [entryLabel n | n <- rpblocks, big (forwardPreds (entryLabel n))]
where big [] = False
big [_] = False
big (_ : _ : _) = True
forwardPreds = \l -> mapFindWithDefault [] l predmap
where predmap :: LabelMap [Label]
predmap = foldEdges addForwardEdge mapEmpty
addForwardEdge from to pm
| isBackward from to = pm
| otherwise = addToList (from :) to pm
isLoopHeader = isHeaderLabel . entryLabel
where headers :: LabelSet
headers = foldMap headersPointedTo blockmap
headersPointedTo block =
setFromList [label | label <- successors block,
dominates label (entryLabel block)]
index :: Label -> [ContainingSyntax] -> Int
index _ [] = panic "destination label not in evaluation context"
index label (frame : context)
| label `matchesFrame` frame = 0
| otherwise = 1 + index label context
rpnum = gwdRPNumber gwd
dominates lbl blockname =
lbl == blockname || dominatorsMember lbl (gwdDominatorsOf gwd blockname)
nodeBody :: CmmBlock -> CmmActions
nodeBody (BlockCC _first middle _last) = middle
| A CmmSwitch scrutinee may have any width , but a br_table operand
must be exactly word sized , hence the extension here . ( # 22871 )
smartExtend :: Platform -> CmmExpr -> CmmExpr
smartExtend p e | w0 == w1 = e
| otherwise = CmmMachOp (MO_UU_Conv w0 w1) [e]
where
w0 = cmmExprWidth p e
w1 = wordWidth p
smartPlus :: Platform -> CmmExpr -> Int -> CmmExpr
smartPlus _ e 0 = e
smartPlus platform e k =
CmmMachOp (MO_Add width) [e, CmmLit (CmmInt (toInteger k) width)]
where width = cmmExprWidth platform e
addToList :: (IsMap map) => ([a] -> [a]) -> KeyOf map -> map [a] -> map [a]
addToList consx = mapAlter add
where add Nothing = Just (consx [])
add (Just xs) = Just (consx xs)
instance Outputable ContainingSyntax where
ppr (BlockFollowedBy l) = text "node" <+> ppr l
ppr (LoopHeadedBy l) = text "loop" <+> ppr l
ppr (IfThenElse l) = text "if-then-else" <+> ppr l
findLabelIn :: HasDebugCallStack => Label -> LabelMap a -> a
findLabelIn lbl = mapFindWithDefault failed lbl
where failed =
pprPanic "label not found in control-flow graph" (ppr lbl)
infixl 4 <$~>
(<$~>) :: Functor m => m (a -> b) -> a -> m b
(<$~>) f x = fmap ($ x) f
(<<>>) :: forall m s e pre mid post
. Applicative m
=> m (WasmControl s e pre mid)
-> m (WasmControl s e mid post)
-> m (WasmControl s e pre post)
(<<>>) = liftA2 (<>)
|
f156aabf221b18e79e66b325214fb1f988dbcbafdefbf569ccbb799a094bc1d2 | josefs/Gradualizer | refine_bound_var_on_mismatch.erl | -module(refine_bound_var_on_mismatch).
%% Note: Here we're refining an already bound variable
-export([refined_var_not_matching_itself/1,
refine_bound_var_by_pattern_mismatch/1]).
%% Current error: Var is expected to have type y | z but has type x | y | z
%% Known-problem fixture: matching the already-bound variable Var
%% against itself in the second clause can never fail, so the case is
%% exhaustive and should type-check; the redundant-looking clause is
%% intentional and must not be "simplified".
-spec refined_var_not_matching_itself(x | y | z) -> ok.
refined_var_not_matching_itself(Var) ->
    case Var of
        x -> ok;
        Var -> ok
    end.
%% Current error: Var is expected to have type ok but it has type ok | nok
%% Known-problem fixture: once the 'nok' clause has failed to match,
%% Var should be refined to 'ok' in the catch-all clause, making
%% "-> Var" well-typed at return type ok.
-spec refine_bound_var_by_pattern_mismatch(ok | nok) -> ok.
refine_bound_var_by_pattern_mismatch(Var) ->
    case Var of
        nok -> ok;
        _ -> Var
    end.
| null | https://raw.githubusercontent.com/josefs/Gradualizer/208f5816b0157f282212fc036ba7560f0822f9fc/test/known_problems/should_pass/refine_bound_var_on_mismatch.erl | erlang | Note: Here we're refining an already bound variable | -module(refine_bound_var_on_mismatch).
-export([refined_var_not_matching_itself/1,
refine_bound_var_by_pattern_mismatch/1]).
Current error : is expected to have type y | z but has type x | y | z
-spec refined_var_not_matching_itself(x | y | z) -> ok.
refined_var_not_matching_itself(Var) ->
case Var of
x -> ok;
Var -> ok
end.
Current error : is expected to have type ok but it has type ok |
-spec refine_bound_var_by_pattern_mismatch(ok | nok) -> ok.
refine_bound_var_by_pattern_mismatch(Var) ->
case Var of
nok -> ok;
_ -> Var
end.
|
dcbc7b36142cd494d9ca3732828cb5e85c1e25e07864d354b4c91d147272fc3f | CloudI/CloudI | proper_arith.erl | -*- coding : utf-8 -*-
%%% -*- erlang-indent-level: 2 -*-
%%% -------------------------------------------------------------------
%%% Copyright 2010-2020 Manolis Papadakis <manopapad@gmail.com>,
%%%                     Eirini Arvaniti <eirinibob@gmail.com>
%%%                 and Kostis Sagonas <kostis@cs.ntua.gr>
%%%
This file is part of PropEr .
%%%
%%% PropEr is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
%%% (at your option) any later version.
%%%
%%% PropEr is distributed in the hope that it will be useful,
%%% but WITHOUT ANY WARRANTY; without even the implied warranty of
%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
%%% GNU General Public License for more details.
%%%
You should have received a copy of the GNU General Public License
%%% along with PropEr. If not, see </>.
%%% @copyright 2010-2020 Manolis Papadakis, Eirini Arvaniti and Kostis Sagonas
%%% @version {@version}
%%% @author Manolis Papadakis
%%% @doc This module contains helper arithmetic, list handling and random
%%% functions.
%%% @private
-module(proper_arith).
,
safe_any/2, safe_zip/2, tuple_map/2, cut_improper_tail/1,
head_length/1, find_first/2, filter/2, partition/2, insert/3,%remove/2,
unflatten/2]).
-export([rand_start/1, rand_restart/1, rand_reseed/0, rand_stop/0,
rand_int/1, rand_int/2, smart_rand_int/3, rand_non_neg_int/1,
rand_float/1, rand_float/2, rand_non_neg_float/1,
distribute/2, jumble/1, rand_choose/1, freq_choose/1]).
-include("proper_internal.hrl").
%%-----------------------------------------------------------------------------
%% List handling functions
%%-----------------------------------------------------------------------------
-spec list_remove(position(), [T]) -> [T].
list_remove(Index, List) ->
{H,[_Elem | T]} = lists:split(Index - 1, List),
H ++ T.
-spec list_update(position(), T, [T]) -> [T,...].
list_update(Index, NewElem, List) ->
{H,[_OldElem | T]} = lists:split(Index - 1, List),
H ++ [NewElem] ++ T.
-spec list_insert(position(), T, [T]) -> [T,...].
list_insert(Index, Elem, List) ->
{H,T} = lists:split(Index - 1, List),
H ++ [Elem] ++ T.
TODO : safe_map and cut_improper_tail can be combined into one generic list-
recursing function , with 3 function arguments : apply_to_proper_elems ,
%% apply_to_improper_tail, combine
-spec safe_map(fun((T) -> S), maybe_improper_list(T,T | [])) ->
maybe_improper_list(S,S | []).
safe_map(Fun, List) ->
safe_map_tr(Fun, List, []).
-spec safe_map_tr(fun((T) -> S), maybe_improper_list(T,T | []) | T, [S]) ->
maybe_improper_list(S,S | []).
safe_map_tr(_Fun, [], AccList) ->
lists:reverse(AccList);
safe_map_tr(Fun, [Head | Tail], AccList) ->
safe_map_tr(Fun, Tail, [Fun(Head) | AccList]);
safe_map_tr(Fun, ImproperTail, AccList) ->
lists:reverse(AccList, Fun(ImproperTail)).
-spec , A ) - > A ) , A , maybe_improper_list(T , T | [ ] ) ) - > A.
safe_foldl(_Fun , Acc , [ ] ) - >
Acc ;
safe_foldl(Fun , Acc , [ X | Rest ] ) - >
safe_foldl(Fun , Fun(X , Acc ) , Rest ) ;
safe_foldl(Fun , Acc , ImproperTail ) - >
Fun(ImproperTail , Acc ) .
-spec safe_any(fun((T) -> boolean()), maybe_improper_list(T,T | [])) ->
boolean().
safe_any(_Pred, []) ->
false;
safe_any(Pred, [X | Rest]) ->
Pred(X) orelse safe_any(Pred, Rest);
safe_any(Pred, ImproperTail) ->
Pred(ImproperTail).
-spec safe_zip([T], [S]) -> [{T,S}].
safe_zip(Xs, Ys) ->
safe_zip_tr(Xs, Ys, []).
-spec safe_zip_tr([T], [S], [{T,S}]) -> [{T,S}].
safe_zip_tr([], _Ys, Acc) ->
lists:reverse(Acc);
safe_zip_tr(_Xs, [], Acc) ->
lists:reverse(Acc);
safe_zip_tr([X|Xtail], [Y|YTail], Acc) ->
safe_zip_tr(Xtail, YTail, [{X,Y}|Acc]).
-spec tuple_map(fun((T) -> S), loose_tuple(T)) -> loose_tuple(S).
tuple_map(Fun, Tuple) ->
list_to_tuple(lists:map(Fun, tuple_to_list(Tuple))).
-spec cut_improper_tail(maybe_improper_list(T,T | [])) -> [T] | {[T],T}.
cut_improper_tail(List) ->
cut_improper_tail_tr(List, []).
-spec cut_improper_tail_tr(maybe_improper_list(T,T | []) | T, [T]) ->
[T] | {[T],T}.
cut_improper_tail_tr([], AccList) ->
lists:reverse(AccList);
cut_improper_tail_tr([Head | Tail], AccList) ->
cut_improper_tail_tr(Tail, [Head | AccList]);
cut_improper_tail_tr(ImproperTail, AccList) ->
{lists:reverse(AccList), ImproperTail}.
-spec head_length(nonempty_improper_list(term(),term())) -> pos_integer().
head_length(List) ->
head_length_tr(List, 0).
-spec head_length_tr(nonempty_improper_list(term(),term()) | term(),
non_neg_integer()) -> pos_integer().
head_length_tr([_Head | Tail], Len) ->
head_length_tr(Tail, Len + 1);
head_length_tr(_ImproperTail, Len) ->
Len.
-spec find_first(fun((T) -> boolean()), [T]) -> {position(),T} | 'none'.
find_first(Pred, List) ->
find_first_tr(Pred, List, 1).
-spec find_first_tr(fun((T) -> boolean()), [T], position()) ->
{position(),T} | 'none'.
find_first_tr(_Pred, [], _Pos) ->
none;
find_first_tr(Pred, [X | Rest], Pos) ->
case Pred(X) of
true -> {Pos, X};
false -> find_first_tr(Pred, Rest, Pos + 1)
end.
-spec filter(fun((T) -> boolean()), [T]) -> {[T],[position()]}.
filter(Pred, List) ->
filter_tr(Pred, lists:reverse(List), length(List), [], []).
-spec filter_tr(fun((T) -> boolean()), [T], position(), [T], [position()]) ->
{[T], [position()]}.
filter_tr(_Pred, [], _Pos, Trues, TrueLookup) ->
{Trues, TrueLookup};
filter_tr(Pred, [X | Rest], Pos, Trues, TrueLookup) ->
case Pred(X) of
true ->
filter_tr(Pred, Rest, Pos - 1, [X | Trues], [Pos | TrueLookup]);
false ->
filter_tr(Pred, Rest, Pos - 1, Trues, TrueLookup)
end.
-spec partition(fun((T) -> boolean()), [T]) ->
{[T], [position()], [T], [position()]}.
partition(Pred, List) ->
partition_tr(Pred, lists:reverse(List), length(List), [], [], [], []).
-spec partition_tr(fun((T) -> boolean()), [T], position(), [T], [position()],
[T], [position()]) -> {[T],[position()],[T],[position()]}.
partition_tr(_Pred, [], _Pos, Trues, TrueLookup, Falses, FalseLookup) ->
{Trues, TrueLookup, Falses, FalseLookup};
partition_tr(Pred, [X | Rest], Pos, Trues, TrueLookup, Falses, FalseLookup) ->
case Pred(X) of
true ->
partition_tr(Pred, Rest, Pos - 1, [X | Trues], [Pos | TrueLookup],
Falses, FalseLookup);
false ->
partition_tr(Pred, Rest, Pos - 1, Trues, TrueLookup, [X | Falses],
[Pos | FalseLookup])
end.
-spec insert([T], [position()], [T]) -> [T].
insert(Xs, Positions, Ys) ->
insert_tr(Xs, Positions, Ys, 1, []).
-spec insert_tr([T], [position()], [T], position(), [T]) -> [T].
insert_tr([], [], Ys, _Pos, Acc) ->
lists:reverse(Acc, Ys);
insert_tr([X | XsTail], [Pos | PosTail], Ys, Pos, Acc) ->
insert_tr(XsTail, PosTail, Ys, Pos + 1, [X | Acc]);
insert_tr(Xs, Positions, [Y | YsTail], Pos, Acc) ->
insert_tr(Xs, Positions, YsTail, Pos + 1, [Y | Acc]).
%% -spec remove([T], [position()]) -> [T].
%% remove(Xs, Positions) ->
remove_tr(Xs , Positions , 1 , [ ] ) .
%%
-spec remove_tr([T ] , [ position ( ) ] , position ( ) , [ T ] ) - > [ T ] .
%% remove_tr(Xs, [], _Pos, Acc) ->
lists : reverse(Acc , ) ;
remove_tr([_X | XsTail ] , [ Pos | PosTail ] , Pos , Acc ) - >
remove_tr(XsTail , PosTail , Pos + 1 , Acc ) ;
%% remove_tr([X | XsTail], Positions, Pos, Acc) ->
%% remove_tr(XsTail, Positions, Pos + 1, [X | Acc]).
-spec unflatten([T], [proper_types:length()]) -> [[T]].
unflatten(List, Lens) ->
{[],RevSubLists} = lists:foldl(fun remove_n/2, {List,[]}, Lens),
lists:reverse(RevSubLists).
-spec remove_n(non_neg_integer(), {[T],[[T]]}) -> {[T],[[T]]}.
remove_n(N, {List,Acc}) ->
{Front,Back} = lists:split(N, List),
{Back, [Front | Acc]}.
%%-----------------------------------------------------------------------------
%% Random functions
%%-----------------------------------------------------------------------------
%% @doc Seeds the random number generator. This function should be run before
%% calling any random function from this module.
-spec rand_start(proper_gen:seed()) -> 'ok'.
rand_start(Seed) ->
_ = rand:seed(exsplus, Seed),
ok.
%% @doc Conditionally seeds the random number generator. This function should
%% be run before calling any random function from this module.
-spec rand_restart(proper_gen:seed()) -> 'ok'.
rand_restart(Seed) ->
case get(?SEED_NAME) of
undefined ->
rand_start(Seed);
_ ->
ok
end.
-spec rand_reseed() -> 'ok'.
rand_reseed() ->
_ = rand:seed(exsplus, os:timestamp()),
ok.
-spec rand_stop() -> 'ok'.
rand_stop() ->
erase(?SEED_NAME),
ok.
-spec rand_int(non_neg_integer()) -> integer().
rand_int(Const) ->
round(rand_float(Const)).
-spec rand_non_neg_int(non_neg_integer()) -> non_neg_integer().
rand_non_neg_int(Const) ->
trunc(rand_non_neg_float(Const)).
-spec bounded_rand_non_neg_int(non_neg_integer(), non_neg_integer()) ->
non_neg_integer().
bounded_rand_non_neg_int(Const, Lim) when is_integer(Lim), Lim >= 0 ->
X = rand_non_neg_int(Const),
case X > Lim of
true -> bounded_rand_non_neg_int(Const, Lim);
false -> X
end.
-spec rand_int(integer(), integer()) -> integer().
rand_int(Low, High) when is_integer(Low), is_integer(High), Low =< High ->
Low + ?RANDOM_MOD:uniform(High - Low + 1) - 1.
%% When the range is large, skew the distribution to be more like that of an
%% unbounded random integer.
-spec smart_rand_int(non_neg_integer(), integer(), integer()) -> integer().
smart_rand_int(Const, Low, High) ->
case High - Low =< ?SMALL_RANGE_THRESHOLD of
true -> rand_int(Low, High);
false -> wide_range_rand_int(Const, Low, High)
end.
-spec wide_range_rand_int(non_neg_integer(), integer(), integer()) ->
integer().
wide_range_rand_int(Const, Low, High) when Low >= 0 ->
Low + bounded_rand_non_neg_int(Const, High - Low);
wide_range_rand_int(Const, Low, High) when High =< 0 ->
High - bounded_rand_non_neg_int(Const, High - Low);
wide_range_rand_int(Const, Low, High) ->
case ?RANDOM_MOD:uniform(2) of
1 -> smart_rand_int(Const, 0, High);
2 -> smart_rand_int(Const, Low, 0)
end.
-spec rand_float(non_neg_integer()) -> float().
rand_float(Const) ->
X = rand_non_neg_float(Const),
case ?RANDOM_MOD:uniform(2) of
1 -> X;
2 -> -X
end.
-spec rand_non_neg_float(non_neg_integer()) -> float().
rand_non_neg_float(Const) when is_integer(Const), Const >= 0 ->
case ?RANDOM_MOD:uniform() of
1.0 -> rand_non_neg_float(Const);
X -> Const * zero_one_to_zero_inf(X)
end.
-spec rand_float(float(), float()) -> float().
rand_float(Low, High) when is_float(Low), is_float(High), Low =< High ->
Low + ?RANDOM_MOD:uniform() * (High - Low).
-spec zero_one_to_zero_inf(float()) -> float().
This function must return only non - negative values and map 0.0 to 0.0 , but
may be undefined at 1.0 .
%% TODO: read global options and decide here which bijection to use
zero_one_to_zero_inf(X) ->
X / math:sqrt(1 - X*X).
-spec distribute(non_neg_integer(), non_neg_integer()) -> [non_neg_integer()].
distribute(_Credits, 0) ->
[];
distribute(Credits, People) ->
jumble(distribute_tr(Credits, People, [])).
-spec distribute_tr(non_neg_integer(), pos_integer(), [non_neg_integer()]) ->
[non_neg_integer()].
distribute_tr(0, PeopleLeft, AccList) ->
lists:duplicate(PeopleLeft, 0) ++ AccList;
distribute_tr(CreditsLeft, 1, AccList) ->
[CreditsLeft | AccList];
distribute_tr(CreditsLeft, PeopleLeft, AccList) ->
YourCut = rand_int(0, CreditsLeft),
distribute_tr(CreditsLeft - YourCut, PeopleLeft - 1, [YourCut | AccList]).
-spec jumble([T]) -> [T].
%% @doc Produces a random permutation of a list.
jumble(List) ->
[X || {_, X} <- lists:sort([{?RANDOM_MOD:uniform(), X} || X <- List])].
-spec rand_choose([T,...]) -> {position(),T}.
rand_choose(Choices) when Choices =/= [] ->
Pos = rand_int(1, length(Choices)),
{Pos, lists:nth(Pos, Choices)}.
-spec freq_choose([{proper_types:frequency(),T},...]) -> {position(),T}.
freq_choose(Choices) when Choices =/= [] ->
AddFreq = fun({Freq,_},Acc) -> Freq + Acc end,
SumFreq = lists:foldl(AddFreq, 0, Choices),
freq_select(rand_int(1, SumFreq), Choices, 1).
-spec freq_select(proper_types:frequency(), [{proper_types:frequency(),T}],
position()) -> {position(),T}.
freq_select(N, [{Freq,Choice} | Rest], Pos) ->
case N =< Freq of
true ->
{Pos,Choice};
false ->
freq_select(N - Freq, Rest, Pos + 1)
end.
| null | https://raw.githubusercontent.com/CloudI/CloudI/3e45031c7ee3e974ead2612ea7dd06c9edf973c9/src/external/proper/src/proper_arith.erl | erlang | -------------------------------------------------------------------
PropEr is free software: you can redistribute it and/or modify
(at your option) any later version.
PropEr is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with PropEr. If not, see </>.
@version {@version}
@doc This module contains helper arithmetic, list handling and random
functions.
remove/2,
-----------------------------------------------------------------------------
List handling functions
-----------------------------------------------------------------------------
apply_to_improper_tail, combine
-spec remove([T], [position()]) -> [T].
remove(Xs, Positions) ->
remove_tr(Xs, [], _Pos, Acc) ->
remove_tr([X | XsTail], Positions, Pos, Acc) ->
remove_tr(XsTail, Positions, Pos + 1, [X | Acc]).
-----------------------------------------------------------------------------
Random functions
-----------------------------------------------------------------------------
@doc Seeds the random number generator. This function should be run before
calling any random function from this module.
@doc Conditionally seeds the random number generator. This function should
be run before calling any random function from this module.
When the range is large, skew the distribution to be more like that of an
unbounded random integer.
TODO: read global options and decide here which bijection to use
@doc Produces a random permutation of a list. | -*- coding : utf-8 -*-
-*- erlang - indent - level : 2 -*-
Copyright 2010 - 2020 < > ,
< >
and < >
This file is part of PropEr .
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
2010 - 2020 , and
@author
@private
-module(proper_arith).
,
safe_any/2, safe_zip/2, tuple_map/2, cut_improper_tail/1,
unflatten/2]).
-export([rand_start/1, rand_restart/1, rand_reseed/0, rand_stop/0,
rand_int/1, rand_int/2, smart_rand_int/3, rand_non_neg_int/1,
rand_float/1, rand_float/2, rand_non_neg_float/1,
distribute/2, jumble/1, rand_choose/1, freq_choose/1]).
-include("proper_internal.hrl").
-spec list_remove(position(), [T]) -> [T].
list_remove(Index, List) ->
{H,[_Elem | T]} = lists:split(Index - 1, List),
H ++ T.
-spec list_update(position(), T, [T]) -> [T,...].
list_update(Index, NewElem, List) ->
{H,[_OldElem | T]} = lists:split(Index - 1, List),
H ++ [NewElem] ++ T.
-spec list_insert(position(), T, [T]) -> [T,...].
list_insert(Index, Elem, List) ->
{H,T} = lists:split(Index - 1, List),
H ++ [Elem] ++ T.
TODO : safe_map and cut_improper_tail can be combined into one generic list-
recursing function , with 3 function arguments : apply_to_proper_elems ,
-spec safe_map(fun((T) -> S), maybe_improper_list(T,T | [])) ->
maybe_improper_list(S,S | []).
safe_map(Fun, List) ->
safe_map_tr(Fun, List, []).
-spec safe_map_tr(fun((T) -> S), maybe_improper_list(T,T | []) | T, [S]) ->
maybe_improper_list(S,S | []).
safe_map_tr(_Fun, [], AccList) ->
lists:reverse(AccList);
safe_map_tr(Fun, [Head | Tail], AccList) ->
safe_map_tr(Fun, Tail, [Fun(Head) | AccList]);
safe_map_tr(Fun, ImproperTail, AccList) ->
lists:reverse(AccList, Fun(ImproperTail)).
-spec , A ) - > A ) , A , maybe_improper_list(T , T | [ ] ) ) - > A.
safe_foldl(_Fun , Acc , [ ] ) - >
Acc ;
safe_foldl(Fun , Acc , [ X | Rest ] ) - >
safe_foldl(Fun , Fun(X , Acc ) , Rest ) ;
safe_foldl(Fun , Acc , ImproperTail ) - >
Fun(ImproperTail , Acc ) .
-spec safe_any(fun((T) -> boolean()), maybe_improper_list(T,T | [])) ->
boolean().
safe_any(_Pred, []) ->
false;
safe_any(Pred, [X | Rest]) ->
Pred(X) orelse safe_any(Pred, Rest);
safe_any(Pred, ImproperTail) ->
Pred(ImproperTail).
-spec safe_zip([T], [S]) -> [{T,S}].
safe_zip(Xs, Ys) ->
safe_zip_tr(Xs, Ys, []).
-spec safe_zip_tr([T], [S], [{T,S}]) -> [{T,S}].
safe_zip_tr([], _Ys, Acc) ->
lists:reverse(Acc);
safe_zip_tr(_Xs, [], Acc) ->
lists:reverse(Acc);
safe_zip_tr([X|Xtail], [Y|YTail], Acc) ->
safe_zip_tr(Xtail, YTail, [{X,Y}|Acc]).
-spec tuple_map(fun((T) -> S), loose_tuple(T)) -> loose_tuple(S).
tuple_map(Fun, Tuple) ->
list_to_tuple(lists:map(Fun, tuple_to_list(Tuple))).
-spec cut_improper_tail(maybe_improper_list(T,T | [])) -> [T] | {[T],T}.
cut_improper_tail(List) ->
cut_improper_tail_tr(List, []).
-spec cut_improper_tail_tr(maybe_improper_list(T,T | []) | T, [T]) ->
[T] | {[T],T}.
cut_improper_tail_tr([], AccList) ->
lists:reverse(AccList);
cut_improper_tail_tr([Head | Tail], AccList) ->
cut_improper_tail_tr(Tail, [Head | AccList]);
cut_improper_tail_tr(ImproperTail, AccList) ->
{lists:reverse(AccList), ImproperTail}.
-spec head_length(nonempty_improper_list(term(),term())) -> pos_integer().
head_length(List) ->
head_length_tr(List, 0).
-spec head_length_tr(nonempty_improper_list(term(),term()) | term(),
non_neg_integer()) -> pos_integer().
head_length_tr([_Head | Tail], Len) ->
head_length_tr(Tail, Len + 1);
head_length_tr(_ImproperTail, Len) ->
Len.
-spec find_first(fun((T) -> boolean()), [T]) -> {position(),T} | 'none'.
find_first(Pred, List) ->
find_first_tr(Pred, List, 1).
-spec find_first_tr(fun((T) -> boolean()), [T], position()) ->
{position(),T} | 'none'.
find_first_tr(_Pred, [], _Pos) ->
none;
find_first_tr(Pred, [X | Rest], Pos) ->
case Pred(X) of
true -> {Pos, X};
false -> find_first_tr(Pred, Rest, Pos + 1)
end.
-spec filter(fun((T) -> boolean()), [T]) -> {[T],[position()]}.
filter(Pred, List) ->
filter_tr(Pred, lists:reverse(List), length(List), [], []).
-spec filter_tr(fun((T) -> boolean()), [T], position(), [T], [position()]) ->
{[T], [position()]}.
filter_tr(_Pred, [], _Pos, Trues, TrueLookup) ->
{Trues, TrueLookup};
filter_tr(Pred, [X | Rest], Pos, Trues, TrueLookup) ->
case Pred(X) of
true ->
filter_tr(Pred, Rest, Pos - 1, [X | Trues], [Pos | TrueLookup]);
false ->
filter_tr(Pred, Rest, Pos - 1, Trues, TrueLookup)
end.
-spec partition(fun((T) -> boolean()), [T]) ->
{[T], [position()], [T], [position()]}.
partition(Pred, List) ->
partition_tr(Pred, lists:reverse(List), length(List), [], [], [], []).
-spec partition_tr(fun((T) -> boolean()), [T], position(), [T], [position()],
[T], [position()]) -> {[T],[position()],[T],[position()]}.
partition_tr(_Pred, [], _Pos, Trues, TrueLookup, Falses, FalseLookup) ->
{Trues, TrueLookup, Falses, FalseLookup};
partition_tr(Pred, [X | Rest], Pos, Trues, TrueLookup, Falses, FalseLookup) ->
case Pred(X) of
true ->
partition_tr(Pred, Rest, Pos - 1, [X | Trues], [Pos | TrueLookup],
Falses, FalseLookup);
false ->
partition_tr(Pred, Rest, Pos - 1, Trues, TrueLookup, [X | Falses],
[Pos | FalseLookup])
end.
-spec insert([T], [position()], [T]) -> [T].
insert(Xs, Positions, Ys) ->
insert_tr(Xs, Positions, Ys, 1, []).
-spec insert_tr([T], [position()], [T], position(), [T]) -> [T].
insert_tr([], [], Ys, _Pos, Acc) ->
lists:reverse(Acc, Ys);
insert_tr([X | XsTail], [Pos | PosTail], Ys, Pos, Acc) ->
insert_tr(XsTail, PosTail, Ys, Pos + 1, [X | Acc]);
insert_tr(Xs, Positions, [Y | YsTail], Pos, Acc) ->
insert_tr(Xs, Positions, YsTail, Pos + 1, [Y | Acc]).
remove_tr(Xs , Positions , 1 , [ ] ) .
-spec remove_tr([T ] , [ position ( ) ] , position ( ) , [ T ] ) - > [ T ] .
lists : reverse(Acc , ) ;
remove_tr([_X | XsTail ] , [ Pos | PosTail ] , Pos , Acc ) - >
remove_tr(XsTail , PosTail , Pos + 1 , Acc ) ;
-spec unflatten([T], [proper_types:length()]) -> [[T]].
unflatten(List, Lens) ->
{[],RevSubLists} = lists:foldl(fun remove_n/2, {List,[]}, Lens),
lists:reverse(RevSubLists).
-spec remove_n(non_neg_integer(), {[T],[[T]]}) -> {[T],[[T]]}.
remove_n(N, {List,Acc}) ->
{Front,Back} = lists:split(N, List),
{Back, [Front | Acc]}.
-spec rand_start(proper_gen:seed()) -> 'ok'.
rand_start(Seed) ->
_ = rand:seed(exsplus, Seed),
ok.
-spec rand_restart(proper_gen:seed()) -> 'ok'.
rand_restart(Seed) ->
case get(?SEED_NAME) of
undefined ->
rand_start(Seed);
_ ->
ok
end.
-spec rand_reseed() -> 'ok'.
rand_reseed() ->
_ = rand:seed(exsplus, os:timestamp()),
ok.
-spec rand_stop() -> 'ok'.
rand_stop() ->
erase(?SEED_NAME),
ok.
-spec rand_int(non_neg_integer()) -> integer().
rand_int(Const) ->
round(rand_float(Const)).
-spec rand_non_neg_int(non_neg_integer()) -> non_neg_integer().
rand_non_neg_int(Const) ->
trunc(rand_non_neg_float(Const)).
-spec bounded_rand_non_neg_int(non_neg_integer(), non_neg_integer()) ->
non_neg_integer().
bounded_rand_non_neg_int(Const, Lim) when is_integer(Lim), Lim >= 0 ->
X = rand_non_neg_int(Const),
case X > Lim of
true -> bounded_rand_non_neg_int(Const, Lim);
false -> X
end.
-spec rand_int(integer(), integer()) -> integer().
rand_int(Low, High) when is_integer(Low), is_integer(High), Low =< High ->
Low + ?RANDOM_MOD:uniform(High - Low + 1) - 1.
-spec smart_rand_int(non_neg_integer(), integer(), integer()) -> integer().
smart_rand_int(Const, Low, High) ->
case High - Low =< ?SMALL_RANGE_THRESHOLD of
true -> rand_int(Low, High);
false -> wide_range_rand_int(Const, Low, High)
end.
-spec wide_range_rand_int(non_neg_integer(), integer(), integer()) ->
integer().
wide_range_rand_int(Const, Low, High) when Low >= 0 ->
Low + bounded_rand_non_neg_int(Const, High - Low);
wide_range_rand_int(Const, Low, High) when High =< 0 ->
High - bounded_rand_non_neg_int(Const, High - Low);
wide_range_rand_int(Const, Low, High) ->
case ?RANDOM_MOD:uniform(2) of
1 -> smart_rand_int(Const, 0, High);
2 -> smart_rand_int(Const, Low, 0)
end.
-spec rand_float(non_neg_integer()) -> float().
rand_float(Const) ->
X = rand_non_neg_float(Const),
case ?RANDOM_MOD:uniform(2) of
1 -> X;
2 -> -X
end.
-spec rand_non_neg_float(non_neg_integer()) -> float().
rand_non_neg_float(Const) when is_integer(Const), Const >= 0 ->
case ?RANDOM_MOD:uniform() of
1.0 -> rand_non_neg_float(Const);
X -> Const * zero_one_to_zero_inf(X)
end.
-spec rand_float(float(), float()) -> float().
rand_float(Low, High) when is_float(Low), is_float(High), Low =< High ->
Low + ?RANDOM_MOD:uniform() * (High - Low).
-spec zero_one_to_zero_inf(float()) -> float().
This function must return only non - negative values and map 0.0 to 0.0 , but
may be undefined at 1.0 .
zero_one_to_zero_inf(X) ->
X / math:sqrt(1 - X*X).
-spec distribute(non_neg_integer(), non_neg_integer()) -> [non_neg_integer()].
distribute(_Credits, 0) ->
[];
distribute(Credits, People) ->
jumble(distribute_tr(Credits, People, [])).
-spec distribute_tr(non_neg_integer(), pos_integer(), [non_neg_integer()]) ->
[non_neg_integer()].
distribute_tr(0, PeopleLeft, AccList) ->
lists:duplicate(PeopleLeft, 0) ++ AccList;
distribute_tr(CreditsLeft, 1, AccList) ->
[CreditsLeft | AccList];
distribute_tr(CreditsLeft, PeopleLeft, AccList) ->
YourCut = rand_int(0, CreditsLeft),
distribute_tr(CreditsLeft - YourCut, PeopleLeft - 1, [YourCut | AccList]).
-spec jumble([T]) -> [T].
jumble(List) ->
[X || {_, X} <- lists:sort([{?RANDOM_MOD:uniform(), X} || X <- List])].
-spec rand_choose([T,...]) -> {position(),T}.
rand_choose(Choices) when Choices =/= [] ->
Pos = rand_int(1, length(Choices)),
{Pos, lists:nth(Pos, Choices)}.
-spec freq_choose([{proper_types:frequency(),T},...]) -> {position(),T}.
freq_choose(Choices) when Choices =/= [] ->
AddFreq = fun({Freq,_},Acc) -> Freq + Acc end,
SumFreq = lists:foldl(AddFreq, 0, Choices),
freq_select(rand_int(1, SumFreq), Choices, 1).
-spec freq_select(proper_types:frequency(), [{proper_types:frequency(),T}],
position()) -> {position(),T}.
freq_select(N, [{Freq,Choice} | Rest], Pos) ->
case N =< Freq of
true ->
{Pos,Choice};
false ->
freq_select(N - Freq, Rest, Pos + 1)
end.
|
f9b0df95aa186275049680b080f8993ffcd81b51e5e323098dbf5987b4484b9b | sadiqj/ocaml-esp32 | primreq.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1999 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(* Determine the set of C primitives required by the given .cmo and .cma
files *)
open Config
open Cmo_format
module StringSet = Set.Make(struct type t = string let compare = compare end)
let defined = ref true
let used = ref false
let exclude_file = ref ""
let primitives = ref StringSet.empty
let scan_reloc = function
(Reloc_primitive s, _) -> primitives := StringSet.add s !primitives
| _ -> ()
let scan_prim s =
primitives := StringSet.add s !primitives
let scan_info cu =
if !used then List.iter scan_reloc cu.cu_reloc;
if !defined then List.iter scan_prim cu.cu_primitives
let scan_obj filename =
let ic = open_in_bin filename in
let buffer = really_input_string ic (String.length cmo_magic_number) in
if buffer = cmo_magic_number then begin
let cu_pos = input_binary_int ic in
seek_in ic cu_pos;
let cu = (input_value ic : compilation_unit) in
close_in ic;
scan_info cu
end else
if buffer = cma_magic_number then begin
let toc_pos = input_binary_int ic in
seek_in ic toc_pos;
let toc = (input_value ic : library) in
close_in ic;
List.iter scan_info toc.lib_units
end else begin
prerr_endline "Not an object file"; exit 2
end
let exclude filename =
let ic = open_in filename in
try
while true do
let s = input_line ic in
primitives := StringSet.remove s !primitives
done
with End_of_file -> close_in ic
| x -> close_in ic; raise x
let main() =
Arg.parse_expand
["-used", Arg.Unit(fun () -> used := true; defined := false),
"show primitives referenced in the object files";
"-defined", Arg.Unit(fun () -> defined := true; used := false),
"show primitives defined in the object files (default)";
"-all", Arg.Unit(fun () -> defined := true; used := true),
"show primitives defined or referenced in the object files";
"-exclude", Arg.String(fun s -> exclude_file := s),
"<file> don't print the primitives mentioned in <file>";
"-args", Arg.Expand Arg.read_arg,
"<file> Read additional newline separated command line arguments \n\
\ from <file>";
"-args0", Arg.Expand Arg.read_arg0,
"<file> Read additional NUL separated command line arguments from \n\
\ <file>";]
scan_obj
"Usage: primreq [options] <.cmo and .cma files>\nOptions are:";
if String.length !exclude_file > 0 then exclude !exclude_file;
StringSet.iter
(fun s ->
if s.[0] <> '%' then begin print_string s; print_newline() end)
!primitives;
exit 0
let _ = main ()
| null | https://raw.githubusercontent.com/sadiqj/ocaml-esp32/33aad4ca2becb9701eb90d779c1b1183aefeb578/tools/primreq.ml | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Determine the set of C primitives required by the given .cmo and .cma
files | , projet Cristal , INRIA Rocquencourt
Copyright 1999 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
open Config
open Cmo_format
module StringSet = Set.Make(struct type t = string let compare = compare end)
let defined = ref true
let used = ref false
let exclude_file = ref ""
let primitives = ref StringSet.empty
let scan_reloc = function
(Reloc_primitive s, _) -> primitives := StringSet.add s !primitives
| _ -> ()
let scan_prim s =
primitives := StringSet.add s !primitives
let scan_info cu =
if !used then List.iter scan_reloc cu.cu_reloc;
if !defined then List.iter scan_prim cu.cu_primitives
let scan_obj filename =
let ic = open_in_bin filename in
let buffer = really_input_string ic (String.length cmo_magic_number) in
if buffer = cmo_magic_number then begin
let cu_pos = input_binary_int ic in
seek_in ic cu_pos;
let cu = (input_value ic : compilation_unit) in
close_in ic;
scan_info cu
end else
if buffer = cma_magic_number then begin
let toc_pos = input_binary_int ic in
seek_in ic toc_pos;
let toc = (input_value ic : library) in
close_in ic;
List.iter scan_info toc.lib_units
end else begin
prerr_endline "Not an object file"; exit 2
end
let exclude filename =
let ic = open_in filename in
try
while true do
let s = input_line ic in
primitives := StringSet.remove s !primitives
done
with End_of_file -> close_in ic
| x -> close_in ic; raise x
let main() =
Arg.parse_expand
["-used", Arg.Unit(fun () -> used := true; defined := false),
"show primitives referenced in the object files";
"-defined", Arg.Unit(fun () -> defined := true; used := false),
"show primitives defined in the object files (default)";
"-all", Arg.Unit(fun () -> defined := true; used := true),
"show primitives defined or referenced in the object files";
"-exclude", Arg.String(fun s -> exclude_file := s),
"<file> don't print the primitives mentioned in <file>";
"-args", Arg.Expand Arg.read_arg,
"<file> Read additional newline separated command line arguments \n\
\ from <file>";
"-args0", Arg.Expand Arg.read_arg0,
"<file> Read additional NUL separated command line arguments from \n\
\ <file>";]
scan_obj
"Usage: primreq [options] <.cmo and .cma files>\nOptions are:";
if String.length !exclude_file > 0 then exclude !exclude_file;
StringSet.iter
(fun s ->
if s.[0] <> '%' then begin print_string s; print_newline() end)
!primitives;
exit 0
let _ = main ()
|
0779eb684fdc266820a328febafbcefdae937b76e21fede9c57c75a1e8052625 | facebookincubator/hsthrift | Predicate.hs | Copyright ( c ) Facebook , Inc. and its affiliates .
module Util.Predicate
( Pred
, predAnd
, predTrue
, predFalse
) where
import Control.Applicative (liftA2)
-- | Predicate function.
type Pred a = a -> Bool
| Combine two predicate functions to produce a new function that holds if
-- both input predicates hold.
predAnd :: Pred a -> Pred a -> Pred a
predAnd = liftA2 (&&)
-- | Predicate which returns True for all inputs
predTrue :: Pred a
predTrue _ = True
-- | Predicate which returns False for all inputs
predFalse :: Pred a
predFalse _ = False
| null | https://raw.githubusercontent.com/facebookincubator/hsthrift/d3ff75d487e9d0c2904d18327373b603456e7a01/common/util/Util/Predicate.hs | haskell | | Predicate function.
both input predicates hold.
| Predicate which returns True for all inputs
| Predicate which returns False for all inputs | Copyright ( c ) Facebook , Inc. and its affiliates .
module Util.Predicate
( Pred
, predAnd
, predTrue
, predFalse
) where
import Control.Applicative (liftA2)
type Pred a = a -> Bool
| Combine two predicate functions to produce a new function that holds if
predAnd :: Pred a -> Pred a -> Pred a
predAnd = liftA2 (&&)
predTrue :: Pred a
predTrue _ = True
predFalse :: Pred a
predFalse _ = False
|
2a959b8fbf74e2ddbd355bc6468ee8a428f8cd87fee2ddab6d28f26b31370193 | haskell-suite/base | Char.hs | # LANGUAGE Trustworthy #
# LANGUAGE CPP , NoImplicitPrelude #
-----------------------------------------------------------------------------
-- |
Module : Data .
Copyright : ( c ) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer :
-- Stability : stable
-- Portability : portable
--
The type and associated operations .
--
-----------------------------------------------------------------------------
module Data.Char
(
Char
-- * Character classification
-- | Unicode characters are divided into letters, numbers, marks,
-- punctuation, symbols, separators (including spaces) and others
-- (including control characters).
, isControl, isSpace
, isLower, isUpper, isAlpha, isAlphaNum, isPrint
, isDigit, isOctDigit, isHexDigit
, isLetter, isMark, isNumber, isPunctuation, isSymbol, isSeparator
-- ** Subranges
, isAscii, isLatin1
, isAsciiUpper, isAsciiLower
-- ** Unicode general categories
, GeneralCategory(..), generalCategory
-- * Case conversion
, toUpper, toLower, toTitle
-- * Single digit characters
, digitToInt
, intToDigit
-- * Numeric representations
, ord
, chr
-- * String representations
, showLitChar
, lexLitChar
, readLitChar
) where
#ifdef __GLASGOW_HASKELL__
import GHC.Base
import GHC.Arr (Ix)
import GHC.Char
import GHC.Real (fromIntegral)
import GHC.Show
import GHC.Read (Read, readLitChar, lexLitChar)
import GHC.Unicode
import GHC.Num
import GHC.Enum
#endif
#ifdef __HUGS__
import Hugs.Prelude (Ix)
import Hugs.Char
#endif
| Convert a single digit ' ' to the corresponding ' Int ' .
-- This function fails unless its argument satisfies 'isHexDigit',
-- but recognises both upper and lower-case hexadecimal digits
( i.e. @\'0\'@ .. @\'9\'@ , @\'a\'@ .. @\'f\'@ , @\'A\'@ .. @\'F\'@ ) .
digitToInt :: Char -> Int
digitToInt c
| isDigit c = ord c - ord '0'
| c >= 'a' && c <= 'f' = ord c - ord 'a' + 10
| c >= 'A' && c <= 'F' = ord c - ord 'A' + 10
| otherwise = error ("Char.digitToInt: not a digit " ++ show c) -- sigh
-- Fallback definitions for non-GHC compilers; under GHC these names are
-- presumably provided by the GHC.Unicode import above — TODO confirm.
#ifndef __GLASGOW_HASKELL__
-- | Selectors for the ASCII ranges @\'A\'@..@\'Z\'@ and @\'a\'@..@\'z\'@.
isAsciiUpper, isAsciiLower :: Char -> Bool
isAsciiLower c = c >= 'a' && c <= 'z'
isAsciiUpper c = c >= 'A' && c <= 'Z'
#endif
-- | Unicode General Categories (column 2 of the UnicodeData table)
-- in the order they are listed in the Unicode standard.
--
-- NOTE: eight constructors (those on lines garbled by extraction) were
-- missing; they are reconstructed here.  The declaration order is load
-- bearing: the derived 'Enum' instance must line up with the category
-- numbering used by 'generalCategory' below, and all eight reconstructed
-- constructors are pattern-matched by the classifier functions below
-- ('isLetter', 'isMark', 'isPunctuation', 'isSeparator').
data GeneralCategory
        = UppercaseLetter       -- ^ Lu: Letter, Uppercase
        | LowercaseLetter       -- ^ Ll: Letter, Lowercase
        | TitlecaseLetter       -- ^ Lt: Letter, Titlecase
        | ModifierLetter        -- ^ Lm: Letter, Modifier
        | OtherLetter           -- ^ Lo: Letter, Other
        | NonSpacingMark        -- ^ Mn: Mark, Non-Spacing
        | SpacingCombiningMark  -- ^ Mc: Mark, Spacing Combining
        | EnclosingMark         -- ^ Me: Mark, Enclosing
        | DecimalNumber         -- ^ Nd: Number, Decimal
        | LetterNumber          -- ^ Nl: Number, Letter
        | OtherNumber           -- ^ No: Number, Other
        | ConnectorPunctuation  -- ^ Pc: Punctuation, Connector
        | DashPunctuation       -- ^ Pd: Punctuation, Dash
        | OpenPunctuation       -- ^ Ps: Punctuation, Open
        | ClosePunctuation      -- ^ Pe: Punctuation, Close
        | InitialQuote          -- ^ Pi: Punctuation, Initial quote
        | FinalQuote            -- ^ Pf: Punctuation, Final quote
        | OtherPunctuation      -- ^ Po: Punctuation, Other
        | MathSymbol            -- ^ Sm: Symbol, Math
        | CurrencySymbol        -- ^ Sc: Symbol, Currency
        | ModifierSymbol        -- ^ Sk: Symbol, Modifier
        | OtherSymbol           -- ^ So: Symbol, Other
        | Space                 -- ^ Zs: Separator, Space
        | LineSeparator         -- ^ Zl: Separator, Line
        | ParagraphSeparator    -- ^ Zp: Separator, Paragraph
        | Control               -- ^ Cc: Other, Control
        | Format                -- ^ Cf: Other, Format
        | Surrogate             -- ^ Cs: Other, Surrogate
        | PrivateUse            -- ^ Co: Other, Private Use
        | NotAssigned           -- ^ Cn: Other, Not Assigned
        deriving (Eq, Ord, Enum, Read, Show, Bounded, Ix)
-- | The Unicode general category of the character.
generalCategory :: Char -> GeneralCategory
#if defined(__GLASGOW_HASKELL__)
-- GHC path: wgencat (presumably from the GHC.Unicode import — TODO confirm)
-- yields a numeric category index for the code point; the derived Enum
-- order of 'GeneralCategory' must match that numbering.
generalCategory c = toEnum $ fromIntegral $ wgencat $ fromIntegral $ ord c
#endif
#ifdef __HUGS__
-- Hugs path: the category index comes from the primUniGenCat primitive.
generalCategory c = toEnum (primUniGenCat c)
#endif
-- derived character classifiers

-- | Selects alphabetic Unicode characters (lower-case, upper-case and
-- title-case letters, plus letters of caseless scripts and modifiers
-- letters).  This function is equivalent to 'Data.Char.isAlpha'.
isLetter :: Char -> Bool
isLetter = letterish . generalCategory
  where
    letterish UppercaseLetter = True
    letterish LowercaseLetter = True
    letterish TitlecaseLetter = True
    letterish ModifierLetter  = True
    letterish OtherLetter     = True
    letterish _               = False
-- | Selects Unicode mark characters, e.g. accents and the like, which
-- combine with preceding letters.
isMark :: Char -> Bool
isMark = markish . generalCategory
  where
    markish NonSpacingMark       = True
    markish SpacingCombiningMark = True
    markish EnclosingMark        = True
    markish _                    = False
-- | Selects Unicode numeric characters, including digits from various
-- scripts, Roman numerals, etc.
isNumber :: Char -> Bool
isNumber = numeric . generalCategory
  where
    numeric DecimalNumber = True
    numeric LetterNumber  = True
    numeric OtherNumber   = True
    numeric _             = False
-- | Selects Unicode punctuation characters, including various kinds
-- of connectors, brackets and quotes.
isPunctuation :: Char -> Bool
isPunctuation = punct . generalCategory
  where
    punct ConnectorPunctuation = True
    punct DashPunctuation      = True
    punct OpenPunctuation      = True
    punct ClosePunctuation     = True
    punct InitialQuote         = True
    punct FinalQuote           = True
    punct OtherPunctuation     = True
    punct _                    = False
-- | Selects Unicode symbol characters, including mathematical and
-- currency symbols.
isSymbol :: Char -> Bool
isSymbol = symbolish . generalCategory
  where
    symbolish MathSymbol     = True
    symbolish CurrencySymbol = True
    symbolish ModifierSymbol = True
    symbolish OtherSymbol    = True
    symbolish _              = False
-- | Selects Unicode space and separator characters.
isSeparator :: Char -> Bool
isSeparator = sep . generalCategory
  where
    sep Space              = True
    sep LineSeparator      = True
    sep ParagraphSeparator = True
    sep _                  = False
| null | https://raw.githubusercontent.com/haskell-suite/base/1ee14681910c76d0a5a436c33ecf3289443e65ed/Data/Char.hs | haskell | ---------------------------------------------------------------------------
|
License : BSD-style (see the file libraries/base/LICENSE)
Maintainer :
Stability : stable
Portability : portable
---------------------------------------------------------------------------
* Character classification
| Unicode characters are divided into letters, numbers, marks,
punctuation, symbols, separators (including spaces) and others
(including control characters).
** Subranges
** Unicode general categories
* Case conversion
* Single digit characters
* Numeric representations
* String representations
This function fails unless its argument satisfies 'isHexDigit',
but recognises both upper and lower-case hexadecimal digits
sigh
^ Ll: Letter, Lowercase
^ Lm: Letter, Modifier
^ Lo: Letter, Other
^ Mc: Mark, Spacing Combining
^ Nd: Number, Decimal
^ Nl: Number, Letter
^ No: Number, Other
^ Pd: Punctuation, Dash
^ Pe: Punctuation, Close
^ Pi: Punctuation, Initial quote
^ Pf: Punctuation, Final quote
^ Po: Punctuation, Other
^ Sm: Symbol, Math
^ Sc: Symbol, Currency
^ Sk: Symbol, Modifier
^ So: Symbol, Other
^ Zs: Separator, Space
^ Cc: Other, Control
^ Cf: Other, Format
^ Cs: Other, Surrogate
^ Co: Other, Private Use
^ Cn: Other, Not Assigned
| The Unicode general category of the character.
derived character classifiers
| Selects alphabetic Unicode characters (lower-case, upper-case and
title-case letters, plus letters of caseless scripts and modifiers letters).
This function is equivalent to 'Data.Char.isAlpha'.
| Selects Unicode mark characters, e.g. accents and the like, which
combine with preceding letters.
| Selects Unicode numeric characters, including digits from various
| Selects Unicode punctuation characters, including various kinds
of connectors, brackets and quotes.
| Selects Unicode symbol characters, including mathematical and
currency symbols.
| Selects Unicode space and separator characters. | # LANGUAGE Trustworthy #
{-# LANGUAGE CPP, NoImplicitPrelude #-}
-- Module      :  Data.Char
-- Copyright   :  (c) The University of Glasgow 2001
-- The Char type and associated operations.
module Data.Char
(
Char
, isControl, isSpace
, isLower, isUpper, isAlpha, isAlphaNum, isPrint
, isDigit, isOctDigit, isHexDigit
, isLetter, isMark, isNumber, isPunctuation, isSymbol, isSeparator
, isAscii, isLatin1
, isAsciiUpper, isAsciiLower
, GeneralCategory(..), generalCategory
, toUpper, toLower, toTitle
, digitToInt
, intToDigit
, ord
, chr
, showLitChar
, lexLitChar
, readLitChar
) where
#ifdef __GLASGOW_HASKELL__
import GHC.Base
import GHC.Arr (Ix)
import GHC.Char
import GHC.Real (fromIntegral)
import GHC.Show
import GHC.Read (Read, readLitChar, lexLitChar)
import GHC.Unicode
import GHC.Num
import GHC.Enum
#endif
#ifdef __HUGS__
import Hugs.Prelude (Ix)
import Hugs.Char
#endif
-- | Convert a single digit 'Char' to the corresponding 'Int'.
-- This function fails unless its argument satisfies 'isHexDigit',
-- but recognises both upper and lower-case hexadecimal digits
-- (i.e. @\'0\'@..@\'9\'@, @\'a\'@..@\'f\'@, @\'A\'@..@\'F\'@).
digitToInt :: Char -> Int
digitToInt c
  | isDigit c            = ord c - ord '0'
  | c >= 'a' && c <= 'f' = ord c - ord 'a' + 10
  | c >= 'A' && c <= 'F' = ord c - ord 'A' + 10
  -- The fall-through guard was lost in extraction; without it a non-hex
  -- argument dies with an opaque "non-exhaustive guards" error instead of
  -- the intended diagnostic below.
  | otherwise = error ("Char.digitToInt: not a digit " ++ show c)
#ifndef __GLASGOW_HASKELL__
-- Fallbacks for non-GHC compilers; ASCII-range case selectors.
isAsciiUpper, isAsciiLower :: Char -> Bool
isAsciiLower c = c >= 'a' && c <= 'z'
isAsciiUpper c = c >= 'A' && c <= 'Z'
#endif
-- | Unicode General Categories (column 2 of the UnicodeData table)
-- in the order they are listed in the Unicode standard.
--
-- NOTE: this declaration lost all of its constructors to extraction
-- garbling; they are reconstructed here.  Constructor order is load
-- bearing: the derived 'Enum' instance must match the category numbering
-- consumed by 'generalCategory', and the classifier functions below
-- pattern-match every constructor name.
data GeneralCategory
        = UppercaseLetter       -- ^ Lu: Letter, Uppercase
        | LowercaseLetter       -- ^ Ll: Letter, Lowercase
        | TitlecaseLetter       -- ^ Lt: Letter, Titlecase
        | ModifierLetter        -- ^ Lm: Letter, Modifier
        | OtherLetter           -- ^ Lo: Letter, Other
        | NonSpacingMark        -- ^ Mn: Mark, Non-Spacing
        | SpacingCombiningMark  -- ^ Mc: Mark, Spacing Combining
        | EnclosingMark         -- ^ Me: Mark, Enclosing
        | DecimalNumber         -- ^ Nd: Number, Decimal
        | LetterNumber          -- ^ Nl: Number, Letter
        | OtherNumber           -- ^ No: Number, Other
        | ConnectorPunctuation  -- ^ Pc: Punctuation, Connector
        | DashPunctuation       -- ^ Pd: Punctuation, Dash
        | OpenPunctuation       -- ^ Ps: Punctuation, Open
        | ClosePunctuation      -- ^ Pe: Punctuation, Close
        | InitialQuote          -- ^ Pi: Punctuation, Initial quote
        | FinalQuote            -- ^ Pf: Punctuation, Final quote
        | OtherPunctuation      -- ^ Po: Punctuation, Other
        | MathSymbol            -- ^ Sm: Symbol, Math
        | CurrencySymbol        -- ^ Sc: Symbol, Currency
        | ModifierSymbol        -- ^ Sk: Symbol, Modifier
        | OtherSymbol           -- ^ So: Symbol, Other
        | Space                 -- ^ Zs: Separator, Space
        | LineSeparator         -- ^ Zl: Separator, Line
        | ParagraphSeparator    -- ^ Zp: Separator, Paragraph
        | Control               -- ^ Cc: Other, Control
        | Format                -- ^ Cf: Other, Format
        | Surrogate             -- ^ Cs: Other, Surrogate
        | PrivateUse            -- ^ Co: Other, Private Use
        | NotAssigned           -- ^ Cn: Other, Not Assigned
        deriving (Eq, Ord, Enum, Read, Show, Bounded, Ix)
-- | The Unicode general category of the character.
generalCategory :: Char -> GeneralCategory
#if defined(__GLASGOW_HASKELL__)
-- GHC: convert the numeric category index of the code point to the
-- enumeration (the Enum order of 'GeneralCategory' must match wgencat).
generalCategory c = toEnum $ fromIntegral $ wgencat $ fromIntegral $ ord c
#endif
#ifdef __HUGS__
-- Hugs: the category index comes from the primUniGenCat primitive.
generalCategory c = toEnum (primUniGenCat c)
#endif

-- | Selects alphabetic Unicode characters (lower-case, upper-case and
-- title-case letters, plus letters of caseless scripts and modifiers
-- letters).
isLetter :: Char -> Bool
isLetter c = case generalCategory c of
        UppercaseLetter -> True
        LowercaseLetter -> True
        TitlecaseLetter -> True
        ModifierLetter -> True
        OtherLetter -> True
        _ -> False

-- | Selects Unicode mark characters, e.g. accents and the like, which
-- combine with preceding letters.
isMark :: Char -> Bool
isMark c = case generalCategory c of
        NonSpacingMark -> True
        SpacingCombiningMark -> True
        EnclosingMark -> True
        _ -> False

-- | Selects Unicode numeric characters, including digits from various
-- scripts, Roman numerals, etc.
isNumber :: Char -> Bool
isNumber c = case generalCategory c of
        DecimalNumber -> True
        LetterNumber -> True
        OtherNumber -> True
        _ -> False

-- | Selects Unicode punctuation characters, including various kinds
-- of connectors, brackets and quotes.
isPunctuation :: Char -> Bool
isPunctuation c = case generalCategory c of
        ConnectorPunctuation -> True
        DashPunctuation -> True
        OpenPunctuation -> True
        ClosePunctuation -> True
        InitialQuote -> True
        FinalQuote -> True
        OtherPunctuation -> True
        _ -> False

-- | Selects Unicode symbol characters, including mathematical and
-- currency symbols.
isSymbol :: Char -> Bool
isSymbol c = case generalCategory c of
        MathSymbol -> True
        CurrencySymbol -> True
        ModifierSymbol -> True
        OtherSymbol -> True
        _ -> False

-- | Selects Unicode space and separator characters.
isSeparator :: Char -> Bool
isSeparator c = case generalCategory c of
        Space -> True
        LineSeparator -> True
        ParagraphSeparator -> True
        _ -> False
|
daf326050397961e9c06a5857c575662ddeb99c58ee8fc6d6aed322c06f7980a | snoyberg/http-client | Create.hs | {-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE CPP #-}
{-|

This script parses the public suffix list, and constructs a data structure which can
be used with the isSuffix function in Lookup.hs. It exports a GSink which produces
the opaque 'DataStructure' and can be fed any Source as input.

This makes a few assumptions about the information in the public suffix list:
namely, that no rule is a suffix of another rule. For example, if there is a rule

abc.def.ghi

then there is no other rule

def.ghi

or

!def.ghi

The actual data structure involved here is a tree where the nodes have no value and
the edges are DNS labels. There are two trees: one to handle the exception rules,
and one to handle the regular rules.

-}
module Network.PublicSuffixList.Create (PublicSuffixListException, sink) where
import Control.Exception
import qualified Data.ByteString as BS
import qualified Data.Conduit as C
import qualified Data.Conduit.List as CL
import qualified Data.Conduit.Text as CT
import qualified Data.Map as M
import qualified Data.Text as T
import Data.Typeable
import Text.IDNA
import Control.Monad.Catch (MonadThrow)
import Network.PublicSuffixList.Types
-- | Thrown (via 'throw' inside 'foldingFunction') when a label from the
-- public suffix list cannot be converted to its ASCII (IDNA) form.
data PublicSuffixListException = PublicSuffixListException
  deriving (Show, Typeable)

instance Exception PublicSuffixListException
-- | Add one rule — a list of edge labels — to a rule tree, descending into
-- (or creating) one child per label.  An exhausted label list resets the
-- node to the empty tree 'def', mirroring the original behaviour.
insert :: (Ord e) => Tree e -> [e] -> Tree e
insert _ [] = def
insert t (p : ps) =
    t { children = M.insert p (insert next ps) (children t) }
  where
    -- The existing subtree for this label, or a fresh empty one.
    next = M.findWithDefault def p (children t)
-- | Fold one line of the public suffix list into the pair of rule trees.
-- Blank lines and comment lines (starting with @//@) are skipped; a line
-- starting with @!@ is an exception rule and is inserted into the second
-- tree; anything else is a regular rule and goes into the first tree.
-- Guard order matters: emptiness must be ruled out before 'T.head'.
foldingFunction :: DataStructure -> T.Text -> DataStructure
foldingFunction d@(rules, exceptions) s'
    | T.null s = d
    | T.take 2 s == "//" = d
    | T.head s == '!' = (rules, insert exceptions $ labelList $ T.tail s)
    | otherwise = (insert rules $ labelList s, exceptions)
  where -- Only the first whitespace-separated word of the line counts.
        ss = filter (not . T.null) $ T.words s'
        s
          | null ss = ""
          | otherwise = head ss
        -- Split on dots, IDNA-encode each label, and reverse so the
        -- rightmost (most significant) label comes first.
        labelList = reverse . map internationalize . T.split (== '.')
        -- @"*"@ wildcards pass through unchanged; other labels are
        -- lower-cased and ASCII-encoded.  Encoding failure throws
        -- 'PublicSuffixListException' (an impure exception from pure code).
        internationalize str
          | str == "*" = str
          | otherwise = case toASCII False True $ T.toLower str of
              Just x -> x
              Nothing -> throw PublicSuffixListException
{-|
Generate the opaque 'DataStructure'
-}
-- | Conduit sink: decode the incoming bytes as UTF-8, split into lines,
-- and fold each line into the two rule trees, starting from empty ('def')
-- trees.
sink :: MonadThrow m => C.Sink BS.ByteString m DataStructure
sink = CT.decode CT.utf8 C.=$ CT.lines C.=$ CL.fold foldingFunction (def, def)
| null | https://raw.githubusercontent.com/snoyberg/http-client/df5b154a70cee2d94f66eccf18d6b821073b7cfb/http-client/publicsuffixlist/Network/PublicSuffixList/Create.hs | haskell | # LANGUAGE DeriveDataTypeable #
# LANGUAGE OverloadedStrings # | # LANGUAGE NoMonomorphismRestriction #
# LANGUAGE CPP #
|
This script parses the public suffix list , and constructs a data structure which can
be used with the isSuffix function in Lookup.hs . It exports a GSink which produces
the opaque ' DataStructure ' and can be fed any Source as input .
This makes an few assumption about the information in the public suffix list :
namely , that no rule is a suffix of another rule . For example , if there is a rule
abc.def.ghi
then there is no other rule
def.ghi
or
! def.ghi
The actual data structure involved here is a tree where the nodes have no value and
the edges are DNS labels . There are two trees : one to handle the exception rules ,
and one to handle the regular rules .
This script parses the public suffix list, and constructs a data structure which can
be used with the isSuffix function in Lookup.hs. It exports a GSink which produces
the opaque 'DataStructure' and can be fed any Source as input.
This makes an few assumption about the information in the public suffix list:
namely, that no rule is a suffix of another rule. For example, if there is a rule
abc.def.ghi
then there is no other rule
def.ghi
or
!def.ghi
The actual data structure involved here is a tree where the nodes have no value and
the edges are DNS labels. There are two trees: one to handle the exception rules,
and one to handle the regular rules.
-}
module Network.PublicSuffixList.Create (PublicSuffixListException, sink) where
import Control.Exception
import qualified Data.ByteString as BS
import qualified Data.Conduit as C
import qualified Data.Conduit.List as CL
import qualified Data.Conduit.Text as CT
import qualified Data.Map as M
import qualified Data.Text as T
import Data.Typeable
import Text.IDNA
import Control.Monad.Catch (MonadThrow)
import Network.PublicSuffixList.Types
data PublicSuffixListException = PublicSuffixListException
deriving (Show, Typeable)
instance Exception PublicSuffixListException
-- | Add one rule (a list of edge labels) to a rule tree: walk the list,
-- descending into an existing child for each label or creating a fresh
-- empty subtree ('def') where none exists.
insert :: (Ord e) => Tree e -> [e] -> Tree e
insert _ [] = def
insert t (p : ps) = case M.lookup p $ children t of
    Nothing -> t { children = M.insert p (insert def ps) $ children t }
    Just l -> t { children = M.insert p (insert l ps) $ children t }
-- | Fold one line of the public suffix list into the pair of rule trees:
-- skip blanks and @//@ comments, route @!@-prefixed exception rules to the
-- second tree and regular rules to the first.
foldingFunction :: DataStructure -> T.Text -> DataStructure
foldingFunction d@(rules, exceptions) s'
    | T.null s = d
    | T.take 2 s == "//" = d
    | T.head s == '!' = (rules, insert exceptions $ labelList $ T.tail s)
    | otherwise = (insert rules $ labelList s, exceptions)
  where -- Only the first whitespace-separated word of the line counts.
        ss = filter (not . T.null) $ T.words s'
        s
          | null ss = ""
          | otherwise = head ss
        -- Dots separate labels; reversed so the rightmost label is first.
        labelList = reverse . map internationalize . T.split (== '.')
        -- IDNA-encode non-wildcard labels; failure throws
        -- 'PublicSuffixListException'.
        internationalize str
          | str == "*" = str
          | otherwise = case toASCII False True $ T.toLower str of
              Just x -> x
              Nothing -> throw PublicSuffixListException
Generate the opaque ' DataStructure '
Generate the opaque 'DataStructure'
-}
-- | Conduit sink: UTF-8 decode, split into lines, fold every line into the
-- two rule trees starting from empty ('def') trees.
sink :: MonadThrow m => C.Sink BS.ByteString m DataStructure
sink = CT.decode CT.utf8 C.=$ CT.lines C.=$ CL.fold foldingFunction (def, def)
|
a6588bee2812a95223d8aa2f48fe09d71e6838510d3a30d4af28c4b855b008de | tweag/sparkle | RDD.hs | -- | Bindings for
-- <org.apache.spark.api.java.JavaRDD>.
--
-- Please refer to that documentation for the meaning of each binding.

{-# LANGUAGE DataKinds #-}
{-# LANGUAGE ExplicitForAll #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE LinearTypes #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedLabels #-}
{-# LANGUAGE QualifiedDo #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StaticPointers #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
module Control.Distributed.Spark.Safe.RDD
( RDD(..)
, isEmpty
, toDebugString
, cache
, unpersist
, repartition
, coalesce
, filter
, map
, module Choice
, mapPartitions
, mapPartitionsWithIndex
, fold
, reduce
, slowReduce
, aggregate
, slowAggregate
, treeAggregate
, count
, mean
, collect
, collectJ
, take
, takeJ
, distinct
, intersection
, union
, sortBy
, sample
, randomSplit
, first
, firstJ
, getNumPartitions
, saveAsTextFile
, subtract
$ reading_files
) where
import qualified Prelude
import Prelude.Linear hiding (IO, filter, map, subtract, take, zero)
import qualified Prelude.Linear as PL
import System.IO.Linear as LIO
import Control.Functor.Linear as Linear
import qualified Data.Functor.Linear as Data
import Control.Distributed.Closure
import Control.Distributed.Spark.Safe.Closure (reflectFun)
import Data.Choice (Choice)
import qualified Data.Choice as Choice
import Data.Int
import Data.Text (Text)
import qualified Data.Text as Text
import Data.Typeable (Typeable)
import Data.Vector.Storable as V (fromList)
-- NOTE: We need this in order to be able to use newLocalRef and deleteLocalRef,
-- as the typechecker needs to be able to see the unsafe J data constructor to
-- derive Coercible instances
import qualified Foreign.JNI.Types
import Foreign.JNI.Safe
import Foreign.JNI.Types.Safe
import Language.Java.Safe
import Language.Java.Inline.Safe
import Streaming (Stream, Of, effect)
import qualified Streaming.Prelude as S (fold_, uncons, yield)
-- | A reference to a JVM-side @JavaRDD@ object.  The type parameter @a@
-- is phantom (it does not occur on the right-hand side) and tracks the
-- Haskell-side element type.
newtype RDD a = RDD (J ('Class "org.apache.spark.api.java.JavaRDD"))
  deriving Coercible
-- | Delegates to @JavaRDD.cache()@.
cache :: RDD a %1 -> IO (RDD a)
cache rdd = [java| $rdd.cache() |]

-- | Delegates to @JavaRDD.unpersist(blocking)@.
unpersist :: RDD a %1 -> Bool -> IO (RDD a)
unpersist rdd blocking = [java| $rdd.unpersist($blocking) |]

-- | Delegates to @JavaRDD.isEmpty()@.
isEmpty :: RDD a %1 -> IO (Ur Bool)
isEmpty rdd = [java| $rdd.isEmpty() |]

-- | Delegates to @JavaRDD.toDebugString()@, marshalling the Java string
-- back into a Haskell 'Text'.
toDebugString :: RDD a %1 -> IO (Ur Text)
toDebugString rdd = [java| $rdd.toDebugString() |] >>= reify_

-- | Delegates to @JavaRDD.repartition(n)@.
repartition :: Int32 -> RDD a %1 -> IO (RDD a)
repartition n rdd = [java| $rdd.repartition($n) |]

-- | Delegates to @JavaRDD.coalesce(n)@.
coalesce :: Int32 -> RDD a %1 -> IO (RDD a)
coalesce n rdd = [java| $rdd.coalesce($n) |]
-- | Keep only the elements satisfying the serializable predicate.  The
-- closure is reflected into a JVM function object and handed to
-- @JavaRDD.filter@.
filter
  :: (Static (Reify a), Typeable a)
  => Closure (a -> Bool)
  -> RDD a
  %1 -> IO (RDD a)
filter clos rdd = Linear.do
    f <- ungeneric <$> reflectFun (sing :: Sing 1) clos
    [java| $rdd.filter($f) |]
-- | Apply the serializable function to every element, via @JavaRDD.map@.
map
  :: (Static (Reify a), Static (Reflect b), Typeable a, Typeable b)
  => Closure (a -> b)
  -> RDD a
  %1 -> IO (RDD b)
map clos rdd = Linear.do
    f <- ungeneric <$> reflectFun (sing :: Sing 1) clos
    [java| $rdd.map($f) |]
-- | Map a stream transformation over every partition of the RDD.
-- Implemented via 'mapPartitionsWithIndex', discarding the partition index
-- (note the @static const@ wrapper).
mapPartitions
  :: ( Static (Reify (Stream (Of a) PL.IO ()))
     , Static (Reflect (Stream (Of b) PL.IO ()))
     , Typeable a
     , Typeable b
     )
  => Choice "preservePartitions"
  -> Closure (Stream (Of a) PL.IO () -> Stream (Of b) PL.IO ())
  -> RDD a
  %1 -> IO (RDD b)
mapPartitions preservePartitions clos rdd =
    mapPartitionsWithIndex preservePartitions (closure (static const) `cap` clos) rdd

-- | Map a stream transformation, which additionally receives the partition
-- index, over every partition; delegates to
-- @JavaRDD.mapPartitionsWithIndex@.
mapPartitionsWithIndex
  :: ( Static (Reify (Stream (Of a) PL.IO ()))
     , Static (Reflect (Stream (Of b) PL.IO ()))
     , Typeable a
     , Typeable b
     )
  => Choice "preservePartitions"
  -> Closure (Int32 -> Stream (Of a) PL.IO () -> Stream (Of b) PL.IO ())
  -> RDD a
  %1 -> IO (RDD b)
mapPartitionsWithIndex preservePartitions clos rdd = Linear.do
    f <- ungeneric <$> reflectFun (sing :: Sing 2) clos
    [java| $rdd.mapPartitionsWithIndex($f, $preservePartitions) |]
-- NOTE: we cannot implement foldJ at this time without the ability to
-- write instances for linear static closures

-- | Delegates to @JavaRDD.fold(zero, f)@: the operator and the zero value
-- are reflected to the JVM, and the (type-erased) result is reified back.
fold
  :: (Static (Reify a), Static (Reflect a), Typeable a)
  => Closure (a -> a -> a)
  -> a
  -> RDD a
  %1 -> IO (Ur a)
fold clos zero rdd = Linear.do
    f <- ungeneric <$> reflectFun (sing :: Sing 2) clos
    jzero <- upcast <$> reflect zero
    res :: JObject <- [java| $rdd.fold($jzero, $f) |]
    reify_ (unsafeCast res)
-- | Delegates directly to @JavaRDD.reduce@, reflecting the binary operator
-- to the JVM (so it runs element-by-element on the Java side; see 'reduce'
-- for the stream-per-partition variant).
slowReduce
  :: (Static (Reify a), Static (Reflect a), Typeable a)
  => Closure (a -> a -> a)
  -> RDD a
  %1 -> IO (Ur a)
slowReduce clos rdd = Linear.do
    f <- ungeneric <$> reflectFun (sing :: Sing 2) clos
    res :: JObject <- [java| $rdd.reduce($f) |]
    reify_ (unsafeCast res)
-- | A version of reduce implemented in terms of 'mapPartitions'.
--
-- NOTE: This is not defined in terms of 'aggregate' because we don't have a
-- unit element here.
reduce
  :: ( Static (Reify a)
     , Static (Reflect a)
     , Static (Reify (Stream (Of a) PL.IO ()))
     , Static (Reflect (Stream (Of a) PL.IO ()))
     , Typeable a
     )
  => Closure (a -> a -> a)
  -> RDD a
  %1 -> IO (Ur a)
reduce combOp rdd0 =
    mapPartitions (Choice.Don't #preservePartitions) combOp' rdd0 >>= slowReduce combOp
  where
    -- Per partition: fold the stream onto its first element; an empty
    -- partition contributes an empty stream (mempty).
    combOp' = closure (static (\f s -> effect $ S.uncons s Prelude.>>= \case
                Just (e, ss) -> S.yield Prelude.<$> S.fold_ f e id ss
                Nothing -> Prelude.return Prelude.mempty
              ))
              `cap` combOp
-- | Sort by the key produced by the serializable projection function;
-- delegates to @JavaRDD.sortBy(f, ascending, numPartitions)@.
sortBy
  :: (Static (Reify a), Static (Reflect b), Typeable a, Typeable b)
  => Closure (a -> b)
  -> Choice "ascending"
  -> Int32
  -- ^ Number of partitions.
  -> RDD a
  %1 -> IO (RDD a)
sortBy clos ascending numPartitions rdd = Linear.do
    f <- ungeneric <$> reflectFun (sing :: Sing 1) clos
    [java| $rdd.sortBy($f, $ascending, $numPartitions) |]
-- | Delegates directly to @JavaRDD.aggregate(zero, seqOp, combOp)@ with
-- both operators reflected to the JVM (element-by-element; see 'aggregate'
-- for the stream-per-partition variant).
slowAggregate
  :: (Static (Reify a), Static (Reify b), Static (Reflect b), Typeable a, Typeable b)
  => Closure (b -> a -> b)
  -> Closure (b -> b -> b)
  -> b
  -> RDD a
  %1 -> IO (Ur b)
slowAggregate seqOp combOp zero rdd = Linear.do
    jseqOp <- ungeneric <$> reflectFun (sing :: Sing 2) seqOp
    jcombOp <- ungeneric <$> reflectFun (sing :: Sing 2) combOp
    jzero <- upcast <$> reflect zero
    res :: JObject <- [java| $rdd.aggregate($jzero, $jseqOp, $jcombOp) |]
    reify_ (unsafeCast res)
-- | A version of aggregate implemented in terms of 'mapPartitions'.
aggregate
  :: ( Static (Reify (Stream (Of a) PL.IO ()))
     , Static (Reflect (Stream (Of b) PL.IO ()))
     , Static (Reify b)
     , Static (Reflect b)
     , Static (Serializable b)
     , Typeable a
     )
  => Closure (b -> a -> b)
  -> Closure (b -> b -> b)
  -> b
  -> RDD a
  %1 -> IO (Ur b)
aggregate seqOp combOp zero rdd0 =
    mapPartitions (Choice.Don't #preservePartitions) seqOp' rdd0 >>= slowReduce combOp
  where
    -- Per partition: fold the element stream from 'zero' with seqOp,
    -- yielding a single partial result; the partials are then combined
    -- with combOp via 'slowReduce'.
    seqOp' = closure (static (\f e s -> effect (S.yield Prelude.<$> S.fold_ f e id s)))
             `cap` seqOp
             `cap` cpure closureDict zero
-- | Delegates to @JavaRDD.treeAggregate(zero, seqOp, combOp, depth)@.
treeAggregate
  :: (Static (Reify a), Static (Reify b), Static (Reflect b), Typeable a, Typeable b)
  => Closure (b -> a -> b)
  -> Closure (b -> b -> b)
  -> b
  -> Int32
  -> RDD a
  %1 -> IO (Ur b)
treeAggregate seqOp combOp zero depth rdd = Linear.do
    jseqOp <- ungeneric <$> reflectFun (sing :: Sing 2) seqOp
    jcombOp <- ungeneric <$> reflectFun (sing :: Sing 2) combOp
    jzero <- upcast <$> reflect zero
    res :: JObject <- [java| $rdd.treeAggregate($jzero, $jseqOp, $jcombOp, $depth) |]
    reify_ (unsafeCast res)
-- | Delegates to @JavaRDD.count()@.
count :: RDD a %1 -> IO (Ur Int64)
count rdd =
    [java| $rdd.count() |] >>= reify_

-- | Delegates to @JavaDoubleRDD.mean()@ after an unboxing @mapToDouble@.
mean :: RDD Double %1 -> IO (Ur Double)
mean rdd =
    [java| $rdd.mapToDouble(r -> (double)r).mean() |]

-- | Delegates to @JavaRDD.subtract@.
subtract :: RDD a %1 -> RDD a %1 -> IO (RDD a)
subtract rdd1 rdd2 = [java| $rdd1.subtract($rdd2) |]
-- $reading_files
--
-- ==== Note [Reading files]
-- #reading_files#
--
-- File-reading functions might produce a particular form of RDD (HadoopRDD)
-- whose elements are sensitive to the order in which they are used. If
-- the elements are not used sequentially, then the RDD might show incorrect
-- contents [1].
--
-- In practice, most functions respect this access pattern, but 'collect' and
-- 'take' do not. A workaround is to use a copy of the RDD created with
-- 'map' before using those functions.
--
-- [1] https://issues.apache.org/jira/browse/SPARK-1018
-- | See Note [Reading Files] ("Control.Distributed.Spark.RDD#reading_files").
collect :: Reify a => RDD a %1 -> IO (Ur [a])
collect rdd = Linear.do
    arr :: JObjectArray <- [java| $rdd.collect().toArray() |]
    reify_ (unsafeCast arr)

-- | Like 'collect', but keeps the elements as JVM references instead of
-- reifying them into Haskell values.
collectJ :: forall a. (Coercible a, IsReferenceType (Ty a)) => RDD a %1 -> IO [a]
collectJ rdd = Linear.do
    arr :: JObjectArray <- [java| $rdd.collect().toArray() |]
    refList :: [J (Ty a)] <- fromArray (unsafeCast arr)
    pure $ Data.fmap (unsafeUncoerce . coerce) refList
-- | See Note [Reading Files] ("Control.Distributed.Spark.RDD#reading_files").
take :: Reify a => Int32 -> RDD a %1 -> IO (Ur [a])
take n rdd = Linear.do
    arr :: JObjectArray <- [java| $rdd.take($n).toArray() |]
    reify_ (unsafeCast arr)

-- | Like 'take', but keeps the elements as JVM references instead of
-- reifying them into Haskell values.
takeJ :: forall a. (Coercible a, IsReferenceType (Ty a)) => Int32 -> RDD a %1 -> IO [a]
takeJ n rdd = Linear.do
    arr :: JObjectArray <- [java| $rdd.take($n).toArray() |]
    refList :: [J (Ty a)] <- fromArray (unsafeCast arr)
    pure $ Data.fmap (unsafeUncoerce . coerce) refList
-- | Delegates to @JavaRDD.distinct()@.
distinct :: RDD a %1 -> IO (RDD a)
distinct rdd = [java| $rdd.distinct() |]

-- | Delegates to @JavaRDD.intersection@.
intersection :: RDD a %1 -> RDD a %1 -> IO (RDD a)
intersection rdd1 rdd2 = [java| $rdd1.intersection($rdd2) |]

-- | Delegates to @JavaRDD.union@.
union :: RDD a %1 -> RDD a %1 -> IO (RDD a)
union rdd1 rdd2 = [java| $rdd1.union($rdd2) |]
-- | Delegates to @JavaRDD.sample(replacement, frac)@.
sample
  :: RDD a
  -> Choice "replacement" -- ^ Whether to sample with replacement
  -> Double -- ^ fraction of elements to keep
  -> IO (RDD a)
sample rdd replacement frac = [java| $rdd.sample($replacement, $frac) |]
-- | Split the RDD randomly into one RDD per weight, via
-- @JavaRDD.randomSplit@, then walk the returned Java array back into a
-- Haskell list.
randomSplit
  :: RDD a
  %1 -> [Double] -- ^ Statistical weights of RDD fractions.
  -> IO [RDD a]
randomSplit rdd weights = Linear.do
    jweights <- reflect $ V.fromList weights
    arr :: JObjectArray <- [java| $rdd.randomSplit($jweights) |]
    (arr', Ur n) <- getArrayLength arr
    go [] arr' (fromEnum n)
  where
    -- Fold-like helper to thread the array reference through linearly,
    -- counting the index down and deleting the local ref at the end.
    -- NOTE(review): the countdown starts at the array *length*, so the
    -- first getObjectArrayElement call uses index n rather than n - 1,
    -- which looks out of bounds — confirm against the upstream source
    -- whether this should start at (fromEnum n - 1).
    go :: [RDD a] %1 -> JObjectArray %1 -> Int -> IO [RDD a]
    go acc arr' n
      | n == -1 = pure acc <* deleteLocalRef arr'
      | otherwise = Linear.do
          (arr'', elt) <- getObjectArrayElement arr' (toEnum n)
          go ((RDD . unsafeCast) elt : acc) arr'' (n - 1)
-- | First element of the RDD, reified into a Haskell value
-- (@JavaRDD.first()@).
first :: Reify a => RDD a %1 -> IO (Ur a)
first rdd = Linear.do
    res :: JObject <- [java| $rdd.first() |]
    reify_ (unsafeCast res)

-- | Like 'first', but keeps the element as a JVM reference.
firstJ :: forall a. Coercible a => RDD a %1 -> IO a
firstJ rdd = Linear.do
    res :: JObject <- [java| $rdd.first() |]
    ref :: J (Ty a) <- pure (unsafeCast res)
    pure . unsafeUncoerce . JObject $ ref
-- | Delegates to @JavaRDD.getNumPartitions()@.
getNumPartitions :: RDD a %1 -> IO (Ur Int32)
getNumPartitions rdd = [java| $rdd.getNumPartitions() |]

-- | Write the RDD as text under the given path
-- (@JavaRDD.saveAsTextFile@).
saveAsTextFile :: RDD a %1 -> FilePath -> IO ()
saveAsTextFile rdd fp = Linear.do
    jfp <- reflect (Text.pack fp)
    [java| { $rdd.saveAsTextFile($jfp); } |]
| null | https://raw.githubusercontent.com/tweag/sparkle/24b8c452e67b414f93e4d7fc8c54c7175ddb1b2f/src/linear-types/Control/Distributed/Spark/Safe/RDD.hs | haskell | | Bindings for
Please refer to that documentation for the meaning of each binding.
# LANGUAGE ExplicitForAll #
# LANGUAGE LinearTypes #
NOTE: We need this in order to be able to use newLocalRef and deleteLocalRef,
as the typechecker needs to be able to see the unsafe J data constructor to
derive Coercible instances
NOTE: we cannot implement foldJ at this time without the ability to
write instances for linear static closures
NOTE: This is not defined in terms of 'aggregate' because we don't have a
unit element here.
^ Number of partitions.
==== Note [Reading files]
File-reading functions might produce a particular form of RDD (HadoopRDD)
whose elements are sensitive to the order in which they are used. If
In practice, most functions respect this access pattern, but 'collect' and
'map' before using those functions.
^ Whether to sample with replacement
^ fraction of elements to keep
^ Statistical weights of RDD fractions. | < org.apache.spark.api.java . JavaRDD > .
# LANGUAGE DataKinds #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE LambdaCase #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE OverloadedLabels #
# LANGUAGE QualifiedDo #
# LANGUAGE QuasiQuotes #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE StaticPointers #
# LANGUAGE TypeFamilies #
# LANGUAGE UndecidableInstances #
module Control.Distributed.Spark.Safe.RDD
( RDD(..)
, isEmpty
, toDebugString
, cache
, unpersist
, repartition
, coalesce
, filter
, map
, module Choice
, mapPartitions
, mapPartitionsWithIndex
, fold
, reduce
, slowReduce
, aggregate
, slowAggregate
, treeAggregate
, count
, mean
, collect
, collectJ
, take
, takeJ
, distinct
, intersection
, union
, sortBy
, sample
, randomSplit
, first
, firstJ
, getNumPartitions
, saveAsTextFile
, subtract
$ reading_files
) where
import qualified Prelude
import Prelude.Linear hiding (IO, filter, map, subtract, take, zero)
import qualified Prelude.Linear as PL
import System.IO.Linear as LIO
import Control.Functor.Linear as Linear
import qualified Data.Functor.Linear as Data
import Control.Distributed.Closure
import Control.Distributed.Spark.Safe.Closure (reflectFun)
import Data.Choice (Choice)
import qualified Data.Choice as Choice
import Data.Int
import Data.Text (Text)
import qualified Data.Text as Text
import Data.Typeable (Typeable)
import Data.Vector.Storable as V (fromList)
import qualified Foreign.JNI.Types
import Foreign.JNI.Safe
import Foreign.JNI.Types.Safe
import Language.Java.Safe
import Language.Java.Inline.Safe
import Streaming (Stream, Of, effect)
import qualified Streaming.Prelude as S (fold_, uncons, yield)
-- | A reference to a Spark resilient distributed dataset: a thin wrapper
-- around a JVM reference to an @org.apache.spark.api.java.JavaRDD@ object.
-- The phantom type @a@ tracks the element type on the Haskell side.
newtype RDD a = RDD (J ('Class "org.apache.spark.api.java.JavaRDD"))
  deriving Coercible
-- | Persist this RDD with Spark's default storage level
-- (delegates to @JavaRDD.cache()@).
cache :: RDD a %1 -> IO (RDD a)
cache rdd = [java| $rdd.cache() |]
-- | Mark the RDD as non-persistent, removing its blocks from memory and
-- disk. The 'Bool' selects whether to block until all blocks are deleted.
unpersist :: RDD a %1 -> Bool -> IO (RDD a)
unpersist rdd blocking = [java| $rdd.unpersist($blocking) |]
-- | True when the RDD contains no elements.
isEmpty :: RDD a %1 -> IO (Ur Bool)
isEmpty rdd = [java| $rdd.isEmpty() |]
-- | A textual description of this RDD and its recursive dependencies,
-- for debugging.
toDebugString :: RDD a %1 -> IO (Ur Text)
toDebugString rdd = [java| $rdd.toDebugString() |] >>= reify_
-- | Return a new RDD that has exactly the given number of partitions.
repartition :: Int32 -> RDD a %1 -> IO (RDD a)
repartition n rdd = [java| $rdd.repartition($n) |]
-- | Return a new RDD reduced to at most the given number of partitions.
coalesce :: Int32 -> RDD a %1 -> IO (RDD a)
coalesce n rdd = [java| $rdd.coalesce($n) |]
-- | Keep only the elements satisfying the given serializable predicate.
-- The closure is reflected to a JVM function object and passed to
-- @JavaRDD.filter@.
filter
  :: (Static (Reify a), Typeable a)
  => Closure (a -> Bool)
  -> RDD a
  %1 -> IO (RDD a)
filter clos rdd = Linear.do
    f <- ungeneric <$> reflectFun (sing :: Sing 1) clos
    [java| $rdd.filter($f) |]
-- | Apply the given serializable function to every element of the RDD.
map
  :: (Static (Reify a), Static (Reflect b), Typeable a, Typeable b)
  => Closure (a -> b)
  -> RDD a
  %1 -> IO (RDD b)
map clos rdd = Linear.do
    f <- ungeneric <$> reflectFun (sing :: Sing 1) clos
    [java| $rdd.map($f) |]
-- | Apply a stream-to-stream transformation to each partition as a whole.
-- Implemented as 'mapPartitionsWithIndex' with the partition index ignored
-- (via @static const@).
mapPartitions
  :: ( Static (Reify (Stream (Of a) PL.IO ()))
     , Static (Reflect (Stream (Of b) PL.IO ()))
     , Typeable a
     , Typeable b
     )
  => Choice "preservePartitions"
  -> Closure (Stream (Of a) PL.IO () -> Stream (Of b) PL.IO ())
  -> RDD a
  %1 -> IO (RDD b)
mapPartitions preservePartitions clos rdd =
    mapPartitionsWithIndex preservePartitions (closure (static const) `cap` clos) rdd
-- | Like 'mapPartitions', but the transformation also receives the index
-- of the partition it is applied to.
mapPartitionsWithIndex
  :: ( Static (Reify (Stream (Of a) PL.IO ()))
     , Static (Reflect (Stream (Of b) PL.IO ()))
     , Typeable a
     , Typeable b
     )
  => Choice "preservePartitions"
  -> Closure (Int32 -> Stream (Of a) PL.IO () -> Stream (Of b) PL.IO ())
  -> RDD a
  %1 -> IO (RDD b)
mapPartitionsWithIndex preservePartitions clos rdd = Linear.do
    f <- ungeneric <$> reflectFun (sing :: Sing 2) clos
    [java| $rdd.mapPartitionsWithIndex($f, $preservePartitions) |]
-- | Aggregate the elements of each partition, and then the partition
-- results, using the given operator and neutral element
-- (delegates to @JavaRDD.fold@).
fold
  :: (Static (Reify a), Static (Reflect a), Typeable a)
  => Closure (a -> a -> a)
  -> a
  -> RDD a
  %1 -> IO (Ur a)
fold clos zero rdd = Linear.do
    f <- ungeneric <$> reflectFun (sing :: Sing 2) clos
    jzero <- upcast <$> reflect zero
    res :: JObject <- [java| $rdd.fold($jzero, $f) |]
    reify_ (unsafeCast res)
-- | Reduce the elements of the RDD with the given closure, delegating
-- directly to @JavaRDD.reduce@ (every application crosses the JVM
-- boundary; see 'reduce' for a variant that folds whole partitions
-- on the Haskell side first).
slowReduce
  :: (Static (Reify a), Static (Reflect a), Typeable a)
  => Closure (a -> a -> a)
  -> RDD a
  %1 -> IO (Ur a)
slowReduce clos rdd = Linear.do
    f <- ungeneric <$> reflectFun (sing :: Sing 2) clos
    res :: JObject <- [java| $rdd.reduce($f) |]
    reify_ (unsafeCast res)
-- | A version of reduce implemented in terms of 'mapPartitions'.
-- | Reduce each partition to a single element with a Haskell-side stream
-- fold ('mapPartitions'), then combine the per-partition results with
-- 'slowReduce'.
reduce
  :: ( Static (Reify a)
     , Static (Reflect a)
     , Static (Reify (Stream (Of a) PL.IO ()))
     , Static (Reflect (Stream (Of a) PL.IO ()))
     , Typeable a
     )
  => Closure (a -> a -> a)
  -> RDD a
  %1 -> IO (Ur a)
reduce combOp rdd0 =
    mapPartitions (Choice.Don't #preservePartitions) combOp' rdd0 >>= slowReduce combOp
  where
    -- Fold a non-empty partition stream to a one-element stream; an
    -- empty partition yields an empty stream.
    combOp' = closure (static (\f s -> effect $ S.uncons s Prelude.>>= \case
                Just (e, ss) -> S.yield Prelude.<$> S.fold_ f e id ss
                Nothing -> Prelude.return Prelude.mempty
                ))
        `cap` combOp
-- | Sort the RDD by the key produced by the given closure, in the given
-- direction, producing the given number of partitions.
sortBy
  :: (Static (Reify a), Static (Reflect b), Typeable a, Typeable b)
  => Closure (a -> b)
  -> Choice "ascending"
  -> Int32
  -> RDD a
  %1 -> IO (RDD a)
sortBy clos ascending numPartitions rdd = Linear.do
    f <- ungeneric <$> reflectFun (sing :: Sing 1) clos
    [java| $rdd.sortBy($f, $ascending, $numPartitions) |]
-- | Aggregate with separate sequence and combine operators, delegating
-- directly to @JavaRDD.aggregate@ (every application crosses the JVM
-- boundary; see 'aggregate' for a partition-at-a-time variant).
slowAggregate
  :: (Static (Reify a), Static (Reify b), Static (Reflect b), Typeable a, Typeable b)
  => Closure (b -> a -> b)
  -> Closure (b -> b -> b)
  -> b
  -> RDD a
  %1 -> IO (Ur b)
slowAggregate seqOp combOp zero rdd = Linear.do
    jseqOp <- ungeneric <$> reflectFun (sing :: Sing 2) seqOp
    jcombOp <- ungeneric <$> reflectFun (sing :: Sing 2) combOp
    jzero <- upcast <$> reflect zero
    res :: JObject <- [java| $rdd.aggregate($jzero, $jseqOp, $jcombOp) |]
    reify_ (unsafeCast res)
-- | A version of aggregate implemented in terms of 'mapPartitions'.
-- | Fold each partition Haskell-side with @seqOp@ starting from @zero@
-- ('mapPartitions'), then combine the per-partition results with
-- 'slowReduce' using @combOp@.
aggregate
  :: ( Static (Reify (Stream (Of a) PL.IO ()))
     , Static (Reflect (Stream (Of b) PL.IO ()))
     , Static (Reify b)
     , Static (Reflect b)
     , Static (Serializable b)
     , Typeable a
     )
  => Closure (b -> a -> b)
  -> Closure (b -> b -> b)
  -> b
  -> RDD a
  %1 -> IO (Ur b)
aggregate seqOp combOp zero rdd0 =
    mapPartitions (Choice.Don't #preservePartitions) seqOp' rdd0 >>= slowReduce combOp
  where
    -- Fold a partition stream to a one-element stream; @zero@ travels
    -- inside the closure via 'cpure'.
    seqOp' = closure (static (\f e s -> effect (S.yield Prelude.<$> S.fold_ f e id s)))
        `cap` seqOp
        `cap` cpure closureDict zero
-- | Like 'slowAggregate', but combining partition results in a multi-level
-- tree of the given depth (delegates to @JavaRDD.treeAggregate@).
treeAggregate
  :: (Static (Reify a), Static (Reify b), Static (Reflect b), Typeable a, Typeable b)
  => Closure (b -> a -> b)
  -> Closure (b -> b -> b)
  -> b
  -> Int32
  -> RDD a
  %1 -> IO (Ur b)
treeAggregate seqOp combOp zero depth rdd = Linear.do
    jseqOp <- ungeneric <$> reflectFun (sing :: Sing 2) seqOp
    jcombOp <- ungeneric <$> reflectFun (sing :: Sing 2) combOp
    jzero <- upcast <$> reflect zero
    res :: JObject <- [java| $rdd.treeAggregate($jzero, $jseqOp, $jcombOp, $depth) |]
    reify_ (unsafeCast res)
-- | The number of elements in the RDD.
count :: RDD a %1 -> IO (Ur Int64)
count rdd =
    [java| $rdd.count() |] >>= reify_
-- | The arithmetic mean of an RDD of doubles (via @mapToDouble(...).mean()@).
mean :: RDD Double %1 -> IO (Ur Double)
mean rdd =
    [java| $rdd.mapToDouble(r -> (double)r).mean() |]
-- | Return the elements of the first RDD that do not occur in the second.
subtract :: RDD a %1 -> RDD a %1 -> IO (RDD a)
subtract rdd1 rdd2 = [java| $rdd1.subtract($rdd2) |]
-- $reading_files
--
-- #reading_files#
--
-- File-reading functions might produce a particular form of RDD (HadoopRDD)
-- whose elements are sensitive to the order in which they are used. If
-- the elements are not used sequentially, then the RDD might show incorrect
-- contents [1].
--
-- In practice, most functions respect this access pattern, but 'collect' and
-- 'take' do not. A workaround is to use a copy of the RDD created with
-- 'map' before using those functions.
--
-- [1] https://issues.apache.org/jira/browse/SPARK-1018
-- | See Note [Reading Files] ("Control.Distributed.Spark.RDD#reading_files").
-- | Return all elements of the RDD, reified to Haskell values.
-- Beware the HadoopRDD access-pattern caveat for file-backed RDDs.
collect :: Reify a => RDD a %1 -> IO (Ur [a])
collect rdd = Linear.do
    arr :: JObjectArray <- [java| $rdd.collect().toArray() |]
    reify_ (unsafeCast arr)
-- | Like 'collect', but returning JVM references instead of reified
-- Haskell values.
collectJ :: forall a. (Coercible a, IsReferenceType (Ty a)) => RDD a %1 -> IO [a]
collectJ rdd = Linear.do
    arr :: JObjectArray <- [java| $rdd.collect().toArray() |]
    refList :: [J (Ty a)] <- fromArray (unsafeCast arr)
    pure $ Data.fmap (unsafeUncoerce . coerce) refList
-- | See Note [Reading Files] ("Control.Distributed.Spark.RDD#reading_files").
-- | Return the first @n@ elements of the RDD, reified to Haskell values.
-- Beware the HadoopRDD access-pattern caveat for file-backed RDDs.
take :: Reify a => Int32 -> RDD a %1 -> IO (Ur [a])
take n rdd = Linear.do
    arr :: JObjectArray <- [java| $rdd.take($n).toArray() |]
    reify_ (unsafeCast arr)
-- | Like 'take', but returning JVM references instead of reified
-- Haskell values.
takeJ :: forall a. (Coercible a, IsReferenceType (Ty a)) => Int32 -> RDD a %1 -> IO [a]
takeJ n rdd = Linear.do
    arr :: JObjectArray <- [java| $rdd.take($n).toArray() |]
    refList :: [J (Ty a)] <- fromArray (unsafeCast arr)
    pure $ Data.fmap (unsafeUncoerce . coerce) refList
-- | Return a new RDD containing the distinct elements of this one.
distinct :: RDD a %1 -> IO (RDD a)
distinct rdd = [java| $rdd.distinct() |]
-- | The intersection of the two RDDs.
intersection :: RDD a %1 -> RDD a %1 -> IO (RDD a)
intersection rdd1 rdd2 = [java| $rdd1.intersection($rdd2) |]
-- | The union of the two RDDs.
union :: RDD a %1 -> RDD a %1 -> IO (RDD a)
union rdd1 rdd2 = [java| $rdd1.union($rdd2) |]
-- | Return a sampled subset of this RDD (delegates to @JavaRDD.sample@).
--
-- NOTE(review): the parameter types below were reconstructed from the
-- three-argument definition and the inline Java call — the signature in
-- the original text declared only the 'RDD' argument and did not
-- typecheck against the body. Confirm against upstream.
sample
  :: RDD a
  %1 -> Choice "replacement" -- ^ Whether to sample with replacement
  -> Double -- ^ Fraction of elements to keep
  -> IO (RDD a)
sample rdd replacement frac = [java| $rdd.sample($replacement, $frac) |]
-- | Randomly split this RDD into one RDD per statistical weight
-- (delegates to @JavaRDD.randomSplit@).
--
-- NOTE(review): the signature was reconstructed from the definition —
-- the original text declared only the 'RDD' argument. Also fixed an
-- off-by-one: @go@ previously started at index @fromEnum n@ (the array
-- length), which is one past the last valid element; the @n == -1@ stop
-- guard shows the loop is meant to start at @length - 1@.
randomSplit
  :: forall a. RDD a
  %1 -> [Double] -- ^ Statistical weights of RDD fractions
  -> IO [RDD a]
randomSplit rdd weights = Linear.do
    jweights <- reflect $ V.fromList weights
    arr :: JObjectArray <- [java| $rdd.randomSplit($jweights) |]
    (arr', Ur n) <- getArrayLength arr
    go [] arr' (fromEnum n - 1)
  where
    -- Fold-like helper to thread the array reference through; walks the
    -- array from the last index down to 0, consing so the result is in
    -- ascending order, and releases the array reference when done.
    go :: [RDD a] %1 -> JObjectArray %1 -> Int -> IO [RDD a]
    go acc arr' n
      | n == -1 = pure acc <* deleteLocalRef arr'
      | otherwise = Linear.do
          (arr'', elt) <- getObjectArrayElement arr' (toEnum n)
          go ((RDD . unsafeCast) elt : acc) arr'' (n - 1)
-- | The first element of the RDD, reified to a Haskell value.
first :: Reify a => RDD a %1 -> IO (Ur a)
first rdd = Linear.do
    res :: JObject <- [java| $rdd.first() |]
    reify_ (unsafeCast res)
-- | Like 'first', but returning a JVM reference instead of a reified
-- Haskell value.
firstJ :: forall a. Coercible a => RDD a %1 -> IO a
firstJ rdd = Linear.do
    res :: JObject <- [java| $rdd.first() |]
    ref :: J (Ty a) <- pure (unsafeCast res)
    pure . unsafeUncoerce . JObject $ ref
-- | The number of partitions of this RDD.
getNumPartitions :: RDD a %1 -> IO (Ur Int32)
getNumPartitions rdd = [java| $rdd.getNumPartitions() |]
-- | Save the RDD as text files under the given path, one line per element.
saveAsTextFile :: RDD a %1 -> FilePath -> IO ()
saveAsTextFile rdd fp = Linear.do
    jfp <- reflect (Text.pack fp)
    [java| { $rdd.saveAsTextFile($jfp); } |]
|
2427566113e9c35ce42bca42960d8bf5b59786b331201080ebdd731c5ff3f6e6 | brownplt/TeJaS | strobe_sigs.ml | open Prelude
open Sig
(* Core type language of the Strobe type system: kinds, field-presence
   annotations, the type AST (with extension points [extKind]/[extTyp]/
   [extBinding] so richer systems can embed their own constructors), object
   types with memoized pattern caches, and type environments. *)
module type STROBE_TYPS = sig
  type pat
  type extKind
  type kind =
    | KStar
    | KArrow of kind list * kind
    | KEmbed of extKind
  (* Whether a field is definitely there, possibly there, or inherited. *)
  type presence =
    | Inherited
    | Present
    | Maybe
  type extTyp
  type typ =
    | TPrim of string
    | TUnion of string option * typ * typ
    | TInter of string option * typ * typ
    | TArrow of typ list * typ option * typ (* args (including <this>), optional variadic arg, return typ *)
    | TThis of typ
    | TObject of obj_typ
    | TWith of typ * obj_typ
    | TRegex of pat
    | TRef of string option * typ
    | TSource of string option * typ
    | TSink of string option * typ
    | TTop
    | TBot
    | TForall of string option * id * typ * typ (** [TForall (a, s, t)] forall a <: s . t *)
    | TId of id
    | TRec of string option * id * typ
    | TLambda of string option * (id * kind) list * typ (** type operator *)
    | TApp of typ * typ list (** type operator application *)
    | TFix of string option * id * kind * typ (** recursive type operators *)
    | TUninit of typ option ref (** type of uninitialized variables *)
    | TEmbed of extTyp
  (* Object types carry memoization refs for derived patterns; [None] /
     [ref None] means "not computed yet". *)
  and obj_typ = {
    fields : (pat * presence * typ) list;
    absent_pat : pat;
    cached_parent_typ : typ option option ref;
    cached_guard_pat : pat option ref;
    cached_cover_pat : pat option ref;
    cached_possible_cover_pat : pat option ref; (* pat Lazy.t *)
  }
  type field = pat * presence * typ
  type extBinding
  type binding =
    BEmbed of extBinding | BTermTyp of typ | BTypDef of typ * kind | BTypBound of typ * kind | BLabelTyp of typ | BTyvar of kind
  type env = extBinding list IdMap.t
  val proto_str : string
  val proto_pat : pat
  val fields : obj_typ -> field list
  val mk_obj_typ : field list -> pat -> obj_typ
  (** Pattern for absent field *)
  val absent_pat : obj_typ -> pat
  (** includes absent *)
  val cover_pat : obj_typ -> pat
  (** excludes absent *)
  val possible_field_cover_pat : obj_typ -> pat
end
(* Functor signature: instantiate the Strobe type language over a concrete
   pattern set and an extension-type module, tying the extension points to
   the supplied modules' types. *)
module type STROBE_TYP = functor (Pat : SET) -> functor (EXT : TYPS) ->
  (STROBE_TYPS
   with type extKind = EXT.kind
   with type extTyp = EXT.typ
   with type extBinding = EXT.binding
   with type pat = Pat.t)
(* Operations over the Strobe type language: structured type-error
   reporting, pretty-printing, substitution and alpha-renaming,
   normalization/exposure of types, and debug tracing. *)
module type STROBE_ACTIONS = sig
  include STROBE_TYPS
  (* Each constructor pairs a message-formatting function with the data
     it formats, so errors are rendered lazily. *)
  type typ_error_details =
    | TypKind of (typ -> kind -> string) * typ * kind
    | StringTyp of (string -> typ -> string) * string * typ
    | FixedString of string
    | String of (string -> string) * string
    | TypTyp of (typ -> typ -> string) * typ * typ
    | NumNum of (int -> int -> string) * int * int
    | Typ of (typ -> string) * typ
    | Pat of (pat -> string) * pat
    | PatPat of (pat -> pat -> string) * pat * pat
    | PatPatTyp of (pat -> pat -> typ -> string) * pat * pat * typ
    | PatTyp of (pat -> typ -> string) * pat * typ
    | TypTypTyp of (typ -> typ -> typ -> string) * typ * typ * typ
  exception Kind_error of string
  exception Typ_error of Pos.t * typ_error_details
  val typ_error_details_to_string : typ_error_details -> string
  val typ_mismatch : Pos.t -> typ_error_details -> unit
  val get_num_typ_errors : unit -> int
  val with_typ_exns : (unit -> 'a) -> 'a
  module Pretty : sig
    val typ : typ -> FormatExt.printer
    val kind : kind -> FormatExt.printer
    val useNames : bool -> unit
    val shouldUseNames : unit -> bool
    val env : env -> FormatExt.printer list
    val simpl_typ : typ -> string
    val simpl_kind : kind -> string
  end
  val apply_name : string option -> typ -> typ
  val replace_name : string option -> typ -> typ
  val string_of_typ : typ -> string
  val string_of_kind : kind -> string
  val name_of : typ -> string option
  val free_ids : typ -> IdSet.t
  val free_typ_ids : typ -> IdSet.t
  val map_reduce_t : (extTyp -> 'a) -> (IdSet.t -> id -> 'b) -> (IdSet.t -> 'b -> 'a -> 'b) -> 'b -> typ -> 'b
  val subst : id option -> typ -> (extTyp -> extTyp) -> typ -> typ
  val typ_subst : id -> typ -> typ -> typ
  val rename_avoid_capture : IdSet.t -> id list -> typ -> (id list * typ)
  val equivalent_typ : env -> typ -> typ -> bool
  val canonical_type : typ -> typ
  val collapse_if_possible : env -> typ -> typ
  val expose_twith : env -> typ -> typ
  val lookup_typ : env -> id -> typ * kind
  val expose : env -> typ -> typ
  val simpl_typ : env -> typ -> typ
  val typ_assoc:
    (id -> typ -> extBinding IdMap.t -> extBinding IdMap.t) ->
    (extBinding IdMap.t -> extBinding IdMap.t -> extBinding IdMap.t) ->
    env -> typ -> typ -> extBinding IdMap.t
  val trace : string -> string -> ('a -> bool) -> (unit -> 'a) -> 'a
  val traceMsg : ('a, out_channel, unit, unit, unit, unit) format6 -> 'a
end
(* Subtyping layer on top of STROBE_ACTIONS: the subtype relation, field
   lookup/inheritance through object patterns, union/intersection of types,
   and cache statistics for the (memoized) subtype checks. *)
module type STROBE_SUBTYPING = sig
  include STROBE_ACTIONS
  val subtype : env -> typ -> typ -> bool
  val unfold_typdefs : env -> typ -> typ
  val pat_env : env -> pat IdMap.t
  val simpl_lookup : Pos.t -> env -> typ -> pat -> typ
  val inherits : Pos.t -> env -> typ -> pat -> typ
  val typ_union : env -> typ -> typ -> typ
  val typ_intersect : env -> typ -> typ -> typ
  val num_cache_hits : unit -> int
  val num_cache_misses : unit -> int
  val print_cache : string -> FormatExt.printer
end
(* A Strobe module bundles the actions with its extension module [Ext]
   (whose base types are Strobe's own) and the concrete pattern set. *)
module type STROBE_MODULE = sig
  include STROBE_ACTIONS
  module Ext : (EXT_TYP_ACTIONS
                with type baseTyp = typ
                with type baseKind = kind
                with type baseBinding = binding
                with type env = env
                with type typ = extTyp
                with type kind = extKind
                with type binding = extBinding)
  module Pat : (SET with type t = pat)
end
(* Kind checking: primitive-type registry plus the kinding judgment. *)
module type STROBE_KINDING = sig
  include STROBE_TYPS
  val list_prims : unit -> id list
  val new_prim_typ : string -> unit
  val kind_check : env -> id list -> typ -> kind
end
(* Bidirectional type checking over expressions: [check] (checking mode),
   [synth] (synthesis mode), application checking, and helpers. *)
module type STROBE_TYPECHECKING = sig
  include STROBE_ACTIONS
  type exp
  val check : env -> extTyp option -> exp -> typ -> unit
  val synth : env -> extTyp option -> exp -> typ
  val check_app : env -> extTyp option -> exp -> exp list -> typ -> typ
  val disable_flows : unit -> unit
  val bind_forall_vars : env -> typ -> env * typ
  val typecheck : env -> extTyp option -> exp -> unit
  val trace : string -> ('a -> bool) -> (exp -> 'a) -> exp -> 'a
  val forall_arrow : typ -> ((id * extBinding) list * typ) option
end
| null | https://raw.githubusercontent.com/brownplt/TeJaS/a8ad7e5e9ad938db205074469bbde6a688ec913e/src/strobe_sigs.ml | ocaml | args (including <this>), optional variadic arg, return typ
* [TForall (a, s, t)] forall a <: s . t
* type operator
* type operator application
* recursive type operators
* type of uninitialized variables
pat Lazy.t
* Pattern for absent field
* includes absent
* excludes absent | open Prelude
open Sig
module type STROBE_TYPS = sig
type pat
type extKind
type kind =
| KStar
| KArrow of kind list * kind
| KEmbed of extKind
type presence =
| Inherited
| Present
| Maybe
type extTyp
type typ =
| TPrim of string
| TUnion of string option * typ * typ
| TInter of string option * typ * typ
| TThis of typ
| TObject of obj_typ
| TWith of typ * obj_typ
| TRegex of pat
| TRef of string option * typ
| TSource of string option * typ
| TSink of string option * typ
| TTop
| TBot
| TId of id
| TRec of string option * id * typ
| TEmbed of extTyp
and obj_typ = {
fields : (pat * presence * typ) list;
absent_pat : pat;
cached_parent_typ : typ option option ref;
cached_guard_pat : pat option ref;
cached_cover_pat : pat option ref;
}
type field = pat * presence * typ
type extBinding
type binding =
BEmbed of extBinding | BTermTyp of typ | BTypDef of typ * kind | BTypBound of typ * kind | BLabelTyp of typ | BTyvar of kind
type env = extBinding list IdMap.t
val proto_str : string
val proto_pat : pat
val fields : obj_typ -> field list
val mk_obj_typ : field list -> pat -> obj_typ
val absent_pat : obj_typ -> pat
val cover_pat : obj_typ -> pat
val possible_field_cover_pat : obj_typ -> pat
end
module type STROBE_TYP = functor (Pat : SET) -> functor (EXT : TYPS) ->
(STROBE_TYPS
with type extKind = EXT.kind
with type extTyp = EXT.typ
with type extBinding = EXT.binding
with type pat = Pat.t)
module type STROBE_ACTIONS = sig
include STROBE_TYPS
type typ_error_details =
| TypKind of (typ -> kind -> string) * typ * kind
| StringTyp of (string -> typ -> string) * string * typ
| FixedString of string
| String of (string -> string) * string
| TypTyp of (typ -> typ -> string) * typ * typ
| NumNum of (int -> int -> string) * int * int
| Typ of (typ -> string) * typ
| Pat of (pat -> string) * pat
| PatPat of (pat -> pat -> string) * pat * pat
| PatPatTyp of (pat -> pat -> typ -> string) * pat * pat * typ
| PatTyp of (pat -> typ -> string) * pat * typ
| TypTypTyp of (typ -> typ -> typ -> string) * typ * typ * typ
exception Kind_error of string
exception Typ_error of Pos.t * typ_error_details
val typ_error_details_to_string : typ_error_details -> string
val typ_mismatch : Pos.t -> typ_error_details -> unit
val get_num_typ_errors : unit -> int
val with_typ_exns : (unit -> 'a) -> 'a
module Pretty : sig
val typ : typ -> FormatExt.printer
val kind : kind -> FormatExt.printer
val useNames : bool -> unit
val shouldUseNames : unit -> bool
val env : env -> FormatExt.printer list
val simpl_typ : typ -> string
val simpl_kind : kind -> string
end
val apply_name : string option -> typ -> typ
val replace_name : string option -> typ -> typ
val string_of_typ : typ -> string
val string_of_kind : kind -> string
val name_of : typ -> string option
val free_ids : typ -> IdSet.t
val free_typ_ids : typ -> IdSet.t
val map_reduce_t : (extTyp -> 'a) -> (IdSet.t -> id -> 'b) -> (IdSet.t -> 'b -> 'a -> 'b) -> 'b -> typ -> 'b
val subst : id option -> typ -> (extTyp -> extTyp) -> typ -> typ
val typ_subst : id -> typ -> typ -> typ
val rename_avoid_capture : IdSet.t -> id list -> typ -> (id list * typ)
val equivalent_typ : env -> typ -> typ -> bool
val canonical_type : typ -> typ
val collapse_if_possible : env -> typ -> typ
val expose_twith : env -> typ -> typ
val lookup_typ : env -> id -> typ * kind
val expose : env -> typ -> typ
val simpl_typ : env -> typ -> typ
val typ_assoc:
(id -> typ -> extBinding IdMap.t -> extBinding IdMap.t) ->
(extBinding IdMap.t -> extBinding IdMap.t -> extBinding IdMap.t) ->
env -> typ -> typ -> extBinding IdMap.t
val trace : string -> string -> ('a -> bool) -> (unit -> 'a) -> 'a
val traceMsg : ('a, out_channel, unit, unit, unit, unit) format6 -> 'a
end
module type STROBE_SUBTYPING = sig
include STROBE_ACTIONS
val subtype : env -> typ -> typ -> bool
val unfold_typdefs : env -> typ -> typ
val pat_env : env -> pat IdMap.t
val simpl_lookup : Pos.t -> env -> typ -> pat -> typ
val inherits : Pos.t -> env -> typ -> pat -> typ
val typ_union : env -> typ -> typ -> typ
val typ_intersect : env -> typ -> typ -> typ
val num_cache_hits : unit -> int
val num_cache_misses : unit -> int
val print_cache : string -> FormatExt.printer
end
module type STROBE_MODULE = sig
include STROBE_ACTIONS
module Ext : (EXT_TYP_ACTIONS
with type baseTyp = typ
with type baseKind = kind
with type baseBinding = binding
with type env = env
with type typ = extTyp
with type kind = extKind
with type binding = extBinding)
module Pat : (SET with type t = pat)
end
module type STROBE_KINDING = sig
include STROBE_TYPS
val list_prims : unit -> id list
val new_prim_typ : string -> unit
val kind_check : env -> id list -> typ -> kind
end
module type STROBE_TYPECHECKING = sig
include STROBE_ACTIONS
type exp
val check : env -> extTyp option -> exp -> typ -> unit
val synth : env -> extTyp option -> exp -> typ
val check_app : env -> extTyp option -> exp -> exp list -> typ -> typ
val disable_flows : unit -> unit
val bind_forall_vars : env -> typ -> env * typ
val typecheck : env -> extTyp option -> exp -> unit
val trace : string -> ('a -> bool) -> (exp -> 'a) -> exp -> 'a
val forall_arrow : typ -> ((id * extBinding) list * typ) option
end
|
9dbb7c02a05fa35d36b238d0ac7fc3ab281cbbb72b53ad8035fd30a38db3995f | fujita-y/digamma | r7rs.scm | Copyright ( c ) 2004 - 2022 Yoshikatsu Fujita / LittleWing Company Limited .
;;; See LICENSE file for terms and conditions of use.
(define feature-identifies
(make-parameter
(cons*
(if (= (architecture-feature 'sizeof:void*) 64) 'lp64 'ilp32)
(string->symbol (string-append (symbol->string (native-endianness)) "-endian"))
'(r7rs exact-closed exact-complex ieee-float full-unicode ratios posix digamma digamma-1))))
;; Evaluate an R7RS cond-expand feature requirement: a bare feature symbol,
;; (and ...), (or ...), (not clause), or (library name). Returns a truthy
;; value when the requirement is fulfilled; raises a syntax violation on a
;; malformed clause. `form` is only used for error reporting.
(define fulfill-feature-requirements?
  (lambda (form spec)
    (let loop ((spec spec))
      (destructuring-match spec
        ((? symbol? id) (memq id (feature-identifies)))
        (('and) #t)
        (('and clause . more)
         (and (if (symbol? clause) (memq clause (feature-identifies)) (loop clause))
              (loop `(and ,@more))))
        (('or) #f)
        (('or clause . more)
         (or (if (symbol? clause)
                 (memq clause (feature-identifies))
                 (loop clause))
             (loop `(or ,@more))))
        (('not) (syntax-violation 'cond-expand "malformed clause" (abbreviated-take-form form 4 8) spec))
        (('not clause) (not (loop clause)))
        (('library name) (or (member name '((core primitives) '(core intrinsics))) (locate-library-file name)))
        (_ (syntax-violation 'cond-expand "malformed clause" (abbreviated-take-form form 4 8) spec))))))
;; Expand (include "path" ...) into a (begin ...) splicing the forms read
;; from each file.
(define expand-include
  (lambda (form env)
    `(begin ,@(apply append (map (lambda (e) (read-include-file #f e 'include)) (cdr form))))))
;; Expand (include-ci "path" ...) like expand-include, but reading the
;; files case-insensitively.
(define expand-include-ci
  (lambda (form env)
    `(begin ,@(apply append (map (lambda (e) (read-include-file #f e 'include-ci)) (cdr form))))))
;; Select the body of the first cond-expand clause whose requirement is
;; fulfilled; an else clause matches unconditionally but only in final
;; position. Returns '() when no clause matches.
(define parse-cond-expand
  (lambda (form specs)
    (let loop ((spec specs))
      (destructuring-match spec
        (() '())
        ((('else body ...)) body)
        ((('else body ...) . _)
         (syntax-violation 'cond-expand "misplaced else" (abbreviated-take-form form 4 8) (car spec)))
        (((condition body ...) . more) (if (fulfill-feature-requirements? form condition) body (loop more)))
        (_ (syntax-violation 'cond-expand "malformed clause" (abbreviated-take-form form 4 8) (car spec)))))))
;; Expand an R7RS (define-library name clauses ...) form. Walks the
;; library declarations accumulating exports, imports, dependencies and
;; body commands (inlining include/include-ci/include-library-declarations
;; and resolving cond-expand along the way), checks for duplicate export
;; and conflicting import identifiers, then hands off to
;; expand-define-library-body with environments built from the imports.
;; Also records the library version and its include-file dependencies.
(define expand-define-library
  (lambda (form env)
    ;; Order a binding table as an alist with bound entries before unbound ones.
    (define permute-env
      (lambda (ht)
        (let loop ((lst (core-hashtable->alist ht)) (bounds '()) (unbounds '()))
          (cond ((null? lst) (append bounds unbounds))
                ((unbound? (cdar lst)) (loop (cdr lst) bounds (cons (car lst) unbounds)))
                (else (loop (cdr lst) (cons (car lst) bounds) unbounds))))))
    (parameterize ((lexical-syntax-version 7))
      (destructuring-match form
        ((_ library-name clauses ...)
         (let ((library-id (library-name->id form library-name)) (library-version (library-name->version form library-name)))
           (and library-version (core-hashtable-set! (scheme-library-versions) library-id library-version))
           (parameterize ((current-include-files (make-core-hashtable)))
             (let ((coreform
                    (let loop ((clauses clauses) (exports '()) (imports '()) (depends '()) (commands '()))
                      (if (null? clauses)
                          (let ((ht-immutables (make-core-hashtable))
                                (ht-imports (make-core-hashtable))
                                (ht-publics (make-core-hashtable)))
                            ;; Reject duplicate export identifiers; exports are immutable.
                            (for-each
                              (lambda (a)
                                (and (core-hashtable-ref ht-publics (cdr a) #f)
                                     (syntax-violation
                                       'define-library
                                       "duplicate export identifiers"
                                       (abbreviated-take-form form 4 8)
                                       (cdr a)))
                                (core-hashtable-set! ht-publics (cdr a) #t)
                                (core-hashtable-set! ht-immutables (car a) #t))
                              exports)
                            ;; Reject the same identifier imported with two different denotations.
                            (for-each
                              (lambda (a)
                                (core-hashtable-set! ht-immutables (car a) #t)
                                (cond ((core-hashtable-ref ht-imports (car a) #f)
                                       =>
                                       (lambda (deno)
                                         (or (eq? deno (cdr a))
                                             (syntax-violation
                                               'define-library
                                               "duplicate import identifiers"
                                               (abbreviated-take-form form 4 8)
                                               (car a)))))
                                      (else (core-hashtable-set! ht-imports (car a) (cdr a)))))
                              imports)
                            (let ((ht-env (make-shield-id-table commands)) (ht-libenv (make-core-hashtable)))
                              (for-each
                                (lambda (a) (core-hashtable-set! ht-env (car a) (cdr a)) (core-hashtable-set! ht-libenv (car a) (cdr a)))
                                (core-hashtable->alist ht-imports))
                              (parameterize ((current-immutable-identifiers ht-immutables))
                                (expand-define-library-body
                                  form
                                  library-id
                                  library-version
                                  commands
                                  exports
                                  imports
                                  depends
                                  (extend-env private-primitives-environment (permute-env ht-env))
                                  (permute-env ht-libenv)))))
                          ;; Dispatch on the next library declaration.
                          (destructuring-match clauses
                            ((('export export-spec ...) more ...)
                             (loop more (append exports (parse-exports form export-spec)) imports depends commands))
                            ((('import import-spec ...) more ...)
                             (loop
                               more
                               exports
                               (append imports (parse-imports form import-spec))
                               (append depends (parse-depends form import-spec))
                               commands))
                            ((('include path ...) more ...)
                             (every1 string? path)
                             (let ((more
                                    `((begin ,@(apply append (map (lambda (e) (read-include-file library-name e 'include)) path)))
                                      ,@more)))
                               (loop more exports imports depends commands)))
                            ((('include-ci path ...) more ...)
                             (every1 string? path)
                             (let ((more
                                    `((begin ,@(apply append (map (lambda (e) (read-include-file library-name e 'include-ci)) path)))
                                      ,@more)))
                               (loop more exports imports depends commands)))
                            ((('include-library-declarations path ...) more ...)
                             (every1 string? path)
                             (let ((more
                                    (append
                                      (apply
                                        append
                                        (map (lambda (e) (read-include-file library-name e 'include-library-declarations)) path))
                                      more)))
                               (loop more exports imports depends commands)))
                            ((('cond-expand spec ...) more ...)
                             (loop (append (parse-cond-expand form spec) more) exports imports depends commands))
                            ((('begin body ...) more ...) (loop more exports imports depends (append commands body)))
                            (_
                             (syntax-violation
                               'define-library
                               "malformed library declarations"
                               (abbreviated-take-form form 4 8)
                               (car clauses))))))))
               ;; Remember which files this library textually includes.
               (or (= (core-hashtable-size (current-include-files)) 0)
                   (core-hashtable-set! library-include-dependencies library-id (current-include-files)))
               coreform))))
        (_ (syntax-violation 'define-library "expected library name and declarations" (abbreviated-take-form form 4 8)))))))
;; Expand the body of a define-library: scan the (already flattened) body
;; forms, expanding define-syntax into compiled macros, renaming each
;; defined identifier to a library-global id, expanding macro uses in
;; place, and wrapping bare expressions as fresh `.e~a` definitions; then
;; rewrite everything into core form via rewrite-library-body. Mutates
;; `env`/`libenv` incrementally as bindings are discovered.
(define expand-define-library-body
  (lambda (form library-id library-version body exports imports depends env libenv)
    (define initial-libenv #f)
    (define macro-defs '())
    ;; Add a binding to the expansion environment; macro environments are
    ;; patched afterwards so previously defined macros see the new binding.
    (define extend-env!
      (lambda (datum1 datum2)
        (and (macro? datum2) (set! macro-defs (acons datum1 datum2 macro-defs)))
        (set! env (extend-env (list (cons datum1 datum2)) env))
        (for-each (lambda (a) (set-cdr! (cddr a) env)) macro-defs)))
    ;; Add a binding to the library (template) environment.
    (define extend-libenv!
      (lambda (datum1 datum2)
        (set! libenv (extend-env (list (cons datum1 datum2)) libenv))
        (current-template-environment libenv)))
    (define ht-imported-immutables (make-core-hashtable))
    ;; Generator of fresh names for bare expressions hoisted to definitions.
    (define expression-tag (let ((num 0)) (lambda () (set! num (+ num 1)) (string->symbol (format ".e~a" num)))))
    (current-template-environment libenv)
    (for-each (lambda (b) (core-hashtable-set! ht-imported-immutables (car b) #t)) imports)
    (let loop ((body (flatten-begin body env)) (defs '()) (macros '()) (renames '()))
      (cond ((null? body)
             (rewrite-library-body
               form
               library-id
               library-version
               body
               (reverse defs)
               (reverse macros)
               renames
               exports
               imports
               depends
               env
               libenv))
            ((and (pair? body) (pair? (car body)) (symbol? (caar body)))
             (let ((deno (env-lookup env (caar body))))
               (cond ((eq? denote-begin deno) (loop (flatten-begin body env) defs macros renames))
                     ((eq? denote-define-syntax deno)
                      (destructuring-match body
                        (((_ (? symbol? org) clause) more ...)
                         (begin
                           (and (core-hashtable-contains? ht-imported-immutables org)
                                (syntax-violation 'define-syntax "attempt to modify immutable binding" (car body)))
                           (let-values (((code . expr)
                                         (parameterize ((current-template-environment initial-libenv))
                                           (compile-macro (car body) clause env))))
                             (let ((new (generate-global-id library-id org)))
                               (extend-libenv! org (make-import new))
                               ;; Record the macro by flavor: procedural,
                               ;; variable-like, or template-based.
                               (cond ((procedure? code)
                                      (extend-env! org (make-macro code env))
                                      (loop
                                        more
                                        defs
                                        (cons (list org 'procedure (car expr)) macros)
                                        (acons org new renames)))
                                     ((macro-variable? code)
                                      (extend-env! org (make-macro-variable (cadr code) env))
                                      (loop
                                        more
                                        defs
                                        (cons (list org 'variable (car expr)) macros)
                                        (acons org new renames)))
                                     (else
                                      (extend-env! org (make-macro code env))
                                      (loop
                                        more
                                        defs
                                        (cons (list org 'template code) macros)
                                        (acons org new renames))))))))
                        (_ (syntax-violation 'define-syntax "expected symbol and single expression" (car body)))))
                     ((eq? denote-define deno)
                      (let ((def (annotate (cdr (desugar-define (car body))) (car body))))
                        (and (core-hashtable-contains? ht-imported-immutables (car def))
                             (syntax-violation 'define "attempt to modify immutable binding" (car body)))
                        (let ((org (car def)) (new (generate-global-id library-id (car def))))
                          (extend-env! org new)
                          (extend-libenv! org (make-import new))
                          (loop (cdr body) (cons def defs) macros (acons org new renames)))))
                     ((or (macro? deno) (eq? denote-let-syntax deno) (eq? denote-letrec-syntax deno))
                      ;; A macro use (or let[rec]-syntax) at definition level:
                      ;; expand one step and rescan.
                      (let-values (((expr new) (expand-initial-forms (car body) env)))
                        (set! env new)
                        (loop (append (flatten-begin (list expr) env) (cdr body)) defs macros renames)))
                     (else (loop (cons `(|.define| ,(expression-tag) ,(car body)) (cdr body)) defs macros renames)))))
            (else (loop (cons `(|.define| ,(expression-tag) ,(car body)) (cdr body)) defs macros renames))))))
| null | https://raw.githubusercontent.com/fujita-y/digamma/31f1512de2d406448ba3a9c8c352c56f30eb99e4/heap/boot/macro/r7rs.scm | scheme | See LICENSE file for terms and conditions of use. | Copyright ( c ) 2004 - 2022 Yoshikatsu Fujita / LittleWing Company Limited .
(define feature-identifies
(make-parameter
(cons*
(if (= (architecture-feature 'sizeof:void*) 64) 'lp64 'ilp32)
(string->symbol (string-append (symbol->string (native-endianness)) "-endian"))
'(r7rs exact-closed exact-complex ieee-float full-unicode ratios posix digamma digamma-1))))
(define fulfill-feature-requirements?
(lambda (form spec)
(let loop ((spec spec))
(destructuring-match spec
((? symbol? id) (memq id (feature-identifies)))
(('and) #t)
(('and clause . more)
(and (if (symbol? clause) (memq clause (feature-identifies)) (loop clause))
(loop `(and ,@more))))
(('or) #f)
(('or clause . more)
(or (if (symbol? clause)
(memq clause (feature-identifies))
(loop clause))
(loop `(or ,@more))))
(('not) (syntax-violation 'cond-expand "malformed clause" (abbreviated-take-form form 4 8) spec))
(('not clause) (not (loop clause)))
(('library name) (or (member name '((core primitives) '(core intrinsics))) (locate-library-file name)))
(_ (syntax-violation 'cond-expand "malformed clause" (abbreviated-take-form form 4 8) spec))))))
(define expand-include
(lambda (form env)
`(begin ,@(apply append (map (lambda (e) (read-include-file #f e 'include)) (cdr form))))))
(define expand-include-ci
(lambda (form env)
`(begin ,@(apply append (map (lambda (e) (read-include-file #f e 'include-ci)) (cdr form))))))
(define parse-cond-expand
(lambda (form specs)
(let loop ((spec specs))
(destructuring-match spec
(() '())
((('else body ...)) body)
((('else body ...) . _)
(syntax-violation 'cond-expand "misplaced else" (abbreviated-take-form form 4 8) (car spec)))
(((condition body ...) . more) (if (fulfill-feature-requirements? form condition) body (loop more)))
(_ (syntax-violation 'cond-expand "malformed clause" (abbreviated-take-form form 4 8) (car spec)))))))
(define expand-define-library
(lambda (form env)
(define permute-env
(lambda (ht)
(let loop ((lst (core-hashtable->alist ht)) (bounds '()) (unbounds '()))
(cond ((null? lst) (append bounds unbounds))
((unbound? (cdar lst)) (loop (cdr lst) bounds (cons (car lst) unbounds)))
(else (loop (cdr lst) (cons (car lst) bounds) unbounds))))))
(parameterize ((lexical-syntax-version 7))
(destructuring-match form
((_ library-name clauses ...)
(let ((library-id (library-name->id form library-name)) (library-version (library-name->version form library-name)))
(and library-version (core-hashtable-set! (scheme-library-versions) library-id library-version))
(parameterize ((current-include-files (make-core-hashtable)))
(let ((coreform
(let loop ((clauses clauses) (exports '()) (imports '()) (depends '()) (commands '()))
(if (null? clauses)
(let ((ht-immutables (make-core-hashtable))
(ht-imports (make-core-hashtable))
(ht-publics (make-core-hashtable)))
(for-each
(lambda (a)
(and (core-hashtable-ref ht-publics (cdr a) #f)
(syntax-violation
'define-library
"duplicate export identifiers"
(abbreviated-take-form form 4 8)
(cdr a)))
(core-hashtable-set! ht-publics (cdr a) #t)
(core-hashtable-set! ht-immutables (car a) #t))
exports)
(for-each
(lambda (a)
(core-hashtable-set! ht-immutables (car a) #t)
(cond ((core-hashtable-ref ht-imports (car a) #f)
=>
(lambda (deno)
(or (eq? deno (cdr a))
(syntax-violation
'define-library
"duplicate import identifiers"
(abbreviated-take-form form 4 8)
(car a)))))
(else (core-hashtable-set! ht-imports (car a) (cdr a)))))
imports)
(let ((ht-env (make-shield-id-table commands)) (ht-libenv (make-core-hashtable)))
(for-each
(lambda (a) (core-hashtable-set! ht-env (car a) (cdr a)) (core-hashtable-set! ht-libenv (car a) (cdr a)))
(core-hashtable->alist ht-imports))
(parameterize ((current-immutable-identifiers ht-immutables))
(expand-define-library-body
form
library-id
library-version
commands
exports
imports
depends
(extend-env private-primitives-environment (permute-env ht-env))
(permute-env ht-libenv)))))
(destructuring-match clauses
((('export export-spec ...) more ...)
(loop more (append exports (parse-exports form export-spec)) imports depends commands))
((('import import-spec ...) more ...)
(loop
more
exports
(append imports (parse-imports form import-spec))
(append depends (parse-depends form import-spec))
commands))
((('include path ...) more ...)
(every1 string? path)
(let ((more
`((begin ,@(apply append (map (lambda (e) (read-include-file library-name e 'include)) path)))
,@more)))
(loop more exports imports depends commands)))
((('include-ci path ...) more ...)
(every1 string? path)
(let ((more
`((begin ,@(apply append (map (lambda (e) (read-include-file library-name e 'include-ci)) path)))
,@more)))
(loop more exports imports depends commands)))
((('include-library-declarations path ...) more ...)
(every1 string? path)
(let ((more
(append
(apply
append
(map (lambda (e) (read-include-file library-name e 'include-library-declarations)) path))
more)))
(loop more exports imports depends commands)))
((('cond-expand spec ...) more ...)
(loop (append (parse-cond-expand form spec) more) exports imports depends commands))
((('begin body ...) more ...) (loop more exports imports depends (append commands body)))
(_
(syntax-violation
'define-library
"malformed library declarations"
(abbreviated-take-form form 4 8)
(car clauses))))))))
(or (= (core-hashtable-size (current-include-files)) 0)
(core-hashtable-set! library-include-dependencies library-id (current-include-files)))
coreform))))
(_ (syntax-violation 'define-library "expected library name and declarations" (abbreviated-take-form form 4 8)))))))
(define expand-define-library-body
(lambda (form library-id library-version body exports imports depends env libenv)
(define initial-libenv #f)
(define macro-defs '())
(define extend-env!
(lambda (datum1 datum2)
(and (macro? datum2) (set! macro-defs (acons datum1 datum2 macro-defs)))
(set! env (extend-env (list (cons datum1 datum2)) env))
(for-each (lambda (a) (set-cdr! (cddr a) env)) macro-defs)))
(define extend-libenv!
(lambda (datum1 datum2)
(set! libenv (extend-env (list (cons datum1 datum2)) libenv))
(current-template-environment libenv)))
(define ht-imported-immutables (make-core-hashtable))
(define expression-tag (let ((num 0)) (lambda () (set! num (+ num 1)) (string->symbol (format ".e~a" num)))))
(current-template-environment libenv)
(for-each (lambda (b) (core-hashtable-set! ht-imported-immutables (car b) #t)) imports)
(let loop ((body (flatten-begin body env)) (defs '()) (macros '()) (renames '()))
(cond ((null? body)
(rewrite-library-body
form
library-id
library-version
body
(reverse defs)
(reverse macros)
renames
exports
imports
depends
env
libenv))
((and (pair? body) (pair? (car body)) (symbol? (caar body)))
(let ((deno (env-lookup env (caar body))))
(cond ((eq? denote-begin deno) (loop (flatten-begin body env) defs macros renames))
((eq? denote-define-syntax deno)
(destructuring-match body
(((_ (? symbol? org) clause) more ...)
(begin
(and (core-hashtable-contains? ht-imported-immutables org)
(syntax-violation 'define-syntax "attempt to modify immutable binding" (car body)))
(let-values (((code . expr)
(parameterize ((current-template-environment initial-libenv))
(compile-macro (car body) clause env))))
(let ((new (generate-global-id library-id org)))
(extend-libenv! org (make-import new))
(cond ((procedure? code)
(extend-env! org (make-macro code env))
(loop
more
defs
(cons (list org 'procedure (car expr)) macros)
(acons org new renames)))
((macro-variable? code)
(extend-env! org (make-macro-variable (cadr code) env))
(loop
more
defs
(cons (list org 'variable (car expr)) macros)
(acons org new renames)))
(else
(extend-env! org (make-macro code env))
(loop
more
defs
(cons (list org 'template code) macros)
(acons org new renames))))))))
(_ (syntax-violation 'define-syntax "expected symbol and single expression" (car body)))))
((eq? denote-define deno)
(let ((def (annotate (cdr (desugar-define (car body))) (car body))))
(and (core-hashtable-contains? ht-imported-immutables (car def))
(syntax-violation 'define "attempt to modify immutable binding" (car body)))
(let ((org (car def)) (new (generate-global-id library-id (car def))))
(extend-env! org new)
(extend-libenv! org (make-import new))
(loop (cdr body) (cons def defs) macros (acons org new renames)))))
((or (macro? deno) (eq? denote-let-syntax deno) (eq? denote-letrec-syntax deno))
(let-values (((expr new) (expand-initial-forms (car body) env)))
(set! env new)
(loop (append (flatten-begin (list expr) env) (cdr body)) defs macros renames)))
(else (loop (cons `(|.define| ,(expression-tag) ,(car body)) (cdr body)) defs macros renames)))))
(else (loop (cons `(|.define| ,(expression-tag) ,(car body)) (cdr body)) defs macros renames))))))
|
d965404295888a94aacc6c4b6f48196d97592499ebc0d61c6a7de545a0c3b5ac | ghc/packages-Cabal | MyLibrary.hs | module MyLibrary where
main :: IO ()
main = error ""
| null | https://raw.githubusercontent.com/ghc/packages-Cabal/6f22f2a789fa23edb210a2591d74ea6a5f767872/cabal-testsuite/PackageTests/AutogenModules/Package/MyLibrary.hs | haskell | module MyLibrary where
main :: IO ()
main = error ""
| |
c2caf9a8c1e37d7cd9506e4aab26ef59d3e38a69ac5ed22687ad815a6cbe8fd2 | GrammaTech/sel | in-memory-fodder-database.lisp | ;;; in-memory-fodder-database.lisp --- In-memory fodder database
;;; Base class for all fodder database implementations
;;; with data stored entirely in a LISP representation
(defpackage :software-evolution-library/components/in-memory-fodder-database
(:nicknames :sel/components/in-memory-fodder-database
:sel/cp/in-memory-fodder-database)
(:use :gt/full
:software-evolution-library
:software-evolution-library/software/parseable
:software-evolution-library/software/clang
:software-evolution-library/components/searchable
:software-evolution-library/components/fodder-database)
(:export :in-memory-database
:ast-database-ht
:ast-database-list
:ast-database-full-stmt-list
:type-database-ht
:macro-database-ht))
(in-package :software-evolution-library/components/in-memory-fodder-database)
(in-readtable :curry-compose-reader-macros)
(defclass in-memory-database (fodder-database)
;; The current implementation of the database
;; has redundant data, trading space for query time.
;; It is assumed that all in-memeory databases will be fairly
small ; otherwise , Mongo or Pliny should be utilized .
((ast-database-ht
:initarg :ast-database-ht
:accessor ast-database-ht
:initform (make-hash-table :test 'equal)
:documentation
"The database of source code snippets, grouped by AST class name.")
(ast-database-list
:initarg :ast-database-list
:accessor ast-database-list
:initform nil
:documentation "The database of source code snippets as a raw list.")
(ast-database-full-stmt-list
:initarg :ast-database-full-stmt-list
:accessor ast-database-full-stmt-list
:initform nil
:documentation
"The database of source code snippets which are full statements.")
(type-database-ht
:initarg :type-database-ht
:accessor type-database-ht
:initform (make-hash-table :test 'equal)
:documentation
"An auxillary database of type snippets, grouped by hash-code")
(macro-database-ht
:initarg :macro-database-ht
:accessor macro-database-ht
:initform (make-hash-table :test 'equal)
:documentation
"An auxillary database of macro snippets, grouped by hash-code"))
(:documentation "DOCFIXME"))
(defmethod size ((db in-memory-database))
"The number of objects stored in the database DB"
(length (ast-database-list db)))
(defmethod database-emptyp ((db in-memory-database))
"True if the database DB contains no entries"
(zerop (size db)))
(defmethod find-snippets ((db in-memory-database)
&key ast-class full-stmt decls limit)
"Find LIMIT snippets stored in DB, an in-memory database.
If LIMIT is NIL or >= the number of snippets of the desired kind,
return a list of all of them. Otherwise, return a random
subset of LIMIT objects of the desired kind.
If AST-CLASS is not nil, it is the name of an AST-CLASS;
consider only ASTs of that class.
Otherwise, if FULL-STMT is true, consider only full statements.
If DECLS is :ONLY, consider only ASTs for which the :IS-DECL
property is true.
Otherwise, consider all ASTs."
(let ((snippets (nest (remove-if
(cond
((eql decls :only)
(complement {aget :is-decl}))
(decls #'null)
(t {aget :is-decl})))
(cond (ast-class
(gethash ast-class (ast-database-ht db)))
(full-stmt
(ast-database-full-stmt-list db))
(t (ast-database-list db))))))
(if (and limit (length< limit snippets))
(mapcar {aref (coerce snippets 'vector)}
(random-sample-without-replacement (length snippets) limit))
snippets)))
(defmethod find-type ((db in-memory-database) hash)
"DOCFIXME
* DB DOCFIXME
* HASH DOCFIXME
"
(let ((type (gethash hash (type-database-ht db))))
(when type (from-alist 'clang-type type))))
(defmethod find-macro ((db in-memory-database) hash)
"DOCFIXME
* DB DOCFIXME
* HASH DOCFIXME
"
(let ((macro (gethash hash (macro-database-ht db))))
(when macro (from-alist 'clang-macro macro))))
| null | https://raw.githubusercontent.com/GrammaTech/sel/a4238945dfbf98b67b9d8a10b08924634544f9f8/components/in-memory-fodder-database.lisp | lisp | in-memory-fodder-database.lisp --- In-memory fodder database
Base class for all fodder database implementations
with data stored entirely in a LISP representation
The current implementation of the database
has redundant data, trading space for query time.
It is assumed that all in-memeory databases will be fairly
otherwise , Mongo or Pliny should be utilized .
| (defpackage :software-evolution-library/components/in-memory-fodder-database
(:nicknames :sel/components/in-memory-fodder-database
:sel/cp/in-memory-fodder-database)
(:use :gt/full
:software-evolution-library
:software-evolution-library/software/parseable
:software-evolution-library/software/clang
:software-evolution-library/components/searchable
:software-evolution-library/components/fodder-database)
(:export :in-memory-database
:ast-database-ht
:ast-database-list
:ast-database-full-stmt-list
:type-database-ht
:macro-database-ht))
(in-package :software-evolution-library/components/in-memory-fodder-database)
(in-readtable :curry-compose-reader-macros)
(defclass in-memory-database (fodder-database)
((ast-database-ht
:initarg :ast-database-ht
:accessor ast-database-ht
:initform (make-hash-table :test 'equal)
:documentation
"The database of source code snippets, grouped by AST class name.")
(ast-database-list
:initarg :ast-database-list
:accessor ast-database-list
:initform nil
:documentation "The database of source code snippets as a raw list.")
(ast-database-full-stmt-list
:initarg :ast-database-full-stmt-list
:accessor ast-database-full-stmt-list
:initform nil
:documentation
"The database of source code snippets which are full statements.")
(type-database-ht
:initarg :type-database-ht
:accessor type-database-ht
:initform (make-hash-table :test 'equal)
:documentation
"An auxillary database of type snippets, grouped by hash-code")
(macro-database-ht
:initarg :macro-database-ht
:accessor macro-database-ht
:initform (make-hash-table :test 'equal)
:documentation
"An auxillary database of macro snippets, grouped by hash-code"))
(:documentation "DOCFIXME"))
(defmethod size ((db in-memory-database))
"The number of objects stored in the database DB"
(length (ast-database-list db)))
(defmethod database-emptyp ((db in-memory-database))
"True if the database DB contains no entries"
(zerop (size db)))
(defmethod find-snippets ((db in-memory-database)
&key ast-class full-stmt decls limit)
"Find LIMIT snippets stored in DB, an in-memory database.
If LIMIT is NIL or >= the number of snippets of the desired kind,
return a list of all of them. Otherwise, return a random
subset of LIMIT objects of the desired kind.
consider only ASTs of that class.
Otherwise, if FULL-STMT is true, consider only full statements.
If DECLS is :ONLY, consider only ASTs for which the :IS-DECL
property is true.
Otherwise, consider all ASTs."
(let ((snippets (nest (remove-if
(cond
((eql decls :only)
(complement {aget :is-decl}))
(decls #'null)
(t {aget :is-decl})))
(cond (ast-class
(gethash ast-class (ast-database-ht db)))
(full-stmt
(ast-database-full-stmt-list db))
(t (ast-database-list db))))))
(if (and limit (length< limit snippets))
(mapcar {aref (coerce snippets 'vector)}
(random-sample-without-replacement (length snippets) limit))
snippets)))
(defmethod find-type ((db in-memory-database) hash)
"DOCFIXME
* DB DOCFIXME
* HASH DOCFIXME
"
(let ((type (gethash hash (type-database-ht db))))
(when type (from-alist 'clang-type type))))
(defmethod find-macro ((db in-memory-database) hash)
"DOCFIXME
* DB DOCFIXME
* HASH DOCFIXME
"
(let ((macro (gethash hash (macro-database-ht db))))
(when macro (from-alist 'clang-macro macro))))
|
fb552636d0c3c285aa0758cb4874f9d4d67b46652f304b947d4ca72ef2ddfe72 | bob-cd/bob | cctray.clj | ; Copyright 2018- Rahul De
;
Use of this source code is governed by an MIT - style
; license that can be found in the LICENSE file or at
; .
(ns apiserver.cctray
(:require
[clojure.data.xml :as xml]
[failjure.core :as f]
[xtdb.api :as xt]))
(defn make-project
[{:keys [group name status completed]
:as data}]
(let [last-build-status (case status
(:passed :running) "Success"
:failed "Failure"
:stopped "Exception"
"Unknown")
last-build-label (-> data
:xt/id
clojure.core/name)]
[[:name
(format "%s:%s"
group
name)]
[:activity
(if (= status :running)
"Running"
"Sleeping")]
[:lastBuildStatus last-build-status]
[:lastBuildLabel last-build-label]
[:lastBuildTime completed]
[:webUrl "#"]]))
(defn generate-report
[db]
(f/try-all [statuses (xt/q (xt/db db)
'{:find [(pull run [:group :name :status :completed :xt/id])]
:where [[pipeline :type :pipeline]
[pipeline :group group]
[pipeline :name name]
[run :type :pipeline-run]
[run :group group]
[run :name name]]})]
(-> [:Projects (map make-project statuses)]
xml/sexp-as-element
xml/emit-str)
(f/when-failed [err]
err)))
| null | https://raw.githubusercontent.com/bob-cd/bob/7e171012f0750f108c9d8499201d1ec65f30efbb/apiserver/src/apiserver/cctray.clj | clojure | Copyright 2018- Rahul De
license that can be found in the LICENSE file or at
. | Use of this source code is governed by an MIT - style
(ns apiserver.cctray
(:require
[clojure.data.xml :as xml]
[failjure.core :as f]
[xtdb.api :as xt]))
(defn make-project
[{:keys [group name status completed]
:as data}]
(let [last-build-status (case status
(:passed :running) "Success"
:failed "Failure"
:stopped "Exception"
"Unknown")
last-build-label (-> data
:xt/id
clojure.core/name)]
[[:name
(format "%s:%s"
group
name)]
[:activity
(if (= status :running)
"Running"
"Sleeping")]
[:lastBuildStatus last-build-status]
[:lastBuildLabel last-build-label]
[:lastBuildTime completed]
[:webUrl "#"]]))
(defn generate-report
[db]
(f/try-all [statuses (xt/q (xt/db db)
'{:find [(pull run [:group :name :status :completed :xt/id])]
:where [[pipeline :type :pipeline]
[pipeline :group group]
[pipeline :name name]
[run :type :pipeline-run]
[run :group group]
[run :name name]]})]
(-> [:Projects (map make-project statuses)]
xml/sexp-as-element
xml/emit-str)
(f/when-failed [err]
err)))
|
5840fdfe0d80a8cf4f6e6a30c2323c67599cb50fad7ee2f6575e7664907e27bc | diagrams/diagrams-lib | Compile.hs | {-# LANGUAGE FlexibleContexts #-}
# LANGUAGE GADTs #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
# OPTIONS_GHC -fno - warn - unused - imports #
for Data . Semigroup
-----------------------------------------------------------------------------
-- |
-- Module : Diagrams.Attributes.Compile
Copyright : ( c ) 2014 diagrams - lib team ( see LICENSE )
-- License : BSD-style (see LICENSE)
-- Maintainer :
--
-- XXX
--
-----------------------------------------------------------------------------
module Diagrams.Attributes.Compile (
SplitAttribute(..), splitAttr
) where
import Data.Typeable
import Control.Arrow (second)
import Control.Lens ((%~), (&), _Wrapping')
import Data.Kind (Type)
import qualified Data.HashMap.Strict as HM
import Data.Semigroup
import Data.Tree (Tree (..))
import Diagrams.Core
import Diagrams.Core.Style (Style (..), attributeToStyle)
import Diagrams.Core.Types (RNode (..), RTree)
------------------------------------------------------------
-- This is a sort of roundabout, overly-general way to define
-- splitFills; it's done this way to facilitate testing.
class (AttributeClass (AttrType code), Typeable (PrimType code)) => SplitAttribute code where
type AttrType code :: Type
type PrimType code :: Type
primOK :: code -> PrimType code -> Bool
-- | Push certain attributes down until they are at the roots of trees
-- containing only "safe" nodes. In particular this is used to push
-- fill attributes down until they are over only loops; see
-- 'splitFills'.
splitAttr :: forall code b v n a. SplitAttribute code => code -> RTree b v n a -> RTree b v n a
splitAttr code = fst . splitAttr' Nothing
where
-- splitAttr' is where the most interesting logic happens.
-- Mutually recursive with splitAttr'Forest. rebuildNode and
-- applyMfc are helper functions.
--
-- Input: attribute to apply to "safe" subtrees.
--
-- Output: tree with attributes pushed down appropriately, and
a indicating whether the tree contains only " safe " prims ( True ) or
-- contains some unsafe ones (False).
splitAttr' :: Maybe (AttrType code) -> RTree b v n a -> (RTree b v n a, Bool)
-- RStyle node: Check for the special attribute, and split it out of
the style , combining it with the incoming attribute . Recurse and
-- rebuild. The tricky bit is that we use some knot-tying to
-- determine the right attribute to pass down to the subtrees based
on this computed : if all subtrees are safe , then we will
-- apply the attribute at the root of this tree, and pass Nothing to
-- all the subtrees. Otherwise, we pass the given attribute along.
-- This works out because the attribute does not need to be
-- pattern-matched until actually applying it at some root, so the
recursion can proceed and the values be computed with the
-- actual value of the attributes nodes filled in lazily.
splitAttr' mattr (Node (RStyle sty) cs) = (t', ok)
where
mattr' = mattr <> getAttr sty
sty' = sty & _Wrapping' Style %~ HM.delete ty
ty = typeOf (undefined :: AttrType code)
(cs', ok) = splitAttr'Forest mattr' cs
t' | ok = rebuildNode Nothing ok (RStyle sty) cs'
| otherwise = rebuildNode mattr ok (RStyle sty') cs'
-- RPrim node: check whether it
-- * is some sort of prim not under consideration: don't apply the attribute; return True
-- * is unsafe: don't apply the attribute; return False
-- * is safe : do apply the attribute; return True
splitAttr' mattr (Node rp@(RPrim (Prim prm)) _) =
case cast prm :: Maybe (PrimType code) of
Nothing -> (Node rp [], True)
Just p ->
if primOK code p
then (rebuildNode mattr True rp [], True)
else (Node rp [], False)
-- RFrozenTr, RAnnot, REmpty cases: just recurse and rebuild. Note
-- we assume that transformations do not affect the attributes.
splitAttr' mattr (Node nd cs) = (t', ok)
where
(cs', ok) = splitAttr'Forest mattr cs
t' = rebuildNode mattr ok nd cs'
-- Recursively call splitAttr' on all subtrees, returning the
logical AND of the results returned ( the whole forest is
-- safe iff all subtrees are).
splitAttr'Forest :: Maybe (AttrType code) -> [RTree b v n a] -> ([RTree b v n a], Bool)
splitAttr'Forest mattr cs = (cs', ok)
where
(cs', ok) = second and . unzip . map (splitAttr' mattr) $ cs
Given a fill attribute , a indicating whether the given
-- subforest contains only loops, a node, and a subforest, rebuild a
-- tree, applying the fill attribute as appropriate (only if the
-- Bool is true and the attribute is not Nothing).
rebuildNode :: Maybe (AttrType code) -> Bool -> RNode b v n a -> [RTree b v n a] -> RTree b v n a
rebuildNode mattr ok nd cs
| ok = applyMattr mattr (Node nd cs)
| otherwise = Node nd cs
-- Prepend a new fill color node if Just; the identity function if
-- Nothing.
applyMattr :: Maybe (AttrType code) -> RTree b v n a -> RTree b v n a
applyMattr Nothing t = t
applyMattr (Just a) t = Node (RStyle $ attributeToStyle (Attribute a)) [t]
| null | https://raw.githubusercontent.com/diagrams/diagrams-lib/6f66ce6bd5aed81d8a1330c143ea012724dbac3c/src/Diagrams/Attributes/Compile.hs | haskell | # LANGUAGE FlexibleContexts #
---------------------------------------------------------------------------
|
Module : Diagrams.Attributes.Compile
License : BSD-style (see LICENSE)
Maintainer :
XXX
---------------------------------------------------------------------------
----------------------------------------------------------
This is a sort of roundabout, overly-general way to define
splitFills; it's done this way to facilitate testing.
| Push certain attributes down until they are at the roots of trees
containing only "safe" nodes. In particular this is used to push
fill attributes down until they are over only loops; see
'splitFills'.
splitAttr' is where the most interesting logic happens.
Mutually recursive with splitAttr'Forest. rebuildNode and
applyMfc are helper functions.
Input: attribute to apply to "safe" subtrees.
Output: tree with attributes pushed down appropriately, and
contains some unsafe ones (False).
RStyle node: Check for the special attribute, and split it out of
rebuild. The tricky bit is that we use some knot-tying to
determine the right attribute to pass down to the subtrees based
apply the attribute at the root of this tree, and pass Nothing to
all the subtrees. Otherwise, we pass the given attribute along.
This works out because the attribute does not need to be
pattern-matched until actually applying it at some root, so the
actual value of the attributes nodes filled in lazily.
RPrim node: check whether it
* is some sort of prim not under consideration: don't apply the attribute; return True
* is unsafe: don't apply the attribute; return False
* is safe : do apply the attribute; return True
RFrozenTr, RAnnot, REmpty cases: just recurse and rebuild. Note
we assume that transformations do not affect the attributes.
Recursively call splitAttr' on all subtrees, returning the
safe iff all subtrees are).
subforest contains only loops, a node, and a subforest, rebuild a
tree, applying the fill attribute as appropriate (only if the
Bool is true and the attribute is not Nothing).
Prepend a new fill color node if Just; the identity function if
Nothing. | # LANGUAGE GADTs #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
# OPTIONS_GHC -fno - warn - unused - imports #
for Data . Semigroup
Copyright : ( c ) 2014 diagrams - lib team ( see LICENSE )
module Diagrams.Attributes.Compile (
SplitAttribute(..), splitAttr
) where
import Data.Typeable
import Control.Arrow (second)
import Control.Lens ((%~), (&), _Wrapping')
import Data.Kind (Type)
import qualified Data.HashMap.Strict as HM
import Data.Semigroup
import Data.Tree (Tree (..))
import Diagrams.Core
import Diagrams.Core.Style (Style (..), attributeToStyle)
import Diagrams.Core.Types (RNode (..), RTree)
class (AttributeClass (AttrType code), Typeable (PrimType code)) => SplitAttribute code where
type AttrType code :: Type
type PrimType code :: Type
primOK :: code -> PrimType code -> Bool
splitAttr :: forall code b v n a. SplitAttribute code => code -> RTree b v n a -> RTree b v n a
splitAttr code = fst . splitAttr' Nothing
where
a indicating whether the tree contains only " safe " prims ( True ) or
splitAttr' :: Maybe (AttrType code) -> RTree b v n a -> (RTree b v n a, Bool)
the style , combining it with the incoming attribute . Recurse and
on this computed : if all subtrees are safe , then we will
recursion can proceed and the values be computed with the
splitAttr' mattr (Node (RStyle sty) cs) = (t', ok)
where
mattr' = mattr <> getAttr sty
sty' = sty & _Wrapping' Style %~ HM.delete ty
ty = typeOf (undefined :: AttrType code)
(cs', ok) = splitAttr'Forest mattr' cs
t' | ok = rebuildNode Nothing ok (RStyle sty) cs'
| otherwise = rebuildNode mattr ok (RStyle sty') cs'
splitAttr' mattr (Node rp@(RPrim (Prim prm)) _) =
case cast prm :: Maybe (PrimType code) of
Nothing -> (Node rp [], True)
Just p ->
if primOK code p
then (rebuildNode mattr True rp [], True)
else (Node rp [], False)
splitAttr' mattr (Node nd cs) = (t', ok)
where
(cs', ok) = splitAttr'Forest mattr cs
t' = rebuildNode mattr ok nd cs'
logical AND of the results returned ( the whole forest is
splitAttr'Forest :: Maybe (AttrType code) -> [RTree b v n a] -> ([RTree b v n a], Bool)
splitAttr'Forest mattr cs = (cs', ok)
where
(cs', ok) = second and . unzip . map (splitAttr' mattr) $ cs
Given a fill attribute , a indicating whether the given
rebuildNode :: Maybe (AttrType code) -> Bool -> RNode b v n a -> [RTree b v n a] -> RTree b v n a
rebuildNode mattr ok nd cs
| ok = applyMattr mattr (Node nd cs)
| otherwise = Node nd cs
applyMattr :: Maybe (AttrType code) -> RTree b v n a -> RTree b v n a
applyMattr Nothing t = t
applyMattr (Just a) t = Node (RStyle $ attributeToStyle (Attribute a)) [t]
|
58efb9605395cb2c1dac2f478e33016666c841862efccae7f5b1f5b7f87d8bc9 | burgerdev/ocaml-rfc7748 | curve.mli |
module type Field = sig
type t
val zero: t
val ( + ): t -> t -> t
val ( - ): t -> t -> t
val double: t -> t
val one: t
val ( * ): t -> t -> t
val ( / ): t -> t -> t
val square: t -> t
end
module type Integral = sig
type t
val zero: t
val one: t
val ( + ): t -> t -> t
val ( mod ): t -> t -> t
val ( asr ): t -> int -> t
val logxor: t -> t -> t
val gt: t -> t -> bool
end
module type Edwards = sig
type integral
type element
val bits: int
val a24: element
val constant_time_conditional_swap: integral -> element -> element -> element * element
end
module Make: functor (F: Field)(I: Integral)(E: Edwards with type integral = I.t and type element = F.t) -> sig
val scale: I.t -> F.t -> F.t
end
| null | https://raw.githubusercontent.com/burgerdev/ocaml-rfc7748/ed034213ff02cd55870ae1387e91deebc9838eb4/src/curve.mli | ocaml |
module type Field = sig
type t
val zero: t
val ( + ): t -> t -> t
val ( - ): t -> t -> t
val double: t -> t
val one: t
val ( * ): t -> t -> t
val ( / ): t -> t -> t
val square: t -> t
end
module type Integral = sig
type t
val zero: t
val one: t
val ( + ): t -> t -> t
val ( mod ): t -> t -> t
val ( asr ): t -> int -> t
val logxor: t -> t -> t
val gt: t -> t -> bool
end
module type Edwards = sig
type integral
type element
val bits: int
val a24: element
val constant_time_conditional_swap: integral -> element -> element -> element * element
end
module Make: functor (F: Field)(I: Integral)(E: Edwards with type integral = I.t and type element = F.t) -> sig
val scale: I.t -> F.t -> F.t
end
| |
be209828271a9921a9ccd391e10da4de946e0a32ea569edf5ad440c149db39e1 | jacius/lispbuilder | package.lisp | ;;;; lispbuilder-sdl-image-binaries
(in-package #:cl-user)
(defpackage #:lispbuilder-sdl-image-binaries
(:use #:cl)
(:nicknames #:sdl-image-bin)
(:documentation "The main package of `lispbuilder-sdl-image'.")
(:export
;; globals.lisp
#:*dll-path*)) | null | https://raw.githubusercontent.com/jacius/lispbuilder/e693651b95f6818e3cab70f0074af9f9511584c3/lispbuilder-sdl-image/bin/package.lisp | lisp | lispbuilder-sdl-image-binaries
globals.lisp
|
(in-package #:cl-user)
(defpackage #:lispbuilder-sdl-image-binaries
(:use #:cl)
(:nicknames #:sdl-image-bin)
(:documentation "The main package of `lispbuilder-sdl-image'.")
(:export
#:*dll-path*)) |
fae1e5c062a3ea4daa4fc171f1d06480880c3dc335d4dcb3c7d4fba45584df13 | binsec/binsec | senv.ml | (**************************************************************************)
This file is part of BINSEC .
(* *)
Copyright ( C ) 2016 - 2022
CEA ( Commissariat à l'énergie atomique et aux énergies
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* *)
(**************************************************************************)
let solvers =
let open Formula_options in
[ Bitwuzla; Boolector; Z3; CVC4; Yices ]
let map =
let open Formula_options in
let open Smt_options in
function
| Auto | Bitwuzla_native -> assert false
| Bitwuzla_smtlib -> Bitwuzla
| Boolector_smtlib -> Boolector
| Z3_smtlib -> Z3
| CVC4_smtlib -> CVC4
| Yices_smtlib -> Yices
let get_solver_factory () =
let open Formula_options in
let open Smt_options in
match Smt_options.SMTSolver.get () with
| (Smt_options.Auto | Smt_options.Bitwuzla_native) when Smt_bitwuzla.available
->
(module Native_solver.Solver : Solver_sig.FACTORY)
| Auto -> (
try
let solver = List.find Prover.ping solvers in
Logger.info "Found %a in the path." Prover.pp solver;
Solver.set solver;
(module Smt2_solver.Solver : Solver_sig.FACTORY)
with Not_found -> Logger.fatal "No SMT solver found.")
| Bitwuzla_native ->
Logger.fatal "Native bitwuzla binding is required but not available."
| solver when Prover.ping (map solver) ->
Solver.set (map solver);
(module Smt2_solver.Solver : Solver_sig.FACTORY)
| solver ->
Logger.fatal "%a is required but not available in path." Prover.pp
(map solver)
exception Undef = Types.Undef
exception Uninterp = Types.Uninterp
exception Unknown = Types.Unknown
exception Non_unique = Types.Non_unique
exception Non_mergeable = Types.Non_mergeable
type 'a test = 'a Types.test =
| True of 'a
| False of 'a
| Both of { t : 'a; f : 'a }
(* utils *)
let byte_size = Natural.to_int Basic_types.Constants.bytesize
module BiMap = Basic_types.BigInt.Map
module NiTbl = Basic_types.Int.Htbl
module Sname = Suid
open Sexpr
module BiItM = Imap
module BvSet = Set.Make (Expr)
module S = Basic_types.String.Map
module I = Basic_types.Int.Map
module R = Basic_types.Int.Htbl
module State (F : Solver_sig.FACTORY) (QS : Types.QUERY_STATISTICS) = struct
type t = {
constraints : Expr.t list;
(* reversed sequence of assertions *)
constset : BvSet.t;
vsymbols : Expr.t I.t;
(* collection of visible symbols *)
varrays : Memory.t S.t;
(* collection of visible arrays *)
vmemory : Memory.t;
(* visible memory *)
fid : Sname.t;
(* unique indice counter *)
fvariables : Expr.t list S.t;
(* collection of free variables *)
farrays : Memory.t S.t;
(* collection of free array *)
ilocs : (Z.t * Loader_buf.t) BiItM.t;
(* set of initialized memory locations *)
alocs : (Z.t * char) list ref;
(* shared list of already accessed initialized memory locations *)
model : Model.t; (* a model that satisfy constraints *)
}
let pp ppf state =
Model.pp ppf state.fvariables
(Kernel_options.Machine.word_size ())
state.model
let empty () =
{
constraints = [];
constset = BvSet.empty;
vsymbols = I.empty;
varrays = S.empty;
vmemory = Memory.Root;
fid = Sname.(incr zero);
zero is reserved for initial memory
fvariables = S.empty;
farrays = S.empty;
ilocs = BiItM.empty;
alocs = ref [];
model = Model.empty ();
}
let fresh ({ id; name; size; _ } : Types.Var.t) state =
let v = Expr.var (Sname.to_string state.fid) size name in
let fid = Sname.incr state.fid in
let h =
match S.find name state.fvariables with
| exception Not_found -> [ v ]
| h -> v :: h
in
let fvariables = S.add name h state.fvariables in
let vsymbols = I.add id v state.vsymbols in
{ state with vsymbols; fid; fvariables }
let alloc ~array state =
let symbol = Memory.Symbol array in
{
state with
varrays = S.add array symbol state.varrays;
farrays = S.add array symbol state.farrays;
}
let assign ({ id; _ } : Types.Var.t) value state =
{ state with vsymbols = I.add id value state.vsymbols }
let write ~addr value dir state =
{ state with vmemory = Memory.write ~addr value dir state.vmemory }
let store name ~addr value dir state =
try
let ar = S.find name state.varrays in
{
state with
varrays = S.add name (Memory.write ~addr value dir ar) state.varrays;
}
with Not_found -> raise_notrace (Uninterp name)
let read ~addr bytes dir state =
let bytes, vmemory = Memory.read ~addr bytes dir state.vmemory in
if state.vmemory == vmemory then (bytes, state)
else (bytes, { state with vmemory })
let select name ~addr bytes dir state =
try
let array = S.find name state.varrays in
let bytes, array' = Memory.read ~addr bytes dir array in
if array == array' then (bytes, state)
else (bytes, { state with varrays = S.add name array' state.varrays })
with Not_found -> raise_notrace (Uninterp name)
let memcpy ~addr len orig state =
let base = Bv.value_of addr in
let ilocs = BiItM.add ~base len (Bv.value_of addr, orig) state.ilocs in
let vmemory =
Memory.source ~addr:(Expr.constant addr) ~len orig state.vmemory
in
{ state with ilocs; vmemory }
module Engine (Solver : Solver_sig.S) = struct
type result = Unsat | Sat of t
let extract_memory state =
match Solver.get_array Memory.Root with
| (exception Not_found) | [||] -> (BiTbl.create 0, !(state.alocs))
| assignment ->
let memory = BiTbl.create (Array.length assignment) in
let alocs =
Array.fold_left
(fun alocs (addr, value) ->
match BiItM.find addr state.ilocs with
| exception Not_found ->
BiTbl.add memory addr value;
alocs
| base, img ->
let offset = Z.to_int (Z.sub addr base) in
let value' =
Char.unsafe_chr
(if offset < Bigarray.Array1.dim img then
Bigarray.Array1.get img offset
else 0)
in
if value <> value' then (addr, value') :: alocs else alocs)
!(state.alocs) assignment
in
(memory, alocs)
let extract_array name =
match Solver.get_array name with
| (exception Not_found) | [||] -> BiTbl.create 0
| assignment ->
let array = BiTbl.create (Array.length assignment) in
Array.iter
(fun (addr, value) -> BiTbl.add array addr value)
assignment;
array
let extract_arrays state =
let arrays = StTbl.create 5 in
S.iter
(fun name symbol -> StTbl.add arrays name (extract_array symbol))
state.farrays;
arrays
let extract_vars state =
let vars = BvTbl.create 32 in
S.iter
(fun _ ->
List.iter (fun bv ->
match Solver.get bv with
| exception Not_found -> ()
| x ->
BvTbl.add vars bv
(Bitvector.create Solver.(get_value x) (Expr.sizeof bv))))
state.fvariables;
vars
let rec force_lazy_init alocs state =
if alocs == !(state.alocs) = false then
match alocs with
| [] -> ()
| (addr, value) :: alocs ->
Solver.set_memory ~addr (Z.of_int (Char.code value));
force_lazy_init alocs state
let enumerate =
let rec iter state e expr size n enum =
if n = 0 then enum
else
match Solver.check_sat () with
| Unknown ->
QS.Solver.incr_err ();
raise Unknown
| Unsat ->
QS.Solver.incr_unsat ();
enum
| Sat ->
QS.Solver.incr_sat ();
let memory, alocs = extract_memory state in
if alocs == !(state.alocs) = false then (
force_lazy_init alocs state;
state.alocs := alocs;
iter state e expr size n enum)
else
let x = Solver.get_value expr in
let b = Bv.create x size in
let cond = Expr.equal e (Expr.constant b) in
let state' =
{
state with
constraints = cond :: state.constraints;
constset = BvSet.add cond state.constset;
model = (extract_vars state, memory, extract_arrays state);
}
in
Solver.neq expr x;
iter state e expr size (n - 1) ((b, state') :: enum)
in
fun e ?(n = (1 lsl Expr.sizeof e) - 1) ?(except = []) state ->
let size = Expr.sizeof e in
let expr = Solver.bind state.fid e state.constraints in
List.iter
(fun (addr, value) ->
Solver.set_memory ~addr (Z.of_int (Char.code value)))
!(state.alocs);
let init =
let bv = Model.eval state.model e in
if List.mem bv except then []
else (
QS.Preprocess.incr_const ();
Solver.neq expr (Bitvector.value_of bv);
let cond = Expr.equal e (Expr.constant bv) in
[
( bv,
{
state with
constraints = cond :: state.constraints;
constset = BvSet.add cond state.constset;
} );
])
in
List.iter (fun bv -> Solver.neq expr (Bitvector.value_of bv)) except;
iter state e expr size (n - 1) init
let check_sat =
let rec check_sat_true state =
match Solver.check_sat () with
| Unknown -> raise Unknown
| Unsat -> Unsat
| Sat ->
let memory, alocs = extract_memory state in
if alocs == !(state.alocs) = false then (
force_lazy_init alocs state;
state.alocs := alocs;
check_sat_true state)
else
Sat
{
state with
model = (extract_vars state, memory, extract_arrays state);
}
in
fun state ->
Solver.put state.fid state.constraints;
List.iter
(fun (addr, value) ->
Solver.set_memory ~addr (Z.of_int (Char.code value)))
!(state.alocs);
check_sat_true state
let close () = Solver.close ()
end
let assume cond state =
if Expr.is_equal cond Expr.one then (
QS.Preprocess.incr_sat ();
Some state)
else if Expr.is_equal cond Expr.zero then (
QS.Preprocess.incr_unsat ();
None)
else if BvSet.mem cond state.constset then (
QS.Preprocess.incr_sat ();
Some state)
else if BvSet.mem (Expr.lognot cond) state.constset then (
QS.Preprocess.incr_unsat ();
None)
else
let state =
{
state with
constraints = cond :: state.constraints;
constset = BvSet.add cond state.constset;
}
in
if Bitvector.zero = Model.eval state.model cond then (
QS.Solver.start_timer ();
let open Engine (F ()) in
let r =
match check_sat state with
| exception Unknown ->
QS.Solver.incr_err ();
raise Unknown
| Unsat ->
QS.Solver.incr_unsat ();
None
| Sat state ->
QS.Solver.incr_sat ();
Some state
in
close ();
QS.Solver.stop_timer ();
r)
else (
QS.Preprocess.incr_sat ();
Some state)
let test cond state =
if Expr.is_equal cond Expr.one then (
QS.Preprocess.incr_sat ();
True state)
else if Expr.is_equal cond Expr.zero then (
QS.Preprocess.incr_unsat ();
False state)
else if BvSet.mem cond state.constset then (
QS.Preprocess.incr_sat ();
True state)
else if BvSet.mem (Expr.lognot cond) state.constset then (
QS.Preprocess.incr_unsat ();
False state)
else
let t =
{
state with
constraints = cond :: state.constraints;
constset = BvSet.add cond state.constset;
}
in
let ncond = Expr.lognot cond in
let f =
{
state with
constraints = ncond :: state.constraints;
constset = BvSet.add ncond state.constset;
}
in
let e = Model.eval state.model cond in
let s = if Bv.is_zero e then t else f in
QS.Solver.start_timer ();
let open Engine (F ()) in
let r =
match check_sat s with
| exception Unknown ->
QS.Solver.incr_err ();
raise Unknown
| Unsat ->
QS.Solver.incr_unsat ();
if Bv.is_zero e then False f else True t
| Sat state ->
QS.Solver.incr_sat ();
if Bv.is_zero e then Both { t = state; f }
else Both { t; f = state }
in
close ();
QS.Solver.stop_timer ();
r
let enumerate =
let with_solver e ?n ?except state =
QS.Solver.start_timer ();
let open Engine (F ()) in
let r = enumerate e ?n ?except state in
close ();
QS.Solver.stop_timer ();
r
in
fun e ?n ?(except = []) state ->
match (e, n) with
| Expr.Cst bv, _ when List.mem bv except = false ->
QS.Preprocess.incr_const ();
[ (bv, state) ]
| Expr.Cst _, _ ->
QS.Preprocess.incr_const ();
[]
| _, Some 1 ->
let bv = Model.eval state.model e in
if List.mem bv except then with_solver e ?n ~except state
else (
QS.Preprocess.incr_const ();
let cond = Expr.equal e (Expr.constant bv) in
[
( bv,
{
state with
constraints = cond :: state.constraints;
constset = BvSet.add cond state.constset;
} );
])
| _, _ -> with_solver e ?n ~except state
let merge t t' =
if t == t' then t
else if
t.fid = t'.fid
&& t.fvariables == t'.fvariables
&& t.farrays == t'.farrays && t.ilocs == t'.ilocs
then
match (t.constraints, t'.constraints) with
| c :: constraints, c' :: constraints'
when constraints == constraints' && Expr.is_equal c (Expr.lognot c') ->
let constset = BvSet.remove c t.constset
and vsymbols =
if t.vsymbols == t'.vsymbols then t.vsymbols
else
I.merge
(fun _ o0 o1 ->
match (o0, o1) with
| Some e0, Some e1 ->
if Expr.is_equal e0 e1 then o0
else Some (Expr.ite c e0 e1)
| (Some _ | None), (Some _ | None) ->
raise_notrace Non_mergeable)
t.vsymbols t'.vsymbols
and varrays =
if t.varrays == t'.varrays then t.varrays
else
S.merge
(fun _ o0 o1 ->
match (o0, o1) with
| Some a0, Some a1 -> Some (Memory.merge c a0 a1)
| (Some _ | None), (Some _ | None) ->
raise_notrace Non_mergeable)
t.varrays t'.varrays
and vmemory = Memory.merge c t.vmemory t'.vmemory
and fid = t.fid
and fvariables = t.fvariables
and farrays = t.farrays
and ilocs = t.ilocs
and alocs = t.alocs
and model = t.model in
{
constraints;
constset;
vsymbols;
varrays;
vmemory;
fid;
fvariables;
farrays;
ilocs;
alocs;
model;
}
| _ -> raise_notrace Non_mergeable
else raise_notrace Non_mergeable
module Value = struct
type t = Expr.t
let constant = Expr.constant
let lookup ({ id; _ } as var : Types.Var.t) t =
try I.find id t.vsymbols with Not_found -> raise_notrace (Undef var)
let read = read
let select = select
let unary = Expr.unary
let binary = Expr.binary
let ite = Expr.ite
let uop e = function
| Dba.Unary_op.Not -> Term.Not
| Dba.Unary_op.UMinus -> Term.Minus
| Dba.Unary_op.Sext n -> Term.Sext (n - Dba.Expr.size_of e)
| Dba.Unary_op.Uext n -> Term.Uext (n - Dba.Expr.size_of e)
| Dba.Unary_op.Restrict interval -> Term.Restrict interval
let bop op =
let open Dba.Binary_op in
match op with
| Plus -> Term.Plus
| Minus -> Term.Minus
| Mult -> Term.Mul
| DivU -> Term.Udiv
| DivS -> Term.Sdiv
| ModU -> Term.Umod
| ModS -> Term.Smod
| Eq -> Term.Eq
| Diff -> Term.Diff
| LeqU -> Term.Ule
| LtU -> Term.Ult
| GeqU -> Term.Uge
| GtU -> Term.Ugt
| LeqS -> Term.Sle
| LtS -> Term.Slt
| GeqS -> Term.Sge
| GtS -> Term.Sgt
| Xor -> Term.Xor
| And -> Term.And
| Or -> Term.Or
| Concat -> Term.Concat
| LShift -> Term.Lsl
| RShiftU -> Term.Lsr
| RShiftS -> Term.Asr
| LeftRotate -> Term.Rol
| RightRotate -> Term.Ror
let rec eval (e : Types.Expr.t) t =
match e with
| Cst bv | Var { info = Symbol (_, (lazy bv)); _ } -> constant bv
| Var var -> lookup var t
| Load (len, dir, addr, None) -> fst (read ~addr:(eval addr t) len dir t)
| Load (len, dir, addr, Some name) ->
fst (select name ~addr:(eval addr t) len dir t)
| Unary (f, x) -> unary (uop x f) (eval x t)
| Binary (f, x, y) -> binary (bop f) (eval x t) (eval y t)
| Ite (c, r, e) -> ite (eval c t) (eval r t) (eval e t)
end
let get_value (e : Expr.t) _ =
match e with Cst bv -> bv | _ -> raise_notrace Non_unique
let pp_smt (target : Types.target) ppf t =
let module P = Smt2_solver.Printer in
let ctx =
P.create ~debug:(fun ~name ~label -> label ^ name) ~next_id:t.fid ()
in
(* visit assertions *)
List.iter (P.visit_bl ctx) t.constraints;
(* visit terms *)
let defs =
match target with
| Some defs ->
let rec proceed defs t =
try
List.map
(fun (expr, name) ->
let expr = Value.eval expr t in
P.visit_bv ctx expr;
(expr, name))
defs
with
| Undef var -> proceed defs (fresh var t)
| Uninterp array -> proceed defs (alloc ~array t)
in
proceed defs t
| None ->
P.visit_ax ctx t.vmemory;
List.rev
(I.fold
(fun id expr defs ->
P.visit_bv ctx expr;
(expr, (Dba.Var.from_id id).name) :: defs)
t.vsymbols [])
in
Format.pp_open_vbox ppf 0;
(* print declarations *)
P.pp_print_decls ppf ctx;
(* print definitions *)
P.pp_print_defs ppf ctx;
List.iter
(fun (bv, name) ->
Format.fprintf ppf "@[<h>(define-fun %s () (_ BitVec %d)@ " name
(Expr.sizeof bv);
P.pp_print_bv ctx ppf bv;
Format.fprintf ppf ")@]@ ")
defs;
if target = None then
Format.fprintf ppf
"@[<h>(define-fun memory () (Array (_ BitVec %d) (_ BitVec 8))@ %a)@]"
(Kernel_options.Machine.word_size ())
(P.pp_print_ax ctx) t.vmemory;
(* print assertions *)
List.iter
(fun bl ->
Format.pp_open_hbox ppf ();
Format.pp_print_string ppf "(assert ";
P.pp_print_bl ctx ppf bl;
Format.pp_print_char ppf ')';
Format.pp_close_box ppf ();
Format.pp_print_space ppf ())
t.constraints;
Format.pp_close_box ppf ()
let as_ascii ~name t =
let buf = Buffer.create 16 in
List.iter (fun var ->
assert (Expr.sizeof var mod byte_size = 0);
let rec iter bv =
let size = Bitvector.size_of bv in
if size = byte_size then Buffer.add_char buf (Bitvector.to_char bv)
else
let byte = Bitvector.extract bv { Interval.lo = 0; hi = 7 } in
Buffer.add_char buf (Bitvector.to_char byte);
iter (Bitvector.extract bv { Interval.lo = 8; hi = size - 1 })
in
iter (Model.eval t.model var))
@@ List.rev @@ S.find name t.fvariables;
Buffer.contents buf
let as_c_string ~name t =
try
let ar = S.find name t.varrays in
let buf = Buffer.create 16 in
let rec iter addr =
let byte =
Model.eval t.model (fst (Memory.read ~addr 1 Machine.LittleEndian ar))
in
if Bitvector.is_zeros byte then Buffer.contents buf
else (
Buffer.add_char buf (Bitvector.to_char byte);
iter (Expr.addi addr 1))
in
iter (Expr.zeros (Kernel_options.Machine.word_size ()))
with Not_found -> ""
let to_formula t =
let module C = Smt2_solver.Cross in
let ctx =
C.create ~debug:(fun ~name ~label -> label ^ name) ~next_id:t.fid ()
in
List.iter (C.assert_bl ctx) t.constraints;
C.define_ax ctx "memory" t.vmemory;
I.iter
(fun id expr -> C.define_bv ctx (Dba.Var.from_id id).name expr)
t.vsymbols;
C.to_formula ctx
end
| null | https://raw.githubusercontent.com/binsec/binsec/22ee39aad58219e8837b6ba15f150ba04a498b63/src/sse/term/senv.ml | ocaml | ************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
utils
reversed sequence of assertions
collection of visible symbols
collection of visible arrays
visible memory
unique indice counter
collection of free variables
collection of free array
set of initialized memory locations
shared list of already accessed initialized memory locations
a model that satisfy constraints
visit assertions
visit terms
print declarations
print definitions
print assertions | This file is part of BINSEC .
Copyright ( C ) 2016 - 2022
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
let solvers =
let open Formula_options in
[ Bitwuzla; Boolector; Z3; CVC4; Yices ]
let map =
let open Formula_options in
let open Smt_options in
function
| Auto | Bitwuzla_native -> assert false
| Bitwuzla_smtlib -> Bitwuzla
| Boolector_smtlib -> Boolector
| Z3_smtlib -> Z3
| CVC4_smtlib -> CVC4
| Yices_smtlib -> Yices
let get_solver_factory () =
let open Formula_options in
let open Smt_options in
match Smt_options.SMTSolver.get () with
| (Smt_options.Auto | Smt_options.Bitwuzla_native) when Smt_bitwuzla.available
->
(module Native_solver.Solver : Solver_sig.FACTORY)
| Auto -> (
try
let solver = List.find Prover.ping solvers in
Logger.info "Found %a in the path." Prover.pp solver;
Solver.set solver;
(module Smt2_solver.Solver : Solver_sig.FACTORY)
with Not_found -> Logger.fatal "No SMT solver found.")
| Bitwuzla_native ->
Logger.fatal "Native bitwuzla binding is required but not available."
| solver when Prover.ping (map solver) ->
Solver.set (map solver);
(module Smt2_solver.Solver : Solver_sig.FACTORY)
| solver ->
Logger.fatal "%a is required but not available in path." Prover.pp
(map solver)
exception Undef = Types.Undef
exception Uninterp = Types.Uninterp
exception Unknown = Types.Unknown
exception Non_unique = Types.Non_unique
exception Non_mergeable = Types.Non_mergeable
type 'a test = 'a Types.test =
| True of 'a
| False of 'a
| Both of { t : 'a; f : 'a }
let byte_size = Natural.to_int Basic_types.Constants.bytesize
module BiMap = Basic_types.BigInt.Map
module NiTbl = Basic_types.Int.Htbl
module Sname = Suid
open Sexpr
module BiItM = Imap
module BvSet = Set.Make (Expr)
module S = Basic_types.String.Map
module I = Basic_types.Int.Map
module R = Basic_types.Int.Htbl
module State (F : Solver_sig.FACTORY) (QS : Types.QUERY_STATISTICS) = struct
type t = {
constraints : Expr.t list;
constset : BvSet.t;
vsymbols : Expr.t I.t;
varrays : Memory.t S.t;
vmemory : Memory.t;
fid : Sname.t;
fvariables : Expr.t list S.t;
farrays : Memory.t S.t;
ilocs : (Z.t * Loader_buf.t) BiItM.t;
alocs : (Z.t * char) list ref;
}
let pp ppf state =
Model.pp ppf state.fvariables
(Kernel_options.Machine.word_size ())
state.model
let empty () =
{
constraints = [];
constset = BvSet.empty;
vsymbols = I.empty;
varrays = S.empty;
vmemory = Memory.Root;
fid = Sname.(incr zero);
zero is reserved for initial memory
fvariables = S.empty;
farrays = S.empty;
ilocs = BiItM.empty;
alocs = ref [];
model = Model.empty ();
}
let fresh ({ id; name; size; _ } : Types.Var.t) state =
let v = Expr.var (Sname.to_string state.fid) size name in
let fid = Sname.incr state.fid in
let h =
match S.find name state.fvariables with
| exception Not_found -> [ v ]
| h -> v :: h
in
let fvariables = S.add name h state.fvariables in
let vsymbols = I.add id v state.vsymbols in
{ state with vsymbols; fid; fvariables }
let alloc ~array state =
let symbol = Memory.Symbol array in
{
state with
varrays = S.add array symbol state.varrays;
farrays = S.add array symbol state.farrays;
}
let assign ({ id; _ } : Types.Var.t) value state =
{ state with vsymbols = I.add id value state.vsymbols }
let write ~addr value dir state =
{ state with vmemory = Memory.write ~addr value dir state.vmemory }
let store name ~addr value dir state =
try
let ar = S.find name state.varrays in
{
state with
varrays = S.add name (Memory.write ~addr value dir ar) state.varrays;
}
with Not_found -> raise_notrace (Uninterp name)
let read ~addr bytes dir state =
let bytes, vmemory = Memory.read ~addr bytes dir state.vmemory in
if state.vmemory == vmemory then (bytes, state)
else (bytes, { state with vmemory })
let select name ~addr bytes dir state =
try
let array = S.find name state.varrays in
let bytes, array' = Memory.read ~addr bytes dir array in
if array == array' then (bytes, state)
else (bytes, { state with varrays = S.add name array' state.varrays })
with Not_found -> raise_notrace (Uninterp name)
let memcpy ~addr len orig state =
let base = Bv.value_of addr in
let ilocs = BiItM.add ~base len (Bv.value_of addr, orig) state.ilocs in
let vmemory =
Memory.source ~addr:(Expr.constant addr) ~len orig state.vmemory
in
{ state with ilocs; vmemory }
module Engine (Solver : Solver_sig.S) = struct
type result = Unsat | Sat of t
let extract_memory state =
match Solver.get_array Memory.Root with
| (exception Not_found) | [||] -> (BiTbl.create 0, !(state.alocs))
| assignment ->
let memory = BiTbl.create (Array.length assignment) in
let alocs =
Array.fold_left
(fun alocs (addr, value) ->
match BiItM.find addr state.ilocs with
| exception Not_found ->
BiTbl.add memory addr value;
alocs
| base, img ->
let offset = Z.to_int (Z.sub addr base) in
let value' =
Char.unsafe_chr
(if offset < Bigarray.Array1.dim img then
Bigarray.Array1.get img offset
else 0)
in
if value <> value' then (addr, value') :: alocs else alocs)
!(state.alocs) assignment
in
(memory, alocs)
let extract_array name =
match Solver.get_array name with
| (exception Not_found) | [||] -> BiTbl.create 0
| assignment ->
let array = BiTbl.create (Array.length assignment) in
Array.iter
(fun (addr, value) -> BiTbl.add array addr value)
assignment;
array
let extract_arrays state =
let arrays = StTbl.create 5 in
S.iter
(fun name symbol -> StTbl.add arrays name (extract_array symbol))
state.farrays;
arrays
let extract_vars state =
let vars = BvTbl.create 32 in
S.iter
(fun _ ->
List.iter (fun bv ->
match Solver.get bv with
| exception Not_found -> ()
| x ->
BvTbl.add vars bv
(Bitvector.create Solver.(get_value x) (Expr.sizeof bv))))
state.fvariables;
vars
let rec force_lazy_init alocs state =
if alocs == !(state.alocs) = false then
match alocs with
| [] -> ()
| (addr, value) :: alocs ->
Solver.set_memory ~addr (Z.of_int (Char.code value));
force_lazy_init alocs state
let enumerate =
let rec iter state e expr size n enum =
if n = 0 then enum
else
match Solver.check_sat () with
| Unknown ->
QS.Solver.incr_err ();
raise Unknown
| Unsat ->
QS.Solver.incr_unsat ();
enum
| Sat ->
QS.Solver.incr_sat ();
let memory, alocs = extract_memory state in
if alocs == !(state.alocs) = false then (
force_lazy_init alocs state;
state.alocs := alocs;
iter state e expr size n enum)
else
let x = Solver.get_value expr in
let b = Bv.create x size in
let cond = Expr.equal e (Expr.constant b) in
let state' =
{
state with
constraints = cond :: state.constraints;
constset = BvSet.add cond state.constset;
model = (extract_vars state, memory, extract_arrays state);
}
in
Solver.neq expr x;
iter state e expr size (n - 1) ((b, state') :: enum)
in
fun e ?(n = (1 lsl Expr.sizeof e) - 1) ?(except = []) state ->
let size = Expr.sizeof e in
let expr = Solver.bind state.fid e state.constraints in
List.iter
(fun (addr, value) ->
Solver.set_memory ~addr (Z.of_int (Char.code value)))
!(state.alocs);
let init =
let bv = Model.eval state.model e in
if List.mem bv except then []
else (
QS.Preprocess.incr_const ();
Solver.neq expr (Bitvector.value_of bv);
let cond = Expr.equal e (Expr.constant bv) in
[
( bv,
{
state with
constraints = cond :: state.constraints;
constset = BvSet.add cond state.constset;
} );
])
in
List.iter (fun bv -> Solver.neq expr (Bitvector.value_of bv)) except;
iter state e expr size (n - 1) init
let check_sat =
let rec check_sat_true state =
match Solver.check_sat () with
| Unknown -> raise Unknown
| Unsat -> Unsat
| Sat ->
let memory, alocs = extract_memory state in
if alocs == !(state.alocs) = false then (
force_lazy_init alocs state;
state.alocs := alocs;
check_sat_true state)
else
Sat
{
state with
model = (extract_vars state, memory, extract_arrays state);
}
in
fun state ->
Solver.put state.fid state.constraints;
List.iter
(fun (addr, value) ->
Solver.set_memory ~addr (Z.of_int (Char.code value)))
!(state.alocs);
check_sat_true state
let close () = Solver.close ()
end
let assume cond state =
if Expr.is_equal cond Expr.one then (
QS.Preprocess.incr_sat ();
Some state)
else if Expr.is_equal cond Expr.zero then (
QS.Preprocess.incr_unsat ();
None)
else if BvSet.mem cond state.constset then (
QS.Preprocess.incr_sat ();
Some state)
else if BvSet.mem (Expr.lognot cond) state.constset then (
QS.Preprocess.incr_unsat ();
None)
else
let state =
{
state with
constraints = cond :: state.constraints;
constset = BvSet.add cond state.constset;
}
in
if Bitvector.zero = Model.eval state.model cond then (
QS.Solver.start_timer ();
let open Engine (F ()) in
let r =
match check_sat state with
| exception Unknown ->
QS.Solver.incr_err ();
raise Unknown
| Unsat ->
QS.Solver.incr_unsat ();
None
| Sat state ->
QS.Solver.incr_sat ();
Some state
in
close ();
QS.Solver.stop_timer ();
r)
else (
QS.Preprocess.incr_sat ();
Some state)
let test cond state =
if Expr.is_equal cond Expr.one then (
QS.Preprocess.incr_sat ();
True state)
else if Expr.is_equal cond Expr.zero then (
QS.Preprocess.incr_unsat ();
False state)
else if BvSet.mem cond state.constset then (
QS.Preprocess.incr_sat ();
True state)
else if BvSet.mem (Expr.lognot cond) state.constset then (
QS.Preprocess.incr_unsat ();
False state)
else
let t =
{
state with
constraints = cond :: state.constraints;
constset = BvSet.add cond state.constset;
}
in
let ncond = Expr.lognot cond in
let f =
{
state with
constraints = ncond :: state.constraints;
constset = BvSet.add ncond state.constset;
}
in
let e = Model.eval state.model cond in
let s = if Bv.is_zero e then t else f in
QS.Solver.start_timer ();
let open Engine (F ()) in
let r =
match check_sat s with
| exception Unknown ->
QS.Solver.incr_err ();
raise Unknown
| Unsat ->
QS.Solver.incr_unsat ();
if Bv.is_zero e then False f else True t
| Sat state ->
QS.Solver.incr_sat ();
if Bv.is_zero e then Both { t = state; f }
else Both { t; f = state }
in
close ();
QS.Solver.stop_timer ();
r
let enumerate =
let with_solver e ?n ?except state =
QS.Solver.start_timer ();
let open Engine (F ()) in
let r = enumerate e ?n ?except state in
close ();
QS.Solver.stop_timer ();
r
in
fun e ?n ?(except = []) state ->
match (e, n) with
| Expr.Cst bv, _ when List.mem bv except = false ->
QS.Preprocess.incr_const ();
[ (bv, state) ]
| Expr.Cst _, _ ->
QS.Preprocess.incr_const ();
[]
| _, Some 1 ->
let bv = Model.eval state.model e in
if List.mem bv except then with_solver e ?n ~except state
else (
QS.Preprocess.incr_const ();
let cond = Expr.equal e (Expr.constant bv) in
[
( bv,
{
state with
constraints = cond :: state.constraints;
constset = BvSet.add cond state.constset;
} );
])
| _, _ -> with_solver e ?n ~except state
let merge t t' =
if t == t' then t
else if
t.fid = t'.fid
&& t.fvariables == t'.fvariables
&& t.farrays == t'.farrays && t.ilocs == t'.ilocs
then
match (t.constraints, t'.constraints) with
| c :: constraints, c' :: constraints'
when constraints == constraints' && Expr.is_equal c (Expr.lognot c') ->
let constset = BvSet.remove c t.constset
and vsymbols =
if t.vsymbols == t'.vsymbols then t.vsymbols
else
I.merge
(fun _ o0 o1 ->
match (o0, o1) with
| Some e0, Some e1 ->
if Expr.is_equal e0 e1 then o0
else Some (Expr.ite c e0 e1)
| (Some _ | None), (Some _ | None) ->
raise_notrace Non_mergeable)
t.vsymbols t'.vsymbols
and varrays =
if t.varrays == t'.varrays then t.varrays
else
S.merge
(fun _ o0 o1 ->
match (o0, o1) with
| Some a0, Some a1 -> Some (Memory.merge c a0 a1)
| (Some _ | None), (Some _ | None) ->
raise_notrace Non_mergeable)
t.varrays t'.varrays
and vmemory = Memory.merge c t.vmemory t'.vmemory
and fid = t.fid
and fvariables = t.fvariables
and farrays = t.farrays
and ilocs = t.ilocs
and alocs = t.alocs
and model = t.model in
{
constraints;
constset;
vsymbols;
varrays;
vmemory;
fid;
fvariables;
farrays;
ilocs;
alocs;
model;
}
| _ -> raise_notrace Non_mergeable
else raise_notrace Non_mergeable
module Value = struct
type t = Expr.t
let constant = Expr.constant
let lookup ({ id; _ } as var : Types.Var.t) t =
try I.find id t.vsymbols with Not_found -> raise_notrace (Undef var)
let read = read
let select = select
let unary = Expr.unary
let binary = Expr.binary
let ite = Expr.ite
let uop e = function
| Dba.Unary_op.Not -> Term.Not
| Dba.Unary_op.UMinus -> Term.Minus
| Dba.Unary_op.Sext n -> Term.Sext (n - Dba.Expr.size_of e)
| Dba.Unary_op.Uext n -> Term.Uext (n - Dba.Expr.size_of e)
| Dba.Unary_op.Restrict interval -> Term.Restrict interval
let bop op =
let open Dba.Binary_op in
match op with
| Plus -> Term.Plus
| Minus -> Term.Minus
| Mult -> Term.Mul
| DivU -> Term.Udiv
| DivS -> Term.Sdiv
| ModU -> Term.Umod
| ModS -> Term.Smod
| Eq -> Term.Eq
| Diff -> Term.Diff
| LeqU -> Term.Ule
| LtU -> Term.Ult
| GeqU -> Term.Uge
| GtU -> Term.Ugt
| LeqS -> Term.Sle
| LtS -> Term.Slt
| GeqS -> Term.Sge
| GtS -> Term.Sgt
| Xor -> Term.Xor
| And -> Term.And
| Or -> Term.Or
| Concat -> Term.Concat
| LShift -> Term.Lsl
| RShiftU -> Term.Lsr
| RShiftS -> Term.Asr
| LeftRotate -> Term.Rol
| RightRotate -> Term.Ror
let rec eval (e : Types.Expr.t) t =
match e with
| Cst bv | Var { info = Symbol (_, (lazy bv)); _ } -> constant bv
| Var var -> lookup var t
| Load (len, dir, addr, None) -> fst (read ~addr:(eval addr t) len dir t)
| Load (len, dir, addr, Some name) ->
fst (select name ~addr:(eval addr t) len dir t)
| Unary (f, x) -> unary (uop x f) (eval x t)
| Binary (f, x, y) -> binary (bop f) (eval x t) (eval y t)
| Ite (c, r, e) -> ite (eval c t) (eval r t) (eval e t)
end
let get_value (e : Expr.t) _ =
match e with Cst bv -> bv | _ -> raise_notrace Non_unique
let pp_smt (target : Types.target) ppf t =
let module P = Smt2_solver.Printer in
let ctx =
P.create ~debug:(fun ~name ~label -> label ^ name) ~next_id:t.fid ()
in
List.iter (P.visit_bl ctx) t.constraints;
let defs =
match target with
| Some defs ->
let rec proceed defs t =
try
List.map
(fun (expr, name) ->
let expr = Value.eval expr t in
P.visit_bv ctx expr;
(expr, name))
defs
with
| Undef var -> proceed defs (fresh var t)
| Uninterp array -> proceed defs (alloc ~array t)
in
proceed defs t
| None ->
P.visit_ax ctx t.vmemory;
List.rev
(I.fold
(fun id expr defs ->
P.visit_bv ctx expr;
(expr, (Dba.Var.from_id id).name) :: defs)
t.vsymbols [])
in
Format.pp_open_vbox ppf 0;
P.pp_print_decls ppf ctx;
P.pp_print_defs ppf ctx;
List.iter
(fun (bv, name) ->
Format.fprintf ppf "@[<h>(define-fun %s () (_ BitVec %d)@ " name
(Expr.sizeof bv);
P.pp_print_bv ctx ppf bv;
Format.fprintf ppf ")@]@ ")
defs;
if target = None then
Format.fprintf ppf
"@[<h>(define-fun memory () (Array (_ BitVec %d) (_ BitVec 8))@ %a)@]"
(Kernel_options.Machine.word_size ())
(P.pp_print_ax ctx) t.vmemory;
List.iter
(fun bl ->
Format.pp_open_hbox ppf ();
Format.pp_print_string ppf "(assert ";
P.pp_print_bl ctx ppf bl;
Format.pp_print_char ppf ')';
Format.pp_close_box ppf ();
Format.pp_print_space ppf ())
t.constraints;
Format.pp_close_box ppf ()
let as_ascii ~name t =
let buf = Buffer.create 16 in
List.iter (fun var ->
assert (Expr.sizeof var mod byte_size = 0);
let rec iter bv =
let size = Bitvector.size_of bv in
if size = byte_size then Buffer.add_char buf (Bitvector.to_char bv)
else
let byte = Bitvector.extract bv { Interval.lo = 0; hi = 7 } in
Buffer.add_char buf (Bitvector.to_char byte);
iter (Bitvector.extract bv { Interval.lo = 8; hi = size - 1 })
in
iter (Model.eval t.model var))
@@ List.rev @@ S.find name t.fvariables;
Buffer.contents buf
let as_c_string ~name t =
try
let ar = S.find name t.varrays in
let buf = Buffer.create 16 in
let rec iter addr =
let byte =
Model.eval t.model (fst (Memory.read ~addr 1 Machine.LittleEndian ar))
in
if Bitvector.is_zeros byte then Buffer.contents buf
else (
Buffer.add_char buf (Bitvector.to_char byte);
iter (Expr.addi addr 1))
in
iter (Expr.zeros (Kernel_options.Machine.word_size ()))
with Not_found -> ""
let to_formula t =
let module C = Smt2_solver.Cross in
let ctx =
C.create ~debug:(fun ~name ~label -> label ^ name) ~next_id:t.fid ()
in
List.iter (C.assert_bl ctx) t.constraints;
C.define_ax ctx "memory" t.vmemory;
I.iter
(fun id expr -> C.define_bv ctx (Dba.Var.from_id id).name expr)
t.vsymbols;
C.to_formula ctx
end
|
9c9f751736d471e062b0c8c58e8c4a32145819ceedf9e85b6c79b0165f2f0fd3 | CodyReichert/qi | enum.lisp | ;;;; -*- Mode: lisp; indent-tabs-mode: nil -*-
;;;
;;; enum.lisp --- Defining foreign constants as Lisp keywords.
;;;
Copyright ( C ) 2005 - 2006 , < >
;;;
;;; Permission is hereby granted, free of charge, to any person
;;; obtaining a copy of this software and associated documentation
files ( the " Software " ) , to deal in the Software without
;;; restriction, including without limitation the rights to use, copy,
;;; modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software , and to permit persons to whom the Software is
;;; furnished to do so, subject to the following conditions:
;;;
;;; The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software .
;;;
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
;;; EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
;;; MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
;;; NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
;;; HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
;;; WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
;;; OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
;;; DEALINGS IN THE SOFTWARE.
;;;
(in-package #:cffi)
;;;# Foreign Constants as Lisp Keywords
;;;
;;; This module defines the DEFCENUM macro, which provides an
;;; interface for defining a type and associating a set of integer
;;; constants with keyword symbols for that type.
;;;
;;; The keywords are automatically translated to the appropriate
;;; constant for the type by a type translator when passed as
;;; arguments or a return value to a foreign function.
(defclass foreign-enum (foreign-typedef enhanced-foreign-type)
((keyword-values
:initform (make-hash-table :test 'eq)
:reader keyword-values)
(value-keywords
:initform (make-hash-table)
:reader value-keywords))
(:documentation "Describes a foreign enumerated type."))
(defun make-foreign-enum (type-name base-type values)
"Makes a new instance of the foreign-enum class."
(let ((type (make-instance 'foreign-enum :name type-name
:actual-type (parse-type base-type)))
(default-value 0))
(dolist (pair values)
(destructuring-bind (keyword &optional (value default-value))
(ensure-list pair)
(check-type keyword keyword)
(check-type value integer)
(if (gethash keyword (keyword-values type))
(error "A foreign enum cannot contain duplicate keywords: ~S."
keyword)
(setf (gethash keyword (keyword-values type)) value))
;; This is completely arbitrary behaviour: we keep the last we
;; value->keyword mapping. I suppose the opposite would be
just as good ( keeping the first ) . Returning a list with all
;; the keywords might be a solution too? Suggestions
;; welcome. --luis
(setf (gethash value (value-keywords type)) keyword)
(setq default-value (1+ value))))
type))
(defmacro defcenum (name-and-options &body enum-list)
"Define an foreign enumerated type."
(discard-docstring enum-list)
(destructuring-bind (name &optional (base-type :int))
(ensure-list name-and-options)
`(eval-when (:compile-toplevel :load-toplevel :execute)
(notice-foreign-type
',name (make-foreign-enum ',name ',base-type ',enum-list)))))
(defun hash-keys-to-list (ht)
(loop for k being the hash-keys in ht collect k))
(defun foreign-enum-keyword-list (enum-type)
"Return a list of KEYWORDS defined in ENUM-TYPE."
(hash-keys-to-list (keyword-values (parse-type enum-type))))
These [ four ] functions could be good canditates for compiler macros
;;; when the value or keyword is constant. I am not going to bother
;;; until someone has a serious performance need to do so though. --jamesjb
(defun %foreign-enum-value (type keyword &key errorp)
(check-type keyword keyword)
(or (gethash keyword (keyword-values type))
(when errorp
(error "~S is not defined as a keyword for enum type ~S."
keyword type))))
(defun foreign-enum-value (type keyword &key (errorp t))
"Convert a KEYWORD into an integer according to the enum TYPE."
(let ((type-obj (parse-type type)))
(if (not (typep type-obj 'foreign-enum))
(error "~S is not a foreign enum type." type)
(%foreign-enum-value type-obj keyword :errorp errorp))))
(defun %foreign-enum-keyword (type value &key errorp)
(check-type value integer)
(or (gethash value (value-keywords type))
(when errorp
(error "~S is not defined as a value for enum type ~S."
value type))))
(defun foreign-enum-keyword (type value &key (errorp t))
"Convert an integer VALUE into a keyword according to the enum TYPE."
(let ((type-obj (parse-type type)))
(if (not (typep type-obj 'foreign-enum))
(error "~S is not a foreign enum type." type)
(%foreign-enum-keyword type-obj value :errorp errorp))))
(defmethod translate-to-foreign (value (type foreign-enum))
(if (keywordp value)
(%foreign-enum-value type value :errorp t)
value))
(defmethod translate-into-foreign-memory
(value (type foreign-enum) pointer)
(setf (mem-aref pointer (unparse-type (actual-type type)))
(translate-to-foreign value type)))
(defmethod translate-from-foreign (value (type foreign-enum))
(%foreign-enum-keyword type value :errorp t))
(defmethod expand-to-foreign (value (type foreign-enum))
(once-only (value)
`(if (keywordp ,value)
(%foreign-enum-value ,type ,value :errorp t)
,value)))
;;;# Foreign Bitfields as Lisp keywords
;;;
DEFBITFIELD is an abstraction similar to the one provided by DEFCENUM .
;;; With some changes to DEFCENUM, this could certainly be implemented on
;;; top of it.
(defclass foreign-bitfield (foreign-typedef enhanced-foreign-type)
((symbol-values
:initform (make-hash-table :test 'eq)
:reader symbol-values)
(value-symbols
:initform (make-hash-table)
:reader value-symbols))
(:documentation "Describes a foreign bitfield type."))
(defun make-foreign-bitfield (type-name base-type values)
"Makes a new instance of the foreign-bitfield class."
(let ((type (make-instance 'foreign-bitfield :name type-name
:actual-type (parse-type base-type)))
(bit-floor 1))
(dolist (pair values)
;; bit-floor rule: find the greatest single-bit int used so far,
;; and store its left-shift
(destructuring-bind (symbol &optional
(value (prog1 bit-floor
(setf bit-floor (ash bit-floor 1)))
value-p))
(ensure-list pair)
(check-type symbol symbol)
(when value-p
(check-type value integer)
(when (and (>= value bit-floor) (single-bit-p value))
(setf bit-floor (ash value 1))))
(if (gethash symbol (symbol-values type))
(error "A foreign bitfield cannot contain duplicate symbols: ~S."
symbol)
(setf (gethash symbol (symbol-values type)) value))
(push symbol (gethash value (value-symbols type)))))
type))
(defmacro defbitfield (name-and-options &body masks)
"Define an foreign enumerated type."
(discard-docstring masks)
(destructuring-bind (name &optional (base-type :int))
(ensure-list name-and-options)
`(eval-when (:compile-toplevel :load-toplevel :execute)
(notice-foreign-type
',name (make-foreign-bitfield ',name ',base-type ',masks)))))
(defun foreign-bitfield-symbol-list (bitfield-type)
"Return a list of SYMBOLS defined in BITFIELD-TYPE."
(hash-keys-to-list (symbol-values (parse-type bitfield-type))))
(defun %foreign-bitfield-value (type symbols)
(reduce #'logior symbols
:key (lambda (symbol)
(check-type symbol symbol)
(or (gethash symbol (symbol-values type))
(error "~S is not a valid symbol for bitfield type ~S."
symbol type)))))
(defun foreign-bitfield-value (type symbols)
"Convert a list of symbols into an integer according to the TYPE bitfield."
(let ((type-obj (parse-type type)))
(if (not (typep type-obj 'foreign-bitfield))
(error "~S is not a foreign bitfield type." type)
(%foreign-bitfield-value type-obj symbols))))
(define-compiler-macro foreign-bitfield-value (&whole form type symbols)
"Optimize for when TYPE and SYMBOLS are constant."
(if (and (constantp type) (constantp symbols))
(let ((type-obj (parse-type (eval type))))
(if (not (typep type-obj 'foreign-bitfield))
(error "~S is not a foreign bitfield type." type)
(%foreign-bitfield-value type-obj (eval symbols))))
form))
(defun %foreign-bitfield-symbols (type value)
(check-type value integer)
(loop for mask being the hash-keys in (value-symbols type)
using (hash-value symbols)
when (= (logand value mask) mask)
append symbols))
(defun foreign-bitfield-symbols (type value)
"Convert an integer VALUE into a list of matching symbols according to
the bitfield TYPE."
(let ((type-obj (parse-type type)))
(if (not (typep type-obj 'foreign-bitfield))
(error "~S is not a foreign bitfield type." type)
(%foreign-bitfield-symbols type-obj value))))
(define-compiler-macro foreign-bitfield-symbols (&whole form type value)
"Optimize for when TYPE and SYMBOLS are constant."
(if (and (constantp type) (constantp value))
(let ((type-obj (parse-type (eval type))))
(if (not (typep type-obj 'foreign-bitfield))
(error "~S is not a foreign bitfield type." type)
`(quote ,(%foreign-bitfield-symbols type-obj (eval value)))))
form))
(defmethod translate-to-foreign (value (type foreign-bitfield))
(if (integerp value)
value
(%foreign-bitfield-value type (ensure-list value))))
(defmethod translate-from-foreign (value (type foreign-bitfield))
(%foreign-bitfield-symbols type value))
(defmethod expand-to-foreign (value (type foreign-bitfield))
(flet ((expander (value type)
`(if (integerp ,value)
,value
(%foreign-bitfield-value ,type (ensure-list ,value)))))
(if (constantp value)
(eval (expander value type))
(expander value type))))
(defmethod expand-from-foreign (value (type foreign-bitfield))
(flet ((expander (value type)
`(%foreign-bitfield-symbols ,type ,value)))
(if (constantp value)
(eval (expander value type))
(expander value type))))
| null | https://raw.githubusercontent.com/CodyReichert/qi/9cf6d31f40e19f4a7f60891ef7c8c0381ccac66f/dependencies/cffi-master/src/enum.lisp | lisp | -*- Mode: lisp; indent-tabs-mode: nil -*-
enum.lisp --- Defining foreign constants as Lisp keywords.
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
restriction, including without limitation the rights to use, copy,
modify, merge, publish, distribute, sublicense, and/or sell copies
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
# Foreign Constants as Lisp Keywords
This module defines the DEFCENUM macro, which provides an
interface for defining a type and associating a set of integer
constants with keyword symbols for that type.
The keywords are automatically translated to the appropriate
constant for the type by a type translator when passed as
arguments or a return value to a foreign function.
This is completely arbitrary behaviour: we keep the last we
value->keyword mapping. I suppose the opposite would be
the keywords might be a solution too? Suggestions
welcome. --luis
when the value or keyword is constant. I am not going to bother
until someone has a serious performance need to do so though. --jamesjb
# Foreign Bitfields as Lisp keywords
With some changes to DEFCENUM, this could certainly be implemented on
top of it.
bit-floor rule: find the greatest single-bit int used so far,
and store its left-shift | Copyright ( C ) 2005 - 2006 , < >
files ( the " Software " ) , to deal in the Software without
of the Software , and to permit persons to whom the Software is
included in all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
(in-package #:cffi)
(defclass foreign-enum (foreign-typedef enhanced-foreign-type)
((keyword-values
:initform (make-hash-table :test 'eq)
:reader keyword-values)
(value-keywords
:initform (make-hash-table)
:reader value-keywords))
(:documentation "Describes a foreign enumerated type."))
(defun make-foreign-enum (type-name base-type values)
"Makes a new instance of the foreign-enum class."
(let ((type (make-instance 'foreign-enum :name type-name
:actual-type (parse-type base-type)))
(default-value 0))
(dolist (pair values)
(destructuring-bind (keyword &optional (value default-value))
(ensure-list pair)
(check-type keyword keyword)
(check-type value integer)
(if (gethash keyword (keyword-values type))
(error "A foreign enum cannot contain duplicate keywords: ~S."
keyword)
(setf (gethash keyword (keyword-values type)) value))
just as good ( keeping the first ) . Returning a list with all
(setf (gethash value (value-keywords type)) keyword)
(setq default-value (1+ value))))
type))
(defmacro defcenum (name-and-options &body enum-list)
"Define an foreign enumerated type."
(discard-docstring enum-list)
(destructuring-bind (name &optional (base-type :int))
(ensure-list name-and-options)
`(eval-when (:compile-toplevel :load-toplevel :execute)
(notice-foreign-type
',name (make-foreign-enum ',name ',base-type ',enum-list)))))
(defun hash-keys-to-list (ht)
(loop for k being the hash-keys in ht collect k))
(defun foreign-enum-keyword-list (enum-type)
"Return a list of KEYWORDS defined in ENUM-TYPE."
(hash-keys-to-list (keyword-values (parse-type enum-type))))
These [ four ] functions could be good canditates for compiler macros
(defun %foreign-enum-value (type keyword &key errorp)
(check-type keyword keyword)
(or (gethash keyword (keyword-values type))
(when errorp
(error "~S is not defined as a keyword for enum type ~S."
keyword type))))
(defun foreign-enum-value (type keyword &key (errorp t))
"Convert a KEYWORD into an integer according to the enum TYPE."
(let ((type-obj (parse-type type)))
(if (not (typep type-obj 'foreign-enum))
(error "~S is not a foreign enum type." type)
(%foreign-enum-value type-obj keyword :errorp errorp))))
(defun %foreign-enum-keyword (type value &key errorp)
(check-type value integer)
(or (gethash value (value-keywords type))
(when errorp
(error "~S is not defined as a value for enum type ~S."
value type))))
(defun foreign-enum-keyword (type value &key (errorp t))
"Convert an integer VALUE into a keyword according to the enum TYPE."
(let ((type-obj (parse-type type)))
(if (not (typep type-obj 'foreign-enum))
(error "~S is not a foreign enum type." type)
(%foreign-enum-keyword type-obj value :errorp errorp))))
(defmethod translate-to-foreign (value (type foreign-enum))
(if (keywordp value)
(%foreign-enum-value type value :errorp t)
value))
(defmethod translate-into-foreign-memory
(value (type foreign-enum) pointer)
(setf (mem-aref pointer (unparse-type (actual-type type)))
(translate-to-foreign value type)))
(defmethod translate-from-foreign (value (type foreign-enum))
(%foreign-enum-keyword type value :errorp t))
(defmethod expand-to-foreign (value (type foreign-enum))
(once-only (value)
`(if (keywordp ,value)
(%foreign-enum-value ,type ,value :errorp t)
,value)))
DEFBITFIELD is an abstraction similar to the one provided by DEFCENUM .
(defclass foreign-bitfield (foreign-typedef enhanced-foreign-type)
((symbol-values
:initform (make-hash-table :test 'eq)
:reader symbol-values)
(value-symbols
:initform (make-hash-table)
:reader value-symbols))
(:documentation "Describes a foreign bitfield type."))
(defun make-foreign-bitfield (type-name base-type values)
"Makes a new instance of the foreign-bitfield class."
(let ((type (make-instance 'foreign-bitfield :name type-name
:actual-type (parse-type base-type)))
(bit-floor 1))
(dolist (pair values)
(destructuring-bind (symbol &optional
(value (prog1 bit-floor
(setf bit-floor (ash bit-floor 1)))
value-p))
(ensure-list pair)
(check-type symbol symbol)
(when value-p
(check-type value integer)
(when (and (>= value bit-floor) (single-bit-p value))
(setf bit-floor (ash value 1))))
(if (gethash symbol (symbol-values type))
(error "A foreign bitfield cannot contain duplicate symbols: ~S."
symbol)
(setf (gethash symbol (symbol-values type)) value))
(push symbol (gethash value (value-symbols type)))))
type))
(defmacro defbitfield (name-and-options &body masks)
"Define an foreign enumerated type."
(discard-docstring masks)
(destructuring-bind (name &optional (base-type :int))
(ensure-list name-and-options)
`(eval-when (:compile-toplevel :load-toplevel :execute)
(notice-foreign-type
',name (make-foreign-bitfield ',name ',base-type ',masks)))))
(defun foreign-bitfield-symbol-list (bitfield-type)
"Return a list of SYMBOLS defined in BITFIELD-TYPE."
(hash-keys-to-list (symbol-values (parse-type bitfield-type))))
(defun %foreign-bitfield-value (type symbols)
(reduce #'logior symbols
:key (lambda (symbol)
(check-type symbol symbol)
(or (gethash symbol (symbol-values type))
(error "~S is not a valid symbol for bitfield type ~S."
symbol type)))))
(defun foreign-bitfield-value (type symbols)
"Convert a list of symbols into an integer according to the TYPE bitfield."
(let ((type-obj (parse-type type)))
(if (not (typep type-obj 'foreign-bitfield))
(error "~S is not a foreign bitfield type." type)
(%foreign-bitfield-value type-obj symbols))))
(define-compiler-macro foreign-bitfield-value (&whole form type symbols)
"Optimize for when TYPE and SYMBOLS are constant."
(if (and (constantp type) (constantp symbols))
(let ((type-obj (parse-type (eval type))))
(if (not (typep type-obj 'foreign-bitfield))
(error "~S is not a foreign bitfield type." type)
(%foreign-bitfield-value type-obj (eval symbols))))
form))
(defun %foreign-bitfield-symbols (type value)
(check-type value integer)
(loop for mask being the hash-keys in (value-symbols type)
using (hash-value symbols)
when (= (logand value mask) mask)
append symbols))
(defun foreign-bitfield-symbols (type value)
"Convert an integer VALUE into a list of matching symbols according to
the bitfield TYPE."
(let ((type-obj (parse-type type)))
(if (not (typep type-obj 'foreign-bitfield))
(error "~S is not a foreign bitfield type." type)
(%foreign-bitfield-symbols type-obj value))))
(define-compiler-macro foreign-bitfield-symbols (&whole form type value)
"Optimize for when TYPE and SYMBOLS are constant."
(if (and (constantp type) (constantp value))
(let ((type-obj (parse-type (eval type))))
(if (not (typep type-obj 'foreign-bitfield))
(error "~S is not a foreign bitfield type." type)
`(quote ,(%foreign-bitfield-symbols type-obj (eval value)))))
form))
(defmethod translate-to-foreign (value (type foreign-bitfield))
(if (integerp value)
value
(%foreign-bitfield-value type (ensure-list value))))
(defmethod translate-from-foreign (value (type foreign-bitfield))
(%foreign-bitfield-symbols type value))
(defmethod expand-to-foreign (value (type foreign-bitfield))
(flet ((expander (value type)
`(if (integerp ,value)
,value
(%foreign-bitfield-value ,type (ensure-list ,value)))))
(if (constantp value)
(eval (expander value type))
(expander value type))))
(defmethod expand-from-foreign (value (type foreign-bitfield))
(flet ((expander (value type)
`(%foreign-bitfield-symbols ,type ,value)))
(if (constantp value)
(eval (expander value type))
(expander value type))))
|
1100c909455b1784686059332e43c6f2957629736cdbfedd05a2ef16e289b645 | rems-project/lem | rename_top_level.ml | (**************************************************************************)
(* Lem *)
(* *)
, University of Cambridge
, INRIA Paris - Rocquencourt
, University of Cambridge
, University of Cambridge
, University of Cambridge ( while working on Lem )
, University of Cambridge
, University of Kent
, University of Cambridge
, University of Edinburgh
Shaked Flur , University of Cambridge
, University of Cambridge
, University of Cambridge
(* *)
The Lem sources are copyright 2010 - 2018
by the authors above and Institut National de Recherche en
Informatique et en Automatique ( INRIA ) .
(* *)
All files except / pmap.{ml , mli } and ocaml - libpset.{ml , mli }
(* are distributed under the license below. The former are distributed *)
(* under the LGPLv2, as in the LICENSE file. *)
(* *)
(* *)
(* Redistribution and use in source and binary forms, with or without *)
(* modification, are permitted provided that the following conditions *)
(* are met: *)
1 . Redistributions of source code must retain the above copyright
(* notice, this list of conditions and the following disclaimer. *)
2 . Redistributions in binary form must reproduce the above copyright
(* notice, this list of conditions and the following disclaimer in the *)
(* documentation and/or other materials provided with the distribution. *)
3 . The names of the authors may not be used to endorse or promote
(* products derived from this software without specific prior written *)
(* permission. *)
(* *)
THIS SOFTWARE IS PROVIDED BY THE AUTHORS ` ` AS IS '' AND ANY EXPRESS
(* OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED *)
(* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE *)
ARE DISCLAIMED . IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY
DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
(* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE *)
(* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS *)
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER
(* IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR *)
(* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN *)
(* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. *)
(**************************************************************************)
(* Support for changing the names of top-level definitions, including removal of
* nested modules. We also figure out how much of each path needs to be
* printed.
*)
open Typed_ast
open Typed_ast_syntax
open Target
open Util
(* TODO: Fix this hack for recognising library functions!*)
let library_pathL : Name.t list =
let targetsL = List.map non_ident_target_to_mname (Targetset.elements all_targets) in
let extraL = ["Pervasives"; "Pmap"; "Int"; "List"; "Vector"; "Set"] in
let extranL = List.map Name.from_string extraL in
targetsL @ extranL
let is_lib_path path =
match path with
| [] -> true
| p :: _ -> List.exists (fun p' -> (Name.compare p p' = 0)) library_pathL
let prevent_lib_renames p =
let exceptions = [] in
let (path, n) = Path.to_name_list p in
let path_n_s = (List.map Name.to_string path, Name.to_string n) in
if (List.mem path_n_s exceptions) then false else is_lib_path path
(* end hack *)
let flatten_modules_macro path env ((d,s),l,lenv) =
let l_unk = Ast.Trans(false, "flatten_modules", Some l) in
match d with
| Module(sk1,n,mod_path,sk2,sk3,ds,sk4) ->
let mod_shell = ((Module(sk1,n,mod_path,sk2,sk3,[],sk4),s),l,lenv) in
let com = ((Comment(mod_shell),None),l_unk,lenv) in
Some((env,List.rev (com::ds)))
| _ -> None
let flatten_modules mod_path e d =
let (module_path, module_name) = Path.to_name_list mod_path in
snd (Def_trans.process_defs (List.rev module_path) flatten_modules_macro module_name e d)
let compute_ocaml_rename_constant_fun (nk : name_kind) (n : Name.t) : Name.t option =
match nk with
| Nk_typeconstr _ -> Name.uncapitalize n
| Nk_const _ -> Name.uncapitalize n
| Nk_constr _ -> Name.capitalize n
| Nk_field _ -> Name.uncapitalize n
| Nk_module _ -> Name.capitalize n
| Nk_class _ -> None
let compute_isa_rename_constant_fun (nk : name_kind) (n : Name.t) : Name.t option =
let n0 = Util.option_repeat Name.remove_underscore n in
let n1 = Util.option_repeat Name.remove_underscore_suffix n0 in
if (Name.compare n1 n = 0) then None else Some n1
(* TODO: check whether this is sufficient, or if more restrictions are required *)
let compute_hol_rename_constant_fun (nk : name_kind) (n : Name.t) : Name.t option =
let n0 = Util.option_repeat Name.remove_underscore n in
if (Name.compare n0 n = 0) then None else Some n0
let compute_target_rename_constant_fun (targ : Target.non_ident_target) (nk : name_kind) (n : Name.t) : Name.t option =
match targ with
| Target_ocaml -> compute_ocaml_rename_constant_fun nk n
| Target_isa -> compute_isa_rename_constant_fun nk n
| Target_hol -> compute_hol_rename_constant_fun nk n
| _ -> None
let get_fresh_name consts consts' n =
let is_good n = not (NameSet.mem n consts) && not (NameSet.mem n consts') in
if (is_good n) then None else
Some (Name.fresh (Name.to_rope n) is_good)
let rename_constant (targ : Target.non_ident_target) (consts : NameSet.t) (consts_new : NameSet.t) (env : env) (c : const_descr_ref) :
(NameSet.t * env) = begin
let l = Ast.Trans (false, "rename_constant", None) in
let c_d = c_env_lookup l env.c_env c in
let (is_shown, n, n_ascii_opt) = constant_descr_to_name (Target_no_ident targ) c_d in
let compute_new_name (n : Name.t) = begin
(* apply target specific renaming *)
let nk = const_descr_to_kind (c, c_d) in
let n'_opt = compute_target_rename_constant_fun targ nk n in
let n' = Util.option_default n n'_opt in
(* check whether the computed name is fresh and
enforce it if necessary *)
let (is_auto_renamed, n''_opt) =
match get_fresh_name consts consts_new n' with
None -> (false, n'_opt)
| Some n'' -> (true, Some n'') in
let n'' = Util.option_default n' n''_opt in
let is_renamed = match n''_opt with None -> false | _ -> true in
(is_auto_renamed, is_renamed, n'')
end in
let check_module_in_output () = begin
match (Path.get_module_path c_d.const_binding) with
| None -> true
| Some mp -> (e_env_lookup l env.e_env mp).mod_in_output
end in
(* rename constant name *)
let (consts_new, env) = if (not is_shown) then (consts_new, env) else begin
let (is_auto_renamed, is_renamed, n_new) = compute_new_name n in
(** add name to the list of constants to avoid *)
let consts_new' = NameSet.add n_new consts_new in
if not (is_renamed) then (* do nothing *) (consts_new', env) else
begin
let (c_d', via_opt) = constant_descr_rename targ n_new l c_d in
(* print warning *)
let _ = (if (not is_auto_renamed) || not (check_module_in_output ()) then () else
let n_org : string = Name.to_string (Path.get_name c_d.const_binding) in
(Reporting.report_warning env (Reporting.Warn_rename (c_d.spec_l, n_org, Util.option_map (fun (l, n) -> (Name.to_string n, l)) via_opt, Name.to_string n_new, Target_no_ident targ))))
in
(consts_new', env_c_env_update env c c_d')
end
end in
(* rename constant ascii-name *)
if (not is_shown) then (consts_new, env) else
match n_ascii_opt with None -> (consts_new, env) | Some n_ascii ->
begin
let (is_auto_renamed, is_renamed, n_ascii_new) = compute_new_name n_ascii in
(** add name to the list of constants to avoid *)
let consts_new' = NameSet.add n_ascii_new consts_new in
if not (is_renamed) then (* do nothing *) (consts_new', env) else
begin
let c_d' = {c_d with target_ascii_rep = Targetmap.insert c_d.target_ascii_rep (targ, (l, n_ascii_new))} in
(consts_new', env_c_env_update env c c_d')
end
end
end
let rename_type (targ : Target.non_ident_target) (consts : NameSet.t) (consts_new : NameSet.t) (env : env) (t : Path.t) :
(NameSet.t * env) = begin
let l = Ast.Trans (false, "rename_type", None) in
let td = Types.type_defs_lookup l env.t_env t in
let n = type_descr_to_name (Target_no_ident targ) t td in
(* apply target specific renaming *)
let n'_opt = compute_target_rename_constant_fun targ (Nk_typeconstr t) n in
let n' = Util.option_default n n'_opt in
(* check whether the computed name is fresh and enforce it if necessary *)
let (is_auto_renamed, n''_opt) =
match get_fresh_name consts consts_new n' with
None -> (false, n'_opt)
| Some n'' -> (true, Some n'') in
(** add name to the list of constants to avoid *)
let n'' = Util.option_default n' n''_opt in
let consts_new' = NameSet.add n'' consts_new in
match Util.option_map (fun n'' -> type_descr_rename targ n'' l td) n''_opt with
| None -> (* if no renaming is necessary or if renaming is not possible, do nothing *) (consts_new', env)
| Some (td', via_opt) -> begin
(* print warning *)
let n0 : string = Name.to_string (Path.get_name t) in
let _ = (if (not is_auto_renamed) then () else
(Reporting.report_warning env (Reporting.Warn_rename (Ast.Unknown, n0, Util.option_map (fun (l, n) -> (Name.to_string n, l)) via_opt, Name.to_string n'', Target_no_ident targ))))
in
(* update environment *)
let env' = {env with t_env = Types.type_defs_update env.t_env t td'} in
(consts_new', env')
end
end
let rename_defs_target (targ : Target.target) ue consts env =
match dest_human_target targ with
| None -> env
| Some targ_ni ->
begin
let (new_types', env) = List.fold_left (fun (consts_new, env) t -> rename_type targ_ni consts consts_new env t) (NameSet.empty, env)
ue.Typed_ast_syntax.used_types in
(* rename constants *)
let (new_consts', env) = List.fold_left (fun (consts_new, env) c -> rename_constant targ_ni consts consts_new env c) (NameSet.empty, env)
ue.Typed_ast_syntax.used_consts in
env
end
let c_env_save c_env c_id_opt c_d =
match c_id_opt with
| None -> c_env_store c_env c_d
| Some c_id -> (c_env_update c_env c_id c_d, c_id)
| null | https://raw.githubusercontent.com/rems-project/lem/a839114e468119d9ac0868d7dc53eae7f3cc3a6c/src/rename_top_level.ml | ocaml | ************************************************************************
Lem
are distributed under the license below. The former are distributed
under the LGPLv2, as in the LICENSE file.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
notice, this list of conditions and the following disclaimer.
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
products derived from this software without specific prior written
permission.
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
************************************************************************
Support for changing the names of top-level definitions, including removal of
* nested modules. We also figure out how much of each path needs to be
* printed.
TODO: Fix this hack for recognising library functions!
end hack
TODO: check whether this is sufficient, or if more restrictions are required
apply target specific renaming
check whether the computed name is fresh and
enforce it if necessary
rename constant name
* add name to the list of constants to avoid
do nothing
print warning
rename constant ascii-name
* add name to the list of constants to avoid
do nothing
apply target specific renaming
check whether the computed name is fresh and enforce it if necessary
* add name to the list of constants to avoid
if no renaming is necessary or if renaming is not possible, do nothing
print warning
update environment
rename constants | , University of Cambridge
, INRIA Paris - Rocquencourt
, University of Cambridge
, University of Cambridge
, University of Cambridge ( while working on Lem )
, University of Cambridge
, University of Kent
, University of Cambridge
, University of Edinburgh
Shaked Flur , University of Cambridge
, University of Cambridge
, University of Cambridge
The Lem sources are copyright 2010 - 2018
by the authors above and Institut National de Recherche en
Informatique et en Automatique ( INRIA ) .
All files except / pmap.{ml , mli } and ocaml - libpset.{ml , mli }
1 . Redistributions of source code must retain the above copyright
2 . Redistributions in binary form must reproduce the above copyright
3 . The names of the authors may not be used to endorse or promote
THIS SOFTWARE IS PROVIDED BY THE AUTHORS ` ` AS IS '' AND ANY EXPRESS
ARE DISCLAIMED . IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY
DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER
open Typed_ast
open Typed_ast_syntax
open Target
open Util
(* Module names that belong to the library and must not be renamed:
   one module per backend target plus a few fixed library modules. *)
let library_pathL : Name.t list =
  let target_mods =
    List.map non_ident_target_to_mname (Targetset.elements all_targets)
  in
  let extra_mods =
    List.map Name.from_string
      ["Pervasives"; "Pmap"; "Int"; "List"; "Vector"; "Set"]
  in
  target_mods @ extra_mods
(* Does [path] point into the library?  The empty path counts as library. *)
let is_lib_path = function
  | [] -> true
  | p :: _ -> List.exists (fun p' -> Name.compare p p' = 0) library_pathL
(* Should renaming of path [p] be blocked because it denotes a library
   entity?  [exceptions] is a (currently empty) escape hatch listing
   library names that may be renamed anyway. *)
let prevent_lib_renames p =
  let exceptions = [] in
  let (path, n) = Path.to_name_list p in
  let key = (List.map Name.to_string path, Name.to_string n) in
  (not (List.mem key exceptions)) && is_lib_path path
(* Def_trans macro that removes one level of module nesting: the module's
   definitions [ds] are lifted to the enclosing level, while an empty module
   shell (wrapped in a [Comment]) is kept so the module header survives in
   the output.  Returns [None] for non-module definitions.
   NOTE(review): [List.rev (com::ds)] puts the comment shell after the
   reversed [ds] — presumably [ds] arrives in reverse order from the parser;
   confirm against [Def_trans.process_defs]. *)
let flatten_modules_macro path env ((d,s),l,lenv) =
  let l_unk = Ast.Trans(false, "flatten_modules", Some l) in
  match d with
  | Module(sk1,n,mod_path,sk2,sk3,ds,sk4) ->
      (* keep the (now empty) module as a comment *)
      let mod_shell = ((Module(sk1,n,mod_path,sk2,sk3,[],sk4),s),l,lenv) in
      let com = ((Comment(mod_shell),None),l_unk,lenv) in
      Some((env,List.rev (com::ds)))
  | _ -> None
(* Flatten all nested modules inside the definitions [d] of module
   [mod_path], using [flatten_modules_macro]. *)
let flatten_modules mod_path e d =
  let (mpath, mname) = Path.to_name_list mod_path in
  let result =
    Def_trans.process_defs (List.rev mpath) flatten_modules_macro mname e d
  in
  snd result
(* OCaml requires lower-case value/type/field names and capitalised
   constructor/module names; compute the enforced spelling, or [None] when
   the name kind carries no such requirement (type classes). *)
let compute_ocaml_rename_constant_fun (nk : name_kind) (n : Name.t) : Name.t option =
  match nk with
  | Nk_typeconstr _ | Nk_const _ | Nk_field _ -> Name.uncapitalize n
  | Nk_constr _ | Nk_module _ -> Name.capitalize n
  | Nk_class _ -> None
(* Isabelle: strip leading and trailing underscores.  Returns [None] if the
   name is already in that form. *)
let compute_isa_rename_constant_fun (nk : name_kind) (n : Name.t) : Name.t option =
  let stripped =
    Util.option_repeat Name.remove_underscore_suffix
      (Util.option_repeat Name.remove_underscore n)
  in
  if Name.compare stripped n = 0 then None else Some stripped
(* HOL: strip leading underscores only; [None] when nothing changed. *)
let compute_hol_rename_constant_fun (nk : name_kind) (n : Name.t) : Name.t option =
  let stripped = Util.option_repeat Name.remove_underscore n in
  match Name.compare stripped n with
  | 0 -> None
  | _ -> Some stripped
(* Dispatch to the per-target renaming rule; targets without such a rule
   never force a rename. *)
let compute_target_rename_constant_fun (targ : Target.non_ident_target) (nk : name_kind) (n : Name.t) : Name.t option =
  let rule =
    match targ with
    | Target_ocaml -> Some compute_ocaml_rename_constant_fun
    | Target_isa -> Some compute_isa_rename_constant_fun
    | Target_hol -> Some compute_hol_rename_constant_fun
    | _ -> None
  in
  match rule with
  | None -> None
  | Some f -> f nk n
(* [None] if [n] clashes with neither name set; otherwise a fresh variant of
   [n] avoiding both sets. *)
let get_fresh_name consts consts' n =
  let is_good m = not (NameSet.mem m consts || NameSet.mem m consts') in
  match is_good n with
  | true -> None
  | false -> Some (Name.fresh (Name.to_rope n) is_good)
(* Rename constant [c] for target [targ] if necessary.  [consts] are the
   names already taken for the target, [consts_new] the ones introduced by
   this renaming pass so far.  Returns the extended avoid-set and the
   updated environment. *)
let rename_constant (targ : Target.non_ident_target) (consts : NameSet.t) (consts_new : NameSet.t) (env : env) (c : const_descr_ref) :
  (NameSet.t * env) = begin
  let l = Ast.Trans (false, "rename_constant", None) in
  let c_d = c_env_lookup l env.c_env c in
  let (is_shown, n, n_ascii_opt) = constant_descr_to_name (Target_no_ident targ) c_d in
  (* Apply target-specific renaming, then make the result fresh w.r.t. both
     name sets.  Returns (auto-renamed?, renamed-at-all?, final name). *)
  let compute_new_name (n : Name.t) = begin
    let nk = const_descr_to_kind (c, c_d) in
    let n'_opt = compute_target_rename_constant_fun targ nk n in
    let n' = Util.option_default n n'_opt in
    (* check whether the computed name is fresh and enforce it if necessary *)
    let (is_auto_renamed, n''_opt) =
      match get_fresh_name consts consts_new n' with
        None -> (false, n'_opt)
      | Some n'' -> (true, Some n'') in
    let n'' = Util.option_default n' n''_opt in
    let is_renamed = match n''_opt with None -> false | _ -> true in
    (* NOTE(review): [is_renamed] is computed but never inspected by the
       callers below — confirm whether it can be dropped. *)
    (is_auto_renamed, is_renamed, n'')
  end in
  (* Only warn when the constant's module actually appears in the output. *)
  let check_module_in_output () = begin
    match (Path.get_module_path c_d.const_binding) with
    | None -> true
    | Some mp -> (e_env_lookup l env.e_env mp).mod_in_output
  end in
  (* rename the constant's main name *)
  let (consts_new, env) = if (not is_shown) then (consts_new, env) else begin
    let (is_auto_renamed, is_renamed, n_new) = compute_new_name n in
    (* add the name to the list of constants to avoid *)
    let consts_new' = NameSet.add n_new consts_new in
    begin
      let (c_d', via_opt) = constant_descr_rename targ n_new l c_d in
      (* print a warning for automatic renames *)
      let _ = (if (not is_auto_renamed) || not (check_module_in_output ()) then () else
        let n_org : string = Name.to_string (Path.get_name c_d.const_binding) in
        (Reporting.report_warning env (Reporting.Warn_rename (c_d.spec_l, n_org, Util.option_map (fun (l, n) -> (Name.to_string n, l)) via_opt, Name.to_string n_new, Target_no_ident targ))))
      in
      (* update environment *)
      (consts_new', env_c_env_update env c c_d')
    end
  end in
  (* rename the constant's ASCII representation, if any.
     NOTE(review): this branch updates the environment from the *old* [c_d]
     (fetched before the rename above), which looks like it could overwrite
     the freshly renamed descriptor — confirm against [env_c_env_update]. *)
  if (not is_shown) then (consts_new, env) else
  match n_ascii_opt with None -> (consts_new, env) | Some n_ascii ->
  begin
    let (is_auto_renamed, is_renamed, n_ascii_new) = compute_new_name n_ascii in
    let consts_new' = NameSet.add n_ascii_new consts_new in
    begin
      let c_d' = {c_d with target_ascii_rep = Targetmap.insert c_d.target_ascii_rep (targ, (l, n_ascii_new))} in
      (consts_new', env_c_env_update env c c_d')
    end
  end
end
(* Rename type [t] for target [targ], analogously to [rename_constant]:
   apply the target-specific renaming rule, make the result fresh with
   respect to [consts] / [consts_new], warn about automatic renames, and
   store the new name in the type environment.

   Bug fix: the [match] over the optionally renamed descriptor had no
   [None] branch, so every type that needed no renaming raised
   [Match_failure].  When no renaming is necessary, the environment is
   returned unchanged (the name is still reserved in [consts_new']). *)
let rename_type (targ : Target.non_ident_target) (consts : NameSet.t) (consts_new : NameSet.t) (env : env) (t : Path.t) :
  (NameSet.t * env) = begin
  let l = Ast.Trans (false, "rename_type", None) in
  let td = Types.type_defs_lookup l env.t_env t in
  let n = type_descr_to_name (Target_no_ident targ) t td in
  (* apply target-specific renaming *)
  let n'_opt = compute_target_rename_constant_fun targ (Nk_typeconstr t) n in
  let n' = Util.option_default n n'_opt in
  (* check whether the computed name is fresh and enforce it if necessary *)
  let (is_auto_renamed, n''_opt) =
    match get_fresh_name consts consts_new n' with
      None -> (false, n'_opt)
    | Some n'' -> (true, Some n'') in
  let n'' = Util.option_default n' n''_opt in
  (* add the (possibly unchanged) name to the set of names to avoid *)
  let consts_new' = NameSet.add n'' consts_new in
  match Util.option_map (fun n'' -> type_descr_rename targ n'' l td) n''_opt with
  | None ->
      (* no renaming necessary or possible: keep the environment as it is *)
      (consts_new', env)
  | Some (td', via_opt) -> begin
      let n0 : string = Name.to_string (Path.get_name t) in
      (* warn about automatic renames *)
      let _ = (if (not is_auto_renamed) then () else
        (Reporting.report_warning env (Reporting.Warn_rename (Ast.Unknown, n0, Util.option_map (fun (l, n) -> (Name.to_string n, l)) via_opt, Name.to_string n'', Target_no_ident targ))))
      in
      (* update environment *)
      let env' = {env with t_env = Types.type_defs_update env.t_env t td'} in
      (consts_new', env')
    end
end
(* Rename all types and constants used by [ue] for target [targ].  Only
   human-readable targets are processed; the identity target is left
   untouched. *)
let rename_defs_target (targ : Target.target) ue consts env =
  match dest_human_target targ with
  | None -> env
  | Some targ_ni ->
      let rename_ty (acc, e) t = rename_type targ_ni consts acc e t in
      let rename_c (acc, e) c = rename_constant targ_ni consts acc e c in
      let (_, env) =
        List.fold_left rename_ty (NameSet.empty, env)
          ue.Typed_ast_syntax.used_types
      in
      let (_, env) =
        List.fold_left rename_c (NameSet.empty, env)
          ue.Typed_ast_syntax.used_consts
      in
      env
(* Store [c_d] in [c_env]: allocate a new reference when [c_id_opt] is
   [None], otherwise update the existing entry in place. *)
let c_env_save c_env c_id_opt c_d =
  match c_id_opt with
  | Some c_id -> (c_env_update c_env c_id c_d, c_id)
  | None -> c_env_store c_env c_d
|
277c9cb1cbf403beb6ecc3ea1e030e3c5dd52d101d341c771a288008733c18e7 | mzp/coq-ide-for-ios | haskell.ml | (************************************************************************)
(*  v      *   The Coq Proof Assistant  /  The Coq Development Team     *)
(* <O___,, *   INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2010     *)
(*   \VV/  **************************************************************)
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
(*i $Id: haskell.ml 14010 2011-04-15 16:05:07Z letouzey $ i*)
(*s Production of Haskell syntax. *)
open Pp
open Util
open Names
open Nameops
open Libnames
open Table
open Miniml
open Mlutil
open Common
(*s Renaming issues. *)
(* Print an identifier forced to lower case (Haskell values) resp. upper
   case (Haskell constructors / types / modules). *)
let pr_lower_id id =
  let s = string_of_id id in
  str (String.uncapitalize s)

let pr_upper_id id =
  let s = string_of_id id in
  str (String.capitalize s)
(* Haskell reserved words plus identifiers emitted by the extraction itself
   ("__", "unsafeCoerce", ...); these must never clash with generated
   names. *)
let keywords =
  let add set s = Idset.add (id_of_string s) set in
  List.fold_left add Idset.empty
    [ "case"; "class"; "data"; "default"; "deriving"; "do"; "else";
      "if"; "import"; "in"; "infix"; "infixl"; "infixr"; "instance";
      "let"; "module"; "newtype"; "of"; "then"; "type"; "where"; "_"; "__";
      "as"; "qualified"; "hiding" ; "unit" ; "unsafeCoerce" ]
(* Print the generated module's header: optional GHC/Hugs pragmas (only
   when [unsafeCoerce] is needed, i.e. [usf.magic]), the module
   declaration, qualified imports, the [unsafeCoerce] shim, and the [__]
   dummy used for logical parts ([usf.mldummy]). *)
let preamble mod_name used_modules usf =
  let pp_import mp = str ("import qualified "^ string_of_modfile mp ^"\n")
  in
  (if not usf.magic then mt ()
   else
     str "{-# OPTIONS_GHC -cpp -fglasgow-exts #-}\n" ++
     str "{- For Hugs, use the option -F\"cpp -P -traditional\" -}\n\n")
  ++
  str "module " ++ pr_upper_id mod_name ++ str " where" ++ fnl2 () ++
  str "import qualified Prelude" ++ fnl () ++
  prlist pp_import used_modules ++ fnl () ++
  (if used_modules = [] then mt () else fnl ()) ++
  (* the [unsafeCoerce] shim differs between GHC and Hugs, hence the cpp
     conditional embedded verbatim in the generated file *)
  (if not usf.magic then mt ()
   else str "\
#ifdef __GLASGOW_HASKELL__
import qualified GHC.Base
unsafeCoerce = GHC.Base.unsafeCoerce#
#else
-- HUGS
import qualified IOExts
unsafeCoerce = IOExts.unsafeCoerce
#endif" ++ fnl2 ())
  ++
  (if not usf.mldummy then mt ()
   else str "__ = Prelude.error \"Logical or arity value used\"" ++ fnl2 ())
(* Lambda abstraction over the given variables: prints "\x y ->" followed
   by a space; prints nothing for an empty list. *)
let pp_abst vars =
  match vars with
  | [] -> mt ()
  | l ->
      str "\\" ++ prlist_with_sep (fun () -> (str " ")) pr_id l ++
      str " ->" ++ spc ()
(*s The pretty-printer for haskell syntax *)
(* Print a global reference of kind [k], honouring user-provided inline
   customisations ("Extract Inlined Constant"). *)
let pp_global k r =
  let s = if is_inline_custom r then find_custom r else Common.pp_global k r in
  str s
(*s Pretty-printing of types. [par] is a boolean indicating whether parentheses
are needed or not. *)
(* Kernel name of [Coq.Init.Specif.sig]; [sig] is printed transparently by
   [pp_type] below. *)
let kn_sig =
  let specif_dir = dirpath_of_string "Coq.Init.Specif" in
  make_kn (MPfile specif_dir) empty_dirpath (mk_label "sig")
(* Print an ML type.  [par] asks for surrounding parentheses, [vl] maps
   de Bruijn type variables to identifiers (out-of-range variables are
   printed as "a<i>"). *)
let rec pp_type par vl t =
  let rec pp_rec par = function
    | Tmeta _ | Tvar' _ -> assert false
    | Tvar i -> (try pr_id (List.nth vl (pred i)) with _ -> (str "a" ++ int i))
    | Tglob (r,[]) -> pp_global Type r
    | Tglob (r,l) ->
        if r = IndRef (mind_of_kn kn_sig,0) then
          (* [sig] is extracted transparently: print the carrier type *)
          pp_type true vl (List.hd l)
        else
          pp_par par
            (pp_global Type r ++ spc () ++
             prlist_with_sep spc (pp_type true vl) l)
    | Tarr (t1,t2) ->
        pp_par par
          (pp_rec true t1 ++ spc () ++ str "->" ++ spc () ++ pp_rec false t2)
    | Tdummy _ -> str "()"
    | Tunknown -> str "()"
    | Taxiom -> str "() -- AXIOM TO BE REALIZED\n"
  in
  hov 0 (pp_rec par t)
(*s Pretty-printing of expressions. [par] indicates whether
    parentheses are needed or not. [env] is the list of names for the
    de Bruijn variables. [args] is the list of collected arguments
    (already pretty-printed). *)
(* Whether an expression printed in argument position needs parentheses. *)
let expr_needs_par e =
  match e with
  | MLlam _ -> true
  | MLcase _ -> false (* now that we use the case ... of { ... } syntax *)
  | _ -> false
(* Print an expression.  [par]: parentheses required; [env]: names of the
   de Bruijn variables; [args]: already-printed arguments to apply the
   expression to. *)
let rec pp_expr par env args =
  let par' = args <> [] || par
  and apply st = pp_apply st par args in
  function
    | MLrel n ->
        let id = get_db_name n env in apply (pr_id id)
    | MLapp (f,args') ->
        (* print the head applied to both new and collected arguments *)
        let stl = List.map (pp_expr true env []) args' in
        pp_expr par env (stl @ args) f
    | MLlam _ as a ->
        let fl,a' = collect_lams a in
        let fl,env' = push_vars (List.map id_of_mlid fl) env in
        let st = (pp_abst (List.rev fl) ++ pp_expr false env' [] a') in
        apply (pp_par par' st)
    | MLletin (id,a1,a2) ->
        let i,env' = push_vars [id_of_mlid id] env in
        let pp_id = pr_id (List.hd i)
        and pp_a1 = pp_expr false env [] a1
        and pp_a2 = pp_expr (not par && expr_needs_par a2) env' [] a2 in
        let pp_def =
          str "let {" ++ cut () ++
          hov 1 (pp_id ++ str " = " ++ pp_a1 ++ str "}")
        in
        apply
          (pp_par par'
             (hv 0 (hv 0 (hv 1 pp_def ++ spc () ++ str "in") ++
                    spc () ++ hov 0 pp_a2)))
    | MLglob r ->
        apply (pp_global Term r)
    | MLcons _ as c when is_native_char c -> assert (args=[]); pp_native_char c
    | MLcons (_,r,[]) ->
        assert (args=[]); pp_global Cons r
    | MLcons (_,r,[a]) ->
        assert (args=[]);
        pp_par par (pp_global Cons r ++ spc () ++ pp_expr true env [] a)
    | MLcons (_,r,args') ->
        assert (args=[]);
        pp_par par (pp_global Cons r ++ spc () ++
                    prlist_with_sep spc (pp_expr true env []) args')
    | MLcase (_,t, pv) when is_custom_match pv ->
        (* user-supplied custom match: each branch is turned into a
           function passed to the custom matcher *)
        let mkfun (_,ids,e) =
          if ids <> [] then named_lams (List.rev ids) e
          else dummy_lams (ast_lift 1 e) 1
        in
        apply
          (pp_par par'
             (hov 2
                (str (find_custom_match pv) ++ fnl () ++
                 prvect (fun tr -> pp_expr true env [] (mkfun tr) ++ fnl ()) pv
                 ++ pp_expr true env [] t)))
    | MLcase (info,t, pv) ->
        apply (pp_par par'
                 (v 0 (str "case " ++ pp_expr false env [] t ++ str " of {" ++
                       fnl () ++ pp_pat env info pv)))
    | MLfix (i,ids,defs) ->
        let ids',env' = push_vars (List.rev (Array.to_list ids)) env in
        pp_fix par env' i (Array.of_list (List.rev ids'),defs) args
    | MLexn s ->
        (* An [MLexn] may be applied, but I don't really care. *)
        pp_par par (str "Prelude.error" ++ spc () ++ qs s)
    | MLdummy ->
        str "__" (* An [MLdummy] may be applied, but I don't really care. *)
    | MLmagic a ->
        pp_apply (str "unsafeCoerce") par (pp_expr true env [] a :: args)
    | MLaxiom -> pp_par par (str "Prelude.error \"AXIOM TO BE REALIZED\"")
(* Print the branches of a [case].  Branches recorded in [info.m_same] as
   sharing the same right-hand side are factored into one trailing default
   branch ("_ ->" or a single variable). *)
and pp_pat env info pv =
  let pp_one_pat (name,ids,t) =
    let ids,env' = push_vars (List.rev_map id_of_mlid ids) env in
    let par = expr_needs_par t in
    hov 2 (str " " ++ pp_global Cons name ++
           (match ids with
              | [] -> mt ()
              | _ -> (str " " ++
                      prlist_with_sep spc pr_id (List.rev ids))) ++
           str " ->" ++ spc () ++ pp_expr par env' [] t)
  in
  (* right-hand side shared by the factored branches, and their indices *)
  let factor_br, factor_set = try match info.m_same with
    | BranchFun ints ->
        let i = Intset.choose ints in
        branch_as_fun info.m_typs pv.(i), ints
    | BranchCst ints ->
        let i = Intset.choose ints in
        ast_pop (branch_as_cst pv.(i)), ints
    | BranchNone -> MLdummy, Intset.empty
  with _ -> MLdummy, Intset.empty
  in
  let last = Array.length pv - 1 in
  prvecti
    (fun i x -> if Intset.mem i factor_set then mt () else
       (pp_one_pat pv.(i) ++
        (* close the case block after the last branch, else separate *)
        if i = last && Intset.is_empty factor_set then str "}" else
          (str ";" ++ fnl ()))) pv
  ++
  if Intset.is_empty factor_set then mt () else
    let par = expr_needs_par factor_br in
    match info.m_same with
      | BranchFun _ ->
          let ids, env' = push_vars [anonymous_name] env in
          hov 2 (str " " ++ pr_id (List.hd ids) ++ str " ->" ++ spc () ++
                 pp_expr par env' [] factor_br ++ str "}")
      | BranchCst _ ->
          hov 2 (str " _ ->" ++ spc () ++ pp_expr par env [] factor_br ++ str "}")
      | BranchNone -> mt ()
(*s names of the functions ([ids]) are already pushed in [env],
and passed here just for convenience. *)
(* Print a mutual fixpoint as "let { f1 ...; ...; fn ... } in fi args".
   The names [ids] are already pushed in [env]. *)
and pp_fix par env i (ids,bl) args =
  pp_par par
    (v 0
       (v 1 (str "let {" ++ fnl () ++
             prvect_with_sep (fun () -> str ";" ++ fnl ())
               (fun (fi,ti) -> pp_function env (pr_id fi) ti)
               (array_map2 (fun a b -> a,b) ids bl) ++
             str "}") ++
        fnl () ++ str "in " ++ pp_apply (pr_id ids.(i)) false args))
(* Print one function binding "f x1 ... xn =" followed by its body on the
   next line; [t]'s leading lambdas become the binders. *)
and pp_function env f t =
  let bl,t' = collect_lams t in
  let bl,env' = push_vars (List.map id_of_mlid bl) env in
  (f ++ pr_binding (List.rev bl) ++
   str " =" ++ fnl () ++ str " " ++
   hov 2 (pp_expr false env' [] t'))
(*s Pretty-printing of inductive types declaration. *)
let pp_comment s = str "-- " ++ s ++ fnl ()
(* A logical inductive leaves no code behind: only comments recording its
   name and its constructor names. *)
let pp_logical_ind packet =
  let cons_doc =
    str "with constructors : " ++
    prvect_with_sep spc pr_id packet.ip_consnames
  in
  pp_comment (pr_id packet.ip_typename ++ str " : logical inductive") ++
  pp_comment cons_doc
(* A singleton inductive (one constructor with one argument) is printed as
   a Haskell type synonym of its argument type. *)
let pp_singleton kn packet =
  let l = rename_tvars keywords packet.ip_vars in
  let l' = List.rev l in
  hov 2 (str "type " ++ pp_global Type (IndRef (kn,0)) ++ spc () ++
         prlist_with_sep spc pr_id l ++
         (if l <> [] then str " " else mt ()) ++ str "=" ++ spc () ++
         (* [List.hd] is safe here: a [Singleton] packet has exactly one
            constructor carrying one argument type *)
         pp_type false l' (List.hd packet.ip_types.(0)) ++ fnl () ++
         pp_comment (str "singleton inductive, whose constructor was " ++
                     pr_id packet.ip_consnames.(0)))
(* Print one inductive block [ip] with type variables [pl] and constructor
   argument types [cv] as a Haskell "data" declaration; an inductive
   without constructors becomes "type ... = ()". *)
let pp_one_ind ip pl cv =
  let pl = rename_tvars keywords pl in
  let pp_constructor (r,l) =
    (pp_global Cons r ++
     match l with
       | [] -> (mt ())
       | _ -> (str " " ++
               prlist_with_sep
                 (fun () -> (str " ")) (pp_type true pl) l))
  in
  str (if Array.length cv = 0 then "type " else "data ") ++
  pp_global Type (IndRef ip) ++
  prlist_strict (fun id -> str " " ++ pr_lower_id id) pl ++ str " =" ++
  if Array.length cv = 0 then str " () -- empty inductive"
  else
    (fnl () ++ str " " ++
     v 0 (str " " ++
          (* constructor indices are 1-based in [ConstructRef] *)
          prvect_with_sep (fun () -> fnl () ++ str "| ") pp_constructor
            (Array.mapi (fun i c -> ConstructRef (ip,i+1),c) cv)))
(* Print packets [i..] of the mutual inductive [kn]: packets with a custom
   extraction are skipped, logical ones are replaced by comments.  [first]
   tracks whether anything has been printed yet (controls trailing
   newline). *)
let rec pp_ind first kn i ind =
  if i >= Array.length ind.ind_packets then
    if first then mt () else fnl ()
  else
    let ip = (kn,i) in
    let p = ind.ind_packets.(i) in
    if is_custom (IndRef (kn,i)) then pp_ind first kn (i+1) ind
    else
      if p.ip_logical then
        pp_logical_ind p ++ pp_ind first kn (i+1) ind
      else
        pp_one_ind ip p.ip_vars p.ip_types ++ fnl () ++
        pp_ind false kn (i+1) ind
(*s Pretty-printing of a declaration. *)
let pp_string_parameters ids = prlist (fun id -> str id ++ str " ")
(* Print one declaration: inductive types, type synonyms, (mutual)
   fixpoints and plain terms.  User customisations ("Extract Constant" /
   inline customs) take precedence over the extracted body. *)
let pp_decl = function
  | Dind (kn,i) when i.ind_kind = Singleton ->
      pp_singleton (mind_of_kn kn) i.ind_packets.(0) ++ fnl ()
  | Dind (kn,i) -> hov 0 (pp_ind true (mind_of_kn kn) 0 i)
  | Dtype (r, l, t) ->
      if is_inline_custom r then mt ()
      else
        let l = rename_tvars keywords l in
        let st =
          try
            (* user-provided replacement for the type body *)
            let ids,s = find_type_custom r in
            prlist (fun id -> str (id^" ")) ids ++ str "=" ++ spc () ++ str s
          with Not_found ->
            prlist (fun id -> pr_id id ++ str " ") l ++
            if t = Taxiom then str "= () -- AXIOM TO BE REALIZED\n"
            else str "=" ++ spc () ++ pp_type false l t
        in
        hov 2 (str "type " ++ pp_global Type r ++ spc () ++ st) ++ fnl2 ()
  | Dfix (rv, defs, typs) ->
      let names = Array.map
        (fun r -> if is_inline_custom r then mt () else pp_global Term r) rv
      in
      prvecti
        (fun i r ->
           (* skip inlined constants and bodies marked unused *)
           let void = is_inline_custom r ||
             (not (is_custom r) && defs.(i) = MLexn "UNUSED")
           in
           if void then mt ()
           else
             names.(i) ++ str " :: " ++ pp_type false [] typs.(i) ++ fnl () ++
             (if is_custom r then
                (names.(i) ++ str " = " ++ str (find_custom r))
              else
                (pp_function (empty_env ()) names.(i) defs.(i)))
             ++ fnl2 ())
        rv
  | Dterm (r, a, t) ->
      if is_inline_custom r then mt ()
      else
        let e = pp_global Term r in
        e ++ str " :: " ++ pp_type false [] t ++ fnl () ++
        if is_custom r then
          hov 0 (e ++ str " = " ++ str (find_custom r) ++ fnl2 ())
        else
          hov 0 (pp_function (empty_env ()) e a ++ fnl2 ())
(* Print a structure element.  Module types and unapplied functors are
   silently dropped; [MEident]/[MEapply] must have been expanded before
   reaching the printer. *)
let rec pp_structure_elem = function
  | (l,SEdecl d) -> pp_decl d
  | (l,SEmodule m) -> pp_module_expr m.ml_mod_expr
  | (l,SEmodtype m) -> mt ()
      (* for the moment we simply discard module type *)

and pp_module_expr = function
  | MEstruct (mp,sel) -> prlist_strict pp_structure_elem sel
  | MEfunctor _ -> mt ()
      (* for the moment we simply discard unapplied functors *)
  | MEident _ | MEapply _ -> assert false
      (* should be expansed in extract_env *)
(* Print a whole extracted structure, making each module path visible
   while its elements are printed. *)
let pp_struct struc =
  let pp_sel (mp,sel) =
    push_visible mp [];
    let p = prlist_strict pp_structure_elem sel in
    pop_visible ();
    p
  in
  prlist_strict pp_sel struc
(* Descriptor wiring the Haskell printer into the extraction machinery.
   Haskell has no interface files, hence the empty signature printers. *)
let haskell_descr = {
  keywords = keywords;
  file_suffix = ".hs";
  sig_suffix = None;
  preamble = preamble;
  sig_preamble = (fun _ _ _ -> mt ());
  pp_struct = pp_struct;
  pp_sig = (fun _ -> mt ());
  pp_decl = pp_decl;
}
| null | https://raw.githubusercontent.com/mzp/coq-ide-for-ios/4cdb389bbecd7cdd114666a8450ecf5b5f0391d3/coqlib/plugins/extraction/haskell.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
s Production of Haskell syntax.
s The pretty-printer for haskell syntax
s Pretty-printing of types. [par] is a boolean indicating whether parentheses
are needed or not.
now that we use the case ... of { ... } syntax
An [MLdummy] may be applied, but I don't really care.
s names of the functions ([ids]) are already pushed in [env],
and passed here just for convenience.
s Pretty-printing of inductive types declaration.
s Pretty-printing of a declaration.
for the moment we simply discard module type
for the moment we simply discard unapplied functors
should be expansed in extract_env | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
i $ I d : haskell.ml 14010 2011 - 04 - 15 16:05:07Z letouzey $ i
open Pp
open Util
open Names
open Nameops
open Libnames
open Table
open Miniml
open Mlutil
open Common
s renaming issues .
let pr_lower_id id = str (String.uncapitalize (string_of_id id))
let pr_upper_id id = str (String.capitalize (string_of_id id))
let keywords =
List.fold_right (fun s -> Idset.add (id_of_string s))
[ "case"; "class"; "data"; "default"; "deriving"; "do"; "else";
"if"; "import"; "in"; "infix"; "infixl"; "infixr"; "instance";
"let"; "module"; "newtype"; "of"; "then"; "type"; "where"; "_"; "__";
"as"; "qualified"; "hiding" ; "unit" ; "unsafeCoerce" ]
Idset.empty
let preamble mod_name used_modules usf =
let pp_import mp = str ("import qualified "^ string_of_modfile mp ^"\n")
in
(if not usf.magic then mt ()
else
str "{-# OPTIONS_GHC -cpp -fglasgow-exts #-}\n" ++
str "{- For Hugs, use the option -F\"cpp -P -traditional\" -}\n\n")
++
str "module " ++ pr_upper_id mod_name ++ str " where" ++ fnl2 () ++
str "import qualified Prelude" ++ fnl () ++
prlist pp_import used_modules ++ fnl () ++
(if used_modules = [] then mt () else fnl ()) ++
(if not usf.magic then mt ()
else str "\
#ifdef __GLASGOW_HASKELL__
import qualified GHC.Base
unsafeCoerce = GHC.Base.unsafeCoerce#
#else
-- HUGS
import qualified IOExts
unsafeCoerce = IOExts.unsafeCoerce
#endif" ++ fnl2 ())
++
(if not usf.mldummy then mt ()
else str "__ = Prelude.error \"Logical or arity value used\"" ++ fnl2 ())
let pp_abst = function
| [] -> (mt ())
| l -> (str "\\" ++
prlist_with_sep (fun () -> (str " ")) pr_id l ++
str " ->" ++ spc ())
let pp_global k r =
if is_inline_custom r then str (find_custom r)
else str (Common.pp_global k r)
let kn_sig =
let specif = MPfile (dirpath_of_string "Coq.Init.Specif") in
make_kn specif empty_dirpath (mk_label "sig")
let rec pp_type par vl t =
let rec pp_rec par = function
| Tmeta _ | Tvar' _ -> assert false
| Tvar i -> (try pr_id (List.nth vl (pred i)) with _ -> (str "a" ++ int i))
| Tglob (r,[]) -> pp_global Type r
| Tglob (r,l) ->
if r = IndRef (mind_of_kn kn_sig,0) then
pp_type true vl (List.hd l)
else
pp_par par
(pp_global Type r ++ spc () ++
prlist_with_sep spc (pp_type true vl) l)
| Tarr (t1,t2) ->
pp_par par
(pp_rec true t1 ++ spc () ++ str "->" ++ spc () ++ pp_rec false t2)
| Tdummy _ -> str "()"
| Tunknown -> str "()"
| Taxiom -> str "() -- AXIOM TO BE REALIZED\n"
in
hov 0 (pp_rec par t)
s Pretty - printing of expressions . [ par ] indicates whether
parentheses are needed or not . [ env ] is the list of names for the
de Bruijn variables . [ args ] is the list of collected arguments
( already pretty - printed ) .
parentheses are needed or not. [env] is the list of names for the
de Bruijn variables. [args] is the list of collected arguments
(already pretty-printed). *)
let expr_needs_par = function
| MLlam _ -> true
| _ -> false
let rec pp_expr par env args =
let par' = args <> [] || par
and apply st = pp_apply st par args in
function
| MLrel n ->
let id = get_db_name n env in apply (pr_id id)
| MLapp (f,args') ->
let stl = List.map (pp_expr true env []) args' in
pp_expr par env (stl @ args) f
| MLlam _ as a ->
let fl,a' = collect_lams a in
let fl,env' = push_vars (List.map id_of_mlid fl) env in
let st = (pp_abst (List.rev fl) ++ pp_expr false env' [] a') in
apply (pp_par par' st)
| MLletin (id,a1,a2) ->
let i,env' = push_vars [id_of_mlid id] env in
let pp_id = pr_id (List.hd i)
and pp_a1 = pp_expr false env [] a1
and pp_a2 = pp_expr (not par && expr_needs_par a2) env' [] a2 in
let pp_def =
str "let {" ++ cut () ++
hov 1 (pp_id ++ str " = " ++ pp_a1 ++ str "}")
in
apply
(pp_par par'
(hv 0 (hv 0 (hv 1 pp_def ++ spc () ++ str "in") ++
spc () ++ hov 0 pp_a2)))
| MLglob r ->
apply (pp_global Term r)
| MLcons _ as c when is_native_char c -> assert (args=[]); pp_native_char c
| MLcons (_,r,[]) ->
assert (args=[]); pp_global Cons r
| MLcons (_,r,[a]) ->
assert (args=[]);
pp_par par (pp_global Cons r ++ spc () ++ pp_expr true env [] a)
| MLcons (_,r,args') ->
assert (args=[]);
pp_par par (pp_global Cons r ++ spc () ++
prlist_with_sep spc (pp_expr true env []) args')
| MLcase (_,t, pv) when is_custom_match pv ->
let mkfun (_,ids,e) =
if ids <> [] then named_lams (List.rev ids) e
else dummy_lams (ast_lift 1 e) 1
in
apply
(pp_par par'
(hov 2
(str (find_custom_match pv) ++ fnl () ++
prvect (fun tr -> pp_expr true env [] (mkfun tr) ++ fnl ()) pv
++ pp_expr true env [] t)))
| MLcase (info,t, pv) ->
apply (pp_par par'
(v 0 (str "case " ++ pp_expr false env [] t ++ str " of {" ++
fnl () ++ pp_pat env info pv)))
| MLfix (i,ids,defs) ->
let ids',env' = push_vars (List.rev (Array.to_list ids)) env in
pp_fix par env' i (Array.of_list (List.rev ids'),defs) args
| MLexn s ->
An [ MLexn ] may be applied , but I do n't really care .
pp_par par (str "Prelude.error" ++ spc () ++ qs s)
| MLdummy ->
| MLmagic a ->
pp_apply (str "unsafeCoerce") par (pp_expr true env [] a :: args)
| MLaxiom -> pp_par par (str "Prelude.error \"AXIOM TO BE REALIZED\"")
and pp_pat env info pv =
let pp_one_pat (name,ids,t) =
let ids,env' = push_vars (List.rev_map id_of_mlid ids) env in
let par = expr_needs_par t in
hov 2 (str " " ++ pp_global Cons name ++
(match ids with
| [] -> mt ()
| _ -> (str " " ++
prlist_with_sep spc pr_id (List.rev ids))) ++
str " ->" ++ spc () ++ pp_expr par env' [] t)
in
let factor_br, factor_set = try match info.m_same with
| BranchFun ints ->
let i = Intset.choose ints in
branch_as_fun info.m_typs pv.(i), ints
| BranchCst ints ->
let i = Intset.choose ints in
ast_pop (branch_as_cst pv.(i)), ints
| BranchNone -> MLdummy, Intset.empty
with _ -> MLdummy, Intset.empty
in
let last = Array.length pv - 1 in
prvecti
(fun i x -> if Intset.mem i factor_set then mt () else
(pp_one_pat pv.(i) ++
if i = last && Intset.is_empty factor_set then str "}" else
(str ";" ++ fnl ()))) pv
++
if Intset.is_empty factor_set then mt () else
let par = expr_needs_par factor_br in
match info.m_same with
| BranchFun _ ->
let ids, env' = push_vars [anonymous_name] env in
hov 2 (str " " ++ pr_id (List.hd ids) ++ str " ->" ++ spc () ++
pp_expr par env' [] factor_br ++ str "}")
| BranchCst _ ->
hov 2 (str " _ ->" ++ spc () ++ pp_expr par env [] factor_br ++ str "}")
| BranchNone -> mt ()
and pp_fix par env i (ids,bl) args =
pp_par par
(v 0
(v 1 (str "let {" ++ fnl () ++
prvect_with_sep (fun () -> str ";" ++ fnl ())
(fun (fi,ti) -> pp_function env (pr_id fi) ti)
(array_map2 (fun a b -> a,b) ids bl) ++
str "}") ++
fnl () ++ str "in " ++ pp_apply (pr_id ids.(i)) false args))
and pp_function env f t =
let bl,t' = collect_lams t in
let bl,env' = push_vars (List.map id_of_mlid bl) env in
(f ++ pr_binding (List.rev bl) ++
str " =" ++ fnl () ++ str " " ++
hov 2 (pp_expr false env' [] t'))
let pp_comment s = str "-- " ++ s ++ fnl ()
let pp_logical_ind packet =
pp_comment (pr_id packet.ip_typename ++ str " : logical inductive") ++
pp_comment (str "with constructors : " ++
prvect_with_sep spc pr_id packet.ip_consnames)
let pp_singleton kn packet =
let l = rename_tvars keywords packet.ip_vars in
let l' = List.rev l in
hov 2 (str "type " ++ pp_global Type (IndRef (kn,0)) ++ spc () ++
prlist_with_sep spc pr_id l ++
(if l <> [] then str " " else mt ()) ++ str "=" ++ spc () ++
pp_type false l' (List.hd packet.ip_types.(0)) ++ fnl () ++
pp_comment (str "singleton inductive, whose constructor was " ++
pr_id packet.ip_consnames.(0)))
let pp_one_ind ip pl cv =
let pl = rename_tvars keywords pl in
let pp_constructor (r,l) =
(pp_global Cons r ++
match l with
| [] -> (mt ())
| _ -> (str " " ++
prlist_with_sep
(fun () -> (str " ")) (pp_type true pl) l))
in
str (if Array.length cv = 0 then "type " else "data ") ++
pp_global Type (IndRef ip) ++
prlist_strict (fun id -> str " " ++ pr_lower_id id) pl ++ str " =" ++
if Array.length cv = 0 then str " () -- empty inductive"
else
(fnl () ++ str " " ++
v 0 (str " " ++
prvect_with_sep (fun () -> fnl () ++ str "| ") pp_constructor
(Array.mapi (fun i c -> ConstructRef (ip,i+1),c) cv)))
let rec pp_ind first kn i ind =
if i >= Array.length ind.ind_packets then
if first then mt () else fnl ()
else
let ip = (kn,i) in
let p = ind.ind_packets.(i) in
if is_custom (IndRef (kn,i)) then pp_ind first kn (i+1) ind
else
if p.ip_logical then
pp_logical_ind p ++ pp_ind first kn (i+1) ind
else
pp_one_ind ip p.ip_vars p.ip_types ++ fnl () ++
pp_ind false kn (i+1) ind
let pp_string_parameters ids = prlist (fun id -> str id ++ str " ")
let pp_decl = function
| Dind (kn,i) when i.ind_kind = Singleton ->
pp_singleton (mind_of_kn kn) i.ind_packets.(0) ++ fnl ()
| Dind (kn,i) -> hov 0 (pp_ind true (mind_of_kn kn) 0 i)
| Dtype (r, l, t) ->
if is_inline_custom r then mt ()
else
let l = rename_tvars keywords l in
let st =
try
let ids,s = find_type_custom r in
prlist (fun id -> str (id^" ")) ids ++ str "=" ++ spc () ++ str s
with Not_found ->
prlist (fun id -> pr_id id ++ str " ") l ++
if t = Taxiom then str "= () -- AXIOM TO BE REALIZED\n"
else str "=" ++ spc () ++ pp_type false l t
in
hov 2 (str "type " ++ pp_global Type r ++ spc () ++ st) ++ fnl2 ()
| Dfix (rv, defs, typs) ->
let names = Array.map
(fun r -> if is_inline_custom r then mt () else pp_global Term r) rv
in
prvecti
(fun i r ->
let void = is_inline_custom r ||
(not (is_custom r) && defs.(i) = MLexn "UNUSED")
in
if void then mt ()
else
names.(i) ++ str " :: " ++ pp_type false [] typs.(i) ++ fnl () ++
(if is_custom r then
(names.(i) ++ str " = " ++ str (find_custom r))
else
(pp_function (empty_env ()) names.(i) defs.(i)))
++ fnl2 ())
rv
| Dterm (r, a, t) ->
if is_inline_custom r then mt ()
else
let e = pp_global Term r in
e ++ str " :: " ++ pp_type false [] t ++ fnl () ++
if is_custom r then
hov 0 (e ++ str " = " ++ str (find_custom r) ++ fnl2 ())
else
hov 0 (pp_function (empty_env ()) e a ++ fnl2 ())
let rec pp_structure_elem = function
| (l,SEdecl d) -> pp_decl d
| (l,SEmodule m) -> pp_module_expr m.ml_mod_expr
| (l,SEmodtype m) -> mt ()
and pp_module_expr = function
| MEstruct (mp,sel) -> prlist_strict pp_structure_elem sel
| MEfunctor _ -> mt ()
| MEident _ | MEapply _ -> assert false
let pp_struct =
let pp_sel (mp,sel) =
push_visible mp [];
let p = prlist_strict pp_structure_elem sel in
pop_visible (); p
in
prlist_strict pp_sel
let haskell_descr = {
keywords = keywords;
file_suffix = ".hs";
preamble = preamble;
pp_struct = pp_struct;
sig_suffix = None;
sig_preamble = (fun _ _ _ -> mt ());
pp_sig = (fun _ -> mt ());
pp_decl = pp_decl;
}
|
d118326086cebdf414cf6b216970159636e9c06b713b246dc5fc848c15365ac3 | seanomlor/programming-in-haskell | replicate.hs | replicate' :: Int -> a -> [a]
replicate' n x = [x | _ <- [0 .. n]]
| null | https://raw.githubusercontent.com/seanomlor/programming-in-haskell/e05142e6709eeba2e95cf86f376a32c9e629df88/05-list-comprehensions/replicate.hs | haskell | replicate' :: Int -> a -> [a]
replicate' n x = [x | _ <- [0 .. n]]
| |
a723f983a4457678ef6795becf5d8f73799dfa8ed5dc8d93465d0fa125b8d95b | tsloughter/kuberl | kuberl_v1_container_image.erl | -module(kuberl_v1_container_image).
-export([encode/1]).
-export_type([kuberl_v1_container_image/0]).
-type kuberl_v1_container_image() ::
#{ 'names' := list(),
'sizeBytes' => integer()
}.
encode(#{ 'names' := Names,
'sizeBytes' := SizeBytes
}) ->
#{ 'names' => Names,
'sizeBytes' => SizeBytes
}.
| null | https://raw.githubusercontent.com/tsloughter/kuberl/f02ae6680d6ea5db6e8b6c7acbee8c4f9df482e2/gen/kuberl_v1_container_image.erl | erlang | -module(kuberl_v1_container_image).
-export([encode/1]).
-export_type([kuberl_v1_container_image/0]).
-type kuberl_v1_container_image() ::
#{ 'names' := list(),
'sizeBytes' => integer()
}.
encode(#{ 'names' := Names,
'sizeBytes' := SizeBytes
}) ->
#{ 'names' => Names,
'sizeBytes' => SizeBytes
}.
| |
7a920aa74cda6a81cfd5cc3a0e2ce375cc7b31cc9b5ee4df02ad069e481c2de0 | hslua/hslua | Types.hs | |
{-|
Module      : HsLua.Packaging.Types
Copyright   : © 2020-2023 Albert Krewinkel
License     : MIT
Maintainer  : Albert Krewinkel <tarleb+hslua@zeitkraut.de>
Stability   : alpha
Portability : Portable

Marshaling and documenting Haskell functions.
-}
module HsLua.Packaging.Types
( -- * Documented module
Module (..)
, Field (..)
-- * Documented functions
, DocumentedFunction (..)
-- ** Documentation types
, FunctionDoc (..)
, ParameterDoc (..)
, ResultsDoc (..)
, ResultValueDoc (..)
) where
import Data.Text (Text)
import Data.Version (Version)
import HsLua.Core (LuaE, Name, NumResults)
import HsLua.ObjectOrientation (Operation)
import HsLua.Typing (TypeSpec)
-- | Named and documented Lua module.
data Module e = Module
{ moduleName :: Name
, moduleDescription :: Text
, moduleFields :: [Field e]
, moduleFunctions :: [DocumentedFunction e]
, moduleOperations :: [(Operation, DocumentedFunction e)]
, moduleTypeInitializers :: [LuaE e Name]
}
-- | Self-documenting module field
data Field e = Field
{ fieldName :: Text
, fieldType :: TypeSpec
, fieldDescription :: Text
, fieldPushValue :: LuaE e ()
}
--
-- Function components
--
| Haskell equivallent to CFunction , i.e. , function callable
from Lua .
data DocumentedFunction e = DocumentedFunction
{ callFunction :: LuaE e NumResults
, functionName :: Name
, functionDoc :: FunctionDoc
}
--
-- Documentation types
--
| Documentation for a function
data FunctionDoc = FunctionDoc
{ functionDescription :: Text
, parameterDocs :: [ParameterDoc]
, functionResultsDocs :: ResultsDoc
, functionSince :: Maybe Version -- ^ Version in which the function
-- was introduced.
}
deriving (Eq, Ord, Show)
-- | Documentation for function parameters.
data ParameterDoc = ParameterDoc
{ parameterName :: Text
, parameterType :: Text
, parameterDescription :: Text
, parameterIsOptional :: Bool
}
deriving (Eq, Ord, Show)
-- | Documentation for the return values of a function.
data ResultsDoc
= ResultsDocList [ResultValueDoc] -- ^ List of individual results
| ResultsDocMult Text -- ^ Flexible results
deriving (Eq, Ord, Show)
-- | Documentation for a single return value of a function.
data ResultValueDoc = ResultValueDoc
{ resultValueType :: Text
, resultValueDescription :: Text
}
deriving (Eq, Ord, Show)
| null | https://raw.githubusercontent.com/hslua/hslua/e86d00dfa8b0915f3060c9e618fc3e373048d9c2/hslua-packaging/src/HsLua/Packaging/Types.hs | haskell | * Documented module
* Documented functions
** Documentation types
| Named and documented Lua module.
| Self-documenting module field
Function components
Documentation types
^ Version in which the function
was introduced.
| Documentation for function parameters.
| Documentation for the return values of a function.
^ List of individual results
^ Flexible results
| Documentation for a single return value of a function. | |
Module : HsLua . Packaging . Types
Copyright : © 2020 - 2023 : MIT
Maintainer : < tarleb+ >
Stability : alpha
Portability : Portable
Marshaling and documenting functions .
Module : HsLua.Packaging.Types
Copyright : © 2020-2023 Albert Krewinkel
License : MIT
Maintainer : Albert Krewinkel <tarleb+>
Stability : alpha
Portability : Portable
Marshaling and documenting Haskell functions.
-}
module HsLua.Packaging.Types
Module (..)
, Field (..)
, DocumentedFunction (..)
, FunctionDoc (..)
, ParameterDoc (..)
, ResultsDoc (..)
, ResultValueDoc (..)
) where
import Data.Text (Text)
import Data.Version (Version)
import HsLua.Core (LuaE, Name, NumResults)
import HsLua.ObjectOrientation (Operation)
import HsLua.Typing (TypeSpec)
data Module e = Module
{ moduleName :: Name
, moduleDescription :: Text
, moduleFields :: [Field e]
, moduleFunctions :: [DocumentedFunction e]
, moduleOperations :: [(Operation, DocumentedFunction e)]
, moduleTypeInitializers :: [LuaE e Name]
}
data Field e = Field
{ fieldName :: Text
, fieldType :: TypeSpec
, fieldDescription :: Text
, fieldPushValue :: LuaE e ()
}
| Haskell equivallent to CFunction , i.e. , function callable
from Lua .
data DocumentedFunction e = DocumentedFunction
{ callFunction :: LuaE e NumResults
, functionName :: Name
, functionDoc :: FunctionDoc
}
| Documentation for a function
data FunctionDoc = FunctionDoc
{ functionDescription :: Text
, parameterDocs :: [ParameterDoc]
, functionResultsDocs :: ResultsDoc
}
deriving (Eq, Ord, Show)
data ParameterDoc = ParameterDoc
{ parameterName :: Text
, parameterType :: Text
, parameterDescription :: Text
, parameterIsOptional :: Bool
}
deriving (Eq, Ord, Show)
data ResultsDoc
deriving (Eq, Ord, Show)
data ResultValueDoc = ResultValueDoc
{ resultValueType :: Text
, resultValueDescription :: Text
}
deriving (Eq, Ord, Show)
|
e0d216b70b497288541be7b025e2f679f5c8c1aa776fadf806a0ecf11b8396a4 | thelema/ocaml-community | typecheck.ml | (*************************************************************************)
(* *)
(* OCaml LablTk library *)
(* *)
, Kyoto University RIMS
(* *)
Copyright 1999 Institut National de Recherche en Informatique et
en Automatique and Kyoto University . All rights reserved .
This file is distributed under the terms of the GNU Library
(* General Public License, with the special exception on linking *)
(* described in file ../../../LICENSE. *)
(* *)
(*************************************************************************)
$ Id$
open StdLabels
open Tk
open Parsetree
open Typedtree
open Location
open Jg_tk
open Mytypes
(* Optionally preprocess a source file *)
let preprocess ~pp ~ext text =
let sourcefile = Filename.temp_file "caml" ext in
begin try
let oc = open_out_bin sourcefile in
output_string oc text;
flush oc;
close_out oc
with _ ->
failwith "Preprocessing error"
end;
let tmpfile = Filename.temp_file "camlpp" ext in
let comm = Printf.sprintf "%s %s > %s" pp sourcefile tmpfile in
if Ccomp.command comm <> 0 then begin
Sys.remove sourcefile;
Sys.remove tmpfile;
failwith "Preprocessing error"
end;
Sys.remove sourcefile;
tmpfile
exception Outdated_version
let parse_pp ~parse ~wrap ~ext text =
Location.input_name := "";
match !Clflags.preprocessor with
None ->
let buffer = Lexing.from_string text in
Location.init buffer "";
parse buffer
| Some pp ->
let tmpfile = preprocess ~pp ~ext text in
let ast_magic =
if ext = ".ml" then Config.ast_impl_magic_number
else Config.ast_intf_magic_number in
let ic = open_in_bin tmpfile in
let ast =
try
let buffer = Misc.input_bytes ic (String.length ast_magic) in
if buffer = ast_magic then begin
ignore (input_value ic);
wrap (input_value ic)
end else if String.sub buffer 0 9 = String.sub ast_magic 0 9 then
raise Outdated_version
else
raise Exit
with
Outdated_version ->
close_in ic;
Sys.remove tmpfile;
failwith "OCaml and preprocessor have incompatible versions"
| _ ->
seek_in ic 0;
let buffer = Lexing.from_channel ic in
Location.init buffer "";
parse buffer
in
close_in ic;
Sys.remove tmpfile;
ast
let nowarnings = ref false
let f txt =
let error_messages = ref [] in
let text = Jg_text.get_all txt.tw
and env = ref (Env.open_pers_signature "Pervasives" Env.initial) in
let tl, ew, end_message =
Jg_message.formatted ~title:"Warnings" ~ppf:Format.err_formatter () in
Text.tag_remove txt.tw ~tag:"error" ~start:tstart ~stop:tend;
txt.structure <- [];
txt.type_info <- [];
txt.signature <- [];
txt.psignature <- [];
ignore (Stypes.get_info ());
Clflags.annotations := true;
begin try
if Filename.check_suffix txt.name ".mli" then
let psign = parse_pp text ~ext:".mli"
~parse:Parse.interface ~wrap:(fun x -> x) in
txt.psignature <- psign;
txt.signature <- (Typemod.transl_signature !env psign).sig_type;
others are interpreted as .ml
let psl = parse_pp text ~ext:".ml"
~parse:Parse.use_file ~wrap:(fun x -> [Parsetree.Ptop_def x]) in
List.iter psl ~f:
begin function
Ptop_def pstr ->
let str, sign, env' = Typemod.type_structure !env pstr Location.none in
txt.structure <- txt.structure @ str.str_items;
txt.signature <- txt.signature @ sign;
env := env'
| Ptop_dir _ -> ()
end;
txt.type_info <- Stypes.get_info ();
with
Lexer.Error _ | Syntaxerr.Error _
| Typecore.Error _ | Typemod.Error _
| Typeclass.Error _ | Typedecl.Error _
| Typetexp.Error _ | Includemod.Error _
| Env.Error _ | Ctype.Tags _ | Failure _ as exn ->
txt.type_info <- Stypes.get_info ();
let et, ew, end_message = Jg_message.formatted ~title:"Error !" () in
error_messages := et :: !error_messages;
let range = match exn with
Lexer.Error (err, l) ->
Lexer.report_error Format.std_formatter err; l
| Syntaxerr.Error err ->
Syntaxerr.report_error Format.std_formatter err;
Syntaxerr.location_of_error err
| Typecore.Error (l,err) ->
Typecore.report_error Format.std_formatter err; l
| Typeclass.Error (l,err) ->
Typeclass.report_error Format.std_formatter err; l
| Typedecl.Error (l, err) ->
Typedecl.report_error Format.std_formatter err; l
| Typemod.Error (l,err) ->
Typemod.report_error Format.std_formatter err; l
| Typetexp.Error (l,err) ->
Typetexp.report_error Format.std_formatter err; l
| Includemod.Error errl ->
Includemod.report_error Format.std_formatter errl; Location.none
| Env.Error err ->
Env.report_error Format.std_formatter err; Location.none
| Cmi_format.Error err ->
Cmi_format.report_error Format.std_formatter err; Location.none
| Ctype.Tags(l, l') ->
Format.printf "In this program,@ variant constructors@ `%s and `%s@ have same hash value.@." l l';
Location.none
| Failure s ->
Format.printf "%s.@." s; Location.none
| _ -> assert false
in
end_message ();
let s = range.loc_start.Lexing.pos_cnum in
let e = range.loc_end.Lexing.pos_cnum in
if s < e then
Jg_text.tag_and_see txt.tw ~start:(tpos s) ~stop:(tpos e) ~tag:"error"
end;
end_message ();
if !nowarnings || Text.index ew ~index:tend = `Linechar (2,0)
then destroy tl
else begin
error_messages := tl :: !error_messages;
Text.configure ew ~state:`Disabled;
bind ew ~events:[`Modified([`Double], `ButtonReleaseDetail 1)]
~action:(fun _ ->
try
let start, ende = Text.tag_nextrange ew ~tag:"sel" ~start:(tpos 0) in
let s = Text.get ew ~start:(start,[]) ~stop:(ende,[]) in
let n = int_of_string s in
Text.mark_set txt.tw ~index:(tpos n) ~mark:"insert";
Text.see txt.tw ~index:(`Mark "insert", [])
with _ -> ())
end;
!error_messages
| null | https://raw.githubusercontent.com/thelema/ocaml-community/ed0a2424bbf13d1b33292725e089f0d7ba94b540/otherlibs/labltk/browser/typecheck.ml | ocaml | ***********************************************************************
OCaml LablTk library
General Public License, with the special exception on linking
described in file ../../../LICENSE.
***********************************************************************
Optionally preprocess a source file | , Kyoto University RIMS
Copyright 1999 Institut National de Recherche en Informatique et
en Automatique and Kyoto University . All rights reserved .
This file is distributed under the terms of the GNU Library
$ Id$
open StdLabels
open Tk
open Parsetree
open Typedtree
open Location
open Jg_tk
open Mytypes
let preprocess ~pp ~ext text =
let sourcefile = Filename.temp_file "caml" ext in
begin try
let oc = open_out_bin sourcefile in
output_string oc text;
flush oc;
close_out oc
with _ ->
failwith "Preprocessing error"
end;
let tmpfile = Filename.temp_file "camlpp" ext in
let comm = Printf.sprintf "%s %s > %s" pp sourcefile tmpfile in
if Ccomp.command comm <> 0 then begin
Sys.remove sourcefile;
Sys.remove tmpfile;
failwith "Preprocessing error"
end;
Sys.remove sourcefile;
tmpfile
exception Outdated_version
let parse_pp ~parse ~wrap ~ext text =
Location.input_name := "";
match !Clflags.preprocessor with
None ->
let buffer = Lexing.from_string text in
Location.init buffer "";
parse buffer
| Some pp ->
let tmpfile = preprocess ~pp ~ext text in
let ast_magic =
if ext = ".ml" then Config.ast_impl_magic_number
else Config.ast_intf_magic_number in
let ic = open_in_bin tmpfile in
let ast =
try
let buffer = Misc.input_bytes ic (String.length ast_magic) in
if buffer = ast_magic then begin
ignore (input_value ic);
wrap (input_value ic)
end else if String.sub buffer 0 9 = String.sub ast_magic 0 9 then
raise Outdated_version
else
raise Exit
with
Outdated_version ->
close_in ic;
Sys.remove tmpfile;
failwith "OCaml and preprocessor have incompatible versions"
| _ ->
seek_in ic 0;
let buffer = Lexing.from_channel ic in
Location.init buffer "";
parse buffer
in
close_in ic;
Sys.remove tmpfile;
ast
let nowarnings = ref false
let f txt =
let error_messages = ref [] in
let text = Jg_text.get_all txt.tw
and env = ref (Env.open_pers_signature "Pervasives" Env.initial) in
let tl, ew, end_message =
Jg_message.formatted ~title:"Warnings" ~ppf:Format.err_formatter () in
Text.tag_remove txt.tw ~tag:"error" ~start:tstart ~stop:tend;
txt.structure <- [];
txt.type_info <- [];
txt.signature <- [];
txt.psignature <- [];
ignore (Stypes.get_info ());
Clflags.annotations := true;
begin try
if Filename.check_suffix txt.name ".mli" then
let psign = parse_pp text ~ext:".mli"
~parse:Parse.interface ~wrap:(fun x -> x) in
txt.psignature <- psign;
txt.signature <- (Typemod.transl_signature !env psign).sig_type;
others are interpreted as .ml
let psl = parse_pp text ~ext:".ml"
~parse:Parse.use_file ~wrap:(fun x -> [Parsetree.Ptop_def x]) in
List.iter psl ~f:
begin function
Ptop_def pstr ->
let str, sign, env' = Typemod.type_structure !env pstr Location.none in
txt.structure <- txt.structure @ str.str_items;
txt.signature <- txt.signature @ sign;
env := env'
| Ptop_dir _ -> ()
end;
txt.type_info <- Stypes.get_info ();
with
Lexer.Error _ | Syntaxerr.Error _
| Typecore.Error _ | Typemod.Error _
| Typeclass.Error _ | Typedecl.Error _
| Typetexp.Error _ | Includemod.Error _
| Env.Error _ | Ctype.Tags _ | Failure _ as exn ->
txt.type_info <- Stypes.get_info ();
let et, ew, end_message = Jg_message.formatted ~title:"Error !" () in
error_messages := et :: !error_messages;
let range = match exn with
Lexer.Error (err, l) ->
Lexer.report_error Format.std_formatter err; l
| Syntaxerr.Error err ->
Syntaxerr.report_error Format.std_formatter err;
Syntaxerr.location_of_error err
| Typecore.Error (l,err) ->
Typecore.report_error Format.std_formatter err; l
| Typeclass.Error (l,err) ->
Typeclass.report_error Format.std_formatter err; l
| Typedecl.Error (l, err) ->
Typedecl.report_error Format.std_formatter err; l
| Typemod.Error (l,err) ->
Typemod.report_error Format.std_formatter err; l
| Typetexp.Error (l,err) ->
Typetexp.report_error Format.std_formatter err; l
| Includemod.Error errl ->
Includemod.report_error Format.std_formatter errl; Location.none
| Env.Error err ->
Env.report_error Format.std_formatter err; Location.none
| Cmi_format.Error err ->
Cmi_format.report_error Format.std_formatter err; Location.none
| Ctype.Tags(l, l') ->
Format.printf "In this program,@ variant constructors@ `%s and `%s@ have same hash value.@." l l';
Location.none
| Failure s ->
Format.printf "%s.@." s; Location.none
| _ -> assert false
in
end_message ();
let s = range.loc_start.Lexing.pos_cnum in
let e = range.loc_end.Lexing.pos_cnum in
if s < e then
Jg_text.tag_and_see txt.tw ~start:(tpos s) ~stop:(tpos e) ~tag:"error"
end;
end_message ();
if !nowarnings || Text.index ew ~index:tend = `Linechar (2,0)
then destroy tl
else begin
error_messages := tl :: !error_messages;
Text.configure ew ~state:`Disabled;
bind ew ~events:[`Modified([`Double], `ButtonReleaseDetail 1)]
~action:(fun _ ->
try
let start, ende = Text.tag_nextrange ew ~tag:"sel" ~start:(tpos 0) in
let s = Text.get ew ~start:(start,[]) ~stop:(ende,[]) in
let n = int_of_string s in
Text.mark_set txt.tw ~index:(tpos n) ~mark:"insert";
Text.see txt.tw ~index:(`Mark "insert", [])
with _ -> ())
end;
!error_messages
|
b3a966be568dc58d130659ac52be2dfd77a06f6e55bcd7c7189d7315a45121bb | nandor/llir-ocaml | revapply.ml | (* TEST
*)
external ( |> ) : 'a -> ('a -> 'b) -> 'b = "%revapply"
let f x = x + x
let g x = x * x
let h x = x + 1
let add x y = x + y
let _ =
List.iter (fun x ->
print_int x; print_newline ()
)
[
6
36
18
37
260
]
| null | https://raw.githubusercontent.com/nandor/llir-ocaml/9c019f15c444e30c825b1673cbe827e0497868fe/testsuite/tests/prim-revapply/revapply.ml | ocaml | TEST
|
external ( |> ) : 'a -> ('a -> 'b) -> 'b = "%revapply"
let f x = x + x
let g x = x * x
let h x = x + 1
let add x y = x + y
let _ =
List.iter (fun x ->
print_int x; print_newline ()
)
[
6
36
18
37
260
]
|
495bfafc3dc998898524c2fd333e418c05f25c3f8f8b288db6d24ffa0535c50e | fetburner/Coq2SML | ide_slave.mli | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2014
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
* [ Ide_slave ] : an implementation of [ Ide_intf ] , i.e. mainly an interp
function and a rewind function . This specialized loop is triggered
when the -ideslave option is passed to Coqtop . Currently CoqIDE is
the only one using this mode , but we try here to be as generic as
possible , so this may change in the future ...
function and a rewind function. This specialized loop is triggered
when the -ideslave option is passed to Coqtop. Currently CoqIDE is
the only one using this mode, but we try here to be as generic as
possible, so this may change in the future... *)
val init_stdout : unit -> unit
val loop : unit -> unit
| null | https://raw.githubusercontent.com/fetburner/Coq2SML/322d613619edbb62edafa999bff24b1993f37612/coq-8.4pl4/toplevel/ide_slave.mli | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
********************************************************************** | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2014
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* [ Ide_slave ] : an implementation of [ Ide_intf ] , i.e. mainly an interp
function and a rewind function . This specialized loop is triggered
when the -ideslave option is passed to Coqtop . Currently CoqIDE is
the only one using this mode , but we try here to be as generic as
possible , so this may change in the future ...
function and a rewind function. This specialized loop is triggered
when the -ideslave option is passed to Coqtop. Currently CoqIDE is
the only one using this mode, but we try here to be as generic as
possible, so this may change in the future... *)
val init_stdout : unit -> unit
val loop : unit -> unit
|
f791a56ef4f735060c033eb9d1336f7a5f876e348e3ddd276b01a8c6bb52371d | fetburner/compelib | bfs01.mli | 0 または 1 で重み付けされた有向グラフ
module type Weighted01DirectedGraph = sig
module Vertex : sig
type t
(* 現在の頂点までの経路長を取得する *)
val get_distance : t -> int
(* 頂点までの経路長を上書きする *)
val set_distance : t -> int -> unit
(* 最短経路を求めたいグラフの,ある頂点から伸びる辺に対してのイテレータ
重みが0の辺に対してはf0を,1の辺に対してはf1を用いる *)
val iter_adjacencies : t -> f0:(t -> unit) -> f1:(t -> unit) -> unit
end
end
(* BFSにより,重みが0または1のグラフの最短経路長を求める *)
val shortest_path :
(* グラフに含まれる頂点への経路長は全て無限大で初期化されている必要がある *)
(module Weighted01DirectedGraph with type Vertex.t = 'vertex) ->
(* 始点 *)
'vertex ->
終点を受け取って,始点からの最短距離を返す関数
始点から辿り着けない場合,無限大を返す
この関数を覚えておけば,呼び出しごとの途中までの計算結果がシェアされる
始点から辿り着けない場合,無限大を返す
この関数を覚えておけば,呼び出しごとの途中までの計算結果がシェアされる *)
('vertex -> int)
| null | https://raw.githubusercontent.com/fetburner/compelib/d8fc5d9acd04e676c4d4d2ca9c6a7140f1b85670/lib/graph/bfs01.mli | ocaml | 現在の頂点までの経路長を取得する
頂点までの経路長を上書きする
最短経路を求めたいグラフの,ある頂点から伸びる辺に対してのイテレータ
重みが0の辺に対してはf0を,1の辺に対してはf1を用いる
BFSにより,重みが0または1のグラフの最短経路長を求める
グラフに含まれる頂点への経路長は全て無限大で初期化されている必要がある
始点 | 0 または 1 で重み付けされた有向グラフ
module type Weighted01DirectedGraph = sig
module Vertex : sig
type t
val get_distance : t -> int
val set_distance : t -> int -> unit
val iter_adjacencies : t -> f0:(t -> unit) -> f1:(t -> unit) -> unit
end
end
val shortest_path :
(module Weighted01DirectedGraph with type Vertex.t = 'vertex) ->
'vertex ->
終点を受け取って,始点からの最短距離を返す関数
始点から辿り着けない場合,無限大を返す
この関数を覚えておけば,呼び出しごとの途中までの計算結果がシェアされる
始点から辿り着けない場合,無限大を返す
この関数を覚えておけば,呼び出しごとの途中までの計算結果がシェアされる *)
('vertex -> int)
|
08192d9d3a0e58fe8b53e7652598551a7590a0575b187a7ac3cec5065f736f54 | eeng/shevek | rpc.cljs | (ns shevek.rpc
(:require [ajax.core :refer [POST]]
[shevek.reflow.core :refer [dispatch] :refer-macros [defevh]]
[shevek.reflow.db :as db]))
(defn loading?
([] (seq (db/get :loading)))
([key] (db/get-in [:loading key])))
(defn loading [db key]
(assoc-in db [:loading key] true))
(defn loaded
([db] (assoc db :loading {}))
([db key] (update db :loading (fnil dissoc {}) key)))
(defn call [fid & {:keys [args handler error-handler]
:or {args [] error-handler #(dispatch :errors/from-server %)}}]
{:pre [(vector? args)]}
(POST "/rpc" {:params {:fn fid :args args}
:handler handler
:error-handler error-handler}))
(defevh :data-arrived [db db-key data db-handler]
(let [db-handler (or db-handler #(assoc % db-key data))]
(-> db (loaded db-key) (db-handler data))))
(defn fetch [db db-key fid & {:keys [args handler] :or {args []}}]
(call fid :args args :handler #(dispatch :data-arrived db-key % handler))
(loading db db-key))
(defn loading-class [loading-key]
{:class (when (loading? loading-key) "loading")})
| null | https://raw.githubusercontent.com/eeng/shevek/7783b8037303b8dd5f320f35edee3bfbb2b41c02/src/cljs/shevek/rpc.cljs | clojure | (ns shevek.rpc
(:require [ajax.core :refer [POST]]
[shevek.reflow.core :refer [dispatch] :refer-macros [defevh]]
[shevek.reflow.db :as db]))
(defn loading?
([] (seq (db/get :loading)))
([key] (db/get-in [:loading key])))
(defn loading [db key]
(assoc-in db [:loading key] true))
(defn loaded
([db] (assoc db :loading {}))
([db key] (update db :loading (fnil dissoc {}) key)))
(defn call [fid & {:keys [args handler error-handler]
:or {args [] error-handler #(dispatch :errors/from-server %)}}]
{:pre [(vector? args)]}
(POST "/rpc" {:params {:fn fid :args args}
:handler handler
:error-handler error-handler}))
(defevh :data-arrived [db db-key data db-handler]
(let [db-handler (or db-handler #(assoc % db-key data))]
(-> db (loaded db-key) (db-handler data))))
(defn fetch [db db-key fid & {:keys [args handler] :or {args []}}]
(call fid :args args :handler #(dispatch :data-arrived db-key % handler))
(loading db db-key))
(defn loading-class [loading-key]
{:class (when (loading? loading-key) "loading")})
| |
e2e3869c841dee5f0b7dec0eb25a87b03f422d811a27bc59609f64e2a76b1f62 | si14/erl_json_test | erl_json_test.erl | -module(erl_json_test).
-export([start/0]).
-define(RESULTS_FILE, "results.csv").
-define(NUM_TESTS, 300).
-define(PARSERS,
[{"jsone", fun jsone:encode/1, fun jsone:decode/1},
{"yawsjson2", fun json2:encode/1, fun json2:decode/1},
{"jiffy", fun jiffy:encode/1, fun jiffy:decode/1},
{"mochijson2", fun mochijson2:encode/1, fun mochijson2:decode/1},
{"jsx", fun jsx:encode/1, fun jsx:decode/1}]).
-define(TESTFILES,
[{"1x", "1x.json"},
{"3x", "3x.json"},
{"9x", "9x.json"},
{"27x", "27x.json"},
{"81x", "81x.json"},
{"243x", "243x.json"}]).
start() ->
JSONs = [begin
FullName = "priv/" ++ FileName,
{ok, File} = file:read_file(FullName),
{Name, File}
end
|| {Name, FileName} <- ?TESTFILES],
_A = [ jsone:encode(jsone:decode(File)) || {_, File} <- JSONs],
_B = [ jiffy:encode(jiffy:decode(File)) || {_, File} <- JSONs],
_C = [ mochijson2:encode(mochijson2:decode(File)) || {_, File} <- JSONs],
_D = [ jsx:encode(jsx:decode(File)) || {_, File} <- JSONs],
ResultsDeep = [[begin
T = {ParserName, TestName, size(JSON),
bench(EncFun, DecFun, JSON)},
io:format("~s ~s done~n", [ParserName, TestName]),
T
end
|| {TestName, JSON} <- JSONs]
|| {ParserName, EncFun, DecFun} <- ?PARSERS],
Results = lists:flatten(ResultsDeep),
format_results(Results),
init:stop().
bench(EncFun, DecFun, TestJSON) ->
DecThunk = fun() -> times(DecFun, TestJSON, ?NUM_TESTS) end,
{DecTime, Decoded} = timer:tc(DecThunk),
EncThunk = fun() -> times(EncFun, Decoded, ?NUM_TESTS) end,
{EncTime, _} = timer:tc(EncThunk),
{EncTime, DecTime}.
format_results(Results) ->
Header = io_lib:format("\"Parser\","
"\"Test\","
"\"TestSize\","
"\"ResultEnc\","
"\"ResultDec\"~n", []),
Out = [Header |
[io_lib:format("\"~s\",\"~s (~pb)\",~p,~p,~p~n",
[Parser, Test, TestSize, TestSize,
round(ResultEnc / ?NUM_TESTS),
round(ResultDec / ?NUM_TESTS)])
|| {Parser, Test, TestSize, {ResultEnc, ResultDec}} <- Results]],
file:write_file(?RESULTS_FILE, lists:flatten(Out)).
times(F, X, 0) -> F(X);
times(F, X, N) -> F(X), times(F, X, N-1).
| null | https://raw.githubusercontent.com/si14/erl_json_test/553f029a5dcefb8f7c8a0b101e6156a8903f8737/src/erl_json_test.erl | erlang | -module(erl_json_test).
-export([start/0]).
-define(RESULTS_FILE, "results.csv").
-define(NUM_TESTS, 300).
-define(PARSERS,
[{"jsone", fun jsone:encode/1, fun jsone:decode/1},
{"yawsjson2", fun json2:encode/1, fun json2:decode/1},
{"jiffy", fun jiffy:encode/1, fun jiffy:decode/1},
{"mochijson2", fun mochijson2:encode/1, fun mochijson2:decode/1},
{"jsx", fun jsx:encode/1, fun jsx:decode/1}]).
-define(TESTFILES,
[{"1x", "1x.json"},
{"3x", "3x.json"},
{"9x", "9x.json"},
{"27x", "27x.json"},
{"81x", "81x.json"},
{"243x", "243x.json"}]).
start() ->
JSONs = [begin
FullName = "priv/" ++ FileName,
{ok, File} = file:read_file(FullName),
{Name, File}
end
|| {Name, FileName} <- ?TESTFILES],
_A = [ jsone:encode(jsone:decode(File)) || {_, File} <- JSONs],
_B = [ jiffy:encode(jiffy:decode(File)) || {_, File} <- JSONs],
_C = [ mochijson2:encode(mochijson2:decode(File)) || {_, File} <- JSONs],
_D = [ jsx:encode(jsx:decode(File)) || {_, File} <- JSONs],
ResultsDeep = [[begin
T = {ParserName, TestName, size(JSON),
bench(EncFun, DecFun, JSON)},
io:format("~s ~s done~n", [ParserName, TestName]),
T
end
|| {TestName, JSON} <- JSONs]
|| {ParserName, EncFun, DecFun} <- ?PARSERS],
Results = lists:flatten(ResultsDeep),
format_results(Results),
init:stop().
bench(EncFun, DecFun, TestJSON) ->
DecThunk = fun() -> times(DecFun, TestJSON, ?NUM_TESTS) end,
{DecTime, Decoded} = timer:tc(DecThunk),
EncThunk = fun() -> times(EncFun, Decoded, ?NUM_TESTS) end,
{EncTime, _} = timer:tc(EncThunk),
{EncTime, DecTime}.
format_results(Results) ->
Header = io_lib:format("\"Parser\","
"\"Test\","
"\"TestSize\","
"\"ResultEnc\","
"\"ResultDec\"~n", []),
Out = [Header |
[io_lib:format("\"~s\",\"~s (~pb)\",~p,~p,~p~n",
[Parser, Test, TestSize, TestSize,
round(ResultEnc / ?NUM_TESTS),
round(ResultDec / ?NUM_TESTS)])
|| {Parser, Test, TestSize, {ResultEnc, ResultDec}} <- Results]],
file:write_file(?RESULTS_FILE, lists:flatten(Out)).
times(F, X, 0) -> F(X);
times(F, X, N) -> F(X), times(F, X, N-1).
| |
9a5f6f41277bd6459ff0129375087eda209ce2b7de9c851f17feb7de07464b9a | plande/grand-scheme | loops.scm | (define-module (grand loops)
#:use-module (grand define-keywords)
#:use-module (grand function)
#:use-module (grand examples)
#:use-module (grand list)
#:use-module (grand syntax)
#:export (numbers)
#:export-syntax (for collect))
;; Python-style for-loop and list comprehensions
;; "make you a python for great bad"
(define-syntax for
(syntax-rules (in =>)
((for (key => value) in hash-map actions . *)
(hash-for-each (lambda (key value) actions . *) hash-map))
((for x in list actions . *)
(for-each (lambda (x) actions . *) list))))
(define/keywords (numbers #:from start #:= 0
#:to end
#:by step #:= 1)
(let* ((step (* (if (is start > end) -1 1)
(if (positive? step) 1 -1)
step))
(exceeding? (cond ((positive? step) <)
((negative? step) >)
(else (lambda (x y) #true))))
(amount (floor (abs (/ (- end start) step)))))
(define (build-down result #;from end)
(if (is end exceeding? start)
result
(build-down `(,end . ,result)
(- end step))))
(build-down '() (+ start (* amount step)))))
(define-syntax collect
(syntax-rules (for in if)
((collect result)
`(,result))
((collect result for variable in list . *)
(append-map (lambda (variable)
(collect result . *))
list))
((collect result if condition . *)
(if condition
(collect result . *)
'()))))
(e.g.
(collect `(,x ,y ,z)
for z in (numbers #:from 1 #:to 20)
for y in (numbers #:from 1 #:to z)
for x in (numbers #:from 1 #:to y)
if (= (+ (* x x) (* y y))
(* z z)))
===> ((3 4 5) (6 8 10) (5 12 13) (9 12 15) (8 15 17) (12 16 20)))
| null | https://raw.githubusercontent.com/plande/grand-scheme/c40cc25373789a437fdf5e49f47f5dd456f76faf/grand/loops.scm | scheme | Python-style for-loop and list comprehensions
"make you a python for great bad"
from end) | (define-module (grand loops)
#:use-module (grand define-keywords)
#:use-module (grand function)
#:use-module (grand examples)
#:use-module (grand list)
#:use-module (grand syntax)
#:export (numbers)
#:export-syntax (for collect))
(define-syntax for
(syntax-rules (in =>)
((for (key => value) in hash-map actions . *)
(hash-for-each (lambda (key value) actions . *) hash-map))
((for x in list actions . *)
(for-each (lambda (x) actions . *) list))))
(define/keywords (numbers #:from start #:= 0
#:to end
#:by step #:= 1)
(let* ((step (* (if (is start > end) -1 1)
(if (positive? step) 1 -1)
step))
(exceeding? (cond ((positive? step) <)
((negative? step) >)
(else (lambda (x y) #true))))
(amount (floor (abs (/ (- end start) step)))))
(if (is end exceeding? start)
result
(build-down `(,end . ,result)
(- end step))))
(build-down '() (+ start (* amount step)))))
(define-syntax collect
(syntax-rules (for in if)
((collect result)
`(,result))
((collect result for variable in list . *)
(append-map (lambda (variable)
(collect result . *))
list))
((collect result if condition . *)
(if condition
(collect result . *)
'()))))
(e.g.
(collect `(,x ,y ,z)
for z in (numbers #:from 1 #:to 20)
for y in (numbers #:from 1 #:to z)
for x in (numbers #:from 1 #:to y)
if (= (+ (* x x) (* y y))
(* z z)))
===> ((3 4 5) (6 8 10) (5 12 13) (9 12 15) (8 15 17) (12 16 20)))
|
e17e4bff28a880ab5ecf32dadd2d11cf40ded9b349041a0de51a088885e73e82 | wavejumper/rehook | core.cljs | (ns rehook.core
(:require ["react" :as react]))
(defn use-state
[initial-value]
(react/useState initial-value))
(defn use-effect
([f]
(react/useEffect f))
([f deps]
(react/useEffect f (to-array deps))))
(defn use-atom-fn
[a getter-fn setter-fn]
(let [[val set-val] (use-state (getter-fn @a))]
(use-effect
(fn []
(let [id (str (random-uuid))]
(add-watch a id (fn [_ _ prev-state next-state]
(let [prev-value (getter-fn prev-state)
next-value (getter-fn next-state)]
(when-not (= prev-value next-value)
(set-val next-value)))))
#(remove-watch a id)))
[])
[val #(swap! a setter-fn %)]))
(defn use-atom
"(use-atom my-atom)"
[a]
(use-atom-fn a identity (fn [_ v] v)))
(defn use-atom-path
"(use-atom my-atom [:path :to :data])"
[a path]
(use-atom-fn a #(get-in % path) #(assoc-in %1 path %2)))
| null | https://raw.githubusercontent.com/wavejumper/rehook/c1a4207918827f4b738cdad9a9645385e5e10ff4/rehook-core/src/rehook/core.cljs | clojure | (ns rehook.core
(:require ["react" :as react]))
(defn use-state
[initial-value]
(react/useState initial-value))
(defn use-effect
([f]
(react/useEffect f))
([f deps]
(react/useEffect f (to-array deps))))
(defn use-atom-fn
[a getter-fn setter-fn]
(let [[val set-val] (use-state (getter-fn @a))]
(use-effect
(fn []
(let [id (str (random-uuid))]
(add-watch a id (fn [_ _ prev-state next-state]
(let [prev-value (getter-fn prev-state)
next-value (getter-fn next-state)]
(when-not (= prev-value next-value)
(set-val next-value)))))
#(remove-watch a id)))
[])
[val #(swap! a setter-fn %)]))
(defn use-atom
"(use-atom my-atom)"
[a]
(use-atom-fn a identity (fn [_ v] v)))
(defn use-atom-path
"(use-atom my-atom [:path :to :data])"
[a path]
(use-atom-fn a #(get-in % path) #(assoc-in %1 path %2)))
| |
aa39ee1528c038d6351736d337197f113e138e92bbdebca322fc4b559e88a66d | reborg/clojure-essential-reference | 4.clj | (def libs (loaded-libs))
< 1 >
# { clojure.core.protocols clojure.core.server clojure.edn
;; clojure.instant clojure.java.browse clojure.java.io
clojure.java.javadoc
;; clojure.repl clojure.string clojure.uuid clojure.walk}
(require '[clojure.data :refer [diff]])
(def nss (set (map ns-name (all-ns))))
< 2 >
;; [nil ; <3>
# { user clojure.core clojure.set clojure.data } ; < 4 >
# { clojure.core.protocols clojure.core.server clojure.edn ; < 5 >
clojure.instant
clojure.java.javadoc
;; clojure.repl clojure.string clojure.uuid clojure.walk}] | null | https://raw.githubusercontent.com/reborg/clojure-essential-reference/c37fa19d45dd52b2995a191e3e96f0ebdc3f6d69/OtherFunctions/VarsandNamespaces/refer%2Crefer-clojure%2Crequire%2Cloaded-libs%2Cuse%2Cimport/4.clj | clojure | clojure.instant clojure.java.browse clojure.java.io
clojure.repl clojure.string clojure.uuid clojure.walk}
[nil ; <3>
< 4 >
< 5 >
clojure.repl clojure.string clojure.uuid clojure.walk}] | (def libs (loaded-libs))
< 1 >
# { clojure.core.protocols clojure.core.server clojure.edn
clojure.java.javadoc
(require '[clojure.data :refer [diff]])
(def nss (set (map ns-name (all-ns))))
< 2 >
clojure.instant
clojure.java.javadoc |
66ea5c69d640a4358b13bed89f5effb2cab390c4ef5f199d04690a227b332612 | rtoy/cmucl | support.lisp | ;;; support.lisp --- performance benchmarks for Common Lisp implementations
;;
Author : < >
Time - stamp : < 2004 - 08 - 01 emarsden >
;;
;;
;; The benchmarks consist of
;;
- the benchmarks
- some mathematical operations ( factorial , , CRC )
;; - some bignum-intensive operations
;; - hashtable and READ-LINE tests
;; - CLOS tests
;; - array, string and bitvector exercises
;;
(in-package :cl-bench)
(defvar *version* "20040801")
(defvar *benchmarks* '())
(defvar *benchmark-results* '())
(defvar *benchmark-file-directory*
(merge-pathnames (make-pathname :directory '(:relative "results"))
(make-pathname :directory (pathname-directory *load-truename*)))
"Directory where the benchmark report file is stored")
(defvar +implementation+
(concatenate 'string
(lisp-implementation-type) " "
(lisp-implementation-version)))
(defclass benchmark ()
((name :accessor benchmark-name
:initarg :name)
(short :accessor benchmark-short
:initarg :short
:type string)
(long :accessor benchmark-long
:initarg :long
:initform nil
:type string)
(group :accessor benchmark-group
:initarg :group)
(runs :accessor benchmark-runs
:initarg :runs
:initform 1
:type integer)
(disabled-for :accessor benchmark-disabled-for
:initarg :disabled-for
:initform nil)
(setup :initarg :setup
:initform nil)
(function :initarg :function
:accessor benchmark-function)))
(defmethod print-object ((self benchmark) stream)
(print-unreadable-object (self stream :type nil)
(format stream "benchmark ~a for ~d runs"
(benchmark-short self)
(benchmark-runs self))))
(defmethod initialize-instance :after ((self benchmark)
&rest initargs
&key &allow-other-keys)
(declare (ignore initargs))
(unless (slot-boundp self 'short)
(setf (benchmark-short self) (string (benchmark-name self))))
self)
( setf ( benchmark - function self )
;; (compile nil `(lambda ()
;; (dotimes (i ,(benchmark-runs self))
;; `(funcall ',(benchmark-function ,self))))))
(defmacro defbench (fun &rest args)
`(push (make-instance 'benchmark :name ',fun ,@args)
*benchmarks*))
(defvar *benchmark-output*)
(defvar *current-test*)
(defmacro with-bench-output (&body body)
`(with-open-file (f (benchmark-report-file)
:direction :output
:if-exists :supersede)
(let ((*benchmark-output* f)
(*load-verbose* nil)
(*print-length* nil)
(*compile-verbose* nil)
(*compile-print* nil))
(bench-report-header)
(progn ,@body)
(bench-report-footer))))
(defun bench-run ()
(with-open-file (f (benchmark-report-file)
:direction :output
:if-exists :supersede)
(let ((*benchmark-output* f)
(*print-length*)
(*load-verbose* nil)
(*compile-verbose* nil)
(*compile-print* nil))
(bench-report-header)
(dolist (b (reverse *benchmarks*))
(bench-gc)
(with-spawned-thread
(with-slots (setup function short runs) b
(when setup (funcall setup))
(format t "~&=== running ~a~%" b)
(bench-report function short runs))))
(bench-report-footer))))
(defun benchmark-report-file ()
(ensure-directories-exist *benchmark-file-directory*)
(multiple-value-bind (second minute hour date month year)
(get-decoded-time)
(declare (ignore second))
;; Should we use pathnames directly instead of creating a string
;; naming the file?
(format nil "~aCL-benchmark-~d~2,'0d~2,'0dT~2,'0d~2,'0d"
*benchmark-file-directory*
year month date hour minute)))
grr , CLISP does n't implement ~< .. ~ :>
CormanLisp bug :
;;; An error occurred in function FORMAT:
;;; Error: Invalid format directive : character #\< in control string ";; -*- lisp -*- ~a~%;;~%;; Implementation *features*:~%~@<;; ~@;~s~:>~%;;~%"
Entering debug loop .
(defun bench-report-header ()
(format *benchmark-output*
#-(or clisp ecl gcl cormanlisp) ";; -*- lisp -*- ~a~%;;~%;; Implementation *features*:~%~@<;; ~@;~s~:>~%;;~%"
#+(or clisp ecl gcl cormanlisp) ";; -*- lisp -*- ~a~%;; Implementation *features*: ~s~%;;~%"
+implementation+ *features*)
(format *benchmark-output*
";; Function real user sys consed~%")
(format *benchmark-output*
";; ----------------------------------------------------------------~%"))
(defun bench-report-footer ()
(format *benchmark-output* "~%~s~%"
(cons +implementation+ *benchmark-results*)))
;; generate a report to *benchmark-output* on the calling of FUNCTION
(defun bench-report (function name times)
(multiple-value-bind (real user sys consed)
(bench-time function times name)
(format *benchmark-output*
#-armedbear ";; ~25a ~8,2f ~8,2f ~8,2f ~12d"
#+armedbear ";; ~a ~f ~f ~f ~d"
name real user sys consed)
(terpri *benchmark-output*)
(force-output *benchmark-output*)
(push (cons name (list real user sys consed))
*benchmark-results*)))
;; a generic timing function, that depends on GET-INTERNAL-RUN-TIME
and GET - INTERNAL - REAL - TIME returning sensible results . If a version
was defined in / setup-<impl > , we use that instead
(defun generic-bench-time (fun times name)
(declare (ignore name))
(let (before-real after-real before-user after-user)
(setq before-user (get-internal-run-time))
(setq before-real (get-internal-real-time))
(dotimes (i times)
(funcall fun))
(setq after-user (get-internal-run-time))
(setq after-real (get-internal-real-time))
return real user sys consed
(values (/ (- after-real before-real) internal-time-units-per-second)
(/ (- after-user before-user) internal-time-units-per-second)
0 0)))
(eval-when (:load-toplevel :execute)
(unless (fboundp 'bench-time)
GCL as of 20040628 does not implement ( setf fdefinition )
#-gcl (setf (fdefinition 'bench-time) #'generic-bench-time)
#+gcl (defun bench-time (fun times name) (generic-bench-time fun times name))))
EOF
| null | https://raw.githubusercontent.com/rtoy/cmucl/9b1abca53598f03a5b39ded4185471a5b8777dea/benchmarks/cl-bench/support.lisp | lisp | support.lisp --- performance benchmarks for Common Lisp implementations
The benchmarks consist of
- some bignum-intensive operations
- hashtable and READ-LINE tests
- CLOS tests
- array, string and bitvector exercises
(compile nil `(lambda ()
(dotimes (i ,(benchmark-runs self))
`(funcall ',(benchmark-function ,self))))))
Should we use pathnames directly instead of creating a string
naming the file?
An error occurred in function FORMAT:
Error: Invalid format directive : character #\< in control string ";; -*- lisp -*- ~a~%;;~%;; Implementation *features*:~%~@<;; ~@;~s~:>~%;;~%"
generate a report to *benchmark-output* on the calling of FUNCTION
a generic timing function, that depends on GET-INTERNAL-RUN-TIME | Author : < >
Time - stamp : < 2004 - 08 - 01 emarsden >
- the benchmarks
- some mathematical operations ( factorial , , CRC )
(in-package :cl-bench)
(defvar *version* "20040801")
(defvar *benchmarks* '())
(defvar *benchmark-results* '())
(defvar *benchmark-file-directory*
(merge-pathnames (make-pathname :directory '(:relative "results"))
(make-pathname :directory (pathname-directory *load-truename*)))
"Directory where the benchmark report file is stored")
(defvar +implementation+
(concatenate 'string
(lisp-implementation-type) " "
(lisp-implementation-version)))
(defclass benchmark ()
((name :accessor benchmark-name
:initarg :name)
(short :accessor benchmark-short
:initarg :short
:type string)
(long :accessor benchmark-long
:initarg :long
:initform nil
:type string)
(group :accessor benchmark-group
:initarg :group)
(runs :accessor benchmark-runs
:initarg :runs
:initform 1
:type integer)
(disabled-for :accessor benchmark-disabled-for
:initarg :disabled-for
:initform nil)
(setup :initarg :setup
:initform nil)
(function :initarg :function
:accessor benchmark-function)))
(defmethod print-object ((self benchmark) stream)
(print-unreadable-object (self stream :type nil)
(format stream "benchmark ~a for ~d runs"
(benchmark-short self)
(benchmark-runs self))))
(defmethod initialize-instance :after ((self benchmark)
&rest initargs
&key &allow-other-keys)
(declare (ignore initargs))
(unless (slot-boundp self 'short)
(setf (benchmark-short self) (string (benchmark-name self))))
self)
( setf ( benchmark - function self )
(defmacro defbench (fun &rest args)
`(push (make-instance 'benchmark :name ',fun ,@args)
*benchmarks*))
(defvar *benchmark-output*)
(defvar *current-test*)
(defmacro with-bench-output (&body body)
`(with-open-file (f (benchmark-report-file)
:direction :output
:if-exists :supersede)
(let ((*benchmark-output* f)
(*load-verbose* nil)
(*print-length* nil)
(*compile-verbose* nil)
(*compile-print* nil))
(bench-report-header)
(progn ,@body)
(bench-report-footer))))
(defun bench-run ()
(with-open-file (f (benchmark-report-file)
:direction :output
:if-exists :supersede)
(let ((*benchmark-output* f)
(*print-length*)
(*load-verbose* nil)
(*compile-verbose* nil)
(*compile-print* nil))
(bench-report-header)
(dolist (b (reverse *benchmarks*))
(bench-gc)
(with-spawned-thread
(with-slots (setup function short runs) b
(when setup (funcall setup))
(format t "~&=== running ~a~%" b)
(bench-report function short runs))))
(bench-report-footer))))
(defun benchmark-report-file ()
(ensure-directories-exist *benchmark-file-directory*)
(multiple-value-bind (second minute hour date month year)
(get-decoded-time)
(declare (ignore second))
(format nil "~aCL-benchmark-~d~2,'0d~2,'0dT~2,'0d~2,'0d"
*benchmark-file-directory*
year month date hour minute)))
grr , CLISP does n't implement ~< .. ~ :>
CormanLisp bug :
Entering debug loop .
(defun bench-report-header ()
(format *benchmark-output*
#-(or clisp ecl gcl cormanlisp) ";; -*- lisp -*- ~a~%;;~%;; Implementation *features*:~%~@<;; ~@;~s~:>~%;;~%"
#+(or clisp ecl gcl cormanlisp) ";; -*- lisp -*- ~a~%;; Implementation *features*: ~s~%;;~%"
+implementation+ *features*)
(format *benchmark-output*
";; Function real user sys consed~%")
(format *benchmark-output*
";; ----------------------------------------------------------------~%"))
(defun bench-report-footer ()
(format *benchmark-output* "~%~s~%"
(cons +implementation+ *benchmark-results*)))
(defun bench-report (function name times)
(multiple-value-bind (real user sys consed)
(bench-time function times name)
(format *benchmark-output*
#-armedbear ";; ~25a ~8,2f ~8,2f ~8,2f ~12d"
#+armedbear ";; ~a ~f ~f ~f ~d"
name real user sys consed)
(terpri *benchmark-output*)
(force-output *benchmark-output*)
(push (cons name (list real user sys consed))
*benchmark-results*)))
and GET - INTERNAL - REAL - TIME returning sensible results . If a version
was defined in / setup-<impl > , we use that instead
(defun generic-bench-time (fun times name)
(declare (ignore name))
(let (before-real after-real before-user after-user)
(setq before-user (get-internal-run-time))
(setq before-real (get-internal-real-time))
(dotimes (i times)
(funcall fun))
(setq after-user (get-internal-run-time))
(setq after-real (get-internal-real-time))
return real user sys consed
(values (/ (- after-real before-real) internal-time-units-per-second)
(/ (- after-user before-user) internal-time-units-per-second)
0 0)))
(eval-when (:load-toplevel :execute)
(unless (fboundp 'bench-time)
GCL as of 20040628 does not implement ( setf fdefinition )
#-gcl (setf (fdefinition 'bench-time) #'generic-bench-time)
#+gcl (defun bench-time (fun times name) (generic-bench-time fun times name))))
EOF
|
21da834c4d66cc1df2253361fb0f7c9a1479fdfa3ad12e0206b560cd085cce18 | day8/re-com | box_test.cljs | (ns re-com.box-test
(:require [cljs.test :refer-macros [is are deftest]]
[reagent.core :as reagent]
[re-com.box :as box]))
(deftest test-flex-child-style
(are [expected actual] (= expected actual)
"initial" (:flex (box/flex-child-style "initial"))
"auto" (:flex (box/flex-child-style "auto"))
"none" (:flex (box/flex-child-style "none"))
"0 0 100px" (:flex (box/flex-child-style "100px"))
"0 0 4.5em" (:flex (box/flex-child-style "4.5em"))
"60 1 0px" (:flex (box/flex-child-style "60%"))
"60 1 0px" (:flex (box/flex-child-style "60"))
"5 4 0%" (:flex (box/flex-child-style "5 4 0%"))))
(deftest test-flex-flow-style
(is (= (box/flex-flow-style "row wrap")
{:-webkit-flex-flow "row wrap"
:flex-flow "row wrap"})))
(deftest test-justify-style
(let [make-expected (fn [x] {:-webkit-justify-content x
:justify-content x})]
(are [expected actual] (= expected actual)
(make-expected "flex-start") (box/justify-style :start)
(make-expected "flex-end") (box/justify-style :end)
(make-expected "center") (box/justify-style :center)
(make-expected "space-between") (box/justify-style :between)
(make-expected "space-around") (box/justify-style :around))))
(deftest test-align-style
(let [make-align-items (fn [x] {:-webkit-align-items x
:align-items x})]
(are [expected actual] (= expected actual)
(make-align-items "flex-start") (box/align-style :align-items :start)
(make-align-items "flex-end") (box/align-style :align-items :end)
(make-align-items "center") (box/align-style :align-items :center)
(make-align-items "baseline") (box/align-style :align-items :baseline)
(make-align-items "stretch") (box/align-style :align-items :stretch))))
(deftest test-scroll-style
(are [expected actual] (= expected actual)
{:overflow "auto"} (box/scroll-style :overflow :auto)
{:overflow "hidden"} (box/scroll-style :overflow :off)
{:overflow "scroll"} (box/scroll-style :overflow :on)
{:overflow "visible"} (box/scroll-style :overflow :spill)))
(defn without-debug
"Returns hiccup form without debug in first attrs as writing tests for that would be complex (i.e. equality of ref-fn fns etc)."
[[tag attrs & rest]]
(into [tag (dissoc attrs :data-rc :ref)] rest))
(deftest test-gap
(are [expected actual] (= expected actual)
[:div
{:class "rc-gap my-gap"
:style {:flex "0 0 1px"
:-webkit-flex "0 0 1px"}
:id "my-id"}]
(without-debug (box/gap :class "my-gap" :attr {:id "my-id"} :size "1px"))))
(deftest test-line
(are [expected actual] (= expected actual)
[:div
{:class "rc-line my-line"
:style {:flex "0 0 1px"
:-webkit-flex "0 0 1px"
:background-color "lightgray"}
:id "my-id"}]
(without-debug (box/line :class "my-line" :attr {:id "my-id"}))))
(deftest test-box
(are [expected actual] (= expected actual)
[:div
{:class "rc-box display-flex my-box"
:style {:flex "none"
:-webkit-flex "none"
:flex-flow "inherit"
:-webkit-flex-flow "inherit"}
:id "my-id"}
"text"]
(without-debug (box/box :class "my-box" :attr {:id "my-id"} :child "text"))))
(deftest test-scroller
(are [expected actual] (= expected actual)
[:div
{:class "rc-scroller display-flex my-scroller"
:style {:flex "auto"
:-webkit-flex "auto"
:flex-flow "inherit"
:-webkit-flex-flow "inherit"
:overflow "auto"}
:id "my-id"}
"text"]
(without-debug (box/scroller :class "my-scroller" :attr {:id "my-id"} :child "text"))))
(deftest test-border
(are [expected actual] (= expected actual)
[:div
{:class "rc-border display-flex my-border"
:style {:flex "none"
:-webkit-flex "none"
:flex-flow "inherit"
:-webkit-flex-flow "inherit"
:border "1px solid lightgrey"}
:id "my-id"}
"text"]
(without-debug (box/border :class "my-border" :attr {:id "my-id"} :child "text"))))
| null | https://raw.githubusercontent.com/day8/re-com/07451b1d19c59eb185548efe93e2d00b5d3eab89/test/re_com/box_test.cljs | clojure | (ns re-com.box-test
(:require [cljs.test :refer-macros [is are deftest]]
[reagent.core :as reagent]
[re-com.box :as box]))
(deftest test-flex-child-style
(are [expected actual] (= expected actual)
"initial" (:flex (box/flex-child-style "initial"))
"auto" (:flex (box/flex-child-style "auto"))
"none" (:flex (box/flex-child-style "none"))
"0 0 100px" (:flex (box/flex-child-style "100px"))
"0 0 4.5em" (:flex (box/flex-child-style "4.5em"))
"60 1 0px" (:flex (box/flex-child-style "60%"))
"60 1 0px" (:flex (box/flex-child-style "60"))
"5 4 0%" (:flex (box/flex-child-style "5 4 0%"))))
(deftest test-flex-flow-style
(is (= (box/flex-flow-style "row wrap")
{:-webkit-flex-flow "row wrap"
:flex-flow "row wrap"})))
(deftest test-justify-style
(let [make-expected (fn [x] {:-webkit-justify-content x
:justify-content x})]
(are [expected actual] (= expected actual)
(make-expected "flex-start") (box/justify-style :start)
(make-expected "flex-end") (box/justify-style :end)
(make-expected "center") (box/justify-style :center)
(make-expected "space-between") (box/justify-style :between)
(make-expected "space-around") (box/justify-style :around))))
(deftest test-align-style
(let [make-align-items (fn [x] {:-webkit-align-items x
:align-items x})]
(are [expected actual] (= expected actual)
(make-align-items "flex-start") (box/align-style :align-items :start)
(make-align-items "flex-end") (box/align-style :align-items :end)
(make-align-items "center") (box/align-style :align-items :center)
(make-align-items "baseline") (box/align-style :align-items :baseline)
(make-align-items "stretch") (box/align-style :align-items :stretch))))
(deftest test-scroll-style
(are [expected actual] (= expected actual)
{:overflow "auto"} (box/scroll-style :overflow :auto)
{:overflow "hidden"} (box/scroll-style :overflow :off)
{:overflow "scroll"} (box/scroll-style :overflow :on)
{:overflow "visible"} (box/scroll-style :overflow :spill)))
(defn without-debug
"Returns hiccup form without debug in first attrs as writing tests for that would be complex (i.e. equality of ref-fn fns etc)."
[[tag attrs & rest]]
(into [tag (dissoc attrs :data-rc :ref)] rest))
(deftest test-gap
(are [expected actual] (= expected actual)
[:div
{:class "rc-gap my-gap"
:style {:flex "0 0 1px"
:-webkit-flex "0 0 1px"}
:id "my-id"}]
(without-debug (box/gap :class "my-gap" :attr {:id "my-id"} :size "1px"))))
(deftest test-line
(are [expected actual] (= expected actual)
[:div
{:class "rc-line my-line"
:style {:flex "0 0 1px"
:-webkit-flex "0 0 1px"
:background-color "lightgray"}
:id "my-id"}]
(without-debug (box/line :class "my-line" :attr {:id "my-id"}))))
(deftest test-box
(are [expected actual] (= expected actual)
[:div
{:class "rc-box display-flex my-box"
:style {:flex "none"
:-webkit-flex "none"
:flex-flow "inherit"
:-webkit-flex-flow "inherit"}
:id "my-id"}
"text"]
(without-debug (box/box :class "my-box" :attr {:id "my-id"} :child "text"))))
(deftest test-scroller
(are [expected actual] (= expected actual)
[:div
{:class "rc-scroller display-flex my-scroller"
:style {:flex "auto"
:-webkit-flex "auto"
:flex-flow "inherit"
:-webkit-flex-flow "inherit"
:overflow "auto"}
:id "my-id"}
"text"]
(without-debug (box/scroller :class "my-scroller" :attr {:id "my-id"} :child "text"))))
(deftest test-border
(are [expected actual] (= expected actual)
[:div
{:class "rc-border display-flex my-border"
:style {:flex "none"
:-webkit-flex "none"
:flex-flow "inherit"
:-webkit-flex-flow "inherit"
:border "1px solid lightgrey"}
:id "my-id"}
"text"]
(without-debug (box/border :class "my-border" :attr {:id "my-id"} :child "text"))))
| |
109abd4fdca0a3a98b6784503ac184461eebc13eb8943b131c927df663f2d34b | exercism/babashka | project.clj | (defproject go-counting "0.1.0-SNAPSHOT"
:description "go-counting exercise."
:url "-counting"
:dependencies [[org.clojure/clojure "1.10.0"]])
| null | https://raw.githubusercontent.com/exercism/babashka/707356c52e08490e66cb1b2e63e4f4439d91cf08/exercises/practice/go-counting/project.clj | clojure | (defproject go-counting "0.1.0-SNAPSHOT"
:description "go-counting exercise."
:url "-counting"
:dependencies [[org.clojure/clojure "1.10.0"]])
| |
03cb550019d490aabf09fa050061af032f6dbb731bd43248af67e0d1817bb253 | Clozure/ccl | x86-backend.lisp | -*- Mode : Lisp ; Package : CCL -*-
;;;
Copyright 2005 - 2009 Clozure Associates
;;;
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
;;; you may not use this file except in compliance with the License.
;;; You may obtain a copy of the License at
;;;
;;; -2.0
;;;
;;; Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
;;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;;; See the License for the specific language governing permissions and
;;; limitations under the License.
(in-package "CCL")
(next-nx-defops)
(defvar *x862-specials* nil)
(let* ((newsize (%i+ (next-nx-num-ops) 10))
(old *x862-specials*)
(oldsize (length old)))
(declare (fixnum newsize oldsize))
(unless (>= oldsize newsize)
(let* ((v (make-array newsize :initial-element nil)))
(dotimes (i oldsize (setq *x862-specials* v))
(setf (svref v i) (svref old i))))))
(defun x86-encode-vinsn-operand-type (thing backend)
(when thing
(if (atom thing)
(x86::encode-operand-type :label)
(ecase (car thing)
(:% (ecase (arch::target-lisp-node-size (backend-target-arch backend))
(8 (x86::encode-operand-type :reg64))
(4 (x86::encode-operand-type :reg32))))
(:%acc (ecase (arch::target-lisp-node-size (backend-target-arch backend))
(8 (x86::encode-operand-type :reg64 :acc))
(4 (x86::encode-operand-type :reg32 :acc))))
(:%q (x86::encode-operand-type :reg64))
(:%accq (x86::encode-operand-type :reg64 :acc))
(:%l (x86::encode-operand-type :reg32))
(:%accl (x86::encode-operand-type :reg32 :acc))
(:%w (x86::encode-operand-type :reg16))
(:%accw (x86::encode-operand-type :reg16 :acc))
(:%b (x86::encode-operand-type :reg8))
(:%accb (x86::encode-operand-type :reg8 :acc))
(:%xmm (x86::encode-operand-type :regxmm))
(:%mmx (x86::encode-operand-type :regmmx))
(:@ (x86::encode-operand-type :anymem))
(:rcontext (x86::encode-operand-type :anymem))
(:$1 (x86::encode-operand-type :imm1) )
(:$b (x86::encode-operand-type :imm8s ))
(:$ub (x86::encode-operand-type :imm8))
(:$w (x86::encode-operand-type :imm16))
(:$l (x86::encode-operand-type :imm32s))
(:$ul (x86::encode-operand-type :imm32))
(:$q (x86::encode-operand-type :imm64))
(:%shift (x86::encode-operand-type :shiftcount :reg8))
(:$self (x86::encode-operand-type :self))))))
(defun lookup-x86-opcode (form backend)
(when (consp form)
(let* ((name (string (car form)))
(templates (gethash name x86::*x86-opcode-template-lists*)))
(when templates
(flet ((optype (thing)
(x86-encode-vinsn-operand-type thing backend)))
(let* ((operands (cdr form))
(type0 (optype (pop operands)))
(type1 (optype (pop operands)))
(type2 (optype (car operands))))
(dolist (template templates)
(when (x86::match-template-types template type0 type1 type2 backend)
(collect ((types))
(if type0 (types type0))
(if type1 (types type1))
(if type2 (types type2))
(return (values (x86::x86-opcode-template-ordinal template)
(types))))))))))))
(defun fixup-opcode-ordinals (vinsn-template opcode-templates &optional (backend *target-backend*))
(let* ((changed ()))
(dolist (vinsn-opcode (vinsn-template-opcode-alist vinsn-template))
(destructuring-bind (old-ordinal name &optional type0 type1 type2) vinsn-opcode
(let* ((opcode-templates (gethash name opcode-templates)))
(unless opcode-templates
(error "Unknown X86 instruction - ~a. Odd, because it was once a known instruction." name))
(let* ((new-ordinal (dolist (template opcode-templates)
(when (x86::match-template-types template type0 type1 type2 backend)
(return (x86::x86-opcode-template-ordinal template))))))
(unless new-ordinal
(error "No match for opcode ~s in ~s" vinsn-opcode vinsn-template))
(unless (eql old-ordinal new-ordinal)
(setf (car vinsn-opcode) new-ordinal)
(push (cons old-ordinal new-ordinal) changed))))))
(when changed
( format t " ~ & opcode ordinals changed in ~s : ~s " vinsn - template changed )
(flet ((update-instruction (i)
(when (consp i)
;; An :ANCHORED-UUO directive contains a real
( vinsn - encoded ) instruction ( typically a UUO ) in
its cadr . Other directives wo n't contain embedded
;; instructions and whatever's in their CARs won't
;; match in the assoc below.
(when (eq (car i) :anchored-uuo)
(setq i (cadr i)))
(let* ((pair (assoc (car i) changed :test #'eq)))
(when pair
(setf (car i) (cdr pair)))))))
(labels ((fixup-form (form)
(unless (atom form)
(if (atom (car form))
(update-instruction form)
(dolist (f (cdr form))
(fixup-form f))))))
(dolist (form (vinsn-template-body vinsn-template))
(fixup-form form)))))))
(defparameter *report-missing-vinsns* nil)
(defun fixup-x86-vinsn-templates (template-hash opcode-templates &optional (backend *target-backend*))
(maphash #'(lambda (name vinsn-template)
(if (not (cdr vinsn-template))
(when *report-missing-vinsns*
(warn "Reference to undefined vinsn ~s" name))
(fixup-opcode-ordinals (cdr vinsn-template) opcode-templates backend)))
template-hash))
;;; This defines a template. All expressions in the body must be
;;; evaluable at macroexpansion time.
(defun define-x86-vinsn (backend vinsn-name results args temps body)
(let* ((opcode-lookup (backend-lookup-opcode backend))
(backend-name (backend-name backend))
(arch-name (backend-target-arch-name backend))
(template-hash (backend-p2-template-hash-name backend))
(name-list ())
(attrs 0)
(nhybrids 0)
(local-labels ())
(referenced-labels ())
(source-indicator (form-symbol arch-name "-VINSN"))
(opcode-alist ()))
(flet ((valid-spec-name (x)
(or (and (consp x)
(consp (cdr x))
(null (cddr x))
(atom (car x))
(or (assoc (cadr x) *vreg-specifier-constant-constraints* :test #'eq)
(assoc (cadr x) *spec-class-storage-class-alist* :test #'eq)
(eq (cadr x) :label)
(and (consp (cadr x)) (eq (caadr x) :label) (consp (cdadr x)) (null (cddadr x)))
(and (consp (cadr x))
(or
(assoc (caadr x) *vreg-specifier-constant-constraints* :test #'eq)
(assoc (caadr x) *spec-class-storage-class-alist* :test #'eq))))
(car x))
(error "Invalid vreg spec: ~s" x)))
(add-spec-name (vname)
(if (member vname name-list :test #'eq)
(error "Duplicate name ~s in vinsn ~s" vname vinsn-name)
(push vname name-list))))
(declare (dynamic-extent #'valid-spec-name #'add-spec-name))
(when (consp vinsn-name)
(setq attrs (encode-vinsn-attributes (cdr vinsn-name))
vinsn-name (car vinsn-name)))
(unless (and (symbolp vinsn-name) (eq *CCL-PACKAGE* (symbol-package vinsn-name)))
(setq vinsn-name (intern (string vinsn-name) *CCL-PACKAGE*)))
(dolist (n (append args temps))
(add-spec-name (valid-spec-name n)))
(dolist (form body)
(if (atom form)
(add-spec-name form)))
(setq name-list (nreverse name-list))
;; We now know that "args" is an alist; we don't know if
" results " is . First , make sure that there are no duplicate
;; result names (and validate "results".)
(do* ((res results tail)
(tail (cdr res) (cdr tail)))
((null res))
(let* ((name (valid-spec-name (car res))))
(if (assoc name tail :test #'eq)
(error "Duplicate result name ~s in ~s." name results))))
(let* ((non-hybrid-results ())
(match-args args))
(dolist (res results)
(let* ((res-name (car res)))
(if (not (assoc res-name args :test #'eq))
(if (not (= nhybrids 0))
(error "result ~s should also name an argument. " res-name)
(push res-name non-hybrid-results))
(if (eq res-name (caar match-args))
(setf nhybrids (1+ nhybrids)
match-args (cdr match-args))
(error "~S - hybrid results should appear in same order as arguments." res-name)))))
(dolist (name non-hybrid-results)
(add-spec-name name)))
(let* ((k -1))
(declare (fixnum k))
(let* ((name-alist (mapcar #'(lambda (n) (cons n (list (incf k)))) name-list)))
(flet ((find-name (n)
(let* ((pair (assoc n name-alist :test #'eq)))
(declare (list pair))
(if pair
(cdr pair)
(or (subprim-name->offset n backend)
(error "Unknown name ~s" n))))))
(labels ((simplify-simple-operand (op)
(if (atom op)
(if (typep op 'fixnum)
op
(if (eq op :rcontext)
op
(if (constantp op)
(progn
(if (keywordp op)
(pushnew op referenced-labels))
(eval op))
(find-name op))))
(if (eq (car op) :^)
(list :^ (simplify-simple-operand (cadr op)))
(if (eq (car op) :apply)
`(,(cadr op) ,@(mapcar #'simplify-operand (cddr op)))
(if (member (car op)
'(:tra :align :byte :word :long :quad :talign))
`(,(car op) ,(simplify-operand (cadr op)))
(simplify-operand (eval op))))))) ; Handler-case this?
(simplify-memory-operand (op)
;; This happens to be the only place that
;; we allow segment registers.
(let* ((seg nil)
(disp nil)
(base nil)
(index nil)
(scale nil))
(do* ((form op (cdr form)))
((null form) (list seg disp base index scale))
(let* ((head (car form)))
(if (consp head)
(case (car head)
(:%seg
(if (eq form op)
(setq seg (simplify-operand (cadr head)))
(error "Bad :%seg in ~s" op)))
((:%q :% :%l)
(let* ((r (simplify-operand head)))
(if base
(if index
(error "Extra register ~s in ~s"
head op)
(setq index r))
(setq base r))))
(t
(if (and (null (cdr form))
(or disp base index))
(progn
(setq scale (simplify-simple-operand head))
(if (and base (not index))
(setq index base base nil)))
(if (not (or disp base index))
(setq disp (simplify-simple-operand head))
(error "~s not expected in ~s" head op)))))
(if (and (null (cdr form))
(or disp base index))
(progn
(setq scale (simplify-simple-operand head))
(if (and base (not index))
(setq index base base nil)))
(if (not (or disp base index))
(setq disp (simplify-simple-operand head))
(error "~s not expected in ~s" head op))))))))
(simplify-operand (op)
(cond ((atom op)
(simplify-simple-operand op))
((eq (car op) :@)
(cons :@
(simplify-memory-operand (cdr op))))
((eq (car op) :rcontext)
(list :rcontext
(simplify-simple-operand (cadr op))))
((member (car op)
'(:% :%q :%l :%w :%b
:%acc :%accq :%accl :%accw :%accb
:$ :$1 :$b :$ub :$w :$l
:$ul :$q :%mmx :%xmm :%shift :$self))
(simplify-simple-operand (cadr op)))
(t
(simplify-simple-operand op)))))
(labels ((simplify-constraint (guard)
;; A constraint is one of
(: constant ) ; " value "
of constant
;; (:pred <function-name> <operand>* ;
;; <function-name> unquoted, each <operand>
;; is a vreg-name or constant expression.
(: type vreg - name typeval ) ; vreg is of
;; "type" typeval
;;
;;(:not <constraint>) ; constraint is false
;; (:and <constraint> ...) ; conjuntion
;; (:or <constraint> ...) ; disjunction
;; There's no "else"; we'll see how ugly it
;; is without one.
(destructuring-bind (guardname &rest others) guard
(ecase guardname
(:not
(destructuring-bind (negation) others
`(:not ,(simplify-constraint negation))))
(:pred
(destructuring-bind (predicate &rest operands) others
`(:pred ,predicate ,@(mapcar #'simplify-operand operands))))
((:eq :lt :gt :type)
(destructuring-bind (vreg constant) others
(unless (constantp constant)
(error "~S : not constant in constraint ~s ." constant guard))
`(,guardname ,(find-name vreg) ,(eval constant))))
((:or :and)
(unless others (error "Missing constraint list in ~s ." guard))
`(,guardname ,(mapcar #'simplify-constraint others))))))
(simplify-form (form)
(if (atom form)
(progn
(if (keywordp form) (push form local-labels) )
form)
(destructuring-bind (&whole w opname &rest opvals) form
(if (consp opname) ; A constraint, we presume ...
(cons (simplify-constraint opname)
(mapcar #'simplify-form opvals))
(if (keywordp opname)
(case opname
(:if
(destructuring-bind (test true false) opvals
(list opname
(simplify-constraint test)
(simplify-form true)
(simplify-form false))))
(:progn
(cons opname (mapcar #'simplify-form opvals)))
(t
(list opname
(if (eq opname :anchored-uuo)
(simplify-form (car opvals))
(simplify-operand (car opvals))))))
(let* ((name (string opname)))
(multiple-value-bind (opnum types)
(funcall opcode-lookup form backend)
(if (not opnum)
(error "Unknown ~A instruction in ~s" backend-name form)
(let* ((opvals (mapcar #'simplify-operand opvals)))
(setf (assq opnum opcode-alist) (cons name types))
`(,opnum ,@opvals)))))))))))
(let* ((template (make-vinsn-template :name vinsn-name
:result-vreg-specs results
:argument-vreg-specs args
:temp-vreg-specs temps
:nhybrids nhybrids
:results&args (append results (nthcdr nhybrids args))
:nvp (- (+ (length results) (length args) (length temps))
nhybrids)
:body (prog1 (mapcar #'simplify-form body)
(dolist (ref referenced-labels)
(unless (memq ref local-labels)
(error
"local-label ~S was referenced but ~
never defined in VINSN-TEMPLATE definition for ~s"
ref vinsn-name))))
:local-labels local-labels
:attributes attrs
:opcode-alist opcode-alist)))
`(progn
(set-vinsn-template ',vinsn-name ,template ,template-hash)
(record-source-file ',vinsn-name ',source-indicator)
',vinsn-name))))))))))
#+x8632-target
(require "X8632-BACKEND")
#+x8664-target
(require "X8664-BACKEND")
(defparameter *x86-backend*
#+x8632-target *x8632-backend*
#+x8664-target *x8664-backend*
#-x86-target nil)
(defun fixup-x86-backend (&rest args)
#+x8632-target (apply #'fixup-x8632-backend args)
#+x8664-target (apply #'fixup-x8664-backend args)
#-x86-target (declare (ignore args))
)
(provide "X86-BACKEND")
| null | https://raw.githubusercontent.com/Clozure/ccl/6c1a9458f7a5437b73ec227e989aa5b825f32fd3/compiler/X86/x86-backend.lisp | lisp | Package : CCL -*-
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
An :ANCHORED-UUO directive contains a real
instructions and whatever's in their CARs won't
match in the assoc below.
This defines a template. All expressions in the body must be
evaluable at macroexpansion time.
We now know that "args" is an alist; we don't know if
result names (and validate "results".)
Handler-case this?
This happens to be the only place that
we allow segment registers.
A constraint is one of
" value "
(:pred <function-name> <operand>* ;
<function-name> unquoted, each <operand>
is a vreg-name or constant expression.
vreg is of
"type" typeval
(:not <constraint>) ; constraint is false
(:and <constraint> ...) ; conjuntion
(:or <constraint> ...) ; disjunction
There's no "else"; we'll see how ugly it
is without one.
A constraint, we presume ... | Copyright 2005 - 2009 Clozure Associates
distributed under the License is distributed on an " AS IS " BASIS ,
(in-package "CCL")
(next-nx-defops)
(defvar *x862-specials* nil)
(let* ((newsize (%i+ (next-nx-num-ops) 10))
(old *x862-specials*)
(oldsize (length old)))
(declare (fixnum newsize oldsize))
(unless (>= oldsize newsize)
(let* ((v (make-array newsize :initial-element nil)))
(dotimes (i oldsize (setq *x862-specials* v))
(setf (svref v i) (svref old i))))))
(defun x86-encode-vinsn-operand-type (thing backend)
(when thing
(if (atom thing)
(x86::encode-operand-type :label)
(ecase (car thing)
(:% (ecase (arch::target-lisp-node-size (backend-target-arch backend))
(8 (x86::encode-operand-type :reg64))
(4 (x86::encode-operand-type :reg32))))
(:%acc (ecase (arch::target-lisp-node-size (backend-target-arch backend))
(8 (x86::encode-operand-type :reg64 :acc))
(4 (x86::encode-operand-type :reg32 :acc))))
(:%q (x86::encode-operand-type :reg64))
(:%accq (x86::encode-operand-type :reg64 :acc))
(:%l (x86::encode-operand-type :reg32))
(:%accl (x86::encode-operand-type :reg32 :acc))
(:%w (x86::encode-operand-type :reg16))
(:%accw (x86::encode-operand-type :reg16 :acc))
(:%b (x86::encode-operand-type :reg8))
(:%accb (x86::encode-operand-type :reg8 :acc))
(:%xmm (x86::encode-operand-type :regxmm))
(:%mmx (x86::encode-operand-type :regmmx))
(:@ (x86::encode-operand-type :anymem))
(:rcontext (x86::encode-operand-type :anymem))
(:$1 (x86::encode-operand-type :imm1) )
(:$b (x86::encode-operand-type :imm8s ))
(:$ub (x86::encode-operand-type :imm8))
(:$w (x86::encode-operand-type :imm16))
(:$l (x86::encode-operand-type :imm32s))
(:$ul (x86::encode-operand-type :imm32))
(:$q (x86::encode-operand-type :imm64))
(:%shift (x86::encode-operand-type :shiftcount :reg8))
(:$self (x86::encode-operand-type :self))))))
(defun lookup-x86-opcode (form backend)
(when (consp form)
(let* ((name (string (car form)))
(templates (gethash name x86::*x86-opcode-template-lists*)))
(when templates
(flet ((optype (thing)
(x86-encode-vinsn-operand-type thing backend)))
(let* ((operands (cdr form))
(type0 (optype (pop operands)))
(type1 (optype (pop operands)))
(type2 (optype (car operands))))
(dolist (template templates)
(when (x86::match-template-types template type0 type1 type2 backend)
(collect ((types))
(if type0 (types type0))
(if type1 (types type1))
(if type2 (types type2))
(return (values (x86::x86-opcode-template-ordinal template)
(types))))))))))))
(defun fixup-opcode-ordinals (vinsn-template opcode-templates &optional (backend *target-backend*))
(let* ((changed ()))
(dolist (vinsn-opcode (vinsn-template-opcode-alist vinsn-template))
(destructuring-bind (old-ordinal name &optional type0 type1 type2) vinsn-opcode
(let* ((opcode-templates (gethash name opcode-templates)))
(unless opcode-templates
(error "Unknown X86 instruction - ~a. Odd, because it was once a known instruction." name))
(let* ((new-ordinal (dolist (template opcode-templates)
(when (x86::match-template-types template type0 type1 type2 backend)
(return (x86::x86-opcode-template-ordinal template))))))
(unless new-ordinal
(error "No match for opcode ~s in ~s" vinsn-opcode vinsn-template))
(unless (eql old-ordinal new-ordinal)
(setf (car vinsn-opcode) new-ordinal)
(push (cons old-ordinal new-ordinal) changed))))))
(when changed
( format t " ~ & opcode ordinals changed in ~s : ~s " vinsn - template changed )
(flet ((update-instruction (i)
(when (consp i)
( vinsn - encoded ) instruction ( typically a UUO ) in
its cadr . Other directives wo n't contain embedded
(when (eq (car i) :anchored-uuo)
(setq i (cadr i)))
(let* ((pair (assoc (car i) changed :test #'eq)))
(when pair
(setf (car i) (cdr pair)))))))
(labels ((fixup-form (form)
(unless (atom form)
(if (atom (car form))
(update-instruction form)
(dolist (f (cdr form))
(fixup-form f))))))
(dolist (form (vinsn-template-body vinsn-template))
(fixup-form form)))))))
(defparameter *report-missing-vinsns* nil)
(defun fixup-x86-vinsn-templates (template-hash opcode-templates &optional (backend *target-backend*))
(maphash #'(lambda (name vinsn-template)
(if (not (cdr vinsn-template))
(when *report-missing-vinsns*
(warn "Reference to undefined vinsn ~s" name))
(fixup-opcode-ordinals (cdr vinsn-template) opcode-templates backend)))
template-hash))
(defun define-x86-vinsn (backend vinsn-name results args temps body)
(let* ((opcode-lookup (backend-lookup-opcode backend))
(backend-name (backend-name backend))
(arch-name (backend-target-arch-name backend))
(template-hash (backend-p2-template-hash-name backend))
(name-list ())
(attrs 0)
(nhybrids 0)
(local-labels ())
(referenced-labels ())
(source-indicator (form-symbol arch-name "-VINSN"))
(opcode-alist ()))
(flet ((valid-spec-name (x)
(or (and (consp x)
(consp (cdr x))
(null (cddr x))
(atom (car x))
(or (assoc (cadr x) *vreg-specifier-constant-constraints* :test #'eq)
(assoc (cadr x) *spec-class-storage-class-alist* :test #'eq)
(eq (cadr x) :label)
(and (consp (cadr x)) (eq (caadr x) :label) (consp (cdadr x)) (null (cddadr x)))
(and (consp (cadr x))
(or
(assoc (caadr x) *vreg-specifier-constant-constraints* :test #'eq)
(assoc (caadr x) *spec-class-storage-class-alist* :test #'eq))))
(car x))
(error "Invalid vreg spec: ~s" x)))
(add-spec-name (vname)
(if (member vname name-list :test #'eq)
(error "Duplicate name ~s in vinsn ~s" vname vinsn-name)
(push vname name-list))))
(declare (dynamic-extent #'valid-spec-name #'add-spec-name))
(when (consp vinsn-name)
(setq attrs (encode-vinsn-attributes (cdr vinsn-name))
vinsn-name (car vinsn-name)))
(unless (and (symbolp vinsn-name) (eq *CCL-PACKAGE* (symbol-package vinsn-name)))
(setq vinsn-name (intern (string vinsn-name) *CCL-PACKAGE*)))
(dolist (n (append args temps))
(add-spec-name (valid-spec-name n)))
(dolist (form body)
(if (atom form)
(add-spec-name form)))
(setq name-list (nreverse name-list))
" results " is . First , make sure that there are no duplicate
(do* ((res results tail)
(tail (cdr res) (cdr tail)))
((null res))
(let* ((name (valid-spec-name (car res))))
(if (assoc name tail :test #'eq)
(error "Duplicate result name ~s in ~s." name results))))
(let* ((non-hybrid-results ())
(match-args args))
(dolist (res results)
(let* ((res-name (car res)))
(if (not (assoc res-name args :test #'eq))
(if (not (= nhybrids 0))
(error "result ~s should also name an argument. " res-name)
(push res-name non-hybrid-results))
(if (eq res-name (caar match-args))
(setf nhybrids (1+ nhybrids)
match-args (cdr match-args))
(error "~S - hybrid results should appear in same order as arguments." res-name)))))
(dolist (name non-hybrid-results)
(add-spec-name name)))
(let* ((k -1))
(declare (fixnum k))
(let* ((name-alist (mapcar #'(lambda (n) (cons n (list (incf k)))) name-list)))
(flet ((find-name (n)
(let* ((pair (assoc n name-alist :test #'eq)))
(declare (list pair))
(if pair
(cdr pair)
(or (subprim-name->offset n backend)
(error "Unknown name ~s" n))))))
(labels ((simplify-simple-operand (op)
(if (atom op)
(if (typep op 'fixnum)
op
(if (eq op :rcontext)
op
(if (constantp op)
(progn
(if (keywordp op)
(pushnew op referenced-labels))
(eval op))
(find-name op))))
(if (eq (car op) :^)
(list :^ (simplify-simple-operand (cadr op)))
(if (eq (car op) :apply)
`(,(cadr op) ,@(mapcar #'simplify-operand (cddr op)))
(if (member (car op)
'(:tra :align :byte :word :long :quad :talign))
`(,(car op) ,(simplify-operand (cadr op)))
(simplify-memory-operand (op)
(let* ((seg nil)
(disp nil)
(base nil)
(index nil)
(scale nil))
(do* ((form op (cdr form)))
((null form) (list seg disp base index scale))
(let* ((head (car form)))
(if (consp head)
(case (car head)
(:%seg
(if (eq form op)
(setq seg (simplify-operand (cadr head)))
(error "Bad :%seg in ~s" op)))
((:%q :% :%l)
(let* ((r (simplify-operand head)))
(if base
(if index
(error "Extra register ~s in ~s"
head op)
(setq index r))
(setq base r))))
(t
(if (and (null (cdr form))
(or disp base index))
(progn
(setq scale (simplify-simple-operand head))
(if (and base (not index))
(setq index base base nil)))
(if (not (or disp base index))
(setq disp (simplify-simple-operand head))
(error "~s not expected in ~s" head op)))))
(if (and (null (cdr form))
(or disp base index))
(progn
(setq scale (simplify-simple-operand head))
(if (and base (not index))
(setq index base base nil)))
(if (not (or disp base index))
(setq disp (simplify-simple-operand head))
(error "~s not expected in ~s" head op))))))))
(simplify-operand (op)
(cond ((atom op)
(simplify-simple-operand op))
((eq (car op) :@)
(cons :@
(simplify-memory-operand (cdr op))))
((eq (car op) :rcontext)
(list :rcontext
(simplify-simple-operand (cadr op))))
((member (car op)
'(:% :%q :%l :%w :%b
:%acc :%accq :%accl :%accw :%accb
:$ :$1 :$b :$ub :$w :$l
:$ul :$q :%mmx :%xmm :%shift :$self))
(simplify-simple-operand (cadr op)))
(t
(simplify-simple-operand op)))))
(labels ((simplify-constraint (guard)
of constant
(destructuring-bind (guardname &rest others) guard
(ecase guardname
(:not
(destructuring-bind (negation) others
`(:not ,(simplify-constraint negation))))
(:pred
(destructuring-bind (predicate &rest operands) others
`(:pred ,predicate ,@(mapcar #'simplify-operand operands))))
((:eq :lt :gt :type)
(destructuring-bind (vreg constant) others
(unless (constantp constant)
(error "~S : not constant in constraint ~s ." constant guard))
`(,guardname ,(find-name vreg) ,(eval constant))))
((:or :and)
(unless others (error "Missing constraint list in ~s ." guard))
`(,guardname ,(mapcar #'simplify-constraint others))))))
(simplify-form (form)
(if (atom form)
(progn
(if (keywordp form) (push form local-labels) )
form)
(destructuring-bind (&whole w opname &rest opvals) form
(cons (simplify-constraint opname)
(mapcar #'simplify-form opvals))
(if (keywordp opname)
(case opname
(:if
(destructuring-bind (test true false) opvals
(list opname
(simplify-constraint test)
(simplify-form true)
(simplify-form false))))
(:progn
(cons opname (mapcar #'simplify-form opvals)))
(t
(list opname
(if (eq opname :anchored-uuo)
(simplify-form (car opvals))
(simplify-operand (car opvals))))))
(let* ((name (string opname)))
(multiple-value-bind (opnum types)
(funcall opcode-lookup form backend)
(if (not opnum)
(error "Unknown ~A instruction in ~s" backend-name form)
(let* ((opvals (mapcar #'simplify-operand opvals)))
(setf (assq opnum opcode-alist) (cons name types))
`(,opnum ,@opvals)))))))))))
(let* ((template (make-vinsn-template :name vinsn-name
:result-vreg-specs results
:argument-vreg-specs args
:temp-vreg-specs temps
:nhybrids nhybrids
:results&args (append results (nthcdr nhybrids args))
:nvp (- (+ (length results) (length args) (length temps))
nhybrids)
:body (prog1 (mapcar #'simplify-form body)
(dolist (ref referenced-labels)
(unless (memq ref local-labels)
(error
"local-label ~S was referenced but ~
never defined in VINSN-TEMPLATE definition for ~s"
ref vinsn-name))))
:local-labels local-labels
:attributes attrs
:opcode-alist opcode-alist)))
`(progn
(set-vinsn-template ',vinsn-name ,template ,template-hash)
(record-source-file ',vinsn-name ',source-indicator)
',vinsn-name))))))))))
#+x8632-target
(require "X8632-BACKEND")
#+x8664-target
(require "X8664-BACKEND")
(defparameter *x86-backend*
#+x8632-target *x8632-backend*
#+x8664-target *x8664-backend*
#-x86-target nil)
(defun fixup-x86-backend (&rest args)
#+x8632-target (apply #'fixup-x8632-backend args)
#+x8664-target (apply #'fixup-x8664-backend args)
#-x86-target (declare (ignore args))
)
(provide "X86-BACKEND")
|
ecd4cbefdcf36f2c9da5d67efe9eace1e5bbd5c13d5691bbe174c6ed2e1e5631 | fragnix/fragnix | Network.Wai.Handler.Warp.Some.hs | # LANGUAGE Haskell98 #
# LINE 1 " Network / Wai / Handler / Warp / Some.hs " #
{-# LANGUAGE BangPatterns #-}
module Network.Wai.Handler.Warp.Some (
Some
, singleton
, top
, lookupWith
, union
, toList
, prune
) where
----------------------------------------------------------------
| One ore more list to implement .
data Some a = One !a
Two or more
deriving (Eq,Show)
# INLINE singleton #
singleton :: a -> Some a
singleton x = One x
# INLINE top #
top :: Some a -> a
top (One x) = x
top (Tom x _) = x
# INLINE lookupWith #
lookupWith :: (a -> Bool) -> Some a -> Maybe a
lookupWith f s = go s
where
go (One x)
| f x = Just x
| otherwise = Nothing
go (Tom x xs)
| f x = Just x
| otherwise = go xs
# INLINE union #
union :: Some a -> Some a -> Some a
union s t = go s t
where
go (One x) u = Tom x u
go (Tom x xs) u = go xs (Tom x u)
# INLINE toList #
toList :: Some a -> [a]
toList s = go s []
where
go (One x) !acc = x : acc
go (Tom x xs) !acc = go xs (x : acc)
# INLINE prune #
prune :: (a -> IO Bool) -> Some a -> IO (Maybe (Some a))
prune act s = go s
where
go (One x) = do
keep <- act x
return $ if keep then
Just (One x)
else
Nothing
go (Tom x xs) = do
keep <- act x
mys <- go xs
return $ if keep then
case mys of
Nothing -> Just (One x)
Just ys -> Just (Tom x ys)
else
mys
| null | https://raw.githubusercontent.com/fragnix/fragnix/b9969e9c6366e2917a782f3ac4e77cce0835448b/tests/packages/scotty/Network.Wai.Handler.Warp.Some.hs | haskell | # LANGUAGE BangPatterns #
-------------------------------------------------------------- | # LANGUAGE Haskell98 #
# LINE 1 " Network / Wai / Handler / Warp / Some.hs " #
module Network.Wai.Handler.Warp.Some (
Some
, singleton
, top
, lookupWith
, union
, toList
, prune
) where
| One ore more list to implement .
data Some a = One !a
Two or more
deriving (Eq,Show)
# INLINE singleton #
singleton :: a -> Some a
singleton x = One x
# INLINE top #
top :: Some a -> a
top (One x) = x
top (Tom x _) = x
# INLINE lookupWith #
lookupWith :: (a -> Bool) -> Some a -> Maybe a
lookupWith f s = go s
where
go (One x)
| f x = Just x
| otherwise = Nothing
go (Tom x xs)
| f x = Just x
| otherwise = go xs
# INLINE union #
union :: Some a -> Some a -> Some a
union s t = go s t
where
go (One x) u = Tom x u
go (Tom x xs) u = go xs (Tom x u)
# INLINE toList #
toList :: Some a -> [a]
toList s = go s []
where
go (One x) !acc = x : acc
go (Tom x xs) !acc = go xs (x : acc)
# INLINE prune #
prune :: (a -> IO Bool) -> Some a -> IO (Maybe (Some a))
prune act s = go s
where
go (One x) = do
keep <- act x
return $ if keep then
Just (One x)
else
Nothing
go (Tom x xs) = do
keep <- act x
mys <- go xs
return $ if keep then
case mys of
Nothing -> Just (One x)
Just ys -> Just (Tom x ys)
else
mys
|
373e10f3e32c62237f804cd2b5aca01d28224ddc5a6ca91b7aeafdb4b82a11f9 | Licenser/ecrdt | vgcounter2.erl | %%%-------------------------------------------------------------------
@author < >
( C ) 2013 , Heinz Nikolaus Gies
%%% @doc
%%% An Implementation of the GCounter (grow only counter) CvRDT
%%% allowing an unknown or changing set of masters masters using
unique ID 's for identification .
%%% @end
Created : 1 Jun 2013 by < >
%%%-------------------------------------------------------------------
-module(vgcounter2).
-behaviour(ecrdt).
-ifdef(TEST).
-include_lib("proper/include/proper.hrl").
-include_lib("eunit/include/eunit.hrl").
-endif.
-export([type/0, is_a/1, new/0, value/1, inc/2, inc/3, merge/2]).
-type vgcounter2_element() :: {Master::term(), Increment::pos_integer()}.
-record(vgcounter2, {vector = [] :: [vgcounter2_element()]}).
-opaque vgcounter2() :: #vgcounter2{}.
-export_type([vgcounter2/0]).
%%%===================================================================
%%% Implementation
%%%===================================================================
%%--------------------------------------------------------------------
%% @doc
%% Tests is the passed data is implementing this type.
%% @end
%%--------------------------------------------------------------------
-spec is_a(any()) -> true | false.
is_a(#vgcounter2{}) ->
true;
is_a(_) ->
false.
%%--------------------------------------------------------------------
%% @doc
%% Returns the type of this object
%% @end
%%--------------------------------------------------------------------
-spec type() -> register | set | gset | counter | gcounter | map.
type() ->
gcounter.
%%--------------------------------------------------------------------
%% @doc
Creates a new empty vgcounter2 .
%% @end
%%--------------------------------------------------------------------
-spec new() -> vgcounter2().
new() ->
#vgcounter2{}.
inc(Increment, C) ->
inc(node(), Increment, C).
%%--------------------------------------------------------------------
%% @doc
%% Increments the counter for a given master, if the master is not yet
%% known it gets added.
%% @end
%%--------------------------------------------------------------------
-spec inc(Master::term(), Increment::pos_integer(), VGCounter::vgcounter2()) ->
VGCounter1::vgcounter2().
inc(Master, Increment,
#vgcounter2{vector = Vector0}) when Increment > 0 ->
case lists:keytake(Master, 1, Vector0) of
false ->
#vgcounter2{vector = [{Master, Increment} | Vector0]};
{value, {_, V0}, Vector1} ->
#vgcounter2{vector = [{Master, V0 + Increment} | Vector1]}
end.
%%--------------------------------------------------------------------
%% @doc
Merges to GCounters , by keeping the maximum known value for each
%% master.
%% @end
%%--------------------------------------------------------------------
-spec merge(VGCounter1::vgcounter2(), VGCounter2::vgcounter2()) ->
VGCounter::vgcounter2().
merge(#vgcounter2{vector = Vector0},
#vgcounter2{vector = Vector1}) ->
#vgcounter2{
vector = merge_vectors(
lists:sort(Vector0),
lists:sort(Vector1),
[])
}.
%%--------------------------------------------------------------------
%% @doc
%% Compiles the value of the counter by summing up all master values.
%% @end
%%--------------------------------------------------------------------
-spec value(VGCounter::vgcounter2()) -> Value::pos_integer().
value(#vgcounter2{vector = Vector}) ->
lists:sum([V || {_, V} <- Vector]).
%%%===================================================================
%%% Internal Functions
%%%===================================================================
%% If the master exists in both vectors take the bigger value
merge_vectors([{M, V0} | R0],
[{M, _V1} | R1],
R) when V0 >= _V1->
merge_vectors(R0, R1, [{M, V0} | R]);
merge_vectors([{M, _} | R0],
[{M, V1} | R1],
R) ->
merge_vectors(R0, R1, [{M, V1} | R]);
If the master on V0 is bigger add .
merge_vectors([{_M0, _} = E0 | R0],
[{_M1, _} | _] = R1,
R) when _M0 < _M1 ->
merge_vectors(R0, R1, [E0 | R]);
%% If the master on V1 is bigger add it.
merge_vectors([{_M0, _} | _] = R0,
[{_M1, _} = E1 | R1 ],
R) when _M0 > _M1 ->
merge_vectors(R0, R1, [E1 | R]);
merge_vectors(R0, [], R) ->
R0 ++ R;
merge_vectors([], R1, R) ->
R1 ++ R.
%%%===================================================================
%%% Tests
%%%===================================================================
-ifdef(TEST).
op(a, E, C1, C2, Check) ->
{inc(a, E, C1), C2, inc(a, E, Check)};
op(b, E, C1, C2, Check) ->
{C1, inc(b, E, C2), inc(b, E, Check)};
op(ab1, E, C1, C2, Check) ->
{inc(a, E, C1), inc(a, E, C2), inc(a, E, Check)};
op(ab2, E, C1, C2, Check) ->
{inc(b, E, C1), inc(b, E, C2), inc(b, E, Check)}.
Applies the list of opperaitons to three empty sets .
apply_ops(Ops) ->
O = new(),
lists:foldl(fun({T, E}, {A, B, C}) ->
op(T, E, A, B, C)
end, {O, O, O}, Ops).
%% A list of opperations and targets.
targets() ->
list({oneof([a, b, ab1, ab2]), pos_integer()}).
prop_vgcounter2() ->
?FORALL(Ts, targets(),
begin
{A, B, C} = apply_ops(Ts),
value(C) =:= value(merge(A, B))
end).
propper_test() ->
?assertEqual([], proper:module(?MODULE, [{to_file, user}, long_result])).
-endif.
| null | https://raw.githubusercontent.com/Licenser/ecrdt/2024b465f4fdf3fb3d2a16679f9aee6c06e62ea4/src/vgcounter2.erl | erlang | -------------------------------------------------------------------
@doc
An Implementation of the GCounter (grow only counter) CvRDT
allowing an unknown or changing set of masters masters using
@end
-------------------------------------------------------------------
===================================================================
Implementation
===================================================================
--------------------------------------------------------------------
@doc
Tests is the passed data is implementing this type.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Returns the type of this object
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Increments the counter for a given master, if the master is not yet
known it gets added.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
master.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Compiles the value of the counter by summing up all master values.
@end
--------------------------------------------------------------------
===================================================================
Internal Functions
===================================================================
If the master exists in both vectors take the bigger value
If the master on V1 is bigger add it.
===================================================================
Tests
===================================================================
A list of opperations and targets. | @author < >
( C ) 2013 , Heinz Nikolaus Gies
unique ID 's for identification .
Created : 1 Jun 2013 by < >
-module(vgcounter2).
-behaviour(ecrdt).
-ifdef(TEST).
-include_lib("proper/include/proper.hrl").
-include_lib("eunit/include/eunit.hrl").
-endif.
-export([type/0, is_a/1, new/0, value/1, inc/2, inc/3, merge/2]).
-type vgcounter2_element() :: {Master::term(), Increment::pos_integer()}.
-record(vgcounter2, {vector = [] :: [vgcounter2_element()]}).
-opaque vgcounter2() :: #vgcounter2{}.
-export_type([vgcounter2/0]).
-spec is_a(any()) -> true | false.
is_a(#vgcounter2{}) ->
true;
is_a(_) ->
false.
-spec type() -> register | set | gset | counter | gcounter | map.
type() ->
gcounter.
Creates a new empty vgcounter2 .
-spec new() -> vgcounter2().
new() ->
#vgcounter2{}.
inc(Increment, C) ->
inc(node(), Increment, C).
%% @doc Increments the value stored for Master by Increment (which
%% must be positive), adding a fresh entry when Master is not yet
%% known in the vector.
-spec inc(Master::term(), Increment::pos_integer(), VGCounter::vgcounter2()) ->
                 VGCounter1::vgcounter2().
inc(Master, Increment,
    #vgcounter2{vector = Vector0}) when Increment > 0 ->
    case lists:keyfind(Master, 1, Vector0) of
        false ->
            #vgcounter2{vector = [{Master, Increment} | Vector0]};
        {Master, Old} ->
            Rest = lists:keydelete(Master, 1, Vector0),
            #vgcounter2{vector = [{Master, Old + Increment} | Rest]}
    end.
%% Merges two GCounters, by keeping the maximum known value for each
%% master.  Both vectors are sorted first so merge_vectors/3 can walk
%% them in lock step.
-spec merge(VGCounter1::vgcounter2(), VGCounter2::vgcounter2()) ->
                   VGCounter::vgcounter2().
merge(#vgcounter2{vector = Vector0},
      #vgcounter2{vector = Vector1}) ->
    #vgcounter2{
       vector = merge_vectors(
                  lists:sort(Vector0),
                  lists:sort(Vector1),
                  [])
      }.
%% @doc Compiles the value of the counter by summing up all master
%% values.
-spec value(VGCounter::vgcounter2()) -> Value::pos_integer().
value(#vgcounter2{vector = Vector}) ->
    lists:foldl(fun({_Master, V}, Sum) -> Sum + V end, 0, Vector).
%% Merges two vectors of {Master, Value} pairs that are sorted by
%% master.  Walks both vectors in lock step; the accumulator R
%% collects the result (result order is irrelevant, value/1 only sums
%% it).  Underscore-prefixed variables that were used in guards have
%% been renamed to avoid compiler warnings.
%% If the master exists in both vectors take the bigger value.
merge_vectors([{M, V0} | R0],
              [{M, V1} | R1],
              R) when V0 >= V1 ->
    merge_vectors(R0, R1, [{M, V0} | R]);
%% If the value on the second vector is bigger take it.
merge_vectors([{M, _} | R0],
              [{M, V1} | R1],
              R) ->
    merge_vectors(R0, R1, [{M, V1} | R]);
%% Master only present in the first vector (smaller key): keep it.
merge_vectors([{M0, _} = E0 | R0],
              [{M1, _} | _] = R1,
              R) when M0 < M1 ->
    merge_vectors(R0, R1, [E0 | R]);
%% Master only present in the second vector: keep it.
merge_vectors([{M0, _} | _] = R0,
              [{M1, _} = E1 | R1],
              R) when M0 > M1 ->
    merge_vectors(R0, R1, [E1 | R]);
%% One vector exhausted: the remainder is kept as-is.
merge_vectors(R0, [], R) ->
    R0 ++ R;
merge_vectors([], R1, R) ->
    R1 ++ R.
-ifdef(TEST).
%% Test helper: applies a single operation to the pair of replicas
%% (C1, C2) and to the sequential check counter.  Tags a/b touch one
%% replica only; ab1/ab2 apply the same master's increment to both
%% replicas while the check counter records it once (merge keeps the
%% maximum per master, so the duplicate must not be counted twice).
op(a, E, C1, C2, Check) ->
    {inc(a, E, C1), C2, inc(a, E, Check)};
op(b, E, C1, C2, Check) ->
    {C1, inc(b, E, C2), inc(b, E, Check)};
op(ab1, E, C1, C2, Check) ->
    {inc(a, E, C1), inc(a, E, C2), inc(a, E, Check)};
op(ab2, E, C1, C2, Check) ->
    {inc(b, E, C1), inc(b, E, C2), inc(b, E, Check)}.
%% Applies the list of operations to three empty counters: two
%% replicas and one sequential check counter.
apply_ops(Ops) ->
    O = new(),
    lists:foldl(fun({T, E}, {A, B, C}) ->
                        op(T, E, A, B, C)
                end, {O, O, O}, Ops).
%% PropEr generator: a random list of {operation-tag, increment}
%% pairs used to drive the counters.
targets() ->
    list({oneof([a, b, ab1, ab2]), pos_integer()}).
%% Property: a counter that saw every operation sequentially has the
%% same value as the merge of the two replicas.
prop_vgcounter2() ->
    ?FORALL(Ts, targets(),
            begin
                {A, B, C} = apply_ops(Ts),
                value(C) =:= value(merge(A, B))
            end).
%% EUnit entry point: runs all PropEr properties of this module and
%% expects no failing properties.  (Name kept as-is: the "_test"
%% suffix is what EUnit discovers.)
propper_test() ->
    ?assertEqual([], proper:module(?MODULE, [{to_file, user}, long_result])).
-endif.
|
4b43471f725fd8f40fe9a6d30d444e7fe1ff7409183155512aa1241dab8b3e7a | bytekid/mkbtt | uncurryx.ml | Copyright 2008 , Christian Sternagel ,
(* Copyright 2008, Christian Sternagel
 * GNU Lesser General Public License
*
* This file is part of TTT2 .
*
* TTT2 is free software : you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation , either version 3 of the License , or ( at your
* option ) any later version .
*
* TTT2 is distributed in the hope that it will be useful , but WITHOUT
* ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE . See the GNU Lesser General Public
* License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with TTT2 . If not , see < / > .
* GNU Lesser General Public License
*
* This file is part of TTT2.
*
* TTT2 is free software: you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* TTT2 is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with TTT2. If not, see </>.
*)
(*** OPENS ********************************************************************)
open Util;;
open Rewritingx;;
(*** MODULES ******************************************************************)
module C = Complexity;;
module F = Format;;
module Fun = Function;;
module M = Monad;;
module Sig = Signature;;
module FunList = struct
type t = Fun.t list;;
let compare = compare;;
let fprintf = List.fprintf Fun.fprintf ", ";;
end
module IntPair = struct
type t = (int * int);;
let compare = compare;;
let fprintf fmt (m, n)= F.fprintf fmt "(%d, %d)" m n;;
end
module FunMap = Map.Make (Fun) (FunList);;
module AAMap = Map.Make (Fun) (IntPair);;
(*** TYPES ********************************************************************)
type appsym_heuristic =
| MostFrequent
| Unique
;;
(* Heuristics to determine the applicative arity of a symbol.  The
   constructors Maximum and Minimum are referenced by the -aarity flag
   parsing, [init] and [solve]; their declarations were lost when the
   adjacent comments were stripped. *)
type aarity_heuristic =
  | Maximum (* maximal occurring applicative arity of TRS *)
  | Minimum (* minimal occurring applicative arity of TRS *)
  | MinLeft (* applicative arity minimized w.r.t. left-hand sides *)
;;
type flags = {
aarity_heuristic : aarity_heuristic ref;
appsym_heuristic : appsym_heuristic ref;
help : bool ref;
top : bool ref;
};;
type info = {
appsym : Fun.t;
symtab : FunMap.t;
};;
type t = {
info : info;
input : Problem.t;
output : Problem.t;
applicative_top : bool;
eta_rules : Trs.t;
};;
(*** GLOBALS ******************************************************************)
let code = "uncurryx";;
let name = "Extended Uncurrying Processor";;
let keywords = ["uncurrying";"transformation"];;
let comment = "Implements uncurrying for applicative systems.";;
(* Mutable configuration shared by [spec] and [solve]; reset to these
   defaults by [init] before each run. *)
let flags = {
  appsym_heuristic = ref MostFrequent;
  aarity_heuristic = ref Maximum;
  help = ref false;
  top = ref false;
};;
let no_such_heur h = failwith ("'"^h^"': no such heuristic");;
let spec =
let spec = [
("--help",Arg.Set flags.help,"Prints information about flags.");
("-help",Arg.Set flags.help,"Prints information about flags.");
("-h",Arg.Set flags.help,"Prints information about flags.");
("-top",Arg.Set flags.top,"Special version of uncurrying for DP symbols.");
("-appsym",Arg.String (function
| "max" -> flags.appsym_heuristic := MostFrequent
| "unique" -> flags.appsym_heuristic := Unique
| h -> no_such_heur h),
"Heuristic to determine the application symbol (max, unique).");
("-aarity",Arg.String (function
| "max" -> flags.aarity_heuristic := Maximum
| "min" -> flags.aarity_heuristic := Minimum
| "minlhs" -> flags.aarity_heuristic := MinLeft
| h -> no_such_heur h),
"Heuristic to determine the applicative arity (max, min, minlhs).");
] in
Arg.alignx 80 spec
;;
let help = (comment,keywords,List.map Triple.drop_snd spec);;
(*** FUNCTIONS ****************************************************************)
let (>>=) = M.(>>=);;
let (>>) = M.(>>);;
(* Resets all command line flags to their defaults; invoked by [solve]
   before parsing the processor's arguments. *)
let init _ =
  flags.appsym_heuristic := MostFrequent;
  flags.aarity_heuristic := Maximum;
  flags.help := false;
  flags.top := false;
;;
(* Printers for Debugging *)
let fprintf_aamap fmt tab =
AAMap.fold (fun f (min, max) m ->
M.fprintf_fun fmt f >>= fun _ ->
F.fprintf fmt " has arities from %d to %d@\n" min max;
m
) tab (M.return ())
;;
(* Printers *)
let fprintf_entry fmt f k fs =
M.fprintf_fun fmt f >>= fun _ ->
F.fprintf fmt " ==>";
M.iter (fun f ->
F.fprintf fmt " ";
M.fprintf_fun fmt f >>= fun _ ->
F.fprintf fmt "/";
M.find_ari f >>= fun n ->
M.return (F.fprintf fmt "%d" n)
) fs >>= fun _ ->
M.return (F.fprintf fmt "@\n")
;;
let fprintf_symtab fmt tab =
FunMap.fold (fun f fs m ->
M.find_ari f >>= fun k ->
fprintf_entry fmt f k fs >>= fun _ ->
m
) tab (M.return ())
;;
let fprintf_info fmt p =
F.fprintf fmt "application symbol: ";
M.fprintf_fun fmt p.info.appsym >>= fun _ ->
F.fprintf fmt "@\n@[<1>symbol table:@\n";
fprintf_symtab fmt p.info.symtab >>= fun _ ->
F.fprintf fmt "@]";
F.fprintf fmt "@\n@[<1>eta-rules:@\n";
Trs.fprintfm fmt p.eta_rules >>= fun _ ->
F.fprintf fmt "@]";
M.return ()
;;
let fprintf fs fmt p =
F.fprintf fmt "@[<1>%s:@\n" name;
fprintf_info fmt p >>= fun _ ->
F.fprintf fmt "@\n@[<1>problem:@\n";
Problem.fprintfm fmt p.output >>= fun _ ->
F.fprintf fmt "@]@\n";
List.hd fs fmt >>= fun _ ->
M.return (F.fprintf fmt "@]")
;;
(* Processor *)
(* unapply o (((t o s1) o ...) o sN) = (t, [s1; ...; sN]) *)
(* Strips off all outermost applications of the application symbol:
   unapply o (((t o s1) o ...) o sN) = (t, [s1; ...; sN]). *)
let rec unapply o t = match t with
  | Term.Fun (g, [lhs; rhs]) when Fun.equal o g ->
    let (hd, args) = unapply o lhs in
    (hd, args @ [rhs])
  | other -> (other, [])
;;
(* apply o t [s1; ...; sN] = ((t o s1) o ...) o sN *)
let apply o = List.foldl (fun t s -> Term.Fun (o, [t; s]));;
(* unapply_top # #(t1, t2, ..., tN) = (t1, [t2; ...; tN]) *)
(* Strips a single application of the top symbol [o].
   NOTE(review): the second component is [ts] wrapped in a singleton
   list ([ts], not ts), so e.g. [List.length] on it is always 1 for a
   matched application; this disagrees with the unapply_top comment
   above — confirm the intended top-uncurrying semantics before
   changing it. *)
let unapply_top o = function
| Term.Fun (f, t::ts) when Fun.equal o f -> (t, [ts])
| t -> (t, [])
;;
(* apply_top # t [t1; ...; tN] = #(t, t1, ..., tN) *)
let apply_top o t ts = Term.Fun (o, t::ts);;
(* Records that symbol [f] was seen applied to [aa] arguments by
   widening the (min, max) interval stored for [f] in [tab]. *)
let update_used_aarities f tab aa =
  if not (AAMap.mem f tab) then AAMap.add f (aa, aa) tab
  else
    let (lo, hi) = AAMap.find f tab in
    AAMap.add f (min lo aa, max hi aa) (AAMap.remove f tab)
;;
let rec used_aarities_term o tab t = match unapply o t with
| (Term.Fun (f, ts), ss) ->
let len = List.length ss in
let tab' = List.foldl (used_aarities_term o) tab (ts@ss) in
update_used_aarities f tab' len
| (_, ss) -> List.foldl (used_aarities_term o) tab ss
;;
let used_aarities_term_top o tab t = match unapply_top o t with
| (Term.Fun (f, _), ss) ->
let len = List.length ss in
update_used_aarities f tab len
| _ -> tab
;;
let used_aarities_rule o tab = Rule.fold (flip (used_aarities_term o)) tab;;
let used_aarities_rule_top o tab = Rule.fold (flip (used_aarities_term_top o)) tab;;
let used_aarities_trs o = Trs.fold (flip (used_aarities_rule o));;
let used_aarities_trs_top o = Trs.fold (flip (used_aarities_rule_top o));;
let rec is_head_var_free o t = match unapply o t with
| (Term.Var _, ts) -> ts = []
| (Term.Fun (f, ts), ss) ->
List.for_all (is_head_var_free o) ts &&
List.for_all (is_head_var_free o) ss
;;
let is_head_var_free_top o t = match unapply_top o t with
| (Term.Var _, ts) -> ts = []
| (Term.Fun (f, _), _) -> true
;;
let is_left_head_var_free o = List.for_all (is_head_var_free o) <.> Trs.lhs;;
let is_left_head_var_free_top o = List.for_all (is_head_var_free_top o) <.> Trs.lhs;;
let get_symbol tab f = List.nth (FunMap.find f tab);;
let get_default_symbol tab f =
if FunMap.mem f tab then get_symbol tab f 0
else f
;;
let aarity_fun tab f = List.length (FunMap.find f tab) - 1;;
let aarity_term_gen unapp o tab t = match unapp o t with
| (Term.Fun (f, _), ss) ->
Some (max 0 (aarity_fun tab f - List.length ss))
| _ -> None
;;
let aarity_term = aarity_term_gen unapply;;
let aarity_term_top = aarity_term_gen unapply_top;;
(* Uncurries a term: peels off the applications of [o], replaces the
   head symbol [f] by its uncurried variant [f_k] — where k is the
   number of application arguments absorbed, capped by the applicative
   arity of [f] — and re-applies any remaining arguments with [o].
   Head variables keep their applications untouched. *)
let rec uncurry_term o tab t = match unapply o t with
| (Term.Var _ as x, ss) -> apply o x (List.map (uncurry_term o tab) ss)
| (Term.Fun (f, ts), ss) ->
  (* uncurry direct and applied arguments first, then absorb as many
     applied arguments as the applicative arity of [f] allows *)
  let uts = List.map (uncurry_term o tab) ts in
  let uss = List.map (uncurry_term o tab) ss in
  let aa = aarity_fun tab f in
  let k = min (List.length ss) aa in
  let fk = get_symbol tab f k in
  apply o (Term.Fun (fk, (uts @ List.take k uss))) (List.drop k uss)
;;
let uncurry_term_top o tab = function
| Term.Var _ as x -> x
| Term.Fun (f, (Term.Fun (g, ss))::ts)
when Fun.equal f o && aarity_fun tab g > 0 ->
Term.Fun (get_symbol tab g 1, ss @ ts)
| Term.Fun (f, ts) ->
Term.Fun (get_default_symbol tab f, ts)
;;
let uncurry_trs o tab = Trs.project (Rule.project (uncurry_term o tab));;
let uncurry_trs_top o tab = Trs.project (Rule.project (uncurry_term_top o tab));;
let fresh_var = M.fresh_var >>= (M.return <.> Term.make_var);;
let fresh_vars n = M.replicate n fresh_var;;
(* Builds the eta-saturation rules for [rule]: while the lhs has
   positive applicative arity [aa], both sides are wrapped in an
   application of [o] (padded with fresh variables) and the resulting
   rule is emitted.
   NOTE(review): [let Some aa = ...] is a non-exhaustive binding and
   raises Match_failure when the lhs head is a variable — callers are
   expected to have checked head-variable freeness beforehand. *)
let eta_rules_rule_gen aarity o tab rule =
  let (l, r) = Rule.to_terms rule in
  let Some aa = aarity o tab l in
  let rec add l r aa rules =
    if aa <= 0 then rules else (
      M.find_ari o >>= fun n ->
      fresh_vars (n-1) >>= fun xs ->
      rules >>= fun rs ->
      let l' = Term.Fun (o, l :: xs) in
      let r' = Term.Fun (o, r :: xs) in
      add l' r' (aa-1) (M.return (Rule.of_terms l' r' :: rs))
    )
  in add l r aa (M.return [])
;;
let eta_rules_trs o tab trs =
M.lift Trs.of_list (M.flat_map
(eta_rules_rule_gen aarity_term o tab) (Trs.to_list trs))
;;
let eta_rules_trs_top o tab trs =
M.lift Trs.of_list (M.flat_map
(eta_rules_rule_gen aarity_term_top o tab) (Trs.to_list trs))
;;
(*
let fun_k f i =
M.find_ari f >>= fun n ->
M.set_curry ~arity:(n+i) f i
;;
*)
(* Creates the uncurried variant [f_i] of symbol [f]: a fresh symbol
   named "<f>_<i>" whose arity is the arity of [f] plus [i].  The
   variant inherits the DP marking of [f]. *)
let fun_k f i =
  M.find_fun_name f >>= fun id ->
  M.find_ari f >>= fun n ->
  let name = id ^ "_" ^ (string_of_int i) in
  M.create_fun (n+i) name >>= fun fk ->
  M.is_dp f >>= function
  | true -> M.set_dp fk
  | false -> M.return fk
;;
let fun_ks f n = M.replicatei (n+1) (fun_k f);;
(*
let fun_k_top o f i = if i = 0 then (
fun_k f 0
) else (
M.find_ari f >>= fun n ->
M.find_ari o >>= fun k ->
M.set_curry ~arity:(n+k-1) f i >>= fun fk ->
M.set_dp fk
);;
*)
let fun_k_top o f i = if i = 0 then (
fun_k f 0
) else (
M.find_fun_name f >>= fun id ->
M.find_fun_name o >>= fun ap ->
M.find_ari f >>= fun n ->
M.find_ari o >>= fun k ->
let name = id ^ "_" ^ ap ^ "_" ^ (string_of_int i) in
M.create_fun (n+k-1) name >>= fun fk ->
M.set_dp fk
);;
let fun_ks_top o f n = M.replicatei (n+1) (fun_k_top o f);;
let uncurry_rules_fun o tab f =
M.find_fun_name f >>= fun id ->
let aa = aarity_fun tab f in
M.find_ari f >>= fun n ->
fresh_vars (n+aa+1) >>= fun xs ->
let rec add k rules =
if k >= aa then rules else (
rules >>= fun rs ->
fun_k f k >>= fun fk ->
fun_k f (k+1) >>= fun fk1 ->
let (xs, y::_) = List.split_at (n+k) xs in
let l = apply o (Term.Fun (fk, xs)) [y] in
let r = Term.Fun (fk1, xs @ [y]) in
add (k+1) (M.return (Rule.of_terms l r :: rs))
)
in
add 0 (M.return [])
;;
let uncurrying_rules o tab fs =
M.lift Trs.of_list (M.flat_map (uncurry_rules_fun o tab) fs);;
let uncurry_rules_fun_top o tab =
FunMap.fold (fun f fs m -> match fs with
| [_; f_sharp] ->
m >>= fun rs ->
M.find_ari f >>= fun n ->
M.find_ari o >>= fun k ->
fresh_vars (n+k-1) >>= fun xs ->
let (ys, zs) = List.split_at n xs in
let l = apply_top o (Term.Fun (f, ys)) zs in
let r = Term.Fun (f_sharp, xs) in
M.return (Rule.of_terms l r :: rs)
| _ -> m
) tab (M.return [])
;;
let uncurrying_rules_top o tab = M.lift Trs.of_list (uncurry_rules_fun_top o tab);;
(* heuristics for finding the application symbol *)
(* [weighted_funas fs trs] pairs every symbol in [fs] with its arity
   and orders the pairs by how often the symbol occurs in [trs], most
   frequent first.  The ordering matters: [get_first] picks the head
   of this list for the "most frequent" heuristic. *)
let weighted_funas fs trs =
  M.map (fun f -> M.find_ari f >>= fun n -> M.return (f, n)) fs >>= fun fs1 ->
  M.map (fun fn -> M.return (fn, Trs.count_fun (fst fn) trs)) fs1 >>= fun fs2 ->
  (* sort by occurrence count, descending; previously [sorted] was
     computed but the unsorted [fs2] was returned, which made the
     "most frequent" heuristic pick an arbitrary symbol *)
  let sorted = List.sort (fun (_,v) (_,w) -> compare (-v) (-w)) fs2 in
  M.return (List.map fst sorted)
;;
(* [find_appsym_with_arity p get fs trs] keeps the candidate symbols
   whose arity satisfies [p] and extracts one of them via [get]. *)
let find_appsym_with_arity p get fs trs =
  weighted_funas fs trs >>= (M.return <.> (get <.> List.filter (p <.> snd)))
;;
(* extractors: first candidate vs. the only candidate *)
let get_first = function (f,_)::_ -> Some f | _ -> None;;
let get_unique = function [(f,_)] -> Some f | _ -> None;;
(* the "binary" variants insist on arity 2 (plain uncurrying); the
   others accept any arity > 0 (top variant for DP symbols) *)
let find_most_frequent_binary_appsym = find_appsym_with_arity ((=) 2) get_first;;
let find_most_frequent_appsym = find_appsym_with_arity ((<) 0) get_first;;
let find_unique_binary_appsym = find_appsym_with_arity ((=) 2) get_unique;;
let find_unique_appsym = find_appsym_with_arity ((<) 0) get_unique;;
(* heuristics for computing the symbol table *)
let symtab_for_maximal_occurring_aarity_gen used_aas funs tab o trs =
let uas = used_aas o tab trs in
AAMap.fold (fun f (_, u) m ->
m >>= fun tab ->
funs f u >>= fun fks ->
M.return (FunMap.add f fks tab)
) uas (M.return FunMap.empty)
;;
let symtab_for_maximal_occurring_aarity =
symtab_for_maximal_occurring_aarity_gen used_aarities_trs fun_ks AAMap.empty
;;
let symtab_for_maximal_occurring_top_aarity o trs =
let fs = List.remove o (Trs.funs trs) in
(* make sure that all function symbols are part of the map *)
let tab = List.foldl (fun tab f -> AAMap.add f (0, 0) tab) AAMap.empty fs in
symtab_for_maximal_occurring_aarity_gen
used_aarities_trs_top (fun_ks_top o) tab o trs
;;
let symtab_for_minimal_occurring_aarity_gen used_aas funs tab o trs =
let uas = used_aas o tab trs in
AAMap.fold (fun f (l, _) m ->
m >>= fun tab ->
funs f l >>= fun fks ->
M.return (FunMap.add f fks tab)
) uas (M.return FunMap.empty)
;;
let symtab_for_minimal_occurring_aarity =
symtab_for_minimal_occurring_aarity_gen used_aarities_trs fun_ks AAMap.empty
;;
let symtab_for_minimal_occurring_top_aarity o trs =
let fs = List.remove o (Trs.funs trs) in
let tab = List.foldl (fun tab f -> AAMap.add f (0, 0) tab) AAMap.empty fs in
symtab_for_minimal_occurring_aarity_gen
used_aarities_trs_top (fun_ks_top o) tab o trs
;;
let symtab_for_minimal_lhs_aarity_gen aarity maximal o trs =
maximal o trs >>= fun tab ->
let tab = Trs.fold (fun rule tab ->
let l = Rule.lhs rule in
let Some f = Term.root l in
let Some aa = aarity o tab l in
if FunMap.mem f tab && aa > 0 then (
let fs = FunMap.find f tab in
let tab' = FunMap.remove f tab in
FunMap.add f (List.take (List.length fs - aa) fs) tab'
) else tab
) tab trs
in
M.return tab
;;
let symtab_for_minimal_lhs_aarity =
symtab_for_minimal_lhs_aarity_gen
aarity_term symtab_for_maximal_occurring_aarity;;
let symtab_for_minimal_lhs_top_aarity =
symtab_for_minimal_lhs_aarity_gen
aarity_term_top symtab_for_maximal_occurring_top_aarity;;
let rec apply_symtab_term o tab = function
| Term.Var _ as x -> x
| Term.Fun (f, ts) ->
let ts' = List.map (apply_symtab_term o tab) ts in
if FunMap.mem f tab
then Term.Fun (get_symbol tab f 0, ts')
else Term.Fun (f, ts')
;;
let apply_symtab_trs o tab =
Trs.project (Rule.project (apply_symtab_term o tab));;
(* Replaces every symbol that has an entry in the symbol table by its
   0-ary uncurried variant throughout the output problem.  For DP
   problems the dependency graph is reset, because the renaming
   invalidates any previously computed graph. *)
let apply_symtab p =
  let o = p.info.appsym in
  let tab = p.info.symtab in
  let (dps, trs) = Problem.get_sw p.output in
  let dps' = apply_symtab_trs o tab dps in
  let trs' = apply_symtab_trs o tab trs in
  let output' = Problem.set_sw dps' trs' p.output in { p with
    (* reset graph, since all function symbols changed *)
    output = if Problem.is_dp output'
      then Problem.set_dg Problem.Complete output'
      else output';
  }
;;
let uncurry_dp_top find_o mk_symtab p =
let (dps,trs) = Problem.get_sw p in
let sharps = Trs.roots dps in
let both = Trs.union dps trs in
find_o sharps dps >>= function None -> M.return None | Some o -> (
if not (is_left_head_var_free_top o dps) then M.return None else (
mk_symtab o both >>= fun tab ->
M.exists (M.lift Term.is_var <.>
flip Trs.etcap trs <.> fst <.> unapply_top o <.> Rule.rhs)
(Trs.to_list dps) >>= (function
| false -> M.return (uncurry_trs_top o tab dps, trs, Trs.empty)
| true ->
uncurrying_rules_top o tab >>= fun us ->
eta_rules_trs_top o tab trs >>= fun eta ->
let uncurried_eta = uncurry_trs_top o tab eta in
let uncurried_dps = uncurry_trs_top o tab dps in
let dps' = Trs.union us (Trs.union uncurried_eta uncurried_dps) in
M.return (dps', trs, eta)
) >>= fun (dps', trs', eta) ->
let p' = Problem.set_sw dps' trs' p in
let info = { appsym = o; symtab = tab } in
M.return (Some (info, p', eta))
)
)
;;
let uncurry_dp find_o mk_symtab p =
let (dps,trs) = Problem.get_sw p in
let funs = List.union (Trs.funs trs) (Trs.funs dps) in
let nonsharps = List.diff funs (Trs.roots dps) in
let both = Trs.union dps trs in
find_o nonsharps both >>= function None -> M.return None | Some o -> (
if not (is_left_head_var_free o both) then M.return None else (
let fs' = List.remove o funs in
mk_symtab o both >>= fun tab ->
eta_rules_trs o tab trs >>= fun eta ->
uncurrying_rules o tab fs' >>= fun us ->
let dps' = uncurry_trs o tab dps in
let trs' = Trs.union (uncurry_trs o tab (Trs.union trs eta)) us in
let p' = Problem.set_sw dps' trs' p in
let info = { appsym = o; symtab = tab } in
M.return (Some (info, p', eta))
)
)
;;
let uncurry_sp find_o mk_symtab p =
let trs = Problem.get_trs p in
let fs = Trs.funs trs in
find_o fs trs >>= function None -> M.return None | Some o -> (
if not (is_left_head_var_free o trs) then M.return None else (
let fs' = List.remove o fs in
mk_symtab o trs >>= fun tab ->
eta_rules_trs o tab trs >>= fun eta ->
uncurrying_rules o tab fs' >>= fun us ->
let trs' = Trs.union (uncurry_trs o tab (Trs.union trs eta)) us in
let p' = Problem.set_trs trs' p in
let info = { appsym = o; symtab = tab } in
M.return (Some (info, p', eta))
)
)
;;
let solve_aux find_o mk_symtab top p =
if Problem.is_sp p then uncurry_sp find_o mk_symtab p
else if top && Problem.is_dp p then uncurry_dp_top find_o mk_symtab p
else if Problem.is_dp p then uncurry_dp find_o mk_symtab p
else M.return None
;;
(* Main entry point of the processor: parses the flags, selects the
   application-symbol and applicative-arity heuristics according to
   them, runs the appropriate uncurrying variant via [solve_aux] and
   finally renames the remaining 0-ary variants with [apply_symtab]. *)
let solve fs p =
  let configurate s = F.printf "%s@\n%!" s; flags.help := true in
  (try init (); Arg.parsex code spec fs with Arg.Bad s -> configurate s);
  if !(flags.help) then (Arg.usage spec ("Options for "^code^":"); exit 0);
  let top = !(flags.top) in
  (* heuristic that determines the application symbol *)
  let find_o = match !(flags.appsym_heuristic) with
    | MostFrequent -> if top
      then find_most_frequent_appsym
      else find_most_frequent_binary_appsym
    | Unique -> if top
      then find_unique_appsym
      else find_unique_binary_appsym
  in
  (* heuristic that determines the applicative arities *)
  let mk_symtab = match !(flags.aarity_heuristic) with
    | Maximum -> if top
      then symtab_for_maximal_occurring_top_aarity
      else symtab_for_maximal_occurring_aarity
    | Minimum -> if top
      then symtab_for_minimal_occurring_top_aarity
      else symtab_for_minimal_occurring_aarity
    | MinLeft -> if top
      then symtab_for_minimal_lhs_top_aarity
      else symtab_for_minimal_lhs_aarity
  in
  solve_aux find_o mk_symtab top p >>= function
  | None -> M.return None
  | Some (info, p', eta) ->
    let p = {
      info = info;
      applicative_top = top;
      input = p;
      output = p';
      eta_rules = eta;
    } in M.return (Some (apply_symtab p))
;;
(* Destructors *)
let get_ip p = p.input;;
let get_op p = p.output;;
(* Complexity Bounds *)
let complexity c _ = C.mul c C.constant;;
(* Compare Functions *)
let equal p q =
Problem.equal (get_ip p) (get_ip q) && Problem.equal (get_op p) (get_op q)
;;
(* XML Printing *)
let fprintfx_entry fmt f k fs =
F.fprintf fmt "@{<uncurriedSymbolEntry>";
M.fprintfx_fun fmt f >>= fun _ ->
F.fprintf fmt "@{<arity>%d@}" k;
M.iter (fun f ->
M.fprintfx_fun fmt f
) fs >>= fun _ ->
M.return (F.fprintf fmt "@}")
;;
let fprintfx_symtab fmt tab =
FunMap.fold (fun f fs m ->
M.find_ari f >>= fun k ->
fprintfx_entry fmt f k fs >>= fun _ ->
m
) tab (M.return ())
;;
let fprintfx_info fmt p =
F.fprintf fmt "@{<uncurryInformation>";
M.fprintfx_fun fmt p.info.appsym >>= fun _ ->
F.fprintf fmt "@{<uncurriedSymbols>";
fprintfx_symtab fmt p.info.symtab >>= fun _ ->
F.fprintf fmt "@}";
F.fprintf fmt "@{<etaRules>";
Trs.fprintfx fmt p.eta_rules >>= fun _ ->
F.fprintf fmt "@}";
M.return (F.fprintf fmt "@}")
;;
let fprintfx_applicative_top fmt p =
if p.applicative_top then (
M.find_ari p.info.appsym >>= fun n ->
F.fprintf fmt "@{<applicativeTop>%d@}" n;
M.return ()
) else M.return ()
;;
let fprintfx fs fmt p =
let tag = if Problem.is_sp (get_op p) then "uncurry" else "uncurryProc" in
F.fprintf fmt "@{<%s>" tag;
fprintfx_applicative_top fmt p >>= fun _ ->
fprintfx_info fmt p >>= fun _ ->
Problem.fprintfx fmt (get_op p) >>= fun _ ->
List.hd fs fmt >>= fun _ ->
M.return (F.fprintf fmt "@}")
;;
| null | https://raw.githubusercontent.com/bytekid/mkbtt/c2f8e0615389b52eabd12655fe48237aa0fe83fd/src/processors/src/transformation/uncurryx.ml | ocaml | ** OPENS *******************************************************************
** MODULES *****************************************************************
** TYPES *******************************************************************
** GLOBALS *****************************************************************
** FUNCTIONS ***************************************************************
Printers for Debugging
Printers
Processor
unapply o (((t o s1) o ...) o sN) = (t, [s1; ...; sN])
apply o t [s1; ...; sN] = ((t o s1) o ...) o sN
unapply_top # #(t1, t2, ..., tN) = (t1, [t2; ...; tN])
apply_top # t [t1; ...; tN] = #(t, t1, ..., tN)
let fun_k f i =
M.find_ari f >>= fun n ->
M.set_curry ~arity:(n+i) f i
;;
let fun_k_top o f i = if i = 0 then (
fun_k f 0
) else (
M.find_ari f >>= fun n ->
M.find_ari o >>= fun k ->
M.set_curry ~arity:(n+k-1) f i >>= fun fk ->
M.set_dp fk
);;
heuristics for finding the application symbol
heuristics for computing the symbol table
make sure that all function symbols are part of the map
reset graph, since all function symbols changed
Destructors
Complexity Bounds
Compare Functions
XML Printing | Copyright 2008 , Christian Sternagel ,
* GNU Lesser General Public License
*
* This file is part of TTT2 .
*
* TTT2 is free software : you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation , either version 3 of the License , or ( at your
* option ) any later version .
*
* TTT2 is distributed in the hope that it will be useful , but WITHOUT
* ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE . See the GNU Lesser General Public
* License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with TTT2 . If not , see < / > .
* GNU Lesser General Public License
*
* This file is part of TTT2.
*
* TTT2 is free software: you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* TTT2 is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with TTT2. If not, see </>.
*)
open Util;;
open Rewritingx;;
module C = Complexity;;
module F = Format;;
module Fun = Function;;
module M = Monad;;
module Sig = Signature;;
module FunList = struct
type t = Fun.t list;;
let compare = compare;;
let fprintf = List.fprintf Fun.fprintf ", ";;
end
module IntPair = struct
type t = (int * int);;
let compare = compare;;
let fprintf fmt (m, n)= F.fprintf fmt "(%d, %d)" m n;;
end
module FunMap = Map.Make (Fun) (FunList);;
module AAMap = Map.Make (Fun) (IntPair);;
type appsym_heuristic =
| MostFrequent
| Unique
;;
(* Heuristics to determine the applicative arity of a symbol.  The
   constructors Maximum and Minimum are referenced by the -aarity flag
   parsing, [init] and [solve]; their declarations were lost when the
   adjacent comments were stripped. *)
type aarity_heuristic =
  | Maximum (* maximal occurring applicative arity of TRS *)
  | Minimum (* minimal occurring applicative arity of TRS *)
  | MinLeft (* applicative arity minimized w.r.t. left-hand sides *)
;;
type flags = {
aarity_heuristic : aarity_heuristic ref;
appsym_heuristic : appsym_heuristic ref;
help : bool ref;
top : bool ref;
};;
type info = {
appsym : Fun.t;
symtab : FunMap.t;
};;
type t = {
info : info;
input : Problem.t;
output : Problem.t;
applicative_top : bool;
eta_rules : Trs.t;
};;
let code = "uncurryx";;
let name = "Extended Uncurrying Processor";;
let keywords = ["uncurrying";"transformation"];;
let comment = "Implements uncurrying for applicative systems.";;
let flags = {
appsym_heuristic = ref MostFrequent;
aarity_heuristic = ref Maximum;
help = ref false;
top = ref false;
};;
let no_such_heur h = failwith ("'"^h^"': no such heuristic");;
let spec =
let spec = [
("--help",Arg.Set flags.help,"Prints information about flags.");
("-help",Arg.Set flags.help,"Prints information about flags.");
("-h",Arg.Set flags.help,"Prints information about flags.");
("-top",Arg.Set flags.top,"Special version of uncurrying for DP symbols.");
("-appsym",Arg.String (function
| "max" -> flags.appsym_heuristic := MostFrequent
| "unique" -> flags.appsym_heuristic := Unique
| h -> no_such_heur h),
"Heuristic to determine the application symbol (max, unique).");
("-aarity",Arg.String (function
| "max" -> flags.aarity_heuristic := Maximum
| "min" -> flags.aarity_heuristic := Minimum
| "minlhs" -> flags.aarity_heuristic := MinLeft
| h -> no_such_heur h),
"Heuristic to determine the applicative arity (max, min, minlhs).");
] in
Arg.alignx 80 spec
;;
let help = (comment,keywords,List.map Triple.drop_snd spec);;
let (>>=) = M.(>>=);;
let (>>) = M.(>>);;
let init _ =
flags.appsym_heuristic := MostFrequent;
flags.aarity_heuristic := Maximum;
flags.help := false;
flags.top := false;
;;
let fprintf_aamap fmt tab =
AAMap.fold (fun f (min, max) m ->
M.fprintf_fun fmt f >>= fun _ ->
F.fprintf fmt " has arities from %d to %d@\n" min max;
m
) tab (M.return ())
;;
let fprintf_entry fmt f k fs =
M.fprintf_fun fmt f >>= fun _ ->
F.fprintf fmt " ==>";
M.iter (fun f ->
F.fprintf fmt " ";
M.fprintf_fun fmt f >>= fun _ ->
F.fprintf fmt "/";
M.find_ari f >>= fun n ->
M.return (F.fprintf fmt "%d" n)
) fs >>= fun _ ->
M.return (F.fprintf fmt "@\n")
;;
let fprintf_symtab fmt tab =
FunMap.fold (fun f fs m ->
M.find_ari f >>= fun k ->
fprintf_entry fmt f k fs >>= fun _ ->
m
) tab (M.return ())
;;
let fprintf_info fmt p =
F.fprintf fmt "application symbol: ";
M.fprintf_fun fmt p.info.appsym >>= fun _ ->
F.fprintf fmt "@\n@[<1>symbol table:@\n";
fprintf_symtab fmt p.info.symtab >>= fun _ ->
F.fprintf fmt "@]";
F.fprintf fmt "@\n@[<1>eta-rules:@\n";
Trs.fprintfm fmt p.eta_rules >>= fun _ ->
F.fprintf fmt "@]";
M.return ()
;;
let fprintf fs fmt p =
F.fprintf fmt "@[<1>%s:@\n" name;
fprintf_info fmt p >>= fun _ ->
F.fprintf fmt "@\n@[<1>problem:@\n";
Problem.fprintfm fmt p.output >>= fun _ ->
F.fprintf fmt "@]@\n";
List.hd fs fmt >>= fun _ ->
M.return (F.fprintf fmt "@]")
;;
let rec unapply o = function
| Term.Fun (f, [t1; t2]) when Fun.equal o f ->
let (h, ts) = unapply o t1
in (h, ts @ [t2])
| t -> (t, [])
;;
let apply o = List.foldl (fun t s -> Term.Fun (o, [t; s]));;
let unapply_top o = function
| Term.Fun (f, t::ts) when Fun.equal o f -> (t, [ts])
| t -> (t, [])
;;
let apply_top o t ts = Term.Fun (o, t::ts);;
let update_used_aarities f tab aa =
if AAMap.mem f tab then (
let (aa_min, aa_max) = AAMap.find f tab in
let tab' = AAMap.remove f tab in
AAMap.add f (min aa_min aa, max aa_max aa) tab'
) else AAMap.add f (aa, aa) tab
;;
let rec used_aarities_term o tab t = match unapply o t with
| (Term.Fun (f, ts), ss) ->
let len = List.length ss in
let tab' = List.foldl (used_aarities_term o) tab (ts@ss) in
update_used_aarities f tab' len
| (_, ss) -> List.foldl (used_aarities_term o) tab ss
;;
let used_aarities_term_top o tab t = match unapply_top o t with
| (Term.Fun (f, _), ss) ->
let len = List.length ss in
update_used_aarities f tab len
| _ -> tab
;;
let used_aarities_rule o tab = Rule.fold (flip (used_aarities_term o)) tab;;
let used_aarities_rule_top o tab = Rule.fold (flip (used_aarities_term_top o)) tab;;
let used_aarities_trs o = Trs.fold (flip (used_aarities_rule o));;
let used_aarities_trs_top o = Trs.fold (flip (used_aarities_rule_top o));;
let rec is_head_var_free o t = match unapply o t with
| (Term.Var _, ts) -> ts = []
| (Term.Fun (f, ts), ss) ->
List.for_all (is_head_var_free o) ts &&
List.for_all (is_head_var_free o) ss
;;
let is_head_var_free_top o t = match unapply_top o t with
| (Term.Var _, ts) -> ts = []
| (Term.Fun (f, _), _) -> true
;;
let is_left_head_var_free o = List.for_all (is_head_var_free o) <.> Trs.lhs;;
let is_left_head_var_free_top o = List.for_all (is_head_var_free_top o) <.> Trs.lhs;;
let get_symbol tab f = List.nth (FunMap.find f tab);;
let get_default_symbol tab f =
if FunMap.mem f tab then get_symbol tab f 0
else f
;;
let aarity_fun tab f = List.length (FunMap.find f tab) - 1;;
let aarity_term_gen unapp o tab t = match unapp o t with
| (Term.Fun (f, _), ss) ->
Some (max 0 (aarity_fun tab f - List.length ss))
| _ -> None
;;
let aarity_term = aarity_term_gen unapply;;
let aarity_term_top = aarity_term_gen unapply_top;;
let rec uncurry_term o tab t = match unapply o t with
| (Term.Var _ as x, ss) -> apply o x (List.map (uncurry_term o tab) ss)
| (Term.Fun (f, ts), ss) ->
let uts = List.map (uncurry_term o tab) ts in
let uss = List.map (uncurry_term o tab) ss in
let aa = aarity_fun tab f in
let k = min (List.length ss) aa in
let fk = get_symbol tab f k in
apply o (Term.Fun (fk, (uts @ List.take k uss))) (List.drop k uss)
;;
let uncurry_term_top o tab = function
| Term.Var _ as x -> x
| Term.Fun (f, (Term.Fun (g, ss))::ts)
when Fun.equal f o && aarity_fun tab g > 0 ->
Term.Fun (get_symbol tab g 1, ss @ ts)
| Term.Fun (f, ts) ->
Term.Fun (get_default_symbol tab f, ts)
;;
let uncurry_trs o tab = Trs.project (Rule.project (uncurry_term o tab));;
let uncurry_trs_top o tab = Trs.project (Rule.project (uncurry_term_top o tab));;
let fresh_var = M.fresh_var >>= (M.return <.> Term.make_var);;
let fresh_vars n = M.replicate n fresh_var;;
let eta_rules_rule_gen aarity o tab rule =
let (l, r) = Rule.to_terms rule in
let Some aa = aarity o tab l in
let rec add l r aa rules =
if aa <= 0 then rules else (
M.find_ari o >>= fun n ->
fresh_vars (n-1) >>= fun xs ->
rules >>= fun rs ->
let l' = Term.Fun (o, l :: xs) in
let r' = Term.Fun (o, r :: xs) in
add l' r' (aa-1) (M.return (Rule.of_terms l' r' :: rs))
)
in add l r aa (M.return [])
;;
let eta_rules_trs o tab trs =
M.lift Trs.of_list (M.flat_map
(eta_rules_rule_gen aarity_term o tab) (Trs.to_list trs))
;;
let eta_rules_trs_top o tab trs =
M.lift Trs.of_list (M.flat_map
(eta_rules_rule_gen aarity_term_top o tab) (Trs.to_list trs))
;;
let fun_k f i =
M.find_fun_name f >>= fun id ->
M.find_ari f >>= fun n ->
let name = id ^ "_" ^ (string_of_int i) in
M.create_fun (n+i) name >>= fun fk ->
M.is_dp f >>= function
| true -> M.set_dp fk
| false -> M.return fk
;;
let fun_ks f n = M.replicatei (n+1) (fun_k f);;
let fun_k_top o f i = if i = 0 then (
fun_k f 0
) else (
M.find_fun_name f >>= fun id ->
M.find_fun_name o >>= fun ap ->
M.find_ari f >>= fun n ->
M.find_ari o >>= fun k ->
let name = id ^ "_" ^ ap ^ "_" ^ (string_of_int i) in
M.create_fun (n+k-1) name >>= fun fk ->
M.set_dp fk
);;
let fun_ks_top o f n = M.replicatei (n+1) (fun_k_top o f);;
let uncurry_rules_fun o tab f =
M.find_fun_name f >>= fun id ->
let aa = aarity_fun tab f in
M.find_ari f >>= fun n ->
fresh_vars (n+aa+1) >>= fun xs ->
let rec add k rules =
if k >= aa then rules else (
rules >>= fun rs ->
fun_k f k >>= fun fk ->
fun_k f (k+1) >>= fun fk1 ->
let (xs, y::_) = List.split_at (n+k) xs in
let l = apply o (Term.Fun (fk, xs)) [y] in
let r = Term.Fun (fk1, xs @ [y]) in
add (k+1) (M.return (Rule.of_terms l r :: rs))
)
in
add 0 (M.return [])
;;
let uncurrying_rules o tab fs =
M.lift Trs.of_list (M.flat_map (uncurry_rules_fun o tab) fs);;
let uncurry_rules_fun_top o tab =
FunMap.fold (fun f fs m -> match fs with
| [_; f_sharp] ->
m >>= fun rs ->
M.find_ari f >>= fun n ->
M.find_ari o >>= fun k ->
fresh_vars (n+k-1) >>= fun xs ->
let (ys, zs) = List.split_at n xs in
let l = apply_top o (Term.Fun (f, ys)) zs in
let r = Term.Fun (f_sharp, xs) in
M.return (Rule.of_terms l r :: rs)
| _ -> m
) tab (M.return [])
;;
let uncurrying_rules_top o tab = M.lift Trs.of_list (uncurry_rules_fun_top o tab);;
let weighted_funas fs trs =
M.map (fun f -> M.find_ari f >>= fun n -> M.return (f, n)) fs >>= fun fs1 ->
M.map (fun fn -> M.return (fn, Trs.count_fun (fst fn) trs)) fs1 >>= fun fs2 ->
let sorted = List.sort (fun (_,v) (_,w) -> compare (-v) (-w)) fs2 in
M.return (List.map fst fs2)
;;
let find_appsym_with_arity p get fs trs =
weighted_funas fs trs >>= (M.return <.> (get <.> List.filter (p <.> snd)))
;;
let get_first = function (f,_)::_ -> Some f | _ -> None;;
let get_unique = function [(f,_)] -> Some f | _ -> None;;
let find_most_frequent_binary_appsym = find_appsym_with_arity ((=) 2) get_first;;
let find_most_frequent_appsym = find_appsym_with_arity ((<) 0) get_first;;
let find_unique_binary_appsym = find_appsym_with_arity ((=) 2) get_unique;;
let find_unique_appsym = find_appsym_with_arity ((<) 0) get_unique;;
let symtab_for_maximal_occurring_aarity_gen used_aas funs tab o trs =
let uas = used_aas o tab trs in
AAMap.fold (fun f (_, u) m ->
m >>= fun tab ->
funs f u >>= fun fks ->
M.return (FunMap.add f fks tab)
) uas (M.return FunMap.empty)
;;
let symtab_for_maximal_occurring_aarity =
symtab_for_maximal_occurring_aarity_gen used_aarities_trs fun_ks AAMap.empty
;;
let symtab_for_maximal_occurring_top_aarity o trs =
let fs = List.remove o (Trs.funs trs) in
let tab = List.foldl (fun tab f -> AAMap.add f (0, 0) tab) AAMap.empty fs in
symtab_for_maximal_occurring_aarity_gen
used_aarities_trs_top (fun_ks_top o) tab o trs
;;
let symtab_for_minimal_occurring_aarity_gen used_aas funs tab o trs =
let uas = used_aas o tab trs in
AAMap.fold (fun f (l, _) m ->
m >>= fun tab ->
funs f l >>= fun fks ->
M.return (FunMap.add f fks tab)
) uas (M.return FunMap.empty)
;;
let symtab_for_minimal_occurring_aarity =
symtab_for_minimal_occurring_aarity_gen used_aarities_trs fun_ks AAMap.empty
;;
let symtab_for_minimal_occurring_top_aarity o trs =
let fs = List.remove o (Trs.funs trs) in
let tab = List.foldl (fun tab f -> AAMap.add f (0, 0) tab) AAMap.empty fs in
symtab_for_minimal_occurring_aarity_gen
used_aarities_trs_top (fun_ks_top o) tab o trs
;;
let symtab_for_minimal_lhs_aarity_gen aarity maximal o trs =
maximal o trs >>= fun tab ->
let tab = Trs.fold (fun rule tab ->
let l = Rule.lhs rule in
let Some f = Term.root l in
let Some aa = aarity o tab l in
if FunMap.mem f tab && aa > 0 then (
let fs = FunMap.find f tab in
let tab' = FunMap.remove f tab in
FunMap.add f (List.take (List.length fs - aa) fs) tab'
) else tab
) tab trs
in
M.return tab
;;
let symtab_for_minimal_lhs_aarity =
symtab_for_minimal_lhs_aarity_gen
aarity_term symtab_for_maximal_occurring_aarity;;
let symtab_for_minimal_lhs_top_aarity =
symtab_for_minimal_lhs_aarity_gen
aarity_term_top symtab_for_maximal_occurring_top_aarity;;
let rec apply_symtab_term o tab = function
| Term.Var _ as x -> x
| Term.Fun (f, ts) ->
let ts' = List.map (apply_symtab_term o tab) ts in
if FunMap.mem f tab
then Term.Fun (get_symbol tab f 0, ts')
else Term.Fun (f, ts')
;;
let apply_symtab_trs o tab =
Trs.project (Rule.project (apply_symtab_term o tab));;
let apply_symtab p =
let o = p.info.appsym in
let tab = p.info.symtab in
let (dps, trs) = Problem.get_sw p.output in
let dps' = apply_symtab_trs o tab dps in
let trs' = apply_symtab_trs o tab trs in
let output' = Problem.set_sw dps' trs' p.output in { p with
output = if Problem.is_dp output'
then Problem.set_dg Problem.Complete output'
else output';
}
;;
let uncurry_dp_top find_o mk_symtab p =
let (dps,trs) = Problem.get_sw p in
let sharps = Trs.roots dps in
let both = Trs.union dps trs in
find_o sharps dps >>= function None -> M.return None | Some o -> (
if not (is_left_head_var_free_top o dps) then M.return None else (
mk_symtab o both >>= fun tab ->
M.exists (M.lift Term.is_var <.>
flip Trs.etcap trs <.> fst <.> unapply_top o <.> Rule.rhs)
(Trs.to_list dps) >>= (function
| false -> M.return (uncurry_trs_top o tab dps, trs, Trs.empty)
| true ->
uncurrying_rules_top o tab >>= fun us ->
eta_rules_trs_top o tab trs >>= fun eta ->
let uncurried_eta = uncurry_trs_top o tab eta in
let uncurried_dps = uncurry_trs_top o tab dps in
let dps' = Trs.union us (Trs.union uncurried_eta uncurried_dps) in
M.return (dps', trs, eta)
) >>= fun (dps', trs', eta) ->
let p' = Problem.set_sw dps' trs' p in
let info = { appsym = o; symtab = tab } in
M.return (Some (info, p', eta))
)
)
;;
let uncurry_dp find_o mk_symtab p =
let (dps,trs) = Problem.get_sw p in
let funs = List.union (Trs.funs trs) (Trs.funs dps) in
let nonsharps = List.diff funs (Trs.roots dps) in
let both = Trs.union dps trs in
find_o nonsharps both >>= function None -> M.return None | Some o -> (
if not (is_left_head_var_free o both) then M.return None else (
let fs' = List.remove o funs in
mk_symtab o both >>= fun tab ->
eta_rules_trs o tab trs >>= fun eta ->
uncurrying_rules o tab fs' >>= fun us ->
let dps' = uncurry_trs o tab dps in
let trs' = Trs.union (uncurry_trs o tab (Trs.union trs eta)) us in
let p' = Problem.set_sw dps' trs' p in
let info = { appsym = o; symtab = tab } in
M.return (Some (info, p', eta))
)
)
;;
let uncurry_sp find_o mk_symtab p =
let trs = Problem.get_trs p in
let fs = Trs.funs trs in
find_o fs trs >>= function None -> M.return None | Some o -> (
if not (is_left_head_var_free o trs) then M.return None else (
let fs' = List.remove o fs in
mk_symtab o trs >>= fun tab ->
eta_rules_trs o tab trs >>= fun eta ->
uncurrying_rules o tab fs' >>= fun us ->
let trs' = Trs.union (uncurry_trs o tab (Trs.union trs eta)) us in
let p' = Problem.set_trs trs' p in
let info = { appsym = o; symtab = tab } in
M.return (Some (info, p', eta))
)
)
;;
let solve_aux find_o mk_symtab top p =
if Problem.is_sp p then uncurry_sp find_o mk_symtab p
else if top && Problem.is_dp p then uncurry_dp_top find_o mk_symtab p
else if Problem.is_dp p then uncurry_dp find_o mk_symtab p
else M.return None
;;
let solve fs p =
let configurate s = F.printf "%s@\n%!" s; flags.help := true in
(try init (); Arg.parsex code spec fs with Arg.Bad s -> configurate s);
if !(flags.help) then (Arg.usage spec ("Options for "^code^":"); exit 0);
let top = !(flags.top) in
let find_o = match !(flags.appsym_heuristic) with
| MostFrequent -> if top
then find_most_frequent_appsym
else find_most_frequent_binary_appsym
| Unique -> if top
then find_unique_appsym
else find_unique_binary_appsym
in
let mk_symtab = match !(flags.aarity_heuristic) with
| Maximum -> if top
then symtab_for_maximal_occurring_top_aarity
else symtab_for_maximal_occurring_aarity
| Minimum -> if top
then symtab_for_minimal_occurring_top_aarity
else symtab_for_minimal_occurring_aarity
| MinLeft -> if top
then symtab_for_minimal_lhs_top_aarity
else symtab_for_minimal_lhs_aarity
in
solve_aux find_o mk_symtab top p >>= function
| None -> M.return None
| Some (info, p', eta) ->
let p = {
info = info;
applicative_top = top;
input = p;
output = p';
eta_rules = eta;
} in M.return (Some (apply_symtab p))
;;
let get_ip p = p.input;;
let get_op p = p.output;;
let complexity c _ = C.mul c C.constant;;
let equal p q =
Problem.equal (get_ip p) (get_ip q) && Problem.equal (get_op p) (get_op q)
;;
let fprintfx_entry fmt f k fs =
F.fprintf fmt "@{<uncurriedSymbolEntry>";
M.fprintfx_fun fmt f >>= fun _ ->
F.fprintf fmt "@{<arity>%d@}" k;
M.iter (fun f ->
M.fprintfx_fun fmt f
) fs >>= fun _ ->
M.return (F.fprintf fmt "@}")
;;
let fprintfx_symtab fmt tab =
FunMap.fold (fun f fs m ->
M.find_ari f >>= fun k ->
fprintfx_entry fmt f k fs >>= fun _ ->
m
) tab (M.return ())
;;
let fprintfx_info fmt p =
F.fprintf fmt "@{<uncurryInformation>";
M.fprintfx_fun fmt p.info.appsym >>= fun _ ->
F.fprintf fmt "@{<uncurriedSymbols>";
fprintfx_symtab fmt p.info.symtab >>= fun _ ->
F.fprintf fmt "@}";
F.fprintf fmt "@{<etaRules>";
Trs.fprintfx fmt p.eta_rules >>= fun _ ->
F.fprintf fmt "@}";
M.return (F.fprintf fmt "@}")
;;
let fprintfx_applicative_top fmt p =
if p.applicative_top then (
M.find_ari p.info.appsym >>= fun n ->
F.fprintf fmt "@{<applicativeTop>%d@}" n;
M.return ()
) else M.return ()
;;
let fprintfx fs fmt p =
let tag = if Problem.is_sp (get_op p) then "uncurry" else "uncurryProc" in
F.fprintf fmt "@{<%s>" tag;
fprintfx_applicative_top fmt p >>= fun _ ->
fprintfx_info fmt p >>= fun _ ->
Problem.fprintfx fmt (get_op p) >>= fun _ ->
List.hd fs fmt >>= fun _ ->
M.return (F.fprintf fmt "@}")
;;
|
9cdda43983c33168da5d7b045364e14b68786ddefa24394bb9e9e3626a0eb164 | wilkerlucio/pathom-viz | trace_cards.cljs | (ns com.wsscode.pathom.viz.trace-cards
(:require [com.wsscode.pathom.viz.trace :as trace]
[nubank.workspaces.card-types.fulcro3 :as ct.fulcro]
[nubank.workspaces.core :as ws]
[nubank.workspaces.model :as wsm]
[com.wsscode.pathom.connect :as pc]
[com.wsscode.common.async-cljs :refer [<? go-catch]]
[cljs.core.async :as async]
[com.wsscode.pathom.core :as p]))
(def indexes (atom {}))
(defmulti resolver-fn pc/resolver-dispatch)
(def defresolver (pc/resolver-factory resolver-fn indexes))
(defmulti mutation-fn pc/mutation-dispatch)
(def defmutation (pc/mutation-factory mutation-fn indexes))
(def color-map
{1 "red"
2 "green"
3 "blue"})
(defresolver `color
{::pc/input #{::id}
::pc/output [::color]
::pc/batch? true}
(pc/batch-resolver
(fn [_env {::keys [id]}]
(go-catch
(<? (async/timeout 300))
{::color
(get color-map id "black")}))
(fn [_env ids]
(go-catch
(<? (async/timeout 300))
(mapv
#(hash-map ::color (get color-map (::id %) "black"))
ids)))))
(defresolver `weight
{::pc/input #{::id}
::pc/output [::weight ::size]}
(fn [_env {::keys [id]}]
(go-catch
(<? (async/timeout 100))
{::weight
(case id
1 30
2 80
3 200
0)
::size
(case id
1 1
2 3
3 9
0)})))
(defresolver `rel
{::pc/input #{::id}
::pc/output [::relation]}
(fn [_env {::keys [id]}]
(go-catch
(<? (async/timeout 50))
{::relation
{::id
(case id
1 2
2 3
3 1
1)}})))
(defresolver `all
{::pc/output [{::all [::id]}]}
(fn [_env _]
{::all [{::id 1} {::id 2} {::id 3} {::id 2}]}))
(defresolver `error
{::pc/input #{}
::pc/output [::error]}
(fn [_env _]
(throw (ex-info "Error" {:ex "data"}))))
(defresolver `darken-color
{::pc/input #{::color}
::pc/output [::color-darken]}
(fn [_env {::keys [color]}]
(go-catch
(<? (async/timeout 20))
{::color-darken (str color "-darken")})))
(defresolver `lighter-color
{::pc/input #{::color}
::pc/output [::color-lighter]}
(fn [_env {::keys [color]}]
(go-catch
(<? (async/timeout 50))
{::color-lighter (str color "-lighter")})))
(def demo-parser
(p/parallel-parser {::p/env {::p/reader [p/map-reader pc/all-parallel-readers
p/env-placeholder-reader]
::pc/resolver-dispatch resolver-fn
::pc/mutate-dispatch mutation-fn
::pc/indexes @indexes
::p/placeholder-prefixes #{">"}}
::p/mutate pc/mutate-async
::p/plugins [p/error-handler-plugin
p/request-cache-plugin
p/trace-plugin]}))
(def sample-trace
'{:start 0
:path []
:duration 366
:details [{:event "process-pending" :duration 0 :start 366 :provides #{[:dev.playground/id 1]} :merge-result? true}]
:children [{:start 1
:path [[:dev.playground/id 1]]
:duration 365
:details [{:event "compute-plan" :duration 0 :start 1}
{:event "call-read" :duration 0 :start 1}
{:event "async-return" :duration 0 :start 1}
{:event "process-pending"
:duration 0
:start 109
:provides #{:dev.playground/size :dev.playground/weight}
:merge-result? false}
{:event "reset-loop"
:duration 0
:start 110
:loop-keys [:dev.playground/size :dev.playground/weight]}
{:event "process-pending"
:duration 0
:start 305
:provides #{:dev.playground/color}
:merge-result? false}
{:event "reset-loop" :duration 0 :start 305 :loop-keys [:dev.playground/color]}
{:event "process-pending"
:duration 0
:start 330
:provides #{:dev.playground/color-darken}
:merge-result? false}
{:event "reset-loop" :duration 0 :start 330 :loop-keys [:dev.playground/color-darken]}
{:event "process-pending"
:duration 0
:start 362
:provides #{:dev.playground/color-lighter}
:merge-result? false}
{:event "reset-loop" :duration 0 :start 362 :loop-keys [:dev.playground/color-lighter]}
{:event "merge-result" :duration 0 :start 366}]
:name "[:dev.playground/id 1]"
:children [{:start 1
:path [[:dev.playground/id 1] :dev.playground/color-lighter]
:duration 364
:details [{:event "compute-plan"
:duration 0
:start 1
:plan (([:dev.playground/color dev.playground/color]
[:dev.playground/color-lighter dev.playground/lighter-color]))}
{:event "call-resolver-with-cache"
:duration 0
:start 1
:input-data #:dev.playground{:id 1}
:sym dev.playground/color
:key :dev.playground/color-lighter}
{:event "call-read" :duration 0 :start 1}
{:event "schedule-resolver"
:duration 0
:start 2
:input-data #:dev.playground{:id 1}
:sym dev.playground/color
:key :dev.playground/color-lighter}
{:event "call-resolver"
:duration 303
:start 2
:input-data #:dev.playground{:id 1}
:sym dev.playground/color
:key :dev.playground/color-lighter}
{:event "merge-resolver-response"
:duration 0
:start 305
:sym dev.playground/color
:key :dev.playground/color-lighter}
{:event "call-resolver-with-cache"
:duration 0
:start 305
:input-data #:dev.playground{:color "red"}
:sym dev.playground/lighter-color
:key :dev.playground/color-lighter}
{:event "schedule-resolver"
:duration 0
:start 308
:input-data #:dev.playground{:color "red"}
:sym dev.playground/lighter-color
:key :dev.playground/color-lighter}
{:event "call-resolver"
:duration 53
:start 308
:input-data #:dev.playground{:color "red"}
:sym dev.playground/lighter-color
:key :dev.playground/color-lighter}
{:event "merge-resolver-response"
:duration 0
:start 362
:sym dev.playground/lighter-color
:key :dev.playground/color-lighter}
{:event "call-read" :duration 0 :start 362}
{:event "value-return" :duration 0 :start 365}]
:name ":dev.playground/color-lighter"}
{:start 3
:path [[:dev.playground/id 1] :dev.playground/color]
:duration 303
:details [{:event "skip-wait-key" :duration 0 :start 3}
{:event "call-read" :duration 0 :start 305}
{:event "value-return" :duration 0 :start 306}]
:name ":dev.playground/color"}
{:start 3
:path [[:dev.playground/id 1] :dev.playground/weight]
:duration 107
:details [{:event "compute-plan"
:duration 0
:start 4
:plan (([:dev.playground/weight dev.playground/weight]))}
{:event "call-resolver-with-cache"
:duration 0
:start 4
:input-data #:dev.playground{:id 1}
:sym dev.playground/weight
:key :dev.playground/weight}
{:event "call-read" :duration 0 :start 4}
{:event "schedule-resolver"
:duration 0
:start 6
:input-data #:dev.playground{:id 1}
:sym dev.playground/weight
:key :dev.playground/weight}
{:event "call-resolver"
:duration 103
:start 6
:input-data #:dev.playground{:id 1}
:sym dev.playground/weight
:key :dev.playground/weight}
{:event "merge-resolver-response"
:duration 0
:start 109
:sym dev.playground/weight
:key :dev.playground/weight}
{:event "call-read" :duration 0 :start 110}
{:event "value-return" :duration 0 :start 110}]
:name ":dev.playground/weight"}
{:start 6
:path [[:dev.playground/id 1] :dev.playground/color-darken]
:duration 325
:details [{:event "compute-plan"
:duration 1
:start 6
:plan (([:dev.playground/color dev.playground/color]
[:dev.playground/color-darken dev.playground/darken-color]))}
{:event "call-read" :duration 0 :start 6}
{:event "waiting-resolver"
:duration 0
:start 7
:waiting-key :dev.playground/color
:input-data #:dev.playground{:id 1}
:sym dev.playground/color
:key :dev.playground/color-darken}
{:event "call-resolver-with-cache"
:duration 0
:start 305
:input-data #:dev.playground{:color "red"}
:sym dev.playground/darken-color
:key :dev.playground/color-darken}
{:event "schedule-resolver"
:duration 0
:start 306
:input-data #:dev.playground{:color "red"}
:sym dev.playground/darken-color
:key :dev.playground/color-darken}
{:event "call-resolver"
:duration 24
:start 306
:input-data #:dev.playground{:color "red"}
:sym dev.playground/darken-color
:key :dev.playground/color-darken}
{:event "merge-resolver-response"
:duration 0
:start 330
:sym dev.playground/darken-color
:key :dev.playground/color-darken}
{:event "call-read" :duration 0 :start 330}
{:event "value-return" :duration 0 :start 331}]
:name ":dev.playground/color-darken"}
{:start 9
:path [[:dev.playground/id 1] :dev.playground/size]
:duration 101
:details [{:event "skip-wait-key" :duration 0 :start 9}
{:event "call-read" :duration 0 :start 110}
{:event "value-return" :duration 0 :start 110}]
:name ":dev.playground/size"}]}
{:start 1
:path [:com.wsscode.pathom/trace]
:duration 0
:details [{:event "compute-plan" :duration 0 :start 1}
{:event "call-read" :duration 0 :start 1}
{:event "value-return" :duration 0 :start 1}]
:name ":com.wsscode.pathom/trace"}]
:hint "Query"})
(ws/defcard trace-view-card
{::wsm/align {:flex 1}}
(ct.fulcro/fulcro-card
{::ct.fulcro/root trace/D3Trace
::ct.fulcro/initial-state (fn [_]
{:trace-data sample-trace})}))
| null | https://raw.githubusercontent.com/wilkerlucio/pathom-viz/6bed1d2adf556655892213de2d50fbe2db3c5423/src/cards/com/wsscode/pathom/viz/trace_cards.cljs | clojure | (ns com.wsscode.pathom.viz.trace-cards
(:require [com.wsscode.pathom.viz.trace :as trace]
[nubank.workspaces.card-types.fulcro3 :as ct.fulcro]
[nubank.workspaces.core :as ws]
[nubank.workspaces.model :as wsm]
[com.wsscode.pathom.connect :as pc]
[com.wsscode.common.async-cljs :refer [<? go-catch]]
[cljs.core.async :as async]
[com.wsscode.pathom.core :as p]))
(def indexes (atom {}))
(defmulti resolver-fn pc/resolver-dispatch)
(def defresolver (pc/resolver-factory resolver-fn indexes))
(defmulti mutation-fn pc/mutation-dispatch)
(def defmutation (pc/mutation-factory mutation-fn indexes))
(def color-map
{1 "red"
2 "green"
3 "blue"})
(defresolver `color
{::pc/input #{::id}
::pc/output [::color]
::pc/batch? true}
(pc/batch-resolver
(fn [_env {::keys [id]}]
(go-catch
(<? (async/timeout 300))
{::color
(get color-map id "black")}))
(fn [_env ids]
(go-catch
(<? (async/timeout 300))
(mapv
#(hash-map ::color (get color-map (::id %) "black"))
ids)))))
(defresolver `weight
{::pc/input #{::id}
::pc/output [::weight ::size]}
(fn [_env {::keys [id]}]
(go-catch
(<? (async/timeout 100))
{::weight
(case id
1 30
2 80
3 200
0)
::size
(case id
1 1
2 3
3 9
0)})))
(defresolver `rel
{::pc/input #{::id}
::pc/output [::relation]}
(fn [_env {::keys [id]}]
(go-catch
(<? (async/timeout 50))
{::relation
{::id
(case id
1 2
2 3
3 1
1)}})))
(defresolver `all
{::pc/output [{::all [::id]}]}
(fn [_env _]
{::all [{::id 1} {::id 2} {::id 3} {::id 2}]}))
(defresolver `error
{::pc/input #{}
::pc/output [::error]}
(fn [_env _]
(throw (ex-info "Error" {:ex "data"}))))
(defresolver `darken-color
{::pc/input #{::color}
::pc/output [::color-darken]}
(fn [_env {::keys [color]}]
(go-catch
(<? (async/timeout 20))
{::color-darken (str color "-darken")})))
(defresolver `lighter-color
{::pc/input #{::color}
::pc/output [::color-lighter]}
(fn [_env {::keys [color]}]
(go-catch
(<? (async/timeout 50))
{::color-lighter (str color "-lighter")})))
(def demo-parser
(p/parallel-parser {::p/env {::p/reader [p/map-reader pc/all-parallel-readers
p/env-placeholder-reader]
::pc/resolver-dispatch resolver-fn
::pc/mutate-dispatch mutation-fn
::pc/indexes @indexes
::p/placeholder-prefixes #{">"}}
::p/mutate pc/mutate-async
::p/plugins [p/error-handler-plugin
p/request-cache-plugin
p/trace-plugin]}))
(def sample-trace
'{:start 0
:path []
:duration 366
:details [{:event "process-pending" :duration 0 :start 366 :provides #{[:dev.playground/id 1]} :merge-result? true}]
:children [{:start 1
:path [[:dev.playground/id 1]]
:duration 365
:details [{:event "compute-plan" :duration 0 :start 1}
{:event "call-read" :duration 0 :start 1}
{:event "async-return" :duration 0 :start 1}
{:event "process-pending"
:duration 0
:start 109
:provides #{:dev.playground/size :dev.playground/weight}
:merge-result? false}
{:event "reset-loop"
:duration 0
:start 110
:loop-keys [:dev.playground/size :dev.playground/weight]}
{:event "process-pending"
:duration 0
:start 305
:provides #{:dev.playground/color}
:merge-result? false}
{:event "reset-loop" :duration 0 :start 305 :loop-keys [:dev.playground/color]}
{:event "process-pending"
:duration 0
:start 330
:provides #{:dev.playground/color-darken}
:merge-result? false}
{:event "reset-loop" :duration 0 :start 330 :loop-keys [:dev.playground/color-darken]}
{:event "process-pending"
:duration 0
:start 362
:provides #{:dev.playground/color-lighter}
:merge-result? false}
{:event "reset-loop" :duration 0 :start 362 :loop-keys [:dev.playground/color-lighter]}
{:event "merge-result" :duration 0 :start 366}]
:name "[:dev.playground/id 1]"
:children [{:start 1
:path [[:dev.playground/id 1] :dev.playground/color-lighter]
:duration 364
:details [{:event "compute-plan"
:duration 0
:start 1
:plan (([:dev.playground/color dev.playground/color]
[:dev.playground/color-lighter dev.playground/lighter-color]))}
{:event "call-resolver-with-cache"
:duration 0
:start 1
:input-data #:dev.playground{:id 1}
:sym dev.playground/color
:key :dev.playground/color-lighter}
{:event "call-read" :duration 0 :start 1}
{:event "schedule-resolver"
:duration 0
:start 2
:input-data #:dev.playground{:id 1}
:sym dev.playground/color
:key :dev.playground/color-lighter}
{:event "call-resolver"
:duration 303
:start 2
:input-data #:dev.playground{:id 1}
:sym dev.playground/color
:key :dev.playground/color-lighter}
{:event "merge-resolver-response"
:duration 0
:start 305
:sym dev.playground/color
:key :dev.playground/color-lighter}
{:event "call-resolver-with-cache"
:duration 0
:start 305
:input-data #:dev.playground{:color "red"}
:sym dev.playground/lighter-color
:key :dev.playground/color-lighter}
{:event "schedule-resolver"
:duration 0
:start 308
:input-data #:dev.playground{:color "red"}
:sym dev.playground/lighter-color
:key :dev.playground/color-lighter}
{:event "call-resolver"
:duration 53
:start 308
:input-data #:dev.playground{:color "red"}
:sym dev.playground/lighter-color
:key :dev.playground/color-lighter}
{:event "merge-resolver-response"
:duration 0
:start 362
:sym dev.playground/lighter-color
:key :dev.playground/color-lighter}
{:event "call-read" :duration 0 :start 362}
{:event "value-return" :duration 0 :start 365}]
:name ":dev.playground/color-lighter"}
{:start 3
:path [[:dev.playground/id 1] :dev.playground/color]
:duration 303
:details [{:event "skip-wait-key" :duration 0 :start 3}
{:event "call-read" :duration 0 :start 305}
{:event "value-return" :duration 0 :start 306}]
:name ":dev.playground/color"}
{:start 3
:path [[:dev.playground/id 1] :dev.playground/weight]
:duration 107
:details [{:event "compute-plan"
:duration 0
:start 4
:plan (([:dev.playground/weight dev.playground/weight]))}
{:event "call-resolver-with-cache"
:duration 0
:start 4
:input-data #:dev.playground{:id 1}
:sym dev.playground/weight
:key :dev.playground/weight}
{:event "call-read" :duration 0 :start 4}
{:event "schedule-resolver"
:duration 0
:start 6
:input-data #:dev.playground{:id 1}
:sym dev.playground/weight
:key :dev.playground/weight}
{:event "call-resolver"
:duration 103
:start 6
:input-data #:dev.playground{:id 1}
:sym dev.playground/weight
:key :dev.playground/weight}
{:event "merge-resolver-response"
:duration 0
:start 109
:sym dev.playground/weight
:key :dev.playground/weight}
{:event "call-read" :duration 0 :start 110}
{:event "value-return" :duration 0 :start 110}]
:name ":dev.playground/weight"}
{:start 6
:path [[:dev.playground/id 1] :dev.playground/color-darken]
:duration 325
:details [{:event "compute-plan"
:duration 1
:start 6
:plan (([:dev.playground/color dev.playground/color]
[:dev.playground/color-darken dev.playground/darken-color]))}
{:event "call-read" :duration 0 :start 6}
{:event "waiting-resolver"
:duration 0
:start 7
:waiting-key :dev.playground/color
:input-data #:dev.playground{:id 1}
:sym dev.playground/color
:key :dev.playground/color-darken}
{:event "call-resolver-with-cache"
:duration 0
:start 305
:input-data #:dev.playground{:color "red"}
:sym dev.playground/darken-color
:key :dev.playground/color-darken}
{:event "schedule-resolver"
:duration 0
:start 306
:input-data #:dev.playground{:color "red"}
:sym dev.playground/darken-color
:key :dev.playground/color-darken}
{:event "call-resolver"
:duration 24
:start 306
:input-data #:dev.playground{:color "red"}
:sym dev.playground/darken-color
:key :dev.playground/color-darken}
{:event "merge-resolver-response"
:duration 0
:start 330
:sym dev.playground/darken-color
:key :dev.playground/color-darken}
{:event "call-read" :duration 0 :start 330}
{:event "value-return" :duration 0 :start 331}]
:name ":dev.playground/color-darken"}
{:start 9
:path [[:dev.playground/id 1] :dev.playground/size]
:duration 101
:details [{:event "skip-wait-key" :duration 0 :start 9}
{:event "call-read" :duration 0 :start 110}
{:event "value-return" :duration 0 :start 110}]
:name ":dev.playground/size"}]}
{:start 1
:path [:com.wsscode.pathom/trace]
:duration 0
:details [{:event "compute-plan" :duration 0 :start 1}
{:event "call-read" :duration 0 :start 1}
{:event "value-return" :duration 0 :start 1}]
:name ":com.wsscode.pathom/trace"}]
:hint "Query"})
(ws/defcard trace-view-card
{::wsm/align {:flex 1}}
(ct.fulcro/fulcro-card
{::ct.fulcro/root trace/D3Trace
::ct.fulcro/initial-state (fn [_]
{:trace-data sample-trace})}))
| |
019accf79695f25cc633a0fa7eada548ab12fd4db20574bc0280883f4bb3a049 | tcsprojects/pgsolver | fearnleysubexp.ml | open Arg ;;
open Tcsargs;;
open Tcsbasedata;;
open Paritygame;;
open Stratimprgenerators;;
type gamenode = DecLaneEven of int (*a*)
| DecLaneOdd of int (*t*)
| DecLaneRoot (*c*)
| CycleNode of int * int (*d*)
| CycleCenter of int (*e*)
| CycleCenterBadEntry of int (*m*)
| CycleCenterBadEntryX of int (*q*)
| CycleNodeCho of int * int (*u*)
| CycleBadEntrySel of int * int (*v*)
| CycleBadEntrySelX of int * int (*w*)
| CycleAccess of int (*f*)
| CycleSelector of int (*g*)
| CycleLeaver of int (*h*)
| UpperSelector of int (*k*)
| FinalSink (*z*)
| FinalCycle (*x*)
| BitSelector (*r*)
| StartEven (*s*)
let symb_to_str = function DecLaneEven i -> "a" ^ string_of_int i | DecLaneOdd i -> "t" ^ string_of_int i | DecLaneRoot -> "c" | CycleNode (i,j) -> "d(" ^ string_of_int i ^ "," ^ string_of_int j ^ ")" | CycleCenter i -> "e" ^ string_of_int i | CycleCenterBadEntry i -> "m" ^ string_of_int i | CycleCenterBadEntryX i -> "q" ^ string_of_int i | CycleNodeCho (i,j) -> "u(" ^ string_of_int i ^ "," ^ string_of_int j ^ ")" | CycleBadEntrySel (i,j) -> "v(" ^ string_of_int i ^ "," ^ string_of_int j ^ ")" | CycleBadEntrySelX (i,j) -> "w(" ^ string_of_int i ^ "," ^ string_of_int j ^ ")" | CycleAccess i -> "f" ^ string_of_int i | CycleSelector i -> "g" ^ string_of_int i | CycleLeaver i -> "h" ^ string_of_int i | UpperSelector i -> "k" ^ string_of_int i | FinalSink -> "z" | FinalCycle -> "x" | StartEven -> "s" | BitSelector -> "r"
let mkli n f = (Array.to_list (Array.init n f))
let generator_game_func arguments =
let n = ref None in
let withfearnley = ref true in
SimpleArgs.parsearr arguments
[(["-nogadget"], Unit (fun _ -> withfearnley := false), "\n disable fearnley gadgets")]
(fun s -> n := Some (int_of_string s)) ("Options are")
SimpleArgs.argprint_help SimpleArgs.argprint_bad;
let n = OptionUtils.get_some !n in
let withfearnley = !withfearnley in
let pg = SymbolicParityGame.create_new FinalCycle in
let add sy pr pl li = SymbolicParityGame.add_node pg sy pr pl (Array.of_list li) (Some (symb_to_str sy)) in
add FinalCycle 1 plr_Odd [FinalCycle];
add StartEven (10 * n + 6) plr_Even ( (FinalSink::(mkli n (fun j -> CycleAccess j))));
add DecLaneRoot (10 * n + 4) plr_Even [StartEven; BitSelector];
add BitSelector (10 * n + 8) plr_Even ( (FinalSink::(mkli n (fun j -> CycleSelector j))));
add FinalSink (18 * n + 10) plr_Odd [FinalCycle];
for i = 0 to 3 * n - 1 do
add (DecLaneEven i) (4 * n + 2 * i + 4) plr_Odd [DecLaneOdd i];
add (DecLaneOdd i) (4 * n + 2 * i + 3) plr_Even (if i = 0 then [DecLaneRoot; BitSelector; StartEven] else [DecLaneOdd (i - 1); BitSelector; StartEven])
done;
for i = 0 to n - 1 do
if withfearnley then (
for j = i + 1 to n - 1 do
add (CycleNodeCho (i,j)) 3 plr_Even [CycleBadEntrySel (i,j); CycleBadEntrySelX (i,j)];
add (CycleBadEntrySel (i,j)) 3 plr_Odd [CycleCenterBadEntry j; (if j = i+1 then CycleCenter i else CycleNodeCho (i,j-1))];
add (CycleBadEntrySelX (i,j)) 2 plr_Odd [CycleCenterBadEntryX j; (if j = i+1 then CycleCenter i else CycleNodeCho (i,j-1))];
done;
);
add (CycleNode (i,1)) (4 * i + 3) plr_Even ( ([StartEven; CycleNode (i,0)] @ (mkli (3 * i + 3) (fun j -> DecLaneEven j)) @ [BitSelector]));
add (CycleNode (i,0)) (4 * i + 3) plr_Even ( ([StartEven; (if (i = n-1) || not withfearnley then CycleCenter i else CycleNodeCho (i,n-1))] @ (mkli (3 * i + 3) (fun j -> DecLaneEven j)) @ [BitSelector]));
add (CycleCenter i) (4 * i + 4) plr_Odd [CycleNode (i,1); CycleLeaver i];
if withfearnley then (
add (CycleCenterBadEntry i) (10 * n + 9 + 2 * i) plr_Odd [CycleCenter i];
add (CycleCenterBadEntryX i) (12 * n + 6 * i + 13) plr_Odd [CycleLeaver i];
);
add (CycleLeaver i) (12 * n + 6 * i + 14) plr_Odd [UpperSelector i];
add (UpperSelector i) (12 * n + 6 * i + 9) plr_Even ((FinalSink::(mkli (n - i - 1) (fun j -> CycleSelector (n - j - 1)))));
add (CycleAccess i) (12 * n + 6 * i + 11) plr_Odd [CycleCenter i];
add (CycleSelector i) (4 * i + 6) plr_Even [CycleAccess i; UpperSelector i];
done;
SymbolicParityGame.to_paritygame pg;;
let register _ =
register_strat_impr_gen {
ident = "fearnleysubexp";
description = "Subexponential Lower Bound for Fearnley's rule";
parity_game = Some generator_game_func;
generalized_mdp = None;
}
| null | https://raw.githubusercontent.com/tcsprojects/pgsolver/b0c31a8b367c405baed961385ad645d52f648325/src/generators/policyiter/generators/fearnleysubexp.ml | ocaml | a
t
c
d
e
m
q
u
v
w
f
g
h
k
z
x
r
s | open Arg ;;
open Tcsargs;;
open Tcsbasedata;;
open Paritygame;;
open Stratimprgenerators;;
let symb_to_str = function DecLaneEven i -> "a" ^ string_of_int i | DecLaneOdd i -> "t" ^ string_of_int i | DecLaneRoot -> "c" | CycleNode (i,j) -> "d(" ^ string_of_int i ^ "," ^ string_of_int j ^ ")" | CycleCenter i -> "e" ^ string_of_int i | CycleCenterBadEntry i -> "m" ^ string_of_int i | CycleCenterBadEntryX i -> "q" ^ string_of_int i | CycleNodeCho (i,j) -> "u(" ^ string_of_int i ^ "," ^ string_of_int j ^ ")" | CycleBadEntrySel (i,j) -> "v(" ^ string_of_int i ^ "," ^ string_of_int j ^ ")" | CycleBadEntrySelX (i,j) -> "w(" ^ string_of_int i ^ "," ^ string_of_int j ^ ")" | CycleAccess i -> "f" ^ string_of_int i | CycleSelector i -> "g" ^ string_of_int i | CycleLeaver i -> "h" ^ string_of_int i | UpperSelector i -> "k" ^ string_of_int i | FinalSink -> "z" | FinalCycle -> "x" | StartEven -> "s" | BitSelector -> "r"
let mkli n f = (Array.to_list (Array.init n f))
let generator_game_func arguments =
let n = ref None in
let withfearnley = ref true in
SimpleArgs.parsearr arguments
[(["-nogadget"], Unit (fun _ -> withfearnley := false), "\n disable fearnley gadgets")]
(fun s -> n := Some (int_of_string s)) ("Options are")
SimpleArgs.argprint_help SimpleArgs.argprint_bad;
let n = OptionUtils.get_some !n in
let withfearnley = !withfearnley in
let pg = SymbolicParityGame.create_new FinalCycle in
let add sy pr pl li = SymbolicParityGame.add_node pg sy pr pl (Array.of_list li) (Some (symb_to_str sy)) in
add FinalCycle 1 plr_Odd [FinalCycle];
add StartEven (10 * n + 6) plr_Even ( (FinalSink::(mkli n (fun j -> CycleAccess j))));
add DecLaneRoot (10 * n + 4) plr_Even [StartEven; BitSelector];
add BitSelector (10 * n + 8) plr_Even ( (FinalSink::(mkli n (fun j -> CycleSelector j))));
add FinalSink (18 * n + 10) plr_Odd [FinalCycle];
for i = 0 to 3 * n - 1 do
add (DecLaneEven i) (4 * n + 2 * i + 4) plr_Odd [DecLaneOdd i];
add (DecLaneOdd i) (4 * n + 2 * i + 3) plr_Even (if i = 0 then [DecLaneRoot; BitSelector; StartEven] else [DecLaneOdd (i - 1); BitSelector; StartEven])
done;
for i = 0 to n - 1 do
if withfearnley then (
for j = i + 1 to n - 1 do
add (CycleNodeCho (i,j)) 3 plr_Even [CycleBadEntrySel (i,j); CycleBadEntrySelX (i,j)];
add (CycleBadEntrySel (i,j)) 3 plr_Odd [CycleCenterBadEntry j; (if j = i+1 then CycleCenter i else CycleNodeCho (i,j-1))];
add (CycleBadEntrySelX (i,j)) 2 plr_Odd [CycleCenterBadEntryX j; (if j = i+1 then CycleCenter i else CycleNodeCho (i,j-1))];
done;
);
add (CycleNode (i,1)) (4 * i + 3) plr_Even ( ([StartEven; CycleNode (i,0)] @ (mkli (3 * i + 3) (fun j -> DecLaneEven j)) @ [BitSelector]));
add (CycleNode (i,0)) (4 * i + 3) plr_Even ( ([StartEven; (if (i = n-1) || not withfearnley then CycleCenter i else CycleNodeCho (i,n-1))] @ (mkli (3 * i + 3) (fun j -> DecLaneEven j)) @ [BitSelector]));
add (CycleCenter i) (4 * i + 4) plr_Odd [CycleNode (i,1); CycleLeaver i];
if withfearnley then (
add (CycleCenterBadEntry i) (10 * n + 9 + 2 * i) plr_Odd [CycleCenter i];
add (CycleCenterBadEntryX i) (12 * n + 6 * i + 13) plr_Odd [CycleLeaver i];
);
add (CycleLeaver i) (12 * n + 6 * i + 14) plr_Odd [UpperSelector i];
add (UpperSelector i) (12 * n + 6 * i + 9) plr_Even ((FinalSink::(mkli (n - i - 1) (fun j -> CycleSelector (n - j - 1)))));
add (CycleAccess i) (12 * n + 6 * i + 11) plr_Odd [CycleCenter i];
add (CycleSelector i) (4 * i + 6) plr_Even [CycleAccess i; UpperSelector i];
done;
SymbolicParityGame.to_paritygame pg;;
let register _ =
register_strat_impr_gen {
ident = "fearnleysubexp";
description = "Subexponential Lower Bound for Fearnley's rule";
parity_game = Some generator_game_func;
generalized_mdp = None;
}
|
b86cfeda9a00ce70847941fcf4ad5b8787e99ca2ed0178e39cef99f9c2fcfd2e | DKurilo/hackerrank | findsingle.hs | -- find element that don't have pair in the given list in constant space and O(n) complexity
module Main where
findSingle :: [Int] -> Int
findSingle ns = unadjust . foldl (\p x -> let x' = adjust x in
if p `mod` x' == 0 then p `div` x' else p * x') 1 $ ns
where bottom = minimum ns
top = maximum ns
adjust x = x - bottom + 1 + top
unadjust x = x + bottom - 1 - top
count :: Int -> [Int] -> Int
count x = foldl (\s n -> (if n == x then 1 else 0) + s) 0
test :: ([Int] -> Int) -> [Int] -> Bool
test f ns = length ns `mod` 2 == 0 || count x ns `mod` 2 == 1
where x = f ns
main :: IO ()
main = mapM_ (print . test findSingle) [ [1, 9, 9, 1, 12, 12, 5]
, [1, 9, 9, 100, 1, 12, 12]
, [1, 9, 9, 1, 12, 5, 12]
, [1, 9, 9, 1, 12, 50, 12]
, [1, 9, 9, 1, 12, -50, 12]
, [1, 9, 9, 1, 12, 9, 12]
, [1, 9, 9, 1, 12, 12]
, [1, 9]
, [1]
, [0]
, [1, 9, 9, -5, 1, -12, -12]
, []
, [1,2,7,3,0,5,7,1,2,7,3,5,7]
]
| null | https://raw.githubusercontent.com/DKurilo/hackerrank/37063170567b397b25a2b7123bc9c1299d34814a/nothackerrank/findsingle.hs | haskell | find element that don't have pair in the given list in constant space and O(n) complexity | module Main where
findSingle :: [Int] -> Int
findSingle ns = unadjust . foldl (\p x -> let x' = adjust x in
if p `mod` x' == 0 then p `div` x' else p * x') 1 $ ns
where bottom = minimum ns
top = maximum ns
adjust x = x - bottom + 1 + top
unadjust x = x + bottom - 1 - top
count :: Int -> [Int] -> Int
count x = foldl (\s n -> (if n == x then 1 else 0) + s) 0
test :: ([Int] -> Int) -> [Int] -> Bool
test f ns = length ns `mod` 2 == 0 || count x ns `mod` 2 == 1
where x = f ns
main :: IO ()
main = mapM_ (print . test findSingle) [ [1, 9, 9, 1, 12, 12, 5]
, [1, 9, 9, 100, 1, 12, 12]
, [1, 9, 9, 1, 12, 5, 12]
, [1, 9, 9, 1, 12, 50, 12]
, [1, 9, 9, 1, 12, -50, 12]
, [1, 9, 9, 1, 12, 9, 12]
, [1, 9, 9, 1, 12, 12]
, [1, 9]
, [1]
, [0]
, [1, 9, 9, -5, 1, -12, -12]
, []
, [1,2,7,3,0,5,7,1,2,7,3,5,7]
]
|
660fdf5986ed185f22142139fd5551037b7d388a3f02c33156199053268c396b | puppetlabs/puppetdb | core.clj | (ns puppetlabs.puppetdb.core
"PuppetDBs normal entry point. Dispatches to command line subcommands."
(:require
[clojure.string :as str]
[puppetlabs.puppetdb.cli.util
:refer [err-exit-status exit run-cli-cmd]]))
(def usage-lines
["Available subcommands:"
" version Display version information"
" services Run PuppetDB"
" upgrade Upgrade to latest version and exit"
" benchmark Run development-only benchmarking tool"
" fact-storage-benchmark"
" help Display usage summary"
"For help on a given subcommand, invoke it with -h"])
(defn usage
[stream]
(binding [*out* stream]
(println (str/join "\n" usage-lines))))
(defn help [args]
(if (zero? (count args))
(do (usage *out*) 0)
(do (usage *err*) err-exit-status)))
Resolve the subcommands dynamically to avoid loading the world just
;; to print the version.
(defn run-resolved [cli-name fn-name args]
(let [namespace (symbol (str "puppetlabs.puppetdb.cli." cli-name))]
(require (vector namespace))
(apply (ns-resolve namespace fn-name) args)))
(defn run-subcommand
"Runs the given subcommand, which should handle shutdown and the
process exit status itself."
[subcommand args]
(case subcommand
"help" (run-cli-cmd #(help args))
"upgrade" (run-resolved "services" 'cli [args {:upgrade-and-exit? true}])
"services" (run-resolved "services" 'cli [args])
("benchmark" "fact-storage-benchmark" "version")
(run-resolved subcommand 'cli [args])
(do
(usage *err*)
err-exit-status)))
(defn -main
[subcommand & args]
(exit (run-subcommand subcommand args)))
| null | https://raw.githubusercontent.com/puppetlabs/puppetdb/b3d6d10555561657150fa70b6d1e609fba9c0eda/src/puppetlabs/puppetdb/core.clj | clojure | to print the version. | (ns puppetlabs.puppetdb.core
"PuppetDBs normal entry point. Dispatches to command line subcommands."
(:require
[clojure.string :as str]
[puppetlabs.puppetdb.cli.util
:refer [err-exit-status exit run-cli-cmd]]))
(def usage-lines
["Available subcommands:"
" version Display version information"
" services Run PuppetDB"
" upgrade Upgrade to latest version and exit"
" benchmark Run development-only benchmarking tool"
" fact-storage-benchmark"
" help Display usage summary"
"For help on a given subcommand, invoke it with -h"])
(defn usage
[stream]
(binding [*out* stream]
(println (str/join "\n" usage-lines))))
(defn help [args]
(if (zero? (count args))
(do (usage *out*) 0)
(do (usage *err*) err-exit-status)))
Resolve the subcommands dynamically to avoid loading the world just
(defn run-resolved [cli-name fn-name args]
(let [namespace (symbol (str "puppetlabs.puppetdb.cli." cli-name))]
(require (vector namespace))
(apply (ns-resolve namespace fn-name) args)))
(defn run-subcommand
"Runs the given subcommand, which should handle shutdown and the
process exit status itself."
[subcommand args]
(case subcommand
"help" (run-cli-cmd #(help args))
"upgrade" (run-resolved "services" 'cli [args {:upgrade-and-exit? true}])
"services" (run-resolved "services" 'cli [args])
("benchmark" "fact-storage-benchmark" "version")
(run-resolved subcommand 'cli [args])
(do
(usage *err*)
err-exit-status)))
(defn -main
[subcommand & args]
(exit (run-subcommand subcommand args)))
|
1852724d776fdb478e4945fc7734e135a4aa9ddf38b2f2cc336d0397c2aa08f2 | albertoruiz/easyVision | conic.hs | import Vision.GUI.Simple
import Image
import Contours(asSegments)
import Util.Ellipses
import Util.Geometry
main = runIt $ clickPoints "conic" "--points" () (sh.fst)
l = gjoin (Point 0.5 0) (Point 0 0.5)
sh pts | length pts >= 5 = Draw
[ drwpts
, (color col . drawConic) c
, color red l
, color yellow . pointSz 3 $ intersectionConicLine c l
]
| otherwise = drwpts
where
c = computeConic pts
drwpts = color white . drawPointsLabeled $ pts
col = if isEllipse c then green else orange
isEllipse c = null (intersectionConicLine c linf)
linf = HLine 0 0 1
drawConic c = Draw ss
where
ps = pointsConic 50 c
ss = filter ((1>).segmentLength) $ asSegments (Closed ps)
| null | https://raw.githubusercontent.com/albertoruiz/easyVision/26bb2efaa676c902cecb12047560a09377a969f2/projects/vision/geom/conic.hs | haskell | import Vision.GUI.Simple
import Image
import Contours(asSegments)
import Util.Ellipses
import Util.Geometry
main = runIt $ clickPoints "conic" "--points" () (sh.fst)
l = gjoin (Point 0.5 0) (Point 0 0.5)
sh pts | length pts >= 5 = Draw
[ drwpts
, (color col . drawConic) c
, color red l
, color yellow . pointSz 3 $ intersectionConicLine c l
]
| otherwise = drwpts
where
c = computeConic pts
drwpts = color white . drawPointsLabeled $ pts
col = if isEllipse c then green else orange
isEllipse c = null (intersectionConicLine c linf)
linf = HLine 0 0 1
drawConic c = Draw ss
where
ps = pointsConic 50 c
ss = filter ((1>).segmentLength) $ asSegments (Closed ps)
| |
d1b886d7f951a0156674fc316c82eeca606fe52f66df0baf850a3fab272ea6dc | SuYi1995/game_server | sup.erl | %%%-------------------------------------------------------------------
%%% @author sy
( C ) 2019 , < COMPANY >
%%% @doc
%%% 节点监控树
%%% @end
Created : 29 . 9月 2019 14:56
%%%-------------------------------------------------------------------
-module(sup).
-author("sy").
-behaviour(supervisor).
%% API
-export([start_link/0, start_link/1]).
%% Supervisor callbacks
-export([init/1]).
-define(SERVER, ?MODULE).
%%%===================================================================
%%% API functions
%%%===================================================================
%% @doc Starts the supervisor
-spec(start_link() -> {ok, Pid :: pid()} | ignore | {error, Reason :: term()}).
start_link() ->
supervisor:start_link({local, ?SERVER}, ?MODULE, []).
start_link([Ip, Port, Id]) ->
supervisor:start_link({local, ?MODULE}, ?MODULE, [Ip, Port, Id]).
%%%===================================================================
%%% Supervisor callbacks
%%%===================================================================
@private
%% @doc Whenever a supervisor is started using supervisor:start_link/[2,3],
%% this function is called by the new process to find out about
%% restart strategy, maximum restart frequency and child
%% specifications.
-spec(init(Args :: term()) ->
{ok, {SupFlags :: {RestartStrategy :: supervisor:strategy(),
MaxR :: non_neg_integer(), MaxT :: non_neg_integer()},
[ChildSpec :: supervisor:child_spec()]}}
| ignore | {error, Reason :: term()}).
init(Args) ->
MaxRestarts = 50,
MaxSecondsBetweenRestarts = 1,
SupFlags = #{strategy => one_for_one,
intensity => MaxRestarts,
period => MaxSecondsBetweenRestarts},
CoreL = get_core(Args),
AcceptorL = get_acceptor(Args),
{ok, {SupFlags, boot_misc:swap_sup_child(CoreL ++ AcceptorL)}}.
%%%===================================================================
Internal functions
%%%===================================================================
核心模块
get_core([Ip, Port, Id]) ->
[
{svr_time, {svr_time, start_link, []}}
, {svr_logic, {svr_logic, start_link, []}}
, {svr_node, {svr_node, start_link, [Ip, Port, Id]}}
].
%% 链接器模块
get_acceptor([_Host, Port, _Id]) ->
[
{sup_acceptor, {sup_acceptor, start_link, []}, permanent, 10000, supervisor, [sup_acceptor]}
,{sys_listener, {sys_listener, start_link, [Port]}}
]. | null | https://raw.githubusercontent.com/SuYi1995/game_server/b9a8574589075a1264c3d1f9a564d6d2ea8ae574/src/base/sys/sup.erl | erlang | -------------------------------------------------------------------
@author sy
@doc
节点监控树
@end
-------------------------------------------------------------------
API
Supervisor callbacks
===================================================================
API functions
===================================================================
@doc Starts the supervisor
===================================================================
Supervisor callbacks
===================================================================
@doc Whenever a supervisor is started using supervisor:start_link/[2,3],
this function is called by the new process to find out about
restart strategy, maximum restart frequency and child
specifications.
===================================================================
===================================================================
链接器模块 | ( C ) 2019 , < COMPANY >
Created : 29 . 9月 2019 14:56
-module(sup).
-author("sy").
-behaviour(supervisor).
-export([start_link/0, start_link/1]).
-export([init/1]).
-define(SERVER, ?MODULE).
-spec(start_link() -> {ok, Pid :: pid()} | ignore | {error, Reason :: term()}).
start_link() ->
supervisor:start_link({local, ?SERVER}, ?MODULE, []).
start_link([Ip, Port, Id]) ->
supervisor:start_link({local, ?MODULE}, ?MODULE, [Ip, Port, Id]).
@private
-spec(init(Args :: term()) ->
{ok, {SupFlags :: {RestartStrategy :: supervisor:strategy(),
MaxR :: non_neg_integer(), MaxT :: non_neg_integer()},
[ChildSpec :: supervisor:child_spec()]}}
| ignore | {error, Reason :: term()}).
init(Args) ->
MaxRestarts = 50,
MaxSecondsBetweenRestarts = 1,
SupFlags = #{strategy => one_for_one,
intensity => MaxRestarts,
period => MaxSecondsBetweenRestarts},
CoreL = get_core(Args),
AcceptorL = get_acceptor(Args),
{ok, {SupFlags, boot_misc:swap_sup_child(CoreL ++ AcceptorL)}}.
Internal functions
核心模块
get_core([Ip, Port, Id]) ->
[
{svr_time, {svr_time, start_link, []}}
, {svr_logic, {svr_logic, start_link, []}}
, {svr_node, {svr_node, start_link, [Ip, Port, Id]}}
].
get_acceptor([_Host, Port, _Id]) ->
[
{sup_acceptor, {sup_acceptor, start_link, []}, permanent, 10000, supervisor, [sup_acceptor]}
,{sys_listener, {sys_listener, start_link, [Port]}}
]. |
20e93b589b229478332e7eef5aa9a78603cbfb1aba8ee9680908344f8abd7b89 | clojureverse/clojurians-log-app | slack_api.clj | (ns clojurians-log.slack-api
(:require [co.gaiwan.slack.api.core :as slack]
[clj-slack.channels :as slack-channels]
[clj-slack.core :refer [slack-request stringify-keys]]
[clj-slack.emoji :as slack-emoji]
[clj-slack.users :as slack-users]
[clojurians-log.application :as cl-app]
[clojurians-log.datomic :as d]
[clojurians-log.db.import :as import]
[clojurians-log.db.queries :as queries]))
(defn slack-conn []
{:api-url ""
;; TODO: get rid of this global config access
:token (get-in cl-app/config [:slack :api-token])})
(defn emoji [] (slack/get-emoji (slack-conn)))
(defn users [] (slack/get-users (slack-conn)))
(defn channels [] (slack/get-channels (slack-conn)))
(defn import-users!
([conn]
(import-users! conn (users)))
([conn users]
(doseq [users (partition-all 1000 users)]
@(d/transact conn (mapv import/user->tx users)))))
(defn import-channels! [conn]
(let [channel->db-id (queries/channel-id-map (d/db conn))
channels (mapv import/channel->tx (channels))]
@(d/transact conn
(mapv (fn [{slack-id :channel/slack-id :as ch}]
(if-let [db-id (channel->db-id slack-id)]
(assoc ch :db/id db-id)
ch))
channels))))
(defn import-emojis!
([conn]
(import-emojis! conn (emoji)))
([conn emojis]
(doseq [emojis (partition-all 1000 emojis)]
@(d/transact conn (mapv import/emoji->tx emojis)))))
| null | https://raw.githubusercontent.com/clojureverse/clojurians-log-app/8b3d7f24b6ddc2edb01ce702c118c0fba8b95190/src/clojurians_log/slack_api.clj | clojure | TODO: get rid of this global config access | (ns clojurians-log.slack-api
(:require [co.gaiwan.slack.api.core :as slack]
[clj-slack.channels :as slack-channels]
[clj-slack.core :refer [slack-request stringify-keys]]
[clj-slack.emoji :as slack-emoji]
[clj-slack.users :as slack-users]
[clojurians-log.application :as cl-app]
[clojurians-log.datomic :as d]
[clojurians-log.db.import :as import]
[clojurians-log.db.queries :as queries]))
(defn slack-conn []
{:api-url ""
:token (get-in cl-app/config [:slack :api-token])})
(defn emoji [] (slack/get-emoji (slack-conn)))
(defn users [] (slack/get-users (slack-conn)))
(defn channels [] (slack/get-channels (slack-conn)))
(defn import-users!
([conn]
(import-users! conn (users)))
([conn users]
(doseq [users (partition-all 1000 users)]
@(d/transact conn (mapv import/user->tx users)))))
(defn import-channels! [conn]
(let [channel->db-id (queries/channel-id-map (d/db conn))
channels (mapv import/channel->tx (channels))]
@(d/transact conn
(mapv (fn [{slack-id :channel/slack-id :as ch}]
(if-let [db-id (channel->db-id slack-id)]
(assoc ch :db/id db-id)
ch))
channels))))
(defn import-emojis!
([conn]
(import-emojis! conn (emoji)))
([conn emojis]
(doseq [emojis (partition-all 1000 emojis)]
@(d/transact conn (mapv import/emoji->tx emojis)))))
|
08e1a0aceffdc6f2dc317cf80bcc738ccf8690289fdbe9ad1837b25ea73fc92c | cs51project/ocaml-chess | server.ml | open Board
open Engine
(* Web server for user interface:
* GUI is displayed via HTML and CSS on a browser, and AJAX
* is used to transmit user moves to the chess engine.
*
* Adapted from moogle web server.
*)
let debug = false
Read the command line arguments and return the
* port number which OCamlChess should use for serving .
* port number which OCamlChess should use for serving. *)
let server_port =
let args = Sys.argv in
try
int_of_string (Array.get args 1)
with _ ->
(Printf.printf
"usage: %s <port>\n"
(Array.get args 0) ;
exit 1)
let response_header mime =
"HTTP/1.1 200 OK\n" ^
"Server: OCamlChess/1.0\n" ^
"Content-Type: " ^ mime ^ "\n" ^
"Content-Language: en-us\n" ^
"Connection: close\n\n"
let fail_header =
"HTTP/1.1 404 NOT FOUND\n" ^
"Server: OCamlChess/1.0\n" ^
"Connection: close\n\n"
A post request will have a bunch of headers
* on it separated from the actual data by two newlines ( or two
* carriage - returns / line - feeds . ) This finds those two spaces and
* strips off all the headers . ( Copied from moogle . )
* on it separated from the actual data by two newlines (or two
* carriage-returns/line-feeds.) This finds those two spaces and
* strips off all the headers. (Copied from moogle.) *)
let strip_headers request =
let rec find_two_newlines i =
if i+2 < String.length request then
match String.sub request i 2 with
| "\n\n" -> Some (i+2)
| "\r\n" ->
if i+4 < String.length request then
(match String.sub request (i+2) 2 with
| "\r\n" -> Some (i+4)
| _ -> find_two_newlines (i+1))
else None
| _ -> find_two_newlines (i+1)
else None
in
match find_two_newlines 0 with
| None -> request
| Some i -> String.sub request i (String.length request - i)
module RequestMap = Map.Make(String)
(* A post request is encoded in URL form. This function extracts
* the key-value pairs.
*)
let url_decode request =
let bindings = Str.split (Str.regexp_string "&") request in
let add_binding map str =
let binding_re = Str.regexp_case_fold "^\\([^ \t]+\\)=\\([^ \t]+\\)$" in
if Str.string_match binding_re str 0 then
let k = Str.matched_group 1 str in
let v = Str.matched_group 2 str in
let decode_re = Str.regexp_string "+" in
let decoded_k = Str.global_replace decode_re " " k in
let decoded_v = Str.global_replace decode_re " " v in
RequestMap.add decoded_k decoded_v map
else map
in
List.fold_left add_binding RequestMap.empty bindings
let request_move board_fen =
match StdBoard.fen_decode board_fen with
| None -> "false"
| Some board ->
if StdBoard.checkmate board then "checkmate"
else match StdEngine.strat StdEval.init_eval board with
| None -> "false"
| Some mv ->
match StdBoard.play board mv with
| None -> "false"
| Some new_board -> StdBoard.fen_encode new_board
let submit_move board_fen move_str =
match StdBoard.fen_decode board_fen with
| None -> "false"
| Some board ->
if StdBoard.checkmate board then "checkmate"
else let move_re =
Str.regexp_case_fold "^\\([a-h][1-8]\\)\\([a-h][1-8]\\)\\|OOO\\|OO$"
in
if not (Str.string_match move_re move_str 0) then "false"
else let move =
if move_str = "OOO" then
Some (StdBoard.Castle StdBoard.Queenside)
else if move_str = "OO" then
Some (StdBoard.Castle StdBoard.Kingside)
else
let pos1 = StdBoard.fen_to_pos (Str.matched_group 1 move_str) in
let pos2 = StdBoard.fen_to_pos (Str.matched_group 2 move_str) in
match (pos1, pos2) with
| (None, _) | (_, None) -> None
| (Some pos1, Some pos2) ->
Some (StdBoard.Standard(pos1, pos2))
in match move with
| None -> "false"
| Some move ->
match StdBoard.play board move with
| None -> "false"
| Some new_board -> StdBoard.fen_encode new_board
(* Given a requested path, return the corresponding local path *)
let local_path qs =
Filename.concat (Unix.getcwd()) qs
(* read in all the lines from a file and concatenate them into
* a big string. *)
let rec input_lines inchan lines =
try
input_lines inchan ((input_line inchan)::lines)
with End_of_file -> List.rev lines
let read_text file =
let _ = flush_all () in
let ch = open_in file in
let lines = input_lines ch [] in
let resp = String.concat "\n" lines in
close_in ch ; resp
let read_bin file =
let _ = flush_all () in
let ch = open_in_bin file in
let rec read_bin_r str =
try
read_bin_r (str ^ String.make 1 (input_char ch))
with End_of_file -> str
in
read_bin_r ""
let read_file file =
let ext =
let ext_re = Str.regexp_case_fold "\\.\\([a-z]+\\)$" in
try
let _ = Str.search_forward ext_re file 0 in
Str.matched_group 1 file
with Not_found -> ""
in
let text_response mime =
(response_header (mime ^ "; charset=utf-8")) ^
(read_text file)
in
let bin_response mime =
(response_header mime) ^ (read_bin file)
in
if ext = "html" then text_response "text/html; charset=utf-8"
else if ext = "css" then text_response "text/css; charset=utf-8"
else if ext = "js" then text_response "text/javascript; charset=utf-8"
else if ext = "txt" then text_response "text/plain; charset=utf-8"
else if ext = "svg" then text_response "image/svg+xml"
else if ext = "png" then bin_response "image/png"
else if ext = "ico" then bin_response "image/vnd.microsoft.icon"
else bin_response "application/octet_stream"
let std_response =
read_file "./index.html"
let send_std_response client_fd =
Unix.send client_fd std_response 0 (String.length std_response) []
let send_all fd buf =
let rec more st size =
let res = Unix.send fd buf st size [] in
if res < size then
more (st + res) (size - res)
else ()
in
let size = String.length buf in
let _ = more 0 size in size
let http_get_re =
Str.regexp_case_fold "GET[ \t]+/\\([^ \t]*\\)[ \t]+HTTP/1\\.[0-9]"
let http_post_re =
Str.regexp_case_fold "POST[ \t]+/index\\.html[ \t]+HTTP/1\\.[0-9]"
let process_request client_fd request =
let is_safe s =
(* At least check that the passed in path doesn't contain .. *)
let r = Str.regexp_string ".." in
try
let _ = Str.search_forward r s 0 in
false
with Not_found -> true
in
if Str.string_match http_get_re request 0 then
let query_string = Str.matched_group 1 request in
let response =
if is_safe query_string then
let path = local_path query_string in
try
match (Unix.stat path).Unix.st_kind with
| Unix.S_REG -> read_file path
| Unix.S_DIR ->
read_file (Filename.concat path "index.html")
| _ -> fail_header
with _ -> std_response
else fail_header
in
send_all client_fd response
else if Str.string_match http_post_re request 0 then
let data_urlencoded = strip_headers request in
let map = url_decode data_urlencoded in
let response =
try
let query = RequestMap.find "q" map in
let board = RequestMap.find "board" map in
if query = "submit_move" then
let move = RequestMap.find "move" map in
submit_move board move
else if query = "request_move" then
request_move board
else "false"
with Not_found -> "false"
in
let header = response_header "text/plain; charset=utf-8" in
send_all client_fd (header ^ response)
else send_all client_fd fail_header
;;
(* The server loop, adapted from moogle (and see e.g.,
* -book/html/book-ora187.html). *)
let server () =
let fd = Unix.socket Unix.PF_INET Unix.SOCK_STREAM 0 in
let sock_addr = Unix.ADDR_INET (Unix.inet_addr_any, server_port) in
let _ = Unix.setsockopt fd Unix.SO_REUSEADDR true in
let _ = Unix.bind fd sock_addr in
at most 5 queued requests
(* loop on a set of client-server games *)
let rec server_loop () =
(* allow a client to connect *)
let (client_fd, client_addr) = Unix.accept fd in
match Unix.fork() with
| 0 ->
let buf = String.create 4096 in
let len = Unix.recv client_fd buf 0 (String.length buf) [] in
let request = String.sub buf 0 len in
let _ = process_request client_fd request in
Unix.close client_fd ;
| pid ->
let _ = Unix.close client_fd in
let _ = Unix.waitpid [] pid in server_loop ()
in server_loop ()
in server ()
| null | https://raw.githubusercontent.com/cs51project/ocaml-chess/6bafa77156cfd00aa3e30be2bb12880bfe74fc5b/server.ml | ocaml | Web server for user interface:
* GUI is displayed via HTML and CSS on a browser, and AJAX
* is used to transmit user moves to the chess engine.
*
* Adapted from moogle web server.
A post request is encoded in URL form. This function extracts
* the key-value pairs.
Given a requested path, return the corresponding local path
read in all the lines from a file and concatenate them into
* a big string.
At least check that the passed in path doesn't contain ..
The server loop, adapted from moogle (and see e.g.,
* -book/html/book-ora187.html).
loop on a set of client-server games
allow a client to connect | open Board
open Engine
let debug = false
Read the command line arguments and return the
* port number which OCamlChess should use for serving .
* port number which OCamlChess should use for serving. *)
let server_port =
let args = Sys.argv in
try
int_of_string (Array.get args 1)
with _ ->
(Printf.printf
"usage: %s <port>\n"
(Array.get args 0) ;
exit 1)
let response_header mime =
"HTTP/1.1 200 OK\n" ^
"Server: OCamlChess/1.0\n" ^
"Content-Type: " ^ mime ^ "\n" ^
"Content-Language: en-us\n" ^
"Connection: close\n\n"
let fail_header =
"HTTP/1.1 404 NOT FOUND\n" ^
"Server: OCamlChess/1.0\n" ^
"Connection: close\n\n"
A post request will have a bunch of headers
* on it separated from the actual data by two newlines ( or two
* carriage - returns / line - feeds . ) This finds those two spaces and
* strips off all the headers . ( Copied from moogle . )
* on it separated from the actual data by two newlines (or two
* carriage-returns/line-feeds.) This finds those two spaces and
* strips off all the headers. (Copied from moogle.) *)
let strip_headers request =
let rec find_two_newlines i =
if i+2 < String.length request then
match String.sub request i 2 with
| "\n\n" -> Some (i+2)
| "\r\n" ->
if i+4 < String.length request then
(match String.sub request (i+2) 2 with
| "\r\n" -> Some (i+4)
| _ -> find_two_newlines (i+1))
else None
| _ -> find_two_newlines (i+1)
else None
in
match find_two_newlines 0 with
| None -> request
| Some i -> String.sub request i (String.length request - i)
module RequestMap = Map.Make(String)
let url_decode request =
let bindings = Str.split (Str.regexp_string "&") request in
let add_binding map str =
let binding_re = Str.regexp_case_fold "^\\([^ \t]+\\)=\\([^ \t]+\\)$" in
if Str.string_match binding_re str 0 then
let k = Str.matched_group 1 str in
let v = Str.matched_group 2 str in
let decode_re = Str.regexp_string "+" in
let decoded_k = Str.global_replace decode_re " " k in
let decoded_v = Str.global_replace decode_re " " v in
RequestMap.add decoded_k decoded_v map
else map
in
List.fold_left add_binding RequestMap.empty bindings
let request_move board_fen =
match StdBoard.fen_decode board_fen with
| None -> "false"
| Some board ->
if StdBoard.checkmate board then "checkmate"
else match StdEngine.strat StdEval.init_eval board with
| None -> "false"
| Some mv ->
match StdBoard.play board mv with
| None -> "false"
| Some new_board -> StdBoard.fen_encode new_board
let submit_move board_fen move_str =
match StdBoard.fen_decode board_fen with
| None -> "false"
| Some board ->
if StdBoard.checkmate board then "checkmate"
else let move_re =
Str.regexp_case_fold "^\\([a-h][1-8]\\)\\([a-h][1-8]\\)\\|OOO\\|OO$"
in
if not (Str.string_match move_re move_str 0) then "false"
else let move =
if move_str = "OOO" then
Some (StdBoard.Castle StdBoard.Queenside)
else if move_str = "OO" then
Some (StdBoard.Castle StdBoard.Kingside)
else
let pos1 = StdBoard.fen_to_pos (Str.matched_group 1 move_str) in
let pos2 = StdBoard.fen_to_pos (Str.matched_group 2 move_str) in
match (pos1, pos2) with
| (None, _) | (_, None) -> None
| (Some pos1, Some pos2) ->
Some (StdBoard.Standard(pos1, pos2))
in match move with
| None -> "false"
| Some move ->
match StdBoard.play board move with
| None -> "false"
| Some new_board -> StdBoard.fen_encode new_board
let local_path qs =
Filename.concat (Unix.getcwd()) qs
let rec input_lines inchan lines =
try
input_lines inchan ((input_line inchan)::lines)
with End_of_file -> List.rev lines
let read_text file =
let _ = flush_all () in
let ch = open_in file in
let lines = input_lines ch [] in
let resp = String.concat "\n" lines in
close_in ch ; resp
let read_bin file =
let _ = flush_all () in
let ch = open_in_bin file in
let rec read_bin_r str =
try
read_bin_r (str ^ String.make 1 (input_char ch))
with End_of_file -> str
in
read_bin_r ""
let read_file file =
let ext =
let ext_re = Str.regexp_case_fold "\\.\\([a-z]+\\)$" in
try
let _ = Str.search_forward ext_re file 0 in
Str.matched_group 1 file
with Not_found -> ""
in
let text_response mime =
(response_header (mime ^ "; charset=utf-8")) ^
(read_text file)
in
let bin_response mime =
(response_header mime) ^ (read_bin file)
in
if ext = "html" then text_response "text/html; charset=utf-8"
else if ext = "css" then text_response "text/css; charset=utf-8"
else if ext = "js" then text_response "text/javascript; charset=utf-8"
else if ext = "txt" then text_response "text/plain; charset=utf-8"
else if ext = "svg" then text_response "image/svg+xml"
else if ext = "png" then bin_response "image/png"
else if ext = "ico" then bin_response "image/vnd.microsoft.icon"
else bin_response "application/octet_stream"
let std_response =
read_file "./index.html"
let send_std_response client_fd =
Unix.send client_fd std_response 0 (String.length std_response) []
let send_all fd buf =
let rec more st size =
let res = Unix.send fd buf st size [] in
if res < size then
more (st + res) (size - res)
else ()
in
let size = String.length buf in
let _ = more 0 size in size
let http_get_re =
Str.regexp_case_fold "GET[ \t]+/\\([^ \t]*\\)[ \t]+HTTP/1\\.[0-9]"
let http_post_re =
Str.regexp_case_fold "POST[ \t]+/index\\.html[ \t]+HTTP/1\\.[0-9]"
let process_request client_fd request =
let is_safe s =
let r = Str.regexp_string ".." in
try
let _ = Str.search_forward r s 0 in
false
with Not_found -> true
in
if Str.string_match http_get_re request 0 then
let query_string = Str.matched_group 1 request in
let response =
if is_safe query_string then
let path = local_path query_string in
try
match (Unix.stat path).Unix.st_kind with
| Unix.S_REG -> read_file path
| Unix.S_DIR ->
read_file (Filename.concat path "index.html")
| _ -> fail_header
with _ -> std_response
else fail_header
in
send_all client_fd response
else if Str.string_match http_post_re request 0 then
let data_urlencoded = strip_headers request in
let map = url_decode data_urlencoded in
let response =
try
let query = RequestMap.find "q" map in
let board = RequestMap.find "board" map in
if query = "submit_move" then
let move = RequestMap.find "move" map in
submit_move board move
else if query = "request_move" then
request_move board
else "false"
with Not_found -> "false"
in
let header = response_header "text/plain; charset=utf-8" in
send_all client_fd (header ^ response)
else send_all client_fd fail_header
;;
let server () =
let fd = Unix.socket Unix.PF_INET Unix.SOCK_STREAM 0 in
let sock_addr = Unix.ADDR_INET (Unix.inet_addr_any, server_port) in
let _ = Unix.setsockopt fd Unix.SO_REUSEADDR true in
let _ = Unix.bind fd sock_addr in
at most 5 queued requests
let rec server_loop () =
let (client_fd, client_addr) = Unix.accept fd in
match Unix.fork() with
| 0 ->
let buf = String.create 4096 in
let len = Unix.recv client_fd buf 0 (String.length buf) [] in
let request = String.sub buf 0 len in
let _ = process_request client_fd request in
Unix.close client_fd ;
| pid ->
let _ = Unix.close client_fd in
let _ = Unix.waitpid [] pid in server_loop ()
in server_loop ()
in server ()
|
f0066515ea5445e914b92f8fcb724ff02000966166afbfd6c6b2a50405879b90 | arow-oss/goat-guardian | Password.hs |
module GoatGuardian.Password where
import Control.Monad.IO.Class (MonadIO, liftIO)
import Crypto.Scrypt (EncryptedPass(EncryptedPass), Pass(Pass), encryptPassIO', getEncryptedPass, verifyPass')
import Data.Text (Text)
import Data.Text.Encoding (decodeUtf8With, encodeUtf8)
import Data.Text.Encoding.Error (lenientDecode)
hashPass
:: MonadIO m
=> Text -- ^ password
-> m Text -- ^ hashed password
hashPass pass = do
hashedPassBS <- liftIO $ encryptPassIO' (Pass $ encodeUtf8 pass)
pure $ decodeUtf8With lenientDecode (getEncryptedPass hashedPassBS)
checkPass
:: Text -- ^ password
-> Text -- ^ hashed password
-> Bool
checkPass pass hash =
verifyPass'
(Pass $ encodeUtf8 pass)
(EncryptedPass $ encodeUtf8 hash)
| null | https://raw.githubusercontent.com/arow-oss/goat-guardian/85e25fe8d618a0707b54e0e25858429158d96f75/src/GoatGuardian/Password.hs | haskell | ^ password
^ hashed password
^ password
^ hashed password |
module GoatGuardian.Password where
import Control.Monad.IO.Class (MonadIO, liftIO)
import Crypto.Scrypt (EncryptedPass(EncryptedPass), Pass(Pass), encryptPassIO', getEncryptedPass, verifyPass')
import Data.Text (Text)
import Data.Text.Encoding (decodeUtf8With, encodeUtf8)
import Data.Text.Encoding.Error (lenientDecode)
hashPass
:: MonadIO m
hashPass pass = do
hashedPassBS <- liftIO $ encryptPassIO' (Pass $ encodeUtf8 pass)
pure $ decodeUtf8With lenientDecode (getEncryptedPass hashedPassBS)
checkPass
-> Bool
checkPass pass hash =
verifyPass'
(Pass $ encodeUtf8 pass)
(EncryptedPass $ encodeUtf8 hash)
|
bd8c238bdb664ff77ab7d437c115033979f10aa6d9a793d317f91f9a7a434f9b | anoma/juvix | Error.hs | module Juvix.Compiler.Asm.Interpreter.Error where
import Control.Exception qualified as Exception
import GHC.Show
import Juvix.Compiler.Asm.Interpreter.RuntimeState
data RunError = RunError
{ _runErrorMsg :: Text,
_runErrorState :: RuntimeState
}
makeLenses ''RunError
instance Show RunError where
show :: RunError -> String
show (RunError {..}) =
"runtime error: "
++ fromText _runErrorMsg
instance Exception.Exception RunError
throwRunError :: RuntimeState -> Text -> a
throwRunError st msg = Exception.throw (RunError msg st)
| null | https://raw.githubusercontent.com/anoma/juvix/f0ade4be7ca0159e9b953821618486816bf91035/src/Juvix/Compiler/Asm/Interpreter/Error.hs | haskell | module Juvix.Compiler.Asm.Interpreter.Error where
import Control.Exception qualified as Exception
import GHC.Show
import Juvix.Compiler.Asm.Interpreter.RuntimeState
data RunError = RunError
{ _runErrorMsg :: Text,
_runErrorState :: RuntimeState
}
makeLenses ''RunError
instance Show RunError where
show :: RunError -> String
show (RunError {..}) =
"runtime error: "
++ fromText _runErrorMsg
instance Exception.Exception RunError
throwRunError :: RuntimeState -> Text -> a
throwRunError st msg = Exception.throw (RunError msg st)
| |
b10fc4bcef611bd16dce902801d87c528a258308f02db9515d6d65122d2bb403 | libguestfs/virt-v2v | output_null.mli | virt - v2v
* Copyright ( C ) 2009 - 2021 Red Hat Inc.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation ; either version 2 of the License , or
* ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU General Public License for more details .
*
* You should have received a copy of the GNU General Public License along
* with this program ; if not , write to the Free Software Foundation , Inc. ,
* 51 Franklin Street , Fifth Floor , Boston , USA .
* Copyright (C) 2009-2021 Red Hat Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*)
(** [-o null] output mode. *)
module Null : Output.OUTPUT
| null | https://raw.githubusercontent.com/libguestfs/virt-v2v/7f16a93b424253d8c4c738c3c53b56598215689d/output/output_null.mli | ocaml | * [-o null] output mode. | virt - v2v
* Copyright ( C ) 2009 - 2021 Red Hat Inc.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation ; either version 2 of the License , or
* ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU General Public License for more details .
*
* You should have received a copy of the GNU General Public License along
* with this program ; if not , write to the Free Software Foundation , Inc. ,
* 51 Franklin Street , Fifth Floor , Boston , USA .
* Copyright (C) 2009-2021 Red Hat Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*)
module Null : Output.OUTPUT
|
065425008ea28e96305667539607878ebb9029c9434b026a57fc8387da603c7d | Frama-C/Frama-C-snapshot | slicingActions.ml | (**************************************************************************)
(* *)
This file is part of Frama - C.
(* *)
Copyright ( C ) 2007 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* *)
(**************************************************************************)
(** This module deals with the action management.
* It consists of the definitions of the different kinds of actions,
* and the management of the action list.
*)
(**/**)
type select = SlicingTypes.sl_mark PdgMarks.select
type n_or_d_marks = (SlicingInternals.node_or_dpds * SlicingInternals.pdg_mark) list
(**/**)
(*============================================================================*)
(** {2 Build} *)
(** {3 How the elements will be selected} *)
(** Build a description to tell that the associated nodes have to be marked
* with the given mark, and than the same one will be propagated through
* their dependencies. (see also {!build_node_and_dpds_selection}) *)
let build_simple_node_selection ?(nd_marks=[]) mark =
(SlicingInternals.CwNode, mark)::nd_marks
(** Only the control dependencies of the nodes will be marked *)
let build_addr_dpds_selection ?(nd_marks=[]) mark =
(SlicingInternals.CwAddrDpds, mark)::nd_marks
(** Only the control dependencies of the nodes will be marked *)
let build_data_dpds_selection ?(nd_marks=[]) mark =
(SlicingInternals.CwDataDpds, mark)::nd_marks
(** Only the control dependencies of the nodes will be marked *)
let build_ctrl_dpds_selection ?(nd_marks=[]) mark =
(SlicingInternals.CwCtrlDpds, mark)::nd_marks
* Build a description to tell how the selected nodes and their
* dependencies will have to be marked
* ( see { ! type : SlicingTypes . Internals.node_or_dpds } ) .
* This description depend on the mark that has been asked for .
* First of all , whatever the mark is , the node is selected as [ spare ] ,
* so that it will be visible , and so will its dependencies . Then ,
* if [ is_ctrl mark ] propagate a m1 control mark through the control dependencies
* and do a similar thing for [ addr ] and [ data ]
* dependencies will have to be marked
* (see {!type:SlicingTypes.Internals.node_or_dpds}).
* This description depend on the mark that has been asked for.
* First of all, whatever the mark is, the node is selected as [spare],
* so that it will be visible, and so will its dependencies. Then,
* if [is_ctrl mark] propagate a m1 control mark through the control dependencies
* and do a similar thing for [addr] and [data] *)
let build_node_and_dpds_selection ?(nd_marks=[]) mark =
let m_spare = SlicingMarks.mk_user_spare in
let nd_marks = build_simple_node_selection ~nd_marks:nd_marks m_spare in
let nd_marks =
if SlicingMarks.is_ctrl_mark mark
then
let m_ctrl = SlicingMarks.mk_user_mark ~ctrl:true ~data:false ~addr:false in
build_ctrl_dpds_selection ~nd_marks:nd_marks m_ctrl
else nd_marks
in
let nd_marks =
if SlicingMarks.is_addr_mark mark
then
let m_addr = SlicingMarks.mk_user_mark ~ctrl:false ~data:false ~addr:true in
build_addr_dpds_selection ~nd_marks:nd_marks m_addr
else nd_marks
in
let nd_marks =
if SlicingMarks.is_data_mark mark
then
let m_data = SlicingMarks.mk_user_mark ~ctrl:false ~data:true ~addr:false in
build_data_dpds_selection ~nd_marks:nd_marks m_data
else nd_marks
in
nd_marks
* { 3 Translations to a mapping between marks and program elements }
let translate_crit_to_select pdg ?(to_select=[]) list_crit =
let translate acc (nodes, nd_mark) =
let add_pdg_mark acc (nd, mark) =
let add_nodes m acc nodes =
let add m acc nodepart =
PdgMarks.add_node_to_select acc nodepart m
in
List.fold_left (add m) acc nodes
in
let add_node_dpds dpd_mark f_dpds acc (node, _node_z_part) =
let nodes = f_dpds node in
add_nodes dpd_mark acc nodes
in
let acc = match nd with
| SlicingInternals.CwNode -> add_nodes mark acc nodes
| SlicingInternals.CwAddrDpds -> let f = PdgTypes.Pdg.get_x_direct_dpds PdgTypes.Dpd.Addr pdg in
List.fold_left (add_node_dpds mark f) acc nodes
| SlicingInternals.CwCtrlDpds -> let f = PdgTypes.Pdg.get_x_direct_dpds PdgTypes.Dpd.Ctrl pdg in
List.fold_left (add_node_dpds mark f) acc nodes
| SlicingInternals.CwDataDpds -> let f = PdgTypes.Pdg.get_x_direct_dpds PdgTypes.Dpd.Data pdg in
List.fold_left (add_node_dpds mark f) acc nodes
in acc
in List.fold_left add_pdg_mark acc nd_mark
in List.fold_left translate to_select list_crit
* { 3 Function criteria }
(** build an action to apply the criteria to the persistent selection of the
* function. It means that it will be applied to all slices. *)
let mk_fct_crit fi crit =
SlicingInternals.CrFct { SlicingInternals.cf_fct = SlicingInternals.FctSrc fi ; SlicingInternals.cf_info = crit }
let mk_fct_user_crit fi crit = mk_fct_crit fi (SlicingInternals.CcUserMark crit)
let mk_crit_fct_top fi m = mk_fct_user_crit fi (SlicingInternals.CuTop m)
let mk_crit_fct_user_select fi select = mk_fct_user_crit fi (SlicingInternals.CuSelect select)
let mk_crit_prop_persit_marks fi node_marks =
mk_fct_crit fi (SlicingInternals.CcPropagate node_marks)
(** build an action to apply the criteria to the given slice. *)
let mk_ff_crit ff crit =
SlicingInternals.CrFct { SlicingInternals.cf_fct = SlicingInternals.FctSliced ff ; SlicingInternals.cf_info = crit }
let mk_ff_user_select ff crit = mk_ff_crit ff (SlicingInternals.CcUserMark (SlicingInternals.CuSelect crit))
let mk_crit_choose_call ff call = mk_ff_crit ff (SlicingInternals.CcChooseCall call)
let mk_crit_change_call ff call f = mk_ff_crit ff (SlicingInternals.CcChangeCall (call, f))
let mk_crit_missing_inputs ff call (input_marks, more_inputs) =
mk_ff_crit ff (SlicingInternals.CcMissingInputs (call, input_marks, more_inputs))
let mk_crit_missing_outputs ff call (output_marks, more_outputs) =
mk_ff_crit ff (SlicingInternals.CcMissingOutputs (call, output_marks, more_outputs))
let mk_crit_examines_calls ff call_out_marks =
mk_ff_crit ff (SlicingInternals.CcExamineCalls call_out_marks)
let mk_appli_select_calls fi = SlicingInternals.CrAppli (SlicingInternals.CaCall fi)
* { 3 Shortcut functions for previous things }
let mk_crit_mark_calls fi_caller to_call mark =
let select = try
let caller = SlicingMacros.get_fi_kf fi_caller in
let pdg_caller = !Db.Pdg.get caller in
let call_stmts = !Db.Pdg.find_call_stmts ~caller to_call in
let stmt_mark stmt =
let stmt_ctrl_node = !Db.Pdg.find_call_ctrl_node pdg_caller stmt in
(PdgMarks.mk_select_node stmt_ctrl_node, mark)
in
let select = List.map stmt_mark call_stmts in
SlicingInternals.CuSelect select
with PdgTypes.Pdg.Top -> SlicingInternals.CuTop mark
in
mk_fct_user_crit fi_caller select
let mk_crit_add_output_marks ff select =
let pdg = SlicingMacros.get_ff_pdg ff in
let add acc ( out , m ) =
let nd_m = build_simple_node_selection m in
let node = out in
mk_mark_nodes pdg ~marks : acc [ node ] nd_m
in let select = List.fold_left add [ ] output_marks in
let pdg = SlicingMacros.get_ff_pdg ff in
let add acc (out, m) =
let nd_m = build_simple_node_selection m in
let node = out in
mk_mark_nodes pdg ~marks:acc [node] nd_m
in let select = List.fold_left add [] output_marks in
*)
mk_ff_user_select ff select
let =
let pdg = SlicingMacros.get_ff_pdg ff in
let nodes = ! Db . Pdg.find_all_outputs_nodes pdg in
let nd_m = build_simple_node_selection mark in
let select = mk_mark_nodes nodes nd_m in
mk_ff_user_crit ff select
let mk_crit_add_all_outputs_mark ff mark =
let pdg = SlicingMacros.get_ff_pdg ff in
let nodes = !Db.Pdg.find_all_outputs_nodes pdg in
let nd_m = build_simple_node_selection mark in
let select = mk_mark_nodes nodes nd_m in
mk_ff_user_crit ff select
*)
(*============================================================================*)
* { 2 Print }
let print_nd_and_mark f (nd, m) =
let str = match nd with
| SlicingInternals.CwNode -> ""
| SlicingInternals.CwAddrDpds -> "addr->"
| SlicingInternals.CwDataDpds -> "data->"
| SlicingInternals.CwCtrlDpds -> "ctrl->"
in Format.fprintf f "%s%a" str SlicingMarks.pretty_mark m
let rec print_nd_and_mark_list fmt ndm_list =
match ndm_list with
| [] -> ()
| x :: ndm_list ->
print_nd_and_mark fmt x; print_nd_and_mark_list fmt ndm_list
let print_nodes fmt nodes =
let print n = Format.fprintf fmt "%a " (!Db.Pdg.pretty_node true) n in
List.iter print nodes
let print_node_mark fmt n z m =
Format.fprintf fmt "(%a ,%a)"
(PdgTypes.Node.pretty_with_part) (n, z) SlicingMarks.pretty_mark m
let print_sel_marks_list fmt to_select =
let print_sel (s, m) = match s with
| PdgMarks.SelNode (n, z) -> print_node_mark fmt n z m
| PdgMarks.SelIn l ->
Format.fprintf fmt "(UndefIn %a:%a)"
Locations.Zone.pretty l SlicingMarks.pretty_mark m
in match to_select with [] -> Format.fprintf fmt "<empty>"
| _ -> List.iter print_sel to_select
let _print_ndm fmt (nodes, ndm_list) =
Format.fprintf fmt "(%a,%a)" print_nodes nodes
print_nd_and_mark_list ndm_list
let print_f_crit fmt f_crit =
match f_crit with
| SlicingInternals.CuTop m -> Format.fprintf fmt "top(%a)" SlicingMarks.pretty_mark m
| SlicingInternals.CuSelect to_select -> print_sel_marks_list fmt to_select
let print_crit fmt crit =
match crit with
| SlicingInternals.CrFct fct_crit ->
let fct = fct_crit.SlicingInternals.cf_fct in
let name = SlicingMacros.f_name fct in
Format.fprintf fmt "[%s = " name;
let _ = match fct_crit.SlicingInternals.cf_info with
| SlicingInternals.CcUserMark info -> print_f_crit fmt info
| SlicingInternals.CcMissingInputs (call, _input_marks, more_inputs)
-> Format.fprintf fmt "missing_inputs for call %d (%s)"
call.Cil_types.sid
(if more_inputs then "more_inputs" else "marks only")
| SlicingInternals.CcMissingOutputs (call, _output_marks, more_outputs)
-> Format.fprintf fmt "missing_outputs for call %d (%s)"
call.Cil_types.sid
(if more_outputs then "more_outputs" else "marks only")
| SlicingInternals.CcChooseCall call
-> Format.fprintf fmt "choose_call for call %d" call.Cil_types.sid
| SlicingInternals.CcChangeCall (call,f)
-> let fname = match f with
| SlicingInternals.CallSlice ff -> SlicingMacros.ff_name ff
| SlicingInternals.CallSrc (Some fi) -> ("(src:"^( SlicingMacros.fi_name fi)^")")
| SlicingInternals.CallSrc None -> "(src)"
in Format.fprintf fmt "change_call for call %d -> %s"
call.Cil_types.sid fname
| SlicingInternals.CcPropagate nl ->
Format.fprintf fmt "propagate %a"
print_sel_marks_list nl
| SlicingInternals.CcExamineCalls _ -> Format.fprintf fmt "examine_calls"
in Format.fprintf fmt "]"
| SlicingInternals.CrAppli (SlicingInternals.CaCall fi) ->
let name = SlicingMacros.fi_name fi in
Format.fprintf fmt "[Appli : calls to %s]" name
| _ ->
SlicingParameters.not_yet_implemented "Printing this slicing criterion "
let print_list_crit fmt list_crit =
List.iter (print_crit fmt) list_crit
(*============================================================================*)
| null | https://raw.githubusercontent.com/Frama-C/Frama-C-snapshot/639a3647736bf8ac127d00ebe4c4c259f75f9b87/src/plugins/slicing/slicingActions.ml | ocaml | ************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
* This module deals with the action management.
* It consists of the definitions of the different kinds of actions,
* and the management of the action list.
*/*
*/*
============================================================================
* {2 Build}
* {3 How the elements will be selected}
* Build a description to tell that the associated nodes have to be marked
* with the given mark, and than the same one will be propagated through
* their dependencies. (see also {!build_node_and_dpds_selection})
* Only the control dependencies of the nodes will be marked
* Only the control dependencies of the nodes will be marked
* Only the control dependencies of the nodes will be marked
* build an action to apply the criteria to the persistent selection of the
* function. It means that it will be applied to all slices.
* build an action to apply the criteria to the given slice.
============================================================================
============================================================================ | This file is part of Frama - C.
Copyright ( C ) 2007 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
type select = SlicingTypes.sl_mark PdgMarks.select
type n_or_d_marks = (SlicingInternals.node_or_dpds * SlicingInternals.pdg_mark) list
let build_simple_node_selection ?(nd_marks=[]) mark =
(SlicingInternals.CwNode, mark)::nd_marks
let build_addr_dpds_selection ?(nd_marks=[]) mark =
(SlicingInternals.CwAddrDpds, mark)::nd_marks
let build_data_dpds_selection ?(nd_marks=[]) mark =
(SlicingInternals.CwDataDpds, mark)::nd_marks
let build_ctrl_dpds_selection ?(nd_marks=[]) mark =
(SlicingInternals.CwCtrlDpds, mark)::nd_marks
* Build a description to tell how the selected nodes and their
* dependencies will have to be marked
* ( see { ! type : SlicingTypes . Internals.node_or_dpds } ) .
* This description depend on the mark that has been asked for .
* First of all , whatever the mark is , the node is selected as [ spare ] ,
* so that it will be visible , and so will its dependencies . Then ,
* if [ is_ctrl mark ] propagate a m1 control mark through the control dependencies
* and do a similar thing for [ addr ] and [ data ]
* dependencies will have to be marked
* (see {!type:SlicingTypes.Internals.node_or_dpds}).
* This description depend on the mark that has been asked for.
* First of all, whatever the mark is, the node is selected as [spare],
* so that it will be visible, and so will its dependencies. Then,
* if [is_ctrl mark] propagate a m1 control mark through the control dependencies
* and do a similar thing for [addr] and [data] *)
let build_node_and_dpds_selection ?(nd_marks=[]) mark =
let m_spare = SlicingMarks.mk_user_spare in
let nd_marks = build_simple_node_selection ~nd_marks:nd_marks m_spare in
let nd_marks =
if SlicingMarks.is_ctrl_mark mark
then
let m_ctrl = SlicingMarks.mk_user_mark ~ctrl:true ~data:false ~addr:false in
build_ctrl_dpds_selection ~nd_marks:nd_marks m_ctrl
else nd_marks
in
let nd_marks =
if SlicingMarks.is_addr_mark mark
then
let m_addr = SlicingMarks.mk_user_mark ~ctrl:false ~data:false ~addr:true in
build_addr_dpds_selection ~nd_marks:nd_marks m_addr
else nd_marks
in
let nd_marks =
if SlicingMarks.is_data_mark mark
then
let m_data = SlicingMarks.mk_user_mark ~ctrl:false ~data:true ~addr:false in
build_data_dpds_selection ~nd_marks:nd_marks m_data
else nd_marks
in
nd_marks
* { 3 Translations to a mapping between marks and program elements }
let translate_crit_to_select pdg ?(to_select=[]) list_crit =
let translate acc (nodes, nd_mark) =
let add_pdg_mark acc (nd, mark) =
let add_nodes m acc nodes =
let add m acc nodepart =
PdgMarks.add_node_to_select acc nodepart m
in
List.fold_left (add m) acc nodes
in
let add_node_dpds dpd_mark f_dpds acc (node, _node_z_part) =
let nodes = f_dpds node in
add_nodes dpd_mark acc nodes
in
let acc = match nd with
| SlicingInternals.CwNode -> add_nodes mark acc nodes
| SlicingInternals.CwAddrDpds -> let f = PdgTypes.Pdg.get_x_direct_dpds PdgTypes.Dpd.Addr pdg in
List.fold_left (add_node_dpds mark f) acc nodes
| SlicingInternals.CwCtrlDpds -> let f = PdgTypes.Pdg.get_x_direct_dpds PdgTypes.Dpd.Ctrl pdg in
List.fold_left (add_node_dpds mark f) acc nodes
| SlicingInternals.CwDataDpds -> let f = PdgTypes.Pdg.get_x_direct_dpds PdgTypes.Dpd.Data pdg in
List.fold_left (add_node_dpds mark f) acc nodes
in acc
in List.fold_left add_pdg_mark acc nd_mark
in List.fold_left translate to_select list_crit
* { 3 Function criteria }
let mk_fct_crit fi crit =
SlicingInternals.CrFct { SlicingInternals.cf_fct = SlicingInternals.FctSrc fi ; SlicingInternals.cf_info = crit }
let mk_fct_user_crit fi crit = mk_fct_crit fi (SlicingInternals.CcUserMark crit)
let mk_crit_fct_top fi m = mk_fct_user_crit fi (SlicingInternals.CuTop m)
let mk_crit_fct_user_select fi select = mk_fct_user_crit fi (SlicingInternals.CuSelect select)
let mk_crit_prop_persit_marks fi node_marks =
mk_fct_crit fi (SlicingInternals.CcPropagate node_marks)
let mk_ff_crit ff crit =
SlicingInternals.CrFct { SlicingInternals.cf_fct = SlicingInternals.FctSliced ff ; SlicingInternals.cf_info = crit }
let mk_ff_user_select ff crit = mk_ff_crit ff (SlicingInternals.CcUserMark (SlicingInternals.CuSelect crit))
let mk_crit_choose_call ff call = mk_ff_crit ff (SlicingInternals.CcChooseCall call)
let mk_crit_change_call ff call f = mk_ff_crit ff (SlicingInternals.CcChangeCall (call, f))
let mk_crit_missing_inputs ff call (input_marks, more_inputs) =
mk_ff_crit ff (SlicingInternals.CcMissingInputs (call, input_marks, more_inputs))
let mk_crit_missing_outputs ff call (output_marks, more_outputs) =
mk_ff_crit ff (SlicingInternals.CcMissingOutputs (call, output_marks, more_outputs))
let mk_crit_examines_calls ff call_out_marks =
mk_ff_crit ff (SlicingInternals.CcExamineCalls call_out_marks)
let mk_appli_select_calls fi = SlicingInternals.CrAppli (SlicingInternals.CaCall fi)
* { 3 Shortcut functions for previous things }
let mk_crit_mark_calls fi_caller to_call mark =
let select = try
let caller = SlicingMacros.get_fi_kf fi_caller in
let pdg_caller = !Db.Pdg.get caller in
let call_stmts = !Db.Pdg.find_call_stmts ~caller to_call in
let stmt_mark stmt =
let stmt_ctrl_node = !Db.Pdg.find_call_ctrl_node pdg_caller stmt in
(PdgMarks.mk_select_node stmt_ctrl_node, mark)
in
let select = List.map stmt_mark call_stmts in
SlicingInternals.CuSelect select
with PdgTypes.Pdg.Top -> SlicingInternals.CuTop mark
in
mk_fct_user_crit fi_caller select
let mk_crit_add_output_marks ff select =
let pdg = SlicingMacros.get_ff_pdg ff in
let add acc ( out , m ) =
let nd_m = build_simple_node_selection m in
let node = out in
mk_mark_nodes pdg ~marks : acc [ node ] nd_m
in let select = List.fold_left add [ ] output_marks in
let pdg = SlicingMacros.get_ff_pdg ff in
let add acc (out, m) =
let nd_m = build_simple_node_selection m in
let node = out in
mk_mark_nodes pdg ~marks:acc [node] nd_m
in let select = List.fold_left add [] output_marks in
*)
mk_ff_user_select ff select
let =
let pdg = SlicingMacros.get_ff_pdg ff in
let nodes = ! Db . Pdg.find_all_outputs_nodes pdg in
let nd_m = build_simple_node_selection mark in
let select = mk_mark_nodes nodes nd_m in
mk_ff_user_crit ff select
let mk_crit_add_all_outputs_mark ff mark =
let pdg = SlicingMacros.get_ff_pdg ff in
let nodes = !Db.Pdg.find_all_outputs_nodes pdg in
let nd_m = build_simple_node_selection mark in
let select = mk_mark_nodes nodes nd_m in
mk_ff_user_crit ff select
*)
* { 2 Print }
let print_nd_and_mark f (nd, m) =
let str = match nd with
| SlicingInternals.CwNode -> ""
| SlicingInternals.CwAddrDpds -> "addr->"
| SlicingInternals.CwDataDpds -> "data->"
| SlicingInternals.CwCtrlDpds -> "ctrl->"
in Format.fprintf f "%s%a" str SlicingMarks.pretty_mark m
let rec print_nd_and_mark_list fmt ndm_list =
match ndm_list with
| [] -> ()
| x :: ndm_list ->
print_nd_and_mark fmt x; print_nd_and_mark_list fmt ndm_list
let print_nodes fmt nodes =
let print n = Format.fprintf fmt "%a " (!Db.Pdg.pretty_node true) n in
List.iter print nodes
let print_node_mark fmt n z m =
Format.fprintf fmt "(%a ,%a)"
(PdgTypes.Node.pretty_with_part) (n, z) SlicingMarks.pretty_mark m
let print_sel_marks_list fmt to_select =
let print_sel (s, m) = match s with
| PdgMarks.SelNode (n, z) -> print_node_mark fmt n z m
| PdgMarks.SelIn l ->
Format.fprintf fmt "(UndefIn %a:%a)"
Locations.Zone.pretty l SlicingMarks.pretty_mark m
in match to_select with [] -> Format.fprintf fmt "<empty>"
| _ -> List.iter print_sel to_select
let _print_ndm fmt (nodes, ndm_list) =
Format.fprintf fmt "(%a,%a)" print_nodes nodes
print_nd_and_mark_list ndm_list
let print_f_crit fmt f_crit =
match f_crit with
| SlicingInternals.CuTop m -> Format.fprintf fmt "top(%a)" SlicingMarks.pretty_mark m
| SlicingInternals.CuSelect to_select -> print_sel_marks_list fmt to_select
let print_crit fmt crit =
match crit with
| SlicingInternals.CrFct fct_crit ->
let fct = fct_crit.SlicingInternals.cf_fct in
let name = SlicingMacros.f_name fct in
Format.fprintf fmt "[%s = " name;
let _ = match fct_crit.SlicingInternals.cf_info with
| SlicingInternals.CcUserMark info -> print_f_crit fmt info
| SlicingInternals.CcMissingInputs (call, _input_marks, more_inputs)
-> Format.fprintf fmt "missing_inputs for call %d (%s)"
call.Cil_types.sid
(if more_inputs then "more_inputs" else "marks only")
| SlicingInternals.CcMissingOutputs (call, _output_marks, more_outputs)
-> Format.fprintf fmt "missing_outputs for call %d (%s)"
call.Cil_types.sid
(if more_outputs then "more_outputs" else "marks only")
| SlicingInternals.CcChooseCall call
-> Format.fprintf fmt "choose_call for call %d" call.Cil_types.sid
| SlicingInternals.CcChangeCall (call,f)
-> let fname = match f with
| SlicingInternals.CallSlice ff -> SlicingMacros.ff_name ff
| SlicingInternals.CallSrc (Some fi) -> ("(src:"^( SlicingMacros.fi_name fi)^")")
| SlicingInternals.CallSrc None -> "(src)"
in Format.fprintf fmt "change_call for call %d -> %s"
call.Cil_types.sid fname
| SlicingInternals.CcPropagate nl ->
Format.fprintf fmt "propagate %a"
print_sel_marks_list nl
| SlicingInternals.CcExamineCalls _ -> Format.fprintf fmt "examine_calls"
in Format.fprintf fmt "]"
| SlicingInternals.CrAppli (SlicingInternals.CaCall fi) ->
let name = SlicingMacros.fi_name fi in
Format.fprintf fmt "[Appli : calls to %s]" name
| _ ->
SlicingParameters.not_yet_implemented "Printing this slicing criterion "
let print_list_crit fmt list_crit =
List.iter (print_crit fmt) list_crit
|
96cc2da16e33278787ce0baa1d5947d234ad63211cabc9966f89c26eaac7d547 | processone/rtb | mod_xmpp_proxy65.erl | %%%-------------------------------------------------------------------
@author < >
( C ) 2002 - 2019 ProcessOne , SARL . All Rights Reserved .
%%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%%% you may not use this file except in compliance with the License.
%%% You may obtain a copy of the License at
%%%
%%% http://www.apache.org/licenses/LICENSE-2.0
%%%
%%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%%% See the License for the specific language governing permissions and
%%% limitations under the License.
%%%
%%%-------------------------------------------------------------------
-module(mod_xmpp_proxy65).
-compile([{parse_transform, lager_transform}]).
-behaviour(gen_server).
%% API
-export([recv/6, connect/6, activate/1, format_error/1]).
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-define(BUF_SIZE, 65536).
%%-define(BUF_SIZE, 8192).
SOCKS5 stuff
-define(VERSION_5, 5).
-define(AUTH_ANONYMOUS, 0).
-define(CMD_CONNECT, 1).
-define(ATYP_DOMAINNAME, 3).
-define(SUCCESS, 0).
-type sockmod() :: gen_tcp | ssl.
-type socket() :: gen_tcp:socket() | ssl:sslsocket().
-type proxy65_error() :: {socks5, atom()} | {sockmod(), atom()} |
crashed | shutdown | init_timeout |
activation_timeout.
-record(state, {host :: string(),
port :: inet:port_number(),
hash :: binary(),
sockmod :: sockmod(),
socket :: socket(),
opts :: [gen_tcp:option()],
timeout :: non_neg_integer(),
size :: non_neg_integer(),
owner :: pid(),
action :: send | recv}).
%%%===================================================================
%%% API
%%%===================================================================
%% Start a worker that connects to the proxy stream identified by the
%% 40-byte Hash (see connect/5) and downloads Size bytes.
recv(Host, Port, Hash, Size, ConnOpts, Timeout) ->
    start(recv, Host, Port, Hash, Size, ConnOpts, Timeout).
%% Start a worker that connects the sending side of the stream; it
%% waits until activate/1 is called before pushing Size bytes.
connect(Host, Port, Hash, Size, ConnOpts, Timeout) ->
    start(send, Host, Port, Hash, Size, ConnOpts, Timeout).
%% Tell a sender worker (the pid returned by connect/6) to begin
%% streaming data.
activate(Pid) ->
    gen_server:cast(Pid, activate).
-spec format_error(proxy65_error()) -> string().
%% Render a proxy65 failure reason as a human-readable diagnostic.
%% Unrecognized reasons are assumed to be transport errors and are
%% delegated to format_socket_error/1.
format_error(Reason) ->
    Detail =
        case Reason of
            {socks5, unexpected_response} -> "unexpected SOCKS5 response";
            crashed -> "connection has been crashed";
            shutdown -> "the system is shutting down";
            init_timeout -> "timed out during initialization";
            activation_timeout -> "timed out waiting for activation";
            _ -> format_socket_error(Reason)
        end,
    "Proxy65 failure: " ++ Detail.
%%%===================================================================
%%% gen_server callbacks
%%%===================================================================
%% Initialize the worker: monitor the owner so we can stop if it dies,
%% and arm the gen_server inactivity timeout (handled in
%% handle_info(timeout, _)) so an unconnected or never-activated worker
%% eventually gives up and reports a timeout.
init([Action, Owner, Host, Port, Hash, Size, ConnOpts, Timeout]) ->
    erlang:monitor(process, Owner),
    {ok, #state{host = Host, port = Port, hash = Hash,
                action = Action, size = Size, opts = ConnOpts,
                owner = Owner, timeout = Timeout}, Timeout}.
%% Perform the SOCKS5 connection to the proxy.  The receiving side
%% replies to the caller early (gen_server:reply/2) and then reads the
%% whole stream inside this callback before stopping; the sending side
%% parks in 'noreply' until activate/1 is cast.
handle_call(connect, From, #state{host = Host, port = Port,
                                  opts = ConnOpts, action = Action,
                                  hash = Hash, timeout = Timeout,
                                  size = Size} = State) ->
    case connect(Host, Port, Hash, ConnOpts, Timeout) of
        {ok, SockMod, Sock} ->
            State1 = State#state{sockmod = SockMod, socket = Sock},
            %% Reply before the (potentially long) transfer so the
            %% caller is not blocked for the duration of the download.
            gen_server:reply(From, {ok, self()}),
            case Action of
                recv ->
                    Result = recv(SockMod, Sock, Size, Timeout),
                    reply(State1, Result),
                    {stop, normal, State1};
                send ->
                    noreply(State1)
            end;
        {error, _} = Err ->
            {stop, normal, Err, State}
    end;
handle_call(Request, _From, State) ->
    lager:warning("Unexpected call: ~p", [Request]),
    noreply(State).
%% 'activate' is cast by the owner once the transfer may begin: push
%% Size bytes of random data through the established socket, report the
%% outcome to the owner, then stop.  The same random chunk is reused
%% for every buffer-sized write (see send/4).
handle_cast(activate, #state{sockmod = SockMod, socket = Sock,
                             size = Size} = State) ->
    Chunk = p1_rand:bytes(?BUF_SIZE),
    Result = send(SockMod, Sock, Size, Chunk),
    reply(State, Result),
    {stop, normal, State};
handle_cast(Msg, State) ->
    lager:warning("Unexpected cast: ~p", [Msg]),
    noreply(State).
%% The monitored owner died: there is no one left to report to, so
%% stop quietly.
handle_info({'DOWN', _, _, _, _}, State) ->
    {stop, normal, State};
%% The gen_server inactivity timeout (armed in init/1 and noreply/1)
%% fired.  Classify the failure by how far we got: no socket yet means
%% the caller never drove the connect, otherwise we were waiting for
%% activate/1.
handle_info(timeout, State) ->
    Reason = case State#state.socket of
                 undefined -> init_timeout;
                 _ -> activation_timeout
             end,
    reply(State, {error, Reason}),
    {stop, normal, State};
handle_info(Info, State) ->
    lager:warning("Unexpected info: ~p", [Info]),
    noreply(State).
%% Normal termination: the result was already delivered via reply/2
%% on the success/failure paths, so nothing more to send.
terminate(normal, _) ->
    ok;
%% Abnormal termination still owes the owner a result message so it
%% does not wait forever.
terminate(shutdown, State) ->
    reply(State, {error, shutdown});
terminate(_, State) ->
    reply(State, {error, crashed}).
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
%%%===================================================================
Internal functions
%%%===================================================================
%% Spawn an unlinked worker and synchronously drive the connect phase.
%% NOTE(review): gen_server:call/3 exits (rather than returning an
%% error tuple) when the 2*Timeout deadline expires -- confirm callers
%% are prepared for that exit.
start(Action, Host, Port, Hash, Size, ConnOpts, Timeout) ->
    case gen_server:start(
           ?MODULE,
           [Action, self(), binary_to_list(Host), Port,
            Hash, Size, ConnOpts, Timeout], []) of
        {ok, Pid} ->
            gen_server:call(Pid, connect, 2*Timeout);
        {error, _} ->
            {error, crashed}
    end.
-spec format_socket_error({sockmod(), atom()}) -> string().
%% Render a transport-level {Module, Reason} error as text.  Clause
%% order matters: 'closed' and 'timeout' are handled generically for
%% both transports before the per-module fallbacks.
format_socket_error({_, closed}) ->
    "connection closed";
format_socket_error({_, timeout}) ->
    inet:format_error(etimedout);
format_socket_error({ssl, Reason}) ->
    ssl:format_error(Reason);
format_socket_error({gen_tcp, Reason}) ->
    Formatted = inet:format_error(Reason),
    case Formatted of
        "unknown POSIX error" -> atom_to_list(Reason);
        _ -> Formatted
    end.
%% Deliver the final transfer result to the owner as an async message.
reply(#state{owner = OwnerPid, action = Action}, Result) ->
    OwnerPid ! {proxy65_result, Action, Result}.
%% Stay in the gen_server loop, re-arming the inactivity timeout.
noreply(State) ->
    {noreply, State, State#state.timeout}.
%% Open a TCP connection to the proxy and run the SOCKS5 handshake
%% used by XEP-0065: anonymous authentication, then a CONNECT request
%% whose ATYP_DOMAINNAME "address" is the 40-byte Hash identifying the
%% stream.  Returns {ok, SockMod, Sock}, {error, {SockMod, Reason}} on
%% transport failures, or {error, {socks5, unexpected_response}} when
%% the peer answers with well-sized but unexpected bytes.
%%
%% Fixes over the previous version: all socket operations now go
%% through SockMod (the old code mixed direct gen_tcp calls, which
%% only worked because SockMod happened to be gen_tcp), and the
%% handshake reads carry the caller's Timeout -- previously recv/2
%% waited forever, leaving the worker stuck if the proxy stalled.
connect(Host, Port, Hash, ConnOpts, Timeout) ->
    Opts = opts(ConnOpts, Timeout),
    SockMod = gen_tcp,
    try
        {ok, Sock} = SockMod:connect(Host, Port, Opts, Timeout),
        Init = <<?VERSION_5, 1, ?AUTH_ANONYMOUS>>,
        InitAck = <<?VERSION_5, ?AUTH_ANONYMOUS>>,
        Req = <<?VERSION_5, ?CMD_CONNECT, 0,
                ?ATYP_DOMAINNAME, 40, Hash:40/binary, 0, 0>>,
        Resp = <<?VERSION_5, ?SUCCESS, 0, ?ATYP_DOMAINNAME,
                 40, Hash:40/binary, 0, 0>>,
        ok = SockMod:send(Sock, Init),
        {ok, InitAck} = SockMod:recv(Sock, size(InitAck), Timeout),
        ok = SockMod:send(Sock, Req),
        {ok, Resp} = SockMod:recv(Sock, size(Resp), Timeout),
        {ok, SockMod, Sock}
    catch _:{badmatch, {error, Reason}} ->
            {error, {SockMod, Reason}};
          _:{badmatch, {ok, _}} ->
            {error, {socks5, unexpected_response}}
    end.
%% Stream Size bytes in ?BUF_SIZE-sized writes, reusing the same
%% pre-generated random Chunk for every iteration (truncated for the
%% final partial write).  Between writes the mailbox is polled (zero
%% timeout) for a 'DOWN' of the monitored owner; if it died, the
%% remaining size is forced to 0 so the loop terminates cleanly.
send(_SockMod, _Sock, 0, _Chunk) ->
    ok;
send(SockMod, Sock, Size, Chunk) ->
    Data = if Size >= ?BUF_SIZE ->
                   Chunk;
              true ->
                   binary:part(Chunk, 0, Size)
           end,
    case SockMod:send(Sock, Data) of
        ok ->
            NewSize = receive {'DOWN', _, _, _, _} -> 0
                      after 0 -> Size - min(Size, ?BUF_SIZE)
                      end,
            send(SockMod, Sock, NewSize, Chunk);
        {error, Reason} ->
            {error, {SockMod, Reason}}
    end.
%% Read exactly Size bytes in at most ?BUF_SIZE-sized chunks, each read
%% bounded by Timeout.  Like send/4, the mailbox is polled between
%% reads for a 'DOWN' of the monitored owner, aborting the loop (by
%% zeroing the remaining size) if the owner died.
recv(_SockMod, _Sock, 0, _Timeout) ->
    ok;
recv(SockMod, Sock, Size, Timeout) ->
    ChunkSize = min(Size, ?BUF_SIZE),
    case SockMod:recv(Sock, ChunkSize, Timeout) of
        {ok, Data} ->
            NewSize = receive {'DOWN', _, _, _, _} -> 0
                      after 0 -> Size-size(Data)
                      end,
            recv(SockMod, Sock, NewSize, Timeout);
        {error, Reason} ->
            {error, {SockMod, Reason}}
    end.
%% Socket options for the proxy connection: binary raw stream, write
%% timeouts, transfer-sized kernel buffers, passive mode.  Caller
%% options are appended after the defaults.
opts(ExtraOpts, Timeout) ->
    Defaults = [binary,
                {packet, 0},
                {send_timeout, Timeout},
                {send_timeout_close, true},
                {recbuf, ?BUF_SIZE},
                {sndbuf, ?BUF_SIZE},
                {active, false}],
    Defaults ++ ExtraOpts.
| null | https://raw.githubusercontent.com/processone/rtb/ede10174af8e135bc78df11ad7c68d242e6e6534/src/mod_xmpp_proxy65.erl | erlang | -------------------------------------------------------------------
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-------------------------------------------------------------------
API
gen_server callbacks
-define(BUF_SIZE, 8192).
===================================================================
API
===================================================================
===================================================================
gen_server callbacks
===================================================================
===================================================================
=================================================================== | @author < >
( C ) 2002 - 2019 ProcessOne , SARL . All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(mod_xmpp_proxy65).
-compile([{parse_transform, lager_transform}]).
-behaviour(gen_server).
-export([recv/6, connect/6, activate/1, format_error/1]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-define(BUF_SIZE, 65536).
SOCKS5 stuff
-define(VERSION_5, 5).
-define(AUTH_ANONYMOUS, 0).
-define(CMD_CONNECT, 1).
-define(ATYP_DOMAINNAME, 3).
-define(SUCCESS, 0).
-type sockmod() :: gen_tcp | ssl.
-type socket() :: gen_tcp:socket() | ssl:sslsocket().
-type proxy65_error() :: {socks5, atom()} | {sockmod(), atom()} |
crashed | shutdown | init_timeout |
activation_timeout.
-record(state, {host :: string(),
port :: inet:port_number(),
hash :: binary(),
sockmod :: sockmod(),
socket :: socket(),
opts :: [gen_tcp:option()],
timeout :: non_neg_integer(),
size :: non_neg_integer(),
owner :: pid(),
action :: send | recv}).
recv(Host, Port, Hash, Size, ConnOpts, Timeout) ->
start(recv, Host, Port, Hash, Size, ConnOpts, Timeout).
connect(Host, Port, Hash, Size, ConnOpts, Timeout) ->
start(send, Host, Port, Hash, Size, ConnOpts, Timeout).
activate(Pid) ->
gen_server:cast(Pid, activate).
-spec format_error(proxy65_error()) -> string().
format_error({socks5, unexpected_response}) ->
"Proxy65 failure: unexpected SOCKS5 response";
format_error(crashed) ->
"Proxy65 failure: connection has been crashed";
format_error(shutdown) ->
"Proxy65 failure: the system is shutting down";
format_error(init_timeout) ->
"Proxy65 failure: timed out during initialization";
format_error(activation_timeout) ->
"Proxy65 failure: timed out waiting for activation";
format_error(Reason) ->
"Proxy65 failure: " ++ format_socket_error(Reason).
init([Action, Owner, Host, Port, Hash, Size, ConnOpts, Timeout]) ->
erlang:monitor(process, Owner),
{ok, #state{host = Host, port = Port, hash = Hash,
action = Action, size = Size, opts = ConnOpts,
owner = Owner, timeout = Timeout}, Timeout}.
handle_call(connect, From, #state{host = Host, port = Port,
opts = ConnOpts, action = Action,
hash = Hash, timeout = Timeout,
size = Size} = State) ->
case connect(Host, Port, Hash, ConnOpts, Timeout) of
{ok, SockMod, Sock} ->
State1 = State#state{sockmod = SockMod, socket = Sock},
gen_server:reply(From, {ok, self()}),
case Action of
recv ->
Result = recv(SockMod, Sock, Size, Timeout),
reply(State1, Result),
{stop, normal, State1};
send ->
noreply(State1)
end;
{error, _} = Err ->
{stop, normal, Err, State}
end;
handle_call(Request, _From, State) ->
lager:warning("Unexpected call: ~p", [Request]),
noreply(State).
handle_cast(activate, #state{sockmod = SockMod, socket = Sock,
size = Size} = State) ->
Chunk = p1_rand:bytes(?BUF_SIZE),
Result = send(SockMod, Sock, Size, Chunk),
reply(State, Result),
{stop, normal, State};
handle_cast(Msg, State) ->
lager:warning("Unexpected cast: ~p", [Msg]),
noreply(State).
handle_info({'DOWN', _, _, _, _}, State) ->
{stop, normal, State};
handle_info(timeout, State) ->
Reason = case State#state.socket of
undefined -> init_timeout;
_ -> activation_timeout
end,
reply(State, {error, Reason}),
{stop, normal, State};
handle_info(Info, State) ->
lager:warning("Unexpected info: ~p", [Info]),
noreply(State).
terminate(normal, _) ->
ok;
terminate(shutdown, State) ->
reply(State, {error, shutdown});
terminate(_, State) ->
reply(State, {error, crashed}).
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
Internal functions
start(Action, Host, Port, Hash, Size, ConnOpts, Timeout) ->
case gen_server:start(
?MODULE,
[Action, self(), binary_to_list(Host), Port,
Hash, Size, ConnOpts, Timeout], []) of
{ok, Pid} ->
gen_server:call(Pid, connect, 2*Timeout);
{error, _} ->
{error, crashed}
end.
-spec format_socket_error({sockmod(), atom()}) -> string().
format_socket_error({_, closed}) ->
"connection closed";
format_socket_error({_, timeout}) ->
inet:format_error(etimedout);
format_socket_error({ssl, Reason}) ->
ssl:format_error(Reason);
format_socket_error({gen_tcp, Reason}) ->
case inet:format_error(Reason) of
"unknown POSIX error" -> atom_to_list(Reason);
Txt -> Txt
end.
reply(#state{owner = Owner, action = Action}, Result) ->
Owner ! {proxy65_result, Action, Result}.
noreply(#state{timeout = Timeout} = State) ->
{noreply, State, Timeout}.
connect(Host, Port, Hash, ConnOpts, Timeout) ->
Opts = opts(ConnOpts, Timeout),
SockMod = gen_tcp,
try
{ok, Sock} = SockMod:connect(Host, Port, Opts, Timeout),
Init = <<?VERSION_5, 1, ?AUTH_ANONYMOUS>>,
InitAck = <<?VERSION_5, ?AUTH_ANONYMOUS>>,
Req = <<?VERSION_5, ?CMD_CONNECT, 0,
?ATYP_DOMAINNAME, 40, Hash:40/binary, 0, 0>>,
Resp = <<?VERSION_5, ?SUCCESS, 0, ?ATYP_DOMAINNAME,
40, Hash:40/binary, 0, 0>>,
ok = SockMod:send(Sock, Init),
{ok, InitAck} = gen_tcp:recv(Sock, size(InitAck)),
ok = gen_tcp:send(Sock, Req),
{ok, Resp} = gen_tcp:recv(Sock, size(Resp)),
{ok, SockMod, Sock}
catch _:{badmatch, {error, Reason}} ->
{error, {SockMod, Reason}};
_:{badmatch, {ok, _}} ->
{error, {socks5, unexpected_response}}
end.
send(_SockMod, _Sock, 0, _Chunk) ->
ok;
send(SockMod, Sock, Size, Chunk) ->
Data = if Size >= ?BUF_SIZE ->
Chunk;
true ->
binary:part(Chunk, 0, Size)
end,
case SockMod:send(Sock, Data) of
ok ->
NewSize = receive {'DOWN', _, _, _, _} -> 0
after 0 -> Size - min(Size, ?BUF_SIZE)
end,
send(SockMod, Sock, NewSize, Chunk);
{error, Reason} ->
{error, {SockMod, Reason}}
end.
recv(_SockMod, _Sock, 0, _Timeout) ->
ok;
recv(SockMod, Sock, Size, Timeout) ->
ChunkSize = min(Size, ?BUF_SIZE),
case SockMod:recv(Sock, ChunkSize, Timeout) of
{ok, Data} ->
NewSize = receive {'DOWN', _, _, _, _} -> 0
after 0 -> Size-size(Data)
end,
recv(SockMod, Sock, NewSize, Timeout);
{error, Reason} ->
{error, {SockMod, Reason}}
end.
opts(Opts, Timeout) ->
[binary,
{packet, 0},
{send_timeout, Timeout},
{send_timeout_close, true},
{recbuf, ?BUF_SIZE},
{sndbuf, ?BUF_SIZE},
{active, false}|Opts].
|
f139157fe43db56f002a4bbdbc1fb6560a8970cffe1e37606be692650fe18eeb | mflatt/shrubbery-rhombus-0 | set.rkt | #lang racket/base
(require (for-syntax racket/base
syntax/parse
"srcloc.rkt")
"expression.rkt"
"binding.rkt"
(submod "annotation.rkt" for-class)
"static-info.rkt"
"map-ref-set-key.rkt"
"call-result-key.rkt"
"parse.rkt")
(provide Set
(for-space rhombus/annotation Set)
(for-space rhombus/static-info Set)
make_set
(for-space rhombus/static-info make_set))
(module+ for-ref
(provide set?
set-ht
set))
(module+ for-info
(provide (for-syntax set-static-info)))
;; A set is a struct wrapping a hash table whose keys are the members
;; (the stored value is always #t).  The table is immutable for sets
;; built by Set and mutable for those built by make_set.
(struct set (ht))
;; Membership test; works on both immutable and mutable backing hashes.
(define (set-member? s v)
  (hash-ref (set-ht s) v #f))
;; Destructive add (in? true) or remove (in? false); only meaningful
;; for mutable sets (those created by make_set).
(define (set-member! s v in?)
  (if in?
      (hash-set! (set-ht s) v #t)
      (hash-remove! (set-ht s) v)))
(define (Set . vals)
  ;; Immutable constructor: one #t entry per supplied value in an
  ;; equal-based immutable hash.
  (set (for/hash ([v (in-list vals)])
         (values v #t))))
;; Static info attached to set expressions: how the expander should
;; read, mutate, and append sets via the map-ref/set/append protocols.
(define-for-syntax set-static-info
  #'((#%map-ref set-member?)
     (#%map-set! set-member!)
     (#%map-append set-append)))
;; The Set annotation: instance check is set?; the element-checking
;; form tests every key of the backing hash with the given predicate.
(define-annotation-syntax Set
  (annotation-constructor #'Set #'set? set-static-info
                          1
                          (lambda (arg-id predicate-stxs)
                            #`(for/and ([v (in-hash-keys (set-ht #,arg-id))])
                                (#,(car predicate-stxs) v)))
                          (lambda (static-infoss)
                            #`())))
;; NOTE(review): this call-result uses set-ref while set-static-info
;; uses set-member?; today the two behave identically -- confirm the
;; asymmetry is intentional.
(define-static-info-syntax Set
  (#%call-result ((#%map-ref set-ref))))
(define (make_set . vals)
  ;; Mutable variant of Set: same membership semantics, but backed by
  ;; a mutable hash so set-member! can update it in place.
  (define table (make-hash))
  (for-each (lambda (v) (hash-set! table v #t)) vals)
  (set table))
;; Results of make_set support both lookup and destructive update.
(define-static-info-syntax make_set
  (#%call-result ((#%map-ref set-member?)
                  (#%map-set! set-member!))))
;; Lookup used by the Set call-result static info; #f for absent members.
(define (set-ref st key)
  (hash-ref (set-ht st) key #f))
;; macro to optimize to an inline functional update
(define-syntax (set-append stx)
  (syntax-parse stx
    #:literals (Set)
    [(_ set1 set2)
     ;; When the second operand is literally a one-element (Set v)
     ;; construction, fuse the append into a single hash-set instead
     ;; of building the intermediate set and merging it.
     (syntax-parse (unwrap-static-infos #'set2)
       #:literals (Set)
       [(Set v)
        #'(set (hash-set (set-ht set1) v #t))]
       [_
        #'(set-append/proc set1 set2)])]))
;; Functional union: fold the members of set2 into set1's hash.
(define (set-append/proc set1 set2)
  (define merged
    (for/fold ([ht (set-ht set1)])
              ([k (in-hash-keys (set-ht set2))])
      (hash-set ht k #t)))
  (set merged))
| null | https://raw.githubusercontent.com/mflatt/shrubbery-rhombus-0/018867f02041c92369f3833efb5bc343982d9362/rhombus/private/set.rkt | racket | macro to optimize to an inline functional update | #lang racket/base
(require (for-syntax racket/base
syntax/parse
"srcloc.rkt")
"expression.rkt"
"binding.rkt"
(submod "annotation.rkt" for-class)
"static-info.rkt"
"map-ref-set-key.rkt"
"call-result-key.rkt"
"parse.rkt")
(provide Set
(for-space rhombus/annotation Set)
(for-space rhombus/static-info Set)
make_set
(for-space rhombus/static-info make_set))
(module+ for-ref
(provide set?
set-ht
set))
(module+ for-info
(provide (for-syntax set-static-info)))
(struct set (ht))
(define (set-member? s v)
(hash-ref (set-ht s) v #f))
(define (set-member! s v in?)
(if in?
(hash-set! (set-ht s) v #t)
(hash-remove! (set-ht s) v)))
(define (Set . vals)
(define base-ht (hash))
(set (for/fold ([ht base-ht]) ([val (in-list vals)])
(hash-set ht val #t))))
(define-for-syntax set-static-info
#'((#%map-ref set-member?)
(#%map-set! set-member!)
(#%map-append set-append)))
(define-annotation-syntax Set
(annotation-constructor #'Set #'set? set-static-info
1
(lambda (arg-id predicate-stxs)
#`(for/and ([v (in-hash-keys (set-ht #,arg-id))])
(#,(car predicate-stxs) v)))
(lambda (static-infoss)
#`())))
(define-static-info-syntax Set
(#%call-result ((#%map-ref set-ref))))
(define (make_set . vals)
(define ht (make-hash))
(for ([v (in-list vals)])
(hash-set! ht v #t))
(set ht))
(define-static-info-syntax make_set
(#%call-result ((#%map-ref set-member?)
(#%map-set! set-member!))))
(define (set-ref s v)
(hash-ref (set-ht s) v #f))
(define-syntax (set-append stx)
(syntax-parse stx
#:literals (Set)
[(_ set1 set2)
(syntax-parse (unwrap-static-infos #'set2)
#:literals (Set)
[(Set v)
#'(set (hash-set (set-ht set1) v #t))]
[_
#'(set-append/proc set1 set2)])]))
(define (set-append/proc set1 set2)
(set (for/fold ([ht (set-ht set1)]) ([k (in-hash-keys (set-ht set2))])
(hash-set ht k #t))))
|
e9151f395efeb4a67582d944c2f59fe7211db7787d9d39462452423e5e3a7a7a | depressed-pho/HsOpenSSL | SSL.hs | # LANGUAGE ForeignFunctionInterface #
module OpenSSL.SSL
( loadErrorStrings
, addAllAlgorithms
, libraryInit
)
where
foreign import ccall unsafe "SSL_load_error_strings"
loadErrorStrings :: IO ()
foreign import ccall unsafe "HsOpenSSL_OpenSSL_add_all_algorithms"
addAllAlgorithms :: IO ()
foreign import ccall unsafe "SSL_library_init"
libraryInit :: IO ()
| null | https://raw.githubusercontent.com/depressed-pho/HsOpenSSL/9e6a2be8298a9cbcffdfff55eab90e1e497628c3/OpenSSL/SSL.hs | haskell | # LANGUAGE ForeignFunctionInterface #
module OpenSSL.SSL
( loadErrorStrings
, addAllAlgorithms
, libraryInit
)
where
foreign import ccall unsafe "SSL_load_error_strings"
loadErrorStrings :: IO ()
foreign import ccall unsafe "HsOpenSSL_OpenSSL_add_all_algorithms"
addAllAlgorithms :: IO ()
foreign import ccall unsafe "SSL_library_init"
libraryInit :: IO ()
| |
1880ff44663a81b462d20c591e2a7815f6b65689f23a8215a9e1fac55367b925 | willf/lisp_dmap | micro-dmap.lisp | (in-package "DMAP")
;;------------------------------------------------------------------------------
;;
;; File: MICRO-DMAP.LISP
;; Created: 10/19/94
Author :
;;
;; Description: Direct Memory Access Parsing.
based on various versions of DMAP by .
;;
;;------------------------------------------------------------------------------
;;------------------------------------------------------------------------------
;; Packages
;;------------------------------------------------------------------------------
(eval-when (load eval compile)
(unless (find-package :dmap)
(make-package :dmap)))
(in-package :dmap)
(use-package :tables)
(use-package :frames)
(export '(add-phrasal-pattern def-phrase def-phrases
parse reset-parser
clear-predictions
call-backs))
;;------------------------------------------------------------------------------
;; Data structure for predictions. These are stored in tables keyed on the
" target " of their first phrasal pattern element
;;------------------------------------------------------------------------------
;; A prediction records a partially matched phrasal pattern:
;;   base            -- concept that will be referenced when the
;;                      pattern completes
;;   phrasal-pattern -- the pattern elements still to be seen
;;   start           -- text position where matching began
;;                      (nil until the prediction first advances)
;;   next            -- text position the next element must start at
;;                      (nil for anytime predictions, which may begin
;;                      anywhere)
;;   slots           -- (role filler) pairs collected so far
(defclass prediction ()
  ((base :initarg :base :initform nil :accessor base)
   (phrasal-pattern :initarg :phrasal-pattern :initform nil :accessor phrasal-pattern)
   (start :initarg :start :initform nil :accessor start)
   (next :initarg :next :initform nil :accessor next)
   (slots :initarg :slots :initform nil :accessor slots)))
;; Keyword constructor mirroring the slot initargs.
(defun make-prediction (&key base phrasal-pattern start next slots)
  (make-instance 'prediction
    :base base :phrasal-pattern phrasal-pattern :start start :next next :slots slots))
;; Two prediction indexes, keyed by the "target" of the next pattern
;; element: anytime predictions come from statically defined phrasal
;; patterns; dynamic ones are created during parsing and are cleared
;; by reset-parser.
(eval-when (:compile-toplevel :load-toplevel :execute)
  (tables:deftable anytime-predictions-on)
  (tables:deftable dynamic-predictions-on))
(defun add-phrasal-pattern (base phrasal-pattern)
  "Index PHRASAL-PATTERN for BASE as an anytime (static) prediction.
Returns the pattern, or NIL for the degenerate self-referencing
pattern (BASE), which is silently rejected."
  (cond ((and (eql base (first phrasal-pattern))
              (null (rest phrasal-pattern)))
         nil)
        (t
         (index-anytime-prediction
          (make-prediction :base base :phrasal-pattern phrasal-pattern))
         phrasal-pattern)))
;; Define one phrasal pattern for BASE.  The degenerate pattern (BASE)
;; is rejected at macroexpansion time with an error.
(defmacro def-phrase (base &rest phrasal-pattern)
  (if (and (eql base (car phrasal-pattern)) (null (cdr phrasal-pattern)))
      (error "~S can't reference itself" base)
      `(progn (add-phrasal-pattern ',base ',phrasal-pattern)
              ',phrasal-pattern)))
;; Define several phrasal patterns for the same BASE.  Unlike
;; DEF-PHRASE there is no expansion-time self-reference error;
;; add-phrasal-pattern silently skips such patterns at runtime.
(defmacro def-phrases (base &rest phrasal-patterns)
  `(loop for phrasal-pattern in ',phrasal-patterns doing
         (add-phrasal-pattern ',base phrasal-pattern)))
(defun index-anytime-prediction (prediction)
  "Put the phrasal pattern/prediction in the table for its target."
  (push prediction (anytime-predictions-on (prediction-target prediction))))
(defun index-dynamic-prediction (prediction)
  "Put the phrasal pattern/prediction in the table for its target."
  (push prediction (dynamic-predictions-on (prediction-target prediction))))
;; All predictions indexed under a target; anytime predictions are
;; consulted before dynamic ones (append order).
(defun predictions-on (index)
  (append (anytime-predictions-on index)
          (dynamic-predictions-on index)))
;; Clear one or both prediction tables.  :dynamic (the default) is
;; what reset-parser uses between texts; :anytime discards the
;; statically defined patterns as well.
(defun clear-predictions (&optional (which :dynamic))
  (ecase which
    (:dynamic (clear-table (dynamic-predictions-on)))
    (:anytime (clear-table (anytime-predictions-on)))
    (:all (clear-table (dynamic-predictions-on))
          (clear-table (anytime-predictions-on)))))
;;------------------------------------------------------------------------------
;; Misc. data structures.
;;------------------------------------------------------------------------------
(defvar *dmap-pos* 0) ;; current text position; incremented before each word
;; Call backs are ad-hoc functions run when a concept (or one of its
;; specializations) is referenced.  Each function takes three
;; parameters: the item referenced, the start position in the text,
;; and the end position in the text.
(eval-when (:compile-toplevel :load-toplevel :execute)
  (tables:deftable call-backs))
;;------------------------------------------------------------------------------
;; To parse is to reference every word in the text, looking for predictions
;; on the words.
;;------------------------------------------------------------------------------
(defun parse (sent)
  "Reference each word of SENT in order, advancing *DMAP-POS* as we go."
  (loop for word in sent
        do (incf *dmap-pos*)
           (reference word *dmap-pos* *dmap-pos*)))
;; Referencing ITEM notifies, for every abstraction of ITEM, first the
;; predictions indexed under that abstraction and then any registered
;; callbacks.  Completed patterns recursively reference their base
;; concept (see advance-prediction).
(defun reference (item start end)
  (dolist (abst (all-abstractions item))
    (dolist (prediction (predictions-on abst))
      (advance-prediction prediction item start end))
    (dolist (fn (call-backs abst))
      (funcall fn item start end))))
(defun advance-prediction (prediction item start end)
  "Advancing a phrasal pattern/prediction means:
   if the predicted phrasal pattern has been completely seen, to reference
   the base of the prediction with the slots that have been collected;
   otherwise, to create a new prediction for the next item in the
   prediction phrasal pattern."
  ;; A dynamic prediction only advances when ITEM starts exactly at
  ;; the expected position (next); anytime predictions (next = nil)
  ;; may begin matching anywhere.
  (when (or (null (next prediction))
            (= (next prediction) start))
    (let ((base (base prediction))
          (phrasal-pattern (cdr (phrasal-pattern prediction)))
          ;; The span starts where the prediction first matched, or at
          ;; the current item for a fresh anytime prediction.
          (start (or (start prediction) start))
          (slots (extend-slots prediction item)))
      (if (null phrasal-pattern)
          ;; Pattern complete: reference the instantiated frame over
          ;; the whole matched span.
          (reference (find-frame base slots) start end)
          ;; Otherwise predict the remainder, anchored to the next
          ;; text position.
          (index-dynamic-prediction
           (make-prediction :base base :phrasal-pattern phrasal-pattern :start start :next (1+ *dmap-pos*)
                            :slots slots))))))
;; If the element just matched was a role specifier, record the
;; (role item-name) pair -- unless ITEM is merely an abstraction of
;; the expected filler, in which case it carries no new information
;; and the slots are left unchanged.
(defun extend-slots (prediction item)
  (let ((spec (first (phrasal-pattern prediction)))
        (slots (slots prediction)))
    (if (role-specifier-p spec)
        (if (abstp item (prediction-target prediction))
            slots
            (cons (list (role-specifier spec) (->name item)) slots))
        slots)))
(defun prediction-target (prediction)
  "The target of a phrasal pattern is based on the first item in the
   phrasal pattern yet to be seen.
   If that item is a role-specifier, then the target is the
   inherited filler of its role;
   Otherwise, it is just the item itself."
  (let ((spec (first (phrasal-pattern prediction))))
    (if (role-specifier-p spec)
        ;; Look the role's filler up on the base frame; a missing role
        ;; is a pattern-definition error, reported immediately.
        (let ((base (base prediction)))
          (or (inherited-attribute-value (frame-of base) (role-specifier spec))
              (error "~S not a role in ~S" (first spec) base)))
        spec)))
(defun role-specifier-p (item)
  "True iff ITEM marks a slot role in a phrasal pattern (a keyword)."
  (keywordp item))
(defun role-specifier (item)
  "Return the role name denoted by the role-specifier ITEM."
  item)
;;------------------------------------------------------------------------------
;; Resetting the parser.
;;------------------------------------------------------------------------------
(defun reset-parser ()
  "Restart parsing: drop all dynamic predictions and rewind the
text-position counter.  Returns T."
  (clear-predictions :dynamic)
  (setq *dmap-pos* 0)
  t)
| null | https://raw.githubusercontent.com/willf/lisp_dmap/f3befb3fe7b409cd5127618dd3ca303a32cf59e8/Code/micro-dmap.lisp | lisp | ------------------------------------------------------------------------------
File: MICRO-DMAP.LISP
Created: 10/19/94
Description: Direct Memory Access Parsing.
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Packages
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Data structure for predictions. These are stored in tables keyed on the
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Misc. data structures.
------------------------------------------------------------------------------
global text position
parameters: the item referenced, the start position in the text, and
the end position in the text.
------------------------------------------------------------------------------
To parse is to reference every word in the text, looking for predictions
on the words.
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Resetting the parser.
------------------------------------------------------------------------------
| (in-package "DMAP")
Author :
based on various versions of DMAP by .
(eval-when (load eval compile)
(unless (find-package :dmap)
(make-package :dmap)))
(in-package :dmap)
(use-package :tables)
(use-package :frames)
(export '(add-phrasal-pattern def-phrase def-phrases
parse reset-parser
clear-predictions
call-backs))
" target " of their first phrasal pattern element
(defclass prediction ()
((base :initarg :base :initform nil :accessor base)
(phrasal-pattern :initarg :phrasal-pattern :initform nil :accessor phrasal-pattern)
(start :initarg :start :initform nil :accessor start)
(next :initarg :next :initform nil :accessor next)
(slots :initarg :slots :initform nil :accessor slots)))
(defun make-prediction (&key base phrasal-pattern start next slots)
(make-instance 'prediction
:base base :phrasal-pattern phrasal-pattern :start start :next next :slots slots))
(eval-when (:compile-toplevel :load-toplevel :execute)
(tables:deftable anytime-predictions-on)
(tables:deftable dynamic-predictions-on))
(defun add-phrasal-pattern (base phrasal-pattern)
"Adds the phrasal pattern of base to the table of static predictions."
(if (and (eql base (first phrasal-pattern)) (null (rest phrasal-pattern)))
nil
(progn (index-anytime-prediction
(make-prediction :base base :phrasal-pattern phrasal-pattern))
phrasal-pattern)))
(defmacro def-phrase (base &rest phrasal-pattern)
(if (and (eql base (car phrasal-pattern)) (null (cdr phrasal-pattern)))
(error "~S can't reference itself" base)
`(progn (add-phrasal-pattern ',base ',phrasal-pattern)
',phrasal-pattern)))
(defmacro def-phrases (base &rest phrasal-patterns)
`(loop for phrasal-pattern in ',phrasal-patterns doing
(add-phrasal-pattern ',base phrasal-pattern)))
(defun index-anytime-prediction (prediction)
"Put the phrasal pattern/prediction in the table for its target."
(push prediction (anytime-predictions-on (prediction-target prediction))))
(defun index-dynamic-prediction (prediction)
"Put the phrasal pattern/prediction in the table for its target."
(push prediction (dynamic-predictions-on (prediction-target prediction))))
(defun predictions-on (index)
(append (anytime-predictions-on index)
(dynamic-predictions-on index)))
(defun clear-predictions (&optional (which :dynamic))
(ecase which
(:dynamic (clear-table (dynamic-predictions-on)))
(:anytime (clear-table (anytime-predictions-on)))
(:all (clear-table (dynamic-predictions-on))
(clear-table (anytime-predictions-on)))))
Call backs are ad - hoc functions run when a concept ( or one of its
specializations ) is referenced . Function should take three
(eval-when (:compile-toplevel :load-toplevel :execute)
(tables:deftable call-backs))
(defun parse (sent)
(dolist (w sent)
(setq *dmap-pos* (1+ *dmap-pos*))
(reference w *dmap-pos* *dmap-pos*)))
(defun reference (item start end)
(dolist (abst (all-abstractions item))
(dolist (prediction (predictions-on abst))
(advance-prediction prediction item start end))
(dolist (fn (call-backs abst))
(funcall fn item start end))))
(defun advance-prediction (prediction item start end)
"Advancing a phrasal pattern/prediction means:
if the predicted phrasal pattern has been completely seen, to reference
otherwise, to create a new prediction for the next item in the
prediction phrasal pattern."
(when (or (null (next prediction))
(= (next prediction) start))
(let ((base (base prediction))
(phrasal-pattern (cdr (phrasal-pattern prediction)))
(start (or (start prediction) start))
(slots (extend-slots prediction item)))
(if (null phrasal-pattern)
(reference (find-frame base slots) start end)
(index-dynamic-prediction
(make-prediction :base base :phrasal-pattern phrasal-pattern :start start :next (1+ *dmap-pos*)
:slots slots))))))
(defun extend-slots (prediction item)
(let ((spec (first (phrasal-pattern prediction)))
(slots (slots prediction)))
(if (role-specifier-p spec)
(if (abstp item (prediction-target prediction))
slots
(cons (list (role-specifier spec) (->name item)) slots))
slots)))
(defun prediction-target (prediction)
"The target of a phrasal pattern is based on the first item in the
phrasal pattern yet to be seen.
If that item is a role-specifier, then the target is the
Otherwise, it is just the item itself."
(let ((spec (first (phrasal-pattern prediction))))
(if (role-specifier-p spec)
(let ((base (base prediction)))
(or (inherited-attribute-value (frame-of base) (role-specifier spec))
(error "~S not a role in ~S" (first spec) base)))
spec)))
(defun role-specifier-p (item) (keywordp item))
(defun role-specifier (item) item)
(defun reset-parser ()
(setf *dmap-pos* 0)
(clear-predictions :dynamic)
t)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.