_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
6d9e9f97cbfe9a6783e14e3ba35cced5a0b21d16c422a9ab9394e129206fd46d | startling/partly | Setup.hs | #!/usr/bin/env runhaskell
import Distribution.Simple
main = defaultMain | null | https://raw.githubusercontent.com/startling/partly/d23b27910084eede0828bc421d7c1923660826b2/Setup.hs | haskell | #!/usr/bin/env runhaskell
import Distribution.Simple
main = defaultMain | |
ae924c9ef1338e083e7edb736b7ed24844dccbb1e62a959e284f77775462fe44 | overtone/overtone | io.clj | (ns overtone.sc.cgens.io
(:use [overtone.sc.defaults]
[overtone.sc.defcgen]
[overtone.sc.ugens]
[overtone.helpers.seq]))
(defcgen sound-in
"read audio from hardware inputs"
[bus {:default 0 :doc "the channel (or array of channels) to read in. These start at 0, which will correspond to the first audio input." :modulatable false}]
"Reads audio from the input of your computer or soundcard. It is a wrapper UGen based on In, which offsets the index such that 0 will always correspond to the first input regardless of the number of inputs present.
N.B. On Intel based Macs, reading the built-in microphone or input may require creating an aggregate device in AudioMIDI Setup."
(:ar (cond
(integer? bus) (in:ar (+ (num-output-buses:ir) bus) 1)
(consecutive-ints? bus) (in:ar (+ (num-output-buses:ir) (first bus)) (count bus))
:else (in:ar (+ (num-output-buses:ir) bus)))))
(defcgen scaled-v-disk-in
"Stream in audio from a file to a buffer with a rate scaled depending on the buffer's sample-rate."
[num-channels {:doc "The number of channels that the buffer will be. This must be a fixed integer." :modulatable false}
buf-num {:default 0 :doc "The index of the buffer to use."}
rate {:default 1 :doc "Rate multiplier. 1.0 is the default rate for the specified buffer, 2.0 is one octave up, 0.5 is one octave down -1.0 is backwards normal rate ... etc. Interpolation is cubic."}
loop {:default 0.0 :doc "1 means true, 0 means false. This is modulateable."}
sendID {:default 0 :doc "send an osc message with this id and the file position each time the buffer is reloaded: ['/diskin', nodeID, sendID, frame] "}]
"Uses buf-rate-scale to determine the rate at which to stream data through the specified buffer for playback with v-disk-in."
(:ar (v-disk-in:ar num-channels buf-num (* rate (buf-rate-scale:kr buf-num)) loop sendID)))
| null | https://raw.githubusercontent.com/overtone/overtone/9afb513297662716860a4010bc76a0e73c65ca37/src/overtone/sc/cgens/io.clj | clojure | (ns overtone.sc.cgens.io
(:use [overtone.sc.defaults]
[overtone.sc.defcgen]
[overtone.sc.ugens]
[overtone.helpers.seq]))
(defcgen sound-in
"read audio from hardware inputs"
[bus {:default 0 :doc "the channel (or array of channels) to read in. These start at 0, which will correspond to the first audio input." :modulatable false}]
"Reads audio from the input of your computer or soundcard. It is a wrapper UGen based on In, which offsets the index such that 0 will always correspond to the first input regardless of the number of inputs present.
N.B. On Intel based Macs, reading the built-in microphone or input may require creating an aggregate device in AudioMIDI Setup."
(:ar (cond
(integer? bus) (in:ar (+ (num-output-buses:ir) bus) 1)
(consecutive-ints? bus) (in:ar (+ (num-output-buses:ir) (first bus)) (count bus))
:else (in:ar (+ (num-output-buses:ir) bus)))))
(defcgen scaled-v-disk-in
"Stream in audio from a file to a buffer with a rate scaled depending on the buffer's sample-rate."
[num-channels {:doc "The number of channels that the buffer will be. This must be a fixed integer." :modulatable false}
buf-num {:default 0 :doc "The index of the buffer to use."}
rate {:default 1 :doc "Rate multiplier. 1.0 is the default rate for the specified buffer, 2.0 is one octave up, 0.5 is one octave down -1.0 is backwards normal rate ... etc. Interpolation is cubic."}
loop {:default 0.0 :doc "1 means true, 0 means false. This is modulateable."}
sendID {:default 0 :doc "send an osc message with this id and the file position each time the buffer is reloaded: ['/diskin', nodeID, sendID, frame] "}]
"Uses buf-rate-scale to determine the rate at which to stream data through the specified buffer for playback with v-disk-in."
(:ar (v-disk-in:ar num-channels buf-num (* rate (buf-rate-scale:kr buf-num)) loop sendID)))
| |
bc7264fc9d132effe06dc8bad435ddfe7189992b9450a129b2aa703a5c941ef7 | spurious/chibi-scheme-mirror | srfi-38-tests.scm |
(cond-expand
(chibi (import (chibi) (chibi test) (srfi 1) (srfi 38)))
(chicken (use chicken test srfi-38)))
(test-begin "read/write")
(define (read-from-string str)
(call-with-input-string str
(lambda (in) (read/ss in))))
(define (write-to-string x . o)
(call-with-output-string
(lambda (out) (apply write/ss x out o))))
(define-syntax test-io
(syntax-rules ()
((test-io str-expr expr)
(let ((str str-expr)
(value expr))
(test str (write-to-string value))
(test str (write-to-string (read-from-string str)))))))
(define-syntax test-cyclic-io
(syntax-rules ()
((test-io str-expr expr)
(let ((str str-expr)
(value expr))
(test str (write-to-string value #t))
(test str (write-to-string (read-from-string str) #t))))))
(test-io "(1)" (list 1))
(test-io "(1 2)" (list 1 2))
(test-io "(1 . 2)" (cons 1 2))
(test-io "#0=(1 . #0#)" (circular-list 1))
(test-io "#0=(1 2 . #0#)" (circular-list 1 2))
(test-io "(1 . #0=(2 . #0#))" (cons 1 (circular-list 2)))
(test-io "#0=(1 #0# 3)"
(let ((x (list 1 2 3))) (set-car! (cdr x) x) x))
(test-io "(#0=(1 #0# 3))"
(let ((x (list 1 2 3))) (set-car! (cdr x) x) (list x)))
(test-io "(#0=(1 #0# 3) #0#)"
(let ((x (list 1 2 3))) (set-car! (cdr x) x) (list x x)))
(test-io "(#0=(1 . #0#) #1=(1 . #1#))"
(list (circular-list 1) (circular-list 1)))
(test-io "(#0=(1 . 2) #1=(1 . 2) #2=(3 . 4) #0# #1# #2#)"
(let ((a (cons 1 2)) (b (cons 1 2)) (c (cons 3 4)))
(list a b c a b c)))
(test-cyclic-io "((1 . 2) (1 . 2) (3 . 4) (1 . 2) (1 . 2) (3 . 4))"
(let ((a (cons 1 2)) (b (cons 1 2)) (c (cons 3 4)))
(list a b c a b c)))
(test-cyclic-io "#0=((1 . 2) (1 . 2) (3 . 4) . #0#)"
(let* ((a (cons 1 2))
(b (cons 1 2))
(c (cons 3 4))
(ls (list a b c)))
(set-cdr! (cddr ls) ls)
ls))
(test-io "#0=#(#0#)"
(let ((x (vector 1))) (vector-set! x 0 x) x))
(test-io "#0=#(1 #0#)"
(let ((x (vector 1 2))) (vector-set! x 1 x) x))
(test-io "#0=#(1 #0# 3)"
(let ((x (vector 1 2 3))) (vector-set! x 1 x) x))
(test-io "(#0=#(1 #0# 3))"
(let ((x (vector 1 2 3))) (vector-set! x 1 x) (list x)))
(test-io "#0=#(#0# 2 #0#)"
(let ((x (vector 1 2 3)))
(vector-set! x 0 x)
(vector-set! x 2 x)
x))
(test 255 (read-from-string "#xff"))
(test 99 (read-from-string "#d99"))
(test 63 (read-from-string "#o77"))
(test 3 (read-from-string "#b11"))
(test 5 (read-from-string "#e5.0"))
(test 5.0 (read-from-string "#i5"))
(test 15 (read-from-string "#e#xf"))
(test 15.0 (read-from-string "#i#xf"))
(test (expt 10 100) (read-from-string "#e1e100"))
(cond-expand
(chicken
(test-io "(#0=\"abc\" #0# #0#)"
(let ((str (string #\a #\b #\c))) (list str str str)))
(test "(\"abc\" \"abc\" \"abc\")"
(let ((str (string #\a #\b #\c)))
(call-with-output-string
(lambda (out)
(write/ss (list str str str) out ignore-strings: #t))))))
(else
))
(test-end)
| null | https://raw.githubusercontent.com/spurious/chibi-scheme-mirror/49168ab073f64a95c834b5f584a9aaea3469594d/tests/srfi-38-tests.scm | scheme |
(cond-expand
(chibi (import (chibi) (chibi test) (srfi 1) (srfi 38)))
(chicken (use chicken test srfi-38)))
(test-begin "read/write")
(define (read-from-string str)
(call-with-input-string str
(lambda (in) (read/ss in))))
(define (write-to-string x . o)
(call-with-output-string
(lambda (out) (apply write/ss x out o))))
(define-syntax test-io
(syntax-rules ()
((test-io str-expr expr)
(let ((str str-expr)
(value expr))
(test str (write-to-string value))
(test str (write-to-string (read-from-string str)))))))
(define-syntax test-cyclic-io
(syntax-rules ()
((test-io str-expr expr)
(let ((str str-expr)
(value expr))
(test str (write-to-string value #t))
(test str (write-to-string (read-from-string str) #t))))))
(test-io "(1)" (list 1))
(test-io "(1 2)" (list 1 2))
(test-io "(1 . 2)" (cons 1 2))
(test-io "#0=(1 . #0#)" (circular-list 1))
(test-io "#0=(1 2 . #0#)" (circular-list 1 2))
(test-io "(1 . #0=(2 . #0#))" (cons 1 (circular-list 2)))
(test-io "#0=(1 #0# 3)"
(let ((x (list 1 2 3))) (set-car! (cdr x) x) x))
(test-io "(#0=(1 #0# 3))"
(let ((x (list 1 2 3))) (set-car! (cdr x) x) (list x)))
(test-io "(#0=(1 #0# 3) #0#)"
(let ((x (list 1 2 3))) (set-car! (cdr x) x) (list x x)))
(test-io "(#0=(1 . #0#) #1=(1 . #1#))"
(list (circular-list 1) (circular-list 1)))
(test-io "(#0=(1 . 2) #1=(1 . 2) #2=(3 . 4) #0# #1# #2#)"
(let ((a (cons 1 2)) (b (cons 1 2)) (c (cons 3 4)))
(list a b c a b c)))
(test-cyclic-io "((1 . 2) (1 . 2) (3 . 4) (1 . 2) (1 . 2) (3 . 4))"
(let ((a (cons 1 2)) (b (cons 1 2)) (c (cons 3 4)))
(list a b c a b c)))
(test-cyclic-io "#0=((1 . 2) (1 . 2) (3 . 4) . #0#)"
(let* ((a (cons 1 2))
(b (cons 1 2))
(c (cons 3 4))
(ls (list a b c)))
(set-cdr! (cddr ls) ls)
ls))
(test-io "#0=#(#0#)"
(let ((x (vector 1))) (vector-set! x 0 x) x))
(test-io "#0=#(1 #0#)"
(let ((x (vector 1 2))) (vector-set! x 1 x) x))
(test-io "#0=#(1 #0# 3)"
(let ((x (vector 1 2 3))) (vector-set! x 1 x) x))
(test-io "(#0=#(1 #0# 3))"
(let ((x (vector 1 2 3))) (vector-set! x 1 x) (list x)))
(test-io "#0=#(#0# 2 #0#)"
(let ((x (vector 1 2 3)))
(vector-set! x 0 x)
(vector-set! x 2 x)
x))
(test 255 (read-from-string "#xff"))
(test 99 (read-from-string "#d99"))
(test 63 (read-from-string "#o77"))
(test 3 (read-from-string "#b11"))
(test 5 (read-from-string "#e5.0"))
(test 5.0 (read-from-string "#i5"))
(test 15 (read-from-string "#e#xf"))
(test 15.0 (read-from-string "#i#xf"))
(test (expt 10 100) (read-from-string "#e1e100"))
(cond-expand
(chicken
(test-io "(#0=\"abc\" #0# #0#)"
(let ((str (string #\a #\b #\c))) (list str str str)))
(test "(\"abc\" \"abc\" \"abc\")"
(let ((str (string #\a #\b #\c)))
(call-with-output-string
(lambda (out)
(write/ss (list str str str) out ignore-strings: #t))))))
(else
))
(test-end)
| |
024155506e03efd209bc1ee29f18917611fe53c5b85352b5fe3151e4adca6a4a | mokus0/junkbox | Underscore.hs | # LANGUAGE TemplateHaskell #
module Underscore where
import Language.Haskell.TH
import Language.Haskell.TH.Quote
underscore = QuasiQuoter
{ quoteExp = underscoreE
, quotePat = const [p| _ |]
, quoteType = const underscoreT
, quoteDec = undefined
}
underscoreE msg = [| error msg |]
underscoreT = do
n <- newName "_"
varT n
| null | https://raw.githubusercontent.com/mokus0/junkbox/151014bbef9db2b9205209df66c418d6d58b0d9e/Haskell/Template%20Haskell/Underscore.hs | haskell | # LANGUAGE TemplateHaskell #
module Underscore where
import Language.Haskell.TH
import Language.Haskell.TH.Quote
underscore = QuasiQuoter
{ quoteExp = underscoreE
, quotePat = const [p| _ |]
, quoteType = const underscoreT
, quoteDec = undefined
}
underscoreE msg = [| error msg |]
underscoreT = do
n <- newName "_"
varT n
| |
4b5f1c096f54b198b5c8ff91549e6bda2c2d91afadfd3c57f1b00e9785886159 | BillHallahan/G2 | M44.hs | {-@ LIQUID "--no-termination" @-}
module M44 (main) where
@ main : : Int - > Int - > { b : | b } @
main :: Int -> Int -> Bool
main k flag =
case while (cond k) (loop flag (if flag == 1 then 1 else 2) k) (0, 0) of
(i', j') -> if flag == 1 then j' == i' else True
while :: (a -> Bool) -> (a -> a) -> a -> a
while pred body x = if pred x then while pred body (body x) else x
{-@ cond :: Int -> (Int, Int) -> Bool @-}
cond :: Int -> (Int, Int) -> Bool
cond k (i, j) = i <= k
@ loop : : flag : Int
- > n : { n : Int | flag = = 1 < = > n = = 1 }
- > k : Int
- > pre : { t:(Int , Int ) | ( n = = 1 = > x_Tuple21 t = = x_Tuple22 t ) }
- > { t:(Int , Int ) | ( n = = 1 = > x_Tuple21 t = = x_Tuple22 t ) } @
-> n:{ n:Int | flag == 1 <=> n == 1 }
-> k:Int
-> pre:{ t:(Int, Int) | (n == 1 => x_Tuple21 t == x_Tuple22 t) }
-> { t:(Int, Int) | (n == 1 => x_Tuple21 t == x_Tuple22 t) } @-}
loop :: Int -> Int -> Int -> (Int, Int) -> (Int, Int)
loop flag n k (i, j) = (i + 1, j + n)
| null | https://raw.githubusercontent.com/BillHallahan/G2/f2584eb2ec211aed73b3ccd88c6e232c3cf4386d/tests/LiquidInf/Paper/Eval/CompareVerified/M44.hs | haskell | @ LIQUID "--no-termination" @
@ cond :: Int -> (Int, Int) -> Bool @ |
module M44 (main) where
@ main : : Int - > Int - > { b : | b } @
main :: Int -> Int -> Bool
main k flag =
case while (cond k) (loop flag (if flag == 1 then 1 else 2) k) (0, 0) of
(i', j') -> if flag == 1 then j' == i' else True
while :: (a -> Bool) -> (a -> a) -> a -> a
while pred body x = if pred x then while pred body (body x) else x
cond :: Int -> (Int, Int) -> Bool
cond k (i, j) = i <= k
@ loop : : flag : Int
- > n : { n : Int | flag = = 1 < = > n = = 1 }
- > k : Int
- > pre : { t:(Int , Int ) | ( n = = 1 = > x_Tuple21 t = = x_Tuple22 t ) }
- > { t:(Int , Int ) | ( n = = 1 = > x_Tuple21 t = = x_Tuple22 t ) } @
-> n:{ n:Int | flag == 1 <=> n == 1 }
-> k:Int
-> pre:{ t:(Int, Int) | (n == 1 => x_Tuple21 t == x_Tuple22 t) }
-> { t:(Int, Int) | (n == 1 => x_Tuple21 t == x_Tuple22 t) } @-}
loop :: Int -> Int -> Int -> (Int, Int) -> (Int, Int)
loop flag n k (i, j) = (i + 1, j + n)
|
97683d0c44bc40af06909bf6e8cfc5bd2975dda6962be81fb172d0708b2bf773 | UBTECH-Walker/WalkerSimulationFor2020WAIC | EcatGetPVT.lisp | ; Auto-generated. Do not edit!
(cl:in-package servo_ctrl-srv)
// ! \htmlinclude
(cl:defclass <EcatGetPVT-request> (roslisp-msg-protocol:ros-message)
((servo
:reader servo
:initarg :servo
:type cl:string
:initform ""))
)
(cl:defclass EcatGetPVT-request (<EcatGetPVT-request>)
())
(cl:defmethod cl:initialize-instance :after ((m <EcatGetPVT-request>) cl:&rest args)
(cl:declare (cl:ignorable args))
(cl:unless (cl:typep m 'EcatGetPVT-request)
(roslisp-msg-protocol:msg-deprecation-warning "using old message class name servo_ctrl-srv:<EcatGetPVT-request> is deprecated: use servo_ctrl-srv:EcatGetPVT-request instead.")))
(cl:ensure-generic-function 'servo-val :lambda-list '(m))
(cl:defmethod servo-val ((m <EcatGetPVT-request>))
(roslisp-msg-protocol:msg-deprecation-warning "Using old-style slot reader servo_ctrl-srv:servo-val is deprecated. Use servo_ctrl-srv:servo instead.")
(servo m))
(cl:defmethod roslisp-msg-protocol:serialize ((msg <EcatGetPVT-request>) ostream)
"Serializes a message object of type '<EcatGetPVT-request>"
(cl:let ((__ros_str_len (cl:length (cl:slot-value msg 'servo))))
(cl:write-byte (cl:ldb (cl:byte 8 0) __ros_str_len) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 8) __ros_str_len) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 16) __ros_str_len) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 24) __ros_str_len) ostream))
(cl:map cl:nil #'(cl:lambda (c) (cl:write-byte (cl:char-code c) ostream)) (cl:slot-value msg 'servo))
)
(cl:defmethod roslisp-msg-protocol:deserialize ((msg <EcatGetPVT-request>) istream)
"Deserializes a message object of type '<EcatGetPVT-request>"
(cl:let ((__ros_str_len 0))
(cl:setf (cl:ldb (cl:byte 8 0) __ros_str_len) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 8) __ros_str_len) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 16) __ros_str_len) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 24) __ros_str_len) (cl:read-byte istream))
(cl:setf (cl:slot-value msg 'servo) (cl:make-string __ros_str_len))
(cl:dotimes (__ros_str_idx __ros_str_len msg)
(cl:setf (cl:char (cl:slot-value msg 'servo) __ros_str_idx) (cl:code-char (cl:read-byte istream)))))
msg
)
(cl:defmethod roslisp-msg-protocol:ros-datatype ((msg (cl:eql '<EcatGetPVT-request>)))
"Returns string type for a service object of type '<EcatGetPVT-request>"
"servo_ctrl/EcatGetPVTRequest")
(cl:defmethod roslisp-msg-protocol:ros-datatype ((msg (cl:eql 'EcatGetPVT-request)))
"Returns string type for a service object of type 'EcatGetPVT-request"
"servo_ctrl/EcatGetPVTRequest")
(cl:defmethod roslisp-msg-protocol:md5sum ((type (cl:eql '<EcatGetPVT-request>)))
"Returns md5sum for a message object of type '<EcatGetPVT-request>"
"c489f93ef6aafee9eaeb152fcdcb6ab1")
(cl:defmethod roslisp-msg-protocol:md5sum ((type (cl:eql 'EcatGetPVT-request)))
"Returns md5sum for a message object of type 'EcatGetPVT-request"
"c489f93ef6aafee9eaeb152fcdcb6ab1")
(cl:defmethod roslisp-msg-protocol:message-definition ((type (cl:eql '<EcatGetPVT-request>)))
"Returns full string definition for message of type '<EcatGetPVT-request>"
(cl:format cl:nil "~%string servo~%~%~%"))
(cl:defmethod roslisp-msg-protocol:message-definition ((type (cl:eql 'EcatGetPVT-request)))
"Returns full string definition for message of type 'EcatGetPVT-request"
(cl:format cl:nil "~%string servo~%~%~%"))
(cl:defmethod roslisp-msg-protocol:serialization-length ((msg <EcatGetPVT-request>))
(cl:+ 0
4 (cl:length (cl:slot-value msg 'servo))
))
(cl:defmethod roslisp-msg-protocol:ros-message-to-list ((msg <EcatGetPVT-request>))
"Converts a ROS message object to a list"
(cl:list 'EcatGetPVT-request
(cl:cons ':servo (servo msg))
))
;//! \htmlinclude EcatGetPVT-response.msg.html
(cl:defclass <EcatGetPVT-response> (roslisp-msg-protocol:ros-message)
((pos
:reader pos
:initarg :pos
:type cl:float
:initform 0.0)
(vel
:reader vel
:initarg :vel
:type cl:float
:initform 0.0)
(trq
:reader trq
:initarg :trq
:type cl:float
:initform 0.0)
(errcode
:reader errcode
:initarg :errcode
:type cl:fixnum
:initform 0))
)
(cl:defclass EcatGetPVT-response (<EcatGetPVT-response>)
())
(cl:defmethod cl:initialize-instance :after ((m <EcatGetPVT-response>) cl:&rest args)
(cl:declare (cl:ignorable args))
(cl:unless (cl:typep m 'EcatGetPVT-response)
(roslisp-msg-protocol:msg-deprecation-warning "using old message class name servo_ctrl-srv:<EcatGetPVT-response> is deprecated: use servo_ctrl-srv:EcatGetPVT-response instead.")))
(cl:ensure-generic-function 'pos-val :lambda-list '(m))
(cl:defmethod pos-val ((m <EcatGetPVT-response>))
(roslisp-msg-protocol:msg-deprecation-warning "Using old-style slot reader servo_ctrl-srv:pos-val is deprecated. Use servo_ctrl-srv:pos instead.")
(pos m))
(cl:ensure-generic-function 'vel-val :lambda-list '(m))
(cl:defmethod vel-val ((m <EcatGetPVT-response>))
(roslisp-msg-protocol:msg-deprecation-warning "Using old-style slot reader servo_ctrl-srv:vel-val is deprecated. Use servo_ctrl-srv:vel instead.")
(vel m))
(cl:ensure-generic-function 'trq-val :lambda-list '(m))
(cl:defmethod trq-val ((m <EcatGetPVT-response>))
(roslisp-msg-protocol:msg-deprecation-warning "Using old-style slot reader servo_ctrl-srv:trq-val is deprecated. Use servo_ctrl-srv:trq instead.")
(trq m))
(cl:ensure-generic-function 'errcode-val :lambda-list '(m))
(cl:defmethod errcode-val ((m <EcatGetPVT-response>))
(roslisp-msg-protocol:msg-deprecation-warning "Using old-style slot reader servo_ctrl-srv:errcode-val is deprecated. Use servo_ctrl-srv:errcode instead.")
(errcode m))
(cl:defmethod roslisp-msg-protocol:serialize ((msg <EcatGetPVT-response>) ostream)
"Serializes a message object of type '<EcatGetPVT-response>"
(cl:let ((bits (roslisp-utils:encode-single-float-bits (cl:slot-value msg 'pos))))
(cl:write-byte (cl:ldb (cl:byte 8 0) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 8) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 16) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 24) bits) ostream))
(cl:let ((bits (roslisp-utils:encode-single-float-bits (cl:slot-value msg 'vel))))
(cl:write-byte (cl:ldb (cl:byte 8 0) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 8) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 16) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 24) bits) ostream))
(cl:let ((bits (roslisp-utils:encode-single-float-bits (cl:slot-value msg 'trq))))
(cl:write-byte (cl:ldb (cl:byte 8 0) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 8) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 16) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 24) bits) ostream))
(cl:write-byte (cl:ldb (cl:byte 8 0) (cl:slot-value msg 'errcode)) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 8) (cl:slot-value msg 'errcode)) ostream)
)
(cl:defmethod roslisp-msg-protocol:deserialize ((msg <EcatGetPVT-response>) istream)
"Deserializes a message object of type '<EcatGetPVT-response>"
(cl:let ((bits 0))
(cl:setf (cl:ldb (cl:byte 8 0) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 8) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 16) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 24) bits) (cl:read-byte istream))
(cl:setf (cl:slot-value msg 'pos) (roslisp-utils:decode-single-float-bits bits)))
(cl:let ((bits 0))
(cl:setf (cl:ldb (cl:byte 8 0) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 8) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 16) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 24) bits) (cl:read-byte istream))
(cl:setf (cl:slot-value msg 'vel) (roslisp-utils:decode-single-float-bits bits)))
(cl:let ((bits 0))
(cl:setf (cl:ldb (cl:byte 8 0) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 8) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 16) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 24) bits) (cl:read-byte istream))
(cl:setf (cl:slot-value msg 'trq) (roslisp-utils:decode-single-float-bits bits)))
(cl:setf (cl:ldb (cl:byte 8 0) (cl:slot-value msg 'errcode)) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 8) (cl:slot-value msg 'errcode)) (cl:read-byte istream))
msg
)
(cl:defmethod roslisp-msg-protocol:ros-datatype ((msg (cl:eql '<EcatGetPVT-response>)))
"Returns string type for a service object of type '<EcatGetPVT-response>"
"servo_ctrl/EcatGetPVTResponse")
(cl:defmethod roslisp-msg-protocol:ros-datatype ((msg (cl:eql 'EcatGetPVT-response)))
"Returns string type for a service object of type 'EcatGetPVT-response"
"servo_ctrl/EcatGetPVTResponse")
(cl:defmethod roslisp-msg-protocol:md5sum ((type (cl:eql '<EcatGetPVT-response>)))
"Returns md5sum for a message object of type '<EcatGetPVT-response>"
"c489f93ef6aafee9eaeb152fcdcb6ab1")
(cl:defmethod roslisp-msg-protocol:md5sum ((type (cl:eql 'EcatGetPVT-response)))
"Returns md5sum for a message object of type 'EcatGetPVT-response"
"c489f93ef6aafee9eaeb152fcdcb6ab1")
(cl:defmethod roslisp-msg-protocol:message-definition ((type (cl:eql '<EcatGetPVT-response>)))
"Returns full string definition for message of type '<EcatGetPVT-response>"
(cl:format cl:nil "float32 pos~%float32 vel~%float32 trq~%uint16 errcode~%~%~%~%"))
(cl:defmethod roslisp-msg-protocol:message-definition ((type (cl:eql 'EcatGetPVT-response)))
"Returns full string definition for message of type 'EcatGetPVT-response"
(cl:format cl:nil "float32 pos~%float32 vel~%float32 trq~%uint16 errcode~%~%~%~%"))
(cl:defmethod roslisp-msg-protocol:serialization-length ((msg <EcatGetPVT-response>))
(cl:+ 0
4
4
4
2
))
(cl:defmethod roslisp-msg-protocol:ros-message-to-list ((msg <EcatGetPVT-response>))
"Converts a ROS message object to a list"
(cl:list 'EcatGetPVT-response
(cl:cons ':pos (pos msg))
(cl:cons ':vel (vel msg))
(cl:cons ':trq (trq msg))
(cl:cons ':errcode (errcode msg))
))
(cl:defmethod roslisp-msg-protocol:service-request-type ((msg (cl:eql 'EcatGetPVT)))
'EcatGetPVT-request)
(cl:defmethod roslisp-msg-protocol:service-response-type ((msg (cl:eql 'EcatGetPVT)))
'EcatGetPVT-response)
(cl:defmethod roslisp-msg-protocol:ros-datatype ((msg (cl:eql 'EcatGetPVT)))
"Returns string type for a service object of type '<EcatGetPVT>"
"servo_ctrl/EcatGetPVT") | null | https://raw.githubusercontent.com/UBTECH-Walker/WalkerSimulationFor2020WAIC/7cdb21dabb8423994ba3f6021bc7934290d5faa9/walker_WAIC_16.04_v1.2_20200616/walker_install/share/common-lisp/ros/servo_ctrl/srv/EcatGetPVT.lisp | lisp | Auto-generated. Do not edit!
//! \htmlinclude EcatGetPVT-response.msg.html |
(cl:in-package servo_ctrl-srv)
// ! \htmlinclude
(cl:defclass <EcatGetPVT-request> (roslisp-msg-protocol:ros-message)
((servo
:reader servo
:initarg :servo
:type cl:string
:initform ""))
)
(cl:defclass EcatGetPVT-request (<EcatGetPVT-request>)
())
(cl:defmethod cl:initialize-instance :after ((m <EcatGetPVT-request>) cl:&rest args)
(cl:declare (cl:ignorable args))
(cl:unless (cl:typep m 'EcatGetPVT-request)
(roslisp-msg-protocol:msg-deprecation-warning "using old message class name servo_ctrl-srv:<EcatGetPVT-request> is deprecated: use servo_ctrl-srv:EcatGetPVT-request instead.")))
(cl:ensure-generic-function 'servo-val :lambda-list '(m))
(cl:defmethod servo-val ((m <EcatGetPVT-request>))
(roslisp-msg-protocol:msg-deprecation-warning "Using old-style slot reader servo_ctrl-srv:servo-val is deprecated. Use servo_ctrl-srv:servo instead.")
(servo m))
(cl:defmethod roslisp-msg-protocol:serialize ((msg <EcatGetPVT-request>) ostream)
"Serializes a message object of type '<EcatGetPVT-request>"
(cl:let ((__ros_str_len (cl:length (cl:slot-value msg 'servo))))
(cl:write-byte (cl:ldb (cl:byte 8 0) __ros_str_len) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 8) __ros_str_len) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 16) __ros_str_len) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 24) __ros_str_len) ostream))
(cl:map cl:nil #'(cl:lambda (c) (cl:write-byte (cl:char-code c) ostream)) (cl:slot-value msg 'servo))
)
(cl:defmethod roslisp-msg-protocol:deserialize ((msg <EcatGetPVT-request>) istream)
"Deserializes a message object of type '<EcatGetPVT-request>"
(cl:let ((__ros_str_len 0))
(cl:setf (cl:ldb (cl:byte 8 0) __ros_str_len) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 8) __ros_str_len) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 16) __ros_str_len) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 24) __ros_str_len) (cl:read-byte istream))
(cl:setf (cl:slot-value msg 'servo) (cl:make-string __ros_str_len))
(cl:dotimes (__ros_str_idx __ros_str_len msg)
(cl:setf (cl:char (cl:slot-value msg 'servo) __ros_str_idx) (cl:code-char (cl:read-byte istream)))))
msg
)
(cl:defmethod roslisp-msg-protocol:ros-datatype ((msg (cl:eql '<EcatGetPVT-request>)))
"Returns string type for a service object of type '<EcatGetPVT-request>"
"servo_ctrl/EcatGetPVTRequest")
(cl:defmethod roslisp-msg-protocol:ros-datatype ((msg (cl:eql 'EcatGetPVT-request)))
"Returns string type for a service object of type 'EcatGetPVT-request"
"servo_ctrl/EcatGetPVTRequest")
(cl:defmethod roslisp-msg-protocol:md5sum ((type (cl:eql '<EcatGetPVT-request>)))
"Returns md5sum for a message object of type '<EcatGetPVT-request>"
"c489f93ef6aafee9eaeb152fcdcb6ab1")
(cl:defmethod roslisp-msg-protocol:md5sum ((type (cl:eql 'EcatGetPVT-request)))
"Returns md5sum for a message object of type 'EcatGetPVT-request"
"c489f93ef6aafee9eaeb152fcdcb6ab1")
(cl:defmethod roslisp-msg-protocol:message-definition ((type (cl:eql '<EcatGetPVT-request>)))
"Returns full string definition for message of type '<EcatGetPVT-request>"
(cl:format cl:nil "~%string servo~%~%~%"))
(cl:defmethod roslisp-msg-protocol:message-definition ((type (cl:eql 'EcatGetPVT-request)))
"Returns full string definition for message of type 'EcatGetPVT-request"
(cl:format cl:nil "~%string servo~%~%~%"))
(cl:defmethod roslisp-msg-protocol:serialization-length ((msg <EcatGetPVT-request>))
(cl:+ 0
4 (cl:length (cl:slot-value msg 'servo))
))
(cl:defmethod roslisp-msg-protocol:ros-message-to-list ((msg <EcatGetPVT-request>))
"Converts a ROS message object to a list"
(cl:list 'EcatGetPVT-request
(cl:cons ':servo (servo msg))
))
(cl:defclass <EcatGetPVT-response> (roslisp-msg-protocol:ros-message)
((pos
:reader pos
:initarg :pos
:type cl:float
:initform 0.0)
(vel
:reader vel
:initarg :vel
:type cl:float
:initform 0.0)
(trq
:reader trq
:initarg :trq
:type cl:float
:initform 0.0)
(errcode
:reader errcode
:initarg :errcode
:type cl:fixnum
:initform 0))
)
(cl:defclass EcatGetPVT-response (<EcatGetPVT-response>)
())
(cl:defmethod cl:initialize-instance :after ((m <EcatGetPVT-response>) cl:&rest args)
(cl:declare (cl:ignorable args))
(cl:unless (cl:typep m 'EcatGetPVT-response)
(roslisp-msg-protocol:msg-deprecation-warning "using old message class name servo_ctrl-srv:<EcatGetPVT-response> is deprecated: use servo_ctrl-srv:EcatGetPVT-response instead.")))
(cl:ensure-generic-function 'pos-val :lambda-list '(m))
(cl:defmethod pos-val ((m <EcatGetPVT-response>))
(roslisp-msg-protocol:msg-deprecation-warning "Using old-style slot reader servo_ctrl-srv:pos-val is deprecated. Use servo_ctrl-srv:pos instead.")
(pos m))
(cl:ensure-generic-function 'vel-val :lambda-list '(m))
(cl:defmethod vel-val ((m <EcatGetPVT-response>))
(roslisp-msg-protocol:msg-deprecation-warning "Using old-style slot reader servo_ctrl-srv:vel-val is deprecated. Use servo_ctrl-srv:vel instead.")
(vel m))
(cl:ensure-generic-function 'trq-val :lambda-list '(m))
(cl:defmethod trq-val ((m <EcatGetPVT-response>))
(roslisp-msg-protocol:msg-deprecation-warning "Using old-style slot reader servo_ctrl-srv:trq-val is deprecated. Use servo_ctrl-srv:trq instead.")
(trq m))
(cl:ensure-generic-function 'errcode-val :lambda-list '(m))
(cl:defmethod errcode-val ((m <EcatGetPVT-response>))
(roslisp-msg-protocol:msg-deprecation-warning "Using old-style slot reader servo_ctrl-srv:errcode-val is deprecated. Use servo_ctrl-srv:errcode instead.")
(errcode m))
(cl:defmethod roslisp-msg-protocol:serialize ((msg <EcatGetPVT-response>) ostream)
"Serializes a message object of type '<EcatGetPVT-response>"
(cl:let ((bits (roslisp-utils:encode-single-float-bits (cl:slot-value msg 'pos))))
(cl:write-byte (cl:ldb (cl:byte 8 0) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 8) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 16) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 24) bits) ostream))
(cl:let ((bits (roslisp-utils:encode-single-float-bits (cl:slot-value msg 'vel))))
(cl:write-byte (cl:ldb (cl:byte 8 0) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 8) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 16) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 24) bits) ostream))
(cl:let ((bits (roslisp-utils:encode-single-float-bits (cl:slot-value msg 'trq))))
(cl:write-byte (cl:ldb (cl:byte 8 0) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 8) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 16) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 24) bits) ostream))
(cl:write-byte (cl:ldb (cl:byte 8 0) (cl:slot-value msg 'errcode)) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 8) (cl:slot-value msg 'errcode)) ostream)
)
(cl:defmethod roslisp-msg-protocol:deserialize ((msg <EcatGetPVT-response>) istream)
"Deserializes a message object of type '<EcatGetPVT-response>"
(cl:let ((bits 0))
(cl:setf (cl:ldb (cl:byte 8 0) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 8) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 16) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 24) bits) (cl:read-byte istream))
(cl:setf (cl:slot-value msg 'pos) (roslisp-utils:decode-single-float-bits bits)))
(cl:let ((bits 0))
(cl:setf (cl:ldb (cl:byte 8 0) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 8) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 16) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 24) bits) (cl:read-byte istream))
(cl:setf (cl:slot-value msg 'vel) (roslisp-utils:decode-single-float-bits bits)))
(cl:let ((bits 0))
(cl:setf (cl:ldb (cl:byte 8 0) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 8) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 16) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 24) bits) (cl:read-byte istream))
(cl:setf (cl:slot-value msg 'trq) (roslisp-utils:decode-single-float-bits bits)))
(cl:setf (cl:ldb (cl:byte 8 0) (cl:slot-value msg 'errcode)) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 8) (cl:slot-value msg 'errcode)) (cl:read-byte istream))
msg
)
(cl:defmethod roslisp-msg-protocol:ros-datatype ((msg (cl:eql '<EcatGetPVT-response>)))
"Returns string type for a service object of type '<EcatGetPVT-response>"
"servo_ctrl/EcatGetPVTResponse")
(cl:defmethod roslisp-msg-protocol:ros-datatype ((msg (cl:eql 'EcatGetPVT-response)))
"Returns string type for a service object of type 'EcatGetPVT-response"
"servo_ctrl/EcatGetPVTResponse")
(cl:defmethod roslisp-msg-protocol:md5sum ((type (cl:eql '<EcatGetPVT-response>)))
"Returns md5sum for a message object of type '<EcatGetPVT-response>"
"c489f93ef6aafee9eaeb152fcdcb6ab1")
(cl:defmethod roslisp-msg-protocol:md5sum ((type (cl:eql 'EcatGetPVT-response)))
"Returns md5sum for a message object of type 'EcatGetPVT-response"
"c489f93ef6aafee9eaeb152fcdcb6ab1")
(cl:defmethod roslisp-msg-protocol:message-definition ((type (cl:eql '<EcatGetPVT-response>)))
"Returns full string definition for message of type '<EcatGetPVT-response>"
(cl:format cl:nil "float32 pos~%float32 vel~%float32 trq~%uint16 errcode~%~%~%~%"))
(cl:defmethod roslisp-msg-protocol:message-definition ((type (cl:eql 'EcatGetPVT-response)))
"Returns full string definition for message of type 'EcatGetPVT-response"
(cl:format cl:nil "float32 pos~%float32 vel~%float32 trq~%uint16 errcode~%~%~%~%"))
(cl:defmethod roslisp-msg-protocol:serialization-length ((msg <EcatGetPVT-response>))
(cl:+ 0
4
4
4
2
))
(cl:defmethod roslisp-msg-protocol:ros-message-to-list ((msg <EcatGetPVT-response>))
"Converts a ROS message object to a list"
(cl:list 'EcatGetPVT-response
(cl:cons ':pos (pos msg))
(cl:cons ':vel (vel msg))
(cl:cons ':trq (trq msg))
(cl:cons ':errcode (errcode msg))
))
(cl:defmethod roslisp-msg-protocol:service-request-type ((msg (cl:eql 'EcatGetPVT)))
'EcatGetPVT-request)
(cl:defmethod roslisp-msg-protocol:service-response-type ((msg (cl:eql 'EcatGetPVT)))
'EcatGetPVT-response)
(cl:defmethod roslisp-msg-protocol:ros-datatype ((msg (cl:eql 'EcatGetPVT)))
"Returns string type for a service object of type '<EcatGetPVT>"
"servo_ctrl/EcatGetPVT") |
9cae1ec7fe7ba90871fa0b43c954d4eb7c6d52009d8b912bdb203686c87ee419 | TerrorJack/ghc-alter | PrimopWrappers.hs | {-# LANGUAGE MagicHash, NoImplicitPrelude, UnboxedTuples #-}
module GHC.PrimopWrappers where
import qualified GHC.Prim
import GHC.Tuple ()
import GHC.Prim (Char#, Int#, Word#, Float#, Double#, State#, MutableArray#, Array#, SmallMutableArray#, SmallArray#, MutableByteArray#, ByteArray#, Addr#, StablePtr#, MutableArrayArray#, ArrayArray#, MutVar#, RealWorld, TVar#, MVar#, ThreadId#, Weak#, StableName#, Compact#, BCO#)
-- Wrappers for Char# comparison/conversion and Int# arithmetic primops.
-- Fix: many NOINLINE pragmas in this copy were garbled to invalid tokens
-- (e.g. "# NOINLINE gtChar # #"); restored to valid {-# NOINLINE ... #-}
-- syntax. Signatures and definitions are unchanged.
{-# NOINLINE gtChar# #-}
gtChar# :: Char# -> Char# -> Int#
gtChar# a1 a2 = (GHC.Prim.gtChar#) a1 a2
{-# NOINLINE geChar# #-}
geChar# :: Char# -> Char# -> Int#
geChar# a1 a2 = (GHC.Prim.geChar#) a1 a2
{-# NOINLINE eqChar# #-}
eqChar# :: Char# -> Char# -> Int#
eqChar# a1 a2 = (GHC.Prim.eqChar#) a1 a2
{-# NOINLINE neChar# #-}
neChar# :: Char# -> Char# -> Int#
neChar# a1 a2 = (GHC.Prim.neChar#) a1 a2
{-# NOINLINE ltChar# #-}
ltChar# :: Char# -> Char# -> Int#
ltChar# a1 a2 = (GHC.Prim.ltChar#) a1 a2
{-# NOINLINE leChar# #-}
leChar# :: Char# -> Char# -> Int#
leChar# a1 a2 = (GHC.Prim.leChar#) a1 a2
{-# NOINLINE ord# #-}
ord# :: Char# -> Int#
ord# a1 = (GHC.Prim.ord#) a1
{-# NOINLINE (+#) #-}
(+#) :: Int# -> Int# -> Int#
(+#) a1 a2 = (GHC.Prim.+#) a1 a2
{-# NOINLINE (-#) #-}
(-#) :: Int# -> Int# -> Int#
(-#) a1 a2 = (GHC.Prim.-#) a1 a2
{-# NOINLINE (*#) #-}
(*#) :: Int# -> Int# -> Int#
(*#) a1 a2 = (GHC.Prim.*#) a1 a2
{-# NOINLINE mulIntMayOflo# #-}
mulIntMayOflo# :: Int# -> Int# -> Int#
mulIntMayOflo# a1 a2 = (GHC.Prim.mulIntMayOflo#) a1 a2
{-# NOINLINE quotInt# #-}
quotInt# :: Int# -> Int# -> Int#
quotInt# a1 a2 = (GHC.Prim.quotInt#) a1 a2
{-# NOINLINE remInt# #-}
remInt# :: Int# -> Int# -> Int#
remInt# a1 a2 = (GHC.Prim.remInt#) a1 a2
{-# NOINLINE quotRemInt# #-}
quotRemInt# :: Int# -> Int# -> (# Int#,Int# #)
quotRemInt# a1 a2 = (GHC.Prim.quotRemInt#) a1 a2
{-# NOINLINE andI# #-}
andI# :: Int# -> Int# -> Int#
andI# a1 a2 = (GHC.Prim.andI#) a1 a2
{-# NOINLINE orI# #-}
orI# :: Int# -> Int# -> Int#
orI# a1 a2 = (GHC.Prim.orI#) a1 a2
{-# NOINLINE xorI# #-}
xorI# :: Int# -> Int# -> Int#
xorI# a1 a2 = (GHC.Prim.xorI#) a1 a2
{-# NOINLINE notI# #-}
notI# :: Int# -> Int#
notI# a1 = (GHC.Prim.notI#) a1
{-# NOINLINE negateInt# #-}
negateInt# :: Int# -> Int#
negateInt# a1 = (GHC.Prim.negateInt#) a1
{-# NOINLINE addIntC# #-}
addIntC# :: Int# -> Int# -> (# Int#,Int# #)
addIntC# a1 a2 = (GHC.Prim.addIntC#) a1 a2
{-# NOINLINE subIntC# #-}
subIntC# :: Int# -> Int# -> (# Int#,Int# #)
subIntC# a1 a2 = (GHC.Prim.subIntC#) a1 a2
{-# NOINLINE (>#) #-}
(>#) :: Int# -> Int# -> Int#
(>#) a1 a2 = (GHC.Prim.>#) a1 a2
{-# NOINLINE (>=#) #-}
(>=#) :: Int# -> Int# -> Int#
(>=#) a1 a2 = (GHC.Prim.>=#) a1 a2
{-# NOINLINE (==#) #-}
(==#) :: Int# -> Int# -> Int#
(==#) a1 a2 = (GHC.Prim.==#) a1 a2
{-# NOINLINE (/=#) #-}
(/=#) :: Int# -> Int# -> Int#
(/=#) a1 a2 = (GHC.Prim./=#) a1 a2
{-# NOINLINE (<#) #-}
(<#) :: Int# -> Int# -> Int#
(<#) a1 a2 = (GHC.Prim.<#) a1 a2
{-# NOINLINE (<=#) #-}
(<=#) :: Int# -> Int# -> Int#
(<=#) a1 a2 = (GHC.Prim.<=#) a1 a2
{-# NOINLINE chr# #-}
chr# :: Int# -> Char#
chr# a1 = (GHC.Prim.chr#) a1
-- Wrappers for numeric conversion and Word# primops (arithmetic, bitwise,
-- shifts, comparisons, population count, leading/trailing zeros, byte swap,
-- and narrowing).
-- Fix: garbled NOINLINE pragmas restored to valid {-# NOINLINE ... #-}
-- syntax; signatures and definitions are unchanged.
{-# NOINLINE int2Word# #-}
int2Word# :: Int# -> Word#
int2Word# a1 = (GHC.Prim.int2Word#) a1
{-# NOINLINE int2Float# #-}
int2Float# :: Int# -> Float#
int2Float# a1 = (GHC.Prim.int2Float#) a1
{-# NOINLINE int2Double# #-}
int2Double# :: Int# -> Double#
int2Double# a1 = (GHC.Prim.int2Double#) a1
{-# NOINLINE word2Float# #-}
word2Float# :: Word# -> Float#
word2Float# a1 = (GHC.Prim.word2Float#) a1
{-# NOINLINE word2Double# #-}
word2Double# :: Word# -> Double#
word2Double# a1 = (GHC.Prim.word2Double#) a1
{-# NOINLINE uncheckedIShiftL# #-}
uncheckedIShiftL# :: Int# -> Int# -> Int#
uncheckedIShiftL# a1 a2 = (GHC.Prim.uncheckedIShiftL#) a1 a2
{-# NOINLINE uncheckedIShiftRA# #-}
uncheckedIShiftRA# :: Int# -> Int# -> Int#
uncheckedIShiftRA# a1 a2 = (GHC.Prim.uncheckedIShiftRA#) a1 a2
{-# NOINLINE uncheckedIShiftRL# #-}
uncheckedIShiftRL# :: Int# -> Int# -> Int#
uncheckedIShiftRL# a1 a2 = (GHC.Prim.uncheckedIShiftRL#) a1 a2
{-# NOINLINE plusWord# #-}
plusWord# :: Word# -> Word# -> Word#
plusWord# a1 a2 = (GHC.Prim.plusWord#) a1 a2
{-# NOINLINE subWordC# #-}
subWordC# :: Word# -> Word# -> (# Word#,Int# #)
subWordC# a1 a2 = (GHC.Prim.subWordC#) a1 a2
{-# NOINLINE plusWord2# #-}
plusWord2# :: Word# -> Word# -> (# Word#,Word# #)
plusWord2# a1 a2 = (GHC.Prim.plusWord2#) a1 a2
{-# NOINLINE minusWord# #-}
minusWord# :: Word# -> Word# -> Word#
minusWord# a1 a2 = (GHC.Prim.minusWord#) a1 a2
{-# NOINLINE timesWord# #-}
timesWord# :: Word# -> Word# -> Word#
timesWord# a1 a2 = (GHC.Prim.timesWord#) a1 a2
{-# NOINLINE timesWord2# #-}
timesWord2# :: Word# -> Word# -> (# Word#,Word# #)
timesWord2# a1 a2 = (GHC.Prim.timesWord2#) a1 a2
{-# NOINLINE quotWord# #-}
quotWord# :: Word# -> Word# -> Word#
quotWord# a1 a2 = (GHC.Prim.quotWord#) a1 a2
{-# NOINLINE remWord# #-}
remWord# :: Word# -> Word# -> Word#
remWord# a1 a2 = (GHC.Prim.remWord#) a1 a2
{-# NOINLINE quotRemWord# #-}
quotRemWord# :: Word# -> Word# -> (# Word#,Word# #)
quotRemWord# a1 a2 = (GHC.Prim.quotRemWord#) a1 a2
{-# NOINLINE quotRemWord2# #-}
quotRemWord2# :: Word# -> Word# -> Word# -> (# Word#,Word# #)
quotRemWord2# a1 a2 a3 = (GHC.Prim.quotRemWord2#) a1 a2 a3
{-# NOINLINE and# #-}
and# :: Word# -> Word# -> Word#
and# a1 a2 = (GHC.Prim.and#) a1 a2
{-# NOINLINE or# #-}
or# :: Word# -> Word# -> Word#
or# a1 a2 = (GHC.Prim.or#) a1 a2
{-# NOINLINE xor# #-}
xor# :: Word# -> Word# -> Word#
xor# a1 a2 = (GHC.Prim.xor#) a1 a2
{-# NOINLINE not# #-}
not# :: Word# -> Word#
not# a1 = (GHC.Prim.not#) a1
{-# NOINLINE uncheckedShiftL# #-}
uncheckedShiftL# :: Word# -> Int# -> Word#
uncheckedShiftL# a1 a2 = (GHC.Prim.uncheckedShiftL#) a1 a2
{-# NOINLINE uncheckedShiftRL# #-}
uncheckedShiftRL# :: Word# -> Int# -> Word#
uncheckedShiftRL# a1 a2 = (GHC.Prim.uncheckedShiftRL#) a1 a2
{-# NOINLINE word2Int# #-}
word2Int# :: Word# -> Int#
word2Int# a1 = (GHC.Prim.word2Int#) a1
{-# NOINLINE gtWord# #-}
gtWord# :: Word# -> Word# -> Int#
gtWord# a1 a2 = (GHC.Prim.gtWord#) a1 a2
{-# NOINLINE geWord# #-}
geWord# :: Word# -> Word# -> Int#
geWord# a1 a2 = (GHC.Prim.geWord#) a1 a2
{-# NOINLINE eqWord# #-}
eqWord# :: Word# -> Word# -> Int#
eqWord# a1 a2 = (GHC.Prim.eqWord#) a1 a2
{-# NOINLINE neWord# #-}
neWord# :: Word# -> Word# -> Int#
neWord# a1 a2 = (GHC.Prim.neWord#) a1 a2
{-# NOINLINE ltWord# #-}
ltWord# :: Word# -> Word# -> Int#
ltWord# a1 a2 = (GHC.Prim.ltWord#) a1 a2
{-# NOINLINE leWord# #-}
leWord# :: Word# -> Word# -> Int#
leWord# a1 a2 = (GHC.Prim.leWord#) a1 a2
{-# NOINLINE popCnt8# #-}
popCnt8# :: Word# -> Word#
popCnt8# a1 = (GHC.Prim.popCnt8#) a1
{-# NOINLINE popCnt16# #-}
popCnt16# :: Word# -> Word#
popCnt16# a1 = (GHC.Prim.popCnt16#) a1
{-# NOINLINE popCnt32# #-}
popCnt32# :: Word# -> Word#
popCnt32# a1 = (GHC.Prim.popCnt32#) a1
{-# NOINLINE popCnt64# #-}
popCnt64# :: Word# -> Word#
popCnt64# a1 = (GHC.Prim.popCnt64#) a1
{-# NOINLINE popCnt# #-}
popCnt# :: Word# -> Word#
popCnt# a1 = (GHC.Prim.popCnt#) a1
{-# NOINLINE clz8# #-}
clz8# :: Word# -> Word#
clz8# a1 = (GHC.Prim.clz8#) a1
{-# NOINLINE clz16# #-}
clz16# :: Word# -> Word#
clz16# a1 = (GHC.Prim.clz16#) a1
{-# NOINLINE clz32# #-}
clz32# :: Word# -> Word#
clz32# a1 = (GHC.Prim.clz32#) a1
{-# NOINLINE clz64# #-}
clz64# :: Word# -> Word#
clz64# a1 = (GHC.Prim.clz64#) a1
{-# NOINLINE clz# #-}
clz# :: Word# -> Word#
clz# a1 = (GHC.Prim.clz#) a1
{-# NOINLINE ctz8# #-}
ctz8# :: Word# -> Word#
ctz8# a1 = (GHC.Prim.ctz8#) a1
{-# NOINLINE ctz16# #-}
ctz16# :: Word# -> Word#
ctz16# a1 = (GHC.Prim.ctz16#) a1
{-# NOINLINE ctz32# #-}
ctz32# :: Word# -> Word#
ctz32# a1 = (GHC.Prim.ctz32#) a1
{-# NOINLINE ctz64# #-}
ctz64# :: Word# -> Word#
ctz64# a1 = (GHC.Prim.ctz64#) a1
{-# NOINLINE ctz# #-}
ctz# :: Word# -> Word#
ctz# a1 = (GHC.Prim.ctz#) a1
{-# NOINLINE byteSwap16# #-}
byteSwap16# :: Word# -> Word#
byteSwap16# a1 = (GHC.Prim.byteSwap16#) a1
{-# NOINLINE byteSwap32# #-}
byteSwap32# :: Word# -> Word#
byteSwap32# a1 = (GHC.Prim.byteSwap32#) a1
{-# NOINLINE byteSwap64# #-}
byteSwap64# :: Word# -> Word#
byteSwap64# a1 = (GHC.Prim.byteSwap64#) a1
{-# NOINLINE byteSwap# #-}
byteSwap# :: Word# -> Word#
byteSwap# a1 = (GHC.Prim.byteSwap#) a1
{-# NOINLINE narrow8Int# #-}
narrow8Int# :: Int# -> Int#
narrow8Int# a1 = (GHC.Prim.narrow8Int#) a1
{-# NOINLINE narrow16Int# #-}
narrow16Int# :: Int# -> Int#
narrow16Int# a1 = (GHC.Prim.narrow16Int#) a1
{-# NOINLINE narrow32Int# #-}
narrow32Int# :: Int# -> Int#
narrow32Int# a1 = (GHC.Prim.narrow32Int#) a1
{-# NOINLINE narrow8Word# #-}
narrow8Word# :: Word# -> Word#
narrow8Word# a1 = (GHC.Prim.narrow8Word#) a1
{-# NOINLINE narrow16Word# #-}
narrow16Word# :: Word# -> Word#
narrow16Word# a1 = (GHC.Prim.narrow16Word#) a1
{-# NOINLINE narrow32Word# #-}
narrow32Word# :: Word# -> Word#
narrow32Word# a1 = (GHC.Prim.narrow32Word#) a1
-- Wrappers for Double# primops: comparisons, arithmetic, transcendental
-- functions, and mantissa/exponent decoding.
-- Fix: garbled NOINLINE pragmas restored to valid {-# NOINLINE ... #-}
-- syntax; signatures and definitions are unchanged.
{-# NOINLINE (>##) #-}
(>##) :: Double# -> Double# -> Int#
(>##) a1 a2 = (GHC.Prim.>##) a1 a2
{-# NOINLINE (>=##) #-}
(>=##) :: Double# -> Double# -> Int#
(>=##) a1 a2 = (GHC.Prim.>=##) a1 a2
{-# NOINLINE (==##) #-}
(==##) :: Double# -> Double# -> Int#
(==##) a1 a2 = (GHC.Prim.==##) a1 a2
{-# NOINLINE (/=##) #-}
(/=##) :: Double# -> Double# -> Int#
(/=##) a1 a2 = (GHC.Prim./=##) a1 a2
{-# NOINLINE (<##) #-}
(<##) :: Double# -> Double# -> Int#
(<##) a1 a2 = (GHC.Prim.<##) a1 a2
{-# NOINLINE (<=##) #-}
(<=##) :: Double# -> Double# -> Int#
(<=##) a1 a2 = (GHC.Prim.<=##) a1 a2
{-# NOINLINE (+##) #-}
(+##) :: Double# -> Double# -> Double#
(+##) a1 a2 = (GHC.Prim.+##) a1 a2
{-# NOINLINE (-##) #-}
(-##) :: Double# -> Double# -> Double#
(-##) a1 a2 = (GHC.Prim.-##) a1 a2
{-# NOINLINE (*##) #-}
(*##) :: Double# -> Double# -> Double#
(*##) a1 a2 = (GHC.Prim.*##) a1 a2
{-# NOINLINE (/##) #-}
(/##) :: Double# -> Double# -> Double#
(/##) a1 a2 = (GHC.Prim./##) a1 a2
{-# NOINLINE negateDouble# #-}
negateDouble# :: Double# -> Double#
negateDouble# a1 = (GHC.Prim.negateDouble#) a1
{-# NOINLINE fabsDouble# #-}
fabsDouble# :: Double# -> Double#
fabsDouble# a1 = (GHC.Prim.fabsDouble#) a1
{-# NOINLINE double2Int# #-}
double2Int# :: Double# -> Int#
double2Int# a1 = (GHC.Prim.double2Int#) a1
{-# NOINLINE double2Float# #-}
double2Float# :: Double# -> Float#
double2Float# a1 = (GHC.Prim.double2Float#) a1
{-# NOINLINE expDouble# #-}
expDouble# :: Double# -> Double#
expDouble# a1 = (GHC.Prim.expDouble#) a1
{-# NOINLINE logDouble# #-}
logDouble# :: Double# -> Double#
logDouble# a1 = (GHC.Prim.logDouble#) a1
{-# NOINLINE sqrtDouble# #-}
sqrtDouble# :: Double# -> Double#
sqrtDouble# a1 = (GHC.Prim.sqrtDouble#) a1
{-# NOINLINE sinDouble# #-}
sinDouble# :: Double# -> Double#
sinDouble# a1 = (GHC.Prim.sinDouble#) a1
{-# NOINLINE cosDouble# #-}
cosDouble# :: Double# -> Double#
cosDouble# a1 = (GHC.Prim.cosDouble#) a1
{-# NOINLINE tanDouble# #-}
tanDouble# :: Double# -> Double#
tanDouble# a1 = (GHC.Prim.tanDouble#) a1
{-# NOINLINE asinDouble# #-}
asinDouble# :: Double# -> Double#
asinDouble# a1 = (GHC.Prim.asinDouble#) a1
{-# NOINLINE acosDouble# #-}
acosDouble# :: Double# -> Double#
acosDouble# a1 = (GHC.Prim.acosDouble#) a1
{-# NOINLINE atanDouble# #-}
atanDouble# :: Double# -> Double#
atanDouble# a1 = (GHC.Prim.atanDouble#) a1
{-# NOINLINE sinhDouble# #-}
sinhDouble# :: Double# -> Double#
sinhDouble# a1 = (GHC.Prim.sinhDouble#) a1
{-# NOINLINE coshDouble# #-}
coshDouble# :: Double# -> Double#
coshDouble# a1 = (GHC.Prim.coshDouble#) a1
{-# NOINLINE tanhDouble# #-}
tanhDouble# :: Double# -> Double#
tanhDouble# a1 = (GHC.Prim.tanhDouble#) a1
{-# NOINLINE (**##) #-}
(**##) :: Double# -> Double# -> Double#
(**##) a1 a2 = (GHC.Prim.**##) a1 a2
{-# NOINLINE decodeDouble_2Int# #-}
decodeDouble_2Int# :: Double# -> (# Int#,Word#,Word#,Int# #)
decodeDouble_2Int# a1 = (GHC.Prim.decodeDouble_2Int#) a1
{-# NOINLINE decodeDouble_Int64# #-}
decodeDouble_Int64# :: Double# -> (# Int#,Int# #)
decodeDouble_Int64# a1 = (GHC.Prim.decodeDouble_Int64#) a1
-- Wrappers for Float# primops: comparisons, arithmetic, transcendental
-- functions, conversions, and mantissa/exponent decoding.
-- Fix: garbled NOINLINE pragmas restored to valid {-# NOINLINE ... #-}
-- syntax; signatures and definitions are unchanged.
{-# NOINLINE gtFloat# #-}
gtFloat# :: Float# -> Float# -> Int#
gtFloat# a1 a2 = (GHC.Prim.gtFloat#) a1 a2
{-# NOINLINE geFloat# #-}
geFloat# :: Float# -> Float# -> Int#
geFloat# a1 a2 = (GHC.Prim.geFloat#) a1 a2
{-# NOINLINE eqFloat# #-}
eqFloat# :: Float# -> Float# -> Int#
eqFloat# a1 a2 = (GHC.Prim.eqFloat#) a1 a2
{-# NOINLINE neFloat# #-}
neFloat# :: Float# -> Float# -> Int#
neFloat# a1 a2 = (GHC.Prim.neFloat#) a1 a2
{-# NOINLINE ltFloat# #-}
ltFloat# :: Float# -> Float# -> Int#
ltFloat# a1 a2 = (GHC.Prim.ltFloat#) a1 a2
{-# NOINLINE leFloat# #-}
leFloat# :: Float# -> Float# -> Int#
leFloat# a1 a2 = (GHC.Prim.leFloat#) a1 a2
{-# NOINLINE plusFloat# #-}
plusFloat# :: Float# -> Float# -> Float#
plusFloat# a1 a2 = (GHC.Prim.plusFloat#) a1 a2
{-# NOINLINE minusFloat# #-}
minusFloat# :: Float# -> Float# -> Float#
minusFloat# a1 a2 = (GHC.Prim.minusFloat#) a1 a2
{-# NOINLINE timesFloat# #-}
timesFloat# :: Float# -> Float# -> Float#
timesFloat# a1 a2 = (GHC.Prim.timesFloat#) a1 a2
{-# NOINLINE divideFloat# #-}
divideFloat# :: Float# -> Float# -> Float#
divideFloat# a1 a2 = (GHC.Prim.divideFloat#) a1 a2
{-# NOINLINE negateFloat# #-}
negateFloat# :: Float# -> Float#
negateFloat# a1 = (GHC.Prim.negateFloat#) a1
{-# NOINLINE fabsFloat# #-}
fabsFloat# :: Float# -> Float#
fabsFloat# a1 = (GHC.Prim.fabsFloat#) a1
{-# NOINLINE float2Int# #-}
float2Int# :: Float# -> Int#
float2Int# a1 = (GHC.Prim.float2Int#) a1
{-# NOINLINE expFloat# #-}
expFloat# :: Float# -> Float#
expFloat# a1 = (GHC.Prim.expFloat#) a1
{-# NOINLINE logFloat# #-}
logFloat# :: Float# -> Float#
logFloat# a1 = (GHC.Prim.logFloat#) a1
{-# NOINLINE sqrtFloat# #-}
sqrtFloat# :: Float# -> Float#
sqrtFloat# a1 = (GHC.Prim.sqrtFloat#) a1
{-# NOINLINE sinFloat# #-}
sinFloat# :: Float# -> Float#
sinFloat# a1 = (GHC.Prim.sinFloat#) a1
{-# NOINLINE cosFloat# #-}
cosFloat# :: Float# -> Float#
cosFloat# a1 = (GHC.Prim.cosFloat#) a1
{-# NOINLINE tanFloat# #-}
tanFloat# :: Float# -> Float#
tanFloat# a1 = (GHC.Prim.tanFloat#) a1
{-# NOINLINE asinFloat# #-}
asinFloat# :: Float# -> Float#
asinFloat# a1 = (GHC.Prim.asinFloat#) a1
{-# NOINLINE acosFloat# #-}
acosFloat# :: Float# -> Float#
acosFloat# a1 = (GHC.Prim.acosFloat#) a1
{-# NOINLINE atanFloat# #-}
atanFloat# :: Float# -> Float#
atanFloat# a1 = (GHC.Prim.atanFloat#) a1
{-# NOINLINE sinhFloat# #-}
sinhFloat# :: Float# -> Float#
sinhFloat# a1 = (GHC.Prim.sinhFloat#) a1
{-# NOINLINE coshFloat# #-}
coshFloat# :: Float# -> Float#
coshFloat# a1 = (GHC.Prim.coshFloat#) a1
{-# NOINLINE tanhFloat# #-}
tanhFloat# :: Float# -> Float#
tanhFloat# a1 = (GHC.Prim.tanhFloat#) a1
{-# NOINLINE powerFloat# #-}
powerFloat# :: Float# -> Float# -> Float#
powerFloat# a1 a2 = (GHC.Prim.powerFloat#) a1 a2
{-# NOINLINE float2Double# #-}
float2Double# :: Float# -> Double#
float2Double# a1 = (GHC.Prim.float2Double#) a1
{-# NOINLINE decodeFloat_Int# #-}
decodeFloat_Int# :: Float# -> (# Int#,Int# #)
decodeFloat_Int# a1 = (GHC.Prim.decodeFloat_Int#) a1
-- Wrappers for boxed Array# / SmallArray# primops: allocation, read/write,
-- freeze/thaw, copy/clone, and compare-and-swap.
-- Fix: garbled NOINLINE pragmas restored to valid {-# NOINLINE ... #-}
-- syntax; signatures and definitions are unchanged.
{-# NOINLINE newArray# #-}
newArray# :: Int# -> a -> State# s -> (# State# s,MutableArray# s a #)
newArray# a1 a2 a3 = (GHC.Prim.newArray#) a1 a2 a3
{-# NOINLINE sameMutableArray# #-}
sameMutableArray# :: MutableArray# s a -> MutableArray# s a -> Int#
sameMutableArray# a1 a2 = (GHC.Prim.sameMutableArray#) a1 a2
{-# NOINLINE readArray# #-}
readArray# :: MutableArray# s a -> Int# -> State# s -> (# State# s,a #)
readArray# a1 a2 a3 = (GHC.Prim.readArray#) a1 a2 a3
{-# NOINLINE writeArray# #-}
writeArray# :: MutableArray# s a -> Int# -> a -> State# s -> State# s
writeArray# a1 a2 a3 a4 = (GHC.Prim.writeArray#) a1 a2 a3 a4
{-# NOINLINE sizeofArray# #-}
sizeofArray# :: Array# a -> Int#
sizeofArray# a1 = (GHC.Prim.sizeofArray#) a1
{-# NOINLINE sizeofMutableArray# #-}
sizeofMutableArray# :: MutableArray# s a -> Int#
sizeofMutableArray# a1 = (GHC.Prim.sizeofMutableArray#) a1
{-# NOINLINE indexArray# #-}
indexArray# :: Array# a -> Int# -> (# a #)
indexArray# a1 a2 = (GHC.Prim.indexArray#) a1 a2
{-# NOINLINE unsafeFreezeArray# #-}
unsafeFreezeArray# :: MutableArray# s a -> State# s -> (# State# s,Array# a #)
unsafeFreezeArray# a1 a2 = (GHC.Prim.unsafeFreezeArray#) a1 a2
{-# NOINLINE unsafeThawArray# #-}
unsafeThawArray# :: Array# a -> State# s -> (# State# s,MutableArray# s a #)
unsafeThawArray# a1 a2 = (GHC.Prim.unsafeThawArray#) a1 a2
{-# NOINLINE copyArray# #-}
copyArray# :: Array# a -> Int# -> MutableArray# s a -> Int# -> Int# -> State# s -> State# s
copyArray# a1 a2 a3 a4 a5 a6 = (GHC.Prim.copyArray#) a1 a2 a3 a4 a5 a6
{-# NOINLINE copyMutableArray# #-}
copyMutableArray# :: MutableArray# s a -> Int# -> MutableArray# s a -> Int# -> Int# -> State# s -> State# s
copyMutableArray# a1 a2 a3 a4 a5 a6 = (GHC.Prim.copyMutableArray#) a1 a2 a3 a4 a5 a6
{-# NOINLINE cloneArray# #-}
cloneArray# :: Array# a -> Int# -> Int# -> Array# a
cloneArray# a1 a2 a3 = (GHC.Prim.cloneArray#) a1 a2 a3
{-# NOINLINE cloneMutableArray# #-}
cloneMutableArray# :: MutableArray# s a -> Int# -> Int# -> State# s -> (# State# s,MutableArray# s a #)
cloneMutableArray# a1 a2 a3 a4 = (GHC.Prim.cloneMutableArray#) a1 a2 a3 a4
{-# NOINLINE freezeArray# #-}
freezeArray# :: MutableArray# s a -> Int# -> Int# -> State# s -> (# State# s,Array# a #)
freezeArray# a1 a2 a3 a4 = (GHC.Prim.freezeArray#) a1 a2 a3 a4
{-# NOINLINE thawArray# #-}
thawArray# :: Array# a -> Int# -> Int# -> State# s -> (# State# s,MutableArray# s a #)
thawArray# a1 a2 a3 a4 = (GHC.Prim.thawArray#) a1 a2 a3 a4
{-# NOINLINE casArray# #-}
casArray# :: MutableArray# s a -> Int# -> a -> a -> State# s -> (# State# s,Int#,a #)
casArray# a1 a2 a3 a4 a5 = (GHC.Prim.casArray#) a1 a2 a3 a4 a5
{-# NOINLINE newSmallArray# #-}
newSmallArray# :: Int# -> a -> State# s -> (# State# s,SmallMutableArray# s a #)
newSmallArray# a1 a2 a3 = (GHC.Prim.newSmallArray#) a1 a2 a3
{-# NOINLINE sameSmallMutableArray# #-}
sameSmallMutableArray# :: SmallMutableArray# s a -> SmallMutableArray# s a -> Int#
sameSmallMutableArray# a1 a2 = (GHC.Prim.sameSmallMutableArray#) a1 a2
{-# NOINLINE readSmallArray# #-}
readSmallArray# :: SmallMutableArray# s a -> Int# -> State# s -> (# State# s,a #)
readSmallArray# a1 a2 a3 = (GHC.Prim.readSmallArray#) a1 a2 a3
{-# NOINLINE writeSmallArray# #-}
writeSmallArray# :: SmallMutableArray# s a -> Int# -> a -> State# s -> State# s
writeSmallArray# a1 a2 a3 a4 = (GHC.Prim.writeSmallArray#) a1 a2 a3 a4
{-# NOINLINE sizeofSmallArray# #-}
sizeofSmallArray# :: SmallArray# a -> Int#
sizeofSmallArray# a1 = (GHC.Prim.sizeofSmallArray#) a1
{-# NOINLINE sizeofSmallMutableArray# #-}
sizeofSmallMutableArray# :: SmallMutableArray# s a -> Int#
sizeofSmallMutableArray# a1 = (GHC.Prim.sizeofSmallMutableArray#) a1
{-# NOINLINE indexSmallArray# #-}
indexSmallArray# :: SmallArray# a -> Int# -> (# a #)
indexSmallArray# a1 a2 = (GHC.Prim.indexSmallArray#) a1 a2
{-# NOINLINE unsafeFreezeSmallArray# #-}
unsafeFreezeSmallArray# :: SmallMutableArray# s a -> State# s -> (# State# s,SmallArray# a #)
unsafeFreezeSmallArray# a1 a2 = (GHC.Prim.unsafeFreezeSmallArray#) a1 a2
{-# NOINLINE unsafeThawSmallArray# #-}
unsafeThawSmallArray# :: SmallArray# a -> State# s -> (# State# s,SmallMutableArray# s a #)
unsafeThawSmallArray# a1 a2 = (GHC.Prim.unsafeThawSmallArray#) a1 a2
{-# NOINLINE copySmallArray# #-}
copySmallArray# :: SmallArray# a -> Int# -> SmallMutableArray# s a -> Int# -> Int# -> State# s -> State# s
copySmallArray# a1 a2 a3 a4 a5 a6 = (GHC.Prim.copySmallArray#) a1 a2 a3 a4 a5 a6
{-# NOINLINE copySmallMutableArray# #-}
copySmallMutableArray# :: SmallMutableArray# s a -> Int# -> SmallMutableArray# s a -> Int# -> Int# -> State# s -> State# s
copySmallMutableArray# a1 a2 a3 a4 a5 a6 = (GHC.Prim.copySmallMutableArray#) a1 a2 a3 a4 a5 a6
{-# NOINLINE cloneSmallArray# #-}
cloneSmallArray# :: SmallArray# a -> Int# -> Int# -> SmallArray# a
cloneSmallArray# a1 a2 a3 = (GHC.Prim.cloneSmallArray#) a1 a2 a3
{-# NOINLINE cloneSmallMutableArray# #-}
cloneSmallMutableArray# :: SmallMutableArray# s a -> Int# -> Int# -> State# s -> (# State# s,SmallMutableArray# s a #)
cloneSmallMutableArray# a1 a2 a3 a4 = (GHC.Prim.cloneSmallMutableArray#) a1 a2 a3 a4
{-# NOINLINE freezeSmallArray# #-}
freezeSmallArray# :: SmallMutableArray# s a -> Int# -> Int# -> State# s -> (# State# s,SmallArray# a #)
freezeSmallArray# a1 a2 a3 a4 = (GHC.Prim.freezeSmallArray#) a1 a2 a3 a4
{-# NOINLINE thawSmallArray# #-}
thawSmallArray# :: SmallArray# a -> Int# -> Int# -> State# s -> (# State# s,SmallMutableArray# s a #)
thawSmallArray# a1 a2 a3 a4 = (GHC.Prim.thawSmallArray#) a1 a2 a3 a4
{-# NOINLINE casSmallArray# #-}
casSmallArray# :: SmallMutableArray# s a -> Int# -> a -> a -> State# s -> (# State# s,Int#,a #)
casSmallArray# a1 a2 a3 a4 a5 = (GHC.Prim.casSmallArray#) a1 a2 a3 a4 a5
-- Wrappers for ByteArray# / MutableByteArray# primops (allocation, pinning,
-- index/read/write at every element type, copies, atomics) and for the
-- ArrayArray# primops at the end.
-- Fix: garbled NOINLINE pragmas restored to valid {-# NOINLINE ... #-}
-- syntax; signatures and definitions are unchanged.
{-# NOINLINE newByteArray# #-}
newByteArray# :: Int# -> State# s -> (# State# s,MutableByteArray# s #)
newByteArray# a1 a2 = (GHC.Prim.newByteArray#) a1 a2
{-# NOINLINE newPinnedByteArray# #-}
newPinnedByteArray# :: Int# -> State# s -> (# State# s,MutableByteArray# s #)
newPinnedByteArray# a1 a2 = (GHC.Prim.newPinnedByteArray#) a1 a2
{-# NOINLINE newAlignedPinnedByteArray# #-}
newAlignedPinnedByteArray# :: Int# -> Int# -> State# s -> (# State# s,MutableByteArray# s #)
newAlignedPinnedByteArray# a1 a2 a3 = (GHC.Prim.newAlignedPinnedByteArray#) a1 a2 a3
{-# NOINLINE isMutableByteArrayPinned# #-}
isMutableByteArrayPinned# :: MutableByteArray# s -> Int#
isMutableByteArrayPinned# a1 = (GHC.Prim.isMutableByteArrayPinned#) a1
{-# NOINLINE isByteArrayPinned# #-}
isByteArrayPinned# :: ByteArray# -> Int#
isByteArrayPinned# a1 = (GHC.Prim.isByteArrayPinned#) a1
{-# NOINLINE byteArrayContents# #-}
byteArrayContents# :: ByteArray# -> Addr#
byteArrayContents# a1 = (GHC.Prim.byteArrayContents#) a1
{-# NOINLINE sameMutableByteArray# #-}
sameMutableByteArray# :: MutableByteArray# s -> MutableByteArray# s -> Int#
sameMutableByteArray# a1 a2 = (GHC.Prim.sameMutableByteArray#) a1 a2
{-# NOINLINE shrinkMutableByteArray# #-}
shrinkMutableByteArray# :: MutableByteArray# s -> Int# -> State# s -> State# s
shrinkMutableByteArray# a1 a2 a3 = (GHC.Prim.shrinkMutableByteArray#) a1 a2 a3
{-# NOINLINE resizeMutableByteArray# #-}
resizeMutableByteArray# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,MutableByteArray# s #)
resizeMutableByteArray# a1 a2 a3 = (GHC.Prim.resizeMutableByteArray#) a1 a2 a3
{-# NOINLINE unsafeFreezeByteArray# #-}
unsafeFreezeByteArray# :: MutableByteArray# s -> State# s -> (# State# s,ByteArray# #)
unsafeFreezeByteArray# a1 a2 = (GHC.Prim.unsafeFreezeByteArray#) a1 a2
{-# NOINLINE sizeofByteArray# #-}
sizeofByteArray# :: ByteArray# -> Int#
sizeofByteArray# a1 = (GHC.Prim.sizeofByteArray#) a1
{-# NOINLINE sizeofMutableByteArray# #-}
sizeofMutableByteArray# :: MutableByteArray# s -> Int#
sizeofMutableByteArray# a1 = (GHC.Prim.sizeofMutableByteArray#) a1
{-# NOINLINE getSizeofMutableByteArray# #-}
getSizeofMutableByteArray# :: MutableByteArray# s -> State# s -> (# State# s,Int# #)
getSizeofMutableByteArray# a1 a2 = (GHC.Prim.getSizeofMutableByteArray#) a1 a2
{-# NOINLINE indexCharArray# #-}
indexCharArray# :: ByteArray# -> Int# -> Char#
indexCharArray# a1 a2 = (GHC.Prim.indexCharArray#) a1 a2
{-# NOINLINE indexWideCharArray# #-}
indexWideCharArray# :: ByteArray# -> Int# -> Char#
indexWideCharArray# a1 a2 = (GHC.Prim.indexWideCharArray#) a1 a2
{-# NOINLINE indexIntArray# #-}
indexIntArray# :: ByteArray# -> Int# -> Int#
indexIntArray# a1 a2 = (GHC.Prim.indexIntArray#) a1 a2
{-# NOINLINE indexWordArray# #-}
indexWordArray# :: ByteArray# -> Int# -> Word#
indexWordArray# a1 a2 = (GHC.Prim.indexWordArray#) a1 a2
{-# NOINLINE indexAddrArray# #-}
indexAddrArray# :: ByteArray# -> Int# -> Addr#
indexAddrArray# a1 a2 = (GHC.Prim.indexAddrArray#) a1 a2
{-# NOINLINE indexFloatArray# #-}
indexFloatArray# :: ByteArray# -> Int# -> Float#
indexFloatArray# a1 a2 = (GHC.Prim.indexFloatArray#) a1 a2
{-# NOINLINE indexDoubleArray# #-}
indexDoubleArray# :: ByteArray# -> Int# -> Double#
indexDoubleArray# a1 a2 = (GHC.Prim.indexDoubleArray#) a1 a2
{-# NOINLINE indexStablePtrArray# #-}
indexStablePtrArray# :: ByteArray# -> Int# -> StablePtr# a
indexStablePtrArray# a1 a2 = (GHC.Prim.indexStablePtrArray#) a1 a2
{-# NOINLINE indexInt8Array# #-}
indexInt8Array# :: ByteArray# -> Int# -> Int#
indexInt8Array# a1 a2 = (GHC.Prim.indexInt8Array#) a1 a2
{-# NOINLINE indexInt16Array# #-}
indexInt16Array# :: ByteArray# -> Int# -> Int#
indexInt16Array# a1 a2 = (GHC.Prim.indexInt16Array#) a1 a2
{-# NOINLINE indexInt32Array# #-}
indexInt32Array# :: ByteArray# -> Int# -> Int#
indexInt32Array# a1 a2 = (GHC.Prim.indexInt32Array#) a1 a2
{-# NOINLINE indexInt64Array# #-}
indexInt64Array# :: ByteArray# -> Int# -> Int#
indexInt64Array# a1 a2 = (GHC.Prim.indexInt64Array#) a1 a2
{-# NOINLINE indexWord8Array# #-}
indexWord8Array# :: ByteArray# -> Int# -> Word#
indexWord8Array# a1 a2 = (GHC.Prim.indexWord8Array#) a1 a2
{-# NOINLINE indexWord16Array# #-}
indexWord16Array# :: ByteArray# -> Int# -> Word#
indexWord16Array# a1 a2 = (GHC.Prim.indexWord16Array#) a1 a2
{-# NOINLINE indexWord32Array# #-}
indexWord32Array# :: ByteArray# -> Int# -> Word#
indexWord32Array# a1 a2 = (GHC.Prim.indexWord32Array#) a1 a2
{-# NOINLINE indexWord64Array# #-}
indexWord64Array# :: ByteArray# -> Int# -> Word#
indexWord64Array# a1 a2 = (GHC.Prim.indexWord64Array#) a1 a2
{-# NOINLINE readCharArray# #-}
readCharArray# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Char# #)
readCharArray# a1 a2 a3 = (GHC.Prim.readCharArray#) a1 a2 a3
{-# NOINLINE readWideCharArray# #-}
readWideCharArray# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Char# #)
readWideCharArray# a1 a2 a3 = (GHC.Prim.readWideCharArray#) a1 a2 a3
{-# NOINLINE readIntArray# #-}
readIntArray# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Int# #)
readIntArray# a1 a2 a3 = (GHC.Prim.readIntArray#) a1 a2 a3
{-# NOINLINE readWordArray# #-}
readWordArray# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Word# #)
readWordArray# a1 a2 a3 = (GHC.Prim.readWordArray#) a1 a2 a3
{-# NOINLINE readAddrArray# #-}
readAddrArray# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Addr# #)
readAddrArray# a1 a2 a3 = (GHC.Prim.readAddrArray#) a1 a2 a3
{-# NOINLINE readFloatArray# #-}
readFloatArray# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Float# #)
readFloatArray# a1 a2 a3 = (GHC.Prim.readFloatArray#) a1 a2 a3
{-# NOINLINE readDoubleArray# #-}
readDoubleArray# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Double# #)
readDoubleArray# a1 a2 a3 = (GHC.Prim.readDoubleArray#) a1 a2 a3
{-# NOINLINE readStablePtrArray# #-}
readStablePtrArray# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,StablePtr# a #)
readStablePtrArray# a1 a2 a3 = (GHC.Prim.readStablePtrArray#) a1 a2 a3
{-# NOINLINE readInt8Array# #-}
readInt8Array# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Int# #)
readInt8Array# a1 a2 a3 = (GHC.Prim.readInt8Array#) a1 a2 a3
{-# NOINLINE readInt16Array# #-}
readInt16Array# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Int# #)
readInt16Array# a1 a2 a3 = (GHC.Prim.readInt16Array#) a1 a2 a3
{-# NOINLINE readInt32Array# #-}
readInt32Array# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Int# #)
readInt32Array# a1 a2 a3 = (GHC.Prim.readInt32Array#) a1 a2 a3
{-# NOINLINE readInt64Array# #-}
readInt64Array# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Int# #)
readInt64Array# a1 a2 a3 = (GHC.Prim.readInt64Array#) a1 a2 a3
{-# NOINLINE readWord8Array# #-}
readWord8Array# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Word# #)
readWord8Array# a1 a2 a3 = (GHC.Prim.readWord8Array#) a1 a2 a3
{-# NOINLINE readWord16Array# #-}
readWord16Array# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Word# #)
readWord16Array# a1 a2 a3 = (GHC.Prim.readWord16Array#) a1 a2 a3
{-# NOINLINE readWord32Array# #-}
readWord32Array# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Word# #)
readWord32Array# a1 a2 a3 = (GHC.Prim.readWord32Array#) a1 a2 a3
{-# NOINLINE readWord64Array# #-}
readWord64Array# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Word# #)
readWord64Array# a1 a2 a3 = (GHC.Prim.readWord64Array#) a1 a2 a3
{-# NOINLINE writeCharArray# #-}
writeCharArray# :: MutableByteArray# s -> Int# -> Char# -> State# s -> State# s
writeCharArray# a1 a2 a3 a4 = (GHC.Prim.writeCharArray#) a1 a2 a3 a4
{-# NOINLINE writeWideCharArray# #-}
writeWideCharArray# :: MutableByteArray# s -> Int# -> Char# -> State# s -> State# s
writeWideCharArray# a1 a2 a3 a4 = (GHC.Prim.writeWideCharArray#) a1 a2 a3 a4
{-# NOINLINE writeIntArray# #-}
writeIntArray# :: MutableByteArray# s -> Int# -> Int# -> State# s -> State# s
writeIntArray# a1 a2 a3 a4 = (GHC.Prim.writeIntArray#) a1 a2 a3 a4
{-# NOINLINE writeWordArray# #-}
writeWordArray# :: MutableByteArray# s -> Int# -> Word# -> State# s -> State# s
writeWordArray# a1 a2 a3 a4 = (GHC.Prim.writeWordArray#) a1 a2 a3 a4
{-# NOINLINE writeAddrArray# #-}
writeAddrArray# :: MutableByteArray# s -> Int# -> Addr# -> State# s -> State# s
writeAddrArray# a1 a2 a3 a4 = (GHC.Prim.writeAddrArray#) a1 a2 a3 a4
{-# NOINLINE writeFloatArray# #-}
writeFloatArray# :: MutableByteArray# s -> Int# -> Float# -> State# s -> State# s
writeFloatArray# a1 a2 a3 a4 = (GHC.Prim.writeFloatArray#) a1 a2 a3 a4
{-# NOINLINE writeDoubleArray# #-}
writeDoubleArray# :: MutableByteArray# s -> Int# -> Double# -> State# s -> State# s
writeDoubleArray# a1 a2 a3 a4 = (GHC.Prim.writeDoubleArray#) a1 a2 a3 a4
{-# NOINLINE writeStablePtrArray# #-}
writeStablePtrArray# :: MutableByteArray# s -> Int# -> StablePtr# a -> State# s -> State# s
writeStablePtrArray# a1 a2 a3 a4 = (GHC.Prim.writeStablePtrArray#) a1 a2 a3 a4
{-# NOINLINE writeInt8Array# #-}
writeInt8Array# :: MutableByteArray# s -> Int# -> Int# -> State# s -> State# s
writeInt8Array# a1 a2 a3 a4 = (GHC.Prim.writeInt8Array#) a1 a2 a3 a4
{-# NOINLINE writeInt16Array# #-}
writeInt16Array# :: MutableByteArray# s -> Int# -> Int# -> State# s -> State# s
writeInt16Array# a1 a2 a3 a4 = (GHC.Prim.writeInt16Array#) a1 a2 a3 a4
{-# NOINLINE writeInt32Array# #-}
writeInt32Array# :: MutableByteArray# s -> Int# -> Int# -> State# s -> State# s
writeInt32Array# a1 a2 a3 a4 = (GHC.Prim.writeInt32Array#) a1 a2 a3 a4
{-# NOINLINE writeInt64Array# #-}
writeInt64Array# :: MutableByteArray# s -> Int# -> Int# -> State# s -> State# s
writeInt64Array# a1 a2 a3 a4 = (GHC.Prim.writeInt64Array#) a1 a2 a3 a4
{-# NOINLINE writeWord8Array# #-}
writeWord8Array# :: MutableByteArray# s -> Int# -> Word# -> State# s -> State# s
writeWord8Array# a1 a2 a3 a4 = (GHC.Prim.writeWord8Array#) a1 a2 a3 a4
{-# NOINLINE writeWord16Array# #-}
writeWord16Array# :: MutableByteArray# s -> Int# -> Word# -> State# s -> State# s
writeWord16Array# a1 a2 a3 a4 = (GHC.Prim.writeWord16Array#) a1 a2 a3 a4
{-# NOINLINE writeWord32Array# #-}
writeWord32Array# :: MutableByteArray# s -> Int# -> Word# -> State# s -> State# s
writeWord32Array# a1 a2 a3 a4 = (GHC.Prim.writeWord32Array#) a1 a2 a3 a4
{-# NOINLINE writeWord64Array# #-}
writeWord64Array# :: MutableByteArray# s -> Int# -> Word# -> State# s -> State# s
writeWord64Array# a1 a2 a3 a4 = (GHC.Prim.writeWord64Array#) a1 a2 a3 a4
{-# NOINLINE copyByteArray# #-}
copyByteArray# :: ByteArray# -> Int# -> MutableByteArray# s -> Int# -> Int# -> State# s -> State# s
copyByteArray# a1 a2 a3 a4 a5 a6 = (GHC.Prim.copyByteArray#) a1 a2 a3 a4 a5 a6
{-# NOINLINE copyMutableByteArray# #-}
copyMutableByteArray# :: MutableByteArray# s -> Int# -> MutableByteArray# s -> Int# -> Int# -> State# s -> State# s
copyMutableByteArray# a1 a2 a3 a4 a5 a6 = (GHC.Prim.copyMutableByteArray#) a1 a2 a3 a4 a5 a6
{-# NOINLINE copyByteArrayToAddr# #-}
copyByteArrayToAddr# :: ByteArray# -> Int# -> Addr# -> Int# -> State# s -> State# s
copyByteArrayToAddr# a1 a2 a3 a4 a5 = (GHC.Prim.copyByteArrayToAddr#) a1 a2 a3 a4 a5
{-# NOINLINE copyMutableByteArrayToAddr# #-}
copyMutableByteArrayToAddr# :: MutableByteArray# s -> Int# -> Addr# -> Int# -> State# s -> State# s
copyMutableByteArrayToAddr# a1 a2 a3 a4 a5 = (GHC.Prim.copyMutableByteArrayToAddr#) a1 a2 a3 a4 a5
{-# NOINLINE copyAddrToByteArray# #-}
copyAddrToByteArray# :: Addr# -> MutableByteArray# s -> Int# -> Int# -> State# s -> State# s
copyAddrToByteArray# a1 a2 a3 a4 a5 = (GHC.Prim.copyAddrToByteArray#) a1 a2 a3 a4 a5
{-# NOINLINE setByteArray# #-}
setByteArray# :: MutableByteArray# s -> Int# -> Int# -> Int# -> State# s -> State# s
setByteArray# a1 a2 a3 a4 a5 = (GHC.Prim.setByteArray#) a1 a2 a3 a4 a5
{-# NOINLINE atomicReadIntArray# #-}
atomicReadIntArray# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Int# #)
atomicReadIntArray# a1 a2 a3 = (GHC.Prim.atomicReadIntArray#) a1 a2 a3
{-# NOINLINE atomicWriteIntArray# #-}
atomicWriteIntArray# :: MutableByteArray# s -> Int# -> Int# -> State# s -> State# s
atomicWriteIntArray# a1 a2 a3 a4 = (GHC.Prim.atomicWriteIntArray#) a1 a2 a3 a4
{-# NOINLINE casIntArray# #-}
casIntArray# :: MutableByteArray# s -> Int# -> Int# -> Int# -> State# s -> (# State# s,Int# #)
casIntArray# a1 a2 a3 a4 a5 = (GHC.Prim.casIntArray#) a1 a2 a3 a4 a5
{-# NOINLINE fetchAddIntArray# #-}
fetchAddIntArray# :: MutableByteArray# s -> Int# -> Int# -> State# s -> (# State# s,Int# #)
fetchAddIntArray# a1 a2 a3 a4 = (GHC.Prim.fetchAddIntArray#) a1 a2 a3 a4
{-# NOINLINE fetchSubIntArray# #-}
fetchSubIntArray# :: MutableByteArray# s -> Int# -> Int# -> State# s -> (# State# s,Int# #)
fetchSubIntArray# a1 a2 a3 a4 = (GHC.Prim.fetchSubIntArray#) a1 a2 a3 a4
{-# NOINLINE fetchAndIntArray# #-}
fetchAndIntArray# :: MutableByteArray# s -> Int# -> Int# -> State# s -> (# State# s,Int# #)
fetchAndIntArray# a1 a2 a3 a4 = (GHC.Prim.fetchAndIntArray#) a1 a2 a3 a4
{-# NOINLINE fetchNandIntArray# #-}
fetchNandIntArray# :: MutableByteArray# s -> Int# -> Int# -> State# s -> (# State# s,Int# #)
fetchNandIntArray# a1 a2 a3 a4 = (GHC.Prim.fetchNandIntArray#) a1 a2 a3 a4
{-# NOINLINE fetchOrIntArray# #-}
fetchOrIntArray# :: MutableByteArray# s -> Int# -> Int# -> State# s -> (# State# s,Int# #)
fetchOrIntArray# a1 a2 a3 a4 = (GHC.Prim.fetchOrIntArray#) a1 a2 a3 a4
{-# NOINLINE fetchXorIntArray# #-}
fetchXorIntArray# :: MutableByteArray# s -> Int# -> Int# -> State# s -> (# State# s,Int# #)
fetchXorIntArray# a1 a2 a3 a4 = (GHC.Prim.fetchXorIntArray#) a1 a2 a3 a4
{-# NOINLINE newArrayArray# #-}
newArrayArray# :: Int# -> State# s -> (# State# s,MutableArrayArray# s #)
newArrayArray# a1 a2 = (GHC.Prim.newArrayArray#) a1 a2
{-# NOINLINE sameMutableArrayArray# #-}
sameMutableArrayArray# :: MutableArrayArray# s -> MutableArrayArray# s -> Int#
sameMutableArrayArray# a1 a2 = (GHC.Prim.sameMutableArrayArray#) a1 a2
{-# NOINLINE unsafeFreezeArrayArray# #-}
unsafeFreezeArrayArray# :: MutableArrayArray# s -> State# s -> (# State# s,ArrayArray# #)
unsafeFreezeArrayArray# a1 a2 = (GHC.Prim.unsafeFreezeArrayArray#) a1 a2
{-# NOINLINE sizeofArrayArray# #-}
sizeofArrayArray# :: ArrayArray# -> Int#
sizeofArrayArray# a1 = (GHC.Prim.sizeofArrayArray#) a1
# NOINLINE sizeofMutableArrayArray # #
sizeofMutableArrayArray# :: MutableArrayArray# s -> Int#
sizeofMutableArrayArray# a1 = (GHC.Prim.sizeofMutableArrayArray#) a1
# NOINLINE indexByteArrayArray # #
indexByteArrayArray# :: ArrayArray# -> Int# -> ByteArray#
indexByteArrayArray# a1 a2 = (GHC.Prim.indexByteArrayArray#) a1 a2
# NOINLINE indexArrayArrayArray # #
indexArrayArrayArray# :: ArrayArray# -> Int# -> ArrayArray#
indexArrayArrayArray# a1 a2 = (GHC.Prim.indexArrayArrayArray#) a1 a2
# NOINLINE readByteArrayArray # #
readByteArrayArray# :: MutableArrayArray# s -> Int# -> State# s -> (# State# s,ByteArray# #)
readByteArrayArray# a1 a2 a3 = (GHC.Prim.readByteArrayArray#) a1 a2 a3
# NOINLINE readMutableByteArrayArray # #
readMutableByteArrayArray# :: MutableArrayArray# s -> Int# -> State# s -> (# State# s,MutableByteArray# s #)
readMutableByteArrayArray# a1 a2 a3 = (GHC.Prim.readMutableByteArrayArray#) a1 a2 a3
# NOINLINE readArrayArrayArray # #
readArrayArrayArray# :: MutableArrayArray# s -> Int# -> State# s -> (# State# s,ArrayArray# #)
readArrayArrayArray# a1 a2 a3 = (GHC.Prim.readArrayArrayArray#) a1 a2 a3
# NOINLINE readMutableArrayArrayArray # #
readMutableArrayArrayArray# :: MutableArrayArray# s -> Int# -> State# s -> (# State# s,MutableArrayArray# s #)
readMutableArrayArrayArray# a1 a2 a3 = (GHC.Prim.readMutableArrayArrayArray#) a1 a2 a3
# NOINLINE writeByteArrayArray # #
writeByteArrayArray# :: MutableArrayArray# s -> Int# -> ByteArray# -> State# s -> State# s
writeByteArrayArray# a1 a2 a3 a4 = (GHC.Prim.writeByteArrayArray#) a1 a2 a3 a4
# NOINLINE writeMutableByteArrayArray # #
writeMutableByteArrayArray# :: MutableArrayArray# s -> Int# -> MutableByteArray# s -> State# s -> State# s
writeMutableByteArrayArray# a1 a2 a3 a4 = (GHC.Prim.writeMutableByteArrayArray#) a1 a2 a3 a4
# NOINLINE writeArrayArrayArray # #
writeArrayArrayArray# :: MutableArrayArray# s -> Int# -> ArrayArray# -> State# s -> State# s
writeArrayArrayArray# a1 a2 a3 a4 = (GHC.Prim.writeArrayArrayArray#) a1 a2 a3 a4
# NOINLINE writeMutableArrayArrayArray # #
writeMutableArrayArrayArray# :: MutableArrayArray# s -> Int# -> MutableArrayArray# s -> State# s -> State# s
writeMutableArrayArrayArray# a1 a2 a3 a4 = (GHC.Prim.writeMutableArrayArrayArray#) a1 a2 a3 a4
# NOINLINE copyArrayArray # #
copyArrayArray# :: ArrayArray# -> Int# -> MutableArrayArray# s -> Int# -> Int# -> State# s -> State# s
copyArrayArray# a1 a2 a3 a4 a5 a6 = (GHC.Prim.copyArrayArray#) a1 a2 a3 a4 a5 a6
# NOINLINE copyMutableArrayArray # #
copyMutableArrayArray# :: MutableArrayArray# s -> Int# -> MutableArrayArray# s -> Int# -> Int# -> State# s -> State# s
copyMutableArrayArray# a1 a2 a3 a4 a5 a6 = (GHC.Prim.copyMutableArrayArray#) a1 a2 a3 a4 a5 a6
-- Machine-generated wrappers around GHC.Prim Addr# arithmetic/comparison and
-- indexed read primops; NOINLINE keeps each binding as a real function value.
{-# NOINLINE plusAddr# #-}
plusAddr# :: Addr# -> Int# -> Addr#
plusAddr# a1 a2 = (GHC.Prim.plusAddr#) a1 a2
{-# NOINLINE minusAddr# #-}
minusAddr# :: Addr# -> Addr# -> Int#
minusAddr# a1 a2 = (GHC.Prim.minusAddr#) a1 a2
{-# NOINLINE remAddr# #-}
remAddr# :: Addr# -> Int# -> Int#
remAddr# a1 a2 = (GHC.Prim.remAddr#) a1 a2
{-# NOINLINE addr2Int# #-}
addr2Int# :: Addr# -> Int#
addr2Int# a1 = (GHC.Prim.addr2Int#) a1
{-# NOINLINE int2Addr# #-}
int2Addr# :: Int# -> Addr#
int2Addr# a1 = (GHC.Prim.int2Addr#) a1
{-# NOINLINE gtAddr# #-}
gtAddr# :: Addr# -> Addr# -> Int#
gtAddr# a1 a2 = (GHC.Prim.gtAddr#) a1 a2
{-# NOINLINE geAddr# #-}
geAddr# :: Addr# -> Addr# -> Int#
geAddr# a1 a2 = (GHC.Prim.geAddr#) a1 a2
{-# NOINLINE eqAddr# #-}
eqAddr# :: Addr# -> Addr# -> Int#
eqAddr# a1 a2 = (GHC.Prim.eqAddr#) a1 a2
{-# NOINLINE neAddr# #-}
neAddr# :: Addr# -> Addr# -> Int#
neAddr# a1 a2 = (GHC.Prim.neAddr#) a1 a2
{-# NOINLINE ltAddr# #-}
ltAddr# :: Addr# -> Addr# -> Int#
ltAddr# a1 a2 = (GHC.Prim.ltAddr#) a1 a2
{-# NOINLINE leAddr# #-}
leAddr# :: Addr# -> Addr# -> Int#
leAddr# a1 a2 = (GHC.Prim.leAddr#) a1 a2
{-# NOINLINE indexCharOffAddr# #-}
indexCharOffAddr# :: Addr# -> Int# -> Char#
indexCharOffAddr# a1 a2 = (GHC.Prim.indexCharOffAddr#) a1 a2
{-# NOINLINE indexWideCharOffAddr# #-}
indexWideCharOffAddr# :: Addr# -> Int# -> Char#
indexWideCharOffAddr# a1 a2 = (GHC.Prim.indexWideCharOffAddr#) a1 a2
{-# NOINLINE indexIntOffAddr# #-}
indexIntOffAddr# :: Addr# -> Int# -> Int#
indexIntOffAddr# a1 a2 = (GHC.Prim.indexIntOffAddr#) a1 a2
{-# NOINLINE indexWordOffAddr# #-}
indexWordOffAddr# :: Addr# -> Int# -> Word#
indexWordOffAddr# a1 a2 = (GHC.Prim.indexWordOffAddr#) a1 a2
{-# NOINLINE indexAddrOffAddr# #-}
indexAddrOffAddr# :: Addr# -> Int# -> Addr#
indexAddrOffAddr# a1 a2 = (GHC.Prim.indexAddrOffAddr#) a1 a2
{-# NOINLINE indexFloatOffAddr# #-}
indexFloatOffAddr# :: Addr# -> Int# -> Float#
indexFloatOffAddr# a1 a2 = (GHC.Prim.indexFloatOffAddr#) a1 a2
{-# NOINLINE indexDoubleOffAddr# #-}
indexDoubleOffAddr# :: Addr# -> Int# -> Double#
indexDoubleOffAddr# a1 a2 = (GHC.Prim.indexDoubleOffAddr#) a1 a2
{-# NOINLINE indexStablePtrOffAddr# #-}
indexStablePtrOffAddr# :: Addr# -> Int# -> StablePtr# a
indexStablePtrOffAddr# a1 a2 = (GHC.Prim.indexStablePtrOffAddr#) a1 a2
{-# NOINLINE indexInt8OffAddr# #-}
indexInt8OffAddr# :: Addr# -> Int# -> Int#
indexInt8OffAddr# a1 a2 = (GHC.Prim.indexInt8OffAddr#) a1 a2
{-# NOINLINE indexInt16OffAddr# #-}
indexInt16OffAddr# :: Addr# -> Int# -> Int#
indexInt16OffAddr# a1 a2 = (GHC.Prim.indexInt16OffAddr#) a1 a2
{-# NOINLINE indexInt32OffAddr# #-}
indexInt32OffAddr# :: Addr# -> Int# -> Int#
indexInt32OffAddr# a1 a2 = (GHC.Prim.indexInt32OffAddr#) a1 a2
{-# NOINLINE indexInt64OffAddr# #-}
indexInt64OffAddr# :: Addr# -> Int# -> Int#
indexInt64OffAddr# a1 a2 = (GHC.Prim.indexInt64OffAddr#) a1 a2
{-# NOINLINE indexWord8OffAddr# #-}
indexWord8OffAddr# :: Addr# -> Int# -> Word#
indexWord8OffAddr# a1 a2 = (GHC.Prim.indexWord8OffAddr#) a1 a2
{-# NOINLINE indexWord16OffAddr# #-}
indexWord16OffAddr# :: Addr# -> Int# -> Word#
indexWord16OffAddr# a1 a2 = (GHC.Prim.indexWord16OffAddr#) a1 a2
{-# NOINLINE indexWord32OffAddr# #-}
indexWord32OffAddr# :: Addr# -> Int# -> Word#
indexWord32OffAddr# a1 a2 = (GHC.Prim.indexWord32OffAddr#) a1 a2
{-# NOINLINE indexWord64OffAddr# #-}
indexWord64OffAddr# :: Addr# -> Int# -> Word#
indexWord64OffAddr# a1 a2 = (GHC.Prim.indexWord64OffAddr#) a1 a2
{-# NOINLINE readCharOffAddr# #-}
readCharOffAddr# :: Addr# -> Int# -> State# s -> (# State# s,Char# #)
readCharOffAddr# a1 a2 a3 = (GHC.Prim.readCharOffAddr#) a1 a2 a3
{-# NOINLINE readWideCharOffAddr# #-}
readWideCharOffAddr# :: Addr# -> Int# -> State# s -> (# State# s,Char# #)
readWideCharOffAddr# a1 a2 a3 = (GHC.Prim.readWideCharOffAddr#) a1 a2 a3
{-# NOINLINE readIntOffAddr# #-}
readIntOffAddr# :: Addr# -> Int# -> State# s -> (# State# s,Int# #)
readIntOffAddr# a1 a2 a3 = (GHC.Prim.readIntOffAddr#) a1 a2 a3
{-# NOINLINE readWordOffAddr# #-}
readWordOffAddr# :: Addr# -> Int# -> State# s -> (# State# s,Word# #)
readWordOffAddr# a1 a2 a3 = (GHC.Prim.readWordOffAddr#) a1 a2 a3
{-# NOINLINE readAddrOffAddr# #-}
readAddrOffAddr# :: Addr# -> Int# -> State# s -> (# State# s,Addr# #)
readAddrOffAddr# a1 a2 a3 = (GHC.Prim.readAddrOffAddr#) a1 a2 a3
{-# NOINLINE readFloatOffAddr# #-}
readFloatOffAddr# :: Addr# -> Int# -> State# s -> (# State# s,Float# #)
readFloatOffAddr# a1 a2 a3 = (GHC.Prim.readFloatOffAddr#) a1 a2 a3
{-# NOINLINE readDoubleOffAddr# #-}
readDoubleOffAddr# :: Addr# -> Int# -> State# s -> (# State# s,Double# #)
readDoubleOffAddr# a1 a2 a3 = (GHC.Prim.readDoubleOffAddr#) a1 a2 a3
-- Machine-generated wrappers: Addr# reads/writes, MutVar# operations, and
-- exception primops. NOINLINE keeps each binding as a real function value.
{-# NOINLINE readStablePtrOffAddr# #-}
readStablePtrOffAddr# :: Addr# -> Int# -> State# s -> (# State# s,StablePtr# a #)
readStablePtrOffAddr# a1 a2 a3 = (GHC.Prim.readStablePtrOffAddr#) a1 a2 a3
{-# NOINLINE readInt8OffAddr# #-}
readInt8OffAddr# :: Addr# -> Int# -> State# s -> (# State# s,Int# #)
readInt8OffAddr# a1 a2 a3 = (GHC.Prim.readInt8OffAddr#) a1 a2 a3
{-# NOINLINE readInt16OffAddr# #-}
readInt16OffAddr# :: Addr# -> Int# -> State# s -> (# State# s,Int# #)
readInt16OffAddr# a1 a2 a3 = (GHC.Prim.readInt16OffAddr#) a1 a2 a3
{-# NOINLINE readInt32OffAddr# #-}
readInt32OffAddr# :: Addr# -> Int# -> State# s -> (# State# s,Int# #)
readInt32OffAddr# a1 a2 a3 = (GHC.Prim.readInt32OffAddr#) a1 a2 a3
{-# NOINLINE readInt64OffAddr# #-}
readInt64OffAddr# :: Addr# -> Int# -> State# s -> (# State# s,Int# #)
readInt64OffAddr# a1 a2 a3 = (GHC.Prim.readInt64OffAddr#) a1 a2 a3
{-# NOINLINE readWord8OffAddr# #-}
readWord8OffAddr# :: Addr# -> Int# -> State# s -> (# State# s,Word# #)
readWord8OffAddr# a1 a2 a3 = (GHC.Prim.readWord8OffAddr#) a1 a2 a3
{-# NOINLINE readWord16OffAddr# #-}
readWord16OffAddr# :: Addr# -> Int# -> State# s -> (# State# s,Word# #)
readWord16OffAddr# a1 a2 a3 = (GHC.Prim.readWord16OffAddr#) a1 a2 a3
{-# NOINLINE readWord32OffAddr# #-}
readWord32OffAddr# :: Addr# -> Int# -> State# s -> (# State# s,Word# #)
readWord32OffAddr# a1 a2 a3 = (GHC.Prim.readWord32OffAddr#) a1 a2 a3
{-# NOINLINE readWord64OffAddr# #-}
readWord64OffAddr# :: Addr# -> Int# -> State# s -> (# State# s,Word# #)
readWord64OffAddr# a1 a2 a3 = (GHC.Prim.readWord64OffAddr#) a1 a2 a3
{-# NOINLINE writeCharOffAddr# #-}
writeCharOffAddr# :: Addr# -> Int# -> Char# -> State# s -> State# s
writeCharOffAddr# a1 a2 a3 a4 = (GHC.Prim.writeCharOffAddr#) a1 a2 a3 a4
{-# NOINLINE writeWideCharOffAddr# #-}
writeWideCharOffAddr# :: Addr# -> Int# -> Char# -> State# s -> State# s
writeWideCharOffAddr# a1 a2 a3 a4 = (GHC.Prim.writeWideCharOffAddr#) a1 a2 a3 a4
{-# NOINLINE writeIntOffAddr# #-}
writeIntOffAddr# :: Addr# -> Int# -> Int# -> State# s -> State# s
writeIntOffAddr# a1 a2 a3 a4 = (GHC.Prim.writeIntOffAddr#) a1 a2 a3 a4
{-# NOINLINE writeWordOffAddr# #-}
writeWordOffAddr# :: Addr# -> Int# -> Word# -> State# s -> State# s
writeWordOffAddr# a1 a2 a3 a4 = (GHC.Prim.writeWordOffAddr#) a1 a2 a3 a4
{-# NOINLINE writeAddrOffAddr# #-}
writeAddrOffAddr# :: Addr# -> Int# -> Addr# -> State# s -> State# s
writeAddrOffAddr# a1 a2 a3 a4 = (GHC.Prim.writeAddrOffAddr#) a1 a2 a3 a4
{-# NOINLINE writeFloatOffAddr# #-}
writeFloatOffAddr# :: Addr# -> Int# -> Float# -> State# s -> State# s
writeFloatOffAddr# a1 a2 a3 a4 = (GHC.Prim.writeFloatOffAddr#) a1 a2 a3 a4
{-# NOINLINE writeDoubleOffAddr# #-}
writeDoubleOffAddr# :: Addr# -> Int# -> Double# -> State# s -> State# s
writeDoubleOffAddr# a1 a2 a3 a4 = (GHC.Prim.writeDoubleOffAddr#) a1 a2 a3 a4
{-# NOINLINE writeStablePtrOffAddr# #-}
writeStablePtrOffAddr# :: Addr# -> Int# -> StablePtr# a -> State# s -> State# s
writeStablePtrOffAddr# a1 a2 a3 a4 = (GHC.Prim.writeStablePtrOffAddr#) a1 a2 a3 a4
{-# NOINLINE writeInt8OffAddr# #-}
writeInt8OffAddr# :: Addr# -> Int# -> Int# -> State# s -> State# s
writeInt8OffAddr# a1 a2 a3 a4 = (GHC.Prim.writeInt8OffAddr#) a1 a2 a3 a4
{-# NOINLINE writeInt16OffAddr# #-}
writeInt16OffAddr# :: Addr# -> Int# -> Int# -> State# s -> State# s
writeInt16OffAddr# a1 a2 a3 a4 = (GHC.Prim.writeInt16OffAddr#) a1 a2 a3 a4
{-# NOINLINE writeInt32OffAddr# #-}
writeInt32OffAddr# :: Addr# -> Int# -> Int# -> State# s -> State# s
writeInt32OffAddr# a1 a2 a3 a4 = (GHC.Prim.writeInt32OffAddr#) a1 a2 a3 a4
{-# NOINLINE writeInt64OffAddr# #-}
writeInt64OffAddr# :: Addr# -> Int# -> Int# -> State# s -> State# s
writeInt64OffAddr# a1 a2 a3 a4 = (GHC.Prim.writeInt64OffAddr#) a1 a2 a3 a4
{-# NOINLINE writeWord8OffAddr# #-}
writeWord8OffAddr# :: Addr# -> Int# -> Word# -> State# s -> State# s
writeWord8OffAddr# a1 a2 a3 a4 = (GHC.Prim.writeWord8OffAddr#) a1 a2 a3 a4
{-# NOINLINE writeWord16OffAddr# #-}
writeWord16OffAddr# :: Addr# -> Int# -> Word# -> State# s -> State# s
writeWord16OffAddr# a1 a2 a3 a4 = (GHC.Prim.writeWord16OffAddr#) a1 a2 a3 a4
{-# NOINLINE writeWord32OffAddr# #-}
writeWord32OffAddr# :: Addr# -> Int# -> Word# -> State# s -> State# s
writeWord32OffAddr# a1 a2 a3 a4 = (GHC.Prim.writeWord32OffAddr#) a1 a2 a3 a4
{-# NOINLINE writeWord64OffAddr# #-}
writeWord64OffAddr# :: Addr# -> Int# -> Word# -> State# s -> State# s
writeWord64OffAddr# a1 a2 a3 a4 = (GHC.Prim.writeWord64OffAddr#) a1 a2 a3 a4
{-# NOINLINE newMutVar# #-}
newMutVar# :: a -> State# s -> (# State# s,MutVar# s a #)
newMutVar# a1 a2 = (GHC.Prim.newMutVar#) a1 a2
{-# NOINLINE readMutVar# #-}
readMutVar# :: MutVar# s a -> State# s -> (# State# s,a #)
readMutVar# a1 a2 = (GHC.Prim.readMutVar#) a1 a2
{-# NOINLINE writeMutVar# #-}
writeMutVar# :: MutVar# s a -> a -> State# s -> State# s
writeMutVar# a1 a2 a3 = (GHC.Prim.writeMutVar#) a1 a2 a3
{-# NOINLINE sameMutVar# #-}
sameMutVar# :: MutVar# s a -> MutVar# s a -> Int#
sameMutVar# a1 a2 = (GHC.Prim.sameMutVar#) a1 a2
{-# NOINLINE atomicModifyMutVar# #-}
atomicModifyMutVar# :: MutVar# s a -> (a -> b) -> State# s -> (# State# s,c #)
atomicModifyMutVar# a1 a2 a3 = (GHC.Prim.atomicModifyMutVar#) a1 a2 a3
{-# NOINLINE casMutVar# #-}
casMutVar# :: MutVar# s a -> a -> a -> State# s -> (# State# s,Int#,a #)
casMutVar# a1 a2 a3 a4 = (GHC.Prim.casMutVar#) a1 a2 a3 a4
{-# NOINLINE catch# #-}
catch# :: (State# (RealWorld) -> (# State# (RealWorld),a #)) -> (b -> State# (RealWorld) -> (# State# (RealWorld),a #)) -> State# (RealWorld) -> (# State# (RealWorld),a #)
catch# a1 a2 a3 = (GHC.Prim.catch#) a1 a2 a3
{-# NOINLINE raise# #-}
raise# :: b -> o
raise# a1 = (GHC.Prim.raise#) a1
-- Machine-generated wrappers: masking, STM, MVar and concurrency primops.
-- NOINLINE keeps each binding as a real function value.
{-# NOINLINE raiseIO# #-}
raiseIO# :: a -> State# (RealWorld) -> (# State# (RealWorld),b #)
raiseIO# a1 a2 = (GHC.Prim.raiseIO#) a1 a2
{-# NOINLINE maskAsyncExceptions# #-}
maskAsyncExceptions# :: (State# (RealWorld) -> (# State# (RealWorld),a #)) -> State# (RealWorld) -> (# State# (RealWorld),a #)
maskAsyncExceptions# a1 a2 = (GHC.Prim.maskAsyncExceptions#) a1 a2
{-# NOINLINE maskUninterruptible# #-}
maskUninterruptible# :: (State# (RealWorld) -> (# State# (RealWorld),a #)) -> State# (RealWorld) -> (# State# (RealWorld),a #)
maskUninterruptible# a1 a2 = (GHC.Prim.maskUninterruptible#) a1 a2
{-# NOINLINE unmaskAsyncExceptions# #-}
unmaskAsyncExceptions# :: (State# (RealWorld) -> (# State# (RealWorld),a #)) -> State# (RealWorld) -> (# State# (RealWorld),a #)
unmaskAsyncExceptions# a1 a2 = (GHC.Prim.unmaskAsyncExceptions#) a1 a2
{-# NOINLINE getMaskingState# #-}
getMaskingState# :: State# (RealWorld) -> (# State# (RealWorld),Int# #)
getMaskingState# a1 = (GHC.Prim.getMaskingState#) a1
{-# NOINLINE atomically# #-}
atomically# :: (State# (RealWorld) -> (# State# (RealWorld),a #)) -> State# (RealWorld) -> (# State# (RealWorld),a #)
atomically# a1 a2 = (GHC.Prim.atomically#) a1 a2
{-# NOINLINE retry# #-}
retry# :: State# (RealWorld) -> (# State# (RealWorld),a #)
retry# a1 = (GHC.Prim.retry#) a1
{-# NOINLINE catchRetry# #-}
catchRetry# :: (State# (RealWorld) -> (# State# (RealWorld),a #)) -> (State# (RealWorld) -> (# State# (RealWorld),a #)) -> State# (RealWorld) -> (# State# (RealWorld),a #)
catchRetry# a1 a2 a3 = (GHC.Prim.catchRetry#) a1 a2 a3
{-# NOINLINE catchSTM# #-}
catchSTM# :: (State# (RealWorld) -> (# State# (RealWorld),a #)) -> (b -> State# (RealWorld) -> (# State# (RealWorld),a #)) -> State# (RealWorld) -> (# State# (RealWorld),a #)
catchSTM# a1 a2 a3 = (GHC.Prim.catchSTM#) a1 a2 a3
{-# NOINLINE check# #-}
check# :: (State# (RealWorld) -> (# State# (RealWorld),a #)) -> State# (RealWorld) -> State# (RealWorld)
check# a1 a2 = (GHC.Prim.check#) a1 a2
{-# NOINLINE newTVar# #-}
newTVar# :: a -> State# s -> (# State# s,TVar# s a #)
newTVar# a1 a2 = (GHC.Prim.newTVar#) a1 a2
{-# NOINLINE readTVar# #-}
readTVar# :: TVar# s a -> State# s -> (# State# s,a #)
readTVar# a1 a2 = (GHC.Prim.readTVar#) a1 a2
{-# NOINLINE readTVarIO# #-}
readTVarIO# :: TVar# s a -> State# s -> (# State# s,a #)
readTVarIO# a1 a2 = (GHC.Prim.readTVarIO#) a1 a2
{-# NOINLINE writeTVar# #-}
writeTVar# :: TVar# s a -> a -> State# s -> State# s
writeTVar# a1 a2 a3 = (GHC.Prim.writeTVar#) a1 a2 a3
{-# NOINLINE sameTVar# #-}
sameTVar# :: TVar# s a -> TVar# s a -> Int#
sameTVar# a1 a2 = (GHC.Prim.sameTVar#) a1 a2
{-# NOINLINE newMVar# #-}
newMVar# :: State# s -> (# State# s,MVar# s a #)
newMVar# a1 = (GHC.Prim.newMVar#) a1
{-# NOINLINE takeMVar# #-}
takeMVar# :: MVar# s a -> State# s -> (# State# s,a #)
takeMVar# a1 a2 = (GHC.Prim.takeMVar#) a1 a2
{-# NOINLINE tryTakeMVar# #-}
tryTakeMVar# :: MVar# s a -> State# s -> (# State# s,Int#,a #)
tryTakeMVar# a1 a2 = (GHC.Prim.tryTakeMVar#) a1 a2
{-# NOINLINE putMVar# #-}
putMVar# :: MVar# s a -> a -> State# s -> State# s
putMVar# a1 a2 a3 = (GHC.Prim.putMVar#) a1 a2 a3
{-# NOINLINE tryPutMVar# #-}
tryPutMVar# :: MVar# s a -> a -> State# s -> (# State# s,Int# #)
tryPutMVar# a1 a2 a3 = (GHC.Prim.tryPutMVar#) a1 a2 a3
{-# NOINLINE readMVar# #-}
readMVar# :: MVar# s a -> State# s -> (# State# s,a #)
readMVar# a1 a2 = (GHC.Prim.readMVar#) a1 a2
{-# NOINLINE tryReadMVar# #-}
tryReadMVar# :: MVar# s a -> State# s -> (# State# s,Int#,a #)
tryReadMVar# a1 a2 = (GHC.Prim.tryReadMVar#) a1 a2
{-# NOINLINE sameMVar# #-}
sameMVar# :: MVar# s a -> MVar# s a -> Int#
sameMVar# a1 a2 = (GHC.Prim.sameMVar#) a1 a2
{-# NOINLINE isEmptyMVar# #-}
isEmptyMVar# :: MVar# s a -> State# s -> (# State# s,Int# #)
isEmptyMVar# a1 a2 = (GHC.Prim.isEmptyMVar#) a1 a2
{-# NOINLINE delay# #-}
delay# :: Int# -> State# s -> State# s
delay# a1 a2 = (GHC.Prim.delay#) a1 a2
{-# NOINLINE waitRead# #-}
waitRead# :: Int# -> State# s -> State# s
waitRead# a1 a2 = (GHC.Prim.waitRead#) a1 a2
{-# NOINLINE waitWrite# #-}
waitWrite# :: Int# -> State# s -> State# s
waitWrite# a1 a2 = (GHC.Prim.waitWrite#) a1 a2
{-# NOINLINE fork# #-}
fork# :: a -> State# (RealWorld) -> (# State# (RealWorld),ThreadId# #)
fork# a1 a2 = (GHC.Prim.fork#) a1 a2
{-# NOINLINE forkOn# #-}
forkOn# :: Int# -> a -> State# (RealWorld) -> (# State# (RealWorld),ThreadId# #)
forkOn# a1 a2 a3 = (GHC.Prim.forkOn#) a1 a2 a3
{-# NOINLINE killThread# #-}
killThread# :: ThreadId# -> a -> State# (RealWorld) -> State# (RealWorld)
killThread# a1 a2 a3 = (GHC.Prim.killThread#) a1 a2 a3
{-# NOINLINE yield# #-}
yield# :: State# (RealWorld) -> State# (RealWorld)
yield# a1 = (GHC.Prim.yield#) a1
{-# NOINLINE myThreadId# #-}
myThreadId# :: State# (RealWorld) -> (# State# (RealWorld),ThreadId# #)
myThreadId# a1 = (GHC.Prim.myThreadId#) a1
{-# NOINLINE labelThread# #-}
labelThread# :: ThreadId# -> Addr# -> State# (RealWorld) -> State# (RealWorld)
labelThread# a1 a2 a3 = (GHC.Prim.labelThread#) a1 a2 a3
{-# NOINLINE isCurrentThreadBound# #-}
isCurrentThreadBound# :: State# (RealWorld) -> (# State# (RealWorld),Int# #)
isCurrentThreadBound# a1 = (GHC.Prim.isCurrentThreadBound#) a1
-- Machine-generated wrappers: weak pointers, stable pointers/names, compact
-- regions and spark primops. NOINLINE keeps each binding as a real function.
{-# NOINLINE noDuplicate# #-}
noDuplicate# :: State# s -> State# s
noDuplicate# a1 = (GHC.Prim.noDuplicate#) a1
{-# NOINLINE threadStatus# #-}
threadStatus# :: ThreadId# -> State# (RealWorld) -> (# State# (RealWorld),Int#,Int#,Int# #)
threadStatus# a1 a2 = (GHC.Prim.threadStatus#) a1 a2
{-# NOINLINE mkWeak# #-}
mkWeak# :: o -> b -> (State# (RealWorld) -> (# State# (RealWorld),c #)) -> State# (RealWorld) -> (# State# (RealWorld),Weak# b #)
mkWeak# a1 a2 a3 a4 = (GHC.Prim.mkWeak#) a1 a2 a3 a4
{-# NOINLINE mkWeakNoFinalizer# #-}
mkWeakNoFinalizer# :: o -> b -> State# (RealWorld) -> (# State# (RealWorld),Weak# b #)
mkWeakNoFinalizer# a1 a2 a3 = (GHC.Prim.mkWeakNoFinalizer#) a1 a2 a3
{-# NOINLINE addCFinalizerToWeak# #-}
addCFinalizerToWeak# :: Addr# -> Addr# -> Int# -> Addr# -> Weak# b -> State# (RealWorld) -> (# State# (RealWorld),Int# #)
addCFinalizerToWeak# a1 a2 a3 a4 a5 a6 = (GHC.Prim.addCFinalizerToWeak#) a1 a2 a3 a4 a5 a6
{-# NOINLINE deRefWeak# #-}
deRefWeak# :: Weak# a -> State# (RealWorld) -> (# State# (RealWorld),Int#,a #)
deRefWeak# a1 a2 = (GHC.Prim.deRefWeak#) a1 a2
{-# NOINLINE finalizeWeak# #-}
finalizeWeak# :: Weak# a -> State# (RealWorld) -> (# State# (RealWorld),Int#,State# (RealWorld) -> (# State# (RealWorld),b #) #)
finalizeWeak# a1 a2 = (GHC.Prim.finalizeWeak#) a1 a2
{-# NOINLINE touch# #-}
touch# :: o -> State# (RealWorld) -> State# (RealWorld)
touch# a1 a2 = (GHC.Prim.touch#) a1 a2
{-# NOINLINE makeStablePtr# #-}
makeStablePtr# :: a -> State# (RealWorld) -> (# State# (RealWorld),StablePtr# a #)
makeStablePtr# a1 a2 = (GHC.Prim.makeStablePtr#) a1 a2
{-# NOINLINE deRefStablePtr# #-}
deRefStablePtr# :: StablePtr# a -> State# (RealWorld) -> (# State# (RealWorld),a #)
deRefStablePtr# a1 a2 = (GHC.Prim.deRefStablePtr#) a1 a2
{-# NOINLINE eqStablePtr# #-}
eqStablePtr# :: StablePtr# a -> StablePtr# a -> Int#
eqStablePtr# a1 a2 = (GHC.Prim.eqStablePtr#) a1 a2
{-# NOINLINE makeStableName# #-}
makeStableName# :: a -> State# (RealWorld) -> (# State# (RealWorld),StableName# a #)
makeStableName# a1 a2 = (GHC.Prim.makeStableName#) a1 a2
{-# NOINLINE eqStableName# #-}
eqStableName# :: StableName# a -> StableName# b -> Int#
eqStableName# a1 a2 = (GHC.Prim.eqStableName#) a1 a2
{-# NOINLINE stableNameToInt# #-}
stableNameToInt# :: StableName# a -> Int#
stableNameToInt# a1 = (GHC.Prim.stableNameToInt#) a1
{-# NOINLINE compactNew# #-}
compactNew# :: Word# -> State# (RealWorld) -> (# State# (RealWorld),Compact# #)
compactNew# a1 a2 = (GHC.Prim.compactNew#) a1 a2
{-# NOINLINE compactResize# #-}
compactResize# :: Compact# -> Word# -> State# (RealWorld) -> State# (RealWorld)
compactResize# a1 a2 a3 = (GHC.Prim.compactResize#) a1 a2 a3
{-# NOINLINE compactContains# #-}
compactContains# :: Compact# -> a -> State# (RealWorld) -> (# State# (RealWorld),Int# #)
compactContains# a1 a2 a3 = (GHC.Prim.compactContains#) a1 a2 a3
{-# NOINLINE compactContainsAny# #-}
compactContainsAny# :: a -> State# (RealWorld) -> (# State# (RealWorld),Int# #)
compactContainsAny# a1 a2 = (GHC.Prim.compactContainsAny#) a1 a2
{-# NOINLINE compactGetFirstBlock# #-}
compactGetFirstBlock# :: Compact# -> State# (RealWorld) -> (# State# (RealWorld),Addr#,Word# #)
compactGetFirstBlock# a1 a2 = (GHC.Prim.compactGetFirstBlock#) a1 a2
{-# NOINLINE compactGetNextBlock# #-}
compactGetNextBlock# :: Compact# -> Addr# -> State# (RealWorld) -> (# State# (RealWorld),Addr#,Word# #)
compactGetNextBlock# a1 a2 a3 = (GHC.Prim.compactGetNextBlock#) a1 a2 a3
{-# NOINLINE compactAllocateBlock# #-}
compactAllocateBlock# :: Word# -> Addr# -> State# (RealWorld) -> (# State# (RealWorld),Addr# #)
compactAllocateBlock# a1 a2 a3 = (GHC.Prim.compactAllocateBlock#) a1 a2 a3
{-# NOINLINE compactFixupPointers# #-}
compactFixupPointers# :: Addr# -> Addr# -> State# (RealWorld) -> (# State# (RealWorld),Compact#,Addr# #)
compactFixupPointers# a1 a2 a3 = (GHC.Prim.compactFixupPointers#) a1 a2 a3
{-# NOINLINE compactAdd# #-}
compactAdd# :: Compact# -> a -> State# (RealWorld) -> (# State# (RealWorld),a #)
compactAdd# a1 a2 a3 = (GHC.Prim.compactAdd#) a1 a2 a3
{-# NOINLINE compactAddWithSharing# #-}
compactAddWithSharing# :: Compact# -> a -> State# (RealWorld) -> (# State# (RealWorld),a #)
compactAddWithSharing# a1 a2 a3 = (GHC.Prim.compactAddWithSharing#) a1 a2 a3
{-# NOINLINE compactSize# #-}
compactSize# :: Compact# -> State# (RealWorld) -> (# State# (RealWorld),Word# #)
compactSize# a1 a2 = (GHC.Prim.compactSize#) a1 a2
{-# NOINLINE reallyUnsafePtrEquality# #-}
reallyUnsafePtrEquality# :: a -> a -> Int#
reallyUnsafePtrEquality# a1 a2 = (GHC.Prim.reallyUnsafePtrEquality#) a1 a2
{-# NOINLINE par# #-}
par# :: a -> Int#
par# a1 = (GHC.Prim.par#) a1
{-# NOINLINE spark# #-}
spark# :: a -> State# s -> (# State# s,a #)
spark# a1 a2 = (GHC.Prim.spark#) a1 a2
{-# NOINLINE seq# #-}
seq# :: a -> State# s -> (# State# s,a #)
seq# a1 a2 = (GHC.Prim.seq#) a1 a2
{-# NOINLINE getSpark# #-}
getSpark# :: State# s -> (# State# s,Int#,a #)
getSpark# a1 = (GHC.Prim.getSpark#) a1
{-# NOINLINE numSparks# #-}
numSparks# :: State# s -> (# State# s,Int# #)
numSparks# a1 = (GHC.Prim.numSparks#) a1
{-# NOINLINE dataToTag# #-}
dataToTag# :: a -> Int#
dataToTag# a1 = (GHC.Prim.dataToTag#) a1
{-# NOINLINE addrToAny# #-}
addrToAny# :: Addr# -> (# a #)
addrToAny# a1 = (GHC.Prim.addrToAny#) a1
-- Machine-generated wrappers: BCO/closure introspection, cost-centre,
-- tracing and prefetch primops. NOINLINE keeps each binding as a real function.
{-# NOINLINE anyToAddr# #-}
anyToAddr# :: a -> State# (RealWorld) -> (# State# (RealWorld),Addr# #)
anyToAddr# a1 a2 = (GHC.Prim.anyToAddr#) a1 a2
{-# NOINLINE mkApUpd0# #-}
mkApUpd0# :: BCO# -> (# a #)
mkApUpd0# a1 = (GHC.Prim.mkApUpd0#) a1
{-# NOINLINE newBCO# #-}
newBCO# :: ByteArray# -> ByteArray# -> Array# a -> Int# -> ByteArray# -> State# s -> (# State# s,BCO# #)
newBCO# a1 a2 a3 a4 a5 a6 = (GHC.Prim.newBCO#) a1 a2 a3 a4 a5 a6
{-# NOINLINE unpackClosure# #-}
unpackClosure# :: a -> (# Addr#,Array# b,ByteArray# #)
unpackClosure# a1 = (GHC.Prim.unpackClosure#) a1
{-# NOINLINE getApStackVal# #-}
getApStackVal# :: a -> Int# -> (# Int#,b #)
getApStackVal# a1 a2 = (GHC.Prim.getApStackVal#) a1 a2
{-# NOINLINE getCCSOf# #-}
getCCSOf# :: a -> State# s -> (# State# s,Addr# #)
getCCSOf# a1 a2 = (GHC.Prim.getCCSOf#) a1 a2
{-# NOINLINE getCurrentCCS# #-}
getCurrentCCS# :: a -> State# s -> (# State# s,Addr# #)
getCurrentCCS# a1 a2 = (GHC.Prim.getCurrentCCS#) a1 a2
{-# NOINLINE clearCCS# #-}
clearCCS# :: (State# s -> (# State# s,a #)) -> State# s -> (# State# s,a #)
clearCCS# a1 a2 = (GHC.Prim.clearCCS#) a1 a2
{-# NOINLINE traceEvent# #-}
traceEvent# :: Addr# -> State# s -> State# s
traceEvent# a1 a2 = (GHC.Prim.traceEvent#) a1 a2
{-# NOINLINE traceMarker# #-}
traceMarker# :: Addr# -> State# s -> State# s
traceMarker# a1 a2 = (GHC.Prim.traceMarker#) a1 a2
{-# NOINLINE prefetchByteArray3# #-}
prefetchByteArray3# :: ByteArray# -> Int# -> State# s -> State# s
prefetchByteArray3# a1 a2 a3 = (GHC.Prim.prefetchByteArray3#) a1 a2 a3
{-# NOINLINE prefetchMutableByteArray3# #-}
prefetchMutableByteArray3# :: MutableByteArray# s -> Int# -> State# s -> State# s
prefetchMutableByteArray3# a1 a2 a3 = (GHC.Prim.prefetchMutableByteArray3#) a1 a2 a3
{-# NOINLINE prefetchAddr3# #-}
prefetchAddr3# :: Addr# -> Int# -> State# s -> State# s
prefetchAddr3# a1 a2 a3 = (GHC.Prim.prefetchAddr3#) a1 a2 a3
{-# NOINLINE prefetchValue3# #-}
prefetchValue3# :: a -> State# s -> State# s
prefetchValue3# a1 a2 = (GHC.Prim.prefetchValue3#) a1 a2
{-# NOINLINE prefetchByteArray2# #-}
prefetchByteArray2# :: ByteArray# -> Int# -> State# s -> State# s
prefetchByteArray2# a1 a2 a3 = (GHC.Prim.prefetchByteArray2#) a1 a2 a3
{-# NOINLINE prefetchMutableByteArray2# #-}
prefetchMutableByteArray2# :: MutableByteArray# s -> Int# -> State# s -> State# s
prefetchMutableByteArray2# a1 a2 a3 = (GHC.Prim.prefetchMutableByteArray2#) a1 a2 a3
{-# NOINLINE prefetchAddr2# #-}
prefetchAddr2# :: Addr# -> Int# -> State# s -> State# s
prefetchAddr2# a1 a2 a3 = (GHC.Prim.prefetchAddr2#) a1 a2 a3
{-# NOINLINE prefetchValue2# #-}
prefetchValue2# :: a -> State# s -> State# s
prefetchValue2# a1 a2 = (GHC.Prim.prefetchValue2#) a1 a2
{-# NOINLINE prefetchByteArray1# #-}
prefetchByteArray1# :: ByteArray# -> Int# -> State# s -> State# s
prefetchByteArray1# a1 a2 a3 = (GHC.Prim.prefetchByteArray1#) a1 a2 a3
{-# NOINLINE prefetchMutableByteArray1# #-}
prefetchMutableByteArray1# :: MutableByteArray# s -> Int# -> State# s -> State# s
prefetchMutableByteArray1# a1 a2 a3 = (GHC.Prim.prefetchMutableByteArray1#) a1 a2 a3
{-# NOINLINE prefetchAddr1# #-}
prefetchAddr1# :: Addr# -> Int# -> State# s -> State# s
prefetchAddr1# a1 a2 a3 = (GHC.Prim.prefetchAddr1#) a1 a2 a3
{-# NOINLINE prefetchValue1# #-}
prefetchValue1# :: a -> State# s -> State# s
prefetchValue1# a1 a2 = (GHC.Prim.prefetchValue1#) a1 a2
{-# NOINLINE prefetchByteArray0# #-}
prefetchByteArray0# :: ByteArray# -> Int# -> State# s -> State# s
prefetchByteArray0# a1 a2 a3 = (GHC.Prim.prefetchByteArray0#) a1 a2 a3
{-# NOINLINE prefetchMutableByteArray0# #-}
prefetchMutableByteArray0# :: MutableByteArray# s -> Int# -> State# s -> State# s
prefetchMutableByteArray0# a1 a2 a3 = (GHC.Prim.prefetchMutableByteArray0#) a1 a2 a3
{-# NOINLINE prefetchAddr0# #-}
prefetchAddr0# :: Addr# -> Int# -> State# s -> State# s
prefetchAddr0# a1 a2 a3 = (GHC.Prim.prefetchAddr0#) a1 a2 a3
{-# NOINLINE prefetchValue0# #-}
prefetchValue0# :: a -> State# s -> State# s
prefetchValue0# a1 a2 = (GHC.Prim.prefetchValue0#) a1 a2
| null | https://raw.githubusercontent.com/TerrorJack/ghc-alter/db736f34095eef416b7e077f9b26fc03aa78c311/ghc-alter/boot-lib/ghc-prim/GHC/PrimopWrappers.hs | haskell | # LANGUAGE MagicHash, NoImplicitPrelude, UnboxedTuples #
# NOINLINE geChar# #
# NOINLINE neChar# #
# NOINLINE (*#) #
# NOINLINE (==#) #
# NOINLINE (/=#) #
# NOINLINE uncheckedIShiftL# #
# NOINLINE timesWord# #
# NOINLINE quotRemWord# #
# NOINLINE popCnt16# #
# NOINLINE (==##) #
# NOINLINE (/=##) #
# NOINLINE double2Float# #
# NOINLINE sinFloat# #
# NOINLINE cosFloat# #
# NOINLINE float2Double# #
# NOINLINE cloneSmallArray# #
# NOINLINE writeIntOffAddr# #
# NOINLINE unmaskAsyncExceptions# #
# NOINLINE getMaskingState# #
# NOINLINE isCurrentThreadBound# #
# NOINLINE noDuplicate# #
# NOINLINE finalizeWeak# #
# NOINLINE eqStableName# #
# NOINLINE seq# #
# NOINLINE anyToAddr# #
# NOINLINE prefetchAddr1# # | module GHC.PrimopWrappers where
import qualified GHC.Prim
import GHC.Tuple ()
import GHC.Prim (Char#, Int#, Word#, Float#, Double#, State#, MutableArray#, Array#, SmallMutableArray#, SmallArray#, MutableByteArray#, ByteArray#, Addr#, StablePtr#, MutableArrayArray#, ArrayArray#, MutVar#, RealWorld, TVar#, MVar#, ThreadId#, Weak#, StableName#, Compact#, BCO#)
-- Char# and Int# primop wrappers (comparisons, arithmetic, conversions);
-- NOINLINE keeps each primop usable as a first-class function value.
{-# NOINLINE gtChar# #-}
gtChar# :: Char# -> Char# -> Int#
gtChar# a1 a2 = (GHC.Prim.gtChar#) a1 a2
{-# NOINLINE geChar# #-}
geChar# :: Char# -> Char# -> Int#
geChar# a1 a2 = (GHC.Prim.geChar#) a1 a2
{-# NOINLINE eqChar# #-}
eqChar# :: Char# -> Char# -> Int#
eqChar# a1 a2 = (GHC.Prim.eqChar#) a1 a2
{-# NOINLINE neChar# #-}
neChar# :: Char# -> Char# -> Int#
neChar# a1 a2 = (GHC.Prim.neChar#) a1 a2
{-# NOINLINE ltChar# #-}
ltChar# :: Char# -> Char# -> Int#
ltChar# a1 a2 = (GHC.Prim.ltChar#) a1 a2
{-# NOINLINE leChar# #-}
leChar# :: Char# -> Char# -> Int#
leChar# a1 a2 = (GHC.Prim.leChar#) a1 a2
{-# NOINLINE ord# #-}
ord# :: Char# -> Int#
ord# a1 = (GHC.Prim.ord#) a1
{-# NOINLINE (+#) #-}
(+#) :: Int# -> Int# -> Int#
(+#) a1 a2 = (GHC.Prim.+#) a1 a2
{-# NOINLINE (-#) #-}
(-#) :: Int# -> Int# -> Int#
(-#) a1 a2 = (GHC.Prim.-#) a1 a2
{-# NOINLINE (*#) #-}
(*#) :: Int# -> Int# -> Int#
(*#) a1 a2 = (GHC.Prim.*#) a1 a2
{-# NOINLINE mulIntMayOflo# #-}
mulIntMayOflo# :: Int# -> Int# -> Int#
mulIntMayOflo# a1 a2 = (GHC.Prim.mulIntMayOflo#) a1 a2
{-# NOINLINE quotInt# #-}
quotInt# :: Int# -> Int# -> Int#
quotInt# a1 a2 = (GHC.Prim.quotInt#) a1 a2
{-# NOINLINE remInt# #-}
remInt# :: Int# -> Int# -> Int#
remInt# a1 a2 = (GHC.Prim.remInt#) a1 a2
{-# NOINLINE quotRemInt# #-}
quotRemInt# :: Int# -> Int# -> (# Int#,Int# #)
quotRemInt# a1 a2 = (GHC.Prim.quotRemInt#) a1 a2
{-# NOINLINE andI# #-}
andI# :: Int# -> Int# -> Int#
andI# a1 a2 = (GHC.Prim.andI#) a1 a2
{-# NOINLINE orI# #-}
orI# :: Int# -> Int# -> Int#
orI# a1 a2 = (GHC.Prim.orI#) a1 a2
{-# NOINLINE xorI# #-}
xorI# :: Int# -> Int# -> Int#
xorI# a1 a2 = (GHC.Prim.xorI#) a1 a2
{-# NOINLINE notI# #-}
notI# :: Int# -> Int#
notI# a1 = (GHC.Prim.notI#) a1
{-# NOINLINE negateInt# #-}
negateInt# :: Int# -> Int#
negateInt# a1 = (GHC.Prim.negateInt#) a1
{-# NOINLINE addIntC# #-}
addIntC# :: Int# -> Int# -> (# Int#,Int# #)
addIntC# a1 a2 = (GHC.Prim.addIntC#) a1 a2
{-# NOINLINE subIntC# #-}
subIntC# :: Int# -> Int# -> (# Int#,Int# #)
subIntC# a1 a2 = (GHC.Prim.subIntC#) a1 a2
{-# NOINLINE (>#) #-}
(>#) :: Int# -> Int# -> Int#
(>#) a1 a2 = (GHC.Prim.>#) a1 a2
{-# NOINLINE (>=#) #-}
(>=#) :: Int# -> Int# -> Int#
(>=#) a1 a2 = (GHC.Prim.>=#) a1 a2
{-# NOINLINE (==#) #-}
(==#) :: Int# -> Int# -> Int#
(==#) a1 a2 = (GHC.Prim.==#) a1 a2
{-# NOINLINE (/=#) #-}
(/=#) :: Int# -> Int# -> Int#
(/=#) a1 a2 = (GHC.Prim./=#) a1 a2
{-# NOINLINE (<#) #-}
(<#) :: Int# -> Int# -> Int#
(<#) a1 a2 = (GHC.Prim.<#) a1 a2
{-# NOINLINE (<=#) #-}
(<=#) :: Int# -> Int# -> Int#
(<=#) a1 a2 = (GHC.Prim.<=#) a1 a2
{-# NOINLINE chr# #-}
chr# :: Int# -> Char#
chr# a1 = (GHC.Prim.chr#) a1
{-# NOINLINE int2Word# #-}
int2Word# :: Int# -> Word#
int2Word# a1 = (GHC.Prim.int2Word#) a1
{-# NOINLINE int2Float# #-}
int2Float# :: Int# -> Float#
int2Float# a1 = (GHC.Prim.int2Float#) a1
{-# NOINLINE int2Double# #-}
int2Double# :: Int# -> Double#
int2Double# a1 = (GHC.Prim.int2Double#) a1
{-# NOINLINE word2Float# #-}
word2Float# :: Word# -> Float#
word2Float# a1 = (GHC.Prim.word2Float#) a1
{-# NOINLINE word2Double# #-}
word2Double# :: Word# -> Double#
word2Double# a1 = (GHC.Prim.word2Double#) a1
{-# NOINLINE uncheckedIShiftL# #-}
uncheckedIShiftL# :: Int# -> Int# -> Int#
uncheckedIShiftL# a1 a2 = (GHC.Prim.uncheckedIShiftL#) a1 a2
-- Int# shift and Word# arithmetic/bitwise/comparison/popCnt wrappers;
-- NOINLINE keeps each primop usable as a first-class function value.
{-# NOINLINE uncheckedIShiftRA# #-}
uncheckedIShiftRA# :: Int# -> Int# -> Int#
uncheckedIShiftRA# a1 a2 = (GHC.Prim.uncheckedIShiftRA#) a1 a2
{-# NOINLINE uncheckedIShiftRL# #-}
uncheckedIShiftRL# :: Int# -> Int# -> Int#
uncheckedIShiftRL# a1 a2 = (GHC.Prim.uncheckedIShiftRL#) a1 a2
{-# NOINLINE plusWord# #-}
plusWord# :: Word# -> Word# -> Word#
plusWord# a1 a2 = (GHC.Prim.plusWord#) a1 a2
{-# NOINLINE subWordC# #-}
subWordC# :: Word# -> Word# -> (# Word#,Int# #)
subWordC# a1 a2 = (GHC.Prim.subWordC#) a1 a2
{-# NOINLINE plusWord2# #-}
plusWord2# :: Word# -> Word# -> (# Word#,Word# #)
plusWord2# a1 a2 = (GHC.Prim.plusWord2#) a1 a2
{-# NOINLINE minusWord# #-}
minusWord# :: Word# -> Word# -> Word#
minusWord# a1 a2 = (GHC.Prim.minusWord#) a1 a2
{-# NOINLINE timesWord# #-}
timesWord# :: Word# -> Word# -> Word#
timesWord# a1 a2 = (GHC.Prim.timesWord#) a1 a2
{-# NOINLINE timesWord2# #-}
timesWord2# :: Word# -> Word# -> (# Word#,Word# #)
timesWord2# a1 a2 = (GHC.Prim.timesWord2#) a1 a2
{-# NOINLINE quotWord# #-}
quotWord# :: Word# -> Word# -> Word#
quotWord# a1 a2 = (GHC.Prim.quotWord#) a1 a2
{-# NOINLINE remWord# #-}
remWord# :: Word# -> Word# -> Word#
remWord# a1 a2 = (GHC.Prim.remWord#) a1 a2
{-# NOINLINE quotRemWord# #-}
quotRemWord# :: Word# -> Word# -> (# Word#,Word# #)
quotRemWord# a1 a2 = (GHC.Prim.quotRemWord#) a1 a2
{-# NOINLINE quotRemWord2# #-}
quotRemWord2# :: Word# -> Word# -> Word# -> (# Word#,Word# #)
quotRemWord2# a1 a2 a3 = (GHC.Prim.quotRemWord2#) a1 a2 a3
{-# NOINLINE and# #-}
and# :: Word# -> Word# -> Word#
and# a1 a2 = (GHC.Prim.and#) a1 a2
{-# NOINLINE or# #-}
or# :: Word# -> Word# -> Word#
or# a1 a2 = (GHC.Prim.or#) a1 a2
{-# NOINLINE xor# #-}
xor# :: Word# -> Word# -> Word#
xor# a1 a2 = (GHC.Prim.xor#) a1 a2
{-# NOINLINE not# #-}
not# :: Word# -> Word#
not# a1 = (GHC.Prim.not#) a1
{-# NOINLINE uncheckedShiftL# #-}
uncheckedShiftL# :: Word# -> Int# -> Word#
uncheckedShiftL# a1 a2 = (GHC.Prim.uncheckedShiftL#) a1 a2
{-# NOINLINE uncheckedShiftRL# #-}
uncheckedShiftRL# :: Word# -> Int# -> Word#
uncheckedShiftRL# a1 a2 = (GHC.Prim.uncheckedShiftRL#) a1 a2
{-# NOINLINE word2Int# #-}
word2Int# :: Word# -> Int#
word2Int# a1 = (GHC.Prim.word2Int#) a1
{-# NOINLINE gtWord# #-}
gtWord# :: Word# -> Word# -> Int#
gtWord# a1 a2 = (GHC.Prim.gtWord#) a1 a2
{-# NOINLINE geWord# #-}
geWord# :: Word# -> Word# -> Int#
geWord# a1 a2 = (GHC.Prim.geWord#) a1 a2
{-# NOINLINE eqWord# #-}
eqWord# :: Word# -> Word# -> Int#
eqWord# a1 a2 = (GHC.Prim.eqWord#) a1 a2
{-# NOINLINE neWord# #-}
neWord# :: Word# -> Word# -> Int#
neWord# a1 a2 = (GHC.Prim.neWord#) a1 a2
{-# NOINLINE ltWord# #-}
ltWord# :: Word# -> Word# -> Int#
ltWord# a1 a2 = (GHC.Prim.ltWord#) a1 a2
{-# NOINLINE leWord# #-}
leWord# :: Word# -> Word# -> Int#
leWord# a1 a2 = (GHC.Prim.leWord#) a1 a2
{-# NOINLINE popCnt8# #-}
popCnt8# :: Word# -> Word#
popCnt8# a1 = (GHC.Prim.popCnt8#) a1
{-# NOINLINE popCnt16# #-}
popCnt16# :: Word# -> Word#
popCnt16# a1 = (GHC.Prim.popCnt16#) a1
{-# NOINLINE popCnt32# #-}
popCnt32# :: Word# -> Word#
popCnt32# a1 = (GHC.Prim.popCnt32#) a1
{-# NOINLINE popCnt64# #-}
popCnt64# :: Word# -> Word#
popCnt64# a1 = (GHC.Prim.popCnt64#) a1
{-# NOINLINE popCnt# #-}
popCnt# :: Word# -> Word#
popCnt# a1 = (GHC.Prim.popCnt#) a1
-- Count-leading/trailing-zero, byte-swap and narrowing wrappers;
-- NOINLINE keeps each primop usable as a first-class function value.
{-# NOINLINE clz8# #-}
clz8# :: Word# -> Word#
clz8# a1 = (GHC.Prim.clz8#) a1
{-# NOINLINE clz16# #-}
clz16# :: Word# -> Word#
clz16# a1 = (GHC.Prim.clz16#) a1
{-# NOINLINE clz32# #-}
clz32# :: Word# -> Word#
clz32# a1 = (GHC.Prim.clz32#) a1
{-# NOINLINE clz64# #-}
clz64# :: Word# -> Word#
clz64# a1 = (GHC.Prim.clz64#) a1
{-# NOINLINE clz# #-}
clz# :: Word# -> Word#
clz# a1 = (GHC.Prim.clz#) a1
{-# NOINLINE ctz8# #-}
ctz8# :: Word# -> Word#
ctz8# a1 = (GHC.Prim.ctz8#) a1
{-# NOINLINE ctz16# #-}
ctz16# :: Word# -> Word#
ctz16# a1 = (GHC.Prim.ctz16#) a1
{-# NOINLINE ctz32# #-}
ctz32# :: Word# -> Word#
ctz32# a1 = (GHC.Prim.ctz32#) a1
{-# NOINLINE ctz64# #-}
ctz64# :: Word# -> Word#
ctz64# a1 = (GHC.Prim.ctz64#) a1
{-# NOINLINE ctz# #-}
ctz# :: Word# -> Word#
ctz# a1 = (GHC.Prim.ctz#) a1
{-# NOINLINE byteSwap16# #-}
byteSwap16# :: Word# -> Word#
byteSwap16# a1 = (GHC.Prim.byteSwap16#) a1
{-# NOINLINE byteSwap32# #-}
byteSwap32# :: Word# -> Word#
byteSwap32# a1 = (GHC.Prim.byteSwap32#) a1
{-# NOINLINE byteSwap64# #-}
byteSwap64# :: Word# -> Word#
byteSwap64# a1 = (GHC.Prim.byteSwap64#) a1
{-# NOINLINE byteSwap# #-}
byteSwap# :: Word# -> Word#
byteSwap# a1 = (GHC.Prim.byteSwap#) a1
{-# NOINLINE narrow8Int# #-}
narrow8Int# :: Int# -> Int#
narrow8Int# a1 = (GHC.Prim.narrow8Int#) a1
{-# NOINLINE narrow16Int# #-}
narrow16Int# :: Int# -> Int#
narrow16Int# a1 = (GHC.Prim.narrow16Int#) a1
{-# NOINLINE narrow32Int# #-}
narrow32Int# :: Int# -> Int#
narrow32Int# a1 = (GHC.Prim.narrow32Int#) a1
{-# NOINLINE narrow8Word# #-}
narrow8Word# :: Word# -> Word#
narrow8Word# a1 = (GHC.Prim.narrow8Word#) a1
{-# NOINLINE narrow16Word# #-}
narrow16Word# :: Word# -> Word#
narrow16Word# a1 = (GHC.Prim.narrow16Word#) a1
{-# NOINLINE narrow32Word# #-}
narrow32Word# :: Word# -> Word#
narrow32Word# a1 = (GHC.Prim.narrow32Word#) a1
-- Double# comparison, arithmetic, transcendental and decode wrappers;
-- NOINLINE keeps each primop usable as a first-class function value.
{-# NOINLINE (>##) #-}
(>##) :: Double# -> Double# -> Int#
(>##) a1 a2 = (GHC.Prim.>##) a1 a2
{-# NOINLINE (>=##) #-}
(>=##) :: Double# -> Double# -> Int#
(>=##) a1 a2 = (GHC.Prim.>=##) a1 a2
{-# NOINLINE (==##) #-}
(==##) :: Double# -> Double# -> Int#
(==##) a1 a2 = (GHC.Prim.==##) a1 a2
{-# NOINLINE (/=##) #-}
(/=##) :: Double# -> Double# -> Int#
(/=##) a1 a2 = (GHC.Prim./=##) a1 a2
{-# NOINLINE (<##) #-}
(<##) :: Double# -> Double# -> Int#
(<##) a1 a2 = (GHC.Prim.<##) a1 a2
{-# NOINLINE (<=##) #-}
(<=##) :: Double# -> Double# -> Int#
(<=##) a1 a2 = (GHC.Prim.<=##) a1 a2
{-# NOINLINE (+##) #-}
(+##) :: Double# -> Double# -> Double#
(+##) a1 a2 = (GHC.Prim.+##) a1 a2
{-# NOINLINE (-##) #-}
(-##) :: Double# -> Double# -> Double#
(-##) a1 a2 = (GHC.Prim.-##) a1 a2
{-# NOINLINE (*##) #-}
(*##) :: Double# -> Double# -> Double#
(*##) a1 a2 = (GHC.Prim.*##) a1 a2
{-# NOINLINE (/##) #-}
(/##) :: Double# -> Double# -> Double#
(/##) a1 a2 = (GHC.Prim./##) a1 a2
{-# NOINLINE negateDouble# #-}
negateDouble# :: Double# -> Double#
negateDouble# a1 = (GHC.Prim.negateDouble#) a1
{-# NOINLINE fabsDouble# #-}
fabsDouble# :: Double# -> Double#
fabsDouble# a1 = (GHC.Prim.fabsDouble#) a1
{-# NOINLINE double2Int# #-}
double2Int# :: Double# -> Int#
double2Int# a1 = (GHC.Prim.double2Int#) a1
{-# NOINLINE double2Float# #-}
double2Float# :: Double# -> Float#
double2Float# a1 = (GHC.Prim.double2Float#) a1
{-# NOINLINE expDouble# #-}
expDouble# :: Double# -> Double#
expDouble# a1 = (GHC.Prim.expDouble#) a1
{-# NOINLINE logDouble# #-}
logDouble# :: Double# -> Double#
logDouble# a1 = (GHC.Prim.logDouble#) a1
{-# NOINLINE sqrtDouble# #-}
sqrtDouble# :: Double# -> Double#
sqrtDouble# a1 = (GHC.Prim.sqrtDouble#) a1
{-# NOINLINE sinDouble# #-}
sinDouble# :: Double# -> Double#
sinDouble# a1 = (GHC.Prim.sinDouble#) a1
{-# NOINLINE cosDouble# #-}
cosDouble# :: Double# -> Double#
cosDouble# a1 = (GHC.Prim.cosDouble#) a1
{-# NOINLINE tanDouble# #-}
tanDouble# :: Double# -> Double#
tanDouble# a1 = (GHC.Prim.tanDouble#) a1
{-# NOINLINE asinDouble# #-}
asinDouble# :: Double# -> Double#
asinDouble# a1 = (GHC.Prim.asinDouble#) a1
{-# NOINLINE acosDouble# #-}
acosDouble# :: Double# -> Double#
acosDouble# a1 = (GHC.Prim.acosDouble#) a1
{-# NOINLINE atanDouble# #-}
atanDouble# :: Double# -> Double#
atanDouble# a1 = (GHC.Prim.atanDouble#) a1
{-# NOINLINE sinhDouble# #-}
sinhDouble# :: Double# -> Double#
sinhDouble# a1 = (GHC.Prim.sinhDouble#) a1
{-# NOINLINE coshDouble# #-}
coshDouble# :: Double# -> Double#
coshDouble# a1 = (GHC.Prim.coshDouble#) a1
{-# NOINLINE tanhDouble# #-}
tanhDouble# :: Double# -> Double#
tanhDouble# a1 = (GHC.Prim.tanhDouble#) a1
{-# NOINLINE (**##) #-}
(**##) :: Double# -> Double# -> Double#
(**##) a1 a2 = (GHC.Prim.**##) a1 a2
{-# NOINLINE decodeDouble_2Int# #-}
decodeDouble_2Int# :: Double# -> (# Int#,Word#,Word#,Int# #)
decodeDouble_2Int# a1 = (GHC.Prim.decodeDouble_2Int#) a1
{-# NOINLINE decodeDouble_Int64# #-}
decodeDouble_Int64# :: Double# -> (# Int#,Int# #)
decodeDouble_Int64# a1 = (GHC.Prim.decodeDouble_Int64#) a1
-- Float# comparison, arithmetic, transcendental and decode wrappers;
-- NOINLINE keeps each primop usable as a first-class function value.
{-# NOINLINE gtFloat# #-}
gtFloat# :: Float# -> Float# -> Int#
gtFloat# a1 a2 = (GHC.Prim.gtFloat#) a1 a2
{-# NOINLINE geFloat# #-}
geFloat# :: Float# -> Float# -> Int#
geFloat# a1 a2 = (GHC.Prim.geFloat#) a1 a2
{-# NOINLINE eqFloat# #-}
eqFloat# :: Float# -> Float# -> Int#
eqFloat# a1 a2 = (GHC.Prim.eqFloat#) a1 a2
{-# NOINLINE neFloat# #-}
neFloat# :: Float# -> Float# -> Int#
neFloat# a1 a2 = (GHC.Prim.neFloat#) a1 a2
{-# NOINLINE ltFloat# #-}
ltFloat# :: Float# -> Float# -> Int#
ltFloat# a1 a2 = (GHC.Prim.ltFloat#) a1 a2
{-# NOINLINE leFloat# #-}
leFloat# :: Float# -> Float# -> Int#
leFloat# a1 a2 = (GHC.Prim.leFloat#) a1 a2
{-# NOINLINE plusFloat# #-}
plusFloat# :: Float# -> Float# -> Float#
plusFloat# a1 a2 = (GHC.Prim.plusFloat#) a1 a2
{-# NOINLINE minusFloat# #-}
minusFloat# :: Float# -> Float# -> Float#
minusFloat# a1 a2 = (GHC.Prim.minusFloat#) a1 a2
{-# NOINLINE timesFloat# #-}
timesFloat# :: Float# -> Float# -> Float#
timesFloat# a1 a2 = (GHC.Prim.timesFloat#) a1 a2
{-# NOINLINE divideFloat# #-}
divideFloat# :: Float# -> Float# -> Float#
divideFloat# a1 a2 = (GHC.Prim.divideFloat#) a1 a2
{-# NOINLINE negateFloat# #-}
negateFloat# :: Float# -> Float#
negateFloat# a1 = (GHC.Prim.negateFloat#) a1
{-# NOINLINE fabsFloat# #-}
fabsFloat# :: Float# -> Float#
fabsFloat# a1 = (GHC.Prim.fabsFloat#) a1
{-# NOINLINE float2Int# #-}
float2Int# :: Float# -> Int#
float2Int# a1 = (GHC.Prim.float2Int#) a1
{-# NOINLINE expFloat# #-}
expFloat# :: Float# -> Float#
expFloat# a1 = (GHC.Prim.expFloat#) a1
{-# NOINLINE logFloat# #-}
logFloat# :: Float# -> Float#
logFloat# a1 = (GHC.Prim.logFloat#) a1
{-# NOINLINE sqrtFloat# #-}
sqrtFloat# :: Float# -> Float#
sqrtFloat# a1 = (GHC.Prim.sqrtFloat#) a1
{-# NOINLINE sinFloat# #-}
sinFloat# :: Float# -> Float#
sinFloat# a1 = (GHC.Prim.sinFloat#) a1
{-# NOINLINE cosFloat# #-}
cosFloat# :: Float# -> Float#
cosFloat# a1 = (GHC.Prim.cosFloat#) a1
{-# NOINLINE tanFloat# #-}
tanFloat# :: Float# -> Float#
tanFloat# a1 = (GHC.Prim.tanFloat#) a1
{-# NOINLINE asinFloat# #-}
asinFloat# :: Float# -> Float#
asinFloat# a1 = (GHC.Prim.asinFloat#) a1
{-# NOINLINE acosFloat# #-}
acosFloat# :: Float# -> Float#
acosFloat# a1 = (GHC.Prim.acosFloat#) a1
{-# NOINLINE atanFloat# #-}
atanFloat# :: Float# -> Float#
atanFloat# a1 = (GHC.Prim.atanFloat#) a1
{-# NOINLINE sinhFloat# #-}
sinhFloat# :: Float# -> Float#
sinhFloat# a1 = (GHC.Prim.sinhFloat#) a1
{-# NOINLINE coshFloat# #-}
coshFloat# :: Float# -> Float#
coshFloat# a1 = (GHC.Prim.coshFloat#) a1
{-# NOINLINE tanhFloat# #-}
tanhFloat# :: Float# -> Float#
tanhFloat# a1 = (GHC.Prim.tanhFloat#) a1
{-# NOINLINE powerFloat# #-}
powerFloat# :: Float# -> Float# -> Float#
powerFloat# a1 a2 = (GHC.Prim.powerFloat#) a1 a2
{-# NOINLINE float2Double# #-}
float2Double# :: Float# -> Double#
float2Double# a1 = (GHC.Prim.float2Double#) a1
{-# NOINLINE decodeFloat_Int# #-}
decodeFloat_Int# :: Float# -> (# Int#,Int# #)
decodeFloat_Int# a1 = (GHC.Prim.decodeFloat_Int#) a1
-- Boxed Array# / MutableArray# primop wrappers;
-- NOINLINE keeps each primop usable as a first-class function value.
{-# NOINLINE newArray# #-}
newArray# :: Int# -> a -> State# s -> (# State# s,MutableArray# s a #)
newArray# a1 a2 a3 = (GHC.Prim.newArray#) a1 a2 a3
{-# NOINLINE sameMutableArray# #-}
sameMutableArray# :: MutableArray# s a -> MutableArray# s a -> Int#
sameMutableArray# a1 a2 = (GHC.Prim.sameMutableArray#) a1 a2
{-# NOINLINE readArray# #-}
readArray# :: MutableArray# s a -> Int# -> State# s -> (# State# s,a #)
readArray# a1 a2 a3 = (GHC.Prim.readArray#) a1 a2 a3
{-# NOINLINE writeArray# #-}
writeArray# :: MutableArray# s a -> Int# -> a -> State# s -> State# s
writeArray# a1 a2 a3 a4 = (GHC.Prim.writeArray#) a1 a2 a3 a4
{-# NOINLINE sizeofArray# #-}
sizeofArray# :: Array# a -> Int#
sizeofArray# a1 = (GHC.Prim.sizeofArray#) a1
{-# NOINLINE sizeofMutableArray# #-}
sizeofMutableArray# :: MutableArray# s a -> Int#
sizeofMutableArray# a1 = (GHC.Prim.sizeofMutableArray#) a1
{-# NOINLINE indexArray# #-}
indexArray# :: Array# a -> Int# -> (# a #)
indexArray# a1 a2 = (GHC.Prim.indexArray#) a1 a2
{-# NOINLINE unsafeFreezeArray# #-}
unsafeFreezeArray# :: MutableArray# s a -> State# s -> (# State# s,Array# a #)
unsafeFreezeArray# a1 a2 = (GHC.Prim.unsafeFreezeArray#) a1 a2
{-# NOINLINE unsafeThawArray# #-}
unsafeThawArray# :: Array# a -> State# s -> (# State# s,MutableArray# s a #)
unsafeThawArray# a1 a2 = (GHC.Prim.unsafeThawArray#) a1 a2
{-# NOINLINE copyArray# #-}
copyArray# :: Array# a -> Int# -> MutableArray# s a -> Int# -> Int# -> State# s -> State# s
copyArray# a1 a2 a3 a4 a5 a6 = (GHC.Prim.copyArray#) a1 a2 a3 a4 a5 a6
{-# NOINLINE copyMutableArray# #-}
copyMutableArray# :: MutableArray# s a -> Int# -> MutableArray# s a -> Int# -> Int# -> State# s -> State# s
copyMutableArray# a1 a2 a3 a4 a5 a6 = (GHC.Prim.copyMutableArray#) a1 a2 a3 a4 a5 a6
{-# NOINLINE cloneArray# #-}
cloneArray# :: Array# a -> Int# -> Int# -> Array# a
cloneArray# a1 a2 a3 = (GHC.Prim.cloneArray#) a1 a2 a3
{-# NOINLINE cloneMutableArray# #-}
cloneMutableArray# :: MutableArray# s a -> Int# -> Int# -> State# s -> (# State# s,MutableArray# s a #)
cloneMutableArray# a1 a2 a3 a4 = (GHC.Prim.cloneMutableArray#) a1 a2 a3 a4
{-# NOINLINE freezeArray# #-}
freezeArray# :: MutableArray# s a -> Int# -> Int# -> State# s -> (# State# s,Array# a #)
freezeArray# a1 a2 a3 a4 = (GHC.Prim.freezeArray#) a1 a2 a3 a4
{-# NOINLINE thawArray# #-}
thawArray# :: Array# a -> Int# -> Int# -> State# s -> (# State# s,MutableArray# s a #)
thawArray# a1 a2 a3 a4 = (GHC.Prim.thawArray#) a1 a2 a3 a4
{-# NOINLINE casArray# #-}
casArray# :: MutableArray# s a -> Int# -> a -> a -> State# s -> (# State# s,Int#,a #)
casArray# a1 a2 a3 a4 a5 = (GHC.Prim.casArray#) a1 a2 a3 a4 a5
-- SmallArray# / SmallMutableArray# primop wrappers;
-- NOINLINE keeps each primop usable as a first-class function value.
{-# NOINLINE newSmallArray# #-}
newSmallArray# :: Int# -> a -> State# s -> (# State# s,SmallMutableArray# s a #)
newSmallArray# a1 a2 a3 = (GHC.Prim.newSmallArray#) a1 a2 a3
{-# NOINLINE sameSmallMutableArray# #-}
sameSmallMutableArray# :: SmallMutableArray# s a -> SmallMutableArray# s a -> Int#
sameSmallMutableArray# a1 a2 = (GHC.Prim.sameSmallMutableArray#) a1 a2
{-# NOINLINE readSmallArray# #-}
readSmallArray# :: SmallMutableArray# s a -> Int# -> State# s -> (# State# s,a #)
readSmallArray# a1 a2 a3 = (GHC.Prim.readSmallArray#) a1 a2 a3
{-# NOINLINE writeSmallArray# #-}
writeSmallArray# :: SmallMutableArray# s a -> Int# -> a -> State# s -> State# s
writeSmallArray# a1 a2 a3 a4 = (GHC.Prim.writeSmallArray#) a1 a2 a3 a4
{-# NOINLINE sizeofSmallArray# #-}
sizeofSmallArray# :: SmallArray# a -> Int#
sizeofSmallArray# a1 = (GHC.Prim.sizeofSmallArray#) a1
{-# NOINLINE sizeofSmallMutableArray# #-}
sizeofSmallMutableArray# :: SmallMutableArray# s a -> Int#
sizeofSmallMutableArray# a1 = (GHC.Prim.sizeofSmallMutableArray#) a1
{-# NOINLINE indexSmallArray# #-}
indexSmallArray# :: SmallArray# a -> Int# -> (# a #)
indexSmallArray# a1 a2 = (GHC.Prim.indexSmallArray#) a1 a2
{-# NOINLINE unsafeFreezeSmallArray# #-}
unsafeFreezeSmallArray# :: SmallMutableArray# s a -> State# s -> (# State# s,SmallArray# a #)
unsafeFreezeSmallArray# a1 a2 = (GHC.Prim.unsafeFreezeSmallArray#) a1 a2
{-# NOINLINE unsafeThawSmallArray# #-}
unsafeThawSmallArray# :: SmallArray# a -> State# s -> (# State# s,SmallMutableArray# s a #)
unsafeThawSmallArray# a1 a2 = (GHC.Prim.unsafeThawSmallArray#) a1 a2
{-# NOINLINE copySmallArray# #-}
copySmallArray# :: SmallArray# a -> Int# -> SmallMutableArray# s a -> Int# -> Int# -> State# s -> State# s
copySmallArray# a1 a2 a3 a4 a5 a6 = (GHC.Prim.copySmallArray#) a1 a2 a3 a4 a5 a6
{-# NOINLINE copySmallMutableArray# #-}
copySmallMutableArray# :: SmallMutableArray# s a -> Int# -> SmallMutableArray# s a -> Int# -> Int# -> State# s -> State# s
copySmallMutableArray# a1 a2 a3 a4 a5 a6 = (GHC.Prim.copySmallMutableArray#) a1 a2 a3 a4 a5 a6
{-# NOINLINE cloneSmallArray# #-}
cloneSmallArray# :: SmallArray# a -> Int# -> Int# -> SmallArray# a
cloneSmallArray# a1 a2 a3 = (GHC.Prim.cloneSmallArray#) a1 a2 a3
{-# NOINLINE cloneSmallMutableArray# #-}
cloneSmallMutableArray# :: SmallMutableArray# s a -> Int# -> Int# -> State# s -> (# State# s,SmallMutableArray# s a #)
cloneSmallMutableArray# a1 a2 a3 a4 = (GHC.Prim.cloneSmallMutableArray#) a1 a2 a3 a4
{-# NOINLINE freezeSmallArray# #-}
freezeSmallArray# :: SmallMutableArray# s a -> Int# -> Int# -> State# s -> (# State# s,SmallArray# a #)
freezeSmallArray# a1 a2 a3 a4 = (GHC.Prim.freezeSmallArray#) a1 a2 a3 a4
{-# NOINLINE thawSmallArray# #-}
thawSmallArray# :: SmallArray# a -> Int# -> Int# -> State# s -> (# State# s,SmallMutableArray# s a #)
thawSmallArray# a1 a2 a3 a4 = (GHC.Prim.thawSmallArray#) a1 a2 a3 a4
{-# NOINLINE casSmallArray# #-}
casSmallArray# :: SmallMutableArray# s a -> Int# -> a -> a -> State# s -> (# State# s,Int#,a #)
casSmallArray# a1 a2 a3 a4 a5 = (GHC.Prim.casSmallArray#) a1 a2 a3 a4 a5
-- ByteArray# allocation, query and index primop wrappers;
-- NOINLINE keeps each primop usable as a first-class function value.
{-# NOINLINE newByteArray# #-}
newByteArray# :: Int# -> State# s -> (# State# s,MutableByteArray# s #)
newByteArray# a1 a2 = (GHC.Prim.newByteArray#) a1 a2
{-# NOINLINE newPinnedByteArray# #-}
newPinnedByteArray# :: Int# -> State# s -> (# State# s,MutableByteArray# s #)
newPinnedByteArray# a1 a2 = (GHC.Prim.newPinnedByteArray#) a1 a2
{-# NOINLINE newAlignedPinnedByteArray# #-}
newAlignedPinnedByteArray# :: Int# -> Int# -> State# s -> (# State# s,MutableByteArray# s #)
newAlignedPinnedByteArray# a1 a2 a3 = (GHC.Prim.newAlignedPinnedByteArray#) a1 a2 a3
{-# NOINLINE isMutableByteArrayPinned# #-}
isMutableByteArrayPinned# :: MutableByteArray# s -> Int#
isMutableByteArrayPinned# a1 = (GHC.Prim.isMutableByteArrayPinned#) a1
{-# NOINLINE isByteArrayPinned# #-}
isByteArrayPinned# :: ByteArray# -> Int#
isByteArrayPinned# a1 = (GHC.Prim.isByteArrayPinned#) a1
{-# NOINLINE byteArrayContents# #-}
byteArrayContents# :: ByteArray# -> Addr#
byteArrayContents# a1 = (GHC.Prim.byteArrayContents#) a1
{-# NOINLINE sameMutableByteArray# #-}
sameMutableByteArray# :: MutableByteArray# s -> MutableByteArray# s -> Int#
sameMutableByteArray# a1 a2 = (GHC.Prim.sameMutableByteArray#) a1 a2
{-# NOINLINE shrinkMutableByteArray# #-}
shrinkMutableByteArray# :: MutableByteArray# s -> Int# -> State# s -> State# s
shrinkMutableByteArray# a1 a2 a3 = (GHC.Prim.shrinkMutableByteArray#) a1 a2 a3
{-# NOINLINE resizeMutableByteArray# #-}
resizeMutableByteArray# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,MutableByteArray# s #)
resizeMutableByteArray# a1 a2 a3 = (GHC.Prim.resizeMutableByteArray#) a1 a2 a3
{-# NOINLINE unsafeFreezeByteArray# #-}
unsafeFreezeByteArray# :: MutableByteArray# s -> State# s -> (# State# s,ByteArray# #)
unsafeFreezeByteArray# a1 a2 = (GHC.Prim.unsafeFreezeByteArray#) a1 a2
{-# NOINLINE sizeofByteArray# #-}
sizeofByteArray# :: ByteArray# -> Int#
sizeofByteArray# a1 = (GHC.Prim.sizeofByteArray#) a1
{-# NOINLINE sizeofMutableByteArray# #-}
sizeofMutableByteArray# :: MutableByteArray# s -> Int#
sizeofMutableByteArray# a1 = (GHC.Prim.sizeofMutableByteArray#) a1
{-# NOINLINE getSizeofMutableByteArray# #-}
getSizeofMutableByteArray# :: MutableByteArray# s -> State# s -> (# State# s,Int# #)
getSizeofMutableByteArray# a1 a2 = (GHC.Prim.getSizeofMutableByteArray#) a1 a2
{-# NOINLINE indexCharArray# #-}
indexCharArray# :: ByteArray# -> Int# -> Char#
indexCharArray# a1 a2 = (GHC.Prim.indexCharArray#) a1 a2
{-# NOINLINE indexWideCharArray# #-}
indexWideCharArray# :: ByteArray# -> Int# -> Char#
indexWideCharArray# a1 a2 = (GHC.Prim.indexWideCharArray#) a1 a2
{-# NOINLINE indexIntArray# #-}
indexIntArray# :: ByteArray# -> Int# -> Int#
indexIntArray# a1 a2 = (GHC.Prim.indexIntArray#) a1 a2
{-# NOINLINE indexWordArray# #-}
indexWordArray# :: ByteArray# -> Int# -> Word#
indexWordArray# a1 a2 = (GHC.Prim.indexWordArray#) a1 a2
{-# NOINLINE indexAddrArray# #-}
indexAddrArray# :: ByteArray# -> Int# -> Addr#
indexAddrArray# a1 a2 = (GHC.Prim.indexAddrArray#) a1 a2
{-# NOINLINE indexFloatArray# #-}
indexFloatArray# :: ByteArray# -> Int# -> Float#
indexFloatArray# a1 a2 = (GHC.Prim.indexFloatArray#) a1 a2
{-# NOINLINE indexDoubleArray# #-}
indexDoubleArray# :: ByteArray# -> Int# -> Double#
indexDoubleArray# a1 a2 = (GHC.Prim.indexDoubleArray#) a1 a2
{-# NOINLINE indexStablePtrArray# #-}
indexStablePtrArray# :: ByteArray# -> Int# -> StablePtr# a
indexStablePtrArray# a1 a2 = (GHC.Prim.indexStablePtrArray#) a1 a2
{-# NOINLINE indexInt8Array# #-}
indexInt8Array# :: ByteArray# -> Int# -> Int#
indexInt8Array# a1 a2 = (GHC.Prim.indexInt8Array#) a1 a2
{-# NOINLINE indexInt16Array# #-}
indexInt16Array# :: ByteArray# -> Int# -> Int#
indexInt16Array# a1 a2 = (GHC.Prim.indexInt16Array#) a1 a2
{-# NOINLINE indexInt32Array# #-}
indexInt32Array# :: ByteArray# -> Int# -> Int#
indexInt32Array# a1 a2 = (GHC.Prim.indexInt32Array#) a1 a2
{-# NOINLINE indexInt64Array# #-}
indexInt64Array# :: ByteArray# -> Int# -> Int#
indexInt64Array# a1 a2 = (GHC.Prim.indexInt64Array#) a1 a2
{-# NOINLINE indexWord8Array# #-}
indexWord8Array# :: ByteArray# -> Int# -> Word#
indexWord8Array# a1 a2 = (GHC.Prim.indexWord8Array#) a1 a2
{-# NOINLINE indexWord16Array# #-}
indexWord16Array# :: ByteArray# -> Int# -> Word#
indexWord16Array# a1 a2 = (GHC.Prim.indexWord16Array#) a1 a2
{-# NOINLINE indexWord32Array# #-}
indexWord32Array# :: ByteArray# -> Int# -> Word#
indexWord32Array# a1 a2 = (GHC.Prim.indexWord32Array#) a1 a2
{-# NOINLINE indexWord64Array# #-}
indexWord64Array# :: ByteArray# -> Int# -> Word#
indexWord64Array# a1 a2 = (GHC.Prim.indexWord64Array#) a1 a2
-- MutableByteArray# read primop wrappers;
-- NOINLINE keeps each primop usable as a first-class function value.
{-# NOINLINE readCharArray# #-}
readCharArray# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Char# #)
readCharArray# a1 a2 a3 = (GHC.Prim.readCharArray#) a1 a2 a3
{-# NOINLINE readWideCharArray# #-}
readWideCharArray# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Char# #)
readWideCharArray# a1 a2 a3 = (GHC.Prim.readWideCharArray#) a1 a2 a3
{-# NOINLINE readIntArray# #-}
readIntArray# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Int# #)
readIntArray# a1 a2 a3 = (GHC.Prim.readIntArray#) a1 a2 a3
{-# NOINLINE readWordArray# #-}
readWordArray# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Word# #)
readWordArray# a1 a2 a3 = (GHC.Prim.readWordArray#) a1 a2 a3
{-# NOINLINE readAddrArray# #-}
readAddrArray# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Addr# #)
readAddrArray# a1 a2 a3 = (GHC.Prim.readAddrArray#) a1 a2 a3
{-# NOINLINE readFloatArray# #-}
readFloatArray# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Float# #)
readFloatArray# a1 a2 a3 = (GHC.Prim.readFloatArray#) a1 a2 a3
{-# NOINLINE readDoubleArray# #-}
readDoubleArray# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Double# #)
readDoubleArray# a1 a2 a3 = (GHC.Prim.readDoubleArray#) a1 a2 a3
{-# NOINLINE readStablePtrArray# #-}
readStablePtrArray# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,StablePtr# a #)
readStablePtrArray# a1 a2 a3 = (GHC.Prim.readStablePtrArray#) a1 a2 a3
{-# NOINLINE readInt8Array# #-}
readInt8Array# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Int# #)
readInt8Array# a1 a2 a3 = (GHC.Prim.readInt8Array#) a1 a2 a3
{-# NOINLINE readInt16Array# #-}
readInt16Array# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Int# #)
readInt16Array# a1 a2 a3 = (GHC.Prim.readInt16Array#) a1 a2 a3
{-# NOINLINE readInt32Array# #-}
readInt32Array# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Int# #)
readInt32Array# a1 a2 a3 = (GHC.Prim.readInt32Array#) a1 a2 a3
{-# NOINLINE readInt64Array# #-}
readInt64Array# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Int# #)
readInt64Array# a1 a2 a3 = (GHC.Prim.readInt64Array#) a1 a2 a3
{-# NOINLINE readWord8Array# #-}
readWord8Array# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Word# #)
readWord8Array# a1 a2 a3 = (GHC.Prim.readWord8Array#) a1 a2 a3
{-# NOINLINE readWord16Array# #-}
readWord16Array# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Word# #)
readWord16Array# a1 a2 a3 = (GHC.Prim.readWord16Array#) a1 a2 a3
{-# NOINLINE readWord32Array# #-}
readWord32Array# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Word# #)
readWord32Array# a1 a2 a3 = (GHC.Prim.readWord32Array#) a1 a2 a3
{-# NOINLINE readWord64Array# #-}
readWord64Array# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Word# #)
readWord64Array# a1 a2 a3 = (GHC.Prim.readWord64Array#) a1 a2 a3
-- MutableByteArray# write primop wrappers;
-- NOINLINE keeps each primop usable as a first-class function value.
{-# NOINLINE writeCharArray# #-}
writeCharArray# :: MutableByteArray# s -> Int# -> Char# -> State# s -> State# s
writeCharArray# a1 a2 a3 a4 = (GHC.Prim.writeCharArray#) a1 a2 a3 a4
{-# NOINLINE writeWideCharArray# #-}
writeWideCharArray# :: MutableByteArray# s -> Int# -> Char# -> State# s -> State# s
writeWideCharArray# a1 a2 a3 a4 = (GHC.Prim.writeWideCharArray#) a1 a2 a3 a4
{-# NOINLINE writeIntArray# #-}
writeIntArray# :: MutableByteArray# s -> Int# -> Int# -> State# s -> State# s
writeIntArray# a1 a2 a3 a4 = (GHC.Prim.writeIntArray#) a1 a2 a3 a4
{-# NOINLINE writeWordArray# #-}
writeWordArray# :: MutableByteArray# s -> Int# -> Word# -> State# s -> State# s
writeWordArray# a1 a2 a3 a4 = (GHC.Prim.writeWordArray#) a1 a2 a3 a4
{-# NOINLINE writeAddrArray# #-}
writeAddrArray# :: MutableByteArray# s -> Int# -> Addr# -> State# s -> State# s
writeAddrArray# a1 a2 a3 a4 = (GHC.Prim.writeAddrArray#) a1 a2 a3 a4
{-# NOINLINE writeFloatArray# #-}
writeFloatArray# :: MutableByteArray# s -> Int# -> Float# -> State# s -> State# s
writeFloatArray# a1 a2 a3 a4 = (GHC.Prim.writeFloatArray#) a1 a2 a3 a4
{-# NOINLINE writeDoubleArray# #-}
writeDoubleArray# :: MutableByteArray# s -> Int# -> Double# -> State# s -> State# s
writeDoubleArray# a1 a2 a3 a4 = (GHC.Prim.writeDoubleArray#) a1 a2 a3 a4
{-# NOINLINE writeStablePtrArray# #-}
writeStablePtrArray# :: MutableByteArray# s -> Int# -> StablePtr# a -> State# s -> State# s
writeStablePtrArray# a1 a2 a3 a4 = (GHC.Prim.writeStablePtrArray#) a1 a2 a3 a4
{-# NOINLINE writeInt8Array# #-}
writeInt8Array# :: MutableByteArray# s -> Int# -> Int# -> State# s -> State# s
writeInt8Array# a1 a2 a3 a4 = (GHC.Prim.writeInt8Array#) a1 a2 a3 a4
{-# NOINLINE writeInt16Array# #-}
writeInt16Array# :: MutableByteArray# s -> Int# -> Int# -> State# s -> State# s
writeInt16Array# a1 a2 a3 a4 = (GHC.Prim.writeInt16Array#) a1 a2 a3 a4
{-# NOINLINE writeInt32Array# #-}
writeInt32Array# :: MutableByteArray# s -> Int# -> Int# -> State# s -> State# s
writeInt32Array# a1 a2 a3 a4 = (GHC.Prim.writeInt32Array#) a1 a2 a3 a4
{-# NOINLINE writeInt64Array# #-}
writeInt64Array# :: MutableByteArray# s -> Int# -> Int# -> State# s -> State# s
writeInt64Array# a1 a2 a3 a4 = (GHC.Prim.writeInt64Array#) a1 a2 a3 a4
{-# NOINLINE writeWord8Array# #-}
writeWord8Array# :: MutableByteArray# s -> Int# -> Word# -> State# s -> State# s
writeWord8Array# a1 a2 a3 a4 = (GHC.Prim.writeWord8Array#) a1 a2 a3 a4
{-# NOINLINE writeWord16Array# #-}
writeWord16Array# :: MutableByteArray# s -> Int# -> Word# -> State# s -> State# s
writeWord16Array# a1 a2 a3 a4 = (GHC.Prim.writeWord16Array#) a1 a2 a3 a4
{-# NOINLINE writeWord32Array# #-}
writeWord32Array# :: MutableByteArray# s -> Int# -> Word# -> State# s -> State# s
writeWord32Array# a1 a2 a3 a4 = (GHC.Prim.writeWord32Array#) a1 a2 a3 a4
{-# NOINLINE writeWord64Array# #-}
writeWord64Array# :: MutableByteArray# s -> Int# -> Word# -> State# s -> State# s
writeWord64Array# a1 a2 a3 a4 = (GHC.Prim.writeWord64Array#) a1 a2 a3 a4
-- ByteArray# copy/set and atomic/fetch-op primop wrappers;
-- NOINLINE keeps each primop usable as a first-class function value.
{-# NOINLINE copyByteArray# #-}
copyByteArray# :: ByteArray# -> Int# -> MutableByteArray# s -> Int# -> Int# -> State# s -> State# s
copyByteArray# a1 a2 a3 a4 a5 a6 = (GHC.Prim.copyByteArray#) a1 a2 a3 a4 a5 a6
{-# NOINLINE copyMutableByteArray# #-}
copyMutableByteArray# :: MutableByteArray# s -> Int# -> MutableByteArray# s -> Int# -> Int# -> State# s -> State# s
copyMutableByteArray# a1 a2 a3 a4 a5 a6 = (GHC.Prim.copyMutableByteArray#) a1 a2 a3 a4 a5 a6
{-# NOINLINE copyByteArrayToAddr# #-}
copyByteArrayToAddr# :: ByteArray# -> Int# -> Addr# -> Int# -> State# s -> State# s
copyByteArrayToAddr# a1 a2 a3 a4 a5 = (GHC.Prim.copyByteArrayToAddr#) a1 a2 a3 a4 a5
{-# NOINLINE copyMutableByteArrayToAddr# #-}
copyMutableByteArrayToAddr# :: MutableByteArray# s -> Int# -> Addr# -> Int# -> State# s -> State# s
copyMutableByteArrayToAddr# a1 a2 a3 a4 a5 = (GHC.Prim.copyMutableByteArrayToAddr#) a1 a2 a3 a4 a5
{-# NOINLINE copyAddrToByteArray# #-}
copyAddrToByteArray# :: Addr# -> MutableByteArray# s -> Int# -> Int# -> State# s -> State# s
copyAddrToByteArray# a1 a2 a3 a4 a5 = (GHC.Prim.copyAddrToByteArray#) a1 a2 a3 a4 a5
{-# NOINLINE setByteArray# #-}
setByteArray# :: MutableByteArray# s -> Int# -> Int# -> Int# -> State# s -> State# s
setByteArray# a1 a2 a3 a4 a5 = (GHC.Prim.setByteArray#) a1 a2 a3 a4 a5
{-# NOINLINE atomicReadIntArray# #-}
atomicReadIntArray# :: MutableByteArray# s -> Int# -> State# s -> (# State# s,Int# #)
atomicReadIntArray# a1 a2 a3 = (GHC.Prim.atomicReadIntArray#) a1 a2 a3
{-# NOINLINE atomicWriteIntArray# #-}
atomicWriteIntArray# :: MutableByteArray# s -> Int# -> Int# -> State# s -> State# s
atomicWriteIntArray# a1 a2 a3 a4 = (GHC.Prim.atomicWriteIntArray#) a1 a2 a3 a4
{-# NOINLINE casIntArray# #-}
casIntArray# :: MutableByteArray# s -> Int# -> Int# -> Int# -> State# s -> (# State# s,Int# #)
casIntArray# a1 a2 a3 a4 a5 = (GHC.Prim.casIntArray#) a1 a2 a3 a4 a5
{-# NOINLINE fetchAddIntArray# #-}
fetchAddIntArray# :: MutableByteArray# s -> Int# -> Int# -> State# s -> (# State# s,Int# #)
fetchAddIntArray# a1 a2 a3 a4 = (GHC.Prim.fetchAddIntArray#) a1 a2 a3 a4
{-# NOINLINE fetchSubIntArray# #-}
fetchSubIntArray# :: MutableByteArray# s -> Int# -> Int# -> State# s -> (# State# s,Int# #)
fetchSubIntArray# a1 a2 a3 a4 = (GHC.Prim.fetchSubIntArray#) a1 a2 a3 a4
{-# NOINLINE fetchAndIntArray# #-}
fetchAndIntArray# :: MutableByteArray# s -> Int# -> Int# -> State# s -> (# State# s,Int# #)
fetchAndIntArray# a1 a2 a3 a4 = (GHC.Prim.fetchAndIntArray#) a1 a2 a3 a4
{-# NOINLINE fetchNandIntArray# #-}
fetchNandIntArray# :: MutableByteArray# s -> Int# -> Int# -> State# s -> (# State# s,Int# #)
fetchNandIntArray# a1 a2 a3 a4 = (GHC.Prim.fetchNandIntArray#) a1 a2 a3 a4
{-# NOINLINE fetchOrIntArray# #-}
fetchOrIntArray# :: MutableByteArray# s -> Int# -> Int# -> State# s -> (# State# s,Int# #)
fetchOrIntArray# a1 a2 a3 a4 = (GHC.Prim.fetchOrIntArray#) a1 a2 a3 a4
{-# NOINLINE fetchXorIntArray# #-}
fetchXorIntArray# :: MutableByteArray# s -> Int# -> Int# -> State# s -> (# State# s,Int# #)
fetchXorIntArray# a1 a2 a3 a4 = (GHC.Prim.fetchXorIntArray#) a1 a2 a3 a4
# NOINLINE newArrayArray # #
newArrayArray# :: Int# -> State# s -> (# State# s,MutableArrayArray# s #)
newArrayArray# a1 a2 = (GHC.Prim.newArrayArray#) a1 a2
# NOINLINE sameMutableArrayArray # #
sameMutableArrayArray# :: MutableArrayArray# s -> MutableArrayArray# s -> Int#
sameMutableArrayArray# a1 a2 = (GHC.Prim.sameMutableArrayArray#) a1 a2
# NOINLINE unsafeFreezeArrayArray # #
unsafeFreezeArrayArray# :: MutableArrayArray# s -> State# s -> (# State# s,ArrayArray# #)
unsafeFreezeArrayArray# a1 a2 = (GHC.Prim.unsafeFreezeArrayArray#) a1 a2
# NOINLINE sizeofArrayArray # #
sizeofArrayArray# :: ArrayArray# -> Int#
sizeofArrayArray# a1 = (GHC.Prim.sizeofArrayArray#) a1
# NOINLINE sizeofMutableArrayArray # #
sizeofMutableArrayArray# :: MutableArrayArray# s -> Int#
sizeofMutableArrayArray# a1 = (GHC.Prim.sizeofMutableArrayArray#) a1
# NOINLINE indexByteArrayArray # #
indexByteArrayArray# :: ArrayArray# -> Int# -> ByteArray#
indexByteArrayArray# a1 a2 = (GHC.Prim.indexByteArrayArray#) a1 a2
# NOINLINE indexArrayArrayArray # #
indexArrayArrayArray# :: ArrayArray# -> Int# -> ArrayArray#
indexArrayArrayArray# a1 a2 = (GHC.Prim.indexArrayArrayArray#) a1 a2
# NOINLINE readByteArrayArray # #
readByteArrayArray# :: MutableArrayArray# s -> Int# -> State# s -> (# State# s,ByteArray# #)
readByteArrayArray# a1 a2 a3 = (GHC.Prim.readByteArrayArray#) a1 a2 a3
# NOINLINE readMutableByteArrayArray # #
readMutableByteArrayArray# :: MutableArrayArray# s -> Int# -> State# s -> (# State# s,MutableByteArray# s #)
readMutableByteArrayArray# a1 a2 a3 = (GHC.Prim.readMutableByteArrayArray#) a1 a2 a3
# NOINLINE readArrayArrayArray # #
readArrayArrayArray# :: MutableArrayArray# s -> Int# -> State# s -> (# State# s,ArrayArray# #)
readArrayArrayArray# a1 a2 a3 = (GHC.Prim.readArrayArrayArray#) a1 a2 a3
# NOINLINE readMutableArrayArrayArray # #
readMutableArrayArrayArray# :: MutableArrayArray# s -> Int# -> State# s -> (# State# s,MutableArrayArray# s #)
readMutableArrayArrayArray# a1 a2 a3 = (GHC.Prim.readMutableArrayArrayArray#) a1 a2 a3
# NOINLINE writeByteArrayArray # #
writeByteArrayArray# :: MutableArrayArray# s -> Int# -> ByteArray# -> State# s -> State# s
writeByteArrayArray# a1 a2 a3 a4 = (GHC.Prim.writeByteArrayArray#) a1 a2 a3 a4
# NOINLINE writeMutableByteArrayArray # #
writeMutableByteArrayArray# :: MutableArrayArray# s -> Int# -> MutableByteArray# s -> State# s -> State# s
writeMutableByteArrayArray# a1 a2 a3 a4 = (GHC.Prim.writeMutableByteArrayArray#) a1 a2 a3 a4
# NOINLINE writeArrayArrayArray # #
writeArrayArrayArray# :: MutableArrayArray# s -> Int# -> ArrayArray# -> State# s -> State# s
writeArrayArrayArray# a1 a2 a3 a4 = (GHC.Prim.writeArrayArrayArray#) a1 a2 a3 a4
# NOINLINE writeMutableArrayArrayArray # #
writeMutableArrayArrayArray# :: MutableArrayArray# s -> Int# -> MutableArrayArray# s -> State# s -> State# s
writeMutableArrayArrayArray# a1 a2 a3 a4 = (GHC.Prim.writeMutableArrayArrayArray#) a1 a2 a3 a4
# NOINLINE copyArrayArray # #
copyArrayArray# :: ArrayArray# -> Int# -> MutableArrayArray# s -> Int# -> Int# -> State# s -> State# s
copyArrayArray# a1 a2 a3 a4 a5 a6 = (GHC.Prim.copyArrayArray#) a1 a2 a3 a4 a5 a6
# NOINLINE copyMutableArrayArray # #
copyMutableArrayArray# :: MutableArrayArray# s -> Int# -> MutableArrayArray# s -> Int# -> Int# -> State# s -> State# s
copyMutableArrayArray# a1 a2 a3 a4 a5 a6 = (GHC.Prim.copyMutableArrayArray#) a1 a2 a3 a4 a5 a6
# NOINLINE plusAddr # #
plusAddr# :: Addr# -> Int# -> Addr#
plusAddr# a1 a2 = (GHC.Prim.plusAddr#) a1 a2
# NOINLINE minusAddr # #
minusAddr# :: Addr# -> Addr# -> Int#
minusAddr# a1 a2 = (GHC.Prim.minusAddr#) a1 a2
# NOINLINE remAddr # #
remAddr# :: Addr# -> Int# -> Int#
remAddr# a1 a2 = (GHC.Prim.remAddr#) a1 a2
# NOINLINE addr2Int # #
addr2Int# :: Addr# -> Int#
addr2Int# a1 = (GHC.Prim.addr2Int#) a1
# NOINLINE int2Addr # #
int2Addr# :: Int# -> Addr#
int2Addr# a1 = (GHC.Prim.int2Addr#) a1
# NOINLINE gtAddr # #
gtAddr# :: Addr# -> Addr# -> Int#
gtAddr# a1 a2 = (GHC.Prim.gtAddr#) a1 a2
# NOINLINE geAddr # #
geAddr# :: Addr# -> Addr# -> Int#
geAddr# a1 a2 = (GHC.Prim.geAddr#) a1 a2
# NOINLINE eqAddr # #
eqAddr# :: Addr# -> Addr# -> Int#
eqAddr# a1 a2 = (GHC.Prim.eqAddr#) a1 a2
# NOINLINE neAddr # #
neAddr# :: Addr# -> Addr# -> Int#
neAddr# a1 a2 = (GHC.Prim.neAddr#) a1 a2
# NOINLINE ltAddr # #
ltAddr# :: Addr# -> Addr# -> Int#
ltAddr# a1 a2 = (GHC.Prim.ltAddr#) a1 a2
# NOINLINE leAddr # #
leAddr# :: Addr# -> Addr# -> Int#
leAddr# a1 a2 = (GHC.Prim.leAddr#) a1 a2
# NOINLINE indexCharOffAddr # #
indexCharOffAddr# :: Addr# -> Int# -> Char#
indexCharOffAddr# a1 a2 = (GHC.Prim.indexCharOffAddr#) a1 a2
# NOINLINE indexWideCharOffAddr # #
indexWideCharOffAddr# :: Addr# -> Int# -> Char#
indexWideCharOffAddr# a1 a2 = (GHC.Prim.indexWideCharOffAddr#) a1 a2
# NOINLINE indexIntOffAddr # #
indexIntOffAddr# :: Addr# -> Int# -> Int#
indexIntOffAddr# a1 a2 = (GHC.Prim.indexIntOffAddr#) a1 a2
# NOINLINE indexWordOffAddr # #
indexWordOffAddr# :: Addr# -> Int# -> Word#
indexWordOffAddr# a1 a2 = (GHC.Prim.indexWordOffAddr#) a1 a2
# NOINLINE indexAddrOffAddr # #
indexAddrOffAddr# :: Addr# -> Int# -> Addr#
indexAddrOffAddr# a1 a2 = (GHC.Prim.indexAddrOffAddr#) a1 a2
# NOINLINE indexFloatOffAddr # #
indexFloatOffAddr# :: Addr# -> Int# -> Float#
indexFloatOffAddr# a1 a2 = (GHC.Prim.indexFloatOffAddr#) a1 a2
# NOINLINE indexDoubleOffAddr # #
indexDoubleOffAddr# :: Addr# -> Int# -> Double#
indexDoubleOffAddr# a1 a2 = (GHC.Prim.indexDoubleOffAddr#) a1 a2
# NOINLINE indexStablePtrOffAddr # #
indexStablePtrOffAddr# :: Addr# -> Int# -> StablePtr# a
indexStablePtrOffAddr# a1 a2 = (GHC.Prim.indexStablePtrOffAddr#) a1 a2
# NOINLINE indexInt8OffAddr # #
indexInt8OffAddr# :: Addr# -> Int# -> Int#
indexInt8OffAddr# a1 a2 = (GHC.Prim.indexInt8OffAddr#) a1 a2
# NOINLINE indexInt16OffAddr # #
indexInt16OffAddr# :: Addr# -> Int# -> Int#
indexInt16OffAddr# a1 a2 = (GHC.Prim.indexInt16OffAddr#) a1 a2
# NOINLINE indexInt32OffAddr # #
indexInt32OffAddr# :: Addr# -> Int# -> Int#
indexInt32OffAddr# a1 a2 = (GHC.Prim.indexInt32OffAddr#) a1 a2
# NOINLINE indexInt64OffAddr # #
indexInt64OffAddr# :: Addr# -> Int# -> Int#
indexInt64OffAddr# a1 a2 = (GHC.Prim.indexInt64OffAddr#) a1 a2
# NOINLINE indexWord8OffAddr # #
indexWord8OffAddr# :: Addr# -> Int# -> Word#
indexWord8OffAddr# a1 a2 = (GHC.Prim.indexWord8OffAddr#) a1 a2
# NOINLINE indexWord16OffAddr # #
indexWord16OffAddr# :: Addr# -> Int# -> Word#
indexWord16OffAddr# a1 a2 = (GHC.Prim.indexWord16OffAddr#) a1 a2
# NOINLINE indexWord32OffAddr # #
indexWord32OffAddr# :: Addr# -> Int# -> Word#
indexWord32OffAddr# a1 a2 = (GHC.Prim.indexWord32OffAddr#) a1 a2
# NOINLINE indexWord64OffAddr # #
indexWord64OffAddr# :: Addr# -> Int# -> Word#
indexWord64OffAddr# a1 a2 = (GHC.Prim.indexWord64OffAddr#) a1 a2
# NOINLINE readCharOffAddr # #
readCharOffAddr# :: Addr# -> Int# -> State# s -> (# State# s,Char# #)
readCharOffAddr# a1 a2 a3 = (GHC.Prim.readCharOffAddr#) a1 a2 a3
# NOINLINE readWideCharOffAddr # #
readWideCharOffAddr# :: Addr# -> Int# -> State# s -> (# State# s,Char# #)
readWideCharOffAddr# a1 a2 a3 = (GHC.Prim.readWideCharOffAddr#) a1 a2 a3
# NOINLINE readIntOffAddr # #
readIntOffAddr# :: Addr# -> Int# -> State# s -> (# State# s,Int# #)
readIntOffAddr# a1 a2 a3 = (GHC.Prim.readIntOffAddr#) a1 a2 a3
# NOINLINE readWordOffAddr # #
readWordOffAddr# :: Addr# -> Int# -> State# s -> (# State# s,Word# #)
readWordOffAddr# a1 a2 a3 = (GHC.Prim.readWordOffAddr#) a1 a2 a3
# NOINLINE readAddrOffAddr # #
readAddrOffAddr# :: Addr# -> Int# -> State# s -> (# State# s,Addr# #)
readAddrOffAddr# a1 a2 a3 = (GHC.Prim.readAddrOffAddr#) a1 a2 a3
# NOINLINE readFloatOffAddr # #
readFloatOffAddr# :: Addr# -> Int# -> State# s -> (# State# s,Float# #)
readFloatOffAddr# a1 a2 a3 = (GHC.Prim.readFloatOffAddr#) a1 a2 a3
# NOINLINE readDoubleOffAddr # #
readDoubleOffAddr# :: Addr# -> Int# -> State# s -> (# State# s,Double# #)
readDoubleOffAddr# a1 a2 a3 = (GHC.Prim.readDoubleOffAddr#) a1 a2 a3
# NOINLINE readStablePtrOffAddr # #
readStablePtrOffAddr# :: Addr# -> Int# -> State# s -> (# State# s,StablePtr# a #)
readStablePtrOffAddr# a1 a2 a3 = (GHC.Prim.readStablePtrOffAddr#) a1 a2 a3
# NOINLINE readInt8OffAddr # #
readInt8OffAddr# :: Addr# -> Int# -> State# s -> (# State# s,Int# #)
readInt8OffAddr# a1 a2 a3 = (GHC.Prim.readInt8OffAddr#) a1 a2 a3
# NOINLINE readInt16OffAddr # #
readInt16OffAddr# :: Addr# -> Int# -> State# s -> (# State# s,Int# #)
readInt16OffAddr# a1 a2 a3 = (GHC.Prim.readInt16OffAddr#) a1 a2 a3
# NOINLINE readInt32OffAddr # #
readInt32OffAddr# :: Addr# -> Int# -> State# s -> (# State# s,Int# #)
readInt32OffAddr# a1 a2 a3 = (GHC.Prim.readInt32OffAddr#) a1 a2 a3
# NOINLINE readInt64OffAddr # #
readInt64OffAddr# :: Addr# -> Int# -> State# s -> (# State# s,Int# #)
readInt64OffAddr# a1 a2 a3 = (GHC.Prim.readInt64OffAddr#) a1 a2 a3
# NOINLINE readWord8OffAddr # #
readWord8OffAddr# :: Addr# -> Int# -> State# s -> (# State# s,Word# #)
readWord8OffAddr# a1 a2 a3 = (GHC.Prim.readWord8OffAddr#) a1 a2 a3
# NOINLINE readWord16OffAddr # #
readWord16OffAddr# :: Addr# -> Int# -> State# s -> (# State# s,Word# #)
readWord16OffAddr# a1 a2 a3 = (GHC.Prim.readWord16OffAddr#) a1 a2 a3
# NOINLINE readWord32OffAddr # #
readWord32OffAddr# :: Addr# -> Int# -> State# s -> (# State# s,Word# #)
readWord32OffAddr# a1 a2 a3 = (GHC.Prim.readWord32OffAddr#) a1 a2 a3
# NOINLINE readWord64OffAddr # #
readWord64OffAddr# :: Addr# -> Int# -> State# s -> (# State# s,Word# #)
readWord64OffAddr# a1 a2 a3 = (GHC.Prim.readWord64OffAddr#) a1 a2 a3
# NOINLINE writeCharOffAddr # #
writeCharOffAddr# :: Addr# -> Int# -> Char# -> State# s -> State# s
writeCharOffAddr# a1 a2 a3 a4 = (GHC.Prim.writeCharOffAddr#) a1 a2 a3 a4
# NOINLINE writeWideCharOffAddr # #
writeWideCharOffAddr# :: Addr# -> Int# -> Char# -> State# s -> State# s
writeWideCharOffAddr# a1 a2 a3 a4 = (GHC.Prim.writeWideCharOffAddr#) a1 a2 a3 a4
writeIntOffAddr# :: Addr# -> Int# -> Int# -> State# s -> State# s
writeIntOffAddr# a1 a2 a3 a4 = (GHC.Prim.writeIntOffAddr#) a1 a2 a3 a4
# NOINLINE writeWordOffAddr # #
writeWordOffAddr# :: Addr# -> Int# -> Word# -> State# s -> State# s
writeWordOffAddr# a1 a2 a3 a4 = (GHC.Prim.writeWordOffAddr#) a1 a2 a3 a4
# NOINLINE writeAddrOffAddr # #
writeAddrOffAddr# :: Addr# -> Int# -> Addr# -> State# s -> State# s
writeAddrOffAddr# a1 a2 a3 a4 = (GHC.Prim.writeAddrOffAddr#) a1 a2 a3 a4
# NOINLINE writeFloatOffAddr # #
writeFloatOffAddr# :: Addr# -> Int# -> Float# -> State# s -> State# s
writeFloatOffAddr# a1 a2 a3 a4 = (GHC.Prim.writeFloatOffAddr#) a1 a2 a3 a4
# NOINLINE writeDoubleOffAddr # #
writeDoubleOffAddr# :: Addr# -> Int# -> Double# -> State# s -> State# s
writeDoubleOffAddr# a1 a2 a3 a4 = (GHC.Prim.writeDoubleOffAddr#) a1 a2 a3 a4
# NOINLINE writeStablePtrOffAddr # #
writeStablePtrOffAddr# :: Addr# -> Int# -> StablePtr# a -> State# s -> State# s
writeStablePtrOffAddr# a1 a2 a3 a4 = (GHC.Prim.writeStablePtrOffAddr#) a1 a2 a3 a4
# NOINLINE writeInt8OffAddr # #
writeInt8OffAddr# :: Addr# -> Int# -> Int# -> State# s -> State# s
writeInt8OffAddr# a1 a2 a3 a4 = (GHC.Prim.writeInt8OffAddr#) a1 a2 a3 a4
# NOINLINE writeInt16OffAddr # #
writeInt16OffAddr# :: Addr# -> Int# -> Int# -> State# s -> State# s
writeInt16OffAddr# a1 a2 a3 a4 = (GHC.Prim.writeInt16OffAddr#) a1 a2 a3 a4
# NOINLINE writeInt32OffAddr # #
writeInt32OffAddr# :: Addr# -> Int# -> Int# -> State# s -> State# s
writeInt32OffAddr# a1 a2 a3 a4 = (GHC.Prim.writeInt32OffAddr#) a1 a2 a3 a4
# NOINLINE writeInt64OffAddr # #
writeInt64OffAddr# :: Addr# -> Int# -> Int# -> State# s -> State# s
writeInt64OffAddr# a1 a2 a3 a4 = (GHC.Prim.writeInt64OffAddr#) a1 a2 a3 a4
# NOINLINE writeWord8OffAddr # #
writeWord8OffAddr# :: Addr# -> Int# -> Word# -> State# s -> State# s
writeWord8OffAddr# a1 a2 a3 a4 = (GHC.Prim.writeWord8OffAddr#) a1 a2 a3 a4
# NOINLINE writeWord16OffAddr # #
writeWord16OffAddr# :: Addr# -> Int# -> Word# -> State# s -> State# s
writeWord16OffAddr# a1 a2 a3 a4 = (GHC.Prim.writeWord16OffAddr#) a1 a2 a3 a4
# NOINLINE writeWord32OffAddr # #
writeWord32OffAddr# :: Addr# -> Int# -> Word# -> State# s -> State# s
writeWord32OffAddr# a1 a2 a3 a4 = (GHC.Prim.writeWord32OffAddr#) a1 a2 a3 a4
# NOINLINE writeWord64OffAddr # #
writeWord64OffAddr# :: Addr# -> Int# -> Word# -> State# s -> State# s
writeWord64OffAddr# a1 a2 a3 a4 = (GHC.Prim.writeWord64OffAddr#) a1 a2 a3 a4
# NOINLINE newMutVar # #
newMutVar# :: a -> State# s -> (# State# s,MutVar# s a #)
newMutVar# a1 a2 = (GHC.Prim.newMutVar#) a1 a2
# NOINLINE readMutVar # #
readMutVar# :: MutVar# s a -> State# s -> (# State# s,a #)
readMutVar# a1 a2 = (GHC.Prim.readMutVar#) a1 a2
# NOINLINE writeMutVar # #
writeMutVar# :: MutVar# s a -> a -> State# s -> State# s
writeMutVar# a1 a2 a3 = (GHC.Prim.writeMutVar#) a1 a2 a3
# NOINLINE sameMutVar # #
sameMutVar# :: MutVar# s a -> MutVar# s a -> Int#
sameMutVar# a1 a2 = (GHC.Prim.sameMutVar#) a1 a2
# NOINLINE atomicModifyMutVar # #
atomicModifyMutVar# :: MutVar# s a -> (a -> b) -> State# s -> (# State# s,c #)
atomicModifyMutVar# a1 a2 a3 = (GHC.Prim.atomicModifyMutVar#) a1 a2 a3
# NOINLINE casMutVar # #
casMutVar# :: MutVar# s a -> a -> a -> State# s -> (# State# s,Int#,a #)
casMutVar# a1 a2 a3 a4 = (GHC.Prim.casMutVar#) a1 a2 a3 a4
# NOINLINE catch # #
catch# :: (State# (RealWorld) -> (# State# (RealWorld),a #)) -> (b -> State# (RealWorld) -> (# State# (RealWorld),a #)) -> State# (RealWorld) -> (# State# (RealWorld),a #)
catch# a1 a2 a3 = (GHC.Prim.catch#) a1 a2 a3
# NOINLINE raise # #
raise# :: b -> o
raise# a1 = (GHC.Prim.raise#) a1
# NOINLINE raiseIO # #
raiseIO# :: a -> State# (RealWorld) -> (# State# (RealWorld),b #)
raiseIO# a1 a2 = (GHC.Prim.raiseIO#) a1 a2
# NOINLINE maskAsyncExceptions # #
maskAsyncExceptions# :: (State# (RealWorld) -> (# State# (RealWorld),a #)) -> State# (RealWorld) -> (# State# (RealWorld),a #)
maskAsyncExceptions# a1 a2 = (GHC.Prim.maskAsyncExceptions#) a1 a2
# NOINLINE maskUninterruptible # #
maskUninterruptible# :: (State# (RealWorld) -> (# State# (RealWorld),a #)) -> State# (RealWorld) -> (# State# (RealWorld),a #)
maskUninterruptible# a1 a2 = (GHC.Prim.maskUninterruptible#) a1 a2
unmaskAsyncExceptions# :: (State# (RealWorld) -> (# State# (RealWorld),a #)) -> State# (RealWorld) -> (# State# (RealWorld),a #)
unmaskAsyncExceptions# a1 a2 = (GHC.Prim.unmaskAsyncExceptions#) a1 a2
getMaskingState# :: State# (RealWorld) -> (# State# (RealWorld),Int# #)
getMaskingState# a1 = (GHC.Prim.getMaskingState#) a1
# NOINLINE atomically # #
atomically# :: (State# (RealWorld) -> (# State# (RealWorld),a #)) -> State# (RealWorld) -> (# State# (RealWorld),a #)
atomically# a1 a2 = (GHC.Prim.atomically#) a1 a2
# NOINLINE retry # #
retry# :: State# (RealWorld) -> (# State# (RealWorld),a #)
retry# a1 = (GHC.Prim.retry#) a1
# NOINLINE catchRetry # #
catchRetry# :: (State# (RealWorld) -> (# State# (RealWorld),a #)) -> (State# (RealWorld) -> (# State# (RealWorld),a #)) -> State# (RealWorld) -> (# State# (RealWorld),a #)
catchRetry# a1 a2 a3 = (GHC.Prim.catchRetry#) a1 a2 a3
# NOINLINE catchSTM # #
catchSTM# :: (State# (RealWorld) -> (# State# (RealWorld),a #)) -> (b -> State# (RealWorld) -> (# State# (RealWorld),a #)) -> State# (RealWorld) -> (# State# (RealWorld),a #)
catchSTM# a1 a2 a3 = (GHC.Prim.catchSTM#) a1 a2 a3
# NOINLINE check # #
check# :: (State# (RealWorld) -> (# State# (RealWorld),a #)) -> State# (RealWorld) -> State# (RealWorld)
check# a1 a2 = (GHC.Prim.check#) a1 a2
# NOINLINE newTVar # #
newTVar# :: a -> State# s -> (# State# s,TVar# s a #)
newTVar# a1 a2 = (GHC.Prim.newTVar#) a1 a2
# NOINLINE readTVar # #
readTVar# :: TVar# s a -> State# s -> (# State# s,a #)
readTVar# a1 a2 = (GHC.Prim.readTVar#) a1 a2
# NOINLINE readTVarIO # #
readTVarIO# :: TVar# s a -> State# s -> (# State# s,a #)
readTVarIO# a1 a2 = (GHC.Prim.readTVarIO#) a1 a2
# NOINLINE writeTVar # #
writeTVar# :: TVar# s a -> a -> State# s -> State# s
writeTVar# a1 a2 a3 = (GHC.Prim.writeTVar#) a1 a2 a3
# NOINLINE sameTVar # #
sameTVar# :: TVar# s a -> TVar# s a -> Int#
sameTVar# a1 a2 = (GHC.Prim.sameTVar#) a1 a2
# NOINLINE newMVar # #
newMVar# :: State# s -> (# State# s,MVar# s a #)
newMVar# a1 = (GHC.Prim.newMVar#) a1
# NOINLINE takeMVar # #
takeMVar# :: MVar# s a -> State# s -> (# State# s,a #)
takeMVar# a1 a2 = (GHC.Prim.takeMVar#) a1 a2
# NOINLINE tryTakeMVar # #
tryTakeMVar# :: MVar# s a -> State# s -> (# State# s,Int#,a #)
tryTakeMVar# a1 a2 = (GHC.Prim.tryTakeMVar#) a1 a2
# NOINLINE putMVar # #
putMVar# :: MVar# s a -> a -> State# s -> State# s
putMVar# a1 a2 a3 = (GHC.Prim.putMVar#) a1 a2 a3
# NOINLINE tryPutMVar # #
tryPutMVar# :: MVar# s a -> a -> State# s -> (# State# s,Int# #)
tryPutMVar# a1 a2 a3 = (GHC.Prim.tryPutMVar#) a1 a2 a3
# NOINLINE readMVar # #
readMVar# :: MVar# s a -> State# s -> (# State# s,a #)
readMVar# a1 a2 = (GHC.Prim.readMVar#) a1 a2
# NOINLINE tryReadMVar # #
tryReadMVar# :: MVar# s a -> State# s -> (# State# s,Int#,a #)
tryReadMVar# a1 a2 = (GHC.Prim.tryReadMVar#) a1 a2
# NOINLINE sameMVar # #
sameMVar# :: MVar# s a -> MVar# s a -> Int#
sameMVar# a1 a2 = (GHC.Prim.sameMVar#) a1 a2
# NOINLINE isEmptyMVar # #
isEmptyMVar# :: MVar# s a -> State# s -> (# State# s,Int# #)
isEmptyMVar# a1 a2 = (GHC.Prim.isEmptyMVar#) a1 a2
# NOINLINE delay # #
delay# :: Int# -> State# s -> State# s
delay# a1 a2 = (GHC.Prim.delay#) a1 a2
# NOINLINE waitRead # #
waitRead# :: Int# -> State# s -> State# s
waitRead# a1 a2 = (GHC.Prim.waitRead#) a1 a2
# NOINLINE waitWrite # #
waitWrite# :: Int# -> State# s -> State# s
waitWrite# a1 a2 = (GHC.Prim.waitWrite#) a1 a2
# NOINLINE fork # #
fork# :: a -> State# (RealWorld) -> (# State# (RealWorld),ThreadId# #)
fork# a1 a2 = (GHC.Prim.fork#) a1 a2
# NOINLINE forkOn # #
forkOn# :: Int# -> a -> State# (RealWorld) -> (# State# (RealWorld),ThreadId# #)
forkOn# a1 a2 a3 = (GHC.Prim.forkOn#) a1 a2 a3
# NOINLINE killThread # #
killThread# :: ThreadId# -> a -> State# (RealWorld) -> State# (RealWorld)
killThread# a1 a2 a3 = (GHC.Prim.killThread#) a1 a2 a3
# NOINLINE yield # #
yield# :: State# (RealWorld) -> State# (RealWorld)
yield# a1 = (GHC.Prim.yield#) a1
# NOINLINE myThreadId # #
myThreadId# :: State# (RealWorld) -> (# State# (RealWorld),ThreadId# #)
myThreadId# a1 = (GHC.Prim.myThreadId#) a1
# NOINLINE labelThread # #
labelThread# :: ThreadId# -> Addr# -> State# (RealWorld) -> State# (RealWorld)
labelThread# a1 a2 a3 = (GHC.Prim.labelThread#) a1 a2 a3
isCurrentThreadBound# :: State# (RealWorld) -> (# State# (RealWorld),Int# #)
isCurrentThreadBound# a1 = (GHC.Prim.isCurrentThreadBound#) a1
noDuplicate# :: State# s -> State# s
noDuplicate# a1 = (GHC.Prim.noDuplicate#) a1
# NOINLINE threadStatus # #
threadStatus# :: ThreadId# -> State# (RealWorld) -> (# State# (RealWorld),Int#,Int#,Int# #)
threadStatus# a1 a2 = (GHC.Prim.threadStatus#) a1 a2
# NOINLINE mkWeak # #
mkWeak# :: o -> b -> (State# (RealWorld) -> (# State# (RealWorld),c #)) -> State# (RealWorld) -> (# State# (RealWorld),Weak# b #)
mkWeak# a1 a2 a3 a4 = (GHC.Prim.mkWeak#) a1 a2 a3 a4
# NOINLINE mkWeakNoFinalizer # #
mkWeakNoFinalizer# :: o -> b -> State# (RealWorld) -> (# State# (RealWorld),Weak# b #)
mkWeakNoFinalizer# a1 a2 a3 = (GHC.Prim.mkWeakNoFinalizer#) a1 a2 a3
# NOINLINE addCFinalizerToWeak # #
addCFinalizerToWeak# :: Addr# -> Addr# -> Int# -> Addr# -> Weak# b -> State# (RealWorld) -> (# State# (RealWorld),Int# #)
addCFinalizerToWeak# a1 a2 a3 a4 a5 a6 = (GHC.Prim.addCFinalizerToWeak#) a1 a2 a3 a4 a5 a6
# NOINLINE deRefWeak # #
deRefWeak# :: Weak# a -> State# (RealWorld) -> (# State# (RealWorld),Int#,a #)
deRefWeak# a1 a2 = (GHC.Prim.deRefWeak#) a1 a2
finalizeWeak# :: Weak# a -> State# (RealWorld) -> (# State# (RealWorld),Int#,State# (RealWorld) -> (# State# (RealWorld),b #) #)
finalizeWeak# a1 a2 = (GHC.Prim.finalizeWeak#) a1 a2
# NOINLINE touch # #
touch# :: o -> State# (RealWorld) -> State# (RealWorld)
touch# a1 a2 = (GHC.Prim.touch#) a1 a2
# NOINLINE makeStablePtr # #
makeStablePtr# :: a -> State# (RealWorld) -> (# State# (RealWorld),StablePtr# a #)
makeStablePtr# a1 a2 = (GHC.Prim.makeStablePtr#) a1 a2
# NOINLINE deRefStablePtr # #
deRefStablePtr# :: StablePtr# a -> State# (RealWorld) -> (# State# (RealWorld),a #)
deRefStablePtr# a1 a2 = (GHC.Prim.deRefStablePtr#) a1 a2
# NOINLINE eqStablePtr # #
eqStablePtr# :: StablePtr# a -> StablePtr# a -> Int#
eqStablePtr# a1 a2 = (GHC.Prim.eqStablePtr#) a1 a2
# NOINLINE makeStableName # #
makeStableName# :: a -> State# (RealWorld) -> (# State# (RealWorld),StableName# a #)
makeStableName# a1 a2 = (GHC.Prim.makeStableName#) a1 a2
eqStableName# :: StableName# a -> StableName# b -> Int#
eqStableName# a1 a2 = (GHC.Prim.eqStableName#) a1 a2
# NOINLINE stableNameToInt # #
stableNameToInt# :: StableName# a -> Int#
stableNameToInt# a1 = (GHC.Prim.stableNameToInt#) a1
# NOINLINE compactNew # #
compactNew# :: Word# -> State# (RealWorld) -> (# State# (RealWorld),Compact# #)
compactNew# a1 a2 = (GHC.Prim.compactNew#) a1 a2
# NOINLINE compactResize # #
compactResize# :: Compact# -> Word# -> State# (RealWorld) -> State# (RealWorld)
compactResize# a1 a2 a3 = (GHC.Prim.compactResize#) a1 a2 a3
# NOINLINE compactContains # #
compactContains# :: Compact# -> a -> State# (RealWorld) -> (# State# (RealWorld),Int# #)
compactContains# a1 a2 a3 = (GHC.Prim.compactContains#) a1 a2 a3
# NOINLINE compactContainsAny # #
compactContainsAny# :: a -> State# (RealWorld) -> (# State# (RealWorld),Int# #)
compactContainsAny# a1 a2 = (GHC.Prim.compactContainsAny#) a1 a2
# NOINLINE compactGetFirstBlock # #
compactGetFirstBlock# :: Compact# -> State# (RealWorld) -> (# State# (RealWorld),Addr#,Word# #)
compactGetFirstBlock# a1 a2 = (GHC.Prim.compactGetFirstBlock#) a1 a2
# NOINLINE compactGetNextBlock # #
compactGetNextBlock# :: Compact# -> Addr# -> State# (RealWorld) -> (# State# (RealWorld),Addr#,Word# #)
compactGetNextBlock# a1 a2 a3 = (GHC.Prim.compactGetNextBlock#) a1 a2 a3
# NOINLINE compactAllocateBlock # #
compactAllocateBlock# :: Word# -> Addr# -> State# (RealWorld) -> (# State# (RealWorld),Addr# #)
compactAllocateBlock# a1 a2 a3 = (GHC.Prim.compactAllocateBlock#) a1 a2 a3
# NOINLINE compactFixupPointers # #
compactFixupPointers# :: Addr# -> Addr# -> State# (RealWorld) -> (# State# (RealWorld),Compact#,Addr# #)
compactFixupPointers# a1 a2 a3 = (GHC.Prim.compactFixupPointers#) a1 a2 a3
# NOINLINE compactAdd # #
compactAdd# :: Compact# -> a -> State# (RealWorld) -> (# State# (RealWorld),a #)
compactAdd# a1 a2 a3 = (GHC.Prim.compactAdd#) a1 a2 a3
# NOINLINE compactAddWithSharing # #
compactAddWithSharing# :: Compact# -> a -> State# (RealWorld) -> (# State# (RealWorld),a #)
compactAddWithSharing# a1 a2 a3 = (GHC.Prim.compactAddWithSharing#) a1 a2 a3
# NOINLINE compactSize # #
compactSize# :: Compact# -> State# (RealWorld) -> (# State# (RealWorld),Word# #)
compactSize# a1 a2 = (GHC.Prim.compactSize#) a1 a2
# NOINLINE reallyUnsafePtrEquality # #
reallyUnsafePtrEquality# :: a -> a -> Int#
reallyUnsafePtrEquality# a1 a2 = (GHC.Prim.reallyUnsafePtrEquality#) a1 a2
# NOINLINE par # #
par# :: a -> Int#
par# a1 = (GHC.Prim.par#) a1
# NOINLINE spark # #
spark# :: a -> State# s -> (# State# s,a #)
spark# a1 a2 = (GHC.Prim.spark#) a1 a2
seq# :: a -> State# s -> (# State# s,a #)
seq# a1 a2 = (GHC.Prim.seq#) a1 a2
# NOINLINE getSpark # #
getSpark# :: State# s -> (# State# s,Int#,a #)
getSpark# a1 = (GHC.Prim.getSpark#) a1
# NOINLINE numSparks # #
numSparks# :: State# s -> (# State# s,Int# #)
numSparks# a1 = (GHC.Prim.numSparks#) a1
# NOINLINE dataToTag # #
dataToTag# :: a -> Int#
dataToTag# a1 = (GHC.Prim.dataToTag#) a1
# NOINLINE addrToAny # #
addrToAny# :: Addr# -> (# a #)
addrToAny# a1 = (GHC.Prim.addrToAny#) a1
anyToAddr# :: a -> State# (RealWorld) -> (# State# (RealWorld),Addr# #)
anyToAddr# a1 a2 = (GHC.Prim.anyToAddr#) a1 a2
# NOINLINE mkApUpd0 # #
mkApUpd0# :: BCO# -> (# a #)
mkApUpd0# a1 = (GHC.Prim.mkApUpd0#) a1
# NOINLINE newBCO # #
newBCO# :: ByteArray# -> ByteArray# -> Array# a -> Int# -> ByteArray# -> State# s -> (# State# s,BCO# #)
newBCO# a1 a2 a3 a4 a5 a6 = (GHC.Prim.newBCO#) a1 a2 a3 a4 a5 a6
# NOINLINE unpackClosure # #
unpackClosure# :: a -> (# Addr#,Array# b,ByteArray# #)
unpackClosure# a1 = (GHC.Prim.unpackClosure#) a1
# NOINLINE getApStackVal # #
getApStackVal# :: a -> Int# -> (# Int#,b #)
getApStackVal# a1 a2 = (GHC.Prim.getApStackVal#) a1 a2
# NOINLINE getCCSOf # #
getCCSOf# :: a -> State# s -> (# State# s,Addr# #)
getCCSOf# a1 a2 = (GHC.Prim.getCCSOf#) a1 a2
# NOINLINE getCurrentCCS # #
getCurrentCCS# :: a -> State# s -> (# State# s,Addr# #)
getCurrentCCS# a1 a2 = (GHC.Prim.getCurrentCCS#) a1 a2
# NOINLINE clearCCS # #
clearCCS# :: (State# s -> (# State# s,a #)) -> State# s -> (# State# s,a #)
clearCCS# a1 a2 = (GHC.Prim.clearCCS#) a1 a2
# NOINLINE traceEvent # #
traceEvent# :: Addr# -> State# s -> State# s
traceEvent# a1 a2 = (GHC.Prim.traceEvent#) a1 a2
# NOINLINE traceMarker # #
traceMarker# :: Addr# -> State# s -> State# s
traceMarker# a1 a2 = (GHC.Prim.traceMarker#) a1 a2
# NOINLINE prefetchByteArray3 # #
prefetchByteArray3# :: ByteArray# -> Int# -> State# s -> State# s
prefetchByteArray3# a1 a2 a3 = (GHC.Prim.prefetchByteArray3#) a1 a2 a3
# NOINLINE prefetchMutableByteArray3 # #
prefetchMutableByteArray3# :: MutableByteArray# s -> Int# -> State# s -> State# s
prefetchMutableByteArray3# a1 a2 a3 = (GHC.Prim.prefetchMutableByteArray3#) a1 a2 a3
# NOINLINE prefetchAddr3 # #
prefetchAddr3# :: Addr# -> Int# -> State# s -> State# s
prefetchAddr3# a1 a2 a3 = (GHC.Prim.prefetchAddr3#) a1 a2 a3
# NOINLINE prefetchValue3 # #
prefetchValue3# :: a -> State# s -> State# s
prefetchValue3# a1 a2 = (GHC.Prim.prefetchValue3#) a1 a2
# NOINLINE prefetchByteArray2 # #
prefetchByteArray2# :: ByteArray# -> Int# -> State# s -> State# s
prefetchByteArray2# a1 a2 a3 = (GHC.Prim.prefetchByteArray2#) a1 a2 a3
# NOINLINE prefetchMutableByteArray2 # #
prefetchMutableByteArray2# :: MutableByteArray# s -> Int# -> State# s -> State# s
prefetchMutableByteArray2# a1 a2 a3 = (GHC.Prim.prefetchMutableByteArray2#) a1 a2 a3
# NOINLINE prefetchAddr2 # #
prefetchAddr2# :: Addr# -> Int# -> State# s -> State# s
prefetchAddr2# a1 a2 a3 = (GHC.Prim.prefetchAddr2#) a1 a2 a3
# NOINLINE prefetchValue2 # #
prefetchValue2# :: a -> State# s -> State# s
prefetchValue2# a1 a2 = (GHC.Prim.prefetchValue2#) a1 a2
# NOINLINE prefetchByteArray1 # #
prefetchByteArray1# :: ByteArray# -> Int# -> State# s -> State# s
prefetchByteArray1# a1 a2 a3 = (GHC.Prim.prefetchByteArray1#) a1 a2 a3
# NOINLINE prefetchMutableByteArray1 # #
prefetchMutableByteArray1# :: MutableByteArray# s -> Int# -> State# s -> State# s
prefetchMutableByteArray1# a1 a2 a3 = (GHC.Prim.prefetchMutableByteArray1#) a1 a2 a3
prefetchAddr1# :: Addr# -> Int# -> State# s -> State# s
prefetchAddr1# a1 a2 a3 = (GHC.Prim.prefetchAddr1#) a1 a2 a3
# NOINLINE prefetchValue1 # #
prefetchValue1# :: a -> State# s -> State# s
prefetchValue1# a1 a2 = (GHC.Prim.prefetchValue1#) a1 a2
# NOINLINE prefetchByteArray0 # #
prefetchByteArray0# :: ByteArray# -> Int# -> State# s -> State# s
prefetchByteArray0# a1 a2 a3 = (GHC.Prim.prefetchByteArray0#) a1 a2 a3
# NOINLINE prefetchMutableByteArray0 # #
prefetchMutableByteArray0# :: MutableByteArray# s -> Int# -> State# s -> State# s
prefetchMutableByteArray0# a1 a2 a3 = (GHC.Prim.prefetchMutableByteArray0#) a1 a2 a3
# NOINLINE prefetchAddr0 # #
prefetchAddr0# :: Addr# -> Int# -> State# s -> State# s
prefetchAddr0# a1 a2 a3 = (GHC.Prim.prefetchAddr0#) a1 a2 a3
# NOINLINE prefetchValue0 # #
prefetchValue0# :: a -> State# s -> State# s
prefetchValue0# a1 a2 = (GHC.Prim.prefetchValue0#) a1 a2
|
c023c88bab15159e1780af7859c65452c664319ad8171378dcc9afc6a55d3661 | ocurrent/mirage-ci | website.ml | module Git = Current_git
module Github = Current_github
module Website_description = struct
open Tyxml_html
let extra_routes = []
module Output = struct
type t = unit
let marshal () = ""
let unmarshal _ = ()
open Tyxml_html
let render_inline () = txt ""
end
module Node = struct
type t = string
let render_inline name = txt name
let map_status _ = Fun.id
let marshal = Fun.id
let unmarshal = Fun.id
end
module Stage = struct
type t = string
let id name = name
let render_inline name = txt name
let render _ = txt ""
let marshal = Fun.id
let unmarshal = Fun.id
end
module Pipeline = struct
module Group = struct
type t =
| Local
| Mirage
| Mirage_dev
| Mirage_skeleton
| Opam_overlays
| Mirage_opam_overlays
let id = function
| Local -> "local"
| Mirage -> "mirage/mirage"
| Mirage_dev -> "mirage/mirage-dev"
| Mirage_skeleton -> "mirage/mirage-skeleton"
| Opam_overlays -> "dune-universe/opam-overlays"
| Mirage_opam_overlays -> "dune-universe/mirage-opam-overlays"
let to_string = id
end
module Source = struct
let build_mode_to_string = function
| `Mirage_3 -> "mirage-3"
| `Mirage_4 -> "mirage-4"
let branch_name ref =
match String.split_on_char '/' ref with
| [ "refs"; "heads"; b ] -> b
| _ -> "failure"
(* the exposed metadata *)
type metadata_gh = {
kind :
[ `Mirage | `Mirage_dev | `Mirage_skeleton | `Overlay of string ];
build_mode : [ `Mirage_4 | `Mirage_3 ];
commit : string;
ref : Github.Api.Ref.t;
owner : string;
name : string;
friend_prs : (string * Github.Api.Ref.pr_info) list;
}
let gh_id = function
| { ref = `PR { id; _ }; owner; name; commit; build_mode; _ } ->
Fmt.str "pr-%d-%s-%s-%s-%s" id
(build_mode_to_string build_mode)
owner name commit
| { ref = `Ref ref; owner; name; commit; build_mode; _ } ->
Fmt.str "branch-%s-%s-%s-%s-%s" (branch_name ref)
(build_mode_to_string build_mode)
owner name commit
type t = [ `Local of [ `Mirage_4 | `Mirage_3 ] | `Github of metadata_gh ]
let group = function
| `Local _ -> Group.Local
| `Github { kind = `Mirage; _ } -> Mirage
| `Github { kind = `Mirage_dev; _ } -> Mirage_dev
| `Github { kind = `Mirage_skeleton; _ } -> Mirage_skeleton
| `Github { kind = `Overlay "opam-overlays"; _ } -> Opam_overlays
| `Github { kind = `Overlay "mirage-opam-overlays"; _ } ->
Mirage_opam_overlays
| `Github { kind = `Overlay _; _ } ->
failwith "unknown overlay repository"
let build_mode_to_string = function
| `Mirage_3 -> "mirage-3"
| `Mirage_4 -> "mirage-4"
let branch_name ref =
match String.split_on_char '/' ref with
| [ "refs"; "heads"; b ] -> b
| _ -> "failure"
let id = function
| `Local `Mirage_3 -> "local-mirage-3"
| `Local `Mirage_4 -> "local-mirage-4"
| `Github { ref = `PR { id; _ }; owner; name; build_mode; _ } ->
Fmt.str "pr-%d-%s-%s-%s" id
(build_mode_to_string build_mode)
owner name
| `Github { ref = `Ref ref; owner; name; build_mode; _ } ->
Fmt.str "branch-%s-%s-%s-%s" (branch_name ref)
(build_mode_to_string build_mode)
owner name
let to_string = function
| `Local `Mirage_3 -> "Local (mirage 3)"
| `Local `Mirage_4 -> "Local (mirage 4)"
| `Github { ref = `PR { id; title; _ }; _ } ->
Fmt.str "PR #%d: %s" id title
| `Github { ref = `Ref ref; _ } -> Fmt.str "Branch %s" (branch_name ref)
let compare a b =
match (a, b) with
| `Github { ref = `Ref ref_a; _ }, `Github { ref = `Ref ref_b; _ } ->
String.compare ref_a ref_b
| ( `Github { ref = `PR { id = id_a; _ }; _ },
`Github { ref = `PR { id = id_b; _ }; _ } ) ->
id_b - id_a
| a, b -> String.compare (to_string a) (to_string b)
end
type t = Source.t
let to_link (t : t) =
match t with
| `Local _ -> ""
| `Github { ref = `PR { id; _ }; owner; name; _ } ->
Fmt.str "" owner name id
| `Github { ref = `Ref b; owner; name; _ } ->
Fmt.str "" owner name b
let build_mode_to_string = function
| `Mirage_3 -> "mirage-3"
| `Mirage_4 -> "mirage-4"
let id = function
| `Local `Mirage_3 -> "local-mirage-3"
| `Local `Mirage_4 -> "local-mirage-4"
| `Github gh -> Source.gh_id gh
let marshal v = Marshal.to_string v []
let unmarshal v = Marshal.from_string v 0
let source = Fun.id
let branch_name ref =
match String.split_on_char '/' ref with
| [ "refs"; "heads"; b ] -> b
| _ -> "failure"
let render_inline (t : t) =
match t with
| `Local b -> txt (Fmt.str "%s" (build_mode_to_string b))
| `Github { commit; _ } ->
let commit_hash = String.sub commit 0 7 in
txt ("@" ^ commit_hash)
let render (t : t) =
match t with
| `Local _ -> div []
| `Github { friend_prs = []; _ } ->
div [ txt "Link to "; a ~a:[ a_href (to_link t) ] [ txt "Github" ] ]
| `Github { friend_prs; _ } ->
div
[
txt "Link to ";
a ~a:[ a_href (to_link t) ] [ txt "Github" ];
br ();
h3 [ txt "Friend PRs" ];
i [ txt "This PR is tested along with the following PRs:" ];
ul
(List.map
(fun (repo, { Github.Api.Ref.id; title; _ }) ->
li
[
txt (Fmt.str "%s: PR " repo);
a
~a:
[
a_href
(Fmt.str "" repo
id);
]
[ txt (Fmt.str "#%d" id) ];
txt (Fmt.str ": %s" title);
])
friend_prs);
br ();
i
[
txt
"To use that feature, simply mention target PR in the \
original PR's description.";
];
]
end
let render_index () =
div
[
h1 [ txt "Mirage CI" ];
span
[
txt "Source code available here: ";
a
~a:[ a_href "-ci" ]
[ txt "-ci" ];
];
]
end
include Current_web_pipelines.Web.Make (Website_description)
| null | https://raw.githubusercontent.com/ocurrent/mirage-ci/b83807d40c802663df50340c0e1ba1dc694df055/src/website/website.ml | ocaml | the exposed metadata | module Git = Current_git
module Github = Current_github
module Website_description = struct
open Tyxml_html
let extra_routes = []
  (* Pipeline-output metadata shown by the site; this site renders none. *)
  module Output = struct
    type t = unit
    let marshal () = ""
    let unmarshal _ = ()
    open Tyxml_html
    (* Outputs render as an empty text node. *)
    let render_inline () = txt ""
  end
  (* A pipeline node is identified and rendered by its bare name. *)
  module Node = struct
    type t = string
    (* Node status passes through untouched. *)
    let render_inline name = txt name
    let map_status _ = Fun.id
    (* Names are already strings, so (un)marshalling is the identity. *)
    let marshal = Fun.id
    let unmarshal = Fun.id
  end
  (* A pipeline stage is likewise just its name. *)
  module Stage = struct
    type t = string
    let id name = name
    let render_inline name = txt name
    (* Stages contribute no detail-page body. *)
    let render _ = txt ""
    let marshal = Fun.id
    let unmarshal = Fun.id
  end
module Pipeline = struct
    (* Buckets used to group pipelines in the UI: one per tracked
       repository family, plus a catch-all for local runs. *)
    module Group = struct
      type t =
        | Local
        | Mirage
        | Mirage_dev
        | Mirage_skeleton
        | Opam_overlays
        | Mirage_opam_overlays
      (* Stable identifier; for GitHub-backed groups this is the
         "owner/repo" path. *)
      let id = function
        | Local -> "local"
        | Mirage -> "mirage/mirage"
        | Mirage_dev -> "mirage/mirage-dev"
        | Mirage_skeleton -> "mirage/mirage-skeleton"
        | Opam_overlays -> "dune-universe/opam-overlays"
        | Mirage_opam_overlays -> "dune-universe/mirage-opam-overlays"
      (* The display name is the same string as the identifier. *)
      let to_string = id
    end
module Source = struct
      (* Slug used when composing pipeline identifiers. *)
      let build_mode_to_string = function
        | `Mirage_3 -> "mirage-3"
        | `Mirage_4 -> "mirage-4"
      (* Bare branch name of a "refs/heads/<b>" ref; any other shape
         collapses to the sentinel string "failure". *)
      let branch_name ref =
        match String.split_on_char '/' ref with
        | [ "refs"; "heads"; b ] -> b
        | _ -> "failure"
      (* Everything remembered about a GitHub-triggered pipeline source. *)
      type metadata_gh = {
        kind :
          [ `Mirage | `Mirage_dev | `Mirage_skeleton | `Overlay of string ];
        build_mode : [ `Mirage_4 | `Mirage_3 ];
        commit : string;  (* commit hash under test *)
        ref : Github.Api.Ref.t;
        owner : string;  (* GitHub owner *)
        name : string;  (* GitHub repository name *)
        friend_prs : (string * Github.Api.Ref.pr_info) list;
        (* companion PRs tested together with this one (see [render]) *)
      }
      (* Unique id for a GitHub source; includes the commit hash so each
         tested revision gets a distinct entry. *)
      let gh_id = function
        | { ref = `PR { id; _ }; owner; name; commit; build_mode; _ } ->
            Fmt.str "pr-%d-%s-%s-%s-%s" id
              (build_mode_to_string build_mode)
              owner name commit
        | { ref = `Ref ref; owner; name; commit; build_mode; _ } ->
            Fmt.str "branch-%s-%s-%s-%s-%s" (branch_name ref)
              (build_mode_to_string build_mode)
              owner name commit
      type t = [ `Local of [ `Mirage_4 | `Mirage_3 ] | `Github of metadata_gh ]
      (* Map a source to its UI group; only the two known overlay
         repositories are accepted, anything else is a hard failure. *)
      let group = function
        | `Local _ -> Group.Local
        | `Github { kind = `Mirage; _ } -> Mirage
        | `Github { kind = `Mirage_dev; _ } -> Mirage_dev
        | `Github { kind = `Mirage_skeleton; _ } -> Mirage_skeleton
        | `Github { kind = `Overlay "opam-overlays"; _ } -> Opam_overlays
        | `Github { kind = `Overlay "mirage-opam-overlays"; _ } ->
            Mirage_opam_overlays
        | `Github { kind = `Overlay _; _ } ->
            failwith "unknown overlay repository"
let build_mode_to_string = function
| `Mirage_3 -> "mirage-3"
| `Mirage_4 -> "mirage-4"
let branch_name ref =
match String.split_on_char '/' ref with
| [ "refs"; "heads"; b ] -> b
| _ -> "failure"
      (* Identifier for a source without the commit hash (contrast
         [gh_id], which includes it). *)
      let id = function
        | `Local `Mirage_3 -> "local-mirage-3"
        | `Local `Mirage_4 -> "local-mirage-4"
        | `Github { ref = `PR { id; _ }; owner; name; build_mode; _ } ->
            Fmt.str "pr-%d-%s-%s-%s" id
              (build_mode_to_string build_mode)
              owner name
        | `Github { ref = `Ref ref; owner; name; build_mode; _ } ->
            Fmt.str "branch-%s-%s-%s-%s" (branch_name ref)
              (build_mode_to_string build_mode)
              owner name
      (* Human-readable label shown in the UI. *)
      let to_string = function
        | `Local `Mirage_3 -> "Local (mirage 3)"
        | `Local `Mirage_4 -> "Local (mirage 4)"
        | `Github { ref = `PR { id; title; _ }; _ } ->
            Fmt.str "PR #%d: %s" id title
        | `Github { ref = `Ref ref; _ } -> Fmt.str "Branch %s" (branch_name ref)
      (* Listing order: branches alphabetically by full ref; PRs by
         DESCENDING id (newest first — note the reversed subtraction,
         safe here since PR ids are small); mixed cases fall back to
         comparing display labels. *)
      let compare a b =
        match (a, b) with
        | `Github { ref = `Ref ref_a; _ }, `Github { ref = `Ref ref_b; _ } ->
            String.compare ref_a ref_b
        | ( `Github { ref = `PR { id = id_a; _ }; _ },
            `Github { ref = `PR { id = id_b; _ }; _ } ) ->
            id_b - id_a
        | a, b -> String.compare (to_string a) (to_string b)
end
type t = Source.t
let to_link (t : t) =
match t with
| `Local _ -> ""
| `Github { ref = `PR { id; _ }; owner; name; _ } ->
Fmt.str "" owner name id
| `Github { ref = `Ref b; owner; name; _ } ->
Fmt.str "" owner name b
    (* Build-mode slug at the [Pipeline] level (same mapping as in
       [Source]). *)
    let build_mode_to_string = function
      | `Mirage_3 -> "mirage-3"
      | `Mirage_4 -> "mirage-4"
    (* Pipeline identifier: fixed ids for local runs, [Source.gh_id]
       (commit-qualified) for GitHub runs. *)
    let id = function
      | `Local `Mirage_3 -> "local-mirage-3"
      | `Local `Mirage_4 -> "local-mirage-4"
      | `Github gh -> Source.gh_id gh
    (* Persistence via the Marshal module.
       NOTE(review): [Marshal.from_string] is unsafe on untrusted or
       stale data — assumed to only read values this process wrote. *)
    let marshal v = Marshal.to_string v []
    let unmarshal v = Marshal.from_string v 0
    let source = Fun.id
    (* Same ref-to-branch-name helper as in [Source]. *)
    let branch_name ref =
      match String.split_on_char '/' ref with
      | [ "refs"; "heads"; b ] -> b
      | _ -> "failure"
    (* Compact representation: the build mode for local runs, or an
       abbreviated "@<hash>" for GitHub runs.
       NOTE(review): [String.sub commit 0 7] raises if the commit string
       is shorter than 7 characters — assumed to be a full hash. *)
    let render_inline (t : t) =
      match t with
      | `Local b -> txt (Fmt.str "%s" (build_mode_to_string b))
      | `Github { commit; _ } ->
          let commit_hash = String.sub commit 0 7 in
          txt ("@" ^ commit_hash)
    (* Detail-page body for a pipeline source: nothing for local runs, a
       GitHub link otherwise, plus a "Friend PRs" section when companion
       PRs are tested alongside this one. *)
    let render (t : t) =
      match t with
      | `Local _ -> div []
      | `Github { friend_prs = []; _ } ->
          div [ txt "Link to "; a ~a:[ a_href (to_link t) ] [ txt "Github" ] ]
      | `Github { friend_prs; _ } ->
          div
            [
              txt "Link to ";
              a ~a:[ a_href (to_link t) ] [ txt "Github" ];
              br ();
              h3 [ txt "Friend PRs" ];
              i [ txt "This PR is tested along with the following PRs:" ];
              ul
                (List.map
                   (fun (repo, { Github.Api.Ref.id; title; _ }) ->
                     li
                       [
                         txt (Fmt.str "%s: PR " repo);
                         a
                           ~a:
                             [
                               a_href
                                 (* NOTE(review): this format string
                                    appears stripped — it is "" yet is
                                    applied to [repo] and [id]; likely
                                    originally a GitHub pull URL. *)
                                 (Fmt.str "" repo
                                    id);
                             ]
                           [ txt (Fmt.str "#%d" id) ];
                         txt (Fmt.str ": %s" title);
                       ])
                   friend_prs);
              br ();
              i
                [
                  txt
                    "To use that feature, simply mention target PR in the \
                     original PR's description.";
                ];
            ]
end
let render_index () =
div
[
h1 [ txt "Mirage CI" ];
span
[
txt "Source code available here: ";
a
~a:[ a_href "-ci" ]
[ txt "-ci" ];
];
]
end
include Current_web_pipelines.Web.Make (Website_description)
|
0d79f658d24b1b4610bdaa84c15945f770b63130053373ace2fa4df16ed93b19 | adamwalker/clash-utils | FieldExtractSpec.hs | module FieldExtractSpec where
import qualified Clash.Prelude as Clash
import Clash.Prelude (Signal, Vec(..), BitVector, Index, Signed, Unsigned, SFixed, Bit, SNat(..),
simulate, simulate_lazy, listToVecTH, KnownNat, pack, unpack, (++#), mealy, mux, bundle, unbundle,
HiddenClockResetEnable, System)
import Test.Hspec
import Test.QuickCheck
import Data.Default
import Clash.Stream.FieldExtract
-- | Pack a byte stream into 32-bit words, four bytes per word. The input
-- length must be a multiple of four; a trailing partial word is a
-- programming error and aborts via 'error'.
streamBytes :: [BitVector 8] -> [BitVector 32]
streamBytes []                       = []
streamBytes (b0 : b1 : b2 : b3 : bs) = b0 ++# b1 ++# b2 ++# b3 : streamBytes bs
streamBytes leftover                 = error $ "streamBytes: " ++ show leftover
-- | A list of @len@ elements that is 'def' everywhere except position
-- @idx@, which holds @val@.
valAtIdx :: Default a => Int -> Int -> a -> [a]
valAtIdx len idx val =
    [ if pos == idx then val else def | pos <- [0 .. len - 1] ]
-- | A list of @len@ elements that is 'def' everywhere except for the
-- slice starting at @idx@, which holds the elements of @val@.
fieldAtIdx :: Default a => Int -> Int -> [a] -> [a]
fieldAtIdx len idx val = prefix ++ val ++ suffix
    where
        prefix = replicate idx def
        suffix = replicate (len - idx - length val) def
-- | Harness for the field-extraction parsers under test: supplies a
-- byte-offset counter that advances by 4 each cycle (one 32-bit word per
-- cycle) and a permanently asserted 'Bool' flag alongside the input
-- word stream.
testParser
    :: forall dom a. HiddenClockResetEnable dom
    => (Signal dom (Unsigned 8) -> Signal dom Bool -> Signal dom (BitVector 32) -> Signal dom a)
    -- ^ Parser under test: byte offset -> flag -> input word -> result
    -> Signal dom (BitVector 32)
    -- ^ Input word stream
    -> Signal dom a
testParser func inp = out
    where
        -- Byte offset of the current word; starts at 0 and wraps modulo
        -- 256 ('Unsigned' 8 arithmetic).
        counter :: Signal dom (Unsigned 8)
        counter = Clash.register 0 (counter + 4)
        out = func counter (pure True) inp
spec :: SpecWith ()
spec = describe "Message receiver" $ do
    -- Registered byte extraction: a byte placed at a random offset in the
    -- stream must appear in the output at sample index pktLen (the stream
    -- carries (pktLen + 1) words, so the result lags the input).
    it "byteExtract" $
        property $ forAll (choose (0, 255)) $ \offset ->
            forAll (choose (offset `quot` 4 + 1, 64)) $ \pktLen ->
                \val ->
                    let res = simulate_lazy @System (testParser (byteExtractAccum (fromIntegral offset)))
                            $ streamBytes
                            $ valAtIdx ((pktLen + 1) * 4) offset val
                    in res !! pktLen == val
    -- Registered 4-byte field extraction; same sampling point as above,
    -- but the whole 4-byte field must be reassembled.
    it "fieldExtract" $
        property $ forAll (choose (0, 252)) $ \offset ->
            forAll (choose ((offset + 3) `quot` 4 + 1, 64)) $ \pktLen ->
                \(v@(x :> y :> z :> w :> Nil) :: Vec 4 (BitVector 8)) ->
                    let res = simulate_lazy @System (testParser (fieldExtractAccum (fromIntegral offset)))
                            $ streamBytes
                            $ fieldAtIdx ((pktLen + 1) * 4) offset [x, y, z, w]
                    in res !! pktLen == v
    -- Combinational byte extraction: the stream is one word shorter and
    -- the output is sampled one index earlier than the registered case.
    it "byteExtractComb" $
        property $ forAll (choose (0, 255)) $ \offset ->
            forAll (choose (offset `quot` 4 + 1, 64)) $ \pktLen ->
                \val ->
                    let res = simulate_lazy @System (testParser (byteExtractAccumComb (fromIntegral offset)))
                            $ streamBytes
                            $ valAtIdx (pktLen * 4) offset val
                    in res !! (pktLen - 1) == val
    -- Combinational 4-byte field extraction, sampled one index earlier.
    it "fieldExtractComb" $
        property $ forAll (choose (0, 252)) $ \offset ->
            forAll (choose ((offset + 3) `quot` 4 + 1, 64)) $ \pktLen ->
                \(v@(x :> y :> z :> w :> Nil) :: Vec 4 (BitVector 8)) ->
                    let res = simulate_lazy @System (testParser (fieldExtractAccumComb (fromIntegral offset)))
                            $ streamBytes
                            $ fieldAtIdx (pktLen * 4) offset [x, y, z, w]
                    in res !! (pktLen - 1) == v
| null | https://raw.githubusercontent.com/adamwalker/clash-utils/375c61131e21e9a239b80bdb929ae77f156d056f/tests/FieldExtractSpec.hs | haskell | module FieldExtractSpec where
import qualified Clash.Prelude as Clash
import Clash.Prelude (Signal, Vec(..), BitVector, Index, Signed, Unsigned, SFixed, Bit, SNat(..),
simulate, simulate_lazy, listToVecTH, KnownNat, pack, unpack, (++#), mealy, mux, bundle, unbundle,
HiddenClockResetEnable, System)
import Test.Hspec
import Test.QuickCheck
import Data.Default
import Clash.Stream.FieldExtract
streamBytes :: [BitVector 8] -> [BitVector 32]
streamBytes (x : y : z : w : rest) = x ++# y ++# z ++# w : streamBytes rest
streamBytes [] = []
streamBytes r = error $ "streamBytes: " ++ show r
valAtIdx :: Default a => Int -> Int -> a -> [a]
valAtIdx len idx val = map func [0..len-1]
where
func x
| x == idx = val
| otherwise = def
fieldAtIdx :: Default a => Int -> Int -> [a] -> [a]
fieldAtIdx len idx val = replicate idx def ++ val ++ replicate (len - idx - (length val)) def
testParser
:: forall dom a. HiddenClockResetEnable dom
=> (Signal dom (Unsigned 8) -> Signal dom Bool -> Signal dom (BitVector 32) -> Signal dom a)
-> Signal dom (BitVector 32)
-> Signal dom a
testParser func inp = out
where
counter :: Signal dom (Unsigned 8)
counter = Clash.register 0 (counter + 4)
out = func counter (pure True) inp
spec :: SpecWith ()
spec = describe "Message receiver" $ do
it "byteExtract" $
property $ forAll (choose (0, 255)) $ \offset ->
forAll (choose (offset `quot` 4 + 1, 64)) $ \pktLen ->
\val ->
let res = simulate_lazy @System (testParser (byteExtractAccum (fromIntegral offset)))
$ streamBytes
$ valAtIdx ((pktLen + 1) * 4) offset val
in res !! pktLen == val
it "fieldExtract" $
property $ forAll (choose (0, 252)) $ \offset ->
forAll (choose ((offset + 3) `quot` 4 + 1, 64)) $ \pktLen ->
\(v@(x :> y :> z :> w :> Nil) :: Vec 4 (BitVector 8)) ->
let res = simulate_lazy @System (testParser (fieldExtractAccum (fromIntegral offset)))
$ streamBytes
$ fieldAtIdx ((pktLen + 1) * 4) offset [x, y, z, w]
in res !! pktLen == v
it "byteExtractComb" $
property $ forAll (choose (0, 255)) $ \offset ->
forAll (choose (offset `quot` 4 + 1, 64)) $ \pktLen ->
\val ->
let res = simulate_lazy @System (testParser (byteExtractAccumComb (fromIntegral offset)))
$ streamBytes
$ valAtIdx (pktLen * 4) offset val
in res !! (pktLen - 1) == val
it "fieldExtractComb" $
property $ forAll (choose (0, 252)) $ \offset ->
forAll (choose ((offset + 3) `quot` 4 + 1, 64)) $ \pktLen ->
\(v@(x :> y :> z :> w :> Nil) :: Vec 4 (BitVector 8)) ->
let res = simulate_lazy @System (testParser (fieldExtractAccumComb (fromIntegral offset)))
$ streamBytes
$ fieldAtIdx (pktLen * 4) offset [x, y, z, w]
in res !! (pktLen - 1) == v
| |
1dfecf0d70631b1098b907500d82992b73de556408a8f80ecb5f0236102f73a6 | haskell-works/hw-xml | Types.hs | module HaskellWorks.Data.Xml.Token.Types (XmlToken(..)) where
-- | A lexical token, parameterised over the string representation @s@
-- and the number representation @d@.
--
-- NOTE(review): the constructor names (braces, brackets, comma, colon,
-- null) describe a JSON-style token alphabet reused by this package.
data XmlToken s d
  = XmlTokenBraceL        -- ^ @{@
  | XmlTokenBraceR        -- ^ @}@
  | XmlTokenBracketL      -- ^ @[@
  | XmlTokenBracketR      -- ^ @]@
  | XmlTokenComma         -- ^ @,@
  | XmlTokenColon         -- ^ @:@
  | XmlTokenWhitespace    -- ^ a run of whitespace
  | XmlTokenString s      -- ^ string literal payload
  | XmlTokenBoolean Bool  -- ^ boolean literal
  | XmlTokenNumber d      -- ^ numeric literal payload
  | XmlTokenNull          -- ^ null literal
  deriving (Eq, Show)
| null | https://raw.githubusercontent.com/haskell-works/hw-xml/e30a4cd8e6dc7451263a3d45c1ae28b3f35d0079/src/HaskellWorks/Data/Xml/Token/Types.hs | haskell | module HaskellWorks.Data.Xml.Token.Types (XmlToken(..)) where
data XmlToken s d
= XmlTokenBraceL
| XmlTokenBraceR
| XmlTokenBracketL
| XmlTokenBracketR
| XmlTokenComma
| XmlTokenColon
| XmlTokenWhitespace
| XmlTokenString s
| XmlTokenBoolean Bool
| XmlTokenNumber d
| XmlTokenNull
deriving (Eq, Show)
| |
61ecec41586d9ceeef108e14cba7e6866e4ccf2aac7cf49d7ceba316b8c27b2d | input-output-hk/plutus | BasicPlutusTx.hs | -- BLOCK1
-- Necessary language extensions for the Plutus Tx compiler to work.
{-# LANGUAGE DataKinds #-}
# LANGUAGE NoImplicitPrelude #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TemplateHaskell #
module BasicPlutusTx where
import PlutusCore.Default qualified as PLC
-- Main Plutus Tx module.
import PlutusTx
-- Additional support for lifting.
import PlutusTx.Lift
Builtin functions .
import PlutusTx.Builtins
The Plutus Tx Prelude , discussed further below .
import PlutusTx.Prelude
-- Setup for doctest examples.
-- $setup
> > > import Tutorial . PlutusTx
> > > import PlutusTx
> > > import PlutusCore
> > > import PlutusCore . Evaluation . Machine . Ck
-- >>> import Data.Text.Prettyprint.Doc
-- BLOCK2
integerOne :: CompiledCode Integer
' compile ' turns the ' TExpQ Integer ' into a
' TExpQ ( CompiledCode Integer ) ' and the splice
inserts it into the program .
'TExpQ (CompiledCode Integer)' and the splice
inserts it into the program. -}
integerOne = $$(compile
The quote has type ' TExpQ Integer ' .
We always use unbounded integers in Plutus Core , so we have to pin
down this numeric literal to an ` ` Integer ` ` rather than an ` ` Int ` ` .
We always use unbounded integers in Plutus Core, so we have to pin
down this numeric literal to an ``Integer`` rather than an ``Int``. -}
[|| (1 :: Integer) ||])
|
> > > pretty $ getPlc integerOne
( program 1.0.0
( con 1 )
)
>>> pretty $ getPlc integerOne
(program 1.0.0
(con 1)
)
-}
integerIdentity :: CompiledCode (Integer -> Integer)
integerIdentity = $$(compile [|| \(x:: Integer) -> x ||])
{- |
>>> pretty $ getPlc integerIdentity
(program 1.0.0
(lam ds (con integer) ds)
)
-}
Functions which will be used in Tx programs should be marked
with GHC ’s ' INLINABLE ' pragma . This is usually necessary for
non - local functions to be usable in Plutus Tx blocks , as it instructs
GHC to keep the information that the Tx compiler needs . While
you may be able to get away with omitting it , it is good practice to
always include it .
with GHC’s 'INLINABLE' pragma. This is usually necessary for
non-local functions to be usable in Plutus Tx blocks, as it instructs
GHC to keep the information that the Plutus Tx compiler needs. While
you may be able to get away with omitting it, it is good practice to
always include it. -}
{-# INLINABLE plusOne #-}
plusOne :: Integer -> Integer
' ' comes from ' PlutusTx . Builtins ' , and is
mapped to the builtin integer addition function in Plutus Core .
mapped to the builtin integer addition function in Plutus Core. -}
plusOne x = x `addInteger` 1
# INLINABLE myProgram #
myProgram :: Integer
myProgram =
let
-- Local functions do not need to be marked as 'INLINABLE'.
plusOneLocal :: Integer -> Integer
plusOneLocal x = x `addInteger` 1
localTwo = plusOneLocal 1
externalTwo = plusOne 1
in localTwo `addInteger` externalTwo
functions :: CompiledCode Integer
functions = $$(compile [|| myProgram ||])
We ’ve used the CK evaluator for to evaluate the program
and check that the result was what we expected .
and check that the result was what we expected. -}
|
> > > pretty $ unsafeEvaluateCk $ toTerm $ getPlc functions
( con 4 )
>>> pretty $ unsafeEvaluateCk $ toTerm $ getPlc functions
(con 4)
-}
-- BLOCK5
matchMaybe :: CompiledCode (Maybe Integer -> Integer)
matchMaybe = $$(compile [|| \(x:: Maybe Integer) -> case x of
Just n -> n
Nothing -> 0
||])
-- BLOCK6
-- | Either a specific end date, or "never".
data EndDate = Fixed Integer | Never
-- | Check whether a given time is past the end date.
pastEnd :: CompiledCode (EndDate -> Integer -> Bool)
pastEnd = $$(compile [|| \(end::EndDate) (current::Integer) -> case end of
Fixed n -> n `lessThanEqualsInteger` current
Never -> False
||])
BLOCK7
-- | Check whether a given time is past the end date.
pastEnd' :: CompiledCode (EndDate -> Integer -> Bool)
pastEnd' = $$(compile [|| \(end::EndDate) (current::Integer) -> case end of
Fixed n -> n < current
Never -> False
||])
BLOCK8
addOne :: CompiledCode (Integer -> Integer)
addOne = $$(compile [|| \(x:: Integer) -> x `addInteger` 1 ||])
addOneToN :: Integer -> CompiledCode Integer
addOneToN n =
addOne
' unsafeApplyCode ' applies one ' CompiledCode ' to another .
`unsafeApplyCode`
-- 'liftCode' lifts the argument 'n' into a
' CompiledCode Integer ' .
liftCode n
|
> > > pretty $ getPlc addOne
( program 1.0.0
[
( lam
( fun ( con integer ) ( fun ( con integer ) ( con integer ) ) )
( lam ds ( con integer ) [ [ ] ( con 1 ) ] )
)
( lam
arg
( con integer )
( lam arg ( con integer ) [ [ ( builtin ) arg ] arg ] )
)
]
)
> > > let program = getPlc $ addOneToN 4
> > > pretty program
( program 1.0.0
[
[
( lam
( fun ( con integer ) ( fun ( con integer ) ( con integer ) ) )
( lam ds ( con integer ) [ [ ] ( con 1 ) ] )
)
( lam
arg
( con integer )
( lam arg ( con integer ) [ [ ( builtin ) arg ] arg ] )
)
]
( con 4 )
]
)
> > > pretty $ unsafeEvaluateCk $ toTerm program
( con 5 )
>>> pretty $ getPlc addOne
(program 1.0.0
[
(lam
addInteger
(fun (con integer) (fun (con integer) (con integer)))
(lam ds (con integer) [ [ addInteger ds ] (con 1) ])
)
(lam
arg
(con integer)
(lam arg (con integer) [ [ (builtin addInteger) arg ] arg ])
)
]
)
>>> let program = getPlc $ addOneToN 4
>>> pretty program
(program 1.0.0
[
[
(lam
addInteger
(fun (con integer) (fun (con integer) (con integer)))
(lam ds (con integer) [ [ addInteger ds ] (con 1) ])
)
(lam
arg
(con integer)
(lam arg (con integer) [ [ (builtin addInteger) arg ] arg ])
)
]
(con 4)
]
)
>>> pretty $ unsafeEvaluateCk $ toTerm program
(con 5)
-}
BLOCK10
-- 'makeLift' generates instances of 'Lift' automatically.
makeLift ''EndDate
pastEndAt :: EndDate -> Integer -> CompiledCode Bool
pastEndAt end current =
pastEnd
`unsafeApplyCode`
liftCode end
`unsafeApplyCode`
liftCode current
|
> > > let program = getPlc $ pastEndAt Never 5
> > > pretty $ unsafeEvaluateCk $ toTerm program
( abs
out_Bool ( type ) ( lam case_True out_Bool ( lam case_False out_Bool case_False ) )
)
>>> let program = getPlc $ pastEndAt Never 5
>>> pretty $ unsafeEvaluateCk $ toTerm program
(abs
out_Bool (type) (lam case_True out_Bool (lam case_False out_Bool case_False))
)
-}
-- BLOCK11
| null | https://raw.githubusercontent.com/input-output-hk/plutus/bb9b5a18c26476fbf6b2f446ab267706426fec3a/doc/read-the-docs-site/tutorials/BasicPlutusTx.hs | haskell | BLOCK1
Necessary language extensions for the Plutus Tx compiler to work.
# LANGUAGE DataKinds #
Main Plutus Tx module.
Additional support for lifting.
Setup for doctest examples.
$setup
>>> import Data.Text.Prettyprint.Doc
BLOCK2
|
>>> pretty $ getPlc integerIdentity
(program 1.0.0
(lam ds (con integer) ds)
)
# INLINABLE plusOne #
Local functions do not need to be marked as 'INLINABLE'.
BLOCK5
BLOCK6
| Either a specific end date, or "never".
| Check whether a given time is past the end date.
| Check whether a given time is past the end date.
'liftCode' lifts the argument 'n' into a
'makeLift' generates instances of 'Lift' automatically.
BLOCK11 | # LANGUAGE NoImplicitPrelude #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TemplateHaskell #
module BasicPlutusTx where
import PlutusCore.Default qualified as PLC
import PlutusTx
import PlutusTx.Lift
Builtin functions .
import PlutusTx.Builtins
The Plutus Tx Prelude , discussed further below .
import PlutusTx.Prelude
> > > import Tutorial . PlutusTx
> > > import PlutusTx
> > > import PlutusCore
> > > import PlutusCore . Evaluation . Machine . Ck
integerOne :: CompiledCode Integer
' compile ' turns the ' TExpQ Integer ' into a
' TExpQ ( CompiledCode Integer ) ' and the splice
inserts it into the program .
'TExpQ (CompiledCode Integer)' and the splice
inserts it into the program. -}
integerOne = $$(compile
The quote has type ' TExpQ Integer ' .
We always use unbounded integers in Plutus Core , so we have to pin
down this numeric literal to an ` ` Integer ` ` rather than an ` ` Int ` ` .
We always use unbounded integers in Plutus Core, so we have to pin
down this numeric literal to an ``Integer`` rather than an ``Int``. -}
[|| (1 :: Integer) ||])
|
> > > pretty $ getPlc integerOne
( program 1.0.0
( con 1 )
)
>>> pretty $ getPlc integerOne
(program 1.0.0
(con 1)
)
-}
integerIdentity :: CompiledCode (Integer -> Integer)
integerIdentity = $$(compile [|| \(x:: Integer) -> x ||])
Functions which will be used in Tx programs should be marked
with GHC ’s ' INLINABLE ' pragma . This is usually necessary for
non - local functions to be usable in Plutus Tx blocks , as it instructs
GHC to keep the information that the Tx compiler needs . While
you may be able to get away with omitting it , it is good practice to
always include it .
with GHC’s 'INLINABLE' pragma. This is usually necessary for
non-local functions to be usable in Plutus Tx blocks, as it instructs
GHC to keep the information that the Plutus Tx compiler needs. While
you may be able to get away with omitting it, it is good practice to
always include it. -}
plusOne :: Integer -> Integer
' ' comes from ' PlutusTx . Builtins ' , and is
mapped to the builtin integer addition function in Plutus Core .
mapped to the builtin integer addition function in Plutus Core. -}
plusOne x = x `addInteger` 1
# INLINABLE myProgram #
myProgram :: Integer
myProgram =
let
plusOneLocal :: Integer -> Integer
plusOneLocal x = x `addInteger` 1
localTwo = plusOneLocal 1
externalTwo = plusOne 1
in localTwo `addInteger` externalTwo
functions :: CompiledCode Integer
functions = $$(compile [|| myProgram ||])
We ’ve used the CK evaluator for to evaluate the program
and check that the result was what we expected .
and check that the result was what we expected. -}
|
> > > pretty $ unsafeEvaluateCk $ toTerm $ getPlc functions
( con 4 )
>>> pretty $ unsafeEvaluateCk $ toTerm $ getPlc functions
(con 4)
-}
matchMaybe :: CompiledCode (Maybe Integer -> Integer)
matchMaybe = $$(compile [|| \(x:: Maybe Integer) -> case x of
Just n -> n
Nothing -> 0
||])
data EndDate = Fixed Integer | Never
pastEnd :: CompiledCode (EndDate -> Integer -> Bool)
pastEnd = $$(compile [|| \(end::EndDate) (current::Integer) -> case end of
Fixed n -> n `lessThanEqualsInteger` current
Never -> False
||])
BLOCK7
pastEnd' :: CompiledCode (EndDate -> Integer -> Bool)
pastEnd' = $$(compile [|| \(end::EndDate) (current::Integer) -> case end of
Fixed n -> n < current
Never -> False
||])
BLOCK8
addOne :: CompiledCode (Integer -> Integer)
addOne = $$(compile [|| \(x:: Integer) -> x `addInteger` 1 ||])
addOneToN :: Integer -> CompiledCode Integer
addOneToN n =
addOne
' unsafeApplyCode ' applies one ' CompiledCode ' to another .
`unsafeApplyCode`
' CompiledCode Integer ' .
liftCode n
|
> > > pretty $ getPlc addOne
( program 1.0.0
[
( lam
( fun ( con integer ) ( fun ( con integer ) ( con integer ) ) )
( lam ds ( con integer ) [ [ ] ( con 1 ) ] )
)
( lam
arg
( con integer )
( lam arg ( con integer ) [ [ ( builtin ) arg ] arg ] )
)
]
)
> > > let program = getPlc $ addOneToN 4
> > > pretty program
( program 1.0.0
[
[
( lam
( fun ( con integer ) ( fun ( con integer ) ( con integer ) ) )
( lam ds ( con integer ) [ [ ] ( con 1 ) ] )
)
( lam
arg
( con integer )
( lam arg ( con integer ) [ [ ( builtin ) arg ] arg ] )
)
]
( con 4 )
]
)
> > > pretty $ unsafeEvaluateCk $ toTerm program
( con 5 )
>>> pretty $ getPlc addOne
(program 1.0.0
[
(lam
addInteger
(fun (con integer) (fun (con integer) (con integer)))
(lam ds (con integer) [ [ addInteger ds ] (con 1) ])
)
(lam
arg
(con integer)
(lam arg (con integer) [ [ (builtin addInteger) arg ] arg ])
)
]
)
>>> let program = getPlc $ addOneToN 4
>>> pretty program
(program 1.0.0
[
[
(lam
addInteger
(fun (con integer) (fun (con integer) (con integer)))
(lam ds (con integer) [ [ addInteger ds ] (con 1) ])
)
(lam
arg
(con integer)
(lam arg (con integer) [ [ (builtin addInteger) arg ] arg ])
)
]
(con 4)
]
)
>>> pretty $ unsafeEvaluateCk $ toTerm program
(con 5)
-}
BLOCK10
makeLift ''EndDate
pastEndAt :: EndDate -> Integer -> CompiledCode Bool
pastEndAt end current =
pastEnd
`unsafeApplyCode`
liftCode end
`unsafeApplyCode`
liftCode current
|
> > > let program = getPlc $ pastEndAt Never 5
> > > pretty $ unsafeEvaluateCk $ toTerm program
( abs
out_Bool ( type ) ( lam case_True out_Bool ( lam case_False out_Bool case_False ) )
)
>>> let program = getPlc $ pastEndAt Never 5
>>> pretty $ unsafeEvaluateCk $ toTerm program
(abs
out_Bool (type) (lam case_True out_Bool (lam case_False out_Bool case_False))
)
-}
|
157843ee5b8fedae709d88197e895213e57ca92ff42641c191417cfaaa8498b9 | haskell-waargonaut/waargonaut | Test.hs | {-# LANGUAGE RankNTypes #-}
-- | Helper functions for testing your 'Decoder' and 'Encoder' functions.
--
module Waargonaut.Test
( roundTripSimple
) where
import Data.Text (Text)
import qualified Data.Text.Lazy as TextL
import Text.Parser.Char (CharParsing)
import Waargonaut.Encode (Encoder)
import qualified Waargonaut.Encode as E
import Waargonaut.Decode (CursorHistory, Decoder)
import qualified Waargonaut.Decode as D
import Waargonaut.Decode.Error (DecodeError)
-- | Check the round-trip law for an 'Encoder' \/ 'Decoder' pair: encoding
-- a value and then decoding the encoded text must give back a value equal
-- to the one we started with.
roundTripSimple
  :: ( Eq b
     , Monad f
     , CharParsing f
     , Monad g
     , Show e
     )
  => (forall a. f a -> Text -> Either e a)
  -> Encoder g b
  -> Decoder g b
  -> b
  -> g (Either (DecodeError, CursorHistory) Bool)
roundTripSimple parseFn enc dec input =
  E.simpleEncodeTextNoSpaces enc input >>= \encoded ->
    fmap (fmap (== input)) (D.decodeFromText parseFn dec (TextL.toStrict encoded))
| null | https://raw.githubusercontent.com/haskell-waargonaut/waargonaut/ba1dbbc170c2279749ea29bc8aaf375bdb659ad2/src/Waargonaut/Test.hs | haskell | # LANGUAGE RankNTypes #
| Helper functions for testing your 'Decoder' and 'Encoder' functions.
| Test a 'Encoder' and 'Decoder' pair are able to maintain the "round trip"
property. That is, if you encode a given value, and then decode it, you should
have the exact same value that you started with. | module Waargonaut.Test
( roundTripSimple
) where
import Data.Text (Text)
import qualified Data.Text.Lazy as TextL
import Text.Parser.Char (CharParsing)
import Waargonaut.Encode (Encoder)
import qualified Waargonaut.Encode as E
import Waargonaut.Decode (CursorHistory, Decoder)
import qualified Waargonaut.Decode as D
import Waargonaut.Decode.Error (DecodeError)
roundTripSimple
:: ( Eq b
, Monad f
, CharParsing f
, Monad g
, Show e
)
=> (forall a. f a -> Text -> Either e a)
-> Encoder g b
-> Decoder g b
-> b
-> g (Either (DecodeError, CursorHistory) Bool)
roundTripSimple f e d a = do
encodedA <- E.simpleEncodeTextNoSpaces e a
fmap (== a) <$> D.decodeFromText f d (TextL.toStrict encodedA)
|
db59ea4b2b559ea73622244dfa2c3a1deb1977bdd9eb8c604ea7bfa2f833c12a | haskell-opengl/OpenGL | DisplayLists.hs | --------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.GL.DisplayLists
Copyright : ( c ) 2002 - 2019
-- License : BSD3
--
Maintainer : < >
-- Stability : stable
-- Portability : portable
--
This module corresponds to section 5.4 ( Display Lists ) of the OpenGL 2.1
-- specs.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.GL.DisplayLists (
-- * Defining Display Lists
DisplayList(DisplayList), ListMode(..), defineList, defineNewList, listIndex,
listMode, maxListNesting,
-- * Calling Display Lists
callList, callLists, listBase
) where
import Control.Monad.IO.Class
import Data.ObjectName
import Data.StateVar
import Foreign.Ptr ( Ptr )
import Graphics.Rendering.OpenGL.GL.DebugOutput
import Graphics.Rendering.OpenGL.GL.DataType
import Graphics.Rendering.OpenGL.GL.Exception
import Graphics.Rendering.OpenGL.GL.GLboolean
import Graphics.Rendering.OpenGL.GL.QueryUtils
import Graphics.Rendering.OpenGL.GLU.ErrorsInternal
import Graphics.GL
--------------------------------------------------------------------------------
-- | Handle for an OpenGL display list, wrapping the raw object id.
newtype DisplayList = DisplayList { displayListID :: GLuint }
   deriving ( Eq, Ord, Show )
instance ObjectName DisplayList where
   isObjectName = liftIO . fmap unmarshalGLboolean . glIsList . displayListID
   -- Consecutive ids are collapsed into runs so each run is freed with a
   -- single glDeleteLists call.
   deleteObjectNames =
      liftIO . mapM_ (uncurry glDeleteLists) . combineConsecutive
instance CanBeLabeled DisplayList where
   objectLabel = objectNameLabel GL_DISPLAY_LIST . displayListID
-- | Collapse a list of display lists into (first id, run length) pairs of
-- consecutive ids, preserving input order.
combineConsecutive :: [DisplayList] -> [(GLuint, GLsizei)]
combineConsecutive [] = []
combineConsecutive (z:zs) = (displayListID z, len) : combineConsecutive rest
   where (len, rest) = run (0 :: GLsizei) z zs
         -- The case on (n + 1) forces the length accumulator before
         -- recursing, avoiding a chain of thunks.
         run n x xs = case n + 1 of
            m -> case xs of
               [] -> (m, [])
               (y:ys) | x `isFollowedBy` y -> run m y ys
                      | otherwise -> (m, xs)
         DisplayList x `isFollowedBy` DisplayList y = x + 1 == y
instance GeneratableObjectName DisplayList where
   -- glGenLists hands back the first id of a consecutive block; the
   -- reserved id 0 ('noDisplayList') signals failure, which is reported
   -- as out-of-memory with an empty result instead of an exception.
   genObjectNames n = liftIO $ do
      first <- glGenLists (fromIntegral n)
      if DisplayList first == noDisplayList
         then do recordOutOfMemory
                 return []
         else return [ DisplayList l
                     | l <- [ first .. first + fromIntegral n - 1 ] ]
--------------------------------------------------------------------------------
-- | Compilation mode for 'defineList': record the GL commands only, or
-- record them while also executing them immediately.
data ListMode =
     Compile
   | CompileAndExecute
   deriving ( Eq, Ord, Show )
-- | Raw OpenGL enum value for a 'ListMode'.
marshalListMode :: ListMode -> GLenum
marshalListMode Compile           = GL_COMPILE
marshalListMode CompileAndExecute = GL_COMPILE_AND_EXECUTE
-- | Inverse of 'marshalListMode'; any other enum value is a protocol
-- error and aborts via 'error'.
unmarshalListMode :: GLenum -> ListMode
unmarshalListMode x =
   if x == GL_COMPILE
      then Compile
      else if x == GL_COMPILE_AND_EXECUTE
              then CompileAndExecute
              else error ("unmarshalListMode: illegal value " ++ show x)
--------------------------------------------------------------------------------
-- | Run the action while recording its GL commands into the given display
-- list: glNewList before, glEndList after; 'bracket_' guarantees the list
-- is closed even if the action throws.
defineList :: DisplayList -> ListMode -> IO a -> IO a
defineList dl mode =
   bracket_ (glNewList (displayListID dl) (marshalListMode mode)) glEndList
-- | Allocate a fresh display list, record the action into it, and hand
-- back the new list's name (the action's own result is discarded).
defineNewList :: ListMode -> IO a -> IO DisplayList
defineNewList mode action =
   genObjectName >>= \newList ->
      defineList newList mode action >> return newList
--------------------------------------------------------------------------------
-- | The display list currently under construction, if any (the reserved
-- id 0 is mapped to 'Nothing').
listIndex :: GettableStateVar (Maybe DisplayList)
listIndex =
   makeGettableStateVar
      (do l <- getEnum1 (DisplayList . fromIntegral) GetListIndex
          return $ if l == noDisplayList then Nothing else Just l)
-- | The reserved "no list" name, id 0.
noDisplayList :: DisplayList
noDisplayList = DisplayList 0
-- | The 'ListMode' of the display list currently under construction.
listMode :: GettableStateVar ListMode
listMode = makeGettableStateVar (getEnum1 unmarshalListMode GetListMode)
-- | Implementation limit on the nesting depth of display-list calls.
maxListNesting :: GettableStateVar GLsizei
maxListNesting = makeGettableStateVar (getSizei1 id GetMaxListNesting)
--------------------------------------------------------------------------------
-- | Execute a single display list.
callList :: DisplayList -> IO ()
callList = glCallList . displayListID
-- | Execute @n@ display lists whose ids are read from the given buffer,
-- interpreted according to the 'DataType' and offset by 'listBase'.
callLists :: GLsizei -> DataType -> Ptr a -> IO ()
callLists n = glCallLists n . marshalDataType
--------------------------------------------------------------------------------
-- | The base offset added to each id consumed by 'callLists'.
listBase :: StateVar DisplayList
listBase =
   makeStateVar
      (getEnum1 (DisplayList . fromIntegral) GetListBase)
      (glListBase . displayListID)
| null | https://raw.githubusercontent.com/haskell-opengl/OpenGL/f7af8fe04b0f19c260a85c9ebcad612737cd7c8c/src/Graphics/Rendering/OpenGL/GL/DisplayLists.hs | haskell | ------------------------------------------------------------------------------
|
Module : Graphics.Rendering.OpenGL.GL.DisplayLists
License : BSD3
Stability : stable
Portability : portable
specs.
------------------------------------------------------------------------------
* Defining Display Lists
* Calling Display Lists
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------ | Copyright : ( c ) 2002 - 2019
Maintainer : < >
This module corresponds to section 5.4 ( Display Lists ) of the OpenGL 2.1
module Graphics.Rendering.OpenGL.GL.DisplayLists (
DisplayList(DisplayList), ListMode(..), defineList, defineNewList, listIndex,
listMode, maxListNesting,
callList, callLists, listBase
) where
import Control.Monad.IO.Class
import Data.ObjectName
import Data.StateVar
import Foreign.Ptr ( Ptr )
import Graphics.Rendering.OpenGL.GL.DebugOutput
import Graphics.Rendering.OpenGL.GL.DataType
import Graphics.Rendering.OpenGL.GL.Exception
import Graphics.Rendering.OpenGL.GL.GLboolean
import Graphics.Rendering.OpenGL.GL.QueryUtils
import Graphics.Rendering.OpenGL.GLU.ErrorsInternal
import Graphics.GL
newtype DisplayList = DisplayList { displayListID :: GLuint }
deriving ( Eq, Ord, Show )
instance ObjectName DisplayList where
isObjectName = liftIO . fmap unmarshalGLboolean . glIsList . displayListID
deleteObjectNames =
liftIO . mapM_ (uncurry glDeleteLists) . combineConsecutive
instance CanBeLabeled DisplayList where
objectLabel = objectNameLabel GL_DISPLAY_LIST . displayListID
combineConsecutive :: [DisplayList] -> [(GLuint, GLsizei)]
combineConsecutive [] = []
combineConsecutive (z:zs) = (displayListID z, len) : combineConsecutive rest
where (len, rest) = run (0 :: GLsizei) z zs
run n x xs = case n + 1 of
m -> case xs of
[] -> (m, [])
(y:ys) | x `isFollowedBy` y -> run m y ys
| otherwise -> (m, xs)
DisplayList x `isFollowedBy` DisplayList y = x + 1 == y
instance GeneratableObjectName DisplayList where
genObjectNames n = liftIO $ do
first <- glGenLists (fromIntegral n)
if DisplayList first == noDisplayList
then do recordOutOfMemory
return []
else return [ DisplayList l
| l <- [ first .. first + fromIntegral n - 1 ] ]
data ListMode =
Compile
| CompileAndExecute
deriving ( Eq, Ord, Show )
marshalListMode :: ListMode -> GLenum
marshalListMode x = case x of
Compile -> GL_COMPILE
CompileAndExecute -> GL_COMPILE_AND_EXECUTE
unmarshalListMode :: GLenum -> ListMode
unmarshalListMode x
| x == GL_COMPILE = Compile
| x == GL_COMPILE_AND_EXECUTE = CompileAndExecute
| otherwise = error ("unmarshalListMode: illegal value " ++ show x)
defineList :: DisplayList -> ListMode -> IO a -> IO a
defineList dl mode =
bracket_ (glNewList (displayListID dl) (marshalListMode mode)) glEndList
defineNewList :: ListMode -> IO a -> IO DisplayList
defineNewList mode action = do
lst <- genObjectName
_ <- defineList lst mode action
return lst
listIndex :: GettableStateVar (Maybe DisplayList)
listIndex =
makeGettableStateVar
(do l <- getEnum1 (DisplayList . fromIntegral) GetListIndex
return $ if l == noDisplayList then Nothing else Just l)
noDisplayList :: DisplayList
noDisplayList = DisplayList 0
listMode :: GettableStateVar ListMode
listMode = makeGettableStateVar (getEnum1 unmarshalListMode GetListMode)
maxListNesting :: GettableStateVar GLsizei
maxListNesting = makeGettableStateVar (getSizei1 id GetMaxListNesting)
callList :: DisplayList -> IO ()
callList = glCallList . displayListID
callLists :: GLsizei -> DataType -> Ptr a -> IO ()
callLists n = glCallLists n . marshalDataType
listBase :: StateVar DisplayList
listBase =
makeStateVar
(getEnum1 (DisplayList . fromIntegral) GetListBase)
(glListBase . displayListID)
|
2a706c551bbb80e40edaa92f4116584dba0e2de1e876290bbe18f78fa7c53e03 | facebook/duckling | Rules.hs | Copyright ( c ) 2016 - present , Facebook , Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE GADTs #-}
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
module Duckling.AmountOfMoney.KA.Rules
( rules
) where
import Data.Maybe
import Data.String
import Prelude
import qualified Data.Text as Text
import Duckling.AmountOfMoney.Helpers
import Duckling.AmountOfMoney.Types (Currency(..), AmountOfMoneyData (..))
import Duckling.Dimensions.Types
import Duckling.Numeral.Helpers (isNatural, isPositive)
import Duckling.Numeral.Types (NumeralData (..))
import Duckling.Regex.Types
import Duckling.Types
import qualified Duckling.AmountOfMoney.Types as TAmountOfMoney
import qualified Duckling.Numeral.Types as TNumeral
ruleUnitAmount :: Rule
ruleUnitAmount = Rule
{ name = "<unit> <amount>"
, pattern =
[ Predicate isCurrencyOnly
, Predicate isPositive
]
, prod = \case
(Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.currency = c}:
Token Numeral NumeralData{TNumeral.value = v}:
_) -> Just . Token AmountOfMoney . withValue v $ currencyOnly c
_ -> Nothing
}
rulePounds :: Rule
rulePounds = Rule
{ name = "£"
, pattern =
[ regex "(გირვანქა )?სტერლინგი?ს?(ად)?თ?|(გირვანქა )?ფუნ?ტი?ს?(ად)?თ?"
]
, prod = \_ -> Just . Token AmountOfMoney $ currencyOnly Pound
}
ruleOtherPounds :: Rule
ruleOtherPounds = Rule
{ name = "other pounds"
, pattern =
[ regex "(ეგვიპტური|ლიბანური) ?ფუნ?ტი?ს?(ად)?თ?"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (match:_)):_) -> case Text.toLower match of
"ეგვიპტური" -> Just . Token AmountOfMoney $ currencyOnly EGP
"ლიბანური" -> Just . Token AmountOfMoney $ currencyOnly LBP
_ -> Nothing
_ -> Nothing
}
ruleRiyals :: Rule
ruleRiyals = Rule
{ name = "riyals"
, pattern =
[ regex "(კატარული|საუდის არაბული) ?რიალი?"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (match:_)):_) -> case Text.toLower match of
"კატარული" -> Just . Token AmountOfMoney $ currencyOnly QAR
"საუდის არაბული" -> Just . Token AmountOfMoney $ currencyOnly SAR
_ -> Nothing
_ -> Nothing
}
ruleDinars :: Rule
ruleDinars = Rule
{ name = "dinars"
, pattern =
[ regex "(ქუვეითური)? ?დინარი?"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (match:_)):_) -> case Text.toLower match of
"ქუვეითური" -> Just . Token AmountOfMoney $ currencyOnly KWD
_ -> Nothing
_ -> Nothing
}
ruleDirham :: Rule
ruleDirham = Rule
{ name = "დირჰემი"
, pattern =
[ regex "დირჰემი?"
]
, prod = \_ -> Just . Token AmountOfMoney $ currencyOnly AED
}
ruleRinggit :: Rule
ruleRinggit = Rule
{ name = "ringgit"
, pattern =
[ regex "(მალაიზი?ური? )?რინგიტი?"
]
, prod = \_ -> Just . Token AmountOfMoney $ currencyOnly MYR
}
ruleCent :: Rule
ruleCent = Rule
{ name = "cent"
, pattern =
[ regex "ცენტი?ს?(ად)?თ?|თეთრი?ს?(ად)?თ?|პენსი?ს?(ად)?თ?|ევროცენტი?ს?(ად)?თ?"
]
, prod = \_ -> Just . Token AmountOfMoney $ currencyOnly Cent
}
ruleLari :: Rule
ruleLari = Rule
{ name = "Lari"
, pattern =
[ regex "(ლარი?ს?(ად)?თ?)|GEL"
]
, prod = \_ -> Just . Token AmountOfMoney $ currencyOnly GEL
}
ruleDollars :: Rule
ruleDollars = Rule
{ name = "Dollars"
, pattern =
[ regex "დოლარი?ს?(ად)?თ?"
]
, prod = \_ -> Just . Token AmountOfMoney $ currencyOnly Dollar
}
ruleEuros :: Rule
ruleEuros = Rule
{ name = "Euros"
, pattern =
[ regex "ევრო?ს?დ?თ?"
]
, prod = \_ -> Just . Token AmountOfMoney $ currencyOnly EUR
}
ruleACurrency :: Rule
ruleACurrency = Rule
{ name = "a <currency>"
, pattern =
[ Predicate isCurrencyOnly
]
, prod = \tokens -> case tokens of
(Token AmountOfMoney fd:
_) -> Just . Token AmountOfMoney $ fd {TAmountOfMoney.value = Just 1}
_ -> Nothing
}
ruleIntersectAndXCents :: Rule
ruleIntersectAndXCents = Rule
{ name = "intersect (and X cents)"
, pattern =
[ Predicate isWithoutCents
, regex "და"
, Predicate isCents
]
, prod = \tokens -> case tokens of
(Token AmountOfMoney fd:
_:
Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just c}:
_) -> Just . Token AmountOfMoney $ withCents c fd
_ -> Nothing
}
ruleIntersectAndNumeral :: Rule
ruleIntersectAndNumeral = Rule
{ name = "intersect (and number)"
, pattern =
[ Predicate isWithoutCents
, regex "და"
, Predicate isNatural
]
, prod = \tokens -> case tokens of
(Token AmountOfMoney fd:
_:
Token Numeral NumeralData{TNumeral.value = c}:
_) -> Just . Token AmountOfMoney $ withCents c fd
_ -> Nothing
}
ruleIntersectXCents :: Rule
ruleIntersectXCents = Rule
{ name = "intersect (X cents)"
, pattern =
[ Predicate isWithoutCents
, Predicate isCents
]
, prod = \tokens -> case tokens of
(Token AmountOfMoney fd:
Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just c}:
_) -> Just . Token AmountOfMoney $ withCents c fd
_ -> Nothing
}
rulePrecision :: Rule
rulePrecision = Rule
{ name = "about|exactly <amount-of-money>"
, pattern =
[ regex "ზუსტად|იმენა|დაახლოებით|გძეტა"
, Predicate isMoneyWithValue
]
, prod = \tokens -> case tokens of
(_:token:_) -> Just token
_ -> Nothing
}
ruleIntervalBetweenNumeral :: Rule
ruleIntervalBetweenNumeral = Rule
{ name = "between|from <numeral> to|and <amount-of-money>"
, pattern =
[ Predicate isPositive
, regex "-დან|დან"
, Predicate isSimpleAmountOfMoney
, regex "-ა?მდე|ა?მდე"
]
, prod = \tokens -> case tokens of
(Token Numeral NumeralData{TNumeral.value = from}:
_:
Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to,
TAmountOfMoney.currency = c}:
_) | from < to ->
Just . Token AmountOfMoney . withInterval (from, to) $ currencyOnly c
_ -> Nothing
}
ruleIntervalBetween :: Rule
ruleIntervalBetween = Rule
{ name = "between|from <amount-of-money> to|and <amount-of-money>"
, pattern =
[ Predicate isSimpleAmountOfMoney
, regex "-დან|დან"
, Predicate isSimpleAmountOfMoney
, regex "-ა?მდე|ა?მდე"
]
, prod = \tokens -> case tokens of
(Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just from,
TAmountOfMoney.currency = c1}:
_:
Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to,
TAmountOfMoney.currency = c2}:
_) | from < to && c1 == c2 ->
Just . Token AmountOfMoney . withInterval (from, to) $ currencyOnly c1
_ -> Nothing
}
ruleIntervalNumeralDash :: Rule
ruleIntervalNumeralDash = Rule
{ name = "<numeral> - <amount-of-money>"
, pattern =
[ Predicate isNatural
, regex "-"
, Predicate isSimpleAmountOfMoney
]
, prod = \tokens -> case tokens of
(Token Numeral NumeralData{TNumeral.value = from}:
_:
Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to,
TAmountOfMoney.currency = c}:
_) | from < to->
Just . Token AmountOfMoney . withInterval (from, to) $ currencyOnly c
_ -> Nothing
}
ruleIntervalDash :: Rule
ruleIntervalDash = Rule
{ name = "<amount-of-money> - <amount-of-money>"
, pattern =
[ Predicate isSimpleAmountOfMoney
, regex "-"
, Predicate isSimpleAmountOfMoney
]
, prod = \tokens -> case tokens of
(Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just from,
TAmountOfMoney.currency = c1}:
_:
Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to,
TAmountOfMoney.currency = c2}:
_) | from < to && c1 == c2 ->
Just . Token AmountOfMoney . withInterval (from, to) $ currencyOnly c1
_ -> Nothing
}
ruleIntervalMax :: Rule
ruleIntervalMax = Rule
{ name = "under/less/lower/no more than <amount-of-money>"
, pattern =
[ Predicate isSimpleAmountOfMoney
, regex "-ა?მდე|ა?მდე|(-ზე|ზე) ნაკლები"
]
, prod = \tokens -> case tokens of
(Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to,
TAmountOfMoney.currency = c}:
_) -> Just . Token AmountOfMoney . withMax to $ currencyOnly c
_ -> Nothing
}
ruleIntervalMin :: Rule
ruleIntervalMin = Rule
{ name = "over/above/at least/more than <amount-of-money>"
, pattern =
[ Predicate isSimpleAmountOfMoney
, regex "-დან|დან|(-ზე|ზე) მეტი"
]
, prod = \tokens -> case tokens of
(Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to,
TAmountOfMoney.currency = c}:
_) -> Just . Token AmountOfMoney . withMin to $ currencyOnly c
_ -> Nothing
}
rules :: [Rule]
rules =
[ ruleUnitAmount
, ruleACurrency
, ruleDollars
, ruleCent
, ruleEuros
, ruleDinars
, ruleDirham
, ruleIntersectAndNumeral
, ruleIntersectAndXCents
, ruleIntersectXCents
, ruleIntervalBetweenNumeral
, ruleIntervalBetween
, ruleIntervalMax
, ruleIntervalMin
, ruleIntervalNumeralDash
, ruleIntervalDash
, ruleOtherPounds
, rulePounds
, rulePrecision
, ruleRinggit
, ruleRiyals
, ruleLari
]
| null | https://raw.githubusercontent.com/facebook/duckling/72f45e8e2c7385f41f2f8b1f063e7b5daa6dca94/Duckling/AmountOfMoney/KA/Rules.hs | haskell | All rights reserved.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree.
# LANGUAGE GADTs #
# LANGUAGE OverloadedStrings # | Copyright ( c ) 2016 - present , Facebook , Inc.
# LANGUAGE LambdaCase #
module Duckling.AmountOfMoney.KA.Rules
( rules
) where
import Data.Maybe
import Data.String
import Prelude
import qualified Data.Text as Text
import Duckling.AmountOfMoney.Helpers
import Duckling.AmountOfMoney.Types (Currency(..), AmountOfMoneyData (..))
import Duckling.Dimensions.Types
import Duckling.Numeral.Helpers (isNatural, isPositive)
import Duckling.Numeral.Types (NumeralData (..))
import Duckling.Regex.Types
import Duckling.Types
import qualified Duckling.AmountOfMoney.Types as TAmountOfMoney
import qualified Duckling.Numeral.Types as TNumeral
ruleUnitAmount :: Rule
ruleUnitAmount = Rule
{ name = "<unit> <amount>"
, pattern =
[ Predicate isCurrencyOnly
, Predicate isPositive
]
, prod = \case
(Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.currency = c}:
Token Numeral NumeralData{TNumeral.value = v}:
_) -> Just . Token AmountOfMoney . withValue v $ currencyOnly c
_ -> Nothing
}
rulePounds :: Rule
rulePounds = Rule
{ name = "£"
, pattern =
[ regex "(გირვანქა )?სტერლინგი?ს?(ად)?თ?|(გირვანქა )?ფუნ?ტი?ს?(ად)?თ?"
]
, prod = \_ -> Just . Token AmountOfMoney $ currencyOnly Pound
}
ruleOtherPounds :: Rule
ruleOtherPounds = Rule
{ name = "other pounds"
, pattern =
[ regex "(ეგვიპტური|ლიბანური) ?ფუნ?ტი?ს?(ად)?თ?"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (match:_)):_) -> case Text.toLower match of
"ეგვიპტური" -> Just . Token AmountOfMoney $ currencyOnly EGP
"ლიბანური" -> Just . Token AmountOfMoney $ currencyOnly LBP
_ -> Nothing
_ -> Nothing
}
ruleRiyals :: Rule
ruleRiyals = Rule
{ name = "riyals"
, pattern =
[ regex "(კატარული|საუდის არაბული) ?რიალი?"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (match:_)):_) -> case Text.toLower match of
"კატარული" -> Just . Token AmountOfMoney $ currencyOnly QAR
"საუდის არაბული" -> Just . Token AmountOfMoney $ currencyOnly SAR
_ -> Nothing
_ -> Nothing
}
ruleDinars :: Rule
ruleDinars = Rule
{ name = "dinars"
, pattern =
[ regex "(ქუვეითური)? ?დინარი?"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (match:_)):_) -> case Text.toLower match of
"ქუვეითური" -> Just . Token AmountOfMoney $ currencyOnly KWD
_ -> Nothing
_ -> Nothing
}
ruleDirham :: Rule
ruleDirham = Rule
{ name = "დირჰემი"
, pattern =
[ regex "დირჰემი?"
]
, prod = \_ -> Just . Token AmountOfMoney $ currencyOnly AED
}
ruleRinggit :: Rule
ruleRinggit = Rule
{ name = "ringgit"
, pattern =
[ regex "(მალაიზი?ური? )?რინგიტი?"
]
, prod = \_ -> Just . Token AmountOfMoney $ currencyOnly MYR
}
ruleCent :: Rule
ruleCent = Rule
{ name = "cent"
, pattern =
[ regex "ცენტი?ს?(ად)?თ?|თეთრი?ს?(ად)?თ?|პენსი?ს?(ად)?თ?|ევროცენტი?ს?(ად)?თ?"
]
, prod = \_ -> Just . Token AmountOfMoney $ currencyOnly Cent
}
ruleLari :: Rule
ruleLari = Rule
{ name = "Lari"
, pattern =
[ regex "(ლარი?ს?(ად)?თ?)|GEL"
]
, prod = \_ -> Just . Token AmountOfMoney $ currencyOnly GEL
}
ruleDollars :: Rule
ruleDollars = Rule
{ name = "Dollars"
, pattern =
[ regex "დოლარი?ს?(ად)?თ?"
]
, prod = \_ -> Just . Token AmountOfMoney $ currencyOnly Dollar
}
ruleEuros :: Rule
ruleEuros = Rule
{ name = "Euros"
, pattern =
[ regex "ევრო?ს?დ?თ?"
]
, prod = \_ -> Just . Token AmountOfMoney $ currencyOnly EUR
}
ruleACurrency :: Rule
ruleACurrency = Rule
{ name = "a <currency>"
, pattern =
[ Predicate isCurrencyOnly
]
, prod = \tokens -> case tokens of
(Token AmountOfMoney fd:
_) -> Just . Token AmountOfMoney $ fd {TAmountOfMoney.value = Just 1}
_ -> Nothing
}
ruleIntersectAndXCents :: Rule
ruleIntersectAndXCents = Rule
{ name = "intersect (and X cents)"
, pattern =
[ Predicate isWithoutCents
, regex "და"
, Predicate isCents
]
, prod = \tokens -> case tokens of
(Token AmountOfMoney fd:
_:
Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just c}:
_) -> Just . Token AmountOfMoney $ withCents c fd
_ -> Nothing
}
ruleIntersectAndNumeral :: Rule
ruleIntersectAndNumeral = Rule
{ name = "intersect (and number)"
, pattern =
[ Predicate isWithoutCents
, regex "და"
, Predicate isNatural
]
, prod = \tokens -> case tokens of
(Token AmountOfMoney fd:
_:
Token Numeral NumeralData{TNumeral.value = c}:
_) -> Just . Token AmountOfMoney $ withCents c fd
_ -> Nothing
}
ruleIntersectXCents :: Rule
ruleIntersectXCents = Rule
{ name = "intersect (X cents)"
, pattern =
[ Predicate isWithoutCents
, Predicate isCents
]
, prod = \tokens -> case tokens of
(Token AmountOfMoney fd:
Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just c}:
_) -> Just . Token AmountOfMoney $ withCents c fd
_ -> Nothing
}
rulePrecision :: Rule
rulePrecision = Rule
{ name = "about|exactly <amount-of-money>"
, pattern =
[ regex "ზუსტად|იმენა|დაახლოებით|გძეტა"
, Predicate isMoneyWithValue
]
, prod = \tokens -> case tokens of
(_:token:_) -> Just token
_ -> Nothing
}
ruleIntervalBetweenNumeral :: Rule
ruleIntervalBetweenNumeral = Rule
{ name = "between|from <numeral> to|and <amount-of-money>"
, pattern =
[ Predicate isPositive
, regex "-დან|დან"
, Predicate isSimpleAmountOfMoney
, regex "-ა?მდე|ა?მდე"
]
, prod = \tokens -> case tokens of
(Token Numeral NumeralData{TNumeral.value = from}:
_:
Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to,
TAmountOfMoney.currency = c}:
_) | from < to ->
Just . Token AmountOfMoney . withInterval (from, to) $ currencyOnly c
_ -> Nothing
}
ruleIntervalBetween :: Rule
ruleIntervalBetween = Rule
{ name = "between|from <amount-of-money> to|and <amount-of-money>"
, pattern =
[ Predicate isSimpleAmountOfMoney
, regex "-დან|დან"
, Predicate isSimpleAmountOfMoney
, regex "-ა?მდე|ა?მდე"
]
, prod = \tokens -> case tokens of
(Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just from,
TAmountOfMoney.currency = c1}:
_:
Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to,
TAmountOfMoney.currency = c2}:
_) | from < to && c1 == c2 ->
Just . Token AmountOfMoney . withInterval (from, to) $ currencyOnly c1
_ -> Nothing
}
ruleIntervalNumeralDash :: Rule
ruleIntervalNumeralDash = Rule
{ name = "<numeral> - <amount-of-money>"
, pattern =
[ Predicate isNatural
, regex "-"
, Predicate isSimpleAmountOfMoney
]
, prod = \tokens -> case tokens of
(Token Numeral NumeralData{TNumeral.value = from}:
_:
Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to,
TAmountOfMoney.currency = c}:
_) | from < to->
Just . Token AmountOfMoney . withInterval (from, to) $ currencyOnly c
_ -> Nothing
}
ruleIntervalDash :: Rule
ruleIntervalDash = Rule
{ name = "<amount-of-money> - <amount-of-money>"
, pattern =
[ Predicate isSimpleAmountOfMoney
, regex "-"
, Predicate isSimpleAmountOfMoney
]
, prod = \tokens -> case tokens of
(Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just from,
TAmountOfMoney.currency = c1}:
_:
Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to,
TAmountOfMoney.currency = c2}:
_) | from < to && c1 == c2 ->
Just . Token AmountOfMoney . withInterval (from, to) $ currencyOnly c1
_ -> Nothing
}
ruleIntervalMax :: Rule
ruleIntervalMax = Rule
{ name = "under/less/lower/no more than <amount-of-money>"
, pattern =
[ Predicate isSimpleAmountOfMoney
, regex "-ა?მდე|ა?მდე|(-ზე|ზე) ნაკლები"
]
, prod = \tokens -> case tokens of
(Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to,
TAmountOfMoney.currency = c}:
_) -> Just . Token AmountOfMoney . withMax to $ currencyOnly c
_ -> Nothing
}
ruleIntervalMin :: Rule
ruleIntervalMin = Rule
{ name = "over/above/at least/more than <amount-of-money>"
, pattern =
[ Predicate isSimpleAmountOfMoney
, regex "-დან|დან|(-ზე|ზე) მეტი"
]
, prod = \tokens -> case tokens of
(Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to,
TAmountOfMoney.currency = c}:
_) -> Just . Token AmountOfMoney . withMin to $ currencyOnly c
_ -> Nothing
}
rules :: [Rule]
rules =
[ ruleUnitAmount
, ruleACurrency
, ruleDollars
, ruleCent
, ruleEuros
, ruleDinars
, ruleDirham
, ruleIntersectAndNumeral
, ruleIntersectAndXCents
, ruleIntersectXCents
, ruleIntervalBetweenNumeral
, ruleIntervalBetween
, ruleIntervalMax
, ruleIntervalMin
, ruleIntervalNumeralDash
, ruleIntervalDash
, ruleOtherPounds
, rulePounds
, rulePrecision
, ruleRinggit
, ruleRiyals
, ruleLari
]
|
955be4e1cfa9ddefaaed4819e56ce1e12675997251410ff53d1ead24426cf094 | ocamllabs/vscode-ocaml-platform | platform.ml | open Import
type t =
| Win32
| Darwin
| Linux
| Other
let of_string = function
| "win32" -> Win32
| "darwin" -> Darwin
| "linux" -> Linux
| _ -> Other
let t = of_string Process.platform
module Map = struct
type 'a t =
{ win32 : 'a
; darwin : 'a
; linux : 'a
; other : 'a
}
let find { win32; darwin; linux; other } = function
| Win32 -> win32
| Darwin -> darwin
| Linux -> linux
| Other -> other
end
type arch =
| Arm
| Arm64
| Ia32
| Mips
| Mipsel
| Ppc
| Ppc64
| S390
| S390x
| X32
| X64
let arch_of_string = function
| "arm" -> Arm
| "arm64" -> Arm64
| "ia32" -> Ia32
| "mips" -> Mips
| "mipsel" -> Mipsel
| "ppc" -> Ppc
| "ppc64" -> Ppc64
| "s390" -> S390
| "s390x" -> S390x
| "x32" -> X32
| "x64" -> X64
| _ -> assert false
let arch = Node.Process.arch |> arch_of_string
type shell =
| Sh of Path.t
| PowerShell of Path.t
let shell =
let sh = Sh (Path.of_string "/bin/sh") in
let powershell =
PowerShell
(Path.of_string
"C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe")
in
Map.find { win32 = powershell; darwin = sh; linux = sh; other = sh } t
| null | https://raw.githubusercontent.com/ocamllabs/vscode-ocaml-platform/300e2be30839a05c79218af4d7e4a87818fb368e/src/platform.ml | ocaml | open Import
type t =
| Win32
| Darwin
| Linux
| Other
let of_string = function
| "win32" -> Win32
| "darwin" -> Darwin
| "linux" -> Linux
| _ -> Other
let t = of_string Process.platform
module Map = struct
type 'a t =
{ win32 : 'a
; darwin : 'a
; linux : 'a
; other : 'a
}
let find { win32; darwin; linux; other } = function
| Win32 -> win32
| Darwin -> darwin
| Linux -> linux
| Other -> other
end
type arch =
| Arm
| Arm64
| Ia32
| Mips
| Mipsel
| Ppc
| Ppc64
| S390
| S390x
| X32
| X64
let arch_of_string = function
| "arm" -> Arm
| "arm64" -> Arm64
| "ia32" -> Ia32
| "mips" -> Mips
| "mipsel" -> Mipsel
| "ppc" -> Ppc
| "ppc64" -> Ppc64
| "s390" -> S390
| "s390x" -> S390x
| "x32" -> X32
| "x64" -> X64
| _ -> assert false
let arch = Node.Process.arch |> arch_of_string
type shell =
| Sh of Path.t
| PowerShell of Path.t
let shell =
let sh = Sh (Path.of_string "/bin/sh") in
let powershell =
PowerShell
(Path.of_string
"C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe")
in
Map.find { win32 = powershell; darwin = sh; linux = sh; other = sh } t
| |
152f5962e1e62e0d7ed6b9adbf274a31a4e7feca018a118af8095ed775c156e7 | janestreet/async_smtp | test_helper.mli | open! Core
open! Async
open Async_smtp_types
(** This module provides helpers for writing expect tests for testing
[Async_smtp] and [Async_smtp.Server] plugins.
*)
type 'a smtp_flags =
?tls:bool
* [ tls ] pretends that START_TLS was negotiated successfully ( default : false )
-> 'a
type 'a server_flags =
?max_message_size:Byte_units.t
(** [max_message_size] limits the size of messages accepted by the server.
(default: no practical limit) *)
-> ?malformed_emails:[ `Reject | `Wrap ]
(** [malformed_emails] indicates how a malformed email should be handled.
(default: [`Reject]) *)
-> ?server_log:[ Log.Level.t | `None ]
* [ ] controls the amount of detail logged from the server logic , excluding
the plugins . This is usually not relevant to tests and generates a lot of noise .
( default : ` None )
the plugins. This is usually not relevant to tests and generates a lot of noise.
(default: `None) *)
-> ?plugin:(module Server.Plugin.S with type State.t = unit)
(** Provide a custom [Server.Plugin.S] with custom logic.
(default: Server.Plugin.Simple) *)
-> ?plugin_log:[ Log.Level.t | `None ]
(** [plugin_log] controls the log level from the plugin logic. (default: `Debug) *)
-> 'a
type 'a client_flags =
?credentials:Credentials.t
(** Client authentication [credentials] (default: None - no authentication) *)
-> ?client_greeting:string
* [ client_greeting ] specifies the HELO / EHLO greeting to send .
( default : " [ SMTP TEST CLIENT ] " )
(default: "[SMTP TEST CLIENT]") *)
-> ?client_log:[ Log.Level.t | `None ]
(** [client_log] controls the log level from the client logic.
This is usually not relevant to tests and a lot of noise.
(default: `None) *)
-> 'a
(** Helper for creating SMTP Envelopes *)
val envelope
: ?sender:string
-> ?recipients:string list
-> ?data:string
-> unit
-> Smtp_envelope.t
* Attempt to send the given envelope to a dummy server .
Expect test output will be the SMTP session transcript with the following
format :
< EHLO Client
> 200 Server Response
Custom plugin output
Expect test output will be the SMTP session transcript with the following
format:
< EHLO Client
> 200 Server Response
Custom plugin output *)
val smtp : (Smtp_envelope.t list -> unit Deferred.t) client_flags server_flags smtp_flags
* Like [ smtp ] but instead of the mailcore client you describe the client behaviour
allowing testing server behaviour in edge cases .
Use [ client ] to submit requests to the server , and [ server ] to document the expected
responses .
Example
{ [
manual_client
( fun ~client ~server - >
server " 220 [ SMTP TEST SERVER ] "
> > = fun ( ) - >
client " EHLO test "
> > = fun ( ) - >
server " 250 - Ok : Continue , extensions follow:\n\
250 8BITMIME "
> > = fun ( ) - >
client " RESET "
> > = fun ( ) - >
server " 250 Ok : continue "
> > = fun ( ) - >
client " QUIT "
> > = fun ( ) - >
server " 221 closing connection "
)
] }
allowing testing server behaviour in edge cases.
Use [client] to submit requests to the server, and [server] to document the expected
responses.
Example
{[
manual_client
(fun ~client ~server ->
server "220 [SMTP TEST SERVER]"
>>= fun () ->
client "EHLO test"
>>= fun () ->
server "250-Ok: Continue, extensions follow:\n\
250 8BITMIME"
>>= fun () ->
client "RESET"
>>= fun () ->
server "250 Ok: continue"
>>= fun () ->
client "QUIT"
>>= fun () ->
server "221 closing connection"
)
]}
*)
val manual_client
: ((client:(string -> unit Deferred.t)
-> server:(string -> unit Deferred.t)
-> unit Deferred.t)
-> unit Deferred.t)
server_flags
smtp_flags
(** Like [manual_client] but you provide the server side of the protocol.
Use [client] to document expected requests, and [server] to send the responses. *)
val manual_server
: (Smtp_envelope.t list
-> (client:(string -> unit Deferred.t)
-> server:(string -> unit Deferred.t)
-> unit Deferred.t)
-> unit Deferred.t)
client_flags
smtp_flags
| null | https://raw.githubusercontent.com/janestreet/async_smtp/c2c1f8b7b27f571a99d2f21e8a31ce150fbd6ced/src/test_helper.mli | ocaml | * This module provides helpers for writing expect tests for testing
[Async_smtp] and [Async_smtp.Server] plugins.
* [max_message_size] limits the size of messages accepted by the server.
(default: no practical limit)
* [malformed_emails] indicates how a malformed email should be handled.
(default: [`Reject])
* Provide a custom [Server.Plugin.S] with custom logic.
(default: Server.Plugin.Simple)
* [plugin_log] controls the log level from the plugin logic. (default: `Debug)
* Client authentication [credentials] (default: None - no authentication)
* [client_log] controls the log level from the client logic.
This is usually not relevant to tests and a lot of noise.
(default: `None)
* Helper for creating SMTP Envelopes
* Like [manual_client] but you provide the server side of the protocol.
Use [client] to document expected requests, and [server] to send the responses. | open! Core
open! Async
open Async_smtp_types
type 'a smtp_flags =
?tls:bool
* [ tls ] pretends that START_TLS was negotiated successfully ( default : false )
-> 'a
type 'a server_flags =
?max_message_size:Byte_units.t
-> ?malformed_emails:[ `Reject | `Wrap ]
-> ?server_log:[ Log.Level.t | `None ]
* [ ] controls the amount of detail logged from the server logic , excluding
the plugins . This is usually not relevant to tests and generates a lot of noise .
( default : ` None )
the plugins. This is usually not relevant to tests and generates a lot of noise.
(default: `None) *)
-> ?plugin:(module Server.Plugin.S with type State.t = unit)
-> ?plugin_log:[ Log.Level.t | `None ]
-> 'a
type 'a client_flags =
?credentials:Credentials.t
-> ?client_greeting:string
* [ client_greeting ] specifies the HELO / EHLO greeting to send .
( default : " [ SMTP TEST CLIENT ] " )
(default: "[SMTP TEST CLIENT]") *)
-> ?client_log:[ Log.Level.t | `None ]
-> 'a
val envelope
: ?sender:string
-> ?recipients:string list
-> ?data:string
-> unit
-> Smtp_envelope.t
* Attempt to send the given envelope to a dummy server .
Expect test output will be the SMTP session transcript with the following
format :
< EHLO Client
> 200 Server Response
Custom plugin output
Expect test output will be the SMTP session transcript with the following
format:
< EHLO Client
> 200 Server Response
Custom plugin output *)
val smtp : (Smtp_envelope.t list -> unit Deferred.t) client_flags server_flags smtp_flags
* Like [ smtp ] but instead of the mailcore client you describe the client behaviour
allowing testing server behaviour in edge cases .
Use [ client ] to submit requests to the server , and [ server ] to document the expected
responses .
Example
{ [
manual_client
( fun ~client ~server - >
server " 220 [ SMTP TEST SERVER ] "
> > = fun ( ) - >
client " EHLO test "
> > = fun ( ) - >
server " 250 - Ok : Continue , extensions follow:\n\
250 8BITMIME "
> > = fun ( ) - >
client " RESET "
> > = fun ( ) - >
server " 250 Ok : continue "
> > = fun ( ) - >
client " QUIT "
> > = fun ( ) - >
server " 221 closing connection "
)
] }
allowing testing server behaviour in edge cases.
Use [client] to submit requests to the server, and [server] to document the expected
responses.
Example
{[
manual_client
(fun ~client ~server ->
server "220 [SMTP TEST SERVER]"
>>= fun () ->
client "EHLO test"
>>= fun () ->
server "250-Ok: Continue, extensions follow:\n\
250 8BITMIME"
>>= fun () ->
client "RESET"
>>= fun () ->
server "250 Ok: continue"
>>= fun () ->
client "QUIT"
>>= fun () ->
server "221 closing connection"
)
]}
*)
val manual_client
: ((client:(string -> unit Deferred.t)
-> server:(string -> unit Deferred.t)
-> unit Deferred.t)
-> unit Deferred.t)
server_flags
smtp_flags
val manual_server
: (Smtp_envelope.t list
-> (client:(string -> unit Deferred.t)
-> server:(string -> unit Deferred.t)
-> unit Deferred.t)
-> unit Deferred.t)
client_flags
smtp_flags
|
90cc749385173479e8f001108795383baf8b38e00a0d9372b664c88292b515f0 | luminus-framework/luminus-template | auth_jwe.clj | (ns leiningen.new.auth-jwe
(:require [leiningen.new.common :refer :all]))
(defn auth-jwe-features [[assets options :as state]]
(if (some #{"+auth-jwe"} (:features options))
[assets
(-> options
(append-formatted :auth-jwe
[['buddy.auth.backends.token :refer ['jwe-backend]]
['buddy.sign.jwt :refer ['encrypt]]
['buddy.core.nonce :refer ['random-bytes]]]
plugin-indent))]
state))
| null | https://raw.githubusercontent.com/luminus-framework/luminus-template/3278aa727cef0a173ed3ca722dfd6afa6b4bbc8f/src/leiningen/new/auth_jwe.clj | clojure | (ns leiningen.new.auth-jwe
(:require [leiningen.new.common :refer :all]))
(defn auth-jwe-features [[assets options :as state]]
(if (some #{"+auth-jwe"} (:features options))
[assets
(-> options
(append-formatted :auth-jwe
[['buddy.auth.backends.token :refer ['jwe-backend]]
['buddy.sign.jwt :refer ['encrypt]]
['buddy.core.nonce :refer ['random-bytes]]]
plugin-indent))]
state))
| |
24929333e282175966cbf8f7f3db727860d32379021d95d8979988eef1553360 | TrustInSoft/tis-interpreter | oneret.ml | Modified by TrustInSoft
(****************************************************************************)
(* *)
Copyright ( C ) 2001 - 2003
< >
(* Scott McPeak <> *)
< >
< >
(* All rights reserved. *)
(* *)
(* Redistribution and use in source and binary forms, with or without *)
(* modification, are permitted provided that the following conditions *)
(* are met: *)
(* *)
1 . Redistributions of source code must retain the above copyright
(* notice, this list of conditions and the following disclaimer. *)
(* *)
2 . Redistributions in binary form must reproduce the above copyright
(* notice, this list of conditions and the following disclaimer in the *)
(* documentation and/or other materials provided with the distribution. *)
(* *)
3 . The names of the contributors may not be used to endorse or
(* promote products derived from this software without specific prior *)
(* written permission. *)
(* *)
(* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS *)
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
(* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS *)
(* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE *)
COPYRIGHT OWNER OR FOR ANY DIRECT , INDIRECT ,
INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING ,
BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ;
(* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER *)
CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT
(* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN *)
(* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE *)
(* POSSIBILITY OF SUCH DAMAGE. *)
(* *)
File modified by CEA ( Commissariat à l'énergie atomique et aux
(* énergies alternatives) *)
and INRIA ( Institut National de Recherche en Informatique
(* et Automatique). *)
(****************************************************************************)
open Cil_types
open Cil
open Logic_const
let adjust_assigns_clause loc var code_annot =
let change_result = object
inherit Cil.nopCilVisitor
method! vterm_lhost = function
| TResult _ -> ChangeTo (TVar var)
| TVar _ | TMem _ -> DoChildren
end
in
let change_term t = Cil.visitCilTerm change_result t in
let module M = struct exception Found end in
let check_var = object
inherit Cil.nopCilVisitor
method! vterm_lhost = function
| TVar v when Cil_datatype.Logic_var.equal var v -> raise M.Found
| TVar _ | TResult _ | TMem _ -> DoChildren
end
in
let contains_var l =
try ignore (Cil.visitCilAssigns check_var (Writes l)); false
with M.Found -> true
in
let change_from = function
| FromAny -> FromAny
| From l -> From (List.map Logic_const.refresh_identified_term l)
in
let adjust_lval (_,assigns as acc) (loc,from) =
if Logic_utils.contains_result loc.it_content then begin
true,
(Logic_const.new_identified_term (change_term loc.it_content),
change_from from)::assigns
end else acc
in
let adjust_clause b =
match b.b_assigns with
| WritesAny -> ()
| Writes l ->
if not (contains_var l) then begin
let (changed, a) = List.fold_left adjust_lval (false,l) l in
let a =
if changed then a
else
(Logic_const.new_identified_term (Logic_const.tvar ~loc var),
FromAny)
:: a
in
b.b_assigns <- Writes a
end
in
match code_annot with
| AStmtSpec (_,s) -> List.iter adjust_clause s.spec_behavior
| _ -> ()
let oneret (f: fundec) : unit =
let fname = f.svar.vname in
(* Get the return type *)
let retTyp =
match f.svar.vtype with
TFun(rt, _, _, _) -> rt
| _ ->
Kernel.fatal "Function %s does not have a function type" f.svar.vname
in
(* Does it return anything ? *)
let hasRet = match unrollType retTyp with TVoid _ -> false | _ -> true in
Memoize the return result variable . Use only if hasRet
let lastloc = ref Cil_datatype.Location.unknown in
let getRetVar =
let retVar : varinfo option ref = ref None in
fun () ->
match !retVar with
Some rv -> rv
| None -> begin
let rv = makeLocalVar f "__retres" retTyp in (* don't collide *)
retVar := Some rv;
rv
end
in
let convert_result p =
let vis = object
inherit Cil.nopCilVisitor
method! vterm_lhost = function
| TResult _ ->
let v = getRetVar () in
ChangeTo (TVar (cvar_to_lvar v))
| TMem _ | TVar _ -> DoChildren
end
in visitCilPredicateNamed vis p
in
let assert_of_returns ca =
match ca.annot_content with
| AAssert _ | AInvariant _ | AVariant _ | AAssigns _ | AAllocation _ | APragma _ -> ptrue
| AStmtSpec (_bhvs,s) ->
let res =
List.fold_left
(fun acc bhv ->
pand
(acc,
pimplies
(pands
(List.map
(fun p ->
pold ~loc:p.ip_loc
(Logic_utils.named_of_identified_predicate p))
bhv.b_assumes),
pands
(List.fold_left
(fun acc (kind,p) ->
match kind with
Returns ->
Logic_utils.named_of_identified_predicate p
:: acc
| Normal | Exits | Breaks | Continues -> acc)
[ptrue] bhv.b_post_cond)
)))
ptrue s.spec_behavior
in convert_result res
in
Remember if we have introduced 's
let haveGoto = ref false in
Memoize the return statement
let retStmt : stmt ref = ref dummyStmt in
let getRetStmt (_x: unit) : stmt =
if !retStmt == dummyStmt then begin
let sr =
CEA modified to have a good [ ! lastloc ]
let rec setLastLoc = function
| [] -> ()
| {skind=Block b} :: [] -> setLastLoc b.bstmts
| {skind=UnspecifiedSequence seq}::[] ->
setLastLoc (List.map (fun (x,_,_,_,_) -> x) seq)
| {skind= _} as s :: [] -> lastloc := Cil_datatype.Stmt.loc s
| {skind=_s} :: l -> setLastLoc l
in
setLastLoc f.sbody.bstmts; !lastloc
in
let loc = getLastLoc () in
(* Must create a statement *)
let rv =
if hasRet then
Some (new_exp ~loc (Lval(Var (getRetVar ()), NoOffset)))
else None
in
mkStmt (Return (rv, loc))
in retStmt := sr;
sr
end else
!retStmt
in
Stack of predicates that must hold in case of returns
( returns clause with \old transformed into \at(,L ) for a suitable L ) .
TODO : split that into behaviors and generates for foo , bar : assert instead
of plain assert .
(returns clause with \old transformed into \at(,L) for a suitable L).
TODO: split that into behaviors and generates for foo,bar: assert instead
of plain assert.
*)
let returns_clause_stack = Stack.create () in
let stmt_contract_stack = Stack.create () in
let rec popn n =
if n > 0 then begin
assert (not (Stack.is_empty returns_clause_stack));
ignore (Stack.pop returns_clause_stack);
ignore (Stack.pop stmt_contract_stack);
popn (n-1)
end
in
Now scan all the statements . Know if you are the main body of the
* function and be prepared to add new statements at the end .
* popstack indicates whether we should pop the stack after having analyzed
current statement . It is an int since nothing in ACSL prevents from having
multiple statement contracts on top of each other before finding an
actual statement ...
* function and be prepared to add new statements at the end.
* popstack indicates whether we should pop the stack after having analyzed
current statement. It is an int since nothing in ACSL prevents from having
multiple statement contracts on top of each other before finding an
actual statement...
*)
let rec scanStmts acc (mainbody: bool) popstack = function
| [] when mainbody -> (* We are at the end of the function. Now it is
* time to add the return statement *)
let rs = getRetStmt () in
if !haveGoto then
rs.labels <- (Label("return_label", !lastloc, false)) :: rs.labels;
List.rev (rs :: acc)
| [] -> List.rev acc
| [{skind=Return (Some ({enode = Lval(Var _,NoOffset)}), _l)} as s]
when mainbody && not !haveGoto ->
(* We're not changing the return into goto, so returns clause will still
have effect.
*)
popn popstack;
List.rev (s::acc)
| ({skind=Return (retval, loc)} as s) :: rests ->
Cil.CurrentLoc.set loc;
ignore ( E.log " Fixing return(%a ) at % a\n "
insert
( match retval with None - > text " None "
| Some e - > d_exp ( ) e )
d_loc l ) ;
ignore (E.log "Fixing return(%a) at %a\n"
insert
(match retval with None -> text "None"
| Some e -> d_exp () e)
d_loc l);
*)
if hasRet && retval = None then
Kernel.fatal ~current:true
"Found return without value in function %s" fname;
if not hasRet && retval <> None then
Kernel.fatal ~current:true "Found return in subroutine %s" fname;
(* Keep this statement because it might have labels. But change it to
* an instruction that sets the return value (if any). *)
s.skind <- begin
match retval with
Some rval -> Instr (Set((Var (getRetVar ()), NoOffset), rval, loc))
| None -> Instr (Skip loc)
end;
let returns_assert = ref ptrue in
Stack.iter (fun p -> returns_assert := pand ~loc (p, !returns_assert))
returns_clause_stack;
(match retval with
| Some _ ->
Stack.iter
(adjust_assigns_clause loc (Cil.cvar_to_lvar (getRetVar())))
stmt_contract_stack;
| None -> () (* There's no \result: no need to adjust it *)
);
let add_assert res =
match !returns_assert with
{ content = Ptrue } -> res
| p ->
let a =
Logic_const.new_code_annotation (AAssert ([],p))
in
mkStmt (Instr(Code_annot (a,loc))) :: res
in
(* See if this is the last statement in function *)
if mainbody && rests == [] then begin
popn popstack;
scanStmts (add_assert (s::acc)) mainbody 0 rests
end else begin
Add a
let sgref = ref (getRetStmt ()) in
let sg = mkStmt (Goto (sgref, loc)) in
haveGoto := true;
popn popstack;
scanStmts (sg :: (add_assert (s::acc))) mainbody 0 rests
end
| ({skind=If(eb,t,e,l)} as s) :: rests ->
CEA currentLoc : = l ;
s.skind <- If(eb, scanBlock false t, scanBlock false e, l);
popn popstack;
scanStmts (s::acc) mainbody 0 rests
| ({skind=Loop(a,b,l,lb1,lb2)} as s) :: rests ->
CEA currentLoc : = l ;
s.skind <- Loop(a,scanBlock false b, l,lb1,lb2);
popn popstack;
scanStmts (s::acc) mainbody 0 rests
| ({skind=Switch(e, b, cases, l)} as s) :: rests ->
CEA currentLoc : = l ;
s.skind <- Switch(e, scanBlock false b, cases, l);
popn popstack;
scanStmts (s::acc) mainbody 0 rests
| [{skind=Block b} as s] ->
s.skind <- Block (scanBlock mainbody b);
popn popstack;
List.rev (s::acc)
| ({skind=Block b} as s) :: rests ->
s.skind <- Block (scanBlock false b);
popn popstack;
scanStmts (s::acc) mainbody 0 rests
| [{skind = UnspecifiedSequence seq} as s] ->
s.skind <-
UnspecifiedSequence
(List.concat
(List.map (fun (s,m,w,r,c) ->
let res = scanStmts [] mainbody 0 [s] in
(List.hd res,m,w,r,c)::
(List.map (fun x -> x,[],[],[],[]) (List.tl res)))
seq));
popn popstack;
List.rev (s::acc)
| ({skind = UnspecifiedSequence seq} as s) :: rests ->
s.skind <-
UnspecifiedSequence
(List.concat
(List.map (fun (s,m,w,r,c) ->
let res = scanStmts [] false 0 [s] in
(List.hd res,m,w,r,c)::
(List.map (fun x -> x,[],[],[],[]) (List.tl res)))
seq));
popn popstack;
scanStmts (s::acc) mainbody 0 rests
| {skind=Instr(Code_annot (ca,_))} as s :: rests ->
let returns = assert_of_returns ca in
let returns = Logic_utils.translate_old_label s returns in
Stack.push returns returns_clause_stack;
Stack.push ca.annot_content stmt_contract_stack;
scanStmts (s::acc) mainbody (popstack + 1) rests
| { skind = TryCatch(t,c,l) } as s :: rests ->
let scan_one_catch (e,b) = (e,scanBlock false b) in
let t = scanBlock false t in
let c = List.map scan_one_catch c in
s.skind <- TryCatch(t,c,l);
popn popstack;
scanStmts (s::acc) mainbody 0 rests
| ({skind=(Goto _ | Instr _ | Continue _ | Break _
| TryExcept _ | TryFinally _ | Throw _)} as s)
:: rests ->
popn popstack;
scanStmts (s::acc) mainbody 0 rests
and scanBlock (mainbody: bool) (b: block) =
{ b with bstmts = scanStmts [] mainbody 0 b.bstmts;}
in
CEA since CurrentLoc is n't set
ignore ( visitCilBlock dummyVisitor f.sbody ) ;
ignore (visitCilBlock dummyVisitor f.sbody) ; *)(* sets CurrentLoc *)
CEA so , [ scanBlock ] will set [ lastloc ] when necessary
lastloc : = ! currentLoc ;
lastloc := !currentLoc ; *) (* last location in the function *)
f.sbody <- scanBlock true f.sbody
(*
Local Variables:
compile-command: "make -C ../../.."
End:
*)
| null | https://raw.githubusercontent.com/TrustInSoft/tis-interpreter/33132ce4a825494ea48bf2dd6fd03a56b62cc5c3/src/kernel_internals/typing/oneret.ml | ocaml | **************************************************************************
Scott McPeak <>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
notice, this list of conditions and the following disclaimer.
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
promote products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
énergies alternatives)
et Automatique).
**************************************************************************
Get the return type
Does it return anything ?
don't collide
Must create a statement
We are at the end of the function. Now it is
* time to add the return statement
We're not changing the return into goto, so returns clause will still
have effect.
Keep this statement because it might have labels. But change it to
* an instruction that sets the return value (if any).
There's no \result: no need to adjust it
See if this is the last statement in function
sets CurrentLoc
last location in the function
Local Variables:
compile-command: "make -C ../../.."
End:
| Modified by TrustInSoft
Copyright ( C ) 2001 - 2003
< >
< >
< >
1 . Redistributions of source code must retain the above copyright
2 . Redistributions in binary form must reproduce the above copyright
3 . The names of the contributors may not be used to endorse or
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
COPYRIGHT OWNER OR FOR ANY DIRECT , INDIRECT ,
INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING ,
BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ;
CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT
File modified by CEA ( Commissariat à l'énergie atomique et aux
and INRIA ( Institut National de Recherche en Informatique
open Cil_types
open Cil
open Logic_const
let adjust_assigns_clause loc var code_annot =
let change_result = object
inherit Cil.nopCilVisitor
method! vterm_lhost = function
| TResult _ -> ChangeTo (TVar var)
| TVar _ | TMem _ -> DoChildren
end
in
let change_term t = Cil.visitCilTerm change_result t in
let module M = struct exception Found end in
let check_var = object
inherit Cil.nopCilVisitor
method! vterm_lhost = function
| TVar v when Cil_datatype.Logic_var.equal var v -> raise M.Found
| TVar _ | TResult _ | TMem _ -> DoChildren
end
in
let contains_var l =
try ignore (Cil.visitCilAssigns check_var (Writes l)); false
with M.Found -> true
in
let change_from = function
| FromAny -> FromAny
| From l -> From (List.map Logic_const.refresh_identified_term l)
in
let adjust_lval (_,assigns as acc) (loc,from) =
if Logic_utils.contains_result loc.it_content then begin
true,
(Logic_const.new_identified_term (change_term loc.it_content),
change_from from)::assigns
end else acc
in
let adjust_clause b =
match b.b_assigns with
| WritesAny -> ()
| Writes l ->
if not (contains_var l) then begin
let (changed, a) = List.fold_left adjust_lval (false,l) l in
let a =
if changed then a
else
(Logic_const.new_identified_term (Logic_const.tvar ~loc var),
FromAny)
:: a
in
b.b_assigns <- Writes a
end
in
match code_annot with
| AStmtSpec (_,s) -> List.iter adjust_clause s.spec_behavior
| _ -> ()
let oneret (f: fundec) : unit =
let fname = f.svar.vname in
let retTyp =
match f.svar.vtype with
TFun(rt, _, _, _) -> rt
| _ ->
Kernel.fatal "Function %s does not have a function type" f.svar.vname
in
let hasRet = match unrollType retTyp with TVoid _ -> false | _ -> true in
Memoize the return result variable . Use only if hasRet
let lastloc = ref Cil_datatype.Location.unknown in
let getRetVar =
let retVar : varinfo option ref = ref None in
fun () ->
match !retVar with
Some rv -> rv
| None -> begin
retVar := Some rv;
rv
end
in
let convert_result p =
let vis = object
inherit Cil.nopCilVisitor
method! vterm_lhost = function
| TResult _ ->
let v = getRetVar () in
ChangeTo (TVar (cvar_to_lvar v))
| TMem _ | TVar _ -> DoChildren
end
in visitCilPredicateNamed vis p
in
let assert_of_returns ca =
match ca.annot_content with
| AAssert _ | AInvariant _ | AVariant _ | AAssigns _ | AAllocation _ | APragma _ -> ptrue
| AStmtSpec (_bhvs,s) ->
let res =
List.fold_left
(fun acc bhv ->
pand
(acc,
pimplies
(pands
(List.map
(fun p ->
pold ~loc:p.ip_loc
(Logic_utils.named_of_identified_predicate p))
bhv.b_assumes),
pands
(List.fold_left
(fun acc (kind,p) ->
match kind with
Returns ->
Logic_utils.named_of_identified_predicate p
:: acc
| Normal | Exits | Breaks | Continues -> acc)
[ptrue] bhv.b_post_cond)
)))
ptrue s.spec_behavior
in convert_result res
in
Remember if we have introduced 's
let haveGoto = ref false in
Memoize the return statement
let retStmt : stmt ref = ref dummyStmt in
let getRetStmt (_x: unit) : stmt =
if !retStmt == dummyStmt then begin
let sr =
CEA modified to have a good [ ! lastloc ]
let rec setLastLoc = function
| [] -> ()
| {skind=Block b} :: [] -> setLastLoc b.bstmts
| {skind=UnspecifiedSequence seq}::[] ->
setLastLoc (List.map (fun (x,_,_,_,_) -> x) seq)
| {skind= _} as s :: [] -> lastloc := Cil_datatype.Stmt.loc s
| {skind=_s} :: l -> setLastLoc l
in
setLastLoc f.sbody.bstmts; !lastloc
in
let loc = getLastLoc () in
let rv =
if hasRet then
Some (new_exp ~loc (Lval(Var (getRetVar ()), NoOffset)))
else None
in
mkStmt (Return (rv, loc))
in retStmt := sr;
sr
end else
!retStmt
in
Stack of predicates that must hold in case of returns
( returns clause with \old transformed into \at(,L ) for a suitable L ) .
TODO : split that into behaviors and generates for foo , bar : assert instead
of plain assert .
(returns clause with \old transformed into \at(,L) for a suitable L).
TODO: split that into behaviors and generates for foo,bar: assert instead
of plain assert.
*)
let returns_clause_stack = Stack.create () in
let stmt_contract_stack = Stack.create () in
let rec popn n =
if n > 0 then begin
assert (not (Stack.is_empty returns_clause_stack));
ignore (Stack.pop returns_clause_stack);
ignore (Stack.pop stmt_contract_stack);
popn (n-1)
end
in
Now scan all the statements . Know if you are the main body of the
* function and be prepared to add new statements at the end .
* popstack indicates whether we should pop the stack after having analyzed
current statement . It is an int since nothing in ACSL prevents from having
multiple statement contracts on top of each other before finding an
actual statement ...
* function and be prepared to add new statements at the end.
* popstack indicates whether we should pop the stack after having analyzed
current statement. It is an int since nothing in ACSL prevents from having
multiple statement contracts on top of each other before finding an
actual statement...
*)
let rec scanStmts acc (mainbody: bool) popstack = function
let rs = getRetStmt () in
if !haveGoto then
rs.labels <- (Label("return_label", !lastloc, false)) :: rs.labels;
List.rev (rs :: acc)
| [] -> List.rev acc
| [{skind=Return (Some ({enode = Lval(Var _,NoOffset)}), _l)} as s]
when mainbody && not !haveGoto ->
popn popstack;
List.rev (s::acc)
| ({skind=Return (retval, loc)} as s) :: rests ->
Cil.CurrentLoc.set loc;
ignore ( E.log " Fixing return(%a ) at % a\n "
insert
( match retval with None - > text " None "
| Some e - > d_exp ( ) e )
d_loc l ) ;
ignore (E.log "Fixing return(%a) at %a\n"
insert
(match retval with None -> text "None"
| Some e -> d_exp () e)
d_loc l);
*)
if hasRet && retval = None then
Kernel.fatal ~current:true
"Found return without value in function %s" fname;
if not hasRet && retval <> None then
Kernel.fatal ~current:true "Found return in subroutine %s" fname;
s.skind <- begin
match retval with
Some rval -> Instr (Set((Var (getRetVar ()), NoOffset), rval, loc))
| None -> Instr (Skip loc)
end;
let returns_assert = ref ptrue in
Stack.iter (fun p -> returns_assert := pand ~loc (p, !returns_assert))
returns_clause_stack;
(match retval with
| Some _ ->
Stack.iter
(adjust_assigns_clause loc (Cil.cvar_to_lvar (getRetVar())))
stmt_contract_stack;
);
let add_assert res =
match !returns_assert with
{ content = Ptrue } -> res
| p ->
let a =
Logic_const.new_code_annotation (AAssert ([],p))
in
mkStmt (Instr(Code_annot (a,loc))) :: res
in
if mainbody && rests == [] then begin
popn popstack;
scanStmts (add_assert (s::acc)) mainbody 0 rests
end else begin
Add a
let sgref = ref (getRetStmt ()) in
let sg = mkStmt (Goto (sgref, loc)) in
haveGoto := true;
popn popstack;
scanStmts (sg :: (add_assert (s::acc))) mainbody 0 rests
end
| ({skind=If(eb,t,e,l)} as s) :: rests ->
CEA currentLoc : = l ;
s.skind <- If(eb, scanBlock false t, scanBlock false e, l);
popn popstack;
scanStmts (s::acc) mainbody 0 rests
| ({skind=Loop(a,b,l,lb1,lb2)} as s) :: rests ->
CEA currentLoc : = l ;
s.skind <- Loop(a,scanBlock false b, l,lb1,lb2);
popn popstack;
scanStmts (s::acc) mainbody 0 rests
| ({skind=Switch(e, b, cases, l)} as s) :: rests ->
CEA currentLoc : = l ;
s.skind <- Switch(e, scanBlock false b, cases, l);
popn popstack;
scanStmts (s::acc) mainbody 0 rests
| [{skind=Block b} as s] ->
s.skind <- Block (scanBlock mainbody b);
popn popstack;
List.rev (s::acc)
| ({skind=Block b} as s) :: rests ->
s.skind <- Block (scanBlock false b);
popn popstack;
scanStmts (s::acc) mainbody 0 rests
| [{skind = UnspecifiedSequence seq} as s] ->
s.skind <-
UnspecifiedSequence
(List.concat
(List.map (fun (s,m,w,r,c) ->
let res = scanStmts [] mainbody 0 [s] in
(List.hd res,m,w,r,c)::
(List.map (fun x -> x,[],[],[],[]) (List.tl res)))
seq));
popn popstack;
List.rev (s::acc)
| ({skind = UnspecifiedSequence seq} as s) :: rests ->
s.skind <-
UnspecifiedSequence
(List.concat
(List.map (fun (s,m,w,r,c) ->
let res = scanStmts [] false 0 [s] in
(List.hd res,m,w,r,c)::
(List.map (fun x -> x,[],[],[],[]) (List.tl res)))
seq));
popn popstack;
scanStmts (s::acc) mainbody 0 rests
| {skind=Instr(Code_annot (ca,_))} as s :: rests ->
let returns = assert_of_returns ca in
let returns = Logic_utils.translate_old_label s returns in
Stack.push returns returns_clause_stack;
Stack.push ca.annot_content stmt_contract_stack;
scanStmts (s::acc) mainbody (popstack + 1) rests
| { skind = TryCatch(t,c,l) } as s :: rests ->
let scan_one_catch (e,b) = (e,scanBlock false b) in
let t = scanBlock false t in
let c = List.map scan_one_catch c in
s.skind <- TryCatch(t,c,l);
popn popstack;
scanStmts (s::acc) mainbody 0 rests
| ({skind=(Goto _ | Instr _ | Continue _ | Break _
| TryExcept _ | TryFinally _ | Throw _)} as s)
:: rests ->
popn popstack;
scanStmts (s::acc) mainbody 0 rests
and scanBlock (mainbody: bool) (b: block) =
{ b with bstmts = scanStmts [] mainbody 0 b.bstmts;}
in
CEA since CurrentLoc is n't set
ignore ( visitCilBlock dummyVisitor f.sbody ) ;
CEA so , [ scanBlock ] will set [ lastloc ] when necessary
lastloc : = ! currentLoc ;
f.sbody <- scanBlock true f.sbody
|
020c4f2e49bb4c7c7a9e6a2c2b3c2125732528740e7f0c147faf621925e81431 | PDP-10/panda | extstr.lsp | EXTSTR -*-Mode : Lisp;Package : SI;Lowercase : T-*-
;;; ****************************************************************
* * * * * * * EXTended datatype scheme , basic STRuctures * * * *
;;; ****************************************************************
* * ( c ) Copyright 1981 Massachusetts Institute of Technology * * * *
;;; ****************************************************************
(herald EXTSTR /91)
(eval-when (eval compile)
(load '((lisp) subload))
(subload EXTBAS)
(subload EXTMAC)
This would like to be a SUBLOAD of VECTOR , but this way it 's not circular
(defmacro VSET (v n val) `(SI:XSET ,v ,n ,val))
)
;;; Wherein we build HUNKs for each class that will be directly pointed to
by classes defined by DEFVST . We leave out the interconnections between
classes , to help printing of objects defined by DEFVST . Loading EXTEND
;;; will supply the missing interconnections.
We also define the basic CLASS creator , SI : DEFCLASS*-2 a sub - primitive that
;;; gives a skeletal class. This class can then be filled in by calling
;;; SI:INITIALIZE-CLASS (from EXTEND)
(defvar SI:SKELETAL-CLASSES () "At least it wont be unbound in Old lisps")
(defvar CLASS-CLASS () "Will be set up, at some pain, in this file")
(defvar OBJECT-CLASS () "Will be set up, at some pain, in this file")
SI : EXTSTR - SETUP - CLASSES is set up by EXTMAC , and includes things
;;; like CLASS-CLASS OBJECT-CLASS STRUCT=INFO-CLASS STRUCT-CLASS VECTOR-CLASS
(declare #.`(SPECIAL ,.si:extstr-setup-classes))
(declare (own-symbol **SELF-EVAL** SI:DEFCLASS*-2 SI:DEFVST-BARE-INIT))
(defun SI:SELF-QUOTIFY (x) `',x)
(eval-when (eval compile load)
;; So that we can easily tell classes apart from random extends
(defvar SI:CLASS-MARKER '**CLASS-SELF-EVAL**)
(and (status feature COMPLR)
(*lexpr SI:DEFCLASS*-2 SI:DEFVST-BARE-INIT))
)
(defprop **SELF-EVAL** SI:SELF-QUOTIFY MACRO)
(defprop #.SI:CLASS-MARKER SI:SELF-QUOTIFY MACRO) ;**CLASS-SELF-EVAL**
SI : DEFCLASS*-2
(defun SI:DEFCLASS*-2 (name typep var superiors
&optional source-file class
&rest ignore )
(cond ((cond ((null class))
((not (classp class))
(+internal-lossage 'CLASS 'SI:DEFCLASS*-2 class)
'T))
(setq class (si:make-extend #.si:class-instance-size CLASS-CLASS))
(setf (si:extend-marker-of class) SI:CLASS-MARKER)
(setf (si:class-typep class) typep)
(setf (si:class-plist class) (ncons name))
(setf (si:class-name class) name)))
(if source-file
(setf (get (si:class-plist class) ':SOURCE-FILE) source-file))
(if var
(setf (si:class-var (set var class)) var))
(cond ((fboundp 'SI:INITIALIZE-CLASS)
(setf (si:class-superiors class) superiors)
(si:initialize-class class))
('T (push `(,class ,superiors) SI:SKELETAL-CLASSES)
(setf (si:extend-class-of class) () )
(if (boundp 'PURCOPY) (push class PURCOPY))))
(putprop name class 'CLASS)
class)
Move & OPTIONAL to after VERSION once old files are flushed ( after
defvst - version 1 is gone ) . July 4 , 1981 -- JonL --
See also the similar comments in DEFVSY .
(defun SI:DEFVST-BARE-INIT (name var-name cnsn size inis
&optional (version 1) source-file class sinfo
&rest ignore)
(if (pairp inis)
;; a slight open-coding of TO-VECTOR for (SETQ INIS (TO-VECTOR INIS))
(setq inis (let ((ln (length inis)))
(declare (fixnum ln))
(do ((v (si:make-extend ln VECTOR-CLASS))
(i 0 (1+ i))
(l inis (cdr l)))
((= i ln) v)
(declare (fixnum i))
(vset v i (car l))))))
(if (null class)
(setq class (or (get name 'CLASS)
(si:defclass*-2 name
name
var-name
(list STRUCT-CLASS)
source-file))))
(if (null sinfo)
(setq sinfo (si:extend STRUCT=INFO-CLASS
version
name
cnsn
size
inis
class)))
(putprop name sinfo 'STRUCT=INFO)
;;The STRUCT=INFO property can always be found on the plist of the 'name'
;; of the structure (and consequently the 'name' of the class)
;;So I've the following line optional, so that it doesn't cause
;; a printing circularity when EXTEND isn't loaded.
(if (get 'EXTEND 'VERSION)
(setf (get (si:class-plist class) 'STRUCT=INFO) sinfo)))
;; Setup basics of CLASS hierarchy, if not already done so. DEFVAR
;; at beginning of this file ensures that CLASS-CLASS has a value.
(and (null CLASS-CLASS)
(let (*RSET y x)
;;Note that we cannot now permit error checking by si:make-extend when
called from si : defclass*-2 , since CLASS - CLASS does n't exist yet .
;;Note also the required order of constructing up the classes!
(mapc #'(lambda (z)
(desetq (x y z) z)
(si:defclass*-2 x x y (if z (list (symeval z)))))
'((OBJECT OBJECT-CLASS () )
(CLASS CLASS-CLASS OBJECT-CLASS)
(SEQUENCE SEQUENCE-CLASS OBJECT-CLASS)
(VECTOR VECTOR-CLASS SEQUENCE-CLASS)
(STRUCT STRUCT-CLASS OBJECT-CLASS)
(STRUCT=INFO STRUCT=INFO-CLASS STRUCT-CLASS)))))
;; The following is an open-coding of part of the result of CONS-A-STRUCT=INFO.
(si:defvst-bare-init
'STRUCT=INFO
'STRUCT=INFO-CLASS
'CONS-A-STRUCT=INFO
6
'( () ;&REST info
1st key
2nd
3nd
4rd
5th
6th
)
2) ;Version
(eval-when (eval compile)
(defmacro GEN-SOURCE-FILE-ADDENDA ()
(if (filep infile)
`(MAPC #'(LAMBDA (CLASS)
(SETF (GET (SI:CLASS-PLIST CLASS) ':SOURCE-FILE)
',(namestring (truename infile))))
(LIST CLASS-CLASS OBJECT-CLASS VECTOR-CLASS STRUCT-CLASS
STRUCT=INFO-CLASS SEQUENCE-CLASS))))
)
(gen-source-file-addenda)
(if (status feature COMPLR)
(subload EXTHUK))
| null | https://raw.githubusercontent.com/PDP-10/panda/522301975c062914ea3d630a1daefde261ddf590/files/maclisp/extstr.lsp | lisp | Package : SI;Lowercase : T-*-
****************************************************************
****************************************************************
****************************************************************
Wherein we build HUNKs for each class that will be directly pointed to
will supply the missing interconnections.
gives a skeletal class. This class can then be filled in by calling
SI:INITIALIZE-CLASS (from EXTEND)
like CLASS-CLASS OBJECT-CLASS STRUCT=INFO-CLASS STRUCT-CLASS VECTOR-CLASS
So that we can easily tell classes apart from random extends
**CLASS-SELF-EVAL**
a slight open-coding of TO-VECTOR for (SETQ INIS (TO-VECTOR INIS))
The STRUCT=INFO property can always be found on the plist of the 'name'
of the structure (and consequently the 'name' of the class)
So I've the following line optional, so that it doesn't cause
a printing circularity when EXTEND isn't loaded.
Setup basics of CLASS hierarchy, if not already done so. DEFVAR
at beginning of this file ensures that CLASS-CLASS has a value.
Note that we cannot now permit error checking by si:make-extend when
Note also the required order of constructing up the classes!
The following is an open-coding of part of the result of CONS-A-STRUCT=INFO.
&REST info
Version
| * * * * * * * EXTended datatype scheme , basic STRuctures * * * *
* * ( c ) Copyright 1981 Massachusetts Institute of Technology * * * *
(herald EXTSTR /91)
(eval-when (eval compile)
(load '((lisp) subload))
(subload EXTBAS)
(subload EXTMAC)
This would like to be a SUBLOAD of VECTOR , but this way it 's not circular
(defmacro VSET (v n val) `(SI:XSET ,v ,n ,val))
)
by classes defined by DEFVST . We leave out the interconnections between
classes , to help printing of objects defined by DEFVST . Loading EXTEND
We also define the basic CLASS creator , SI : DEFCLASS*-2 a sub - primitive that
(defvar SI:SKELETAL-CLASSES () "At least it wont be unbound in Old lisps")
(defvar CLASS-CLASS () "Will be set up, at some pain, in this file")
(defvar OBJECT-CLASS () "Will be set up, at some pain, in this file")
SI : EXTSTR - SETUP - CLASSES is set up by EXTMAC , and includes things
(declare #.`(SPECIAL ,.si:extstr-setup-classes))
(declare (own-symbol **SELF-EVAL** SI:DEFCLASS*-2 SI:DEFVST-BARE-INIT))
(defun SI:SELF-QUOTIFY (x) `',x)
(eval-when (eval compile load)
(defvar SI:CLASS-MARKER '**CLASS-SELF-EVAL**)
(and (status feature COMPLR)
(*lexpr SI:DEFCLASS*-2 SI:DEFVST-BARE-INIT))
)
(defprop **SELF-EVAL** SI:SELF-QUOTIFY MACRO)
SI : DEFCLASS*-2
(defun SI:DEFCLASS*-2 (name typep var superiors
&optional source-file class
&rest ignore )
(cond ((cond ((null class))
((not (classp class))
(+internal-lossage 'CLASS 'SI:DEFCLASS*-2 class)
'T))
(setq class (si:make-extend #.si:class-instance-size CLASS-CLASS))
(setf (si:extend-marker-of class) SI:CLASS-MARKER)
(setf (si:class-typep class) typep)
(setf (si:class-plist class) (ncons name))
(setf (si:class-name class) name)))
(if source-file
(setf (get (si:class-plist class) ':SOURCE-FILE) source-file))
(if var
(setf (si:class-var (set var class)) var))
(cond ((fboundp 'SI:INITIALIZE-CLASS)
(setf (si:class-superiors class) superiors)
(si:initialize-class class))
('T (push `(,class ,superiors) SI:SKELETAL-CLASSES)
(setf (si:extend-class-of class) () )
(if (boundp 'PURCOPY) (push class PURCOPY))))
(putprop name class 'CLASS)
class)
Move & OPTIONAL to after VERSION once old files are flushed ( after
defvst - version 1 is gone ) . July 4 , 1981 -- JonL --
See also the similar comments in DEFVSY .
(defun SI:DEFVST-BARE-INIT (name var-name cnsn size inis
&optional (version 1) source-file class sinfo
&rest ignore)
(if (pairp inis)
(setq inis (let ((ln (length inis)))
(declare (fixnum ln))
(do ((v (si:make-extend ln VECTOR-CLASS))
(i 0 (1+ i))
(l inis (cdr l)))
((= i ln) v)
(declare (fixnum i))
(vset v i (car l))))))
(if (null class)
(setq class (or (get name 'CLASS)
(si:defclass*-2 name
name
var-name
(list STRUCT-CLASS)
source-file))))
(if (null sinfo)
(setq sinfo (si:extend STRUCT=INFO-CLASS
version
name
cnsn
size
inis
class)))
(putprop name sinfo 'STRUCT=INFO)
(if (get 'EXTEND 'VERSION)
(setf (get (si:class-plist class) 'STRUCT=INFO) sinfo)))
(and (null CLASS-CLASS)
(let (*RSET y x)
called from si : defclass*-2 , since CLASS - CLASS does n't exist yet .
(mapc #'(lambda (z)
(desetq (x y z) z)
(si:defclass*-2 x x y (if z (list (symeval z)))))
'((OBJECT OBJECT-CLASS () )
(CLASS CLASS-CLASS OBJECT-CLASS)
(SEQUENCE SEQUENCE-CLASS OBJECT-CLASS)
(VECTOR VECTOR-CLASS SEQUENCE-CLASS)
(STRUCT STRUCT-CLASS OBJECT-CLASS)
(STRUCT=INFO STRUCT=INFO-CLASS STRUCT-CLASS)))))
(si:defvst-bare-init
'STRUCT=INFO
'STRUCT=INFO-CLASS
'CONS-A-STRUCT=INFO
6
1st key
2nd
3nd
4rd
5th
6th
)
(eval-when (eval compile)
(defmacro GEN-SOURCE-FILE-ADDENDA ()
(if (filep infile)
`(MAPC #'(LAMBDA (CLASS)
(SETF (GET (SI:CLASS-PLIST CLASS) ':SOURCE-FILE)
',(namestring (truename infile))))
(LIST CLASS-CLASS OBJECT-CLASS VECTOR-CLASS STRUCT-CLASS
STRUCT=INFO-CLASS SEQUENCE-CLASS))))
)
(gen-source-file-addenda)
(if (status feature COMPLR)
(subload EXTHUK))
|
21e8f51bf27ef8ae5290e24689a95f19cf746f96f81555d364be8d437762b3d4 | bcc32/advent-of-code | points.ml | open! Core
open! Async
open! Import
type t = Point.t list
let step = List.map ~f:Point.step
let span_exn (t : t) =
match t with
| [] -> invalid_arg "span_exn"
| hd :: tl ->
List.fold
tl
~init:((hd.x, hd.y), (hd.x, hd.y))
~f:(fun ((min_x, min_y), (max_x, max_y)) { x; y; _ } ->
(Int.min min_x x, Int.min min_y y), (Int.max max_x x, Int.max max_y y))
;;
let area_exn (t : t) =
let (min_x, min_y), (max_x, max_y) = span_exn t in
(max_x - min_x) * (max_y - min_y)
;;
let to_sequence t = Sequence.unfold ~init:t ~f:(fun t -> Some ((t, area_exn t), step t))
let to_string_plot t =
let (min_x, min_y), (max_x, max_y) = span_exn t in
(* x and y inverted *)
let dimx = max_y - min_y + 1 in
let dimy = max_x - min_x + 1 in
let chars = Array.make_matrix '.' ~dimx ~dimy in
List.iter t ~f:(fun { x; y; _ } -> chars.(max_y - y).(x - min_x) <- '#');
invert y axis
Array.rev_inplace chars;
chars
|> Array.to_list
|> List.map ~f:(fun row ->
row |> Array.to_list |> List.map ~f:String.of_char |> String.concat)
|> String.concat ~sep:"\n"
;;
| null | https://raw.githubusercontent.com/bcc32/advent-of-code/653c0f130e2fb2f599d4e76804e02af54c9bb19f/2018/10/points.ml | ocaml | x and y inverted | open! Core
open! Async
open! Import
type t = Point.t list
let step = List.map ~f:Point.step
let span_exn (t : t) =
match t with
| [] -> invalid_arg "span_exn"
| hd :: tl ->
List.fold
tl
~init:((hd.x, hd.y), (hd.x, hd.y))
~f:(fun ((min_x, min_y), (max_x, max_y)) { x; y; _ } ->
(Int.min min_x x, Int.min min_y y), (Int.max max_x x, Int.max max_y y))
;;
let area_exn (t : t) =
let (min_x, min_y), (max_x, max_y) = span_exn t in
(max_x - min_x) * (max_y - min_y)
;;
let to_sequence t = Sequence.unfold ~init:t ~f:(fun t -> Some ((t, area_exn t), step t))
let to_string_plot t =
let (min_x, min_y), (max_x, max_y) = span_exn t in
let dimx = max_y - min_y + 1 in
let dimy = max_x - min_x + 1 in
let chars = Array.make_matrix '.' ~dimx ~dimy in
List.iter t ~f:(fun { x; y; _ } -> chars.(max_y - y).(x - min_x) <- '#');
invert y axis
Array.rev_inplace chars;
chars
|> Array.to_list
|> List.map ~f:(fun row ->
row |> Array.to_list |> List.map ~f:String.of_char |> String.concat)
|> String.concat ~sep:"\n"
;;
|
69cd448360f0139364dbde6ee51793f2cd00d8073457d00f0bcd6998d28f4583 | deadcode/Learning-CL--David-Touretzky | 7.21.lisp | ;;; Not-all-odd == true even if a single number is not odd
;;; Better defined as any-even
(defun not-all-odd (x)
(find-if
#'(lambda (n) (not (oddp n)))
x))
(let ((foo '(not-all-odd '(1 3 5 -3 7 11)))
(bar '(not-all-odd '(1 3 0 5 7 9 11))))
(format t "~s = ~s~%" foo (eval foo))
(format t "~s = ~s~%" bar (eval bar)))
| null | https://raw.githubusercontent.com/deadcode/Learning-CL--David-Touretzky/b4557c33f58e382f765369971e6a4747c27ca692/Chapter%207/7.21.lisp | lisp | Not-all-odd == true even if a single number is not odd
Better defined as any-even
| (defun not-all-odd (x)
(find-if
#'(lambda (n) (not (oddp n)))
x))
(let ((foo '(not-all-odd '(1 3 5 -3 7 11)))
(bar '(not-all-odd '(1 3 0 5 7 9 11))))
(format t "~s = ~s~%" foo (eval foo))
(format t "~s = ~s~%" bar (eval bar)))
|
0e5fe3c824673eb63ccb8b6b9ccb5e7a7ee73ffe90fd1bdce9c41395fc02b717 | pfdietz/ansi-test | phase.lsp | ;-*- Mode: Lisp -*-
Author :
Created : Sat Sep 6 21:15:54 2003
;;;; Contains: Tests of PHASE
(deftest phase.error.1
(signals-error (phase) program-error)
t)
(deftest phase.error.2
(signals-error (phase 0 0) program-error)
t)
(deftest phase.error.3
(check-type-error #'phase #'numberp)
nil)
(deftest phase.1
(eqlt (phase 0) 0.0f0)
t)
(deftest phase.2
(eqlt (phase 1) 0.0f0)
t)
(deftest phase.3
(eqlt (phase 1/2) 0.0f0)
t)
(deftest phase.4
(eqlt (phase 100.0f0) 0.0f0)
t)
(deftest phase.5
(eqlt (phase 100.0s0) 0.0s0)
t)
(deftest phase.6
(eqlt (phase 100.0d0) 0.0d0)
t)
(deftest phase.7
(eqlt (phase 100.0l0) 0.0l0)
t)
(deftest phase.8
(eqlt (phase -1) (coerce pi 'single-float))
t)
(deftest phase.9
(eqlt (phase -1/2) (coerce pi 'single-float))
t)
(deftest phase.10
(let ((p1 (phase #c(0 1)))
(p2 (phase #c(0.0f0 1.0f0))))
(and (eql p1 p2)
(approx= p1 (coerce (/ pi 2) 'single-float))))
t)
(deftest phase.11
(let ((p (phase #c(0.0d0 1.0d0))))
(approx= p (coerce (/ pi 2) 'double-float)))
t)
(deftest phase.12
(let ((p (phase #c(0.0s0 1.0s0))))
(approx= p (coerce (/ pi 2) 'single-float)))
t)
(deftest phase.13
(let ((p (phase #c(0.0l0 1.0l0))))
(approx= p (/ pi 2)))
t)
(deftest phase.14
(let ((p1 (phase #c(1 1)))
(p2 (phase #c(1.0f0 1.0f0))))
(and (eql p1 p2)
(approx= p1 (coerce (/ pi 4) 'single-float)
(* 2 single-float-epsilon))))
t)
(deftest phase.15
(let ((p (phase #c(1.0d0 1.0d0))))
(approx= p (coerce (/ pi 4) 'double-float)
(* 2 double-float-epsilon)))
t)
(deftest phase.16
(let ((p (phase #c(1.0s0 1.0s0))))
(approx= p (coerce (/ pi 4) 'single-float)
(* 2 short-float-epsilon)))
t)
(deftest phase.17
(let ((p (phase #c(1.0l0 1.0l0))))
(approx= p (/ pi 4) (* 2 long-float-epsilon)))
t)
;;; Negative zeros
(deftest phase.18
(or (eqlt -0.0s0 0.0s0)
(approx= (phase #c(-1.0 -0.0)) (coerce (- pi) 'short-float)))
t)
(deftest phase.19
(or (eqlt -0.0f0 0.0f0)
(approx= (phase #c(-1.0 -0.0)) (coerce (- pi) 'single-float)))
t)
(deftest phase.20
(or (eqlt -0.0d0 0.0d0)
(approx= (phase #c(-1.0 -0.0)) (coerce (- pi) 'double-float)))
t)
(deftest phase.21
(or (eqlt -0.0l0 0.0l0)
(approx= (phase #c(-1.0 -0.0)) (coerce (- pi) 'long-float)))
t)
| null | https://raw.githubusercontent.com/pfdietz/ansi-test/3f4b9d31c3408114f0467eaeca4fd13b28e2ce31/numbers/phase.lsp | lisp | -*- Mode: Lisp -*-
Contains: Tests of PHASE
Negative zeros | Author :
Created : Sat Sep 6 21:15:54 2003
(deftest phase.error.1
(signals-error (phase) program-error)
t)
(deftest phase.error.2
(signals-error (phase 0 0) program-error)
t)
(deftest phase.error.3
(check-type-error #'phase #'numberp)
nil)
(deftest phase.1
(eqlt (phase 0) 0.0f0)
t)
(deftest phase.2
(eqlt (phase 1) 0.0f0)
t)
(deftest phase.3
(eqlt (phase 1/2) 0.0f0)
t)
(deftest phase.4
(eqlt (phase 100.0f0) 0.0f0)
t)
(deftest phase.5
(eqlt (phase 100.0s0) 0.0s0)
t)
(deftest phase.6
(eqlt (phase 100.0d0) 0.0d0)
t)
(deftest phase.7
(eqlt (phase 100.0l0) 0.0l0)
t)
(deftest phase.8
(eqlt (phase -1) (coerce pi 'single-float))
t)
(deftest phase.9
(eqlt (phase -1/2) (coerce pi 'single-float))
t)
(deftest phase.10
(let ((p1 (phase #c(0 1)))
(p2 (phase #c(0.0f0 1.0f0))))
(and (eql p1 p2)
(approx= p1 (coerce (/ pi 2) 'single-float))))
t)
(deftest phase.11
(let ((p (phase #c(0.0d0 1.0d0))))
(approx= p (coerce (/ pi 2) 'double-float)))
t)
(deftest phase.12
(let ((p (phase #c(0.0s0 1.0s0))))
(approx= p (coerce (/ pi 2) 'single-float)))
t)
(deftest phase.13
(let ((p (phase #c(0.0l0 1.0l0))))
(approx= p (/ pi 2)))
t)
(deftest phase.14
(let ((p1 (phase #c(1 1)))
(p2 (phase #c(1.0f0 1.0f0))))
(and (eql p1 p2)
(approx= p1 (coerce (/ pi 4) 'single-float)
(* 2 single-float-epsilon))))
t)
(deftest phase.15
(let ((p (phase #c(1.0d0 1.0d0))))
(approx= p (coerce (/ pi 4) 'double-float)
(* 2 double-float-epsilon)))
t)
(deftest phase.16
(let ((p (phase #c(1.0s0 1.0s0))))
(approx= p (coerce (/ pi 4) 'single-float)
(* 2 short-float-epsilon)))
t)
(deftest phase.17
(let ((p (phase #c(1.0l0 1.0l0))))
(approx= p (/ pi 4) (* 2 long-float-epsilon)))
t)
(deftest phase.18
(or (eqlt -0.0s0 0.0s0)
(approx= (phase #c(-1.0 -0.0)) (coerce (- pi) 'short-float)))
t)
(deftest phase.19
(or (eqlt -0.0f0 0.0f0)
(approx= (phase #c(-1.0 -0.0)) (coerce (- pi) 'single-float)))
t)
(deftest phase.20
(or (eqlt -0.0d0 0.0d0)
(approx= (phase #c(-1.0 -0.0)) (coerce (- pi) 'double-float)))
t)
(deftest phase.21
(or (eqlt -0.0l0 0.0l0)
(approx= (phase #c(-1.0 -0.0)) (coerce (- pi) 'long-float)))
t)
|
98ce7acfec5397e529adb7d3dfafc4508ba69571bf51aeeb171bd5d6411e0b05 | silverpond/hat | relationships_test.clj | (ns hat.relationships-test
(:require [clojure.test :refer :all]
[hat.controllers :refer [the-intermediate-step]]
[hat.descriptions :refer [generate-description]]
[hat.relationships :refer [one-to-many]]))
(def hosts-description
(generate-description
{:singular-name "host"
:singular-titlecase "Host"
:plural-name "hosts"
:plural-titlecase "Hosts"
:conn :conn
:db-search-attr :host/name
:fields
(constantly
[{:title "Name" :name :host/name :type :text}
{:title "Address" :name :host/address :type :text}
{:title "Locality" :name :host/locality :type :text}
{:title "State" :name :host/state :type :text}
{:title "Postcode" :name :host/postcode :type :text}
{:title "Country" :name :host/country :type :text}])}))
(def events-description
(generate-description
{:singular-name "event"
:singular-titlecase "Event"
:plural-name "events"
:plural-titlecase "Events"
:conn :conn
:db-search-attr :event/name
:fields
(constantly
[{:title "Name" :name :event/name :type :text}
{:title "Description" :name :event/description :type :text}
{:title "Start Time" :name :event/start-time :type :datetime}
{:title "End Time" :name :event/end-time :type :datetime}
{:title "Venue" :name :event/venue :type :text}
{:title "Facebook Event" :name :event/facebook-event :type :url}
{:title "Hashtag" :name :event/hashtag :type :text}])}))
(def old-relationship
{:field-title-attr :host/name
:selection-attr :host/name
:parent-attr :event/host
:collection-ref :events
:add-action-name "add-event"
:singular-ref :event
:parent-conn :conn
:child-conn :conn
:replace-action-name "replace-event"})
(def rnr (the-intermediate-step [hosts-description events-description]))
(deftest relationships
(println "disabled")
(= old-relationship
(one-to-many rnr hosts-description events-description
:host/name :event/host)))
| null | https://raw.githubusercontent.com/silverpond/hat/4e4a5dd89cac29c0a0cf68e4f02b76523e096276/test/hat/relationships_test.clj | clojure | (ns hat.relationships-test
(:require [clojure.test :refer :all]
[hat.controllers :refer [the-intermediate-step]]
[hat.descriptions :refer [generate-description]]
[hat.relationships :refer [one-to-many]]))
(def hosts-description
(generate-description
{:singular-name "host"
:singular-titlecase "Host"
:plural-name "hosts"
:plural-titlecase "Hosts"
:conn :conn
:db-search-attr :host/name
:fields
(constantly
[{:title "Name" :name :host/name :type :text}
{:title "Address" :name :host/address :type :text}
{:title "Locality" :name :host/locality :type :text}
{:title "State" :name :host/state :type :text}
{:title "Postcode" :name :host/postcode :type :text}
{:title "Country" :name :host/country :type :text}])}))
(def events-description
(generate-description
{:singular-name "event"
:singular-titlecase "Event"
:plural-name "events"
:plural-titlecase "Events"
:conn :conn
:db-search-attr :event/name
:fields
(constantly
[{:title "Name" :name :event/name :type :text}
{:title "Description" :name :event/description :type :text}
{:title "Start Time" :name :event/start-time :type :datetime}
{:title "End Time" :name :event/end-time :type :datetime}
{:title "Venue" :name :event/venue :type :text}
{:title "Facebook Event" :name :event/facebook-event :type :url}
{:title "Hashtag" :name :event/hashtag :type :text}])}))
(def old-relationship
{:field-title-attr :host/name
:selection-attr :host/name
:parent-attr :event/host
:collection-ref :events
:add-action-name "add-event"
:singular-ref :event
:parent-conn :conn
:child-conn :conn
:replace-action-name "replace-event"})
(def rnr (the-intermediate-step [hosts-description events-description]))
(deftest relationships
(println "disabled")
(= old-relationship
(one-to-many rnr hosts-description events-description
:host/name :event/host)))
| |
648bd109f39b9363f019dafabe006e5c6d6738cf1f66ea5aad5397483ab6d4b9 | aeternity/aeternity | aec_next_nonce.erl | -module(aec_next_nonce).
-export([pick_for_account/1,
pick_for_account/2]).
-include("blocks.hrl").
%% It assumes that in order to pick a nonce for a transaction
%% account has to be present in state tree.
It implies that user can not pick a nonce to create a transaction ( e.g. spend tx )
%% to put it into the mempool until either
%% - some funds are transferred to user's account
%% or
%% - user mined a block, which was already added to the chain.
-spec pick_for_account(aec_keys:pubkey()) -> {ok, non_neg_integer()} |
{error, account_not_found}.
pick_for_account(Pubkey) ->
pick_for_account(Pubkey, max).
-spec pick_for_account(aec_keys:pubkey(), max | continuity) ->
{ok, non_neg_integer()} | {error, account_not_found}.
pick_for_account(Pubkey, Strategy) ->
case get_state_tree_nonce(Pubkey) of
generalized_account -> {ok, 0};
{ok, StateTreeNonce} ->
case Strategy of
max ->
MempoolNonce = get_mempool_nonce(Pubkey),
NextNonce = max(StateTreeNonce, MempoolNonce) + 1,
{ok, NextNonce};
continuity ->
{ok, AllTxs} = aec_tx_pool:peek(infinity, Pubkey),
get all nonces from the pool sorted asc
AllNoncesInPool = lists:sort([aetx:nonce(aetx_sign:tx(T)) ||
T <- AllTxs]),
%% prepend the account nonce this relies on it being
%% smallest number (we don't keep invalid txs in the pool)
Next = hd(lists:seq(StateTreeNonce + 1, StateTreeNonce + length(AllNoncesInPool) + 1) --
AllNoncesInPool),
{ok, Next}
end;
{error, account_not_found} = Error ->
Error
end.
%% Internals
-spec get_state_tree_nonce(aec_keys:pubkey()) -> {ok, non_neg_integer()} |
generalized_account |
{error, account_not_found}.
get_state_tree_nonce(AccountPubkey) ->
case aec_chain:get_account(AccountPubkey) of
{value, Account} ->
case aec_accounts:type(Account) of
basic ->
{ok, aec_accounts:nonce(Account)};
generalized -> generalized_account
end;
none ->
{error, account_not_found}
end.
-spec get_mempool_nonce(aec_keys:pubkey()) -> integer().
get_mempool_nonce(AccountPubkey) ->
case aec_tx_pool:get_max_nonce(AccountPubkey) of
{ok, Nonce} ->
Nonce;
undefined ->
-1
end.
| null | https://raw.githubusercontent.com/aeternity/aeternity/e8ffe6822bcd651e99798057511c8764f09a79b1/apps/aecore/src/aec_next_nonce.erl | erlang | It assumes that in order to pick a nonce for a transaction
account has to be present in state tree.
to put it into the mempool until either
- some funds are transferred to user's account
or
- user mined a block, which was already added to the chain.
prepend the account nonce this relies on it being
smallest number (we don't keep invalid txs in the pool)
Internals | -module(aec_next_nonce).
-export([pick_for_account/1,
pick_for_account/2]).
-include("blocks.hrl").
It implies that user can not pick a nonce to create a transaction ( e.g. spend tx )
-spec pick_for_account(aec_keys:pubkey()) -> {ok, non_neg_integer()} |
{error, account_not_found}.
pick_for_account(Pubkey) ->
pick_for_account(Pubkey, max).
-spec pick_for_account(aec_keys:pubkey(), max | continuity) ->
{ok, non_neg_integer()} | {error, account_not_found}.
pick_for_account(Pubkey, Strategy) ->
case get_state_tree_nonce(Pubkey) of
generalized_account -> {ok, 0};
{ok, StateTreeNonce} ->
case Strategy of
max ->
MempoolNonce = get_mempool_nonce(Pubkey),
NextNonce = max(StateTreeNonce, MempoolNonce) + 1,
{ok, NextNonce};
continuity ->
{ok, AllTxs} = aec_tx_pool:peek(infinity, Pubkey),
get all nonces from the pool sorted asc
AllNoncesInPool = lists:sort([aetx:nonce(aetx_sign:tx(T)) ||
T <- AllTxs]),
Next = hd(lists:seq(StateTreeNonce + 1, StateTreeNonce + length(AllNoncesInPool) + 1) --
AllNoncesInPool),
{ok, Next}
end;
{error, account_not_found} = Error ->
Error
end.
-spec get_state_tree_nonce(aec_keys:pubkey()) -> {ok, non_neg_integer()} |
generalized_account |
{error, account_not_found}.
get_state_tree_nonce(AccountPubkey) ->
case aec_chain:get_account(AccountPubkey) of
{value, Account} ->
case aec_accounts:type(Account) of
basic ->
{ok, aec_accounts:nonce(Account)};
generalized -> generalized_account
end;
none ->
{error, account_not_found}
end.
-spec get_mempool_nonce(aec_keys:pubkey()) -> integer().
get_mempool_nonce(AccountPubkey) ->
case aec_tx_pool:get_max_nonce(AccountPubkey) of
{ok, Nonce} ->
Nonce;
undefined ->
-1
end.
|
f11806da4193e051075de797efc619f8bdc6470a4dbef3b4d1d30a5d423bec44 | hlship/cli-tools | impl.clj | (ns ^:no-doc net.lewisship.cli-tools.impl
"Private namespace for implementation details for new.lewisship.cli-tools, subject to change."
(:require [clojure.string :as str]
[io.aviso.ansi :as ansi]
[clojure.tools.cli :as cli]
[clj-fuzzy.metrics :as m]
[clojure.java.io :as io])
(:import (java.util.regex Pattern)))
(def prevent-exit false)
(def ^:dynamic *options* nil)
(def ^:private supported-keywords #{:in-order :as :args :options :command :summary :let :validate})
(defn exit
[status]
(when-not prevent-exit
(System/exit status))
;; If in testing mode ...
(throw (ex-info "Exit" {:status status})))
(defn- pad-left
[s pad n]
(let [x (- n (.length s))]
(if (pos? x)
(str (apply str (repeat x pad))
s)
s)))
;; better-cond has some dependencies
(defmacro cond-let
"An alternative to `clojure.core/cond` where instead of a test/expression pair, it is possible
to have a :let/binding vector pair."
[& clauses]
(cond (empty? clauses)
nil
(not (even? (count clauses)))
(throw (ex-info (str `cond-let " requires an even number of forms")
{:form &form
:meta (meta &form)}))
:else
(let [[test expr-or-binding-form & more-clauses] clauses]
(if (= :let test)
`(let ~expr-or-binding-form (cond-let ~@more-clauses))
;; Standard case
`(if ~test
~expr-or-binding-form
(cond-let ~@more-clauses))))))
(defn- println-err
[s]
(binding [*out* *err*] (println s)))
(defn- print-errors
[errors]
(when (seq errors)
(println)
(println (if (= 1 (count errors)) "Error:" "Errors:"))
(doseq [e errors]
(println (str " " (ansi/red e))))))
(defn fuzzy-matches
[s values]
(->> values
(map (fn [v]
(assoc (m/mra-comparison s v)
:word v)))
(filter :match)
(sort-by :simularity)
reverse
(map :word)))
(defn- arg-spec->str
[arg-spec]
(let [{:keys [label optional repeatable]} arg-spec]
(apply str
(when optional "[")
label
(when optional "]")
(when repeatable
(if optional "*" "+")))))
(defn- first-sentence
[s]
(-> s
str/trim
str/split-lines
first
(str/split #"\s*\.")
first
str/trim))
(defn- indentation-of-line
[line]
(if (str/blank? line)
[0 ""]
(let [[_ indent text] (re-matches #"(\s+)(.*)" line)]
(if
(some? indent)
[(count indent) text]
[0 line]))))
(defn- strip-indent
[strip-chars [indent text]]
(if (<= indent strip-chars)
text
(str (apply str (repeat (- indent strip-chars) " "))
text)))
(defn- cleanup-docstring
[docstring]
(let [docstring' (str/trim docstring)
lines (->> docstring'
str/split-lines
(map indentation-of-line))
non-zero-indents (->> lines
(map first)
(remove zero?))]
(if (empty? non-zero-indents)
docstring'
(let [indentation (reduce min non-zero-indents)]
(->> lines
(mapv #(strip-indent indentation %))
(str/join "\n"))))))
(defn print-summary
[command-map errors]
(let [{:keys [tool-name]} *options*
{:keys [command-name positional-specs command-doc summary]} command-map]
(apply println
(remove nil? (concat ["Usage:" (when tool-name
(ansi/bold tool-name))
(ansi/bold command-name)
"[OPTIONS]"]
(map arg-spec->str positional-specs))))
(when command-doc
(-> command-doc cleanup-docstring println))
;; There's always at least -h/--help:
(println "\nOptions:")
(println summary)
(when (seq positional-specs)
(let [label-width (->> positional-specs
(map :label)
(map count)
(reduce max)
;; For indentation
(+ 2))]
(println "\nArguments:")
(doseq [{:keys [label doc]} positional-specs]
(println (str
(pad-left label " " label-width)
": "
doc)))))
(print-errors errors)))
(defn- compile-positional-spec
"Positional specs are similar to option specs."
[command-name terms]
(let [[label & more] terms]
;; The label is required, then it's the optional documentation string
(if (-> more first string?)
(recur command-name
(into [label :doc (first more)]
(rest more)))
(let [spec-map (apply hash-map more)
{:keys [id]} spec-map
invalid-keys (-> spec-map
;; :id is actually set from the local symbol
(dissoc :id :doc :optional :repeatable :parse-fn :update-fn :assoc-fn :validate)
keys
sort)
{:keys [validate update-fn repeatable doc optional parse-fn assoc-fn]} spec-map
_ (when (and update-fn assoc-fn)
(throw (ex-info "May only specify one of :update-fn and :assoc-fn"
{:command-name command-name
:spec spec-map})))
assoc-fn' (cond
assoc-fn
assoc-fn
update-fn
(fn [m k v]
(update m k update-fn v))
repeatable
(fn [m k v]
(update m k (fnil conj []) v))
:else
assoc)]
(when (seq invalid-keys)
(println-err (format "Warning: command %s, argument %s contains invalid key(s): %s"
command-name
id
(str/join ", " invalid-keys))))
{:label label
:id id
:doc doc
:optional optional
:repeatable repeatable
:assoc-fn assoc-fn'
:parse-fn (or parse-fn identity)
:validate validate}))))
(defn- compile-positional-specs
[command-name specs]
(let [compiled (map #(compile-positional-spec command-name %) specs)]
(loop [[this-spec & more-specs] compiled
ids #{}
optional-id nil
repeatable-id nil]
;; Do some validation before returning the seq of positional specs (each a map)
(cond-let
(nil? this-spec)
compiled
:let [this-id (:id this-spec)]
(contains? ids this-id)
(throw (ex-info (str "Argument " this-id " of command " command-name " is not unique")
{:command-name command-name
:spec this-spec}))
;; Use the keyword ids, not the labels, since these are programmer errors, not a runtime error
(and optional-id
(not (:optional this-spec)))
(throw (ex-info (str "Argument " this-id " of command " command-name " is not optional but follows optional argument " optional-id)
{:command-name command-name
:spec this-spec}))
(some? repeatable-id)
(throw (ex-info (str "Argument " this-id " of command " command-name " follows repeatable argument " repeatable-id ", but only the final argument may be repeatable")
{:command-name command-name
:spec this-spec}))
:else
(recur more-specs
(conj ids this-id)
(or (when (:optional this-spec)
this-id)
optional-id)
(or (when (:repeatable this-spec)
this-id)
repeatable-id))))))
(defn- validate-argument
"Validates the value against the :validate vector of the spec, returning nil on
success, or the first error. A validation fn that returns false or throws an exception
is a failure."
[positional-spec value]
(loop [[validation-fn validation-msg & more] (:validate positional-spec)]
(when validation-fn
(if-not (try
(validation-fn value)
(catch Exception _ false))
validation-msg
(recur more)))))
(defn- parse-positional-arguments
"Parses the remaining command line arguments based on the positional specs.
Returns [map errors] where map is keyed on argument id, and errors is a seq of strings."
[positional-specs arguments]
(loop [state {:specs positional-specs
:remaining arguments
:argument-map {}
:errors []
:ignore-required false}]
(cond-let
:let [{:keys [specs remaining argument-map errors ignore-required]} state
[this-spec & more-specs] specs
{:keys [label repeatable optional parse-fn assoc-fn id]} this-spec
[this-argument & more-arguments] remaining]
;; specs and arguments exhausted
(and (nil? this-spec)
(nil? this-argument))
[argument-map errors]
Hit the first optional argument and out of command line arguments .
;; Since all subsequent arguments must be optional (verified by compile), we can stop here.
(and (nil? this-argument)
After the first argument is consumed by a repeatable , we treat the repeatable
;; command as optional.
(or optional ignore-required))
[argument-map errors]
;; Have a required argument and nothing to match it against.
(nil? this-argument)
[argument-map (conj errors (str "No value for required argument " label))]
;; Ran out of specs before running out of arguments.
(nil? this-spec)
[argument-map (conj errors (format "Unexpected argument '%s'" this-argument))]
:let [[parsed error] (try
[(parse-fn this-argument) nil]
(catch Exception t
[nil (format "Error in %s: %s" label (ex-message t))]))]
error
[argument-map (conj errors (str label ": " error))]
:let [validation-error (validate-argument this-spec parsed)]
(some? validation-error)
[argument-map (conj errors (str label ": " validation-error))]
:else
(let [state' (assoc state
;; Consume an argument
:remaining more-arguments
;; Apply the argument
:argument-map (assoc-fn argument-map id parsed))]
(recur (if repeatable
;; leave the last, repeatable spec in place
(assoc state' :ignore-required true)
;; Not repeatable; it has "consumed" an argument, so continue with
;; next spec and next argument
(assoc state' :specs more-specs)))))))
(defn abort
[s]
(println-err s)
(exit 1))
(defn- fail
[message state form]
(throw (ex-info message
{:state state
:form form})))
(defmulti consumer (fn [state _form]
Dispatch on the type of value to be consumed
(:consuming state))
:default ::default)
(defmethod consumer ::default
[state form]
(fail "Unexpected interface form" state form))
(defn- consume-keyword
[state form]
(consumer (assoc state :consuming :keyword) form))
(defmethod consumer :options
[state form]
(cond
(keyword? form)
(consume-keyword state form)
(not (simple-symbol? form))
(fail "Expected option name symbol" state form)
(contains? (-> state :option-symbols set) form)
(fail "Option and argument symbols must be unique" state form)
:else
(assoc state
:symbol form
:pending true
:consuming :option-def)))
(defn- append-id
[form id-symbol]
(let [id-keyword (-> id-symbol name keyword)]
(if (vector? form)
(conj form :id id-keyword)
;; Otherwise form is a symbol or a function call list
(list 'conj form :id id-keyword))))
(defn- valid-definition?
[form]
(or (vector? form) ; Normal case
(symbol? form) ; A symbol may be used when sharing options between commands
(list? form))) ; Or maybe it's a function call to generate the vector
(defmethod consumer :option-def
[state option-def]
(when-not (valid-definition? option-def)
(fail "Expected option definition" state option-def))
(let [option-symbol (:symbol state)
;; Explicitly add an :id to the option def to ensure that the value can be extracted
;; from the parsed :options map correctly via a keyword destructure
option-def' (append-id option-def option-symbol)]
(-> state
(update :command-options conj option-def')
(update :option-symbols conj option-symbol)
(dissoc :symbol)
(assoc :consuming :options
:pending false))))
(defmethod consumer :arg-def
;; A positional argument
[state arg-def]
(when-not (valid-definition? arg-def)
(fail "Expected argument definition" state arg-def))
(let [arg-symbol (:symbol state)
arg-def' (append-id arg-def arg-symbol)]
(-> state
(update :command-args conj arg-def')
(update :option-symbols conj arg-symbol)
(dissoc :symbol)
(assoc :consuming :args
:pending false))))
(defmethod consumer :args
[state form]
(cond
(keyword? form)
(consume-keyword state form)
(not (simple-symbol? form))
(fail "Expected argument name symbol" state form)
(contains? (-> state :option-symbols set) form)
(fail "Option and argument symbols must be unique" state form)
:else
(assoc state
:symbol form
:pending true
:consuming :arg-def)))
(defmethod consumer :keyword
[state form]
(when-not (keyword? form)
(fail "Expected a keyword" state form))
(when-not (contains? supported-keywords form)
(fail "Unexpected keyword" state form))
(assoc state :consuming form
:pending true))
(defn- complete-keyword
[state]
(assoc state :consuming :keyword
:pending false))
(defmethod consumer :as
[state form]
(when-not (simple-symbol? form)
(fail "Expected command-map symbol" state form))
(-> state
(assoc :command-map-symbol form)
complete-keyword))
(defmethod consumer :in-order
[state form]
(when-not (boolean? form)
(fail "Expected boolean after :in-order" state form))
(-> state
(assoc-in [:parse-opts-options :in-order] form)
complete-keyword))
(defmethod consumer :let
[state form]
(when-not (and (vector? form)
(even? (count form)))
(fail "Expected a vector of symbol/expression pairs" state form))
(-> state
(update :let-forms into form)
complete-keyword))
(defmethod consumer :command
[state form]
(when-not (string? form)
(fail "Expected string for name of command" state form))
(-> state
(assoc :command-name form)
complete-keyword))
(defmethod consumer :summary
[state form]
(when-not (string? form)
(fail "Expected string summary for command" state form))
(-> state
(assoc :command-summary form)
complete-keyword))
(defmethod consumer :validate
[state form]
(when-not (vector? form)
(fail "Expected a vector of test/message pairs" state form))
(when-not (-> form count even?)
(fail "Expected even number of tests and messages" state form))
(-> state
(update :validate-cases into form)
complete-keyword))
(defn compile-interface
"Parses the interface forms of a `defcommand` into a base command map; the interface
defines the options and positional arguments that will be parsed."
[command-doc forms]
(let [initial-state {:consuming :options
:option-symbols []
:command-options []
:command-args []
:let-forms []
:validate-cases []
:command-doc command-doc}
final-state (reduce consumer
initial-state forms)]
(when (:pending final-state)
(throw (ex-info "Missing data in interface definitions"
{:state final-state
:forms forms})))
(-> final-state
(dissoc :consuming :pending :symbol)
(update :command-options conj ["-h" "--help" "This command summary" :id :help]))))
(defn parse-cli
[command-name command-line-arguments command-map]
(cond-let
:let [{:keys [command-args command-options parse-opts-options]} command-map
{:keys [in-order]
:or {in-order false}} parse-opts-options
positional-specs (compile-positional-specs command-name command-args)
command-map' (merge command-map
{:command-name command-name
:positional-specs positional-specs}
(cli/parse-opts command-line-arguments command-options :in-order in-order))
{:keys [arguments options]} command-map']
Check for help first , as otherwise can get needless errors r.e . missing required positional arguments .
(:help options)
(do
(print-summary command-map' nil)
(exit 0))
:let [[positional-arguments arg-errors] (parse-positional-arguments positional-specs arguments)
errors (concat (:errors command-map')
arg-errors)]
(seq errors)
(do
(print-summary command-map' errors)
(exit 1))
:else
;; option and positional argument are verified to have unique symbols, so merge it all together
(update command-map' :options merge positional-arguments)))
(defn- command-summary
[v]
(let [v-meta (meta v)
{:keys [::command-summary]} v-meta]
(or command-summary
(-> v-meta :doc first-sentence))))
(defn show-tool-help
[]
(let [{:keys [tool-name tool-doc commands]} *options*]
(println "Usage:" (ansi/bold tool-name) "COMMAND ...")
(when tool-doc
(println)
(-> tool-doc cleanup-docstring println))
(println "\nCommands:")
(let [ks (-> commands keys sort)
width (+ 2 (apply max (map count ks)))]
(doseq [k ks]
(println (str (pad-left k " " width) ": "
(-> commands (get k) command-summary))))))
(exit 0))
(defn- to-matcher
[s]
(let [terms (str/split s #"\-")
re-pattern (apply str "(?i)"
(map-indexed (fn [i term]
(str
(when (pos? i) "\\-")
".*\\Q" term "\\E.*")
)
terms))
re (Pattern/compile re-pattern)]
(fn [input]
(re-matches re input))))
(defn find-matches
[s values]
(let [values' (set values)]
;; If can find an exact match, then keep just that;
(if (contains? values' s)
[s]
;; Otherwise, treat s as a match string and find any values that loosely match it.
(filter (to-matcher s) values'))))
(defn use-help-message
[tool-name commands]
(if (contains? commands "help")
(format ", use %s %s to list commands." (ansi/bold tool-name) (ansi/bold "help"))
""))
(defn dispatch
[{:keys [tool-name commands arguments] :as options}]
;; Capture these options for use by help command or when printing usage
(binding [*options* options]
(cond-let
:let [[command-name & command-args] arguments
help-var (get commands "help")]
(str/blank? tool-name)
(throw (ex-info "Must specify :tool-name" {:options options}))
;; In the normal case, when help is available, treat -h or --help the same as help
(and (#{"-h" "--help"} command-name)
help-var)
(help-var command-args)
(or (nil? command-name)
(str/starts-with? command-name "-"))
(abort (str tool-name ": no command provided" (use-help-message tool-name commands)))
:let [matching-names (find-matches command-name (keys commands))
match-count (count matching-names)]
(not= 1 match-count)
(let [body (if (pos? match-count)
(format "matches %d commands" match-count)
"is not a command")
fuzzy-match (first (fuzzy-matches command-name
(keys commands)))
suffix (when fuzzy-match
(format ", did you mean %s?"
(ansi/bold fuzzy-match)))
help? (contains? commands "help")
help-suffix (when help?
(str
(if suffix " Use " ", use ")
ansi/bold-red-font
tool-name
" "
"help"
ansi/reset-font
ansi/red-font
" to list commands."))]
(abort (str
ansi/bold-red-font
(format "%s: %s "
tool-name
command-name)
ansi/reset-font
ansi/red-font
body
suffix
help-suffix
ansi/reset-font)))
:else
(let [command-var (get commands (first matching-names))]
(apply command-var command-args)))
nil))
(defn command-map?
[arguments]
(and (= 1 (count arguments))
(-> arguments first map?)))
(defn default-tool-name
[]
(when-let [path (System/getProperty "babashka.file")]
(-> path io/file .getName)))
(defn invert-tests-in-validate-cases
[validate-cases]
(->> validate-cases
(partition 2)
(mapcat (fn [[test expr]]
[(list not test) expr]))))
| null | https://raw.githubusercontent.com/hlship/cli-tools/9441c1e0711c1aa4b1c606584c01844d3875eb75/src/net/lewisship/cli_tools/impl.clj | clojure | If in testing mode ...
better-cond has some dependencies
Standard case
There's always at least -h/--help:
For indentation
The label is required, then it's the optional documentation string
:id is actually set from the local symbol
Do some validation before returning the seq of positional specs (each a map)
Use the keyword ids, not the labels, since these are programmer errors, not a runtime error
specs and arguments exhausted
Since all subsequent arguments must be optional (verified by compile), we can stop here.
command as optional.
Have a required argument and nothing to match it against.
Ran out of specs before running out of arguments.
Consume an argument
Apply the argument
leave the last, repeatable spec in place
Not repeatable; it has "consumed" an argument, so continue with
next spec and next argument
Otherwise form is a symbol or a function call list
Normal case
A symbol may be used when sharing options between commands
Or maybe it's a function call to generate the vector
Explicitly add an :id to the option def to ensure that the value can be extracted
from the parsed :options map correctly via a keyword destructure
A positional argument
the interface
option and positional argument are verified to have unique symbols, so merge it all together
If can find an exact match, then keep just that;
Otherwise, treat s as a match string and find any values that loosely match it.
Capture these options for use by help command or when printing usage
In the normal case, when help is available, treat -h or --help the same as help | (ns ^:no-doc net.lewisship.cli-tools.impl
"Private namespace for implementation details for new.lewisship.cli-tools, subject to change."
(:require [clojure.string :as str]
[io.aviso.ansi :as ansi]
[clojure.tools.cli :as cli]
[clj-fuzzy.metrics :as m]
[clojure.java.io :as io])
(:import (java.util.regex Pattern)))
(def prevent-exit false)
(def ^:dynamic *options* nil)
(def ^:private supported-keywords #{:in-order :as :args :options :command :summary :let :validate})
(defn exit
[status]
(when-not prevent-exit
(System/exit status))
(throw (ex-info "Exit" {:status status})))
(defn- pad-left
[s pad n]
(let [x (- n (.length s))]
(if (pos? x)
(str (apply str (repeat x pad))
s)
s)))
(defmacro cond-let
"An alternative to `clojure.core/cond` where instead of a test/expression pair, it is possible
to have a :let/binding vector pair."
[& clauses]
(cond (empty? clauses)
nil
(not (even? (count clauses)))
(throw (ex-info (str `cond-let " requires an even number of forms")
{:form &form
:meta (meta &form)}))
:else
(let [[test expr-or-binding-form & more-clauses] clauses]
(if (= :let test)
`(let ~expr-or-binding-form (cond-let ~@more-clauses))
`(if ~test
~expr-or-binding-form
(cond-let ~@more-clauses))))))
(defn- println-err
[s]
(binding [*out* *err*] (println s)))
(defn- print-errors
[errors]
(when (seq errors)
(println)
(println (if (= 1 (count errors)) "Error:" "Errors:"))
(doseq [e errors]
(println (str " " (ansi/red e))))))
(defn fuzzy-matches
[s values]
(->> values
(map (fn [v]
(assoc (m/mra-comparison s v)
:word v)))
(filter :match)
(sort-by :simularity)
reverse
(map :word)))
(defn- arg-spec->str
[arg-spec]
(let [{:keys [label optional repeatable]} arg-spec]
(apply str
(when optional "[")
label
(when optional "]")
(when repeatable
(if optional "*" "+")))))
(defn- first-sentence
[s]
(-> s
str/trim
str/split-lines
first
(str/split #"\s*\.")
first
str/trim))
(defn- indentation-of-line
[line]
(if (str/blank? line)
[0 ""]
(let [[_ indent text] (re-matches #"(\s+)(.*)" line)]
(if
(some? indent)
[(count indent) text]
[0 line]))))
(defn- strip-indent
[strip-chars [indent text]]
(if (<= indent strip-chars)
text
(str (apply str (repeat (- indent strip-chars) " "))
text)))
(defn- cleanup-docstring
[docstring]
(let [docstring' (str/trim docstring)
lines (->> docstring'
str/split-lines
(map indentation-of-line))
non-zero-indents (->> lines
(map first)
(remove zero?))]
(if (empty? non-zero-indents)
docstring'
(let [indentation (reduce min non-zero-indents)]
(->> lines
(mapv #(strip-indent indentation %))
(str/join "\n"))))))
(defn print-summary
[command-map errors]
(let [{:keys [tool-name]} *options*
{:keys [command-name positional-specs command-doc summary]} command-map]
(apply println
(remove nil? (concat ["Usage:" (when tool-name
(ansi/bold tool-name))
(ansi/bold command-name)
"[OPTIONS]"]
(map arg-spec->str positional-specs))))
(when command-doc
(-> command-doc cleanup-docstring println))
(println "\nOptions:")
(println summary)
(when (seq positional-specs)
(let [label-width (->> positional-specs
(map :label)
(map count)
(reduce max)
(+ 2))]
(println "\nArguments:")
(doseq [{:keys [label doc]} positional-specs]
(println (str
(pad-left label " " label-width)
": "
doc)))))
(print-errors errors)))
(defn- compile-positional-spec
"Positional specs are similar to option specs."
[command-name terms]
(let [[label & more] terms]
(if (-> more first string?)
(recur command-name
(into [label :doc (first more)]
(rest more)))
(let [spec-map (apply hash-map more)
{:keys [id]} spec-map
invalid-keys (-> spec-map
(dissoc :id :doc :optional :repeatable :parse-fn :update-fn :assoc-fn :validate)
keys
sort)
{:keys [validate update-fn repeatable doc optional parse-fn assoc-fn]} spec-map
_ (when (and update-fn assoc-fn)
(throw (ex-info "May only specify one of :update-fn and :assoc-fn"
{:command-name command-name
:spec spec-map})))
assoc-fn' (cond
assoc-fn
assoc-fn
update-fn
(fn [m k v]
(update m k update-fn v))
repeatable
(fn [m k v]
(update m k (fnil conj []) v))
:else
assoc)]
(when (seq invalid-keys)
(println-err (format "Warning: command %s, argument %s contains invalid key(s): %s"
command-name
id
(str/join ", " invalid-keys))))
{:label label
:id id
:doc doc
:optional optional
:repeatable repeatable
:assoc-fn assoc-fn'
:parse-fn (or parse-fn identity)
:validate validate}))))
(defn- compile-positional-specs
[command-name specs]
(let [compiled (map #(compile-positional-spec command-name %) specs)]
(loop [[this-spec & more-specs] compiled
ids #{}
optional-id nil
repeatable-id nil]
(cond-let
(nil? this-spec)
compiled
:let [this-id (:id this-spec)]
(contains? ids this-id)
(throw (ex-info (str "Argument " this-id " of command " command-name " is not unique")
{:command-name command-name
:spec this-spec}))
(and optional-id
(not (:optional this-spec)))
(throw (ex-info (str "Argument " this-id " of command " command-name " is not optional but follows optional argument " optional-id)
{:command-name command-name
:spec this-spec}))
(some? repeatable-id)
(throw (ex-info (str "Argument " this-id " of command " command-name " follows repeatable argument " repeatable-id ", but only the final argument may be repeatable")
{:command-name command-name
:spec this-spec}))
:else
(recur more-specs
(conj ids this-id)
(or (when (:optional this-spec)
this-id)
optional-id)
(or (when (:repeatable this-spec)
this-id)
repeatable-id))))))
(defn- validate-argument
"Validates the value against the :validate vector of the spec, returning nil on
success, or the first error. A validation fn that returns false or throws an exception
is a failure."
[positional-spec value]
(loop [[validation-fn validation-msg & more] (:validate positional-spec)]
(when validation-fn
(if-not (try
(validation-fn value)
(catch Exception _ false))
validation-msg
(recur more)))))
(defn- parse-positional-arguments
"Parses the remaining command line arguments based on the positional specs.
Returns [map errors] where map is keyed on argument id, and errors is a seq of strings."
[positional-specs arguments]
(loop [state {:specs positional-specs
:remaining arguments
:argument-map {}
:errors []
:ignore-required false}]
(cond-let
:let [{:keys [specs remaining argument-map errors ignore-required]} state
[this-spec & more-specs] specs
{:keys [label repeatable optional parse-fn assoc-fn id]} this-spec
[this-argument & more-arguments] remaining]
(and (nil? this-spec)
(nil? this-argument))
[argument-map errors]
Hit the first optional argument and out of command line arguments .
(and (nil? this-argument)
After the first argument is consumed by a repeatable , we treat the repeatable
(or optional ignore-required))
[argument-map errors]
(nil? this-argument)
[argument-map (conj errors (str "No value for required argument " label))]
(nil? this-spec)
[argument-map (conj errors (format "Unexpected argument '%s'" this-argument))]
:let [[parsed error] (try
[(parse-fn this-argument) nil]
(catch Exception t
[nil (format "Error in %s: %s" label (ex-message t))]))]
error
[argument-map (conj errors (str label ": " error))]
:let [validation-error (validate-argument this-spec parsed)]
(some? validation-error)
[argument-map (conj errors (str label ": " validation-error))]
:else
(let [state' (assoc state
:remaining more-arguments
:argument-map (assoc-fn argument-map id parsed))]
(recur (if repeatable
(assoc state' :ignore-required true)
(assoc state' :specs more-specs)))))))
(defn abort
[s]
(println-err s)
(exit 1))
(defn- fail
[message state form]
(throw (ex-info message
{:state state
:form form})))
(defmulti consumer (fn [state _form]
Dispatch on the type of value to be consumed
(:consuming state))
:default ::default)
(defmethod consumer ::default
[state form]
(fail "Unexpected interface form" state form))
(defn- consume-keyword
[state form]
(consumer (assoc state :consuming :keyword) form))
(defmethod consumer :options
[state form]
(cond
(keyword? form)
(consume-keyword state form)
(not (simple-symbol? form))
(fail "Expected option name symbol" state form)
(contains? (-> state :option-symbols set) form)
(fail "Option and argument symbols must be unique" state form)
:else
(assoc state
:symbol form
:pending true
:consuming :option-def)))
(defn- append-id
[form id-symbol]
(let [id-keyword (-> id-symbol name keyword)]
(if (vector? form)
(conj form :id id-keyword)
(list 'conj form :id id-keyword))))
(defn- valid-definition?
[form]
(defmethod consumer :option-def
[state option-def]
(when-not (valid-definition? option-def)
(fail "Expected option definition" state option-def))
(let [option-symbol (:symbol state)
option-def' (append-id option-def option-symbol)]
(-> state
(update :command-options conj option-def')
(update :option-symbols conj option-symbol)
(dissoc :symbol)
(assoc :consuming :options
:pending false))))
(defmethod consumer :arg-def
[state arg-def]
(when-not (valid-definition? arg-def)
(fail "Expected argument definition" state arg-def))
(let [arg-symbol (:symbol state)
arg-def' (append-id arg-def arg-symbol)]
(-> state
(update :command-args conj arg-def')
(update :option-symbols conj arg-symbol)
(dissoc :symbol)
(assoc :consuming :args
:pending false))))
(defmethod consumer :args
[state form]
(cond
(keyword? form)
(consume-keyword state form)
(not (simple-symbol? form))
(fail "Expected argument name symbol" state form)
(contains? (-> state :option-symbols set) form)
(fail "Option and argument symbols must be unique" state form)
:else
(assoc state
:symbol form
:pending true
:consuming :arg-def)))
(defmethod consumer :keyword
[state form]
(when-not (keyword? form)
(fail "Expected a keyword" state form))
(when-not (contains? supported-keywords form)
(fail "Unexpected keyword" state form))
(assoc state :consuming form
:pending true))
(defn- complete-keyword
[state]
(assoc state :consuming :keyword
:pending false))
(defmethod consumer :as
[state form]
(when-not (simple-symbol? form)
(fail "Expected command-map symbol" state form))
(-> state
(assoc :command-map-symbol form)
complete-keyword))
(defmethod consumer :in-order
[state form]
(when-not (boolean? form)
(fail "Expected boolean after :in-order" state form))
(-> state
(assoc-in [:parse-opts-options :in-order] form)
complete-keyword))
(defmethod consumer :let
[state form]
(when-not (and (vector? form)
(even? (count form)))
(fail "Expected a vector of symbol/expression pairs" state form))
(-> state
(update :let-forms into form)
complete-keyword))
(defmethod consumer :command
[state form]
(when-not (string? form)
(fail "Expected string for name of command" state form))
(-> state
(assoc :command-name form)
complete-keyword))
(defmethod consumer :summary
[state form]
(when-not (string? form)
(fail "Expected string summary for command" state form))
(-> state
(assoc :command-summary form)
complete-keyword))
(defmethod consumer :validate
[state form]
(when-not (vector? form)
(fail "Expected a vector of test/message pairs" state form))
(when-not (-> form count even?)
(fail "Expected even number of tests and messages" state form))
(-> state
(update :validate-cases into form)
complete-keyword))
(defn compile-interface
defines the options and positional arguments that will be parsed."
[command-doc forms]
(let [initial-state {:consuming :options
:option-symbols []
:command-options []
:command-args []
:let-forms []
:validate-cases []
:command-doc command-doc}
final-state (reduce consumer
initial-state forms)]
(when (:pending final-state)
(throw (ex-info "Missing data in interface definitions"
{:state final-state
:forms forms})))
(-> final-state
(dissoc :consuming :pending :symbol)
(update :command-options conj ["-h" "--help" "This command summary" :id :help]))))
(defn parse-cli
[command-name command-line-arguments command-map]
(cond-let
:let [{:keys [command-args command-options parse-opts-options]} command-map
{:keys [in-order]
:or {in-order false}} parse-opts-options
positional-specs (compile-positional-specs command-name command-args)
command-map' (merge command-map
{:command-name command-name
:positional-specs positional-specs}
(cli/parse-opts command-line-arguments command-options :in-order in-order))
{:keys [arguments options]} command-map']
Check for help first , as otherwise can get needless errors r.e . missing required positional arguments .
(:help options)
(do
(print-summary command-map' nil)
(exit 0))
:let [[positional-arguments arg-errors] (parse-positional-arguments positional-specs arguments)
errors (concat (:errors command-map')
arg-errors)]
(seq errors)
(do
(print-summary command-map' errors)
(exit 1))
:else
(update command-map' :options merge positional-arguments)))
(defn- command-summary
[v]
(let [v-meta (meta v)
{:keys [::command-summary]} v-meta]
(or command-summary
(-> v-meta :doc first-sentence))))
(defn show-tool-help
[]
(let [{:keys [tool-name tool-doc commands]} *options*]
(println "Usage:" (ansi/bold tool-name) "COMMAND ...")
(when tool-doc
(println)
(-> tool-doc cleanup-docstring println))
(println "\nCommands:")
(let [ks (-> commands keys sort)
width (+ 2 (apply max (map count ks)))]
(doseq [k ks]
(println (str (pad-left k " " width) ": "
(-> commands (get k) command-summary))))))
(exit 0))
(defn- to-matcher
[s]
(let [terms (str/split s #"\-")
re-pattern (apply str "(?i)"
(map-indexed (fn [i term]
(str
(when (pos? i) "\\-")
".*\\Q" term "\\E.*")
)
terms))
re (Pattern/compile re-pattern)]
(fn [input]
(re-matches re input))))
(defn find-matches
[s values]
(let [values' (set values)]
(if (contains? values' s)
[s]
(filter (to-matcher s) values'))))
(defn use-help-message
[tool-name commands]
(if (contains? commands "help")
(format ", use %s %s to list commands." (ansi/bold tool-name) (ansi/bold "help"))
""))
(defn dispatch
[{:keys [tool-name commands arguments] :as options}]
(binding [*options* options]
(cond-let
:let [[command-name & command-args] arguments
help-var (get commands "help")]
(str/blank? tool-name)
(throw (ex-info "Must specify :tool-name" {:options options}))
(and (#{"-h" "--help"} command-name)
help-var)
(help-var command-args)
(or (nil? command-name)
(str/starts-with? command-name "-"))
(abort (str tool-name ": no command provided" (use-help-message tool-name commands)))
:let [matching-names (find-matches command-name (keys commands))
match-count (count matching-names)]
(not= 1 match-count)
(let [body (if (pos? match-count)
(format "matches %d commands" match-count)
"is not a command")
fuzzy-match (first (fuzzy-matches command-name
(keys commands)))
suffix (when fuzzy-match
(format ", did you mean %s?"
(ansi/bold fuzzy-match)))
help? (contains? commands "help")
help-suffix (when help?
(str
(if suffix " Use " ", use ")
ansi/bold-red-font
tool-name
" "
"help"
ansi/reset-font
ansi/red-font
" to list commands."))]
(abort (str
ansi/bold-red-font
(format "%s: %s "
tool-name
command-name)
ansi/reset-font
ansi/red-font
body
suffix
help-suffix
ansi/reset-font)))
:else
(let [command-var (get commands (first matching-names))]
(apply command-var command-args)))
nil))
(defn command-map?
[arguments]
(and (= 1 (count arguments))
(-> arguments first map?)))
(defn default-tool-name
[]
(when-let [path (System/getProperty "babashka.file")]
(-> path io/file .getName)))
(defn invert-tests-in-validate-cases
[validate-cases]
(->> validate-cases
(partition 2)
(mapcat (fn [[test expr]]
[(list not test) expr]))))
|
69c78cb32527fadd70dfcd59c3d32a89021781e72de966d3ab89ccfa5dba1de6 | alexwl/haskell-code-explorer | FileAfterPreprocessor.hs | 1 File.hs
2 File.hs
3 File.hs
4 File.hs
5 File.hs
6 File.hs
7 File.hs
# 1 "File1.hs" 1
1 File1.hs
2 File1.hs
# 14 "File1.hs"
# 1 "File2.hs" 1
1 File2.hs
2 File2.hs
3 File2.hs
4 File2.hs
5 File2.hs
# 15 "File1.hs" 2
15 File1.hs
16 File1.hs
# 9 "File.hs" 2
9 File.hs
# 18 "File.hs"
18 File.hs
19 File.hs
# 1 "File3.hs" 1
1 File3.hs
2 File3.hs
3 File3.hs
# 21 "File.hs" 2
21 File.hs
| null | https://raw.githubusercontent.com/alexwl/haskell-code-explorer/2f1c2a4c87ebd55b8a335bc4670eec875af8b4c4/test/data/FileAfterPreprocessor.hs | haskell | 1 File.hs
2 File.hs
3 File.hs
4 File.hs
5 File.hs
6 File.hs
7 File.hs
# 1 "File1.hs" 1
1 File1.hs
2 File1.hs
# 14 "File1.hs"
# 1 "File2.hs" 1
1 File2.hs
2 File2.hs
3 File2.hs
4 File2.hs
5 File2.hs
# 15 "File1.hs" 2
15 File1.hs
16 File1.hs
# 9 "File.hs" 2
9 File.hs
# 18 "File.hs"
18 File.hs
19 File.hs
# 1 "File3.hs" 1
1 File3.hs
2 File3.hs
3 File3.hs
# 21 "File.hs" 2
21 File.hs
| |
e5082c10ac31f4781b598cb5b594aff7a73dc72457c6270ed44e0a97d59f03d5 | ZHaskell/stdio | Checked.hs | # LANGUAGE TypeFamilies #
# LANGUAGE UnboxedTuples #
|
Module : Std . Data . Array . Checked
Description : Bounded checked boxed and unboxed arrays
Copyright : ( c ) , 2017 - 2019
License : BSD
Maintainer :
Stability : experimental
Portability : non - portable
This module provides exactly the same API with " Std . Data . Array " , but will throw an ' IndexOutOfBounds '
' ArrayException ' on bound check failure , it 's useful when debugging array algorithms : just swap this
module with " Std . Data . Array " , segmentation faults caused by out bound access will be turned into exceptions
with more informations .
Module : Std.Data.Array.Checked
Description : Bounded checked boxed and unboxed arrays
Copyright : (c) Dong Han, 2017-2019
License : BSD
Maintainer :
Stability : experimental
Portability : non-portable
This module provides exactly the same API with "Std.Data.Array", but will throw an 'IndexOutOfBounds'
'ArrayException' on bound check failure, it's useful when debugging array algorithms: just swap this
module with "Std.Data.Array", segmentation faults caused by out bound access will be turned into exceptions
with more informations.
-}
module Std.Data.Array.Checked
* re - export
A.Arr
, RealWorld
-- * Bound checked array operations
, newArr
, newArrWith
, readArr
, writeArr
, setArr
, indexArr
, indexArr'
, indexArrM
, freezeArr
, thawArr
, copyArr
, copyMutableArr
, moveArr
, cloneArr
, cloneMutableArr
, resizeMutableArr
, shrinkMutableArr
-- * No bound checked operations
, A.unsafeFreezeArr
, A.unsafeThawArr
, A.sameMutableArr
, A.sizeofArr
, A.sizeofMutableArr
, A.sameArr
-- * Boxed array type
, A.Array(..)
, A.MutableArray(..)
, A.SmallArray(..)
, A.SmallMutableArray(..)
, A.uninitialized
-- * Primitive array type
, A.PrimArray(..)
, A.MutablePrimArray(..)
-- * Bound checked primitive array operations
, newPinnedPrimArray, newAlignedPinnedPrimArray
, copyPrimArrayToPtr, copyMutablePrimArrayToPtr, copyPtrToMutablePrimArray
-- * No bound checked primitive array operations
, A.primArrayContents, A.mutablePrimArrayContents, A.withPrimArrayContents, A.withMutablePrimArrayContents
, A.isPrimArrayPinned, A.isMutablePrimArrayPinned
-- * Unlifted array type
, A.UnliftedArray(..)
, A.MutableUnliftedArray(..)
, A.PrimUnlifted(..)
-- * The 'ArrayException' type
, ArrayException(..)
) where
import Control.Exception (ArrayException (..), throw)
import Control.Monad.Primitive
import Data.Primitive.Types
import GHC.Ptr (Ptr (..))
import GHC.Stack
import qualified Std.Data.Array as A
check :: HasCallStack => Bool -> a -> a
{-# INLINE check #-}
check True x = x
check False _ = throw (IndexOutOfBounds $ show callStack)
newArr :: (A.Arr marr arr a, PrimMonad m, PrimState m ~ s, HasCallStack)
=> Int -> m (marr s a)
newArr n = check (n>=0) (A.newArr n)
# INLINE newArr #
newArrWith :: (A.Arr marr arr a, PrimMonad m, PrimState m ~ s, HasCallStack)
=> Int -> a -> m (marr s a)
newArrWith n x = check (n>=0) (A.newArrWith n x)
# INLINE newArrWith #
readArr :: (A.Arr marr arr a, PrimMonad m, PrimState m ~ s, HasCallStack)
=> marr s a -> Int -> m a
readArr marr i = do
siz <- A.sizeofMutableArr marr
check
(i>=0 && i<siz)
(A.readArr marr i)
# INLINE readArr #
writeArr :: (A.Arr marr arr a, PrimMonad m, PrimState m ~ s, HasCallStack)
=> marr s a -> Int -> a -> m ()
writeArr marr i x = do
siz <- A.sizeofMutableArr marr
check
(i>=0 && i<siz)
(A.writeArr marr i x)
# INLINE writeArr #
setArr :: (A.Arr marr arr a, PrimMonad m, PrimState m ~ s, HasCallStack)
=> marr s a -> Int -> Int -> a -> m ()
setArr marr s l x = do
siz <- A.sizeofMutableArr marr
check
(s>=0 && l>=0 && (s+l)<=siz)
(A.setArr marr s l x)
# INLINE setArr #
indexArr :: (A.Arr marr arr a, HasCallStack)
=> arr a -> Int -> a
indexArr arr i = check
(i>=0 && i<A.sizeofArr arr)
(A.indexArr arr i)
# INLINE indexArr #
indexArr' :: (A.Arr marr arr a, HasCallStack)
=> arr a -> Int -> (# a #)
indexArr' arr i =
if (i>=0 && i<A.sizeofArr arr)
then A.indexArr' arr i
else throw (IndexOutOfBounds $ show callStack)
{-# INLINE indexArr' #-}
indexArrM :: (A.Arr marr arr a, Monad m, HasCallStack)
=> arr a -> Int -> m a
indexArrM arr i = check
(i>=0 && i<A.sizeofArr arr)
(A.indexArrM arr i)
# INLINE indexArrM #
freezeArr :: (A.Arr marr arr a, PrimMonad m, PrimState m ~ s, HasCallStack)
=> marr s a -> Int -> Int -> m (arr a)
freezeArr marr s l = do
siz <- A.sizeofMutableArr marr
check
(s>=0 && l>=0 && (s+l)<=siz)
(A.freezeArr marr s l)
# INLINE freezeArr #
thawArr :: (A.Arr marr arr a, PrimMonad m, PrimState m ~ s, HasCallStack)
=> arr a -> Int -> Int -> m (marr s a)
thawArr arr s l = check
(s>=0 && l>=0 && (s+l)<=A.sizeofArr arr)
(A.thawArr arr s l)
# INLINE thawArr #
copyArr :: (A.Arr marr arr a, PrimMonad m, PrimState m ~ s, HasCallStack)
=> marr s a -> Int -> arr a -> Int -> Int -> m ()
copyArr marr s1 arr s2 l = do
siz <- A.sizeofMutableArr marr
check
(s1>=0 && s2>=0 && l>=0 && (s2+l)<=A.sizeofArr arr && (s1+l)<=siz)
(A.copyArr marr s1 arr s2 l)
# INLINE copyArr #
copyMutableArr :: (A.Arr marr arr a, PrimMonad m, PrimState m ~ s, HasCallStack)
=> marr s a -> Int -> marr s a -> Int -> Int -> m ()
copyMutableArr marr1 s1 marr2 s2 l = do
siz1 <- A.sizeofMutableArr marr1
siz2 <- A.sizeofMutableArr marr2
check
(s1>=0 && s2>=0 && l>=0 && (s2+l)<=siz2 && (s1+l)<=siz1)
(A.copyMutableArr marr1 s1 marr2 s2 l)
# INLINE copyMutableArr #
moveArr :: (A.Arr marr arr a, PrimMonad m, PrimState m ~ s, HasCallStack)
=> marr s a -> Int -> marr s a -> Int -> Int -> m ()
moveArr marr1 s1 marr2 s2 l = do
siz1 <- A.sizeofMutableArr marr1
siz2 <- A.sizeofMutableArr marr2
check
(s1>=0 && s2>=0 && l>=0 && (s2+l)<=siz2 && (s1+l)<=siz1)
(A.copyMutableArr marr1 s1 marr2 s2 l)
# INLINE moveArr #
cloneArr :: (A.Arr marr arr a, HasCallStack)
=> arr a -> Int -> Int -> arr a
cloneArr arr s l = check
(s>=0 && l>=0 && (s+l)<=A.sizeofArr arr)
(A.cloneArr arr s l)
# INLINE cloneArr #
cloneMutableArr :: (A.Arr marr arr a, PrimMonad m, PrimState m ~ s, HasCallStack)
=> marr s a -> Int -> Int -> m (marr s a)
cloneMutableArr marr s l = do
siz <- A.sizeofMutableArr marr
check
(s>=0 && l>=0 && (s+l)<=siz)
(A.cloneMutableArr marr s l)
# INLINE cloneMutableArr #
resizeMutableArr :: (A.Arr marr arr a, PrimMonad m, PrimState m ~ s, HasCallStack)
=> marr s a -> Int -> m (marr s a)
resizeMutableArr marr n = check
(n>=0)
(A.resizeMutableArr marr n)
# INLINE resizeMutableArr #
-- | New size should be >= 0, and <= original size.
--
shrinkMutableArr :: (A.Arr marr arr a, PrimMonad m, PrimState m ~ s, HasCallStack)
=> marr s a -> Int -> m ()
shrinkMutableArr marr n = do
siz <- A.sizeofMutableArr marr
check
(n>=0 && n<=siz)
(A.shrinkMutableArr marr n)
# INLINE shrinkMutableArr #
--------------------------------------------------------------------------------
-- | Create a /pinned/ byte array of the specified size,
-- The garbage collector is guaranteed not to move it.
newPinnedPrimArray :: (PrimMonad m, Prim a, HasCallStack)
=> Int -> m (A.MutablePrimArray (PrimState m) a)
# INLINE newPinnedPrimArray #
newPinnedPrimArray n =
check (n>=0) (A.newPinnedPrimArray n)
-- | Create a /pinned/ primitive array of the specified size and respect given primitive type's
-- alignment. The garbage collector is guaranteed not to move it.
--
newAlignedPinnedPrimArray :: (PrimMonad m, Prim a, HasCallStack)
=> Int -> m (A.MutablePrimArray (PrimState m) a)
# INLINE newAlignedPinnedPrimArray #
newAlignedPinnedPrimArray n =
check (n>=0) (A.newAlignedPinnedPrimArray n)
copyPrimArrayToPtr :: (PrimMonad m, Prim a, HasCallStack)
=> Ptr a
-> A.PrimArray a
-> Int
-> Int
-> m ()
# INLINE copyPrimArrayToPtr #
copyPrimArrayToPtr ptr arr s l = check
(s>=0 && l>=0 && (s+l)<=A.sizeofArr arr)
(A.copyPrimArrayToPtr ptr arr s l)
copyMutablePrimArrayToPtr :: (PrimMonad m, Prim a, HasCallStack)
=> Ptr a
-> A.MutablePrimArray (PrimState m) a
-> Int
-> Int
-> m ()
# INLINE copyMutablePrimArrayToPtr #
copyMutablePrimArrayToPtr ptr marr s l = do
siz <- A.sizeofMutableArr marr
check
(s>=0 && l>=0 && (s+l)<=siz)
(A.copyMutablePrimArrayToPtr ptr marr s l)
copyPtrToMutablePrimArray :: (PrimMonad m, Prim a, HasCallStack)
=> A.MutablePrimArray (PrimState m) a
-> Int
-> Ptr a
-> Int
-> m ()
{-# INLINE copyPtrToMutablePrimArray #-}
copyPtrToMutablePrimArray marr s ptr l = do
siz <- A.sizeofMutableArr marr
check
(s>=0 && l>=0 && (s+l)<=siz)
(A.copyPtrToMutablePrimArray marr s ptr l)
| null | https://raw.githubusercontent.com/ZHaskell/stdio/7887b9413dc9feb957ddcbea96184f904cf37c12/std-data/Std/Data/Array/Checked.hs | haskell | * Bound checked array operations
* No bound checked operations
* Boxed array type
* Primitive array type
* Bound checked primitive array operations
* No bound checked primitive array operations
* Unlifted array type
* The 'ArrayException' type
# INLINE check #
# INLINE indexArr' #
| New size should be >= 0, and <= original size.
------------------------------------------------------------------------------
| Create a /pinned/ byte array of the specified size,
The garbage collector is guaranteed not to move it.
| Create a /pinned/ primitive array of the specified size and respect given primitive type's
alignment. The garbage collector is guaranteed not to move it.
# INLINE copyPtrToMutablePrimArray # | # LANGUAGE TypeFamilies #
# LANGUAGE UnboxedTuples #
|
Module : Std . Data . Array . Checked
Description : Bounded checked boxed and unboxed arrays
Copyright : ( c ) , 2017 - 2019
License : BSD
Maintainer :
Stability : experimental
Portability : non - portable
This module provides exactly the same API with " Std . Data . Array " , but will throw an ' IndexOutOfBounds '
' ArrayException ' on bound check failure , it 's useful when debugging array algorithms : just swap this
module with " Std . Data . Array " , segmentation faults caused by out bound access will be turned into exceptions
with more informations .
Module : Std.Data.Array.Checked
Description : Bounded checked boxed and unboxed arrays
Copyright : (c) Dong Han, 2017-2019
License : BSD
Maintainer :
Stability : experimental
Portability : non-portable
This module provides exactly the same API with "Std.Data.Array", but will throw an 'IndexOutOfBounds'
'ArrayException' on bound check failure, it's useful when debugging array algorithms: just swap this
module with "Std.Data.Array", segmentation faults caused by out bound access will be turned into exceptions
with more informations.
-}
module Std.Data.Array.Checked
* re - export
A.Arr
, RealWorld
, newArr
, newArrWith
, readArr
, writeArr
, setArr
, indexArr
, indexArr'
, indexArrM
, freezeArr
, thawArr
, copyArr
, copyMutableArr
, moveArr
, cloneArr
, cloneMutableArr
, resizeMutableArr
, shrinkMutableArr
, A.unsafeFreezeArr
, A.unsafeThawArr
, A.sameMutableArr
, A.sizeofArr
, A.sizeofMutableArr
, A.sameArr
, A.Array(..)
, A.MutableArray(..)
, A.SmallArray(..)
, A.SmallMutableArray(..)
, A.uninitialized
, A.PrimArray(..)
, A.MutablePrimArray(..)
, newPinnedPrimArray, newAlignedPinnedPrimArray
, copyPrimArrayToPtr, copyMutablePrimArrayToPtr, copyPtrToMutablePrimArray
, A.primArrayContents, A.mutablePrimArrayContents, A.withPrimArrayContents, A.withMutablePrimArrayContents
, A.isPrimArrayPinned, A.isMutablePrimArrayPinned
, A.UnliftedArray(..)
, A.MutableUnliftedArray(..)
, A.PrimUnlifted(..)
, ArrayException(..)
) where
import Control.Exception (ArrayException (..), throw)
import Control.Monad.Primitive
import Data.Primitive.Types
import GHC.Ptr (Ptr (..))
import GHC.Stack
import qualified Std.Data.Array as A
check :: HasCallStack => Bool -> a -> a
check True x = x
check False _ = throw (IndexOutOfBounds $ show callStack)
newArr :: (A.Arr marr arr a, PrimMonad m, PrimState m ~ s, HasCallStack)
=> Int -> m (marr s a)
newArr n = check (n>=0) (A.newArr n)
# INLINE newArr #
newArrWith :: (A.Arr marr arr a, PrimMonad m, PrimState m ~ s, HasCallStack)
=> Int -> a -> m (marr s a)
newArrWith n x = check (n>=0) (A.newArrWith n x)
# INLINE newArrWith #
readArr :: (A.Arr marr arr a, PrimMonad m, PrimState m ~ s, HasCallStack)
=> marr s a -> Int -> m a
readArr marr i = do
siz <- A.sizeofMutableArr marr
check
(i>=0 && i<siz)
(A.readArr marr i)
# INLINE readArr #
writeArr :: (A.Arr marr arr a, PrimMonad m, PrimState m ~ s, HasCallStack)
=> marr s a -> Int -> a -> m ()
writeArr marr i x = do
siz <- A.sizeofMutableArr marr
check
(i>=0 && i<siz)
(A.writeArr marr i x)
# INLINE writeArr #
setArr :: (A.Arr marr arr a, PrimMonad m, PrimState m ~ s, HasCallStack)
=> marr s a -> Int -> Int -> a -> m ()
setArr marr s l x = do
siz <- A.sizeofMutableArr marr
check
(s>=0 && l>=0 && (s+l)<=siz)
(A.setArr marr s l x)
# INLINE setArr #
indexArr :: (A.Arr marr arr a, HasCallStack)
=> arr a -> Int -> a
indexArr arr i = check
(i>=0 && i<A.sizeofArr arr)
(A.indexArr arr i)
# INLINE indexArr #
indexArr' :: (A.Arr marr arr a, HasCallStack)
=> arr a -> Int -> (# a #)
indexArr' arr i =
if (i>=0 && i<A.sizeofArr arr)
then A.indexArr' arr i
else throw (IndexOutOfBounds $ show callStack)
indexArrM :: (A.Arr marr arr a, Monad m, HasCallStack)
=> arr a -> Int -> m a
indexArrM arr i = check
(i>=0 && i<A.sizeofArr arr)
(A.indexArrM arr i)
# INLINE indexArrM #
freezeArr :: (A.Arr marr arr a, PrimMonad m, PrimState m ~ s, HasCallStack)
=> marr s a -> Int -> Int -> m (arr a)
freezeArr marr s l = do
siz <- A.sizeofMutableArr marr
check
(s>=0 && l>=0 && (s+l)<=siz)
(A.freezeArr marr s l)
# INLINE freezeArr #
thawArr :: (A.Arr marr arr a, PrimMonad m, PrimState m ~ s, HasCallStack)
=> arr a -> Int -> Int -> m (marr s a)
thawArr arr s l = check
(s>=0 && l>=0 && (s+l)<=A.sizeofArr arr)
(A.thawArr arr s l)
# INLINE thawArr #
copyArr :: (A.Arr marr arr a, PrimMonad m, PrimState m ~ s, HasCallStack)
=> marr s a -> Int -> arr a -> Int -> Int -> m ()
copyArr marr s1 arr s2 l = do
siz <- A.sizeofMutableArr marr
check
(s1>=0 && s2>=0 && l>=0 && (s2+l)<=A.sizeofArr arr && (s1+l)<=siz)
(A.copyArr marr s1 arr s2 l)
# INLINE copyArr #
copyMutableArr :: (A.Arr marr arr a, PrimMonad m, PrimState m ~ s, HasCallStack)
=> marr s a -> Int -> marr s a -> Int -> Int -> m ()
copyMutableArr marr1 s1 marr2 s2 l = do
siz1 <- A.sizeofMutableArr marr1
siz2 <- A.sizeofMutableArr marr2
check
(s1>=0 && s2>=0 && l>=0 && (s2+l)<=siz2 && (s1+l)<=siz1)
(A.copyMutableArr marr1 s1 marr2 s2 l)
# INLINE copyMutableArr #
moveArr :: (A.Arr marr arr a, PrimMonad m, PrimState m ~ s, HasCallStack)
=> marr s a -> Int -> marr s a -> Int -> Int -> m ()
moveArr marr1 s1 marr2 s2 l = do
siz1 <- A.sizeofMutableArr marr1
siz2 <- A.sizeofMutableArr marr2
check
(s1>=0 && s2>=0 && l>=0 && (s2+l)<=siz2 && (s1+l)<=siz1)
(A.copyMutableArr marr1 s1 marr2 s2 l)
# INLINE moveArr #
cloneArr :: (A.Arr marr arr a, HasCallStack)
=> arr a -> Int -> Int -> arr a
cloneArr arr s l = check
(s>=0 && l>=0 && (s+l)<=A.sizeofArr arr)
(A.cloneArr arr s l)
# INLINE cloneArr #
cloneMutableArr :: (A.Arr marr arr a, PrimMonad m, PrimState m ~ s, HasCallStack)
=> marr s a -> Int -> Int -> m (marr s a)
cloneMutableArr marr s l = do
siz <- A.sizeofMutableArr marr
check
(s>=0 && l>=0 && (s+l)<=siz)
(A.cloneMutableArr marr s l)
# INLINE cloneMutableArr #
resizeMutableArr :: (A.Arr marr arr a, PrimMonad m, PrimState m ~ s, HasCallStack)
=> marr s a -> Int -> m (marr s a)
resizeMutableArr marr n = check
(n>=0)
(A.resizeMutableArr marr n)
# INLINE resizeMutableArr #
shrinkMutableArr :: (A.Arr marr arr a, PrimMonad m, PrimState m ~ s, HasCallStack)
=> marr s a -> Int -> m ()
shrinkMutableArr marr n = do
siz <- A.sizeofMutableArr marr
check
(n>=0 && n<=siz)
(A.shrinkMutableArr marr n)
# INLINE shrinkMutableArr #
newPinnedPrimArray :: (PrimMonad m, Prim a, HasCallStack)
=> Int -> m (A.MutablePrimArray (PrimState m) a)
# INLINE newPinnedPrimArray #
newPinnedPrimArray n =
check (n>=0) (A.newPinnedPrimArray n)
newAlignedPinnedPrimArray :: (PrimMonad m, Prim a, HasCallStack)
=> Int -> m (A.MutablePrimArray (PrimState m) a)
# INLINE newAlignedPinnedPrimArray #
newAlignedPinnedPrimArray n =
check (n>=0) (A.newAlignedPinnedPrimArray n)
copyPrimArrayToPtr :: (PrimMonad m, Prim a, HasCallStack)
=> Ptr a
-> A.PrimArray a
-> Int
-> Int
-> m ()
# INLINE copyPrimArrayToPtr #
copyPrimArrayToPtr ptr arr s l = check
(s>=0 && l>=0 && (s+l)<=A.sizeofArr arr)
(A.copyPrimArrayToPtr ptr arr s l)
copyMutablePrimArrayToPtr :: (PrimMonad m, Prim a, HasCallStack)
=> Ptr a
-> A.MutablePrimArray (PrimState m) a
-> Int
-> Int
-> m ()
# INLINE copyMutablePrimArrayToPtr #
copyMutablePrimArrayToPtr ptr marr s l = do
siz <- A.sizeofMutableArr marr
check
(s>=0 && l>=0 && (s+l)<=siz)
(A.copyMutablePrimArrayToPtr ptr marr s l)
copyPtrToMutablePrimArray :: (PrimMonad m, Prim a, HasCallStack)
=> A.MutablePrimArray (PrimState m) a
-> Int
-> Ptr a
-> Int
-> m ()
copyPtrToMutablePrimArray marr s ptr l = do
siz <- A.sizeofMutableArr marr
check
(s>=0 && l>=0 && (s+l)<=siz)
(A.copyPtrToMutablePrimArray marr s ptr l)
|
07421d7cd7bb2cd4ccb7bd3473db3d135eac6affa404cb8d4409bf6b1b9a3483 | aligusnet/astro | SunInternalsTest.hs | module Data.Astro.Sun.SunInternalsTest
(
tests
)
where
import Test.Framework (testGroup)
import Test.Framework.Providers.HUnit
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Test.HUnit
import Test.HUnit.Approx
import Test.QuickCheck
import Data.Astro.Utils (reduceToZeroRange)
import Data.Astro.Sun.SunInternals
-- | Test tree for 'solveKeplerEquation': one known-value HUnit case plus a
-- QuickCheck property.
tests = [testGroup "solveKeplerEquation" [knownValueCase, equationProperty]]
  where
    knownValueCase =
      testCase "a" $
        assertApproxEqual "" 1e-7 3.5220041
          (solveKeplerEquation 0.016714 3.528210 1e-7)
    equationProperty = testProperty "property" prop_solveKeplerEquation
-- | The root returned by 'solveKeplerEquation' satisfies Kepler's equation
-- @x - e * sin x = m@ to within the requested tolerance.  Both test inputs
-- are derived from the first generated Double; the second one only pins the
-- types via the @types@ binding.
prop_solveKeplerEquation (e, m) =
    abs residual < eps
  where
    types = ((e, m) :: (Double, Double))
    eps = 1e-7
    (_, fracPart) = properFraction e
    eccentricity = fracPart * 0.01 + 0.1
    meanAnomaly = reduceToZeroRange 17 e - 7
    root = solveKeplerEquation eccentricity meanAnomaly eps
    residual = root - eccentricity * sin root - meanAnomaly
| null | https://raw.githubusercontent.com/aligusnet/astro/a8c951885061ccef98299af053fdc1a1cc808f17/test/Data/Astro/Sun/SunInternalsTest.hs | haskell | module Data.Astro.Sun.SunInternalsTest
(
tests
)
where
import Test.Framework (testGroup)
import Test.Framework.Providers.HUnit
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Test.HUnit
import Test.HUnit.Approx
import Test.QuickCheck
import Data.Astro.Utils (reduceToZeroRange)
import Data.Astro.Sun.SunInternals
tests = [testGroup "solveKeplerEquation" [
testCase "a" $ assertApproxEqual ""
1e-7
3.5220041
(solveKeplerEquation 0.016714 3.528210 1e-7)
, testProperty "property" prop_solveKeplerEquation
]
]
prop_solveKeplerEquation (e, m) =
let eps = 1e-7
(_, e') = properFraction e
e'' =( e' * 0.01) + 0.1
m' = (reduceToZeroRange 17 e) - 7
x = solveKeplerEquation e'' m' eps
dx = x - e''*(sin x) - m'
in abs dx < eps
where types = ((e,m)::(Double, Double))
| |
e82cfbf55a9a5618b4b2c772d322fb4b4d7152b7099ec45bca79e5ca847c6662 | formal-land/coq-of-ocaml | gadts_record.ml | type 'a term =
| T_Int : int -> int term
| T_String : string -> string term
| T_Pair : 'a term * 'b term -> ('a * 'b) term
| T_Rec : { x : 'a term; y : 'b } -> ('a * 'b) term
[@@coq_tag_gadt]
(* Evaluate a GADT-indexed term: matching on each constructor refines the
   locally abstract type [a], so every arm returns exactly the type the
   term's index promises. *)
let rec interp : type a. a term -> a = function
  | T_String s -> s
  | T_Int n -> n
  | T_Pair (left, right) -> (interp left, interp right)
  | T_Rec { x; y } -> (interp x, y)
| null | https://raw.githubusercontent.com/formal-land/coq-of-ocaml/c9c86b08eb19d7fd023f48029cc5f9bf53f6a11c/tests/gadts_record.ml | ocaml | type 'a term =
| T_Int : int -> int term
| T_String : string -> string term
| T_Pair : 'a term * 'b term -> ('a * 'b) term
| T_Rec : { x : 'a term; y : 'b } -> ('a * 'b) term
[@@coq_tag_gadt]
let rec interp : type a. a term -> a = function
| T_Int n -> n
| T_String s -> s
| T_Pair (p1, p2) -> (interp p1, interp p2)
| T_Rec {x; y} -> (interp x, y)
| |
2e0cbaf9041aaf992ab61194fa7e9dd1868f04efa158841667b1b3db1e73b14f | cblp/python5 | Control.hs |
Python5 — a hypothetic language
Copyright ( C ) 2015 -
This program is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
This program is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with this program . If not , see < / > .
Python5 — a hypothetic language
Copyright (C) 2015 - Yuriy Syrovetskiy
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see </>.
-}
# LANGUAGE NoImplicitPrelude #
module Control where
import Prelude ( ($) )
import Python5.Builtin
import Test.Tasty.HUnit.X
spec :: TestTree
spec = testCase "var mutates in for" $ do
let numbers = [2, 4, 6, 8]
product <- var 1
for numbers `by` \number ->
product *= number
val product `assertEval` int(384)
| null | https://raw.githubusercontent.com/cblp/python5/897b7bbb7b522fa5653eff10b9ae616a4e01b6ff/python5/tests/Control.hs | haskell |
Python5 — a hypothetic language
Copyright ( C ) 2015 -
This program is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
This program is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with this program . If not , see < / > .
Python5 — a hypothetic language
Copyright (C) 2015 - Yuriy Syrovetskiy
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see </>.
-}
# LANGUAGE NoImplicitPrelude #
module Control where
import Prelude ( ($) )
import Python5.Builtin
import Test.Tasty.HUnit.X
spec :: TestTree
spec = testCase "var mutates in for" $ do
let numbers = [2, 4, 6, 8]
product <- var 1
for numbers `by` \number ->
product *= number
val product `assertEval` int(384)
| |
0d815588b32e0d63aa6946130d41a5051d5a23e8951cb65e49272d09edd80774 | justinmeiners/exercises | 2_42.scm | (use srfi-1)
;; The list of consecutive integers from START to END inclusive;
;; '() when START > END.
(define (enumerate-interval start end)
  (let loop ((i end) (acc '()))
    (if (< i start)
        acc
        (loop (- i 1) (cons i acc)))))
;; Map PROC over SEQ and append the resulting lists into one flat list.
(define (flatmap proc seq)
  (apply append (map proc seq)))
;; A board with no queens placed on it yet.
(define empty-board '())
;; Place a queen at (ROW . COLUMN) by consing it onto POSITIONS, so the most
;; recently added queen is always (car positions).
(define (adjoin-position row column positions)
  (cons (cons row column) positions)) ; append to front
;; Does POSITION share a row (the car of a position pair) with any queen
;; in OTHERS?
(define (collides-row? position others)
  (if (null? others)
      #f
      (if (= (car position) (caar others))
          #t
          (collides-row? position (cdr others)))))
;; Does POSITION lie on a diagonal of any queen in OTHERS?  Two queens are
;; diagonal exactly when |delta-row / delta-col| = 1 (exact rational
;; division; positions occupy distinct columns by construction).
;; The misspelled name is kept: callers depend on it.
(define (collides-diagnol? position others)
  (if (null? others)
      #f
      (let ((drow (- (car position) (caar others)))
            (dcol (- (cdr position) (cdar others))))
        (if (= (abs (/ drow dcol)) 1)
            #t
            (collides-diagnol? position (cdr others))))))
;; A partial placement is safe when the newest queen, (car positions),
;; attacks none of the earlier ones.  (Slightly modified from the exercise:
;; the column index k is unnecessary because the queen to test is always at
;; the front of the list.)
(define (safe? positions)
  (or (null? positions)
      (let ((newest (car positions))
            (older (cdr positions)))
        (not (or (collides-row? newest older)
                 (collides-diagnol? newest older))))))
;; Solve the N-queens puzzle (SICP exercise 2.42): return every placement of
;; BOARD-SIZE queens, one per column, such that no two queens attack each
;; other.  Each placement is a list of (row . column) pairs, newest first.
(define (queens board-size)
  ;; All safe placements that occupy columns 1..k.
  (define (queen-cols k)
    (if (= k 0)
        (list empty-board)
        (filter
         (lambda (positions) (safe? positions))
         ;; Extend every safe (k-1)-column placement with each candidate row
         ;; in column k, then keep only the safe extensions.
         (flatmap
          (lambda (rest-of-queens) (map (lambda (new-row)
                                          (adjoin-position
                                           new-row k rest-of-queens))
                                        (enumerate-interval 1 board-size)))
          (queen-cols (- k 1))))))
  (queen-cols board-size))
;(display (safe? (list (cons 1 1) (cons -3 -2) (cons 2 3))))
;; Print the number of solutions for the standard 8x8 board (known: 92).
(display (length (queens 8)))
| null | https://raw.githubusercontent.com/justinmeiners/exercises/9491bc16925eae12e048ccd3f424b870ebdc73aa/sicp/2/2_42.scm | scheme | append to front
I slightly modified this from the excercise
k is unnecessary since the one to test is at the front | (use srfi-1)
(define (enumerate-interval start end)
(if (> start end)
'()
(cons start (enumerate-interval (+ 1 start) end))))
(define (flatmap proc seq)
(fold-right append '() (map proc seq)))
(define empty-board '())
(define (adjoin-position row column positions)
(define (collides-row? position others)
(cond ((null? others) #f)
((= (car position) (caar others)) #t)
(else (collides-row? position (cdr others)))))
(define (collides-diagnol? position others)
(cond ((null? others) #f)
((= (abs (/
(- (car position) (caar others))
(- (cdr position) (cdar others))))
1) #t)
(else (collides-diagnol? position (cdr others)))))
(define (safe? positions)
(if (null? positions)
#t
(not (or
(collides-row? (car positions) (cdr positions))
(collides-diagnol? (car positions) (cdr positions))))))
(define (queens board-size)
(define (queen-cols k)
(if (= k 0)
(list empty-board)
(filter
(lambda (positions) (safe? positions))
(flatmap
(lambda (rest-of-queens) (map (lambda (new-row)
(adjoin-position
new-row k rest-of-queens))
(enumerate-interval 1 board-size)))
(queen-cols (- k 1))))))
(queen-cols board-size))
( display ( safe ? ( list ( cons 1 1 ) ( cons -3 -2 ) ( cons 2 3 ) ) ) )
(display (length (queens 8)))
|
85bc53ad1f312984ee3b1bc4c7d7eab3c3629fc2b7e11409f389e638f19abe42 | racket/rhombus-prototype | and_meta.rkt | #lang racket/base
(require "../private/bounce.rkt")
(bounce "../static.rkt"
"meta.rkt")
;; `reader` submodule: hooks this language into Racket's `#lang` protocol via
;; syntax/module-reader, reading shrubbery notation instead of s-expressions.
(module reader syntax/module-reader
  #:language 'rhombus/static/and_meta
  ;; Whole-body readers: parse-all consumes the entire module body at once
  ;; and the result is wrapped in a one-element list, as the protocol expects.
  #:read (lambda (in) (list (syntax->datum (parse-all in))))
  #:read-syntax (lambda (src in) (list (parse-all in #:source src)))
  #:info rhombus:get-info-proc
  #:whole-body-readers? #t
  (require shrubbery/parse
           (prefix-in rhombus: (submod "../private/core.rkt" reader))))
;; Runtime configuration loaded automatically before a module written in
;; this language runs.
(module configure-runtime racket/base
  (require rhombus/runtime-config))

;; Expansion-time configuration: provides the parameterizations used while
;; expanding modules in this language.
(module configure-expand racket/base
  (require rhombus/expand-config)
  (provide enter-parameterization
           exit-parameterization))
| null | https://raw.githubusercontent.com/racket/rhombus-prototype/692dfda43c0d6c5a2bb0298cc5f0445125f3c567/rhombus/static/and_meta.rkt | racket | #lang racket/base
(require "../private/bounce.rkt")
(bounce "../static.rkt"
"meta.rkt")
(module reader syntax/module-reader
#:language 'rhombus/static/and_meta
#:read (lambda (in) (list (syntax->datum (parse-all in))))
#:read-syntax (lambda (src in) (list (parse-all in #:source src)))
#:info rhombus:get-info-proc
#:whole-body-readers? #t
(require shrubbery/parse
(prefix-in rhombus: (submod "../private/core.rkt" reader))))
(module configure-runtime racket/base
(require rhombus/runtime-config))
(module configure-expand racket/base
(require rhombus/expand-config)
(provide enter-parameterization
exit-parameterization))
| |
fd90acb6a2583b254e3fc9ed040cdfc5f70d74d7a65aa61006befd1c78307e6c | juspay/atlas | Time.hs | |
Copyright 2022 Juspay Technologies Pvt Ltd
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS ,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
See the License for the specific language governing permissions and
limitations under the License .
Module : Fixtures . Time
Copyright : ( C ) Juspay Technologies Pvt Ltd 2019 - 2022
License : Apache 2.0 ( see the file LICENSE )
Maintainer :
Stability : experimental
Portability : non - portable
Copyright 2022 Juspay Technologies Pvt Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Module : Fixtures.Time
Copyright : (C) Juspay Technologies Pvt Ltd 2019-2022
License : Apache 2.0 (see the file LICENSE)
Maintainer :
Stability : experimental
Portability : non-portable
-}
module Fixtures.Time (defaultTime) where
import qualified Data.Time as Time
import qualified Data.Time.Calendar.OrdinalDate as Time
-- | Fixed timestamp shared by the test fixtures: ordinal day 120 of 2020
-- (29 April) at 40000 seconds past midnight UTC (11:06:40).
defaultTime :: Time.UTCTime
defaultTime = Time.UTCTime day sinceMidnight
  where
    day = Time.fromOrdinalDate 2020 120
    sinceMidnight = Time.secondsToDiffTime 40000
| null | https://raw.githubusercontent.com/juspay/atlas/e64b227dc17887fb01c2554db21c08284d18a806/app/atlas-transport/test/src/Fixtures/Time.hs | haskell | |
Copyright 2022 Juspay Technologies Pvt Ltd
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS ,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
See the License for the specific language governing permissions and
limitations under the License .
Module : Fixtures . Time
Copyright : ( C ) Juspay Technologies Pvt Ltd 2019 - 2022
License : Apache 2.0 ( see the file LICENSE )
Maintainer :
Stability : experimental
Portability : non - portable
Copyright 2022 Juspay Technologies Pvt Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Module : Fixtures.Time
Copyright : (C) Juspay Technologies Pvt Ltd 2019-2022
License : Apache 2.0 (see the file LICENSE)
Maintainer :
Stability : experimental
Portability : non-portable
-}
module Fixtures.Time (defaultTime) where
import qualified Data.Time as Time
import qualified Data.Time.Calendar.OrdinalDate as Time
defaultTime :: Time.UTCTime
defaultTime =
Time.UTCTime
{ utctDay = Time.fromOrdinalDate 2020 120,
utctDayTime = Time.secondsToDiffTime 40000
}
| |
a889ce734e130d4c1cee10aaa532cc0dc4963ff138d3e9afdeebb5016d1d1e86 | fiddlerwoaroof/alimenta | alimenta.lisp | alimenta.lisp -*- tab - width : 8 ; -*-
;(declaim (optimize (speed 0) (safety 3) (debug 3)))
(in-package #:alimenta)
;; Base class for anything parsed out of a feed document: both feeds and
;; items carry a title, a link, and the DOM node (DOC) they were built from.
(defclass feed-entity ()
  ((title :initarg :title :initform nil :accessor title)
   (link :initarg :link :initform nil :accessor link)
   (doc :initarg :doc :initform nil :accessor doc)))
(defgeneric belongs-to (feed-entity)
  (:documentation "Returns the person responsible for this feed item"))

;; Signalled when -TO-FEED has no method for the requested feed type.
(define-condition feed-type-unsupported (error)
  ((%type :initarg :type :reader feed-type)
   (%feed-link :initarg :feed-link :reader feed-link)))

(defgeneric -to-feed (doc type &key feed-link)
  (:documentation "Given an xml-document, return a feed object")
  ;; Default method: any TYPE without a specialized method is unsupported.
  (:method (doc type &key feed-link)
    (error 'feed-type-unsupported :type type :feed-link feed-link)))

(defgeneric render (object renderer)
  (:documentation "Given a lisp object representing a feed, return it rendered to the specified format"))

(defgeneric generate-xml (feed feed-type &key partial)
  (:documentation "Given a lisp object representing a feed, return an xml document"))

(defgeneric content-el (entity)
  (:documentation "Return the element that contains the item's content"))
;; A single entry of a feed.  LINKS maps a keyword link type (interned by
;; (SETF LINK) below) to its href.
(defclass item (feed-entity)
  ((author :initarg :author :initform nil :accessor author)
   (content :initarg :content :initform nil :accessor content)
   (date :initarg :date :initform nil :accessor date)
   (id :initarg :id :initform nil :accessor id)
   (links :initform (make-hash-table :test #'equalp) :accessor links)))

;; FEED is a collection of ITEMs.
;; NOTE(review): the ITEMS accessor and FOR iteration used below are assumed
;; to be generated by collection-class -- confirm against that library.
(collection-class:define-collection (feed item) (feed-entity)
  ((description :initarg :description :initform nil :accessor description)
   (feed-link :initarg :feed-link :initform nil :accessor feed-link)
   (source-type :initarg :source-type :initform nil :accessor source-type)))
;; Render the feed itself, then fold each item's rendering into the
;; resulting document via ADD-RENDERED-ITEM, threading DOC through the loop.
(defmethod render ((feed feed) renderer)
  (let ((doc (alimenta.render:render-feed feed renderer)))
    (for:for ((item over feed))
      (setf doc
            (alimenta.render:add-rendered-item
             doc
             (alimenta.render:render-item item feed renderer)
             renderer)))
    doc))
;; Assigning a string to FEED-LINK parses it into a PURI URI first.
(defmethod (setf feed-link) ((value string) (feed feed))
  (setf (slot-value feed 'feed-link)
        (puri:parse-uri value)))

;; Normalize a :FEED-LINK initarg into a parsed URI at construction time.
(defmethod initialize-instance :after ((feed feed) &key feed-link)
  (when feed-link
    (setf (feed-link feed) (puri:parse-uri feed-link))))

;; An item is attributed to its author.
(defmethod belongs-to ((item item))
  (author item))
;; Marker class for values bundling several components; PRIMARY-VALUE
;; extracts the most useful one.
(defclass complex-value () ())

(defgeneric primary-value (self)
  (:documentation "Primarily for COMPLEX-VALUES: this should take one and return a useful primary value"))

(defgeneric push-item (feed item)
  (:documentation "Adds an item to the feed"))

(defgeneric make-item (xml-dom doc-type)
  (:documentation "Given an xml document, return an item"))

(defgeneric parse-feed (feed)
  (:documentation "Parse a feed into a lisp object"))

(defgeneric get-items (xml feed-type)
  (:documentation "Given an xml document, extract its items"))

;; Plain (non-complex) values are their own primary value.
(defmethod primary-value ((self t))
  self)

;; Signalled when an item already has a link of the given type; callers may
;; continue (see CERROR in (SETF LINK)) to replace the old link.
(define-condition duplicate-link-type (error)
  ((old :reader duplicate-link-type-old :initarg :old)
   (new :reader duplicate-link-type-new :initarg :new))
  (:report (lambda (condition stream)
             (format stream "Item already has link ~s"
                     (duplicate-link-type-old condition)))))
;; After the primary method builds the feed's own XML, generate each item
;; into that document (item methods receive it via :PARTIAL).
(defmethod generate-xml :around ((feed feed) feed-type &rest r)
  (declare (ignore r))
  (let ((result (call-next-method feed feed-type)))
    (with-accessors ((items items)) feed
      (loop for item in items
            do (generate-xml item feed-type :partial result)))
    result))
;; Streams and strings are parsed with PLUMP first, then re-dispatched on
;; the resulting DOM node.
(defmethod -to-feed ((doc stream) doc-type &key feed-link)
  (-to-feed (plump:parse doc)
            doc-type
            :feed-link feed-link))

(defmethod -to-feed ((doc string) doc-type &key feed-link)
  (-to-feed (plump:parse doc)
            doc-type
            :feed-link feed-link))
;; Wraps every DOM-level method: records the source document and type on the
;; freshly built feed, then constructs its items, so individual methods
;; never have to.
(defmethod -to-feed :around ((xml-dom plump:node) doc-type &key feed-link)
  "This wraps the particular methods so that _they_ don't have to implement
item fetching. NIL passed to the type activates auto-detection"
  (aprog1 (call-next-method xml-dom doc-type :feed-link feed-link)
    (with-slots (doc source-type) it
      (setf doc xml-dom
            source-type doc-type))
    ;; GET-ITEMS returns a vector of DOM nodes; build an ITEM from each.
    (setf
     (items it)
     (loop for item across (get-items xml-dom doc-type)
           collect (make-item item doc-type)))))
(defgeneric (setf link) (value self))

;; Register a typed link on an item.  VALUE is (TYPE . HREF), where HREF may
;; be given directly or as a one-element list; TYPE is interned as an
;; upcased keyword.  Re-setting an existing link type signals the
;; continuable DUPLICATE-LINK-TYPE error before replacing it.
(defmethod (setf link) ((value cons) (self item))
  (with-slots (links) self
    (destructuring-bind (type . href) value
      (when (consp href)
        (if (null (cdr href))
            (setf href (car href))
            ;; Fix: the original called (ERROR 'TYPE-ERROR "too many
            ;; arguments"), but ERROR with a condition type expects initargs;
            ;; TYPE-ERROR requires :DATUM and :EXPECTED-TYPE.
            (error 'type-error :datum href :expected-type '(cons t null))))
      (let ((type-keyword (make-keyword (string-upcase type))))
        (when (slot-boundp self 'links)
          (multiple-value-bind (old-link old-link-p) (gethash type-keyword links)
            (when old-link-p
              (cerror "Replace Link ~a:~a with ~a:~a"
                      'duplicate-link-type :old old-link :new href))))
        (setf (gethash type-keyword links) href)))))
;; Unreadable REPL printing: show a trimmed title and link so large feeds
;; stay legible (SHORTEN-LINK truncates any string, not just links).
(defmethod print-object ((object feed) stream)
  (print-unreadable-object (object stream :type t :identity t)
    (with-slots (title link) object
      (format stream "title: ~s link: ~s"
              (aif title (shorten-link it) "<untitled>")
              (aif link (shorten-link it) "<no link>")))))

(defmethod print-object ((object item) stream)
  (print-unreadable-object (object stream :type t :identity t)
    (with-slots (title link date) object
      (format stream "title: ~s link: ~s date:~s"
              (aif title (shorten-link it) "<untitled>")
              (aif link (shorten-link it) "<no link>")
              (aif date it "<no date>")))))
;; Guess the feed dialect from the root element's tag name: an Atom feed's
;; root is <feed>; any other root (e.g. :RSS) names itself.
(defun detect-feed-type (xml-dom)
  (let ((root-node-name (make-keyword (string-upcase
                                       ($ (inline xml-dom) (children)
                                          (map #'tag-name) (node))))))
    (case root-node-name
      ((:feed) :atom)
      (t root-node-name))))
(defgeneric get-random-item (feed)
  ;; NOTE(review): signals on an empty feed -- (RANDOM 0) is an error.
  (:method ((feed feed))
    (let* ((items (copy-seq (items feed)))
           (num-items (length items)))
      (elt items
           (random num-items)))))

(defgeneric get-latest-item (feed)
  ;; COPY-SEQ protects the feed's item list from SORT's destructiveness.
  (:method ((feed feed))
    (let ((items (copy-seq (items feed))))
      (car (sort items
                 #'local-time:timestamp>
                 :key #'date)))))
;;(defun generate-xml (feed &key (feed-type :rss))
;; (%generate-xml feed feed-type))
(defun to-feed (doc &key type feed-link)
  "Makes an instance of feed from the given document. Specialize to-feed with
an equal-specializer on type with an appropriate symbol to implement a new
sort of feed."
  ;; Absent an explicit TYPE, sniff it from the document's root element.
  (-to-feed doc
            (or type (detect-feed-type doc))
            :feed-link feed-link))
( defun -get - items ( feed xml - dom & key type )
;; (with-accessors ((items items)) feed
;; (loop for item across (get-items xml-dom type)
;; do (push (make-item xml-dom type) items)
;; finally (return items))))
;; Convenience constructor mirroring (MAKE-INSTANCE 'FEED ...).
(defun make-feed (&key title link items feed-link description)
  (make-instance 'feed
                 :title title
                 :link link
                 :items items
                 :feed-link feed-link
                 :description description))
;; Session-wide id counter: returns 1, 2, 3, ...  It is the default id
;; generator for ADD-ITEM-TO-FEED, which invokes it as (FUNCALL NEXT-ID item)
;; -- the original zero-argument definition therefore failed with a program
;; error when the default was used.  The argument is optional, so existing
;; zero-argument callers keep working.
(let ((n 0))
  (defun next-id (&optional item)
    (declare (ignore item))
    (incf n)))
;; Build an ITEM, assign it an id by calling the NEXT-ID function with the
;; fresh item (the default generator ignores it), push it onto FEED, and
;; return both the feed and the new item.  ALET binds the new item to IT.
(defun add-item-to-feed (feed &key title (next-id #'next-id) date link content)
  (alet (make-instance 'item :title title :date date :link link :content content)
    (with-slots (id) it
      (setf id (funcall next-id it)))
    (push-item feed it)
    (values feed it)))
;; Destructively drop every item of FEED failing FUNCTION (applied through
;; KEY when given); returns the feed itself.
(defun filter-feed (feed function &key key)
  (let ((survivors (remove-if-not function (items feed) :key key)))
    (setf (items feed) survivors)
    feed))
(defgeneric transform (item transform)
  (:documentation "transform a feed entity by TRANSFORM: the
function will be called with either a feed or a item as an arguments
and, if called upon a feed, it'll automatically be mapped across the
feed's items after being called on the feed. We do not use the results
of this mapping directly, however any modifications to an item mutate
the original.")
  ;; Always return the entity itself, regardless of what the methods yield.
  (:method :around (item transform)
    (call-next-method)
    item)
  (:method ((feed feed-entity) transform)
    (funcall transform feed))
  ;; After transforming a feed, recurse into each of its items (results are
  ;; discarded; mutation is the point).
  (:method :after ((feed feed) transform)
    (map nil (lambda (it)
               (transform it transform))
         (items feed))))
;; Replace ITEM's content with (FUNCALL FUNCTION old-content); returns the
;; new content.
(defun transform-content (item function)
  (let ((updated (funcall function (content item))))
    (setf (content item) updated)))
;; Display helper: drop a leading http(s) scheme, then cap the result at 30
;; characters.
(defun shorten-link (link)
  (let* ((trimmed (cl-ppcre:regex-replace "^https?:" link ""))
         (cut (min 30 (length trimmed))))
    (subseq trimmed 0 cut)))
;; Default PUSH-ITEM: prepend, so items are stored newest-first.
(defmethod push-item ((feed feed) (item item))
  (push item
        (items feed)))
;; Smoke test: PUSH-ITEM places the new item at the head of the feed.
(deftest push-item ()
  (let ((feed (make-instance 'feed))
        (item (make-instance 'item)))
    (with-accessors ((items items)) feed
      ;; Restored comment marker lost in extraction (the tokenized line was
      ;; a commented-out assertion in the original source):
      ;(should signal error (push-item feed 2))
      (should be eql item
              (progn
                (push-item feed item)
                (car items))))))
vim : set = marker :
| null | https://raw.githubusercontent.com/fiddlerwoaroof/alimenta/2806d56a06a59bfc9d76286a871b170fa1126a72/alimenta.lisp | lisp | -*-
(defun generate-xml (feed &key (feed-type :rss))
(%generate-xml feed feed-type))
(with-accessors ((items items)) feed
(loop for item across (get-items xml-dom type)
do (push (make-item xml-dom type) items)
finally (return items)))) | ( declaim ( optimize ( speed 0 ) ( safety 3 ) ( debug 3 ) ) )
(in-package #:alimenta)
(defclass feed-entity ()
((title :initarg :title :initform nil :accessor title)
(link :initarg :link :initform nil :accessor link)
(doc :initarg :doc :initform nil :accessor doc)))
(defgeneric belongs-to (feed-entity)
(:documentation "Returns the person responsible for this feed item"))
(define-condition feed-type-unsupported (error)
((%type :initarg :type :reader feed-type)
(%feed-link :initarg :feed-link :reader feed-link)))
(defgeneric -to-feed (doc type &key feed-link)
(:documentation "Given an xml-document, return a feed object")
(:method (doc type &key feed-link)
(error 'feed-type-unsupported :type type :feed-link feed-link)))
(defgeneric render (object renderer)
(:documentation "Given a lisp object representing a feed, return it rendered
to the specified format"))
(defgeneric generate-xml (feed feed-type &key partial)
(:documentation "Given a lisp object representing a feed, return an xml
document"))
(defgeneric content-el (entity)
(:documentation "Return the element that contains the item's content"))
(defclass item (feed-entity)
((author :initarg :author :initform nil :accessor author)
(content :initarg :content :initform nil :accessor content)
(date :initarg :date :initform nil :accessor date)
(id :initarg :id :initform nil :accessor id)
(links :initform (make-hash-table :test #'equalp) :accessor links)))
(collection-class:define-collection (feed item) (feed-entity)
((description :initarg :description :initform nil :accessor description)
(feed-link :initarg :feed-link :initform nil :accessor feed-link)
(source-type :initarg :source-type :initform nil :accessor source-type)))
(defmethod render ((feed feed) renderer)
(let ((doc (alimenta.render:render-feed feed renderer)))
(for:for ((item over feed))
(setf doc
(alimenta.render:add-rendered-item doc
(alimenta.render:render-item item feed renderer)
renderer)))
doc))
(defmethod (setf feed-link) ((value string) (feed feed))
(setf (slot-value feed 'feed-link)
(puri:parse-uri value)))
(defmethod initialize-instance :after ((feed feed) &key feed-link)
(when feed-link
(setf (feed-link feed) (puri:parse-uri feed-link))))
(defmethod belongs-to ((item item))
(author item))
(defclass complex-value () ())
(defgeneric primary-value (self)
(:documentation "Primarily for COMPLEX-VALUES: this should take one and
return a useful primary value"))
(defgeneric push-item (feed item)
(:documentation "Adds an item to the feed"))
(defgeneric make-item (xml-dom doc-type)
(:documentation "Given an xml document, return an item"))
(defgeneric parse-feed (feed)
(:documentation "Parse a feed into a lisp object"))
(defgeneric get-items (xml feed-type)
(:documentation "Given an xml document, extract its items"))
(defmethod primary-value ((self t))
self)
(define-condition duplicate-link-type (error)
((old :reader duplicate-link-type-old :initarg :old)
(new :reader duplicate-link-type-new :initarg :new))
(:report (lambda (condition stream)
(format stream "Item already has link ~s" (duplicate-link-type-old condition)))))
(defmethod generate-xml :around ((feed feed) feed-type &rest r)
(declare (ignore r))
(let ((result (call-next-method feed feed-type)))
(with-accessors ((items items)) feed
(loop for item in items
do (generate-xml item feed-type :partial result)))
result))
(defmethod -to-feed ((doc stream) doc-type &key feed-link)
(-to-feed (plump:parse doc)
doc-type
:feed-link feed-link))
(defmethod -to-feed ((doc string) doc-type &key feed-link)
(-to-feed (plump:parse doc)
doc-type
:feed-link feed-link))
(defmethod -to-feed :around ((xml-dom plump:node) doc-type &key feed-link)
"This wraps the particular methods so that _they_ don't have to implement
item fetching. NIL passed to the type activates auto-detection"
(aprog1 (call-next-method xml-dom doc-type :feed-link feed-link)
(with-slots (doc source-type) it
(setf doc xml-dom
source-type doc-type))
(setf
(items it)
(loop for item across (get-items xml-dom doc-type)
collect (make-item item doc-type)))))
(defgeneric (setf link) (value self))
(defmethod (setf link) ((value cons) (self item))
(with-slots (links) self
(destructuring-bind (type . href) value
(when (consp href)
(if (null (cdr href))
(setf href (car href))
(error 'type-error "too many arguments")))
(let ((type-keyword (make-keyword (string-upcase type))))
(when (slot-boundp self 'links)
(multiple-value-bind (old-link old-link-p) (gethash type-keyword links)
(when old-link-p
(cerror "Replace Link ~a:~a with ~a:~a" 'duplicate-link-type :old old-link :new href))))
(setf (gethash type-keyword links) href)))))
(defmethod print-object ((object feed) stream)
(print-unreadable-object (object stream :type t :identity t)
(with-slots (title link) object
(format stream "title: ~s link: ~s"
(aif title (shorten-link it) "<untitled>")
(aif link (shorten-link it) "<no link>")))))
(defmethod print-object ((object item) stream)
(print-unreadable-object (object stream :type t :identity t)
(with-slots (title link date) object
(format stream "title: ~s link: ~s date:~s"
(aif title (shorten-link it) "<untitled>")
(aif link (shorten-link it) "<no link>")
(aif date it "<no date>")))))
(defun detect-feed-type (xml-dom)
(let ((root-node-name (make-keyword (string-upcase
($ (inline xml-dom) (children)
(map #'tag-name) (node))))))
(case root-node-name
((:feed) :atom)
(t root-node-name))))
(defgeneric get-random-item (feed)
(:method ((feed feed))
(let* ((items (copy-seq (items feed)))
(num-items (length items)))
(elt items
(random num-items)))))
(defgeneric get-latest-item (feed)
(:method ((feed feed))
(let ((items (copy-seq (items feed))))
(car (sort items
#'local-time:timestamp>
:key #'date)))))
(defun to-feed (doc &key type feed-link)
"Makes an instance of feed from the given document. Specialize to-feed with
an equal-specializer on type with an appropriate symbol to implement a new
sort of feed."
(unless type
(setf type (detect-feed-type doc)))
(-to-feed doc type :feed-link feed-link))
( defun -get - items ( feed xml - dom & key type )
(defun make-feed (&key title link items feed-link description)
(make-instance 'feed
:description description
:feed-link feed-link
:items items
:link link
:title title))
(let ((n 0))
(defun next-id ()
(incf n)))
(defun add-item-to-feed (feed &key title (next-id #'next-id) date link content)
(alet (make-instance 'item :title title :date date :link link :content content)
(with-slots (id) it
(setf id (funcall next-id it)))
(push-item feed it)
(values feed it)))
(defun filter-feed (feed function &key key)
(setf (items feed)
(remove-if-not function (items feed)
:key key))
feed)
(defgeneric transform (item transform)
  (:documentation "transform a feed entity by TRANSFORM: the
function will be called with either a feed or a item as an arguments
and, if called upon a feed, it'll automatically be mapped across the
feed's items after being called on the feed. We do not use the results
of this mapping directly, however any modifications to an item mutate
the original.")
  ;; Always hand back the entity that was passed in, regardless of what
  ;; TRANSFORM returns.
  (:method :around (item transform)
    (call-next-method)
    item)
  ;; Primary method: apply TRANSFORM to the entity itself.
  (:method ((feed feed-entity) transform)
    (funcall transform feed))
  ;; After transforming a feed, recurse into each of its items (for side
  ;; effects only -- results of the MAP are discarded).
  (:method :after ((feed feed) transform)
    (map nil (lambda (it)
               (transform it transform))
         (items feed))))
(defun transform-content (item function)
  "Replace ITEM's content with (FUNCTION content); returns the new content."
  (let ((updated (funcall function (content item))))
    (setf (content item) updated)))
(defun shorten-link (link &optional (max-length 30))
  "Return LINK with any leading http:/https: scheme stripped, truncated to
at most MAX-LENGTH characters (default 30, matching the old hard-coded
limit).  Used to keep printed feed/item representations short."
  (let ((bare (cl-ppcre:regex-replace "^https?:" link "")))
    (subseq bare 0 (min max-length (length bare)))))
;; Prepend ITEM to FEED's item list; the newest item sits at the head.
(defmethod push-item ((feed feed) (item item))
  (push item
        (items feed)))
;; Pushing an item onto a feed must make it the head of the feed's items.
(deftest push-item ()
  (let ((feed (make-instance 'feed))
        (item (make-instance 'item)))
    (with-accessors ((items items)) feed
      ;; Restored as a comment: this line appeared mangled and unbalanced in
      ;; the source ("( should signal error ( push - item feed 2 ) )"),
      ;; which would not even read as Lisp.
      ;; (should signal error (push-item feed 2))
      (should be eql item
              (progn
                (push-item feed item)
                (car items))))))
;; vim: set fdm=marker:
|
e0903cbb386c817413ab5415bf27bf8d4e51f1f8a466d0e833b8ef50929eab92 | infi-nl/alibi | task.clj | (ns alibi.domain.task
(:refer-clojure :exclude [get])
(:require
[alibi.domain.billing-method :refer [billing-method?]]
[clojure.set :refer [rename-keys]]))
;; Aggregate for a task: its id, how it is billed, a display name, and the
;; owning project's id.
(defrecord Task [task-id billing-method name project-id])
(defn hydrate-task
  "Rebuild a Task record from a stored map. Requires an integer :task-id and
  a valid :billing-method; any extra keys in cmd are carried along by
  map->Task."
  [{:keys [task-id billing-method] :as cmd}]
  {:pre [(integer? task-id)
         (billing-method? billing-method)]}
  (map->Task cmd))
(defn task? "True when o is a Task record." [o] (instance? Task o))
(defn new-task
  "Create an unsaved Task (:task-id 0) named task-name under project
  for-project-id. task-name must be a non-empty string; billing-method is
  validated downstream by hydrate-task's precondition."
  [{:keys [for-project-id task-name billing-method]}]
  {:pre [(and (string? task-name) (seq task-name))
         (integer? for-project-id)]}
  (hydrate-task {:task-id 0
                 :name task-name
                 :project-id for-project-id
                 :billing-method billing-method}))
;; Storage abstraction for tasks.  An implementation is bound to the dynamic
;; var *repository-impl* via with-impl; the public fns below delegate to it.
(defprotocol TaskRepository
  (-task-exists? [this task-id])
  (-get [this task-id])
  (-project-id-for-task-id [this task-id])
  (-add! [this task]))
;; Currently bound TaskRepository implementation (unbound until with-impl).
(def ^:private ^:dynamic *repository-impl*)

(defmacro with-impl
  "Evaluate body with impl bound as the active TaskRepository."
  [impl & body]
  `(binding [*repository-impl* ~impl]
     ~@body))
(defn task-exists?
  "True when a task with task-id is present in the bound repository."
  [task-id]
  (-task-exists? *repository-impl* task-id))

(defn get
  "Fetch the task with task-id, or nil when task-id is nil.
  Shadows clojure.core/get (excluded in the ns form)."
  [task-id]
  (when task-id
    (-get *repository-impl* task-id)))

(defn add!
  "Persist task in the bound repository."
  [task]
  (-add! *repository-impl* task))

(defn project-id-for-task-id
  "Return the owning project's id for task-id."
  [task-id]
  (-project-id-for-task-id *repository-impl* task-id))
| null | https://raw.githubusercontent.com/infi-nl/alibi/00e97340ebff483f0ecbb3eef929a4052adbc78b/src/alibi/domain/task.clj | clojure | (ns alibi.domain.task
(:refer-clojure :exclude [get])
(:require
[alibi.domain.billing-method :refer [billing-method?]]
[clojure.set :refer [rename-keys]]))
(defrecord Task [task-id billing-method name project-id])
(defn hydrate-task
[{:keys [task-id billing-method] :as cmd}]
{:pre [(integer? task-id)
(billing-method? billing-method)]}
(map->Task cmd))
(defn task? [o] (instance? Task o))
(defn new-task
[{:keys [for-project-id task-name billing-method]}]
{:pre [(and (string? task-name) (seq task-name))
(integer? for-project-id)]}
(hydrate-task {:task-id 0
:name task-name
:project-id for-project-id
:billing-method billing-method}))
(defprotocol TaskRepository
(-task-exists? [this task-id])
(-get [this task-id])
(-project-id-for-task-id [this task-id])
(-add! [this task]))
(def ^:private ^:dynamic *repository-impl*)
(defmacro with-impl [impl & body]
`(binding [*repository-impl* ~impl]
~@body))
(defn task-exists? [task-id]
(-task-exists? *repository-impl* task-id))
(defn get [task-id]
(when task-id
(-get *repository-impl* task-id)))
(defn add! [task]
(-add! *repository-impl* task))
(defn project-id-for-task-id [task-id]
(-project-id-for-task-id *repository-impl* task-id))
| |
c82a5c8c918124bab3b4a7e7f28b8606ca353b638f16e972b1f21cbfe3e2162d | oakes/Paravim | scroll.cljc | (ns paravim.scroll
(:require [paravim.constants :as constants]
[play-cljc.transforms :as t]
#?(:clj [play-cljc.macros-java :refer [math]]
:cljs [play-cljc.macros-js :refer-macros [math]])))
;; Multiplier converting a wheel offset into camera-target displacement.
(def ^:const scroll-speed 40)
(def ^:const scroll-limit 10) ;; per scroll, not cumulative limit
;; Lower bound on animation speed so the camera never stalls mid-glide.
(def ^:const min-scroll-speed 5)
;; Per-frame decay factor applied to the scroll speed.
(def ^:const deceleration 0.8)
(defn decelerate
  "Apply one frame of friction to a scroll speed, never letting it drop
  below min-scroll-speed."
  [speed]
  (max min-scroll-speed (* speed deceleration)))
(defn start-scrolling-camera
  "React to a mouse-wheel event: nudge the buffer's camera target by the
  wheel offsets and bump the animation speeds. Returns only the updated
  keys (a map to merge into the buffer)."
  [{:keys [camera-x camera-target-x camera-target-y scroll-speed-x scroll-speed-y] :as buffer} xoffset yoffset]
  (let [;; make the left edge "sticky" so it doesn't move unintentionally
        xoffset (if (and (== camera-x 0)
                         (< (math abs (long xoffset)) 2.5))
                  0
                  xoffset)
        ;; restrict the offsets to discard excessive values
        xoffset (-> xoffset (min scroll-limit) (max (- scroll-limit)))
        yoffset (-> yoffset (min scroll-limit) (max (- scroll-limit)))
        ;; flip the sign because the camera must go the opposite direction
        xdiff (* -1 scroll-speed xoffset)
        ydiff (* -1 scroll-speed yoffset)]
    {:camera-target-x (+ camera-target-x xdiff)
     :camera-target-y (+ camera-target-y ydiff)
     :scroll-speed-x (+ scroll-speed-x (math abs (long xdiff)))
     :scroll-speed-y (+ scroll-speed-y (math abs (long ydiff)))}))
(defn adjust-camera
  "Clamp a desired camera position to the rendered text's bounds. Returns
  [x y] with each coordinate limited to [0, max-scroll], computed from the
  text size, font metrics, and window, leaving room for the minimap when it
  is shown."
  [text-entity show-minimap? camera-x camera-y font-size text-box {:keys [font-width font-height] :as constants} window]
  (let [{game-width :width game-height :height} window
        text-top ((:top text-box) game-height font-size)
        text-bottom ((:bottom text-box) game-height font-size)
        ;; u_char_counts holds the character count of each rendered line.
        char-counts (get-in text-entity [:uniforms 'u_char_counts])
        max-char-count (if (seq char-counts)
                         (apply max char-counts)
                         0)
        text-width (* max-char-count font-size font-width)
        text-height (* (count char-counts) font-size font-height)
        ;; The minimap occupies a fixed fraction of the window's width.
        text-view-width (if show-minimap?
                          (- game-width (/ game-width constants/minimap-scale))
                          game-width)
        max-x (- text-width text-view-width)
        max-y (- text-height (- text-bottom text-top))]
    [(-> camera-x (min max-x) (max 0))
     (-> camera-y (min max-y) (max 0))]))
(defn rubber-band-camera
  "If the camera target overshoots the text bounds, return a map of clamped
  :camera-target-* plus scroll speeds reset to min-scroll-speed on the
  overshooting axes, so the camera glides back gently; nil when in bounds."
  [text-entity show-minimap?
   camera-target-x camera-target-y
   scroll-speed-x scroll-speed-y
   font-size text-box constants window]
  (let [[new-x new-y] (adjust-camera text-entity show-minimap? camera-target-x camera-target-y font-size text-box constants window)]
    (when (or (not (== camera-target-x new-x))
              (not (== camera-target-y new-y)))
      {:camera-target-x new-x
       :camera-target-y new-y
       :scroll-speed-x (if (not (== camera-target-x new-x))
                         min-scroll-speed
                         scroll-speed-x)
       :scroll-speed-y (if (not (== camera-target-y new-y))
                         min-scroll-speed
                         scroll-speed-y)})))
(defn animate-camera
  "Advance the camera one frame toward its target, easing by delta-time and
  the current scroll speeds. Returns the new :camera transform, position,
  and decayed speeds (0 once the target is reached)."
  [camera-x camera-y
   camera-target-x camera-target-y
   scroll-speed-x scroll-speed-y
   delta-time]
  (let [min-diff 1
        ;; NOTE(review): (long ...) truncates fractional distance, so a
        ;; remaining sub-pixel offset counts as "arrived" and the position
        ;; snaps to the target -- presumably intentional; confirm.
        x-diff (long (- camera-target-x camera-x))
        y-diff (long (- camera-target-y camera-y))
        new-x (if (< (math abs x-diff) min-diff)
                camera-target-x
                (+ camera-x (* x-diff (min 1 (* delta-time scroll-speed-x)))))
        new-y (if (< (math abs y-diff) min-diff)
                camera-target-y
                (+ camera-y (* y-diff (min 1 (* delta-time scroll-speed-y)))))
        new-speed-x (if (== new-x camera-target-x)
                      0
                      (decelerate scroll-speed-x))
        new-speed-y (if (== new-y camera-target-y)
                      0
                      (decelerate scroll-speed-y))]
    {:camera (t/translate constants/orig-camera new-x new-y)
     :camera-x new-x
     :camera-y new-y
     :scroll-speed-x new-speed-x
     :scroll-speed-y new-speed-y}))
(defn move-camera-to-cursor
  "Set the buffer's camera target so the cursor rectangle stays visible:
  scroll left/up when the cursor precedes the viewport and right/down when
  it passes the far edge, accounting for the minimap's width.
  NOTE(review): :camera is destructured but unused here -- confirm it can
  be dropped from the binding."
  [buffer font-size text-box window {:keys [left top width height] :as cursor-entity}]
  (let [{:keys [camera camera-x camera-y]} buffer
        {game-width :width game-height :height} window
        text-top ((:top text-box) game-height font-size)
        text-bottom ((:bottom text-box) game-height font-size)
        cursor-bottom (+ top height)
        cursor-right (+ left width)
        text-view-width (if (:show-minimap? buffer)
                          (- game-width (/ game-width constants/minimap-scale))
                          game-width)
        text-view-height (- text-bottom text-top)
        camera-bottom (+ camera-y text-view-height)
        camera-right (+ camera-x text-view-width)
        camera-x (cond
                   (< left camera-x)
                   left
                   (> cursor-right camera-right)
                   (- cursor-right text-view-width)
                   :else
                   camera-x)
        ;; The three-way (> cursor-bottom camera-bottom 0) additionally
        ;; requires camera-bottom to be positive before scrolling down.
        camera-y (cond
                   (< top camera-y)
                   top
                   (> cursor-bottom camera-bottom 0)
                   (- cursor-bottom text-view-height)
                   :else
                   camera-y)]
    (assoc buffer
           :camera-target-x camera-x
           :camera-target-y camera-y)))
| null | https://raw.githubusercontent.com/oakes/Paravim/871b9adf4e9819a7b9f2c63466c55640f0f8c280/src/paravim/scroll.cljc | clojure | per scroll, not cumulative limit
make the left edge "sticky" so it doesn't move unintentionally
restrict the offsets to discard excessive values
flip the sign because the camera must go the opposite direction | (ns paravim.scroll
(:require [paravim.constants :as constants]
[play-cljc.transforms :as t]
#?(:clj [play-cljc.macros-java :refer [math]]
:cljs [play-cljc.macros-js :refer-macros [math]])))
(def ^:const scroll-speed 40)
(def ^:const min-scroll-speed 5)
(def ^:const deceleration 0.8)
(defn decelerate
[speed]
(let [speed (* speed deceleration)]
(if (< speed min-scroll-speed)
min-scroll-speed
speed)))
(defn start-scrolling-camera [{:keys [camera-x camera-target-x camera-target-y scroll-speed-x scroll-speed-y] :as buffer} xoffset yoffset]
xoffset (if (and (== camera-x 0)
(< (math abs (long xoffset)) 2.5))
0
xoffset)
xoffset (-> xoffset (min scroll-limit) (max (- scroll-limit)))
yoffset (-> yoffset (min scroll-limit) (max (- scroll-limit)))
xdiff (* -1 scroll-speed xoffset)
ydiff (* -1 scroll-speed yoffset)]
{:camera-target-x (+ camera-target-x xdiff)
:camera-target-y (+ camera-target-y ydiff)
:scroll-speed-x (+ scroll-speed-x (math abs (long xdiff)))
:scroll-speed-y (+ scroll-speed-y (math abs (long ydiff)))}))
(defn adjust-camera [text-entity show-minimap? camera-x camera-y font-size text-box {:keys [font-width font-height] :as constants} window]
(let [{game-width :width game-height :height} window
text-top ((:top text-box) game-height font-size)
text-bottom ((:bottom text-box) game-height font-size)
char-counts (get-in text-entity [:uniforms 'u_char_counts])
max-char-count (if (seq char-counts)
(apply max char-counts)
0)
text-width (* max-char-count font-size font-width)
text-height (* (count char-counts) font-size font-height)
text-view-width (if show-minimap?
(- game-width (/ game-width constants/minimap-scale))
game-width)
max-x (- text-width text-view-width)
max-y (- text-height (- text-bottom text-top))]
[(-> camera-x (min max-x) (max 0))
(-> camera-y (min max-y) (max 0))]))
(defn rubber-band-camera [text-entity show-minimap?
camera-target-x camera-target-y
scroll-speed-x scroll-speed-y
font-size text-box constants window]
(let [[new-x new-y] (adjust-camera text-entity show-minimap? camera-target-x camera-target-y font-size text-box constants window)]
(when (or (not (== camera-target-x new-x))
(not (== camera-target-y new-y)))
{:camera-target-x new-x
:camera-target-y new-y
:scroll-speed-x (if (not (== camera-target-x new-x))
min-scroll-speed
scroll-speed-x)
:scroll-speed-y (if (not (== camera-target-y new-y))
min-scroll-speed
scroll-speed-y)})))
(defn animate-camera [camera-x camera-y
camera-target-x camera-target-y
scroll-speed-x scroll-speed-y
delta-time]
(let [min-diff 1
x-diff (long (- camera-target-x camera-x))
y-diff (long (- camera-target-y camera-y))
new-x (if (< (math abs x-diff) min-diff)
camera-target-x
(+ camera-x (* x-diff (min 1 (* delta-time scroll-speed-x)))))
new-y (if (< (math abs y-diff) min-diff)
camera-target-y
(+ camera-y (* y-diff (min 1 (* delta-time scroll-speed-y)))))
new-speed-x (if (== new-x camera-target-x)
0
(decelerate scroll-speed-x))
new-speed-y (if (== new-y camera-target-y)
0
(decelerate scroll-speed-y))]
{:camera (t/translate constants/orig-camera new-x new-y)
:camera-x new-x
:camera-y new-y
:scroll-speed-x new-speed-x
:scroll-speed-y new-speed-y}))
(defn move-camera-to-cursor [buffer font-size text-box window {:keys [left top width height] :as cursor-entity}]
(let [{:keys [camera camera-x camera-y]} buffer
{game-width :width game-height :height} window
text-top ((:top text-box) game-height font-size)
text-bottom ((:bottom text-box) game-height font-size)
cursor-bottom (+ top height)
cursor-right (+ left width)
text-view-width (if (:show-minimap? buffer)
(- game-width (/ game-width constants/minimap-scale))
game-width)
text-view-height (- text-bottom text-top)
camera-bottom (+ camera-y text-view-height)
camera-right (+ camera-x text-view-width)
camera-x (cond
(< left camera-x)
left
(> cursor-right camera-right)
(- cursor-right text-view-width)
:else
camera-x)
camera-y (cond
(< top camera-y)
top
(> cursor-bottom camera-bottom 0)
(- cursor-bottom text-view-height)
:else
camera-y)]
(assoc buffer
:camera-target-x camera-x
:camera-target-y camera-y)))
|
95418cb57e97cdcce28bb7010778fb6f7f27ee3fc8bfd6698cfb54acfbedee05 | johnwhitington/ocamli | example07.ml | type 'a sequence = Nil | Cons of 'a * 'a sequence
let rec length s =
match s with
Nil -> 0
| Cons (_, t) -> 1 + length t
let rec append a b =
match a with
Nil -> b
| Cons (h, t) -> Cons (h, append t b)
| null | https://raw.githubusercontent.com/johnwhitington/ocamli/28da5d87478a51583a6cb792bf3a8ee44b990e9f/OCaml%20from%20the%20Very%20Beginning/Chapter%2010/example07.ml | ocaml | type 'a sequence = Nil | Cons of 'a * 'a sequence
let rec length s =
match s with
Nil -> 0
| Cons (_, t) -> 1 + length t
let rec append a b =
match a with
Nil -> b
| Cons (h, t) -> Cons (h, append t b)
| |
8e0ff18683e79ffdaad3b8e9d52b145b20ba49015fe4e70b0c85e072a398bb6b | ptol/oczor | CodeGenAst.hs | # LANGUAGE TemplateHaskell #
# LANGUAGE TypeFamilies #
module Oczor.Converter.CodeGenAst (module Oczor.Converter.CodeGenAst) where
import Data.Functor.Foldable.TH
import Data.Functor.Foldable
import ClassyPrelude
import Oczor.Utl
-- | Identifier used for variables, labels and fields in generated code.
type Name = String

-- | Literal constants that can appear in the target-language AST.
data Lits =
  LitNull |
  LitBool Bool |
  LitChar Char |
  LitDouble Double |
  LitInt Int |
  LitString String
  deriving (Eq, Ord, Show)
-- | Language-agnostic AST emitted by the converter; each constructor is a
-- statement or expression form of the target language.
data Ast =
  None |
  Lit Lits |
  UniqObject String |
  Ident Name |
  NotEqual Ast Ast |
  Operator String [Ast] |
  Equal Ast Ast |
  Var Name Ast |
  Set Ast Ast |
  Throw String |
  Scope [Ast] Ast |
  StmtList [Ast] |
  BoolAnds [Ast] |
  Array [Ast] |
  Return Ast |
  HasField Ast Name |
  Label Name Ast |
  Field Ast Name |
  ConditionOperator Ast Ast Ast |
  Code String |
  Call Ast [Ast] |
  Parens Ast |
  If Ast [Ast] [Ast] |
  Object [(Name, Ast)] |
  Function [String] [Ast]
  deriving (Show, Eq, Ord)

-- Derive the pattern functor 'AstF' (constructors NoneF, LitF, ...) used by
-- the recursion-scheme helpers below.
makeBaseFunctor ''Ast
-- | Lower a 'ScopeF' node: an empty scope is just its result expression; a
-- non-empty scope becomes an immediately-invoked zero-argument function
-- whose body ends in a return of the result.
-- NOTE(review): intentionally partial -- only 'ScopeF' values are handled;
-- confirm callers never pass other 'AstF' constructors.
scopeToFunc (ScopeF [] y) = y
scopeToFunc (ScopeF x y) = CallF (Parens (Function [] (embed <$> x <> [ReturnF $ embed y]))) []
-- pattern Scope x <- Function _ x

-- | The bound name of a 'Var' node, if the node is one.
getVarName :: Ast -> Maybe Name
getVarName ast =
  case ast of
    Var name _ -> Just name
    _          -> Nothing
-- | Is this node a 'Function' literal?
isFunction :: Ast -> Bool
isFunction ast =
  case ast of
    Function {} -> True
    _           -> False
-- | Flatten a statement list into its statements; any other node becomes a
-- singleton list.
astToList :: Ast -> [Ast]
astToList ast =
  case ast of
    StmtList stmts -> stmts
    other          -> [other]
-- | Wrap a 'String' as a string-literal node.
litString = Lit . LitString

-- | @obj.label = expr@ as an assignment node.
setField obj label expr = Set (Field obj label) expr

-- | Object literal with no fields.
emptyObject = Object []
-- | Collect every identifier in the tree that is a member of @list@
-- (duplicates preserved, in traversal order), via a catamorphism.
containsIdents :: [String] -> Ast -> [String]
containsIdents list = cata $ \case
  IdentF x | oelem x list -> [x]
  x -> ffold x
| null | https://raw.githubusercontent.com/ptol/oczor/77255e3c1b3decb956d53754cd3f2ac0ae746c67/src/Oczor/Converter/CodeGenAst.hs | haskell | pattern Scope x <- Function _ x | # LANGUAGE TemplateHaskell #
# LANGUAGE TypeFamilies #
module Oczor.Converter.CodeGenAst (module Oczor.Converter.CodeGenAst) where
import Data.Functor.Foldable.TH
import Data.Functor.Foldable
import ClassyPrelude
import Oczor.Utl
type Name = String
data Lits =
LitNull |
LitBool Bool |
LitChar Char |
LitDouble Double |
LitInt Int |
LitString String
deriving (Eq, Ord, Show)
data Ast =
None |
Lit Lits |
UniqObject String |
Ident Name |
NotEqual Ast Ast |
Operator String [Ast] |
Equal Ast Ast |
Var Name Ast |
Set Ast Ast |
Throw String |
Scope [Ast] Ast |
StmtList [Ast] |
BoolAnds [Ast] |
Array [Ast] |
Return Ast |
HasField Ast Name |
Label Name Ast |
Field Ast Name |
ConditionOperator Ast Ast Ast |
Code String |
Call Ast [Ast] |
Parens Ast |
If Ast [Ast] [Ast] |
Object [(Name, Ast)] |
Function [String] [Ast]
deriving (Show, Eq, Ord)
makeBaseFunctor ''Ast
scopeToFunc (ScopeF [] y) = y
scopeToFunc (ScopeF x y) = CallF (Parens (Function [] (embed <$> x <> [ReturnF $ embed y]))) []
getVarName (Var x _) = Just x
getVarName _ = Nothing
isFunction Function{} = True
isFunction _ = False
astToList (StmtList x) = x
astToList x = [x]
litString = Lit . LitString
setField obj label expr = Set (Field obj label) expr
emptyObject = Object []
containsIdents :: [String] -> Ast -> [String]
containsIdents list = cata $ \case
IdentF x | oelem x list -> [x]
x -> ffold x
|
33aeb3945804671b8f5ff841d9f1f2d53aea840612191650447fd0f958f8bf63 | KingoftheHomeless/in-other-words | Cont.hs | # OPTIONS_HADDOCK not - home #
module Control.Effect.Internal.Cont where
import Data.Coerce
import Control.Monad.Trans
import Control.Monad.Base
import qualified Control.Monad.Fail as Fail
import Control.Effect
import Control.Effect.Carrier
import Control.Effect.Internal.Utils
import Control.Monad.Trans.Free.Church.Alternate
-- | An effect for abortive continuations.
newtype Cont :: Effect where
  CallCC :: ((forall b. a -> m b) -> m a) -> Cont m a

-- | An effect for non-abortive continuations of a program
-- that eventually produces a result of type @r@.
--
-- This isn't quite as powerful as proper delimited continuations,
-- as this doesn't provide any equivalent of the @reset@ operator.
--
-- This can be useful as a helper effect.
newtype Shift r :: Effect where
  Shift :: ((a -> m r) -> m r) -> Shift r m a
-- | Internal free-monad instruction set shared by the 'Cont' and 'Shift'
-- carriers:
--
--   * 'Exit' aborts the whole computation with the final result.
--   * 'Attempt' embeds a computation of the final result type.
--   * 'GetCont' captures the current continuation ('Left') or resumes with
--     a value previously fed to it ('Right').
data ContBase mr r a where
  Exit :: r -> ContBase mr r void
  Attempt :: mr -> ContBase mr r r
  GetCont :: ContBase mr r (Either (a -> mr) a)

-- | Carrier for 'Cont': a free monad over 'ContBase' layered on @m@.
newtype ContC r m a = ContC { unContC :: FreeT (ContBase (m r) r) m a }
  deriving ( Functor, Applicative, Monad
           , MonadBase b, Fail.MonadFail, MonadIO
           , MonadThrow, MonadCatch
           )

instance MonadTrans (ContC s) where
  lift = ContC #. lift
  {-# INLINE lift #-}
-- | 'ContC' handles 'Cont' on top of whatever effects @m@ carries.
-- 'CallCC' is interpreted by capturing the continuation via 'GetCont': the
-- escape function replays the captured continuation with 'Attempt' and then
-- aborts with 'Exit', making the continuation abortive.
instance ( Carrier m
         , Threads (FreeT (ContBase (m r) r)) (Prims m)
         )
      => Carrier (ContC r m) where
  type Derivs (ContC r m) = Cont ': Derivs m
  type Prims (ContC r m) = Prims m

  algPrims = coerce (thread @(FreeT (ContBase (m r) r)) (algPrims @m))
  {-# INLINEABLE algPrims #-}

  reformulate n alg = powerAlg (reformulate (n . lift) alg) $ \case
    CallCC main -> n (ContC $ liftF $ GetCont) >>= \case
      Left c -> main (\x -> n $ ContC $ liftF (Attempt (c x)) >>= liftF . Exit)
      Right a -> return a
  {-# INLINEABLE reformulate #-}
-- | Carrier for @'Shift' r@: the same free monad over 'ContBase' as 'ContC'.
newtype ShiftC r m a = ShiftC { unShiftC :: FreeT (ContBase (m r) r) m a }
  deriving ( Functor, Applicative, Monad
           , MonadBase b, Fail.MonadFail, MonadIO
           , MonadThrow, MonadCatch
           )

instance MonadTrans (ShiftC s) where
  lift = ShiftC #. lift
  {-# INLINE lift #-}
-- | 'ShiftC' handles @'Shift' r@: the continuation is captured via
-- 'GetCont', and the final result of the handler body is fed to 'Exit'
-- afterwards rather than aborting mid-way -- which is what makes 'Shift'
-- non-abortive, unlike 'CallCC'.
instance ( Carrier m
         , Threads (FreeT (ContBase (m r) r)) (Prims m)
         )
      => Carrier (ShiftC r m) where
  type Derivs (ShiftC r m) = Shift r ': Derivs m
  type Prims (ShiftC r m) = Prims m

  algPrims = coerce (thread @(FreeT (ContBase (m r) r)) (algPrims @m))
  {-# INLINEABLE algPrims #-}

  reformulate n alg = powerAlg (reformulate (n . lift) alg) $ \case
    Shift main -> n (ShiftC $ liftF $ GetCont) >>= \case
      Left c -> main (\x -> n $ ShiftC $ liftF $ Attempt (c x)) >>= \r ->
        n (ShiftC $ liftF $ Exit r)
      Right a -> return a
  {-# INLINEABLE reformulate #-}
-- | 'ContThreads' accepts the following primitive effects:
--
-- * 'Control.Effect.Regional.Regional' @s@
-- * 'Control.Effect.Optional.Optional' @s@ (when @s@ is a functor)
-- * 'Control.Effect.Type.Unravel.Unravel' @p@
-- * 'Control.Effect.Type.ListenPrim.ListenPrim' @o@ (when @o@ is a 'Monoid')
-- * 'Control.Effect.Type.ReaderPrim.ReaderPrim' @i@
type ContThreads = FreeThreads
| null | https://raw.githubusercontent.com/KingoftheHomeless/in-other-words/9c864c81beb4fdf71d363b6962db5c90275c57ef/src/Control/Effect/Internal/Cont.hs | haskell | | An effect for abortive continuations.
| An effect for non-abortive continuations of a program
This isn't quite as powerful as proper delimited continuations,
as this doesn't provide any equivalent of the @reset@ operator.
This can be useful as a helper effect.
| 'ContThreads' accepts the following primitive effects:
* 'Control.Effect.Regional.Regional' @s@
* 'Control.Effect.Optional.Optional' @s@ (when @s@ is a functor) | # OPTIONS_HADDOCK not - home #
module Control.Effect.Internal.Cont where
import Data.Coerce
import Control.Monad.Trans
import Control.Monad.Base
import qualified Control.Monad.Fail as Fail
import Control.Effect
import Control.Effect.Carrier
import Control.Effect.Internal.Utils
import Control.Monad.Trans.Free.Church.Alternate
newtype Cont :: Effect where
CallCC :: ((forall b. a -> m b) -> m a) -> Cont m a
that eventually produces a result of type @r@.
newtype Shift r :: Effect where
Shift :: ((a -> m r) -> m r) -> Shift r m a
data ContBase mr r a where
Exit :: r -> ContBase mr r void
Attempt :: mr -> ContBase mr r r
GetCont :: ContBase mr r (Either (a -> mr) a)
newtype ContC r m a = ContC { unContC :: FreeT (ContBase (m r) r) m a }
deriving ( Functor, Applicative, Monad
, MonadBase b, Fail.MonadFail, MonadIO
, MonadThrow, MonadCatch
)
instance MonadTrans (ContC s) where
lift = ContC #. lift
# INLINE lift #
instance ( Carrier m
, Threads (FreeT (ContBase (m r) r)) (Prims m)
)
=> Carrier (ContC r m) where
type Derivs (ContC r m) = Cont ': Derivs m
type Prims (ContC r m) = Prims m
algPrims = coerce (thread @(FreeT (ContBase (m r) r)) (algPrims @m))
# INLINEABLE algPrims #
reformulate n alg = powerAlg (reformulate (n . lift) alg) $ \case
CallCC main -> n (ContC $ liftF $ GetCont) >>= \case
Left c -> main (\x -> n $ ContC $ liftF (Attempt (c x)) >>= liftF . Exit)
Right a -> return a
# INLINEABLE reformulate #
newtype ShiftC r m a = ShiftC { unShiftC :: FreeT (ContBase (m r) r) m a }
deriving ( Functor, Applicative, Monad
, MonadBase b, Fail.MonadFail, MonadIO
, MonadThrow, MonadCatch
)
instance MonadTrans (ShiftC s) where
lift = ShiftC #. lift
# INLINE lift #
instance ( Carrier m
, Threads (FreeT (ContBase (m r) r)) (Prims m)
)
=> Carrier (ShiftC r m) where
type Derivs (ShiftC r m) = Shift r ': Derivs m
type Prims (ShiftC r m) = Prims m
algPrims = coerce (thread @(FreeT (ContBase (m r) r)) (algPrims @m))
# INLINEABLE algPrims #
reformulate n alg = powerAlg (reformulate (n . lift) alg) $ \case
Shift main -> n (ShiftC $ liftF $ GetCont) >>= \case
Left c -> main (\x -> n $ ShiftC $ liftF $ Attempt (c x)) >>= \r ->
n (ShiftC $ liftF $ Exit r)
Right a -> return a
# INLINEABLE reformulate #
* ' Control . Effect . Type . Unravel . Unravel ' @p@
* ' Control . Effect . Type . ListenPrim . ListenPrim ' @o@ ( when @o@ is a ' Monoid ' )
* ' Control . Effect . Type . ReaderPrim . ReaderPrim ' @i@
type ContThreads = FreeThreads
|
f75ca6b203e130ecceb27664c9f87e00722efff03fdfc8bba51a304a075ad75d | janestreet/merlin-jst | mreader_parser.ml | { { { COPYING * (
This file is part of Merlin , an helper for ocaml editors
Copyright ( C ) 2013 - 2015 < frederic.bour(_)lakaban.net >
refis.thomas(_)gmail.com >
< simon.castellan(_)iuwt.fr >
Permission is hereby granted , free of charge , to any person obtaining a
copy of this software and associated documentation files ( the " Software " ) ,
to deal in the Software without restriction , including without limitation the
rights to use , copy , modify , merge , publish , distribute , sublicense , and/or
sell copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
The Software is provided " as is " , without warranty of any kind , express or
implied , including but not limited to the warranties of merchantability ,
fitness for a particular purpose and noninfringement . In no event shall
the authors or copyright holders be liable for any claim , damages or other
liability , whether in an action of contract , tort or otherwise , arising
from , out of or in connection with the software or the use or other dealings
in the Software .
) * } } }
This file is part of Merlin, an helper for ocaml editors
Copyright (C) 2013 - 2015 Frédéric Bour <frederic.bour(_)lakaban.net>
Thomas Refis <refis.thomas(_)gmail.com>
Simon Castellan <simon.castellan(_)iuwt.fr>
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
The Software is provided "as is", without warranty of any kind, express or
implied, including but not limited to the warranties of merchantability,
fitness for a particular purpose and noninfringement. In no event shall
the authors or copyright holders be liable for any claim, damages or other
liability, whether in an action of contract, tort or otherwise, arising
from, out of or in connection with the software or the use or other dealings
in the Software.
)* }}} *)
open Std
module I = Parser_raw.MenhirInterpreter
(* Which sort of compilation unit is being parsed. *)
type kind =
  | ML   (* implementation, .ml *)
  | MLI  (* interface, .mli *)
  (*| MLL | MLY*)
(* Pretty-printing hooks handed to the recovery engine for tracing. *)
module Dump = struct
  let symbol () = Parser_printer.print_symbol
end

(* Error-recovery engine instantiated for the raw OCaml grammar.  The inner
   struct adapts Parser_recover: it records the location used when
   synthesizing default semantic values, marks BEGIN as a "guide" token for
   the recovery heuristics, and wires in terminal/nullable metadata. *)
module R = Mreader_recover.Make
    (I)
    (struct
      include Parser_recover

      let default_value loc x =
        Default.default_loc := loc;
        default_value x

      let guide (type a) : a I.symbol -> bool = function
        | I.T I.T_BEGIN -> true
        | _ -> false

      let token_of_terminal = Parser_printer.token_of_terminal
      let nullable = Parser_explain.nullable
    end)
    (Dump)
(* One parser state: either a healthy Menhir checkpoint or the candidate
   set the recovery engine is currently exploring. *)
type 'a step =
  | Correct of 'a I.checkpoint
  | Recovering of 'a R.candidates

(* Final parse result, tagged by unit kind. *)
type tree = [
  | `Interface of Parsetree.signature
  | `Implementation of Parsetree.structure
]

(* Per-token snapshots of the parser, kept so a later parse of an edited
   buffer can resume from the longest common token prefix. *)
type steps =[
  | `Signature of (Parsetree.signature step * Mreader_lexer.triple) list
  | `Structure of (Parsetree.structure step * Mreader_lexer.triple) list
]

(* A completed parse: the tree, the snapshots to resume from, and the
   syntax errors collected along the way. *)
type t = {
  kind: kind;
  tree: tree;
  steps: steps;
  errors: exn list;
  lexer: Mreader_lexer.t;
}
(* Sentinel fed to the parser once the real token stream is exhausted. *)
let eof_token = (Parser_raw.EOF, Lexing.dummy_pos, Lexing.dummy_pos)

(* Accumulates syntax errors for the parse in progress (reset by [make]). *)
let errors_ref = ref []
(* Core incremental loop.  [acc] collects (step, token) pairs newest-first;
   [tokens] is the remaining input.  Healthy Menhir progress flows through
   [normal]/[check_for_error]; on a syntax error we hand over to the
   recovery engine via [enter_error]/[recover] until it re-synchronizes or
   input runs out. *)
let resume_parse =
  let rec normal acc tokens = function
    | I.InputNeeded env as checkpoint ->
      (* Feed the next token; once the stream is empty, feed EOF forever. *)
      let token, tokens = match tokens with
        | token :: tokens -> token, tokens
        | [] -> eof_token, []
      in
      check_for_error acc token tokens env (I.offer checkpoint token)
    | I.Shifting (_,env,_) | I.AboutToReduce (env,_) as checkpoint ->
      begin match I.resume checkpoint with
        | checkpoint' -> normal acc tokens checkpoint'
        | exception exn ->
          (* A semantic action raised: record it and enter recovery at the
             most recently consumed token. *)
          Msupport.raise_error exn;
          let token = match acc with
            | [] -> assert false
              (* Parser raised error before parsing anything *)
            | (_, token) :: _ -> token
          in
          enter_error acc token tokens env
      end
    | I.Accepted v -> acc, v
    | I.Rejected | I.HandlingError _ ->
      assert false

  and check_for_error acc token tokens env = function
    | I.HandlingError _ ->
      enter_error acc token tokens env
    | I.Shifting _ | I.AboutToReduce _ as checkpoint ->
      begin match I.resume checkpoint with
        | checkpoint' -> check_for_error acc token tokens env checkpoint'
        | exception exn ->
          Msupport.raise_error exn;
          enter_error acc token tokens env
      end
    | checkpoint ->
      (* Token consumed successfully: record the healthy checkpoint. *)
      normal ((Correct checkpoint, token) :: acc) tokens checkpoint

  and enter_error acc token tokens env =
    (* Ask the recovery engine for candidate resumptions and record a
       human-readable explanation of the syntax error. *)
    let candidates = R.generate env in
    let explanation =
      Mreader_explain.explain env token
        candidates.R.popped candidates.R.shifted
    in
    errors_ref := Mreader_explain.Syntax_explanation explanation :: !errors_ref;
    recover acc (token :: tokens) candidates

  and recover acc tokens candidates =
    let token, tokens = match tokens with
      | token :: tokens -> token, tokens
      | [] -> eof_token, []
    in
    let acc' = ((Recovering candidates, token) :: acc) in
    match R.attempt candidates token with
    | `Fail ->
      (* NOTE(review): this [if ... then match ... else ...] nesting looks
         like it needs the match parenthesized to parse as intended --
         possibly an extraction artifact; confirm against upstream. *)
      if tokens = [] then
        match candidates.R.final with
        | None -> failwith "Empty file"
        | Some v -> acc', v
      else
        (* Skip the offending token and retry; note [acc] (not [acc']) is
           kept, so the skipped token leaves no step behind. *)
        recover acc tokens candidates
    | `Accept v -> acc', v
    | `Ok (checkpoint, _) ->
      normal ((Correct checkpoint, token) :: acc) tokens checkpoint
  in
  fun acc tokens -> function
    | Correct checkpoint -> normal acc tokens checkpoint
    | Recovering candidates -> recover acc tokens candidates
(* Walk [steps] and [tokens] in lockstep, keeping the saved steps whose
   token matches the fresh token stream.  Returns the reusable prefix
   (reversed, newest first) and the remaining unconsumed tokens. *)
let seek_step steps tokens =
  let rec aux acc = function
    | (step :: steps), (token :: tokens) when snd step = token ->
      aux (step :: acc) (steps, tokens)
    | _, tokens -> acc, tokens
  in
  aux [] (steps, tokens)
(* Parse [tokens], resuming from whatever prefix of [steps] is still valid;
   falls back to a fresh checkpoint from [initial] at [initial_pos].
   Returns the new step list (oldest first) and the parse tree. *)
let parse initial steps tokens initial_pos =
  let acc, tokens = seek_step steps tokens in
  let step =
    match acc with
    | (step, _) :: _ -> step
    | [] -> Correct (initial initial_pos)
  in
  let acc, result = resume_parse acc tokens step in
  List.rev acc, result
(* Drive a full parse of [lexer]'s tokens as [kind], reusing [previous]
   steps when their variant matches the requested kind.  Runs under
   Msupport.catch_errors so warnings and recoverable errors accumulate in
   [errors_ref]. *)
let run_parser warnings lexer previous kind =
  Msupport.catch_errors warnings errors_ref @@ fun () ->
  let tokens = Mreader_lexer.tokens lexer in
  let initial_pos = Mreader_lexer.initial_position lexer in
  match kind with
  | ML ->
    let steps = match previous with
      | `Structure steps -> steps
      | _ -> []
    in
    let steps, result =
      let state = Parser_raw.Incremental.implementation in
      parse state steps tokens initial_pos in
    `Structure steps, `Implementation result
  | MLI ->
    let steps = match previous with
      | `Signature steps -> steps
      | _ -> []
    in
    let steps, result =
      let state = Parser_raw.Incremental.interface in
      parse state steps tokens initial_pos in
    `Signature steps, `Interface result
(* Parse [lexer] as [kind] from scratch (no previous steps), collecting the
   syntax errors raised along the way.  [errors_ref] is cleared before and
   after so state never leaks between parses. *)
let make warnings lexer kind =
  errors_ref := [];
  let steps, tree = run_parser warnings lexer `None kind in
  let errors = !errors_ref in
  errors_ref := [];
  {kind; steps; tree; errors; lexer}

(* The parse tree of a completed parse. *)
let result t = t.tree

(* Syntax errors gathered while producing [t]. *)
let errors t = t.errors
| null | https://raw.githubusercontent.com/janestreet/merlin-jst/0152b4e8ef1b7cd0ddee2873aa1860a971585391/src/kernel/mreader_parser.ml | ocaml | | MLL | MLY | { { { COPYING * (
This file is part of Merlin , an helper for ocaml editors
Copyright ( C ) 2013 - 2015 < frederic.bour(_)lakaban.net >
refis.thomas(_)gmail.com >
< simon.castellan(_)iuwt.fr >
Permission is hereby granted , free of charge , to any person obtaining a
copy of this software and associated documentation files ( the " Software " ) ,
to deal in the Software without restriction , including without limitation the
rights to use , copy , modify , merge , publish , distribute , sublicense , and/or
sell copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
The Software is provided " as is " , without warranty of any kind , express or
implied , including but not limited to the warranties of merchantability ,
fitness for a particular purpose and noninfringement . In no event shall
the authors or copyright holders be liable for any claim , damages or other
liability , whether in an action of contract , tort or otherwise , arising
from , out of or in connection with the software or the use or other dealings
in the Software .
) * } } }
This file is part of Merlin, an helper for ocaml editors
Copyright (C) 2013 - 2015 Frédéric Bour <frederic.bour(_)lakaban.net>
Thomas Refis <refis.thomas(_)gmail.com>
Simon Castellan <simon.castellan(_)iuwt.fr>
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
The Software is provided "as is", without warranty of any kind, express or
implied, including but not limited to the warranties of merchantability,
fitness for a particular purpose and noninfringement. In no event shall
the authors or copyright holders be liable for any claim, damages or other
liability, whether in an action of contract, tort or otherwise, arising
from, out of or in connection with the software or the use or other dealings
in the Software.
)* }}} *)
open Std
module I = Parser_raw.MenhirInterpreter
type kind =
| ML
| MLI
module Dump = struct
let symbol () = Parser_printer.print_symbol
end
module R = Mreader_recover.Make
(I)
(struct
include Parser_recover
let default_value loc x =
Default.default_loc := loc;
default_value x
let guide (type a) : a I.symbol -> bool = function
| I.T I.T_BEGIN -> true
| _ -> false
let token_of_terminal = Parser_printer.token_of_terminal
let nullable = Parser_explain.nullable
end)
(Dump)
type 'a step =
| Correct of 'a I.checkpoint
| Recovering of 'a R.candidates
type tree = [
| `Interface of Parsetree.signature
| `Implementation of Parsetree.structure
]
type steps =[
| `Signature of (Parsetree.signature step * Mreader_lexer.triple) list
| `Structure of (Parsetree.structure step * Mreader_lexer.triple) list
]
type t = {
kind: kind;
tree: tree;
steps: steps;
errors: exn list;
lexer: Mreader_lexer.t;
}
let eof_token = (Parser_raw.EOF, Lexing.dummy_pos, Lexing.dummy_pos)
let errors_ref = ref []
let resume_parse =
let rec normal acc tokens = function
| I.InputNeeded env as checkpoint ->
let token, tokens = match tokens with
| token :: tokens -> token, tokens
| [] -> eof_token, []
in
check_for_error acc token tokens env (I.offer checkpoint token)
| I.Shifting (_,env,_) | I.AboutToReduce (env,_) as checkpoint ->
begin match I.resume checkpoint with
| checkpoint' -> normal acc tokens checkpoint'
| exception exn ->
Msupport.raise_error exn;
let token = match acc with
| [] -> assert false
Parser raised error before parsing anything
| (_, token) :: _ -> token
in
enter_error acc token tokens env
end
| I.Accepted v -> acc, v
| I.Rejected | I.HandlingError _ ->
assert false
and check_for_error acc token tokens env = function
| I.HandlingError _ ->
enter_error acc token tokens env
| I.Shifting _ | I.AboutToReduce _ as checkpoint ->
begin match I.resume checkpoint with
| checkpoint' -> check_for_error acc token tokens env checkpoint'
| exception exn ->
Msupport.raise_error exn;
enter_error acc token tokens env
end
| checkpoint ->
normal ((Correct checkpoint, token) :: acc) tokens checkpoint
and enter_error acc token tokens env =
let candidates = R.generate env in
let explanation =
Mreader_explain.explain env token
candidates.R.popped candidates.R.shifted
in
errors_ref := Mreader_explain.Syntax_explanation explanation :: !errors_ref;
recover acc (token :: tokens) candidates
and recover acc tokens candidates =
let token, tokens = match tokens with
| token :: tokens -> token, tokens
| [] -> eof_token, []
in
let acc' = ((Recovering candidates, token) :: acc) in
match R.attempt candidates token with
| `Fail ->
if tokens = [] then
match candidates.R.final with
| None -> failwith "Empty file"
| Some v -> acc', v
else
recover acc tokens candidates
| `Accept v -> acc', v
| `Ok (checkpoint, _) ->
normal ((Correct checkpoint, token) :: acc) tokens checkpoint
in
fun acc tokens -> function
| Correct checkpoint -> normal acc tokens checkpoint
| Recovering candidates -> recover acc tokens candidates
let seek_step steps tokens =
let rec aux acc = function
| (step :: steps), (token :: tokens) when snd step = token ->
aux (step :: acc) (steps, tokens)
| _, tokens -> acc, tokens
in
aux [] (steps, tokens)
let parse initial steps tokens initial_pos =
let acc, tokens = seek_step steps tokens in
let step =
match acc with
| (step, _) :: _ -> step
| [] -> Correct (initial initial_pos)
in
let acc, result = resume_parse acc tokens step in
List.rev acc, result
let run_parser warnings lexer previous kind =
Msupport.catch_errors warnings errors_ref @@ fun () ->
let tokens = Mreader_lexer.tokens lexer in
let initial_pos = Mreader_lexer.initial_position lexer in
match kind with
| ML ->
let steps = match previous with
| `Structure steps -> steps
| _ -> []
in
let steps, result =
let state = Parser_raw.Incremental.implementation in
parse state steps tokens initial_pos in
`Structure steps, `Implementation result
| MLI ->
let steps = match previous with
| `Signature steps -> steps
| _ -> []
in
let steps, result =
let state = Parser_raw.Incremental.interface in
parse state steps tokens initial_pos in
`Signature steps, `Interface result
let make warnings lexer kind =
errors_ref := [];
let steps, tree = run_parser warnings lexer `None kind in
let errors = !errors_ref in
errors_ref := [];
{kind; steps; tree; errors; lexer}
let result t = t.tree
let errors t = t.errors
|
fbdf00879f073719a44b01461ff3211ebb40f25bfdae808743a987895e08ce72 | haskell/haskell-language-server | RunMetaprogramSpec.hs | # LANGUAGE CPP #
{-# LANGUAGE OverloadedStrings #-}
module CodeAction.RunMetaprogramSpec where
import Utils
import Test.Hspec
import Wingman.Types
spec :: Spec
spec = do
let metaTest l c f =
goldenTest RunMetaprogram "" l c f
describe "beginMetaprogram" $ do
goldenTest BeginMetaprogram "" 1 7 "MetaBegin"
goldenTest BeginMetaprogram "" 1 9 "MetaBeginNoWildify"
describe "golden" $ do
metaTest 6 11 "MetaMaybeAp"
metaTest 2 32 "MetaBindOne"
metaTest 2 32 "MetaBindAll"
metaTest 2 13 "MetaTry"
metaTest 2 74 "MetaChoice"
metaTest 5 40 "MetaUseImport"
metaTest 6 31 "MetaUseLocal"
metaTest 11 11 "MetaUseMethod"
metaTest 9 38 "MetaCataCollapse"
metaTest 7 16 "MetaCataCollapseUnary"
metaTest 10 32 "MetaCataAST"
metaTest 6 46 "MetaPointwise"
metaTest 4 28 "MetaUseSymbol"
metaTest 7 53 "MetaDeepOf"
metaTest 2 34 "MetaWithArg"
metaTest 2 18 "MetaLetSimple"
metaTest 5 9 "MetaIdiom"
metaTest 7 9 "MetaIdiomRecord"
metaTest 14 10 "MetaFundeps"
metaTest 2 12 "IntrosTooMany"
| null | https://raw.githubusercontent.com/haskell/haskell-language-server/f3ad27ba1634871b2240b8cd7de9f31b91a2e502/plugins/hls-tactics-plugin/new/test/CodeAction/RunMetaprogramSpec.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE CPP #
module CodeAction.RunMetaprogramSpec where
import Utils
import Test.Hspec
import Wingman.Types
spec :: Spec
spec = do
let metaTest l c f =
goldenTest RunMetaprogram "" l c f
describe "beginMetaprogram" $ do
goldenTest BeginMetaprogram "" 1 7 "MetaBegin"
goldenTest BeginMetaprogram "" 1 9 "MetaBeginNoWildify"
describe "golden" $ do
metaTest 6 11 "MetaMaybeAp"
metaTest 2 32 "MetaBindOne"
metaTest 2 32 "MetaBindAll"
metaTest 2 13 "MetaTry"
metaTest 2 74 "MetaChoice"
metaTest 5 40 "MetaUseImport"
metaTest 6 31 "MetaUseLocal"
metaTest 11 11 "MetaUseMethod"
metaTest 9 38 "MetaCataCollapse"
metaTest 7 16 "MetaCataCollapseUnary"
metaTest 10 32 "MetaCataAST"
metaTest 6 46 "MetaPointwise"
metaTest 4 28 "MetaUseSymbol"
metaTest 7 53 "MetaDeepOf"
metaTest 2 34 "MetaWithArg"
metaTest 2 18 "MetaLetSimple"
metaTest 5 9 "MetaIdiom"
metaTest 7 9 "MetaIdiomRecord"
metaTest 14 10 "MetaFundeps"
metaTest 2 12 "IntrosTooMany"
|
8ddb279d508d1fce83668b4c112a77dbabdb9c28c1c0f6eaf280628802ebe902 | gusbicalho/haskell-todo | User.hs | |
Description : Db actions for dealing with Users
This defines a typeclass ' UserDb ' , listing all actions one can use to deal with
' User 's in a database . This module also defines an instance of such class for
the ' SQLiteAction ' type : in other words , an implementation of ' UserDb ' methods
returning values that can be executed by a SQLite ' Transactor ' .
This implementation requires converting from ' User ' to the SQLite types and
vice versa . The easiest way to do this was by implementing some typeclasses
from " Database . SQLite . Simple " . To avoid orphan instances , we wrapped the types
from " HaskellTodo . Models . User " ins @newtype@s .
Compare this approach to the one used in our HTTP layer
( " HaskellTodo . WireTypes . User " ) . There , we built completely separate types with
instances for Aeson 's @ToJSON@ and @FromJSON@ , and used dedicated functions
( from " HaskellTodo . Adapters . User " ) to convert between our models and the API
types .
Part of the reason for this difference was to test and demonstrate the two
approaches . However , I also believe that decoupling is most important when
dealing with and exposing external APIs , which are contracts between separate
applications . We usually have way more control over our own database than over
clients of our API ( or over APIs we consume ) .
I believe this means it is acceptable , in this case , to take the more
lightweight approach when dealing with the database , while using the more
heavy , boilerplate - y approach for our HTTP API .
Description: Db actions for dealing with Users
This defines a typeclass 'UserDb', listing all actions one can use to deal with
'User's in a database. This module also defines an instance of such class for
the 'SQLiteAction' type: in other words, an implementation of 'UserDb' methods
returning values that can be executed by a SQLite 'Transactor'.
This implementation requires converting from 'User' to the SQLite types and
vice versa. The easiest way to do this was by implementing some typeclasses
from "Database.SQLite.Simple". To avoid orphan instances, we wrapped the types
from "HaskellTodo.Models.User" ins @newtype@s.
Compare this approach to the one used in our HTTP layer
("HaskellTodo.WireTypes.User"). There, we built completely separate types with
instances for Aeson's @ToJSON@ and @FromJSON@, and used dedicated functions
(from "HaskellTodo.Adapters.User") to convert between our models and the API
types.
Part of the reason for this difference was to test and demonstrate the two
approaches. However, I also believe that decoupling is most important when
dealing with and exposing external APIs, which are contracts between separate
applications. We usually have way more control over our own database than over
clients of our API (or over APIs we consume).
I believe this means it is acceptable, in this case, to take the more
lightweight approach when dealing with the database, while using the more
heavy, boilerplate-y approach for our HTTP API.
-}
module HaskellTodo.Db.User
( UserDb(..)
) where
import Prelude hiding (id)
import Data.Maybe (fromJust, listToMaybe)
import Data.String (fromString)
import Database.SQLite.Simple
import Common.Db.SQLite
import qualified HaskellTodo.Models.User as M.User
import HaskellTodo.Models.User ( User(..)
, NewUser(..)
, Login
, textToLogin
, textToPassword
, loginToText
, passwordToText
)
class UserDb action where
initDB :: action ()
listUsers :: action [User]
getUser :: Integer -> action (Maybe User)
createUser :: M.User.NewUser -> action User
findUserByLogin :: M.User.Login -> action (Maybe User)
newtype DbUser = DbUser { dbToUser :: User }
instance FromRow DbUser where
fromRow = DbUser <$> user
where user = User <$> id <*> login <*> password
id = field
login = textToLogin <$> field
password = textToPassword <$> field
newtype DbNewUser = DbNewUser NewUser
instance ToRow DbNewUser where
toRow (DbNewUser (NewUser newLogin newPassword)) =
toRow ( loginToText newLogin
, passwordToText newPassword
)
instance UserDb SQLiteAction where
initDB :: SQLiteAction ()
initDB = SQLiteAction $ \conn ->
execute_ conn $
fromString $ "CREATE TABLE IF NOT EXISTS users "
++ "( id integer not null primary key"
++ ", login text not null unique"
++ ", password text not null"
++ ")"
listUsers :: SQLiteAction [User]
listUsers = SQLiteAction listUsers'
getUser :: Integer -> SQLiteAction (Maybe User)
getUser rowId = SQLiteAction $ getUser' rowId
createUser :: M.User.NewUser -> SQLiteAction User
createUser newUser = SQLiteAction $ createUser' newUser
findUserByLogin :: M.User.Login -> SQLiteAction (Maybe User)
findUserByLogin login = SQLiteAction $ findUserByLogin' login
listUsers' :: Connection -> IO [User]
listUsers' conn = do
results <- query conn "SELECT id, login, password FROM users" ()
return . map dbToUser $ results
getUser' :: Integer -> Connection -> IO (Maybe User)
getUser' rowId conn = do
results <- query conn "SELECT id, login, password FROM users WHERE id = ?" [rowId]
let maybeDbUser = listToMaybe results
maybeUser = dbToUser <$> maybeDbUser
return maybeUser
createUser' :: M.User.NewUser -> Connection -> IO User
createUser' newUser conn = do
execute conn "INSERT INTO users (login, password) values (?, ?)" (DbNewUser newUser)
rowId <- lastInsertRowId conn
fromJust <$> getUser' (fromIntegral rowId) conn
findUserByLogin' :: Login -> Connection -> IO (Maybe User)
findUserByLogin' login conn = do
results <- query conn "SELECT id, login, password FROM users WHERE login = ?" [loginToText login]
let maybeDbUser = listToMaybe results
maybeUser = dbToUser <$> maybeDbUser
return maybeUser
| null | https://raw.githubusercontent.com/gusbicalho/haskell-todo/f28b0d05ddb72764cf01d29fa978457ac455ac81/src/HaskellTodo/Db/User.hs | haskell | |
Description : Db actions for dealing with Users
This defines a typeclass ' UserDb ' , listing all actions one can use to deal with
' User 's in a database . This module also defines an instance of such class for
the ' SQLiteAction ' type : in other words , an implementation of ' UserDb ' methods
returning values that can be executed by a SQLite ' Transactor ' .
This implementation requires converting from ' User ' to the SQLite types and
vice versa . The easiest way to do this was by implementing some typeclasses
from " Database . SQLite . Simple " . To avoid orphan instances , we wrapped the types
from " HaskellTodo . Models . User " ins @newtype@s .
Compare this approach to the one used in our HTTP layer
( " HaskellTodo . WireTypes . User " ) . There , we built completely separate types with
instances for Aeson 's @ToJSON@ and @FromJSON@ , and used dedicated functions
( from " HaskellTodo . Adapters . User " ) to convert between our models and the API
types .
Part of the reason for this difference was to test and demonstrate the two
approaches . However , I also believe that decoupling is most important when
dealing with and exposing external APIs , which are contracts between separate
applications . We usually have way more control over our own database than over
clients of our API ( or over APIs we consume ) .
I believe this means it is acceptable , in this case , to take the more
lightweight approach when dealing with the database , while using the more
heavy , boilerplate - y approach for our HTTP API .
Description: Db actions for dealing with Users
This defines a typeclass 'UserDb', listing all actions one can use to deal with
'User's in a database. This module also defines an instance of such class for
the 'SQLiteAction' type: in other words, an implementation of 'UserDb' methods
returning values that can be executed by a SQLite 'Transactor'.
This implementation requires converting from 'User' to the SQLite types and
vice versa. The easiest way to do this was by implementing some typeclasses
from "Database.SQLite.Simple". To avoid orphan instances, we wrapped the types
from "HaskellTodo.Models.User" ins @newtype@s.
Compare this approach to the one used in our HTTP layer
("HaskellTodo.WireTypes.User"). There, we built completely separate types with
instances for Aeson's @ToJSON@ and @FromJSON@, and used dedicated functions
(from "HaskellTodo.Adapters.User") to convert between our models and the API
types.
Part of the reason for this difference was to test and demonstrate the two
approaches. However, I also believe that decoupling is most important when
dealing with and exposing external APIs, which are contracts between separate
applications. We usually have way more control over our own database than over
clients of our API (or over APIs we consume).
I believe this means it is acceptable, in this case, to take the more
lightweight approach when dealing with the database, while using the more
heavy, boilerplate-y approach for our HTTP API.
-}
module HaskellTodo.Db.User
( UserDb(..)
) where
import Prelude hiding (id)
import Data.Maybe (fromJust, listToMaybe)
import Data.String (fromString)
import Database.SQLite.Simple
import Common.Db.SQLite
import qualified HaskellTodo.Models.User as M.User
import HaskellTodo.Models.User ( User(..)
, NewUser(..)
, Login
, textToLogin
, textToPassword
, loginToText
, passwordToText
)
class UserDb action where
initDB :: action ()
listUsers :: action [User]
getUser :: Integer -> action (Maybe User)
createUser :: M.User.NewUser -> action User
findUserByLogin :: M.User.Login -> action (Maybe User)
newtype DbUser = DbUser { dbToUser :: User }
instance FromRow DbUser where
fromRow = DbUser <$> user
where user = User <$> id <*> login <*> password
id = field
login = textToLogin <$> field
password = textToPassword <$> field
newtype DbNewUser = DbNewUser NewUser
instance ToRow DbNewUser where
toRow (DbNewUser (NewUser newLogin newPassword)) =
toRow ( loginToText newLogin
, passwordToText newPassword
)
instance UserDb SQLiteAction where
initDB :: SQLiteAction ()
initDB = SQLiteAction $ \conn ->
execute_ conn $
fromString $ "CREATE TABLE IF NOT EXISTS users "
++ "( id integer not null primary key"
++ ", login text not null unique"
++ ", password text not null"
++ ")"
listUsers :: SQLiteAction [User]
listUsers = SQLiteAction listUsers'
getUser :: Integer -> SQLiteAction (Maybe User)
getUser rowId = SQLiteAction $ getUser' rowId
createUser :: M.User.NewUser -> SQLiteAction User
createUser newUser = SQLiteAction $ createUser' newUser
findUserByLogin :: M.User.Login -> SQLiteAction (Maybe User)
findUserByLogin login = SQLiteAction $ findUserByLogin' login
listUsers' :: Connection -> IO [User]
listUsers' conn = do
results <- query conn "SELECT id, login, password FROM users" ()
return . map dbToUser $ results
getUser' :: Integer -> Connection -> IO (Maybe User)
getUser' rowId conn = do
results <- query conn "SELECT id, login, password FROM users WHERE id = ?" [rowId]
let maybeDbUser = listToMaybe results
maybeUser = dbToUser <$> maybeDbUser
return maybeUser
createUser' :: M.User.NewUser -> Connection -> IO User
createUser' newUser conn = do
execute conn "INSERT INTO users (login, password) values (?, ?)" (DbNewUser newUser)
rowId <- lastInsertRowId conn
fromJust <$> getUser' (fromIntegral rowId) conn
findUserByLogin' :: Login -> Connection -> IO (Maybe User)
findUserByLogin' login conn = do
results <- query conn "SELECT id, login, password FROM users WHERE login = ?" [loginToText login]
let maybeDbUser = listToMaybe results
maybeUser = dbToUser <$> maybeDbUser
return maybeUser
| |
4ada4360b9faa4cd866edc93957fc4413f3014b2e8cf0a5e12c50c8e8f0dde80 | mfelleisen/7GUI | info.rkt | #lang info
(define collection "7GUI")
(define deps
'(
"base"
"racket/gui"
"gui-lib"
"at-exp-lib"
"htdp-lib"
"typed-racket-lib"
"typed-racket-more"
"rackunit-lib"
"gregor"
"rackunit-lib"
))
;; -expander
(define pkg-desc "Sources for 7GUI")
(define pkg-authors '(matthias))
| null | https://raw.githubusercontent.com/mfelleisen/7GUI/e3631e78ab12306ad81b560443913afa4b156dec/info.rkt | racket | -expander | #lang info
(define collection "7GUI")
(define deps
'(
"base"
"racket/gui"
"gui-lib"
"at-exp-lib"
"htdp-lib"
"typed-racket-lib"
"typed-racket-more"
"rackunit-lib"
"gregor"
"rackunit-lib"
))
(define pkg-desc "Sources for 7GUI")
(define pkg-authors '(matthias))
|
36528f92809b9ef9b1377d246dd916db2a2eafd31b255328f48cf75f9bee0943 | fyquah/hardcaml_zprize | with_shift.ml | open Core
open Hardcaml
open Signal
type t =
{ x : Signal.t
; shift : int
}
let width t = Signal.width t.x + t.shift
let no_shift x = { x; shift = 0 }
let create ~shift x = { x; shift }
let sll t ~by = { x = t.x; shift = t.shift + by }
let map ~f t = { x = f t.x; shift = t.shift }
let uresize t new_width =
{ x = Signal.uresize t.x (new_width - t.shift); shift = t.shift }
;;
let validate_all_items_same_width items =
let w = width (List.hd_exn items) in
List.iter items ~f:(fun x -> assert (width x = w));
w
;;
let on_overlapping_bits (items : t list) do_thing =
let item_width = validate_all_items_same_width items in
let smallest_shift =
Option.value_exn
(List.min_elt ~compare:Int.compare (List.map items ~f:(fun i -> i.shift)))
in
let x =
List.map items ~f:(fun item ->
let signal =
match item.shift - smallest_shift with
| 0 -> item.x
| shift -> item.x @: zero shift
in
Signal.uresize signal (item_width - smallest_shift))
|> do_thing
in
{ x; shift = smallest_shift }
;;
let pipe_add ~scope ~enable ~clock ~stages (items : t list) =
on_overlapping_bits items (fun x ->
x |> Adder_subtractor_pipe.add_no_carry ~scope ~enable ~clock ~stages)
;;
let sum (items : t list) =
on_overlapping_bits items (fun x -> List.reduce_exn x ~f:( +: ))
;;
let peek_terms a =
List.map a ~f:(function
| `Add x -> x
| `Sub x -> x)
;;
let mixed ~init arg_items =
on_overlapping_bits (init :: peek_terms arg_items) (fun x ->
let init, tl =
match x with
| hd :: tl -> hd, tl
| _ -> assert false
in
let tl =
List.map2_exn arg_items tl ~f:(fun arg_item x ->
match arg_item with
| `Add _ -> `Add x
| `Sub _ -> `Sub x)
in
List.fold tl ~init ~f:(fun unchanged term ->
match term with
| `Add x -> unchanged +: x
| `Sub x -> unchanged -: x))
;;
let to_signal t = if t.shift = 0 then t.x else t.x @: zero t.shift
| null | https://raw.githubusercontent.com/fyquah/hardcaml_zprize/553b1be10ae9b977decbca850df6ee2d0595e7ff/libs/field_ops/src/with_shift.ml | ocaml | open Core
open Hardcaml
open Signal
type t =
{ x : Signal.t
; shift : int
}
let width t = Signal.width t.x + t.shift
let no_shift x = { x; shift = 0 }
let create ~shift x = { x; shift }
let sll t ~by = { x = t.x; shift = t.shift + by }
let map ~f t = { x = f t.x; shift = t.shift }
let uresize t new_width =
{ x = Signal.uresize t.x (new_width - t.shift); shift = t.shift }
;;
let validate_all_items_same_width items =
let w = width (List.hd_exn items) in
List.iter items ~f:(fun x -> assert (width x = w));
w
;;
let on_overlapping_bits (items : t list) do_thing =
let item_width = validate_all_items_same_width items in
let smallest_shift =
Option.value_exn
(List.min_elt ~compare:Int.compare (List.map items ~f:(fun i -> i.shift)))
in
let x =
List.map items ~f:(fun item ->
let signal =
match item.shift - smallest_shift with
| 0 -> item.x
| shift -> item.x @: zero shift
in
Signal.uresize signal (item_width - smallest_shift))
|> do_thing
in
{ x; shift = smallest_shift }
;;
let pipe_add ~scope ~enable ~clock ~stages (items : t list) =
on_overlapping_bits items (fun x ->
x |> Adder_subtractor_pipe.add_no_carry ~scope ~enable ~clock ~stages)
;;
let sum (items : t list) =
on_overlapping_bits items (fun x -> List.reduce_exn x ~f:( +: ))
;;
let peek_terms a =
List.map a ~f:(function
| `Add x -> x
| `Sub x -> x)
;;
let mixed ~init arg_items =
on_overlapping_bits (init :: peek_terms arg_items) (fun x ->
let init, tl =
match x with
| hd :: tl -> hd, tl
| _ -> assert false
in
let tl =
List.map2_exn arg_items tl ~f:(fun arg_item x ->
match arg_item with
| `Add _ -> `Add x
| `Sub _ -> `Sub x)
in
List.fold tl ~init ~f:(fun unchanged term ->
match term with
| `Add x -> unchanged +: x
| `Sub x -> unchanged -: x))
;;
let to_signal t = if t.shift = 0 then t.x else t.x @: zero t.shift
| |
0a84ab6d16ac663e13da7e0376067904aac36bfb6c89fe36873720f76b21af23 | donaldsonjw/bigloo | exit.scm | ;*=====================================================================*/
* serrano / prgm / project / bigloo / comptime / Ast / exit.scm * /
;* ------------------------------------------------------------- */
* Author : * /
* Creation : Fri Apr 21 14:19:17 1995 * /
* Last change : We d Mar 30 15:50:38 2011 ( serrano ) * /
;* ------------------------------------------------------------- */
;* The `set-exit' and `jmp-exit' management. */
;*=====================================================================*/
;*---------------------------------------------------------------------*/
;* The module */
;*---------------------------------------------------------------------*/
(module ast_exit
(include "Ast/node.sch"
"Tools/trace.sch")
(import ast_sexp
ast_local
ast_ident
type_cache
tools_progn
tools_location)
(export (set-exit->node::let-fun <sexp> <stack> ::obj ::symbol)
(jump-exit->node::jump-ex-it <sexp> <stack> ::obj ::symbol)))
;*---------------------------------------------------------------------*/
;* set-exit->node ... */
;* ------------------------------------------------------------- */
;* set-exit are always compiled as `set-jmp' `longjmp', then, we */
;* always have to make them nested into a globalized function. */
;* This function is called the `handling' function. */
;*---------------------------------------------------------------------*/
(define (set-exit->node exp stack loc site)
(define (make-local-exit exit handler)
(make-local-sexit exit *exit* (instantiate::sexit (handler handler))))
(let ((loc (find-location/loc exp loc)))
(match-case exp
((?- (?exit) . ?body)
(let* ((hdlg-name (mark-symbol-non-user!
(make-anonymous-name loc "exit")))
(hdlg-sexp `(labels ((,hdlg-name () #unspecified))
(,hdlg-name)))
(hdlg-node (sexp->node hdlg-sexp stack loc site))
(hdlg-fun (car (let-fun-locals hdlg-node)))
(exit (make-local-exit exit hdlg-fun))
(body (sexp->node (normalize-progn body)
(cons exit stack)
loc
'value))
(exit-body (instantiate::set-ex-it
(loc loc)
(type (strict-node-type *obj* *_*))
(var (instantiate::var
(type (strict-node-type *_* *exit*))
(loc loc)
(variable exit)))
(body body))))
;; we have to mark that the local is a user function other
;; bdb will get confused and will consider the handling function
;; as a C function
(local-user?-set! hdlg-fun #t)
hdlg - name ca n't be inlined otherwise the ` set - exit '
is not correct ( due to C / longjmp semantic )
(sfun-class-set! (local-value hdlg-fun) 'snifun)
(sfun-body-set! (local-value hdlg-fun) exit-body)
hdlg-node))
(else
(error-sexp->node "Illegal `set-exit' form" exp loc)))))
;*---------------------------------------------------------------------*/
;* jump-exit->node ... */
;*---------------------------------------------------------------------*/
(define (jump-exit->node exp stack loc site)
(let ((loc (find-location/loc exp loc)))
(match-case exp
((?- ?exit . ?value)
(let ((value (sexp->node (normalize-progn value) stack loc 'value))
(exit (sexp->node exit stack loc 'value)))
(instantiate::jump-ex-it
(loc loc)
(type (strict-node-type *unspec* *_*))
(exit exit)
(value value))))
(else
(error-sexp->node "Illegal `jump-exit' form" exp loc)))))
| null | https://raw.githubusercontent.com/donaldsonjw/bigloo/a4d06e409d0004e159ce92b9908719510a18aed5/comptime/Ast/exit.scm | scheme | *=====================================================================*/
* ------------------------------------------------------------- */
* ------------------------------------------------------------- */
* The `set-exit' and `jmp-exit' management. */
*=====================================================================*/
*---------------------------------------------------------------------*/
* The module */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* set-exit->node ... */
* ------------------------------------------------------------- */
* set-exit are always compiled as `set-jmp' `longjmp', then, we */
* always have to make them nested into a globalized function. */
* This function is called the `handling' function. */
*---------------------------------------------------------------------*/
we have to mark that the local is a user function other
bdb will get confused and will consider the handling function
as a C function
*---------------------------------------------------------------------*/
* jump-exit->node ... */
*---------------------------------------------------------------------*/ | * serrano / prgm / project / bigloo / comptime / Ast / exit.scm * /
* Author : * /
* Creation : Fri Apr 21 14:19:17 1995 * /
* Last change : We d Mar 30 15:50:38 2011 ( serrano ) * /
(module ast_exit
(include "Ast/node.sch"
"Tools/trace.sch")
(import ast_sexp
ast_local
ast_ident
type_cache
tools_progn
tools_location)
(export (set-exit->node::let-fun <sexp> <stack> ::obj ::symbol)
(jump-exit->node::jump-ex-it <sexp> <stack> ::obj ::symbol)))
(define (set-exit->node exp stack loc site)
(define (make-local-exit exit handler)
(make-local-sexit exit *exit* (instantiate::sexit (handler handler))))
(let ((loc (find-location/loc exp loc)))
(match-case exp
((?- (?exit) . ?body)
(let* ((hdlg-name (mark-symbol-non-user!
(make-anonymous-name loc "exit")))
(hdlg-sexp `(labels ((,hdlg-name () #unspecified))
(,hdlg-name)))
(hdlg-node (sexp->node hdlg-sexp stack loc site))
(hdlg-fun (car (let-fun-locals hdlg-node)))
(exit (make-local-exit exit hdlg-fun))
(body (sexp->node (normalize-progn body)
(cons exit stack)
loc
'value))
(exit-body (instantiate::set-ex-it
(loc loc)
(type (strict-node-type *obj* *_*))
(var (instantiate::var
(type (strict-node-type *_* *exit*))
(loc loc)
(variable exit)))
(body body))))
(local-user?-set! hdlg-fun #t)
hdlg - name ca n't be inlined otherwise the ` set - exit '
is not correct ( due to C / longjmp semantic )
(sfun-class-set! (local-value hdlg-fun) 'snifun)
(sfun-body-set! (local-value hdlg-fun) exit-body)
hdlg-node))
(else
(error-sexp->node "Illegal `set-exit' form" exp loc)))))
(define (jump-exit->node exp stack loc site)
(let ((loc (find-location/loc exp loc)))
(match-case exp
((?- ?exit . ?value)
(let ((value (sexp->node (normalize-progn value) stack loc 'value))
(exit (sexp->node exit stack loc 'value)))
(instantiate::jump-ex-it
(loc loc)
(type (strict-node-type *unspec* *_*))
(exit exit)
(value value))))
(else
(error-sexp->node "Illegal `jump-exit' form" exp loc)))))
|
13947220246062a6150f082d5a5a2c8bbda7b203bc7fd6679b740b7c7969d086 | synduce/Synduce | interval_intersection.ml | * @synduce -NB --no - lifting
type list =
| Cons of int * int * list
| Elt of int * int
let rec sorted_by_start = function
| Elt (x, y) -> true
| Cons (a, b, l) -> a < b && a < head l && sorted_by_start l
and head = function
| Elt (a, b) -> a
| Cons (a, b, l) -> a
;;
let rec spec = function
| Elt (a, b) -> false, a, b
| Cons (a, b, l) ->
let w, x, y = spec l in
interwith a b l || w, a, b
and interwith a b = function
| Elt (d, c) -> (not (c < a)) && not (b < d)
| Cons (d, c, l) -> ((not (c < a)) && not (b < d)) || interwith a b l
;;
let rec target = function
| Elt (a, b) -> [%synt f0] a b
| Cons (a, b, l) -> [%synt f1] a b (target l)
[@@requires sorted_by_start]
;;
| null | https://raw.githubusercontent.com/synduce/Synduce/d453b04cfb507395908a270b1906f5ac34298d29/benchmarks/constraints/sortedlist/interval_intersection.ml | ocaml | * @synduce -NB --no - lifting
type list =
| Cons of int * int * list
| Elt of int * int
let rec sorted_by_start = function
| Elt (x, y) -> true
| Cons (a, b, l) -> a < b && a < head l && sorted_by_start l
and head = function
| Elt (a, b) -> a
| Cons (a, b, l) -> a
;;
let rec spec = function
| Elt (a, b) -> false, a, b
| Cons (a, b, l) ->
let w, x, y = spec l in
interwith a b l || w, a, b
and interwith a b = function
| Elt (d, c) -> (not (c < a)) && not (b < d)
| Cons (d, c, l) -> ((not (c < a)) && not (b < d)) || interwith a b l
;;
let rec target = function
| Elt (a, b) -> [%synt f0] a b
| Cons (a, b, l) -> [%synt f1] a b (target l)
[@@requires sorted_by_start]
;;
| |
91a132143acf628dace97df63d0ae00345cebbc26e4392ca20c46d31d1569858 | exercism/common-lisp | pizza-pi-test.lisp | ;; Ensures that pizza-pi.lisp and the testing library are always loaded
(eval-when (:compile-toplevel :load-toplevel :execute)
(load "pizza-pi")
(ql:quickload :fiveam))
;; Defines the testing package with symbols from pizza-pi and FiveAM in scope
;; The `run-tests` function is exported for use by both the user and test-runner
(defpackage :pizza-pi-test
(:use :cl :fiveam :pizza-pi)
(:export :run-tests))
;; Enter the testing package
(in-package :pizza-pi-test)
;; Define and enter a new FiveAM test-suite
(def-suite pizza-pi-suite)
(in-suite pizza-pi-suite)
(test dough-ratio "Calculate the grams of dough needed for given number and size of pizzas"
(is (= 1648 (dough-calculator 4 30)))
(is (= 895 (dough-calculator 2 35)))
(is (= 2048 (dough-calculator 6 20)))
(is (= 306 (dough-calculator 1 15)))
(is (= 1353 (dough-calculator 5 10))))
(defun rounds-to (expected actual)
(flet ((to-2-places (n) (/ (round (* 100 n)) 100.0)))
(is (= (to-2-places expected) (to-2-places actual)))))
(test splash-of-sauces "Calculates the diameter of a pizza from the amount of sauce applied"
(is (rounds-to 32.57 (size-from-sauce 250)))
(is (rounds-to 20.60 (size-from-sauce 100)))
(is (rounds-to 37.42 (size-from-sauce 330)))
(is (rounds-to 46.52 (size-from-sauce 510)))
(is (rounds-to 53.72 (size-from-sauce 680))))
(test cheese-please "Calculates the number of pizzas of a certain size that can be made from an amount of cheese"
(is (= 3 (pizzas-per-cube 25 30)))
(is (= 1 (pizzas-per-cube 15 20)))
(is (= 132 (pizzas-per-cube 100 40)))
(is (= 0 (pizzas-per-cube 5 10)))
(is (= 85 (pizzas-per-cube 45 15))))
(test fair-share "Calculates if some number of pizzas can be evenly divided between friends"
(is-true (fair-share-p 3 4))
(is-false (fair-share-p 2 3))
(is-false (fair-share-p 4 5))
(is-true (fair-share-p 4 8))
(is-true (fair-share-p 1 4))
(is-true (fair-share-p 21 7))
(is-false (fair-share-p 11 10))
(is-true (fair-share-p 0 5))
(is-false (fair-share-p 17 5))
(is-true (fair-share-p 16 64)))
(defun run-tests (&optional (test-or-suite 'pizza-pi-suite))
"Provides human readable results of test run. Default to entire suite."
(run! test-or-suite))
| null | https://raw.githubusercontent.com/exercism/common-lisp/71929e8d8fb4f81a891b742ac1aa2e2372770951/exercises/concept/pizza-pi/pizza-pi-test.lisp | lisp | Ensures that pizza-pi.lisp and the testing library are always loaded
Defines the testing package with symbols from pizza-pi and FiveAM in scope
The `run-tests` function is exported for use by both the user and test-runner
Enter the testing package
Define and enter a new FiveAM test-suite | (eval-when (:compile-toplevel :load-toplevel :execute)
(load "pizza-pi")
(ql:quickload :fiveam))
(defpackage :pizza-pi-test
(:use :cl :fiveam :pizza-pi)
(:export :run-tests))
(in-package :pizza-pi-test)
(def-suite pizza-pi-suite)
(in-suite pizza-pi-suite)
(test dough-ratio "Calculate the grams of dough needed for given number and size of pizzas"
(is (= 1648 (dough-calculator 4 30)))
(is (= 895 (dough-calculator 2 35)))
(is (= 2048 (dough-calculator 6 20)))
(is (= 306 (dough-calculator 1 15)))
(is (= 1353 (dough-calculator 5 10))))
(defun rounds-to (expected actual)
(flet ((to-2-places (n) (/ (round (* 100 n)) 100.0)))
(is (= (to-2-places expected) (to-2-places actual)))))
(test splash-of-sauces "Calculates the diameter of a pizza from the amount of sauce applied"
(is (rounds-to 32.57 (size-from-sauce 250)))
(is (rounds-to 20.60 (size-from-sauce 100)))
(is (rounds-to 37.42 (size-from-sauce 330)))
(is (rounds-to 46.52 (size-from-sauce 510)))
(is (rounds-to 53.72 (size-from-sauce 680))))
(test cheese-please "Calculates the number of pizzas of a certain size that can be made from an amount of cheese"
(is (= 3 (pizzas-per-cube 25 30)))
(is (= 1 (pizzas-per-cube 15 20)))
(is (= 132 (pizzas-per-cube 100 40)))
(is (= 0 (pizzas-per-cube 5 10)))
(is (= 85 (pizzas-per-cube 45 15))))
(test fair-share "Calculates if some number of pizzas can be evenly divided between friends"
(is-true (fair-share-p 3 4))
(is-false (fair-share-p 2 3))
(is-false (fair-share-p 4 5))
(is-true (fair-share-p 4 8))
(is-true (fair-share-p 1 4))
(is-true (fair-share-p 21 7))
(is-false (fair-share-p 11 10))
(is-true (fair-share-p 0 5))
(is-false (fair-share-p 17 5))
(is-true (fair-share-p 16 64)))
(defun run-tests (&optional (test-or-suite 'pizza-pi-suite))
"Provides human readable results of test run. Default to entire suite."
(run! test-or-suite))
|
689da5e8b7f2715e98cc3b8025e3fb7cbbe4442f6079db98aa17ab9d9383f2b9 | semmons99/clojure-euler | prob-055.clj | problem 055 ; ; ; ; ; ; ; ; ; ;
(defn palindrome? [n]
(= (str n) (str (reverse-num n))))
(defn reverse-num [n]
(bigint (str (.reverse (StringBuffer. (str n))))))
(defn reverse-add [n]
(+ n (reverse-num n)))
(defn lychrel?
([n] (lychrel? n 1))
([n i]
(if (> i 50)
true
(let [ra (reverse-add n)]
(if (palindrome? ra)
false
(recur ra (inc i)))))))
(defn prob-055 []
(count (filter lychrel? (range 1 10000)))) | null | https://raw.githubusercontent.com/semmons99/clojure-euler/3480bc313b9df7f282dadf6e0b48d96230f1bfc1/prob-055.clj | clojure | ; ; ; ; ; ; ; ; ; | (defn palindrome? [n]
(= (str n) (str (reverse-num n))))
(defn reverse-num [n]
(bigint (str (.reverse (StringBuffer. (str n))))))
(defn reverse-add [n]
(+ n (reverse-num n)))
(defn lychrel?
([n] (lychrel? n 1))
([n i]
(if (> i 50)
true
(let [ra (reverse-add n)]
(if (palindrome? ra)
false
(recur ra (inc i)))))))
(defn prob-055 []
(count (filter lychrel? (range 1 10000)))) |
ff5e675b0af2d5572464c3e3855397ede3489d759b68b6d1080a4fca0fe51432 | thoughtpolice/claap | Disassembler.hs | -- |
-- Module : AAP.Sim.Test.Disassembler
Copyright : ( c ) 2016
License : BSD3 - style ( see LICENSE.txt )
--
-- Maintainer :
-- Stability : experimental
Portability : non - portable ( GHC extensions )
--
-- Test suite harness for the disassembler, when running CLaSH simulations.
--
module AAP.Sim.Test.Disassembler
( checkFile
) where
import CLaSH.Prelude
import qualified Prelude as P
import Data.Char ( isSpace )
import Data.List ( isPrefixOf )
import Data.List.Split ( splitOn )
import Text.Read ( readMaybe )
import Control.Monad ( guard, (>=>) )
import AAP.Decoder ( Instr(..) )
import AAP.Sim.Disassemble
type Tokens = [String]
| Tokenize a line like @add $ r0 , $ r0 , $ r0@ into its constituent tokens ,
-- @[\"add\", \"$r0\", \"$r0\", \"$r0\"]@.
tokenizeEncoding :: String -> Tokens
tokenizeEncoding = words . filter (/= ',')
| A disassembler tests consists of multiple @'CheckLine'@s , which specify a
-- set of bytes to disassemble, and the expected output tokens.
type CheckLine = ([Unsigned 8], Tokens)
-- | Strip comments and whitespace from a test file.
stripComments :: [String] -> [String]
stripComments
= filter (not . ("#" `isPrefixOf`))
. filter (P.not . P.null)
. P.map (dropWhile isSpace)
-- | Parse a line of the disassembler tests. A line for the disassembler test
-- harness to check consists of a string of bytes and the assembler encoding,
-- like:
--
@[0x00,0x02 ] ; ; encoding : add $ r0 , $ r0 , $ r0@
parseLine :: String -> Maybe CheckLine
parseLine inp = do
let [b, t] = splitOn " ;; " inp
bytes <- readMaybe b
guard ("encoding: " `isPrefixOf` t)
let tokens = tokenizeEncoding (P.drop 10 t)
return (bytes, tokens)
parseDisasmFile :: FilePath -> IO [Maybe CheckLine]
parseDisasmFile inp = go <$> P.readFile inp where
go = P.map parseLine
. stripComments
. P.lines
parseBytes :: [Unsigned 8] -> Maybe (Either (BitVector 16) (BitVector 32))
parseBytes [a, b] = Just $ Left (pack (a, b))
parseBytes [a, b, c, d] = Just $ Right (pack (a, b, c, d))
parseBytes _ = Nothing
checkFile :: FilePath -> IO ()
checkFile = parseDisasmFile >=> mapM_ k
where
k x = case x of
Nothing ->
putStrLn $ "ERROR: test had a parse error somewhere! "
P.++ "(but I'm too stupid to remember where)"
Just (bytes, tokens) -> do
case parseBytes bytes of
Nothing -> putStrLn $ "ERROR: Invalid number of bytes for disassembler test! (expected "
P.++ show tokens P.++ ")"
Just op -> case either decode16 decode32 op of
Invalid -> putStrLn $ "ERROR: I couldn't decode an invalid instruction! (expected "
P.++ show tokens P.++ ")"
instr -> do
let tokenout = tokenizeEncoding (prettyInstr instr)
case (tokenout == tokens) of
True -> return ()
False -> putStrLn $ "ERROR: invalid disassembly (expected "
P.++ show tokens P.++ ", got "
P.++ show tokenout P.++ ")"
| null | https://raw.githubusercontent.com/thoughtpolice/claap/4944b6c4ad6aff4097f8ef66231ce7d7a59f5ee7/src/aap/AAP/Sim/Test/Disassembler.hs | haskell | |
Module : AAP.Sim.Test.Disassembler
Maintainer :
Stability : experimental
Test suite harness for the disassembler, when running CLaSH simulations.
@[\"add\", \"$r0\", \"$r0\", \"$r0\"]@.
set of bytes to disassemble, and the expected output tokens.
| Strip comments and whitespace from a test file.
| Parse a line of the disassembler tests. A line for the disassembler test
harness to check consists of a string of bytes and the assembler encoding,
like:
| Copyright : ( c ) 2016
License : BSD3 - style ( see LICENSE.txt )
Portability : non - portable ( GHC extensions )
module AAP.Sim.Test.Disassembler
( checkFile
) where
import CLaSH.Prelude
import qualified Prelude as P
import Data.Char ( isSpace )
import Data.List ( isPrefixOf )
import Data.List.Split ( splitOn )
import Text.Read ( readMaybe )
import Control.Monad ( guard, (>=>) )
import AAP.Decoder ( Instr(..) )
import AAP.Sim.Disassemble
type Tokens = [String]
| Tokenize a line like @add $ r0 , $ r0 , $ r0@ into its constituent tokens ,
tokenizeEncoding :: String -> Tokens
tokenizeEncoding = words . filter (/= ',')
| A disassembler tests consists of multiple @'CheckLine'@s , which specify a
type CheckLine = ([Unsigned 8], Tokens)
stripComments :: [String] -> [String]
stripComments
= filter (not . ("#" `isPrefixOf`))
. filter (P.not . P.null)
. P.map (dropWhile isSpace)
@[0x00,0x02 ] ; ; encoding : add $ r0 , $ r0 , $ r0@
parseLine :: String -> Maybe CheckLine
parseLine inp = do
let [b, t] = splitOn " ;; " inp
bytes <- readMaybe b
guard ("encoding: " `isPrefixOf` t)
let tokens = tokenizeEncoding (P.drop 10 t)
return (bytes, tokens)
parseDisasmFile :: FilePath -> IO [Maybe CheckLine]
parseDisasmFile inp = go <$> P.readFile inp where
go = P.map parseLine
. stripComments
. P.lines
parseBytes :: [Unsigned 8] -> Maybe (Either (BitVector 16) (BitVector 32))
parseBytes [a, b] = Just $ Left (pack (a, b))
parseBytes [a, b, c, d] = Just $ Right (pack (a, b, c, d))
parseBytes _ = Nothing
checkFile :: FilePath -> IO ()
checkFile = parseDisasmFile >=> mapM_ k
where
k x = case x of
Nothing ->
putStrLn $ "ERROR: test had a parse error somewhere! "
P.++ "(but I'm too stupid to remember where)"
Just (bytes, tokens) -> do
case parseBytes bytes of
Nothing -> putStrLn $ "ERROR: Invalid number of bytes for disassembler test! (expected "
P.++ show tokens P.++ ")"
Just op -> case either decode16 decode32 op of
Invalid -> putStrLn $ "ERROR: I couldn't decode an invalid instruction! (expected "
P.++ show tokens P.++ ")"
instr -> do
let tokenout = tokenizeEncoding (prettyInstr instr)
case (tokenout == tokens) of
True -> return ()
False -> putStrLn $ "ERROR: invalid disassembly (expected "
P.++ show tokens P.++ ", got "
P.++ show tokenout P.++ ")"
|
63dc6bed6ae8119322a12eda49f86f590aabd404849b7930a037e1802bdb62e5 | camfort/camfort | Prim.hs | {-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
# LANGUAGE GADTs #
# LANGUAGE KindSignatures #
{-# LANGUAGE LambdaCase #-}
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE PolyKinds #
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -Wall #-}
|
= Handling primitive Fortran values symbolically .
There are a few challenges that this module attempts to solve :
uses fixed - width machine integers and floating point reals . Sometimes we
might want to reason about these directly ( which is supported by SBV and
therefore feasible ) . However , sometimes they get in the way of the logic and we
just want to pretend that they 're the pure mathematical values that they
approximate . For example floating point addition obeys very few algebraic laws ,
so most theorems about real numbers do n't hold at all for floating point
numbers .
In addition , 's boolean values are actually arbitrary signed integers . If
we treat all boolean values symbolically as bit - vectors , logic can become very
slow ; so it might be best to pretend that all booleans are single bits . However ,
sometimes we might want to verify properties that rely on the actual bit - vector
representation of booleans .
This module deals with these problems by abstracting over the choices : the user
should be able to choose which representation they want to use for each
primitive data type .
The user provides a ' PrimReprSpec ' which specifies how each data type should be
treated . Some examples are provided : ' prsPrecise ' treats all values precisely as
they are represented in the Fortran program . This makes logic slow and makes it
very difficult to prove many things , but it is most accurate . On the other hand ,
' prsIdealized ' treats all values as their idealized mathematical equivalents .
This makes logic fast and lots intuitive properties can be proved easily .
However , these properties often will not hold in actual running Fortran
programs : sometimes in weird edge cases and sometimes in sensible - seeming
executions . It would be interesting future work to provide an analysis that
helps to determine which of the two applies to a particular program !
= Handling primitive Fortran values symbolically.
There are a few challenges that this module attempts to solve:
Fortran uses fixed-width machine integers and floating point reals. Sometimes we
might want to reason about these directly (which is supported by SBV and
therefore feasible). However, sometimes they get in the way of the logic and we
just want to pretend that they're the pure mathematical values that they
approximate. For example floating point addition obeys very few algebraic laws,
so most theorems about real numbers don't hold at all for floating point
numbers.
In addition, Fortran's boolean values are actually arbitrary signed integers. If
we treat all boolean values symbolically as bit-vectors, logic can become very
slow; so it might be best to pretend that all booleans are single bits. However,
sometimes we might want to verify properties that rely on the actual bit-vector
representation of booleans.
This module deals with these problems by abstracting over the choices: the user
should be able to choose which representation they want to use for each
primitive data type.
The user provides a 'PrimReprSpec' which specifies how each data type should be
treated. Some examples are provided: 'prsPrecise' treats all values precisely as
they are represented in the Fortran program. This makes logic slow and makes it
very difficult to prove many things, but it is most accurate. On the other hand,
'prsIdealized' treats all values as their idealized mathematical equivalents.
This makes logic fast and lots intuitive properties can be proved easily.
However, these properties often will not hold in actual running Fortran
programs: sometimes in weird edge cases and sometimes in sensible-seeming
executions. It would be interesting future work to provide an analysis that
helps to determine which of the two applies to a particular program!
-}
module Language.Fortran.Model.Repr.Prim where
import Data.Int (Int16, Int32, Int64, Int8)
import Data.Word (Word8)
import Control.Lens
import Control.Monad.Reader (MonadReader (..))
import qualified Data.SBV as SBV
import Data.SBV.Dynamic (SVal)
import Data.SBV.Internals (SBV (..))
import Language.Fortran.Model.Types
--------------------------------------------------------------------------------
-- * Types
data IntRepr a = MachineInt | ArbitraryInt deriving (Eq, Ord, Show)
data RealRepr a = MachineFloat | ArbitraryReal deriving (Eq, Ord, Show)
data BoolRepr a = IntBool | BitBool deriving (Eq, Ord, Show)
data PrimReprHandler a =
PrimReprHandler
{ _prhKind :: !SBV.Kind
, _prhLiteral :: !(a -> SVal)
, _prhSymbolic :: !(String -> SBV.Symbolic SVal)
}
data PrimReprSpec =
PrimReprSpec
{ _prsInt8Repr :: !(IntRepr Int8)
, _prsInt16Repr :: !(IntRepr Int16)
, _prsInt32Repr :: !(IntRepr Int32)
, _prsInt64Repr :: !(IntRepr Int64)
, _prsFloatRepr :: !(RealRepr Float)
, _prsDoubleRepr :: !(RealRepr Double)
, _prsBool8Repr :: !(BoolRepr Bool8)
, _prsBool16Repr :: !(BoolRepr Bool16)
, _prsBool32Repr :: !(BoolRepr Bool32)
, _prsBool64Repr :: !(BoolRepr Bool64)
}
--------------------------------------------------------------------------------
-- ** Lenses
makeLenses ''PrimReprHandler
makeLenses ''PrimReprSpec
--------------------------------------------------------------------------------
-- * Standard specs
prsPrecise :: PrimReprSpec
prsPrecise = PrimReprSpec
{ _prsInt8Repr = MachineInt
, _prsInt16Repr = MachineInt
, _prsInt32Repr = MachineInt
, _prsInt64Repr = MachineInt
, _prsFloatRepr = MachineFloat
, _prsDoubleRepr = MachineFloat
, _prsBool8Repr = IntBool
, _prsBool16Repr = IntBool
, _prsBool32Repr = IntBool
, _prsBool64Repr = IntBool
}
prsIdealized :: PrimReprSpec
prsIdealized = PrimReprSpec
{ _prsInt8Repr = ArbitraryInt
, _prsInt16Repr = ArbitraryInt
, _prsInt32Repr = ArbitraryInt
, _prsInt64Repr = ArbitraryInt
, _prsFloatRepr = ArbitraryReal
, _prsDoubleRepr = ArbitraryReal
, _prsBool8Repr = BitBool
, _prsBool16Repr = BitBool
, _prsBool32Repr = BitBool
, _prsBool64Repr = BitBool
}
prsWithArbitraryInts :: Bool -> PrimReprSpec -> PrimReprSpec
prsWithArbitraryInts useArbitrary
| useArbitrary =
set prsInt8Repr ArbitraryInt .
set prsInt16Repr ArbitraryInt .
set prsInt32Repr ArbitraryInt .
set prsInt64Repr ArbitraryInt
| otherwise =
set prsInt8Repr MachineInt .
set prsInt16Repr MachineInt .
set prsInt32Repr MachineInt .
set prsInt64Repr MachineInt
prsWithArbitraryReals :: Bool -> PrimReprSpec -> PrimReprSpec
prsWithArbitraryReals useArbitrary
| useArbitrary =
set prsFloatRepr ArbitraryReal .
set prsDoubleRepr ArbitraryReal
| otherwise =
set prsFloatRepr MachineFloat .
set prsDoubleRepr MachineFloat
--------------------------------------------------------------------------------
-- * Using specs
makeSymRepr :: PrimReprSpec -> Prim p k a -> PrimReprHandler a
makeSymRepr spec = \case
PInt8 -> intRepr prsInt8Repr
PInt16 -> intRepr prsInt16Repr
PInt32 -> intRepr prsInt32Repr
PInt64 -> intRepr prsInt64Repr
PFloat -> realRepr prsFloatRepr
PDouble -> realRepr prsDoubleRepr
PBool8 -> boolRepr getBool8 prsBool8Repr
PBool16 -> boolRepr getBool16 prsBool16Repr
PBool32 -> boolRepr getBool32 prsBool32Repr
PBool64 -> boolRepr getBool64 prsBool64Repr
PChar -> bySymWord (0 :: Word8) getChar8
where
intRepr
:: (Integral a, SBV.SymVal a)
=> Lens' PrimReprSpec (IntRepr a) -> PrimReprHandler a
intRepr l = case spec ^. l of
MachineInt -> bySymWord 0 id
ArbitraryInt -> bySymWord (0 :: Integer) fromIntegral
realRepr
:: (RealFloat a, SBV.SymVal a)
=> Lens' PrimReprSpec (RealRepr a) -> PrimReprHandler a
realRepr l = case spec ^. l of
MachineFloat -> bySymWord 0 id
ArbitraryReal -> bySymWord (0 :: SBV.AlgReal) realToFrac
boolRepr
:: (Integral b, SBV.SymVal b)
=> (a -> b) -> Lens' PrimReprSpec (BoolRepr a) -> PrimReprHandler a
boolRepr unwrap l = case spec ^. l of
IntBool -> bySymWord 0 unwrap
BitBool -> bySymWord (False :: Bool) (toBool . unwrap)
bySymWord :: (SBV.SymVal b) => b -> (a -> b) -> PrimReprHandler a
bySymWord (repValue :: b) fromPrim =
PrimReprHandler
{ _prhKind = SBV.kindOf repValue
, _prhLiteral = unSBV . SBV.literal . fromPrim
, _prhSymbolic = fmap (unSBV :: SBV b -> SVal) . SBV.symbolic
}
toBool :: (Ord a, Num a) => a -> Bool
toBool x = x > 0
--------------------------------------------------------------------------------
* Monadic Accessors
class HasPrimReprHandlers r where
primReprHandlers :: r -> PrimReprHandlers
primReprHandlers env = PrimReprHandlers (primReprHandler env)
note that we must eta expand due to GHC 9.0 simplified subsumption
primReprHandler :: r -> Prim p k a -> PrimReprHandler a
primReprHandler r p = unPrimReprHandlers (primReprHandlers r) p
newtype PrimReprHandlers =
PrimReprHandlers { unPrimReprHandlers :: forall p k a. Prim p k a -> PrimReprHandler a }
instance HasPrimReprHandlers PrimReprHandlers where
primReprHandlers = id
primSBVKind :: (MonadReader r m, HasPrimReprHandlers r) => Prim p k a -> m SBV.Kind
primSBVKind p = view (to (flip primReprHandler p) . prhKind)
primLit :: (MonadReader r m, HasPrimReprHandlers r) => Prim p k a -> a -> m SVal
primLit p a = do
lit <- view (to (flip primReprHandler p) . prhLiteral)
return (lit a)
primSymbolic
:: (MonadReader r m, HasPrimReprHandlers r)
=> Prim p k a -> String -> m (SBV.Symbolic SVal)
primSymbolic p nm = do
symbolic <- view (to (flip primReprHandler p) . prhSymbolic)
return (symbolic nm)
| null | https://raw.githubusercontent.com/camfort/camfort/861646ae5af61a41d1519049cfeda60ac82f3d98/src/Language/Fortran/Model/Repr/Prim.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
# LANGUAGE LambdaCase #
# LANGUAGE RankNTypes #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TemplateHaskell #
# OPTIONS_GHC -Wall #
------------------------------------------------------------------------------
* Types
------------------------------------------------------------------------------
** Lenses
------------------------------------------------------------------------------
* Standard specs
------------------------------------------------------------------------------
* Using specs
------------------------------------------------------------------------------ | # LANGUAGE GADTs #
# LANGUAGE KindSignatures #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE PolyKinds #
|
= Handling primitive Fortran values symbolically .
There are a few challenges that this module attempts to solve :
uses fixed - width machine integers and floating point reals . Sometimes we
might want to reason about these directly ( which is supported by SBV and
therefore feasible ) . However , sometimes they get in the way of the logic and we
just want to pretend that they 're the pure mathematical values that they
approximate . For example floating point addition obeys very few algebraic laws ,
so most theorems about real numbers do n't hold at all for floating point
numbers .
In addition , 's boolean values are actually arbitrary signed integers . If
we treat all boolean values symbolically as bit - vectors , logic can become very
slow ; so it might be best to pretend that all booleans are single bits . However ,
sometimes we might want to verify properties that rely on the actual bit - vector
representation of booleans .
This module deals with these problems by abstracting over the choices : the user
should be able to choose which representation they want to use for each
primitive data type .
The user provides a ' PrimReprSpec ' which specifies how each data type should be
treated . Some examples are provided : ' prsPrecise ' treats all values precisely as
they are represented in the Fortran program . This makes logic slow and makes it
very difficult to prove many things , but it is most accurate . On the other hand ,
' prsIdealized ' treats all values as their idealized mathematical equivalents .
This makes logic fast and lots intuitive properties can be proved easily .
However , these properties often will not hold in actual running Fortran
programs : sometimes in weird edge cases and sometimes in sensible - seeming
executions . It would be interesting future work to provide an analysis that
helps to determine which of the two applies to a particular program !
= Handling primitive Fortran values symbolically.
There are a few challenges that this module attempts to solve:
Fortran uses fixed-width machine integers and floating point reals. Sometimes we
might want to reason about these directly (which is supported by SBV and
therefore feasible). However, sometimes they get in the way of the logic and we
just want to pretend that they're the pure mathematical values that they
approximate. For example floating point addition obeys very few algebraic laws,
so most theorems about real numbers don't hold at all for floating point
numbers.
In addition, Fortran's boolean values are actually arbitrary signed integers. If
we treat all boolean values symbolically as bit-vectors, logic can become very
slow; so it might be best to pretend that all booleans are single bits. However,
sometimes we might want to verify properties that rely on the actual bit-vector
representation of booleans.
This module deals with these problems by abstracting over the choices: the user
should be able to choose which representation they want to use for each
primitive data type.
The user provides a 'PrimReprSpec' which specifies how each data type should be
treated. Some examples are provided: 'prsPrecise' treats all values precisely as
they are represented in the Fortran program. This makes logic slow and makes it
very difficult to prove many things, but it is most accurate. On the other hand,
'prsIdealized' treats all values as their idealized mathematical equivalents.
This makes logic fast and lots intuitive properties can be proved easily.
However, these properties often will not hold in actual running Fortran
programs: sometimes in weird edge cases and sometimes in sensible-seeming
executions. It would be interesting future work to provide an analysis that
helps to determine which of the two applies to a particular program!
-}
module Language.Fortran.Model.Repr.Prim where
import Data.Int (Int16, Int32, Int64, Int8)
import Data.Word (Word8)
import Control.Lens
import Control.Monad.Reader (MonadReader (..))
import qualified Data.SBV as SBV
import Data.SBV.Dynamic (SVal)
import Data.SBV.Internals (SBV (..))
import Language.Fortran.Model.Types
data IntRepr a = MachineInt | ArbitraryInt deriving (Eq, Ord, Show)
data RealRepr a = MachineFloat | ArbitraryReal deriving (Eq, Ord, Show)
data BoolRepr a = IntBool | BitBool deriving (Eq, Ord, Show)
data PrimReprHandler a =
PrimReprHandler
{ _prhKind :: !SBV.Kind
, _prhLiteral :: !(a -> SVal)
, _prhSymbolic :: !(String -> SBV.Symbolic SVal)
}
data PrimReprSpec =
PrimReprSpec
{ _prsInt8Repr :: !(IntRepr Int8)
, _prsInt16Repr :: !(IntRepr Int16)
, _prsInt32Repr :: !(IntRepr Int32)
, _prsInt64Repr :: !(IntRepr Int64)
, _prsFloatRepr :: !(RealRepr Float)
, _prsDoubleRepr :: !(RealRepr Double)
, _prsBool8Repr :: !(BoolRepr Bool8)
, _prsBool16Repr :: !(BoolRepr Bool16)
, _prsBool32Repr :: !(BoolRepr Bool32)
, _prsBool64Repr :: !(BoolRepr Bool64)
}
makeLenses ''PrimReprHandler
makeLenses ''PrimReprSpec
prsPrecise :: PrimReprSpec
prsPrecise = PrimReprSpec
{ _prsInt8Repr = MachineInt
, _prsInt16Repr = MachineInt
, _prsInt32Repr = MachineInt
, _prsInt64Repr = MachineInt
, _prsFloatRepr = MachineFloat
, _prsDoubleRepr = MachineFloat
, _prsBool8Repr = IntBool
, _prsBool16Repr = IntBool
, _prsBool32Repr = IntBool
, _prsBool64Repr = IntBool
}
prsIdealized :: PrimReprSpec
prsIdealized = PrimReprSpec
{ _prsInt8Repr = ArbitraryInt
, _prsInt16Repr = ArbitraryInt
, _prsInt32Repr = ArbitraryInt
, _prsInt64Repr = ArbitraryInt
, _prsFloatRepr = ArbitraryReal
, _prsDoubleRepr = ArbitraryReal
, _prsBool8Repr = BitBool
, _prsBool16Repr = BitBool
, _prsBool32Repr = BitBool
, _prsBool64Repr = BitBool
}
prsWithArbitraryInts :: Bool -> PrimReprSpec -> PrimReprSpec
prsWithArbitraryInts useArbitrary
| useArbitrary =
set prsInt8Repr ArbitraryInt .
set prsInt16Repr ArbitraryInt .
set prsInt32Repr ArbitraryInt .
set prsInt64Repr ArbitraryInt
| otherwise =
set prsInt8Repr MachineInt .
set prsInt16Repr MachineInt .
set prsInt32Repr MachineInt .
set prsInt64Repr MachineInt
prsWithArbitraryReals :: Bool -> PrimReprSpec -> PrimReprSpec
prsWithArbitraryReals useArbitrary
| useArbitrary =
set prsFloatRepr ArbitraryReal .
set prsDoubleRepr ArbitraryReal
| otherwise =
set prsFloatRepr MachineFloat .
set prsDoubleRepr MachineFloat
makeSymRepr :: PrimReprSpec -> Prim p k a -> PrimReprHandler a
makeSymRepr spec = \case
PInt8 -> intRepr prsInt8Repr
PInt16 -> intRepr prsInt16Repr
PInt32 -> intRepr prsInt32Repr
PInt64 -> intRepr prsInt64Repr
PFloat -> realRepr prsFloatRepr
PDouble -> realRepr prsDoubleRepr
PBool8 -> boolRepr getBool8 prsBool8Repr
PBool16 -> boolRepr getBool16 prsBool16Repr
PBool32 -> boolRepr getBool32 prsBool32Repr
PBool64 -> boolRepr getBool64 prsBool64Repr
PChar -> bySymWord (0 :: Word8) getChar8
where
intRepr
:: (Integral a, SBV.SymVal a)
=> Lens' PrimReprSpec (IntRepr a) -> PrimReprHandler a
intRepr l = case spec ^. l of
MachineInt -> bySymWord 0 id
ArbitraryInt -> bySymWord (0 :: Integer) fromIntegral
realRepr
:: (RealFloat a, SBV.SymVal a)
=> Lens' PrimReprSpec (RealRepr a) -> PrimReprHandler a
realRepr l = case spec ^. l of
MachineFloat -> bySymWord 0 id
ArbitraryReal -> bySymWord (0 :: SBV.AlgReal) realToFrac
boolRepr
:: (Integral b, SBV.SymVal b)
=> (a -> b) -> Lens' PrimReprSpec (BoolRepr a) -> PrimReprHandler a
boolRepr unwrap l = case spec ^. l of
IntBool -> bySymWord 0 unwrap
BitBool -> bySymWord (False :: Bool) (toBool . unwrap)
bySymWord :: (SBV.SymVal b) => b -> (a -> b) -> PrimReprHandler a
bySymWord (repValue :: b) fromPrim =
PrimReprHandler
{ _prhKind = SBV.kindOf repValue
, _prhLiteral = unSBV . SBV.literal . fromPrim
, _prhSymbolic = fmap (unSBV :: SBV b -> SVal) . SBV.symbolic
}
toBool :: (Ord a, Num a) => a -> Bool
toBool x = x > 0
* Monadic Accessors
class HasPrimReprHandlers r where
primReprHandlers :: r -> PrimReprHandlers
primReprHandlers env = PrimReprHandlers (primReprHandler env)
note that we must eta expand due to GHC 9.0 simplified subsumption
primReprHandler :: r -> Prim p k a -> PrimReprHandler a
primReprHandler r p = unPrimReprHandlers (primReprHandlers r) p
newtype PrimReprHandlers =
PrimReprHandlers { unPrimReprHandlers :: forall p k a. Prim p k a -> PrimReprHandler a }
instance HasPrimReprHandlers PrimReprHandlers where
primReprHandlers = id
primSBVKind :: (MonadReader r m, HasPrimReprHandlers r) => Prim p k a -> m SBV.Kind
primSBVKind p = view (to (flip primReprHandler p) . prhKind)
primLit :: (MonadReader r m, HasPrimReprHandlers r) => Prim p k a -> a -> m SVal
primLit p a = do
lit <- view (to (flip primReprHandler p) . prhLiteral)
return (lit a)
primSymbolic
:: (MonadReader r m, HasPrimReprHandlers r)
=> Prim p k a -> String -> m (SBV.Symbolic SVal)
primSymbolic p nm = do
symbolic <- view (to (flip primReprHandler p) . prhSymbolic)
return (symbolic nm)
|
434eaeb0a58a8ca31ed045eb3d15146bc3b7dc2d636962d301ae6c3348d69d10 | UChicago-PL/smyth | list2.ml | open Pervasives2
let pure_bind xs f =
List.map f xs
let pure x =
[x]
let bind xs f =
List.map f xs
|> List.concat
let concat_map f xs =
bind xs f
let maximum =
function
| [] ->
None
| head :: tail ->
Some (List.fold_left max head tail)
let repeat n x =
let rec helper k acc =
if k <= 0 then
acc
else
helper (k - 1) (x :: acc)
in
helper n []
let sequence mxs =
List.fold_right
( fun xs acc ->
bind xs @@ fun x ->
pure_bind acc @@ fun ys ->
x :: ys
)
mxs
([[]])
let filter_somes xs =
List.filter_map Fun.id xs
let intersperse sep xs =
let rec helper acc =
function
| [] -> List.rev acc
| [x] -> List.rev (x :: acc)
| head :: tail -> helper (sep :: head :: acc) tail
in
helper [] xs
let range ~low ~high =
ListLabels.init ~len:(high - low + 1) ~f:((+) low)
let remove_first y xs =
let rec helper acc =
function
| [] ->
List.rev acc
| head :: tail ->
if head = y then
List.rev_append acc tail
else
helper (head :: acc) tail
in
helper [] xs
let permutations ys =
(* Source: *)
let rec permutations' xs =
if xs = [] then
[[]]
else
bind xs @@ fun x ->
bind (permutations' (remove_first x xs)) @@ fun permutation ->
[ x :: permutation ]
in
List.sort_uniq compare (permutations' ys)
let map3 f xs1 xs2 xs3 =
List.map2
(fun (x1, x2) x3 -> f x1 x2 x3)
(List.combine xs1 xs2)
xs3
let hd_opt xs =
match xs with
| [] ->
None
| head :: _ ->
Some head
let tl_opt xs =
match xs with
| [] ->
None
| _ :: tail ->
Some tail
let uncons xs =
match xs with
| [] ->
None
| head :: tail ->
Some (head, tail)
let is_empty xs =
match xs with
| [] ->
true
| _ :: _ ->
false
let rec transpose xss =
if List.for_all is_empty xss then
[]
else
List.filter_map hd_opt xss
:: transpose (List.map (tl_opt >> Option2.with_default []) xss)
let collapse_equal xs =
match xs with
| [] ->
None
| head :: tail ->
if List.for_all (fun x -> x = head) tail then
Some head
else
None
let index_left xs =
List.mapi (fun i x -> (i, x)) xs
let index_right xs =
List.mapi (fun i x -> (x, i)) xs
let rec find_map f xs =
match xs with
| [] ->
None
| head :: tail ->
begin match f head with
| Some x ->
Some x
| None ->
find_map f tail
end
let sum xs =
List.fold_left ((+)) 0 xs
let fsum xs =
List.fold_left ((+.)) 0.0 xs
let average xs =
let len =
List.length xs
in
if Int.equal len 0 then
None
else
Some (fsum xs /. float_of_int len)
let take n xs =
let rec helper acc n xs =
if n <= 0 then
List.rev acc
else
match xs with
| [] ->
List.rev acc
| head :: tail ->
helper (head :: acc) (n - 1) tail
in
helper [] n xs
let rec drop n xs =
if n <= 0 then
xs
else
match xs with
| [] ->
[]
| _ :: tail ->
drop (n - 1) tail
let cartesian_product xs ys =
concat_map (fun x -> List.map (fun y -> (x, y)) ys) xs
let count pred xs =
let rec helper acc =
function
| [] ->
acc
| head :: tail ->
helper (if pred head then acc + 1 else acc) tail
in
helper 0 xs
| null | https://raw.githubusercontent.com/UChicago-PL/smyth/08fea281f70d3ee604fde9dde140c8a570d6905d/lib/stdlib2/list2.ml | ocaml | Source: | open Pervasives2
let pure_bind xs f =
List.map f xs
let pure x =
[x]
let bind xs f =
List.map f xs
|> List.concat
let concat_map f xs =
bind xs f
let maximum =
function
| [] ->
None
| head :: tail ->
Some (List.fold_left max head tail)
let repeat n x =
let rec helper k acc =
if k <= 0 then
acc
else
helper (k - 1) (x :: acc)
in
helper n []
let sequence mxs =
List.fold_right
( fun xs acc ->
bind xs @@ fun x ->
pure_bind acc @@ fun ys ->
x :: ys
)
mxs
([[]])
let filter_somes xs =
List.filter_map Fun.id xs
let intersperse sep xs =
let rec helper acc =
function
| [] -> List.rev acc
| [x] -> List.rev (x :: acc)
| head :: tail -> helper (sep :: head :: acc) tail
in
helper [] xs
let range ~low ~high =
ListLabels.init ~len:(high - low + 1) ~f:((+) low)
let remove_first y xs =
let rec helper acc =
function
| [] ->
List.rev acc
| head :: tail ->
if head = y then
List.rev_append acc tail
else
helper (head :: acc) tail
in
helper [] xs
let permutations ys =
let rec permutations' xs =
if xs = [] then
[[]]
else
bind xs @@ fun x ->
bind (permutations' (remove_first x xs)) @@ fun permutation ->
[ x :: permutation ]
in
List.sort_uniq compare (permutations' ys)
let map3 f xs1 xs2 xs3 =
List.map2
(fun (x1, x2) x3 -> f x1 x2 x3)
(List.combine xs1 xs2)
xs3
let hd_opt xs =
match xs with
| [] ->
None
| head :: _ ->
Some head
let tl_opt xs =
match xs with
| [] ->
None
| _ :: tail ->
Some tail
let uncons xs =
match xs with
| [] ->
None
| head :: tail ->
Some (head, tail)
let is_empty xs =
match xs with
| [] ->
true
| _ :: _ ->
false
let rec transpose xss =
if List.for_all is_empty xss then
[]
else
List.filter_map hd_opt xss
:: transpose (List.map (tl_opt >> Option2.with_default []) xss)
let collapse_equal xs =
match xs with
| [] ->
None
| head :: tail ->
if List.for_all (fun x -> x = head) tail then
Some head
else
None
let index_left xs =
List.mapi (fun i x -> (i, x)) xs
let index_right xs =
List.mapi (fun i x -> (x, i)) xs
let rec find_map f xs =
match xs with
| [] ->
None
| head :: tail ->
begin match f head with
| Some x ->
Some x
| None ->
find_map f tail
end
let sum xs =
List.fold_left ((+)) 0 xs
let fsum xs =
List.fold_left ((+.)) 0.0 xs
let average xs =
let len =
List.length xs
in
if Int.equal len 0 then
None
else
Some (fsum xs /. float_of_int len)
let take n xs =
let rec helper acc n xs =
if n <= 0 then
List.rev acc
else
match xs with
| [] ->
List.rev acc
| head :: tail ->
helper (head :: acc) (n - 1) tail
in
helper [] n xs
let rec drop n xs =
if n <= 0 then
xs
else
match xs with
| [] ->
[]
| _ :: tail ->
drop (n - 1) tail
let cartesian_product xs ys =
concat_map (fun x -> List.map (fun y -> (x, y)) ys) xs
let count pred xs =
let rec helper acc =
function
| [] ->
acc
| head :: tail ->
helper (if pred head then acc + 1 else acc) tail
in
helper 0 xs
|
05a5f6be60ff32e31d02b2b0903e2383c91ba0f94dc99499dbd0f944cc069e5a | darrenldl/ocaml-SeqBox | progress_report.mli | type silence_level = L0 | L1 | L2
type silence_settings
type ('a, 'b, 'c) progress_print_functions =
{ print_progress :
start_time_src:'a ->
units_so_far_src:'b ->
total_units_src:'c ->
unit
; print_newline_if_not_done :
start_time_src:'a ->
units_so_far_src:'b ->
total_units_src:'c ->
unit
}
type progress_element = Percentage
| Progress_bar
| Current_rate_short
| Average_rate_short
| Time_used_short
| Time_left_short
| Current_rate_long
| Average_rate_long
| Time_used_long
| Time_left_long
module Helper : sig
val seconds_to_hms : int -> int * int * int
val silence_level_to_silence_settings : silence_level -> silence_settings
end
val default_silence_settings : silence_settings
val gen_print_generic :
header : string ->
silence_settings : silence_settings ref ->
display_while_active : progress_element list ->
display_on_finish : progress_element list ->
display_on_finish_early : progress_element list ->
unit : string ->
print_interval : float ->
eval_start_time : ('a -> float) ->
eval_units_so_far : ('b -> int64) ->
eval_total_units : ('c -> int64) ->
('a, 'b, 'c) progress_print_functions
| null | https://raw.githubusercontent.com/darrenldl/ocaml-SeqBox/658c623db8745ae1d804c75880b29fb53435860f/src/progress_report.mli | ocaml | type silence_level = L0 | L1 | L2
type silence_settings
type ('a, 'b, 'c) progress_print_functions =
{ print_progress :
start_time_src:'a ->
units_so_far_src:'b ->
total_units_src:'c ->
unit
; print_newline_if_not_done :
start_time_src:'a ->
units_so_far_src:'b ->
total_units_src:'c ->
unit
}
type progress_element = Percentage
| Progress_bar
| Current_rate_short
| Average_rate_short
| Time_used_short
| Time_left_short
| Current_rate_long
| Average_rate_long
| Time_used_long
| Time_left_long
module Helper : sig
val seconds_to_hms : int -> int * int * int
val silence_level_to_silence_settings : silence_level -> silence_settings
end
val default_silence_settings : silence_settings
val gen_print_generic :
header : string ->
silence_settings : silence_settings ref ->
display_while_active : progress_element list ->
display_on_finish : progress_element list ->
display_on_finish_early : progress_element list ->
unit : string ->
print_interval : float ->
eval_start_time : ('a -> float) ->
eval_units_so_far : ('b -> int64) ->
eval_total_units : ('c -> int64) ->
('a, 'b, 'c) progress_print_functions
| |
5ff22d68aca0384292ace4743c7312a2c53cd7ac57ad33935e08fe4f9b80bd8c | marigold-dev/chusai | tez.ml | type t = int64
let of_int amount = Int64.(mul 1_000_000L (of_int amount))
let of_mutez_int = Int64.of_int
let zero = 0L
let one = of_int 1
let mutez_int64 t = t
let of_z amount = amount |> Z.to_int |> of_int
let to_string amount =
let mult_int = 1_000_000L in
let rec left amount =
let d, r = Int64.(div amount 1000L), Int64.(rem amount 1000L) in
if d > 0L then Format.asprintf "%s%03Ld" (left d) r else Format.asprintf "%Ld" r
in
let right amount =
let triplet v =
if v mod 10 > 0
then Format.asprintf "%03d" v
else if v mod 100 > 0
then Format.asprintf "%02d" (v / 10)
else Format.asprintf "%d" (v / 100)
in
let hi, lo = amount / 1000, amount mod 1000 in
if lo = 0
then Format.asprintf "%s" (triplet hi)
else Format.asprintf "%03d%s" hi (triplet lo)
in
let ints, decs = Int64.(div amount mult_int), Int64.(to_int (rem amount mult_int)) in
if decs > 0 then Format.asprintf "%s.%s" (left ints) (right decs) else left ints
;;
let to_float amount = Float.mul (Int64.to_float amount) 0.000_001
let to_mutez amount = Int64.to_int amount
let to_z tez = tez |> to_mutez |> Z.of_int
let ( + ) = Int64.add
let ( - ) = Int64.sub
let parse_floating tez_string =
let re = Tezt.Base.rex "(\\d+)\\.?(\\d*)" in
let fail () = Tezt.Test.fail "Invalid tez value: '%s'." tez_string in
let parse_int s =
match int_of_string_opt s with
| None -> fail ()
| Some i -> i
in
let integral, decimal =
match Tezt.Base.(tez_string =~** re) with
| None -> fail ()
| Some (i, d) -> i, d
in
let integral = parse_int integral in
let decimal =
match String.length decimal with
| 0 -> 0
| 1 -> 100_000 * parse_int decimal
| 2 -> 10_000 * parse_int decimal
| 3 -> 1_000 * parse_int decimal
| 4 -> 100 * parse_int decimal
| 5 -> 10 * parse_int decimal
| 6 -> parse_int decimal
| _ -> fail ()
in
of_int integral + of_mutez_int decimal
;;
| null | https://raw.githubusercontent.com/marigold-dev/chusai/09f798c585121d3b02bf3fed0f52f15c3bdc79a1/tezt/lib/tez.ml | ocaml | type t = int64
let of_int amount = Int64.(mul 1_000_000L (of_int amount))
let of_mutez_int = Int64.of_int
let zero = 0L
let one = of_int 1
let mutez_int64 t = t
let of_z amount = amount |> Z.to_int |> of_int
let to_string amount =
let mult_int = 1_000_000L in
let rec left amount =
let d, r = Int64.(div amount 1000L), Int64.(rem amount 1000L) in
if d > 0L then Format.asprintf "%s%03Ld" (left d) r else Format.asprintf "%Ld" r
in
let right amount =
let triplet v =
if v mod 10 > 0
then Format.asprintf "%03d" v
else if v mod 100 > 0
then Format.asprintf "%02d" (v / 10)
else Format.asprintf "%d" (v / 100)
in
let hi, lo = amount / 1000, amount mod 1000 in
if lo = 0
then Format.asprintf "%s" (triplet hi)
else Format.asprintf "%03d%s" hi (triplet lo)
in
let ints, decs = Int64.(div amount mult_int), Int64.(to_int (rem amount mult_int)) in
if decs > 0 then Format.asprintf "%s.%s" (left ints) (right decs) else left ints
;;
let to_float amount = Float.mul (Int64.to_float amount) 0.000_001
let to_mutez amount = Int64.to_int amount
let to_z tez = tez |> to_mutez |> Z.of_int
let ( + ) = Int64.add
let ( - ) = Int64.sub
let parse_floating tez_string =
let re = Tezt.Base.rex "(\\d+)\\.?(\\d*)" in
let fail () = Tezt.Test.fail "Invalid tez value: '%s'." tez_string in
let parse_int s =
match int_of_string_opt s with
| None -> fail ()
| Some i -> i
in
let integral, decimal =
match Tezt.Base.(tez_string =~** re) with
| None -> fail ()
| Some (i, d) -> i, d
in
let integral = parse_int integral in
let decimal =
match String.length decimal with
| 0 -> 0
| 1 -> 100_000 * parse_int decimal
| 2 -> 10_000 * parse_int decimal
| 3 -> 1_000 * parse_int decimal
| 4 -> 100 * parse_int decimal
| 5 -> 10 * parse_int decimal
| 6 -> parse_int decimal
| _ -> fail ()
in
of_int integral + of_mutez_int decimal
;;
| |
3427fdc84e800123de292ea2401ccf190298d38a12d1d22c605df6c0d2a22f4d | Carnap/Carnap | Qualitative.hs | module Filter.Qualitative (makeQualitativeProblems) where
import Carnap.GHCJS.SharedFunctions (simpleHash, simpleCipher)
import Text.Pandoc
import Filter.Util (numof, contentOf, intoChunks,formatChunk, unlines', exerciseWrapper, sanitizeHtml)
import Data.Map (fromList, toList, unions)
import qualified Data.Text as T
import Data.Text (Text)
import Prelude
makeQualitativeProblems :: Block -> Block
makeQualitativeProblems cb@(CodeBlock (_,classes,extra) contents)
| "QualitativeProblem" `elem` classes = Div ("",[],[]) $ map (activate classes extra) $ intoChunks contents
| otherwise = cb
makeQualitativeProblems x = x
activate :: [Text] -> [(Text, Text)] -> Text -> Block
activate cls extra chunk
| "MultipleChoice" `elem` cls = mctemplate (opts [("qualitativetype","multiplechoice"), ("goal", safeContentOf h) ])
| "MultipleSelection" `elem` cls = mctemplate (opts [("qualitativetype","multipleselection"), ("goal", safeContentOf h) ])
| "ShortAnswer" `elem` cls = template (opts [("qualitativetype", "shortanswer"), ("goal", safeContentOf h) ])
| "Numerical" `elem` cls = case T.splitOn ":" (safeContentOf h) of
[g,p] -> template (opts [ ("qualitativetype","numerical")
, ("goal", T.pack $ show (simpleCipher $ T.unpack g))
, ("problem", sanitizeHtml p)
])
_ -> Div ("",[],[]) [Plain [Str "problem with numerical qualitative problem specification"]]
| otherwise = RawBlock "html" "<div>No Matching Qualitative Problem Type</div>"
where safeContentOf = sanitizeHtml . contentOf
(h:t) = formatChunk chunk
opts adhoc = unions [fromList extra, fromList fixed, fromList adhoc]
fixed = [ ("type","qualitative")
, ("submission", T.concat ["saveAs:", numof h])
]
mctemplate myOpts = exerciseWrapper (toList myOpts) (numof h) $
Need rawblock here to get the linebreaks
--right.
RawBlock "html" $ T.concat
["<div", optString myOpts, ">"
, unlines' (map (T.pack . show . withHash . T.unpack) t)
, "</div>"]
template myOpts = exerciseWrapper (toList myOpts) (numof h) $
Need rawblock here to get the linebreaks
--right.
RawBlock "html" $ T.concat [ "<div", optString myOpts, ">", unlines' t, "</div>" ]
optString myOpts = T.concat $ map (\(x,y) -> T.concat [" data-carnap-", x, "=\"", y, "\""]) (toList myOpts)
withHash s | length s' > 0 = if head s' `elem` ['*','+','-'] then (simpleHash s', tail s') else (simpleHash s',s')
| otherwise = (simpleHash s', s')
where s' = (dropWhile (== ' ') s)
| null | https://raw.githubusercontent.com/Carnap/Carnap/a2b314474db803f31ee228f23ad7406c10a5c85a/Carnap-Server/Filter/Qualitative.hs | haskell | right.
right. | module Filter.Qualitative (makeQualitativeProblems) where
import Carnap.GHCJS.SharedFunctions (simpleHash, simpleCipher)
import Text.Pandoc
import Filter.Util (numof, contentOf, intoChunks,formatChunk, unlines', exerciseWrapper, sanitizeHtml)
import Data.Map (fromList, toList, unions)
import qualified Data.Text as T
import Data.Text (Text)
import Prelude
makeQualitativeProblems :: Block -> Block
makeQualitativeProblems cb@(CodeBlock (_,classes,extra) contents)
| "QualitativeProblem" `elem` classes = Div ("",[],[]) $ map (activate classes extra) $ intoChunks contents
| otherwise = cb
makeQualitativeProblems x = x
activate :: [Text] -> [(Text, Text)] -> Text -> Block
activate cls extra chunk
| "MultipleChoice" `elem` cls = mctemplate (opts [("qualitativetype","multiplechoice"), ("goal", safeContentOf h) ])
| "MultipleSelection" `elem` cls = mctemplate (opts [("qualitativetype","multipleselection"), ("goal", safeContentOf h) ])
| "ShortAnswer" `elem` cls = template (opts [("qualitativetype", "shortanswer"), ("goal", safeContentOf h) ])
| "Numerical" `elem` cls = case T.splitOn ":" (safeContentOf h) of
[g,p] -> template (opts [ ("qualitativetype","numerical")
, ("goal", T.pack $ show (simpleCipher $ T.unpack g))
, ("problem", sanitizeHtml p)
])
_ -> Div ("",[],[]) [Plain [Str "problem with numerical qualitative problem specification"]]
| otherwise = RawBlock "html" "<div>No Matching Qualitative Problem Type</div>"
where safeContentOf = sanitizeHtml . contentOf
(h:t) = formatChunk chunk
opts adhoc = unions [fromList extra, fromList fixed, fromList adhoc]
fixed = [ ("type","qualitative")
, ("submission", T.concat ["saveAs:", numof h])
]
mctemplate myOpts = exerciseWrapper (toList myOpts) (numof h) $
Need rawblock here to get the linebreaks
RawBlock "html" $ T.concat
["<div", optString myOpts, ">"
, unlines' (map (T.pack . show . withHash . T.unpack) t)
, "</div>"]
template myOpts = exerciseWrapper (toList myOpts) (numof h) $
Need rawblock here to get the linebreaks
RawBlock "html" $ T.concat [ "<div", optString myOpts, ">", unlines' t, "</div>" ]
optString myOpts = T.concat $ map (\(x,y) -> T.concat [" data-carnap-", x, "=\"", y, "\""]) (toList myOpts)
withHash s | length s' > 0 = if head s' `elem` ['*','+','-'] then (simpleHash s', tail s') else (simpleHash s',s')
| otherwise = (simpleHash s', s')
where s' = (dropWhile (== ' ') s)
|
debecc2fd101908e030ceb4e7963641218ba966aef5a775eeeeb557d6affc40c | backtracking/bibtex2html | main.ml | (**************************************************************************)
(* bibtex2html - A BibTeX to HTML translator *)
Copyright ( C ) 1997 - 2014 and
(* *)
(* This software is free software; you can redistribute it and/or *)
modify it under the terms of the GNU General Public
License version 2 , as published by the Free Software Foundation .
(* *)
(* This software is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. *)
(* *)
See the GNU General Public License version 2 for more details
(* (enclosed in the file GPL). *)
(**************************************************************************)
s Main module of bibtex2html .
open Printf
open Translate
(* Options. *)
let excluded = ref ([] : string list)
let add_exclude k = excluded := k :: !excluded
let style = ref "plain"
let command = ref "bibtex -min-crossrefs=1000"
type sort = Unsorted | By_date | By_author
let sort = ref Unsorted
let reverse_sort = ref false
let ignore_bibtex_errors = ref false
let expand_abbrev_in_bib_output = ref true
(* Optional citation file. *)
let use_cite_file = ref false
let citations = ref ([] : string list)
let add_citations file =
try
let chan = open_in file
and buf = Buffer.create 1024 in
try
while true do Buffer.add_char buf (input_char chan) done
with End_of_file ->
close_in chan;
citations :=
(Str.split (Str.regexp "[ \t\n]+") (Buffer.contents buf)) @
!citations
with Sys_error msg ->
prerr_endline ("Cannot open citation file (" ^ msg ^ ")");
exit 1
(*s Sorting the entries. *)
module KeyMap = Map.Make(struct type t = string let compare = compare end)
let keep_combine combine l1 l2 =
let map =
List.fold_left (fun m ((_,k,_) as e) -> KeyMap.add k e m) KeyMap.empty l2
in
let rec keep_rec = function
| [] ->
[]
| ((_,k,_) as x)::rem ->
if not (List.mem k !excluded) then
try
let y = KeyMap.find k map in (combine x y) :: (keep_rec rem)
with Not_found -> keep_rec rem
else
keep_rec rem
in
keep_rec l1
let combine_f (c,_,b) e = c,b,e
let rev_combine_f x y = combine_f y x
let sort_entries entries bibitems =
if not !Options.quiet then begin eprintf "Sorting..."; flush stderr end;
let el =
if !sort = By_author then
keep_combine combine_f bibitems entries
else
keep_combine rev_combine_f entries bibitems
in
let sl =
if !sort = By_date then
List.sort (fun (_,_,e1) (_,_,e2) -> Expand.date_compare entries e1 e2) el
else
el
in
if not !Options.quiet then begin eprintf "ok.\n"; flush stderr end;
if !reverse_sort then List.rev sl else sl
We use BibTeX itself to format the entries . Operations :
\begin{enumerate }
\item create an auxiliary file tmp.aux
\item call on it
\item read the resulting tmp.bbl file to get the formatted entries
\end{enumerate }
\begin{enumerate}
\item create an auxiliary file tmp.aux
\item call bibtex on it
\item read the resulting tmp.bbl file to get the formatted entries
\end{enumerate} *)
let create_aux_file fbib tmp =
let ch = open_out (tmp ^ ".aux") in
output_string ch "\\relax\n\\bibstyle{";
output_string ch !style;
output_string ch "}\n";
if !use_cite_file then
List.iter
(fun k -> output_string ch ("\\citation{" ^ k ^ "}\n"))
!citations
else
output_string ch "\\citation{*}\n";
output_string ch "\\bibdata{";
output_string ch (Filename.chop_suffix fbib ".bib");
output_string ch "}\n";
close_out ch
let rm f = try Sys.remove f with _ -> ()
let clean tmp =
if not !Options.debug then begin
rm (tmp ^ ".aux");
rm (tmp ^ ".blg");
rm (tmp ^ ".bbl");
rm tmp
end
let call_bibtex tmp =
if not !Options.quiet then begin
eprintf "calling BibTeX..."; flush stderr
end;
match
let redir =
if !output_file = "" || !Options.quiet then
match Sys.os_type with
| "Win32" -> "> nul 2>&1"
| _ -> "> /dev/null 2>&1"
else
""
in
let cmd = sprintf "%s %s %s" !command tmp redir in
if !Options.debug then begin
eprintf "\nbibtex command: %s\n" cmd; flush stderr
end;
Sys.command cmd
with
| 0 ->
if not !Options.quiet then begin eprintf "\n"; flush stderr end
| n ->
if !ignore_bibtex_errors then begin
if not !Options.quiet then begin
eprintf "error %d (ignored)\n" n;
flush stderr
end
end else begin
eprintf "error %d while running bibtex\n" n;
exit n
end
let read_one_biblio lb =
let rec read_items acc lb =
try
let (_,k,_) as item = Bbl_lexer.bibitem lb in
if !Options.debug then begin eprintf "[%s]" k; flush stderr end;
read_items (item::acc) lb
with Bbl_lexer.End_of_biblio ->
List.rev acc
in
let name = Bbl_lexer.biblio_header lb in
let items = read_items [] lb in
(name,items)
let read_biblios lb =
let rec read acc lb =
try
let b = read_one_biblio lb in
read (b::acc) lb
with
End_of_file -> List.rev acc
in
read [] lb
let read_bbl tmp =
let fbbl = tmp ^ ".bbl" in
if not !Options.quiet then begin
eprintf "Reading %s..." fbbl; flush stderr
end;
let ch = open_in fbbl in
let lexbuf = Lexing.from_channel ch in
let biblios = read_biblios lexbuf in
close_in ch;
clean tmp;
if not !Options.quiet then begin
eprintf "ok ";
List.iter
(fun (_,items) -> eprintf "(%d entries)" (List.length items))
biblios;
eprintf "\n"; flush stderr
end;
biblios
temporary files in current directory ( from 's standard library )
module Tmp = struct
external open_desc: string -> open_flag list -> int -> int = "caml_sys_open"
external close_desc: int -> unit = "caml_sys_close"
let prng = Random.State.make_self_init ()
let temp_file prefix suffix =
let rec try_name counter =
let rnd = (Random.State.bits prng) land 0xFFFFFF in
let name = Printf.sprintf "%s%06x%s" prefix rnd suffix in
try
close_desc (open_desc name [Open_wronly; Open_creat; Open_excl] 0o600);
name
with Sys_error _ as e ->
if counter >= 1000 then raise e else try_name (counter + 1)
in
try_name 0
end
let get_biblios fbib =
let tmp = Tmp.temp_file "bib2html" "" in
try
create_aux_file fbib tmp;
call_bibtex tmp;
read_bbl tmp
with
e -> clean tmp; raise e
i
let insert_title_url bib =
let rec remove_assoc x = function
| [ ] - >
raise Not_found
| ( ( y , v ) as p ) : : l - >
if x = y then
( v , l )
else
let ( v',l ' ) = remove_assoc x l in ( v ' , p : : l ' )
in
let url_value = function
| [ Bibtex . Id u ] - > u
| [ Bibtex . String u ] - > u
| _ - > raise Not_found
in
let modify_entry f =
try
let t , f ' = remove_assoc " title " f in
let u , f '' = remove_assoc " url " f ' in
let u ' = Html.normalize_url ( url_value u ) in
let nt =
( Bibtex . String
( sprintf " \\begin{rawhtml}<A HREF=\"%s\">\\end{rawhtml } " u ' ) )
: : t @ [ . String " \\begin{rawhtml}</A>\\end{rawhtml } " ]
in
( " TITLE",nt ) : : f ''
with Not_found - >
f
in
Bibtex.fold
( fun com bib ' - > match com with
| Bibtex . Entry ( ty , k , f ) - >
Bibtex.add_new_entry ( Bibtex . Entry ( ty , k , modify_entry f ) ) bib '
| _ - >
Bibtex.add_new_entry com bib ' )
bib Bibtex.empty_biblio
i
let insert_title_url bib =
let rec remove_assoc x = function
| [] ->
raise Not_found
| ((y,v) as p) :: l ->
if x = y then
(v,l)
else
let (v',l') = remove_assoc x l in (v', p :: l')
in
let url_value = function
| [Bibtex.Id u] -> u
| [Bibtex.String u] -> u
| _ -> raise Not_found
in
let modify_entry f =
try
let t,f' = remove_assoc "title" f in
let u,f'' = remove_assoc "url" f' in
let u' = Html.normalize_url (url_value u) in
let nt =
(Bibtex.String
(sprintf "\\begin{rawhtml}<A HREF=\"%s\">\\end{rawhtml}" u'))
:: t @ [Bibtex.String "\\begin{rawhtml}</A>\\end{rawhtml}"]
in
("TITLE",nt) :: f''
with Not_found ->
f
in
Bibtex.fold
(fun com bib' -> match com with
| Bibtex.Entry (ty,k,f) ->
Bibtex.add_new_entry (Bibtex.Entry (ty,k,modify_entry f)) bib'
| _ ->
Bibtex.add_new_entry com bib')
bib Bibtex.empty_biblio
i*)
let parse_only = ref false
let print_keys = ref false
let translate fullname =
let input_bib = Readbib.read_entries_from_file fullname in
if !parse_only then exit 0;
let entries = List.rev (Expand.expand input_bib) in
let biblios =
if fullname = "" then begin
let tmp = Tmp.temp_file "bibtex2htmlinput" ".bib" in
let ch = open_out tmp in
Biboutput.output_bib ~html:false ch input_bib None;
close_out ch;
let bbl = get_biblios tmp in
Sys.remove tmp;
bbl
end else
get_biblios fullname
in
let sb =
List.map
(fun (name,bibitems) -> (name,sort_entries entries bibitems))
biblios
in
if !print_keys then begin
List.iter
(fun (_,bibitems) ->
List.iter (fun (_,_,(_,k,_)) -> printf "%s\n" k) bibitems)
sb;
flush stdout;
exit 0
end;
format_list
(if !expand_abbrev_in_bib_output then
Bibtex.expand_abbrevs input_bib
else input_bib)
sb
(if !use_cite_file then
let keys =
List.fold_right
(fun s e -> Bibtex.KeySet.add s e) !citations Bibtex.KeySet.empty in
let keys =
List.fold_right
(fun s e -> Bibtex.KeySet.remove s e) !excluded keys in
Some (Bibfilter.saturate input_bib keys)
else None)
(*s Reading macros in a file. *)
let read_macros f =
let chan = open_in f in
let lb = Lexing.from_channel chan in
Latexscan.read_macros lb;
close_in chan
s Command line parsing .
let usage ?(error=true) () =
if error then prerr_endline "bibtex2html: bad command line syntax";
(if error then prerr_endline else print_endline) "
Usage: bibtex2html <options> [filename]
-s style BibTeX style (plain, alpha, ...)
-c command BibTeX command (otherwise bibtex is searched in your path)
-d sort by date
-a sort as BibTeX (usually by author)
-u unsorted i.e. same order as in .bib file (default)
-r reverse the sort
-revkeys entries numbered in reverse order
-t title title of the HTML file (default is the filename)
-bg color background color of the HTML file (default is none)
-css file specify a style sheet file
-o file redirect the output
-header additional header in the HTML file
-footer additional footer in the HTML file
-i ignore BibTeX errors
-both produce versions with and without abstracts
-multiple produce one file per entry
-single produce a single page (with BibTeX input and output)
-nodoc only produces the body of the HTML documents
-nokeys do not print the BibTeX keys
-nolinks do not print any web link
-nobiblinks
do not add web links in the BibTeX output
-rawurl print URL instead of file type
-heveaurl use HeVeA's \\url macro
-noabstract
do not print the abstracts (if any)
-nokeywords
do not print the keywords (if any)
-nodoi do not insert the DOI links
-doi-prefix url
set the DOI links prefix (default is /)
-noeprint do not insert the eprint links
-eprint-prefix url
set the eprint links prefix (default is /)
-linebreak add a linebreak between an entry and its links
-use-table enforce the use of HTML tables (to be used after -nokeys)
-noheader do not print the header (bibtex2html command)
-nofooter do not print the footer (bibtex2html web link)
-noexpand do not expand abbreviations in the BibTeX output
-nobibsource
do not produce the BibTeX entries file
-fsuffix give an alternate suffix for HTML files
-lsuffix give an alternate suffix for HTML links
-suffix s give an alternate suffix for HTML files and links
-citefile f
read keys to include from file f
-e key exclude an entry
-m file read (La)TeX macros in file
-f field add a web link for that BibTeX field
-nf field name
add a web link for that BibTeX field, with the supplied name
-note field
declare a note field
-dl use DL lists instead of TABLEs
-unicode use Unicode characters for some LaTeX macros (as HTML entities)
-html-entities
use HTML entities for some LaTeX macros
-labelname use the label name when inserting a link
--print-keys
print the sorted bibtex keys and exit
-debug verbose mode (to find incorrect BibTeX entries)
-q quiet mode
-w stop on warning
-v print version and exit
On-line documentation at /~filliatr/bibtex2html/
";
exit (if error then 1 else 0)
let parse () =
let rec parse_rec = function
(* General aspect of the web page *)
| ("-t" | "-title" | "--title") :: s :: rem ->
title := s; title_spec := true; parse_rec rem
| ("-t" | "-title" | "--title") :: [] ->
usage()
| ("-bg" | "-background" | "--background") :: s :: rem ->
Html.bgcolor := Some s; parse_rec rem
| ("-bg" | "-background" | "--background") :: [] ->
usage()
| ("-css" | "-style-sheet" | "--style-sheet") :: f :: rem ->
Html.css := Some f; parse_rec rem
| ("-css" | "-style-sheet" | "--style-sheet") :: [] ->
usage()
| ("-header" | "--header") :: s :: rem ->
user_header := s; parse_rec rem
| ("-header" | "--header") :: [] ->
usage()
| ("-footer" | "--footer") :: s :: rem ->
user_footer := s; parse_rec rem
| ("-footer" | "--footer") :: [] ->
usage()
| ("-s" | "-style" | "--style") :: s :: rem ->
style := s; parse_rec rem
| ("-s" | "-style" | "--style") :: [] ->
usage()
| ("-noabstract" | "-no-abstract" | "--no-abstract") :: rem ->
print_abstract := false; parse_rec rem
| ("-nodoi" | "-no-doi" | "--no-doi") :: rem ->
doi := false; parse_rec rem
| ("-doi-prefix" | "--doi-prefix") :: s :: rem ->
doi_prefix := s; parse_rec rem
| ("-doi-prefix" | "--doi-prefix") :: [] ->
usage ()
| ("-noeprint" | "-no-eprint" | "--no-eprint") :: rem ->
eprint := false; parse_rec rem
| ("-eprint-prefix" | "--eprint-prefix") :: s :: rem ->
eprint_prefix := s; parse_rec rem
| ("-eprint-prefix" | "--eprint-prefix") :: [] ->
usage ()
| ("-nokeywords" | "-no-keywords" | "--no-keywords") :: rem ->
print_keywords := false; parse_rec rem
| ("-nolinks" | "-no-links" | "--no-links") :: rem ->
print_links := false; parse_rec rem
| ("-nobiblinks" | "-no-bib-links" | "--no-bib-links") :: rem ->
links_in_bib_file := false; parse_rec rem
| ("-nokeys" | "-no-keys" | "--no-keys") :: rem ->
nokeys := true; table := NoTable; parse_rec rem
| ("-use-table" | "--use-table") :: rem ->
table := Table; parse_rec rem
| ("-usekeys" | "-use-keys" | "--use-keys") :: rem ->
use_keys := true; parse_rec rem
| ("-rawurl" | "-raw-url" | "--raw-url") :: rem ->
raw_url := true; parse_rec rem
i
| ( " -tu " | " -titleurl " | " --title - url " ) : : rem - >
title_url : = true ;
| ("-tu" | "-titleurl" | "--title-url") :: rem ->
title_url := true; parse_rec rem
i*)
| ("-heveaurl" | "-hevea-url" | "--hevea-url") :: rem ->
Latexscan.hevea_url := true; parse_rec rem
| ("-linebreak" | "--linebreak") :: rem ->
linebreak := true; parse_rec rem
| ("-noheader" | "-no-header" | "--no-header") :: rem ->
print_header := false; parse_rec rem
| ("-nofooter" | "-no-footer" | "--no-footer") :: rem ->
print_footer := false; parse_rec rem
| ("-f" | "-field" | "--field") :: s :: rem ->
add_field s; parse_rec rem
| ("-f" | "-field" | "--field") :: [] ->
usage()
| ("-nf" | "-named-field" | "--named-field") :: s :: name :: rem ->
add_named_field s name; parse_rec rem
| ("-nf" | "-named-field" | "--named-field") :: ([_] | []) ->
usage()
| ("-note" | "--note") :: s :: rem ->
add_note_field s; parse_rec rem
| ("-note" | "--note") :: [] ->
usage()
| ("-note-html" | "--note-html") :: s :: rem ->
add_note_html_field s; parse_rec rem
| ("-note-html" | "--note-html") :: [] ->
usage()
| ("-ln" | "-labelname" | "--labelname" | "--label-name") :: rem ->
use_label_name := true; parse_rec rem
| ("-multiple" | "--multiple") :: rem ->
multiple := true; parse_rec rem
| ("-single" | "--single") :: rem ->
multiple := false; both := false; print_keywords := false;
bib_entries := false; single := true; parse_rec rem
| ("-both" | "--both") :: rem ->
both := true; parse_rec rem
| ("-dl" | "--dl") :: rem ->
table := DL; parse_rec rem
| ("-unicode" | "--unicode") :: rem ->
Latexmacros.unicode_entities (); parse_rec rem
| ("-html-entities" | "--html-entities") :: rem ->
Latexscan.html_entities := true;
Latexmacros.html_entities (); parse_rec rem
(* Controlling the translation *)
| ("-m" | "-macros-from" | "--macros-from") :: f :: rem ->
read_macros f; parse_rec rem
| ("-m" | "-macros-from" | "--macros-from") :: [] ->
usage()
(* Sorting the entries *)
| ("-d" | "-sort-by-date" | "--sort-by-date") :: rem ->
sort := By_date; parse_rec rem
| ("-a" | "-sort-as-bibtex" | "--sort-as-bibtex") :: rem ->
sort := By_author; parse_rec rem
| ("-u" | "-unsorted" | "--unsorted") :: rem ->
sort := Unsorted; parse_rec rem
| ("-r" | "-reverse-sort" | "--reverse-sort") :: rem ->
reverse_sort := not !reverse_sort; parse_rec rem
| ("-revkeys" | "--revkeys") :: rem ->
reverse_sort := not !reverse_sort; revkeys := true; parse_rec rem
(* Options for selecting keys *)
| ("-citefile" | "--citefile") :: f :: rem ->
use_cite_file := true;
add_citations f;
parse_rec rem
| ("-citefile" | "--citefile") :: [] ->
usage()
| ("-e" | "-exclude" | "--exclude") :: k :: rem ->
add_exclude k; parse_rec rem
| ("-e" | "-exclude" | "--exclude") :: [] ->
usage()
(* Miscellaneous options *)
| ("-o" | "-output" | "--output") :: f :: rem ->
output_file := f;
parse_rec rem
| ("-o" | "-output" | "--output") :: [] ->
usage()
| ("-nobibsource" | "--nobibsource") :: rem ->
bib_entries := false; parse_rec rem
| ("-nodoc" | "--nodoc" | "-no-doc" | "--no-doc") :: rem ->
nodoc := true; parse_rec rem
| ("-noexpand" | "-no-expand" | "--no-expand") :: rem ->
expand_abbrev_in_bib_output := false; parse_rec rem
| ("-i" | "-ignore-errors" | "--ignore-errors") :: rem ->
ignore_bibtex_errors := true; parse_rec rem
| ("-suffix" | "--suffix") :: s :: rem ->
file_suffix := s; link_suffix := s; parse_rec rem
| ("-fsuffix" | "-file-suffix" | "--file-suffix") :: s :: rem ->
file_suffix := s; parse_rec rem
| ("-lsuffix" | "-link-suffix" | "--link-suffix") :: s :: rem ->
link_suffix := s; parse_rec rem
| ("-suffix" | "--suffix" | "-fsuffix" | "--file-suffix" | "-file-suffix" |
"-lsuffix" | "-link-suffix" | "--link-suffix") :: [] ->
usage()
| ("-c" | "-command" | "--command") :: s :: rem ->
command := s; parse_rec rem
| ("-c" | "-command" | "--command") :: [] ->
usage()
| ("-h" | "-help" | "-?" | "--help") :: rem ->
usage ~error:false ()
| ("-v" | "-version" | "--version") :: _ ->
Copying.banner "bibtex2html"; exit 0
| ("-warranty" | "--warranty") :: _ ->
Copying.banner "bibtex2html"; Copying.copying(); exit 0
| ("-w" | "-warn-error" | "--warn-error") :: rem ->
Options.warn_error := true; parse_rec rem
| ("-q" | "-quiet" | "--quiet") :: rem ->
Options.quiet := true; parse_rec rem
| ("-debug" | "--debug") :: rem ->
Options.debug := true; parse_rec rem
| "-parse-only" :: rem ->
parse_only := true; parse_rec rem
| ("-print-keys" | "--print-keys") :: rem ->
print_keys := true; parse_rec rem
| [fbib] ->
if not (Sys.file_exists fbib) then begin
eprintf "%s: no such file\n" fbib;
exit 1
end;
let basename = Filename.basename fbib in
if Filename.check_suffix basename ".bib" then
(fbib, Filename.chop_suffix basename ".bib")
else begin
prerr_endline "bibtex2html: BibTeX file must have suffix .bib";
exit 1
end
| [] ->
("","")
| _ -> usage ()
in
parse_rec (List.tl (Array.to_list Sys.argv))
(*s Main function. *)
let main () =
let (fbib,f) = parse () in
Copying.banner "bibtex2html";
if fbib = "" then begin
if not !title_spec then title := "bibtex2html output";
begin match !output_file with
| "" -> bib_entries := false
| "-" -> output_file := ""; bib_entries := false
| _ -> ()
end
end else begin
input_file := f ^ ".bib";
begin match !output_file with
| "" -> output_file := f;
| "-" -> output_file := ""; bib_entries := false
| _ -> ()
end;
if not !title_spec then title := f
end;
Latexmacros.init_style_macros !style;
(* producing the documents *)
translate fbib
let _ = Printexc.catch main ()
| null | https://raw.githubusercontent.com/backtracking/bibtex2html/7c9547da79a13c3accffc9947c846df96a6edd68/main.ml | ocaml | ************************************************************************
bibtex2html - A BibTeX to HTML translator
This software is free software; you can redistribute it and/or
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
(enclosed in the file GPL).
************************************************************************
Options.
Optional citation file.
s Sorting the entries.
s Reading macros in a file.
General aspect of the web page
Controlling the translation
Sorting the entries
Options for selecting keys
Miscellaneous options
s Main function.
producing the documents | Copyright ( C ) 1997 - 2014 and
modify it under the terms of the GNU General Public
License version 2 , as published by the Free Software Foundation .
See the GNU General Public License version 2 for more details
s Main module of bibtex2html .
open Printf
open Translate
let excluded = ref ([] : string list)
let add_exclude k = excluded := k :: !excluded
let style = ref "plain"
let command = ref "bibtex -min-crossrefs=1000"
type sort = Unsorted | By_date | By_author
let sort = ref Unsorted
let reverse_sort = ref false
let ignore_bibtex_errors = ref false
let expand_abbrev_in_bib_output = ref true
let use_cite_file = ref false
let citations = ref ([] : string list)
let add_citations file =
try
let chan = open_in file
and buf = Buffer.create 1024 in
try
while true do Buffer.add_char buf (input_char chan) done
with End_of_file ->
close_in chan;
citations :=
(Str.split (Str.regexp "[ \t\n]+") (Buffer.contents buf)) @
!citations
with Sys_error msg ->
prerr_endline ("Cannot open citation file (" ^ msg ^ ")");
exit 1
module KeyMap = Map.Make(struct type t = string let compare = compare end)
let keep_combine combine l1 l2 =
let map =
List.fold_left (fun m ((_,k,_) as e) -> KeyMap.add k e m) KeyMap.empty l2
in
let rec keep_rec = function
| [] ->
[]
| ((_,k,_) as x)::rem ->
if not (List.mem k !excluded) then
try
let y = KeyMap.find k map in (combine x y) :: (keep_rec rem)
with Not_found -> keep_rec rem
else
keep_rec rem
in
keep_rec l1
let combine_f (c,_,b) e = c,b,e
let rev_combine_f x y = combine_f y x
let sort_entries entries bibitems =
if not !Options.quiet then begin eprintf "Sorting..."; flush stderr end;
let el =
if !sort = By_author then
keep_combine combine_f bibitems entries
else
keep_combine rev_combine_f entries bibitems
in
let sl =
if !sort = By_date then
List.sort (fun (_,_,e1) (_,_,e2) -> Expand.date_compare entries e1 e2) el
else
el
in
if not !Options.quiet then begin eprintf "ok.\n"; flush stderr end;
if !reverse_sort then List.rev sl else sl
We use BibTeX itself to format the entries . Operations :
\begin{enumerate }
\item create an auxiliary file tmp.aux
\item call on it
\item read the resulting tmp.bbl file to get the formatted entries
\end{enumerate }
\begin{enumerate}
\item create an auxiliary file tmp.aux
\item call bibtex on it
\item read the resulting tmp.bbl file to get the formatted entries
\end{enumerate} *)
let create_aux_file fbib tmp =
let ch = open_out (tmp ^ ".aux") in
output_string ch "\\relax\n\\bibstyle{";
output_string ch !style;
output_string ch "}\n";
if !use_cite_file then
List.iter
(fun k -> output_string ch ("\\citation{" ^ k ^ "}\n"))
!citations
else
output_string ch "\\citation{*}\n";
output_string ch "\\bibdata{";
output_string ch (Filename.chop_suffix fbib ".bib");
output_string ch "}\n";
close_out ch
let rm f = try Sys.remove f with _ -> ()
let clean tmp =
if not !Options.debug then begin
rm (tmp ^ ".aux");
rm (tmp ^ ".blg");
rm (tmp ^ ".bbl");
rm tmp
end
let call_bibtex tmp =
if not !Options.quiet then begin
eprintf "calling BibTeX..."; flush stderr
end;
match
let redir =
if !output_file = "" || !Options.quiet then
match Sys.os_type with
| "Win32" -> "> nul 2>&1"
| _ -> "> /dev/null 2>&1"
else
""
in
let cmd = sprintf "%s %s %s" !command tmp redir in
if !Options.debug then begin
eprintf "\nbibtex command: %s\n" cmd; flush stderr
end;
Sys.command cmd
with
| 0 ->
if not !Options.quiet then begin eprintf "\n"; flush stderr end
| n ->
if !ignore_bibtex_errors then begin
if not !Options.quiet then begin
eprintf "error %d (ignored)\n" n;
flush stderr
end
end else begin
eprintf "error %d while running bibtex\n" n;
exit n
end
let read_one_biblio lb =
let rec read_items acc lb =
try
let (_,k,_) as item = Bbl_lexer.bibitem lb in
if !Options.debug then begin eprintf "[%s]" k; flush stderr end;
read_items (item::acc) lb
with Bbl_lexer.End_of_biblio ->
List.rev acc
in
let name = Bbl_lexer.biblio_header lb in
let items = read_items [] lb in
(name,items)
let read_biblios lb =
let rec read acc lb =
try
let b = read_one_biblio lb in
read (b::acc) lb
with
End_of_file -> List.rev acc
in
read [] lb
let read_bbl tmp =
let fbbl = tmp ^ ".bbl" in
if not !Options.quiet then begin
eprintf "Reading %s..." fbbl; flush stderr
end;
let ch = open_in fbbl in
let lexbuf = Lexing.from_channel ch in
let biblios = read_biblios lexbuf in
close_in ch;
clean tmp;
if not !Options.quiet then begin
eprintf "ok ";
List.iter
(fun (_,items) -> eprintf "(%d entries)" (List.length items))
biblios;
eprintf "\n"; flush stderr
end;
biblios
temporary files in current directory ( from 's standard library )
module Tmp = struct
external open_desc: string -> open_flag list -> int -> int = "caml_sys_open"
external close_desc: int -> unit = "caml_sys_close"
let prng = Random.State.make_self_init ()
let temp_file prefix suffix =
let rec try_name counter =
let rnd = (Random.State.bits prng) land 0xFFFFFF in
let name = Printf.sprintf "%s%06x%s" prefix rnd suffix in
try
close_desc (open_desc name [Open_wronly; Open_creat; Open_excl] 0o600);
name
with Sys_error _ as e ->
if counter >= 1000 then raise e else try_name (counter + 1)
in
try_name 0
end
let get_biblios fbib =
let tmp = Tmp.temp_file "bib2html" "" in
try
create_aux_file fbib tmp;
call_bibtex tmp;
read_bbl tmp
with
e -> clean tmp; raise e
i
let insert_title_url bib =
let rec remove_assoc x = function
| [ ] - >
raise Not_found
| ( ( y , v ) as p ) : : l - >
if x = y then
( v , l )
else
let ( v',l ' ) = remove_assoc x l in ( v ' , p : : l ' )
in
let url_value = function
| [ Bibtex . Id u ] - > u
| [ Bibtex . String u ] - > u
| _ - > raise Not_found
in
let modify_entry f =
try
let t , f ' = remove_assoc " title " f in
let u , f '' = remove_assoc " url " f ' in
let u ' = Html.normalize_url ( url_value u ) in
let nt =
( Bibtex . String
( sprintf " \\begin{rawhtml}<A HREF=\"%s\">\\end{rawhtml } " u ' ) )
: : t @ [ . String " \\begin{rawhtml}</A>\\end{rawhtml } " ]
in
( " TITLE",nt ) : : f ''
with Not_found - >
f
in
Bibtex.fold
( fun com bib ' - > match com with
| Bibtex . Entry ( ty , k , f ) - >
Bibtex.add_new_entry ( Bibtex . Entry ( ty , k , modify_entry f ) ) bib '
| _ - >
Bibtex.add_new_entry com bib ' )
bib Bibtex.empty_biblio
i
let insert_title_url bib =
let rec remove_assoc x = function
| [] ->
raise Not_found
| ((y,v) as p) :: l ->
if x = y then
(v,l)
else
let (v',l') = remove_assoc x l in (v', p :: l')
in
let url_value = function
| [Bibtex.Id u] -> u
| [Bibtex.String u] -> u
| _ -> raise Not_found
in
let modify_entry f =
try
let t,f' = remove_assoc "title" f in
let u,f'' = remove_assoc "url" f' in
let u' = Html.normalize_url (url_value u) in
let nt =
(Bibtex.String
(sprintf "\\begin{rawhtml}<A HREF=\"%s\">\\end{rawhtml}" u'))
:: t @ [Bibtex.String "\\begin{rawhtml}</A>\\end{rawhtml}"]
in
("TITLE",nt) :: f''
with Not_found ->
f
in
Bibtex.fold
(fun com bib' -> match com with
| Bibtex.Entry (ty,k,f) ->
Bibtex.add_new_entry (Bibtex.Entry (ty,k,modify_entry f)) bib'
| _ ->
Bibtex.add_new_entry com bib')
bib Bibtex.empty_biblio
i*)
let parse_only = ref false
let print_keys = ref false
let translate fullname =
let input_bib = Readbib.read_entries_from_file fullname in
if !parse_only then exit 0;
let entries = List.rev (Expand.expand input_bib) in
let biblios =
if fullname = "" then begin
let tmp = Tmp.temp_file "bibtex2htmlinput" ".bib" in
let ch = open_out tmp in
Biboutput.output_bib ~html:false ch input_bib None;
close_out ch;
let bbl = get_biblios tmp in
Sys.remove tmp;
bbl
end else
get_biblios fullname
in
let sb =
List.map
(fun (name,bibitems) -> (name,sort_entries entries bibitems))
biblios
in
if !print_keys then begin
List.iter
(fun (_,bibitems) ->
List.iter (fun (_,_,(_,k,_)) -> printf "%s\n" k) bibitems)
sb;
flush stdout;
exit 0
end;
format_list
(if !expand_abbrev_in_bib_output then
Bibtex.expand_abbrevs input_bib
else input_bib)
sb
(if !use_cite_file then
let keys =
List.fold_right
(fun s e -> Bibtex.KeySet.add s e) !citations Bibtex.KeySet.empty in
let keys =
List.fold_right
(fun s e -> Bibtex.KeySet.remove s e) !excluded keys in
Some (Bibfilter.saturate input_bib keys)
else None)
let read_macros f =
let chan = open_in f in
let lb = Lexing.from_channel chan in
Latexscan.read_macros lb;
close_in chan
s Command line parsing .
let usage ?(error=true) () =
if error then prerr_endline "bibtex2html: bad command line syntax";
(if error then prerr_endline else print_endline) "
Usage: bibtex2html <options> [filename]
-s style BibTeX style (plain, alpha, ...)
-c command BibTeX command (otherwise bibtex is searched in your path)
-d sort by date
-a sort as BibTeX (usually by author)
-u unsorted i.e. same order as in .bib file (default)
-r reverse the sort
-revkeys entries numbered in reverse order
-t title title of the HTML file (default is the filename)
-bg color background color of the HTML file (default is none)
-css file specify a style sheet file
-o file redirect the output
-header additional header in the HTML file
-footer additional footer in the HTML file
-i ignore BibTeX errors
-both produce versions with and without abstracts
-multiple produce one file per entry
-single produce a single page (with BibTeX input and output)
-nodoc only produces the body of the HTML documents
-nokeys do not print the BibTeX keys
-nolinks do not print any web link
-nobiblinks
do not add web links in the BibTeX output
-rawurl print URL instead of file type
-heveaurl use HeVeA's \\url macro
-noabstract
do not print the abstracts (if any)
-nokeywords
do not print the keywords (if any)
-nodoi do not insert the DOI links
-doi-prefix url
set the DOI links prefix (default is /)
-noeprint do not insert the eprint links
-eprint-prefix url
set the eprint links prefix (default is /)
-linebreak add a linebreak between an entry and its links
-use-table enforce the use of HTML tables (to be used after -nokeys)
-noheader do not print the header (bibtex2html command)
-nofooter do not print the footer (bibtex2html web link)
-noexpand do not expand abbreviations in the BibTeX output
-nobibsource
do not produce the BibTeX entries file
-fsuffix give an alternate suffix for HTML files
-lsuffix give an alternate suffix for HTML links
-suffix s give an alternate suffix for HTML files and links
-citefile f
read keys to include from file f
-e key exclude an entry
-m file read (La)TeX macros in file
-f field add a web link for that BibTeX field
-nf field name
add a web link for that BibTeX field, with the supplied name
-note field
declare a note field
-dl use DL lists instead of TABLEs
-unicode use Unicode characters for some LaTeX macros (as HTML entities)
-html-entities
use HTML entities for some LaTeX macros
-labelname use the label name when inserting a link
--print-keys
print the sorted bibtex keys and exit
-debug verbose mode (to find incorrect BibTeX entries)
-q quiet mode
-w stop on warning
-v print version and exit
On-line documentation at /~filliatr/bibtex2html/
";
exit (if error then 1 else 0)
let parse () =
let rec parse_rec = function
| ("-t" | "-title" | "--title") :: s :: rem ->
title := s; title_spec := true; parse_rec rem
| ("-t" | "-title" | "--title") :: [] ->
usage()
| ("-bg" | "-background" | "--background") :: s :: rem ->
Html.bgcolor := Some s; parse_rec rem
| ("-bg" | "-background" | "--background") :: [] ->
usage()
| ("-css" | "-style-sheet" | "--style-sheet") :: f :: rem ->
Html.css := Some f; parse_rec rem
| ("-css" | "-style-sheet" | "--style-sheet") :: [] ->
usage()
| ("-header" | "--header") :: s :: rem ->
user_header := s; parse_rec rem
| ("-header" | "--header") :: [] ->
usage()
| ("-footer" | "--footer") :: s :: rem ->
user_footer := s; parse_rec rem
| ("-footer" | "--footer") :: [] ->
usage()
| ("-s" | "-style" | "--style") :: s :: rem ->
style := s; parse_rec rem
| ("-s" | "-style" | "--style") :: [] ->
usage()
| ("-noabstract" | "-no-abstract" | "--no-abstract") :: rem ->
print_abstract := false; parse_rec rem
| ("-nodoi" | "-no-doi" | "--no-doi") :: rem ->
doi := false; parse_rec rem
| ("-doi-prefix" | "--doi-prefix") :: s :: rem ->
doi_prefix := s; parse_rec rem
| ("-doi-prefix" | "--doi-prefix") :: [] ->
usage ()
| ("-noeprint" | "-no-eprint" | "--no-eprint") :: rem ->
eprint := false; parse_rec rem
| ("-eprint-prefix" | "--eprint-prefix") :: s :: rem ->
eprint_prefix := s; parse_rec rem
| ("-eprint-prefix" | "--eprint-prefix") :: [] ->
usage ()
| ("-nokeywords" | "-no-keywords" | "--no-keywords") :: rem ->
print_keywords := false; parse_rec rem
| ("-nolinks" | "-no-links" | "--no-links") :: rem ->
print_links := false; parse_rec rem
| ("-nobiblinks" | "-no-bib-links" | "--no-bib-links") :: rem ->
links_in_bib_file := false; parse_rec rem
| ("-nokeys" | "-no-keys" | "--no-keys") :: rem ->
nokeys := true; table := NoTable; parse_rec rem
| ("-use-table" | "--use-table") :: rem ->
table := Table; parse_rec rem
| ("-usekeys" | "-use-keys" | "--use-keys") :: rem ->
use_keys := true; parse_rec rem
| ("-rawurl" | "-raw-url" | "--raw-url") :: rem ->
raw_url := true; parse_rec rem
i
| ( " -tu " | " -titleurl " | " --title - url " ) : : rem - >
title_url : = true ;
| ("-tu" | "-titleurl" | "--title-url") :: rem ->
title_url := true; parse_rec rem
i*)
| ("-heveaurl" | "-hevea-url" | "--hevea-url") :: rem ->
Latexscan.hevea_url := true; parse_rec rem
| ("-linebreak" | "--linebreak") :: rem ->
linebreak := true; parse_rec rem
| ("-noheader" | "-no-header" | "--no-header") :: rem ->
print_header := false; parse_rec rem
| ("-nofooter" | "-no-footer" | "--no-footer") :: rem ->
print_footer := false; parse_rec rem
| ("-f" | "-field" | "--field") :: s :: rem ->
add_field s; parse_rec rem
| ("-f" | "-field" | "--field") :: [] ->
usage()
| ("-nf" | "-named-field" | "--named-field") :: s :: name :: rem ->
add_named_field s name; parse_rec rem
| ("-nf" | "-named-field" | "--named-field") :: ([_] | []) ->
usage()
| ("-note" | "--note") :: s :: rem ->
add_note_field s; parse_rec rem
| ("-note" | "--note") :: [] ->
usage()
| ("-note-html" | "--note-html") :: s :: rem ->
add_note_html_field s; parse_rec rem
| ("-note-html" | "--note-html") :: [] ->
usage()
| ("-ln" | "-labelname" | "--labelname" | "--label-name") :: rem ->
use_label_name := true; parse_rec rem
| ("-multiple" | "--multiple") :: rem ->
multiple := true; parse_rec rem
| ("-single" | "--single") :: rem ->
multiple := false; both := false; print_keywords := false;
bib_entries := false; single := true; parse_rec rem
| ("-both" | "--both") :: rem ->
both := true; parse_rec rem
| ("-dl" | "--dl") :: rem ->
table := DL; parse_rec rem
| ("-unicode" | "--unicode") :: rem ->
Latexmacros.unicode_entities (); parse_rec rem
| ("-html-entities" | "--html-entities") :: rem ->
Latexscan.html_entities := true;
Latexmacros.html_entities (); parse_rec rem
| ("-m" | "-macros-from" | "--macros-from") :: f :: rem ->
read_macros f; parse_rec rem
| ("-m" | "-macros-from" | "--macros-from") :: [] ->
usage()
| ("-d" | "-sort-by-date" | "--sort-by-date") :: rem ->
sort := By_date; parse_rec rem
| ("-a" | "-sort-as-bibtex" | "--sort-as-bibtex") :: rem ->
sort := By_author; parse_rec rem
| ("-u" | "-unsorted" | "--unsorted") :: rem ->
sort := Unsorted; parse_rec rem
| ("-r" | "-reverse-sort" | "--reverse-sort") :: rem ->
reverse_sort := not !reverse_sort; parse_rec rem
| ("-revkeys" | "--revkeys") :: rem ->
reverse_sort := not !reverse_sort; revkeys := true; parse_rec rem
| ("-citefile" | "--citefile") :: f :: rem ->
use_cite_file := true;
add_citations f;
parse_rec rem
| ("-citefile" | "--citefile") :: [] ->
usage()
| ("-e" | "-exclude" | "--exclude") :: k :: rem ->
add_exclude k; parse_rec rem
| ("-e" | "-exclude" | "--exclude") :: [] ->
usage()
| ("-o" | "-output" | "--output") :: f :: rem ->
output_file := f;
parse_rec rem
| ("-o" | "-output" | "--output") :: [] ->
usage()
| ("-nobibsource" | "--nobibsource") :: rem ->
bib_entries := false; parse_rec rem
| ("-nodoc" | "--nodoc" | "-no-doc" | "--no-doc") :: rem ->
nodoc := true; parse_rec rem
| ("-noexpand" | "-no-expand" | "--no-expand") :: rem ->
expand_abbrev_in_bib_output := false; parse_rec rem
| ("-i" | "-ignore-errors" | "--ignore-errors") :: rem ->
ignore_bibtex_errors := true; parse_rec rem
| ("-suffix" | "--suffix") :: s :: rem ->
file_suffix := s; link_suffix := s; parse_rec rem
| ("-fsuffix" | "-file-suffix" | "--file-suffix") :: s :: rem ->
file_suffix := s; parse_rec rem
| ("-lsuffix" | "-link-suffix" | "--link-suffix") :: s :: rem ->
link_suffix := s; parse_rec rem
| ("-suffix" | "--suffix" | "-fsuffix" | "--file-suffix" | "-file-suffix" |
"-lsuffix" | "-link-suffix" | "--link-suffix") :: [] ->
usage()
| ("-c" | "-command" | "--command") :: s :: rem ->
command := s; parse_rec rem
| ("-c" | "-command" | "--command") :: [] ->
usage()
| ("-h" | "-help" | "-?" | "--help") :: rem ->
usage ~error:false ()
| ("-v" | "-version" | "--version") :: _ ->
Copying.banner "bibtex2html"; exit 0
| ("-warranty" | "--warranty") :: _ ->
Copying.banner "bibtex2html"; Copying.copying(); exit 0
| ("-w" | "-warn-error" | "--warn-error") :: rem ->
Options.warn_error := true; parse_rec rem
| ("-q" | "-quiet" | "--quiet") :: rem ->
Options.quiet := true; parse_rec rem
| ("-debug" | "--debug") :: rem ->
Options.debug := true; parse_rec rem
| "-parse-only" :: rem ->
parse_only := true; parse_rec rem
| ("-print-keys" | "--print-keys") :: rem ->
print_keys := true; parse_rec rem
| [fbib] ->
if not (Sys.file_exists fbib) then begin
eprintf "%s: no such file\n" fbib;
exit 1
end;
let basename = Filename.basename fbib in
if Filename.check_suffix basename ".bib" then
(fbib, Filename.chop_suffix basename ".bib")
else begin
prerr_endline "bibtex2html: BibTeX file must have suffix .bib";
exit 1
end
| [] ->
("","")
| _ -> usage ()
in
parse_rec (List.tl (Array.to_list Sys.argv))
let main () =
let (fbib,f) = parse () in
Copying.banner "bibtex2html";
if fbib = "" then begin
if not !title_spec then title := "bibtex2html output";
begin match !output_file with
| "" -> bib_entries := false
| "-" -> output_file := ""; bib_entries := false
| _ -> ()
end
end else begin
input_file := f ^ ".bib";
begin match !output_file with
| "" -> output_file := f;
| "-" -> output_file := ""; bib_entries := false
| _ -> ()
end;
if not !title_spec then title := f
end;
Latexmacros.init_style_macros !style;
translate fbib
let _ = Printexc.catch main ()
|
a31dc5a025e19b5cf215aa9ee2b40ef076e58f8903dae143b6829cd0f02b18d7 | ruhler/smten | Formula.hs |
| Representation of SMT formulas which may contain _ | _ in subterms .
module Smten.Runtime.Formula (
module Smten.Runtime.Formula.BoolF,
module Smten.Runtime.Formula.IntegerF,
module Smten.Runtime.Formula.BitF,
) where
import Smten.Runtime.Formula.BoolF
import Smten.Runtime.Formula.IntegerF
import Smten.Runtime.Formula.BitF
| null | https://raw.githubusercontent.com/ruhler/smten/16dd37fb0ee3809408803d4be20401211b6c4027/smten-base/Smten/Runtime/Formula.hs | haskell |
| Representation of SMT formulas which may contain _ | _ in subterms .
module Smten.Runtime.Formula (
module Smten.Runtime.Formula.BoolF,
module Smten.Runtime.Formula.IntegerF,
module Smten.Runtime.Formula.BitF,
) where
import Smten.Runtime.Formula.BoolF
import Smten.Runtime.Formula.IntegerF
import Smten.Runtime.Formula.BitF
| |
ded79130c685e41305d30d745789525ddee2d022ab8d96fed0b4c5a94fe9dd4c | inaka/elvis_core | fail_no_nested_try_catch.erl | -module(fail_no_nested_try_catch).
-ignore_xref({perhaps, throw, 1}).
-ignore_xref({a_function, that_deals, 2}).
-dialyzer({nowarn_function, bad2/0}).
-export([
bad1/0,
bad2/0,
good1/0,
good2/0
]).
bad1() ->
try
perhaps:throw(exception1),
try
perhaps:throw(exception2),
"We are safe!"
catch
_:exception2 ->
"Oh, no! Exception #2"
end
catch
_:exception1 ->
"Bummer! Exception #1"
end.
bad2() ->
try
perhaps:throw(exception1),
try
perhaps:throw(exception2),
"We are safe!"
catch
_:exception2 ->
"Oh, no! Exception #2"
end,
try
perhaps:throw(exception3),
"We are safe!"
catch
_:exception3 ->
"Oh, no! Exception #3"
end
catch
_:exception1 ->
"Bummer! Exception #1"
end.
good1() ->
try
perhaps:throw(exception1),
perhaps:throw(exception2),
"We are safe!"
catch
_:exception1 ->
"Bummer! Exception #1";
_:exception2 ->
"Oh, no! Exception #2"
end.
good2() ->
try
perhaps:throw(exception1),
a_function:that_deals(with, exception2),
"We are safe!"
catch
_:exception1 ->
"Bummer! Exception #1"
end.
| null | https://raw.githubusercontent.com/inaka/elvis_core/7567abc13db13bfdb4a666ff0f095385f3574fdb/test/examples/fail_no_nested_try_catch.erl | erlang | -module(fail_no_nested_try_catch).
-ignore_xref({perhaps, throw, 1}).
-ignore_xref({a_function, that_deals, 2}).
-dialyzer({nowarn_function, bad2/0}).
-export([
bad1/0,
bad2/0,
good1/0,
good2/0
]).
bad1() ->
try
perhaps:throw(exception1),
try
perhaps:throw(exception2),
"We are safe!"
catch
_:exception2 ->
"Oh, no! Exception #2"
end
catch
_:exception1 ->
"Bummer! Exception #1"
end.
bad2() ->
try
perhaps:throw(exception1),
try
perhaps:throw(exception2),
"We are safe!"
catch
_:exception2 ->
"Oh, no! Exception #2"
end,
try
perhaps:throw(exception3),
"We are safe!"
catch
_:exception3 ->
"Oh, no! Exception #3"
end
catch
_:exception1 ->
"Bummer! Exception #1"
end.
good1() ->
try
perhaps:throw(exception1),
perhaps:throw(exception2),
"We are safe!"
catch
_:exception1 ->
"Bummer! Exception #1";
_:exception2 ->
"Oh, no! Exception #2"
end.
good2() ->
try
perhaps:throw(exception1),
a_function:that_deals(with, exception2),
"We are safe!"
catch
_:exception1 ->
"Bummer! Exception #1"
end.
| |
72014a6b091ef32ce6ce074e9e845ab0a126050e97ee69b73b44ad9421dd4bcc | OCamlPro/alt-ergo | formatshims.ml | (******************************************************************************)
(* *)
Alt - Ergo : The SMT Solver For Software Verification
Copyright ( C ) 2018 - 2020
(* *)
(* This file is distributed under the terms of the license indicated *)
(* in the file 'License.OCamlPro'. If 'License.OCamlPro' is not *)
(* present, please contact us to clarify licensing. *)
(* *)
(* Some parts of this file are exctracted from the *)
(* projectOcaml-containers : *)
-cube/ocaml-containers/blob/master/src/core/mkshims.ml
Thanks to
(* *)
(******************************************************************************)
module C = Configurator.V1
let write_file f s =
let out = open_out f in
output_string out s; flush out; close_out out
let shims_fmt_pre_408 = "
include Format
let pp_open_stag = pp_open_tag
let open_stag = open_tag
let pp_close_stag = pp_close_tag
let close_stag = close_tag
type formatter_stag_functions = formatter_tag_functions
let pp_get_formatter_stag_functions = pp_get_formatter_tag_functions
let get_formatter_stag_functions = get_formatter_tag_functions
let pp_set_formatter_stag_functions = pp_set_formatter_tag_functions
let set_formatter_stag_functions = set_formatter_tag_functions
let get_stag s = s
let update_stag_functions funs start_stag stop_stag =
let open Format in
{ funs with
mark_open_tag = start_stag;
mark_close_tag = stop_stag }
"
let shims_fmt_post_408 = "
include Format
let get_stag = function
| String_tag s -> s
| _ -> raise Not_found
let update_stag_functions funs start_stag stop_stag =
let open Format in
{ funs with
mark_open_stag = start_stag;
mark_close_stag = stop_stag }
"
let () =
C.main ~name:"mkshims" (fun c ->
let version = C.ocaml_config_var_exn c "version" in
let major, minor =
Scanf.sscanf version "%u.%u" (fun maj min -> maj, min) in
write_file "format_shims.ml"
(if (major, minor) >= (4,8)
then shims_fmt_post_408
else shims_fmt_pre_408);
)
| null | https://raw.githubusercontent.com/OCamlPro/alt-ergo/291523151417f4cd112744d740b58ab1e8a630b4/src/lib/util/formatshims.ml | ocaml | ****************************************************************************
This file is distributed under the terms of the license indicated
in the file 'License.OCamlPro'. If 'License.OCamlPro' is not
present, please contact us to clarify licensing.
Some parts of this file are exctracted from the
projectOcaml-containers :
**************************************************************************** | Alt - Ergo : The SMT Solver For Software Verification
Copyright ( C ) 2018 - 2020
-cube/ocaml-containers/blob/master/src/core/mkshims.ml
Thanks to
module C = Configurator.V1
let write_file f s =
let out = open_out f in
output_string out s; flush out; close_out out
let shims_fmt_pre_408 = "
include Format
let pp_open_stag = pp_open_tag
let open_stag = open_tag
let pp_close_stag = pp_close_tag
let close_stag = close_tag
type formatter_stag_functions = formatter_tag_functions
let pp_get_formatter_stag_functions = pp_get_formatter_tag_functions
let get_formatter_stag_functions = get_formatter_tag_functions
let pp_set_formatter_stag_functions = pp_set_formatter_tag_functions
let set_formatter_stag_functions = set_formatter_tag_functions
let get_stag s = s
let update_stag_functions funs start_stag stop_stag =
let open Format in
{ funs with
mark_open_tag = start_stag;
mark_close_tag = stop_stag }
"
let shims_fmt_post_408 = "
include Format
let get_stag = function
| String_tag s -> s
| _ -> raise Not_found
let update_stag_functions funs start_stag stop_stag =
let open Format in
{ funs with
mark_open_stag = start_stag;
mark_close_stag = stop_stag }
"
let () =
C.main ~name:"mkshims" (fun c ->
let version = C.ocaml_config_var_exn c "version" in
let major, minor =
Scanf.sscanf version "%u.%u" (fun maj min -> maj, min) in
write_file "format_shims.ml"
(if (major, minor) >= (4,8)
then shims_fmt_post_408
else shims_fmt_pre_408);
)
|
05e6349483726e688a009549433f4baad765de31e403387fb08dbe84296e54e1 | huangz1990/real-world-haskell-cn | Mutable.hs | new :: (a -> [Word32]) -> Word32 -> ST s (MutBloom s a)
new hash numBits = MB hash `liftM` newArray (0,numBits-1) False
insert :: MutBloom s a -> a -> ST s ()
insert filt elt = indices filt elt >>=
mapM_ (\bit -> writeArray (mutArray filt) bit True)
indices :: MutBloom s a -> a -> ST s [Word32]
indices filt elt = do
modulus <- length filt
return $ map (`mod` modulus) (mutHash filt elt)
elem, notElem :: a -> MutBloom s a -> ST s Bool
elem elt filt = indices filt elt >>=
allM (readArray (mutArray filt))
notElem elt filt = not `liftM` elem elt filt
allM :: Monad m => (a -> m Bool) -> [a] -> m Bool
allM p (x:xs) = do
ok <- p x
if ok
then allM p xs
else return False
allM _ [] = return True | null | https://raw.githubusercontent.com/huangz1990/real-world-haskell-cn/f67b07dd846b1950d17ff941d650089fcbbe9586/code/ch26/BloomFilter/Mutable.hs | haskell | new :: (a -> [Word32]) -> Word32 -> ST s (MutBloom s a)
new hash numBits = MB hash `liftM` newArray (0,numBits-1) False
insert :: MutBloom s a -> a -> ST s ()
insert filt elt = indices filt elt >>=
mapM_ (\bit -> writeArray (mutArray filt) bit True)
indices :: MutBloom s a -> a -> ST s [Word32]
indices filt elt = do
modulus <- length filt
return $ map (`mod` modulus) (mutHash filt elt)
elem, notElem :: a -> MutBloom s a -> ST s Bool
elem elt filt = indices filt elt >>=
allM (readArray (mutArray filt))
notElem elt filt = not `liftM` elem elt filt
allM :: Monad m => (a -> m Bool) -> [a] -> m Bool
allM p (x:xs) = do
ok <- p x
if ok
then allM p xs
else return False
allM _ [] = return True | |
2e0432e9adc1d0ad4a21796478531bc52a3245a789368fad2d88f41e2fb8e05a | justinethier/cyclone | nqueens-cyclone-cps.scm | ; loading nqueens-cyclone-cps.scm ...
((define nqueens
(lambda (k$30 n$4)
((lambda (dec-to$10 ok?$9 try$8)
((lambda (dec-to$13 try$12 ok?$11)
((lambda (r$67)
((lambda (r$31)
((lambda (r$50)
((lambda (r$32)
((lambda (r$37)
((lambda (r$33)
(dec-to$10
(lambda (r$34)
((lambda (r$35)
((lambda (r$36) (try$8 k$30 r$34 r$35 r$36)) '()))
'()))
n$4))
(set! ok?$9 r$37)))
(lambda (k$38 row$16 dist$15 placed$14)
((lambda (r$39)
(if r$39
(k$38 #t)
((lambda (r$48)
((lambda (r$49)
((lambda (r$47)
(not (lambda (r$40)
(if r$40
((lambda (r$45)
((lambda (r$46)
((lambda (r$44)
(not (lambda (r$41)
(if r$41
((lambda (r$42)
((lambda (r$43) (ok?$9 k$38 row$16 r$42 r$43))
(cdr placed$14)))
(+ dist$15 1))
(k$38 #f)))
r$44))
(= r$45 r$46)))
(- row$16 dist$15)))
(car placed$14))
(k$38 #f)))
r$47))
(= r$48 r$49)))
(+ row$16 dist$15)))
(car placed$14))))
(null? placed$14)))))
(set! try$8 r$50)))
(lambda (k$51 x$19 y$18 z$17)
((lambda (r$52)
(if r$52
((lambda (r$53) (if r$53 (k$51 1) (k$51 0)))
(null? y$18))
((lambda (k$59)
((lambda (r$66)
(ok?$9 (lambda (r$60)
(if r$60
((lambda (r$65)
(append
(lambda (r$61)
((lambda (r$62)
((lambda (r$64)
((lambda (r$63) (try$8 k$59 r$61 r$62 r$63))
(cons r$64 z$17)))
(car x$19)))
'()))
r$65
y$18))
(cdr x$19))
(k$59 0)))
r$66
1
z$17))
(car x$19)))
(lambda (r$54)
((lambda (r$56)
((lambda (r$58)
((lambda (r$57)
(try$8 (lambda (r$55) (k$51 (+ r$54 r$55)))
r$56
r$57
z$17))
(cons r$58 y$18)))
(car x$19)))
(cdr x$19))))))
(null? x$19)))))
(set! dec-to$10 r$67)))
(lambda (k$68 n$20)
((lambda (r$69)
((lambda (i$22 l$21)
((lambda (loop$23)
((lambda (r$71)
((lambda (r$70) (loop$23 k$68 i$22 l$21))
(set! loop$23 r$71)))
(lambda (k$72 i$25 l$24)
((lambda (r$73)
(if r$73
(k$72 l$24)
((lambda (r$74)
((lambda (r$75) (loop$23 k$72 r$74 r$75))
(cons i$25 l$24)))
(- i$25 1))))
(= i$25 0)))))
#f))
n$20
r$69))
'()))))
#f
#f
#f))
#f
#f
#f)))
((lambda ()
((lambda (r$26) (nqueens (lambda (r$27) (write %halt r$27)) 8)) 0))))
| null | https://raw.githubusercontent.com/justinethier/cyclone/a1c2a8f282f37ce180a5921ae26a5deb04768269/tests/debug/compilation/nqueens-cyclone-cps.scm | scheme | loading nqueens-cyclone-cps.scm ... | ((define nqueens
(lambda (k$30 n$4)
((lambda (dec-to$10 ok?$9 try$8)
((lambda (dec-to$13 try$12 ok?$11)
((lambda (r$67)
((lambda (r$31)
((lambda (r$50)
((lambda (r$32)
((lambda (r$37)
((lambda (r$33)
(dec-to$10
(lambda (r$34)
((lambda (r$35)
((lambda (r$36) (try$8 k$30 r$34 r$35 r$36)) '()))
'()))
n$4))
(set! ok?$9 r$37)))
(lambda (k$38 row$16 dist$15 placed$14)
((lambda (r$39)
(if r$39
(k$38 #t)
((lambda (r$48)
((lambda (r$49)
((lambda (r$47)
(not (lambda (r$40)
(if r$40
((lambda (r$45)
((lambda (r$46)
((lambda (r$44)
(not (lambda (r$41)
(if r$41
((lambda (r$42)
((lambda (r$43) (ok?$9 k$38 row$16 r$42 r$43))
(cdr placed$14)))
(+ dist$15 1))
(k$38 #f)))
r$44))
(= r$45 r$46)))
(- row$16 dist$15)))
(car placed$14))
(k$38 #f)))
r$47))
(= r$48 r$49)))
(+ row$16 dist$15)))
(car placed$14))))
(null? placed$14)))))
(set! try$8 r$50)))
(lambda (k$51 x$19 y$18 z$17)
((lambda (r$52)
(if r$52
((lambda (r$53) (if r$53 (k$51 1) (k$51 0)))
(null? y$18))
((lambda (k$59)
((lambda (r$66)
(ok?$9 (lambda (r$60)
(if r$60
((lambda (r$65)
(append
(lambda (r$61)
((lambda (r$62)
((lambda (r$64)
((lambda (r$63) (try$8 k$59 r$61 r$62 r$63))
(cons r$64 z$17)))
(car x$19)))
'()))
r$65
y$18))
(cdr x$19))
(k$59 0)))
r$66
1
z$17))
(car x$19)))
(lambda (r$54)
((lambda (r$56)
((lambda (r$58)
((lambda (r$57)
(try$8 (lambda (r$55) (k$51 (+ r$54 r$55)))
r$56
r$57
z$17))
(cons r$58 y$18)))
(car x$19)))
(cdr x$19))))))
(null? x$19)))))
(set! dec-to$10 r$67)))
(lambda (k$68 n$20)
((lambda (r$69)
((lambda (i$22 l$21)
((lambda (loop$23)
((lambda (r$71)
((lambda (r$70) (loop$23 k$68 i$22 l$21))
(set! loop$23 r$71)))
(lambda (k$72 i$25 l$24)
((lambda (r$73)
(if r$73
(k$72 l$24)
((lambda (r$74)
((lambda (r$75) (loop$23 k$72 r$74 r$75))
(cons i$25 l$24)))
(- i$25 1))))
(= i$25 0)))))
#f))
n$20
r$69))
'()))))
#f
#f
#f))
#f
#f
#f)))
((lambda ()
((lambda (r$26) (nqueens (lambda (r$27) (write %halt r$27)) 8)) 0))))
|
9d257ed412dd8db1b58d12799ba0e7ed45c633d14e5df73033d4ad9c42566657 | ucla-pls/jreduce | Logic.hs | # LANGUAGE LambdaCase #
# LANGUAGE DeriveGeneric #
{-# LANGUAGE DeriveAnyClass #-}
# LANGUAGE ApplicativeDo #
# LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE FlexibleContexts #
# LANGUAGE TemplateHaskell #
# LANGUAGE EmptyCase #
# LANGUAGE ViewPatterns #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TypeFamilies #
{-# LANGUAGE TypeSynonymInstances #-}
# LANGUAGE FlexibleInstances #
# LANGUAGE BlockArguments #
{-# LANGUAGE RankNTypes #-}
# LANGUAGE TupleSections #
module JReduce.Logic where
-- lens
import Control.Lens hiding ( andOf
, orOf
, (<.>)
)
-- containers
import qualified Data.Set as S
import qualified Data.Map.Strict as M
import qualified Data.IntSet as IS
-- vector
import qualified Data.Vector as V
-- base
import Data.Foldable hiding ( and
, or
)
import Data.Maybe
import Text.Printf
import Data.IORef
import Data.Tuple
import Data.Char ( isNumber )
import Data.Monoid
import Text.Show
import Control.Monad
import Control.Monad.IO.Class
import qualified Data.List as L
import qualified Data.List.NonEmpty as NE
import GHC.Generics ( Generic )
import Prelude hiding ( fail
, not
, and
, or
, lookup
)
import qualified Data . ByteString as BS
import qualified Data.ByteString.Lazy as BL
-- jvmhs
import Jvmhs.Data.Type
import Jvmhs.TypeCheck
import Jvmhs.Data.Code
import Jvmhs hiding ( methodExist
, fieldExist
)
import qualified Jvmhs
-- containers
import qualified Data.IntMap.Strict as IntMap
import qualified Data.Tree as T
-- directory
import System.Directory
-- nfdata
import Control.DeepSeq
-- jvm-binary
import qualified Language.JVM as B
import Language.JVM.ByteCode ( ByteCodeOpr(..) )
-- filepath
import System.FilePath
-- text
import qualified Data.Text as Text
import qualified Data.Text.Lazy.IO as LazyText
import qualified Data.Text.Lazy as LazyText
import qualified Data.Text.Lazy.Builder as LazyText
import qualified Data.Text.Lazy.Builder as Builder
-- reduce-util
import Control.Reduce.Boolean
import Control.Reduce.Graph as G
import Control.Reduce.Boolean.CNF as CNF
import Control.Reduce.Progression
import qualified Control.Reduce.Boolean.LiteralSet
as LS
import Control.Reduce.Problem
import Control.Reduce.Reduction
import Control.Reduce.Util.Logger as L
-- unorderd-containers
import qualified Data.HashSet as HS
-- jreduce
import JReduce.Target
import JReduce.Config
data Item
= IContent Content
| ICode ((Class, Method), Code)
| ITarget Target
| ISuperClass (Class, (Annotated ClassType))
| IImplements (Class, (Annotated ClassType))
| IField (Class, Field)
| IFieldFinal (Class, Field)
| IMethod (Class, Method)
| IInnerClass (Class, InnerClass)
| IBootstrapMethod (Class, (Int, BootstrapMethod))
makePrisms ''Item
data Fact
= ClassExist ClassName
| CodeIsUntuched AbsMethodId
| HasSuperClass ClassName ClassName
| HasInterface ClassName ClassName
| FieldExist AbsFieldId
| FieldIsFinal AbsFieldId
| MethodExist AbsMethodId
| IsInnerClass ClassName ClassName
-- | MethodThrows AbsMethodId ClassName
| HasBootstrapMethod ClassName Int
| Meta
deriving (Eq, Ord, Generic, NFData)
displayFact :: Fact -> Builder.Builder
displayFact = \case
ClassExist cn -> toBuilder cn
CodeIsUntuched md -> toBuilder md <> "!code"
HasSuperClass cn1 cn2 -> toBuilder cn1 <> "<S]" <> toBuilder cn2
HasInterface cn1 cn2 -> toBuilder cn1 <> "<I]" <> toBuilder cn2
FieldExist fd -> toBuilder fd
FieldIsFinal fd -> toBuilder fd <> "[final]"
MethodExist md -> toBuilder md
IsInnerClass cn1 cn2 -> toBuilder cn1 <> "[innerOf]" <> toBuilder cn2
-- MethodThrows m cn -> toBuilder m <> "[throws]" <> toBuilder cn
HasBootstrapMethod cn b ->
toBuilder cn <> "[bootstrap]" <> Builder.fromString (show b)
Meta -> "meta"
requireClassNamesOf ::
(HasClassName c, HasClassNames a)
=> c -> Getting (Endo (Endo (Stmt Fact))) s a -> s -> Stmt Fact
requireClassNamesOf c l a =
forallOf (l . classNames) a (requireClassName c)
decompose :: Ord a => S.Set a -> (M.Map a Int, V.Vector a)
decompose a =
(M.fromAscList . toList $ imap (flip (,)) res, res)
where res = V.fromList (toList a)
checkScope :: S.Set ClassName -> Fact -> Bool
checkScope scope = \case
ClassExist a -> fn $ a
CodeIsUntuched m -> fn $ m ^. className
HasSuperClass cn _ -> fn $ cn
HasInterface cn _ -> fn $ cn
FieldExist f -> fn $ f ^. className
FieldIsFinal f -> fn $ f ^. className
MethodExist m -> fn $ m ^. className
IsInnerClass cn _ -> fn $ cn
MethodThrows m _ - > fn $ m ^. className
HasBootstrapMethod cn _ -> fn $ cn
Meta -> True
where fn k = k `S.member` scope
itemR :: PartialReduction Item Item
itemR f' = \case
ITarget t -> fmap ITarget <$> targetR f' t
IContent c -> fmap IContent <$> contentR f' c
IMethod (c, m) -> fmap (IMethod . (c, )) <$> (part $ methodR c) f' m
IField (c, m) -> fmap (IField . (c, )) <$> (part $ fieldR c) f' m
a -> pure (Just a)
where
contentR :: PartialReduction Content Item
contentR f = \case
ClassFile c -> fmap ClassFile <$> (part classR) f c
Jar c -> fmap Jar <$> (deepDirForestR . reduceAs _IContent) f c
a -> pure $ Just a
targetR :: PartialReduction Target Item
targetR = deepDirTreeR . reduceAs _IContent
classR :: Reduction Class Item
classR f c = do
_super <- case c ^. classSuper of
Just a
| a ^. annotatedContent . to classNameFromType == "java/lang/Object" -> pure
$ Just a
| otherwise -> (payload c . reduceAs _ISuperClass) f a <&> \case
Just a' -> Just a'
Nothing ->
Just (withNoAnnotation (classTypeFromName "java/lang/Object"))
Nothing -> pure $ Nothing
fields <- (listR . payload c . reduceAs _IField) f (c ^. classFields)
methods <- (listR . payload c . reduceAs _IMethod) f (c ^. classMethods)
innerClasses <- (listR . payload c . reduceAs _IInnerClass)
f
(c ^. classInnerClasses)
_interfaces <- (listR . payload c . reduceAs _IImplements)
f
(c ^. classInterfaces)
bootstrapMethods <-
(iso IntMap.toAscList IntMap.fromAscList
. listR . payload c . reduceAs _IBootstrapMethod)
f (c ^.classBootstrapMethods)
pure
$ c & classSuper .~ _super
& classFields .~ fields
& classMethods .~ methods
& classInnerClasses .~ innerClasses
& classInterfaces .~ _interfaces
& classBootstrapMethods .~ bootstrapMethods
fieldR :: Class -> Reduction Field Item
fieldR cls fn f = do
if f ^. fieldAccessFlags . contains FFinal
then fn (IFieldFinal (cls, f)) <&> \case
Just (IFieldFinal _) -> f
_ -> f & fieldAccessFlags . at FFinal .~ Nothing
else pure f
methodR :: Class -> Reduction Method Item
methodR cls f m = do
t <- case m ^. methodCode of
Just c -> f (ICode ((cls, m), c)) <&> \case
Just (ICode (_, c')) -> Just c'
_ -> Nothing
_ -> pure Nothing
_ methodThrows < - ( listR . payload ( cls , m ) . reduceAs )
-- f
-- (m ^. methodExceptions)
pure
$ (case t of
Just c -> m & methodCode .~ Just c
Nothing -> stub m
)
-- & methodExceptions
.~ _ methodThrows
unBuilder :: Builder.Builder -> String
unBuilder = LazyText.unpack . Builder.toLazyText
showsVariable :: (k -> Builder.Builder) -> V.Vector (k, [Int]) -> Int -> ShowS
showsVariable displayK variables i =
case variables V.!? i of
Just (k, idx) ->
showString (unBuilder $ displayK k <> display (reverse idx))
Nothing ->
shows i
initializeKeyFunction ::
forall m. MonadIOReader Config m
=> LogicConfig -> Target -> FilePath -> m (V.Vector (Fact, [Int]), Int, ([Int], Item) -> m CNF)
initializeKeyFunction cfg trg wf = L.phase "Initializing key function" do
hry <- fetchHierachy wf (targetClasses trg)
core <- view cfgCore
let
items =
(if reverseOrder cfg then reverse else id)
(itemsOfTarget trg)
tpinfo <- L.phase "Type checking methods" $ M.fromList . catMaybes <$> forM items \case
(_, ICode ((cls, method), code)) -> do
let theMethodName = mkAbsMethodId cls method
case typeCheck hry theMethodName (method^.methodAccessFlags.contains MStatic) code of
(Just (i, x), _) -> do
L.err $ "In " <> display theMethodName
case code^?codeByteCode.ix i of
Just s -> do
L.err $ " at offset: " <> display (B.offset s)
L.err $ " at opcode: " <> display (B.opcode s)
Nothing -> do
return ()
L.err $ " got: " <> display x
view cfgUnsafe >>= \case
True -> pure Nothing
False -> fail $ "Could not typecheck " <> show theMethodName
(Nothing, vc) -> pure (Just (theMethodName, vc))
_ ->
pure Nothing
let
lfn = logic cfg hry tpinfo
factsToVar :: M.Map Fact (S.Set Int)
factsToVar =
M.fromListWith S.union
[ (f, S.singleton i)
| (i, f) <- V.toList (V.indexed facts)
]
back :: V.Vector ([Int], Item)
back =
V.fromList items
facts :: V.Vector Fact
facts =
V.map (fst . lfn . snd) back
variables :: V.Vector (Fact, [Int])
variables = V.zip facts (V.map fst back)
cores :: V.Vector Bool
cores =
V.map (\fact-> serializeWith displayFact fact `HS.member` core) facts
indiciesToVar :: M.Map [Int] Int
indiciesToVar =
M.fromList (map swap . V.toList . V.indexed . V.map fst $ back)
maxid <- liftIO $ newIORef (V.length back)
let
handler :: ([Int], (Fact, Stmt Fact)) -> m CNF
handler (idx, (fact, stmt)) = L.logtime L.DEBUG ("Processing " <> debuginfo) $ do
case stmtWarnings stmt of
[] -> return ()
msgs -> do
L.err
$ "Warnings found while computing logical dependencies for "
<> displayFact fact
forM_ msgs \msg ->
L.err (Builder.fromString msg)
view cfgUnsafe >>= \case
True -> pure ()
False -> fail $ "warnings found while computing logical depenencies"
mid <- liftIO $ readIORef maxid
let cnf = toMinimalCNF mid nnfAfter
liftIO $ writeIORef maxid
(max mid . maybe minBound fst . IS.maxView $ cnfVariables cnf)
whenM (view cfgDumpItems) . liftIO $ do
LazyText.appendFile (wf </> "items.txt") . LazyText.toLazyText
$ displayText key <> display (reverse idx) <> "\n"
LazyText.appendFile (wf </> "items-logical.txt") . LazyText.toLazyText
$ displayText key <> display (reverse idx) <> " " <> display v <> "\n"
<> " LV1 " <> displayString (showsStmtWith showsFact stmt "\n")
<> " LV2 " <> displayString (showsNnfWith showsFact nnf "\n")
<> " LV3 " <> displayString (showsNnfWith (showsVariable displayFact variables) nnfAfter "\n")
<> foldMap ((" " <>) . displayClause) (cnfClauses cnf)
return cnf
where
v = indiciesToVar M.! idx
key = serializeWith displayFact fact
isCore = cores V.! v
debuginfo =
displayText key <> (if isCore then " CORE" else "")
nnf :: Nnf Fact
nnf =
flattenNnf . nnfFromStmt . fromStmt $ stmt
nnfAfter :: Nnf Int
nnfAfter =
flattenNnf . nnfFromStmt . fromStmt
. (case idx of
[] -> id
_:rest -> \s ->
s /\ (tt (indiciesToVar M.! idx) ==> tt (indiciesToVar M.! rest))
)
. (if not isCore then id else \s -> s /\ tt (indiciesToVar M.! idx))
. runIdentity
$ traverseVariables (\f -> pure $ forallOf (ix f.folded) factsToVar \i -> tt i
) stmt
showsFact :: Fact -> ShowS
showsFact = showString . unBuilder . displayFact
displayClause c =
displayString (LS.displayImplication (showsVariable displayFact variables) c "\n")
L.info . L.displayf "Found %d items." $ L.length items
L.info . L.displayf "Found %d facts." $ M.size factsToVar
L.info . L.displayf "The core is %d of them." $ L.length core
mid <- liftIO $ readIORef maxid
return (variables, mid, handler . over _2 lfn)
where
itemsOfTarget :: Target -> [([Int], Item)]
itemsOfTarget = itoListOf (deepSubelements itemR) . review _ITarget
computeCNF ::
MonadIOReader Config m
=> (Int -> ShowS)
-> (a -> m CNF)
-> FilePath
-> [a]
-> m CNF
computeCNF sv keyFun wf items = L.phase "Compute CNF" do
cnf <- CNF . foldMap cnfClauses <$> mapM keyFun items
whenM (view cfgDumpLogic) . liftIO $ do
LazyText.appendFile (wf </> "cnf.txt")
. LazyText.toLazyText
$ foldMap (\c -> displayString $ LS.displayImplication sv
c "\n") (cnfClauses cnf)
return cnf
logProgression ::
forall m k.
MonadIOReader Config m
=> FilePath
-> (k -> Builder.Builder)
-> V.Vector (k, [Int])
-> CNF
-> IS.IntSet
-> m (NE.NonEmpty IS.IntSet)
logProgression prog displayK variables cnf is = do
let (limitedCNF, lok) = limitCNF is cnf
let progs = calculateSimpleProgression cnf is
dumpClosures <- view cfgDumpClosures
when dumpClosures . liftIO $ do
createDirectoryIfMissing True prog
i <- findNext 0
LazyText.writeFile (indexedFile i "progression.txt")
. LazyText.toLazyText
. foldMap
(\a -> (fold
. L.intersperse " | "
. map (displayShowS . showsVariable displayK variables)
$ IS.toList a)
<> "\n")
$ progs
LazyText.writeFile (indexedFile i "cnf.txt")
. LazyText.toLazyText
$ foldMap
(\c ->
displayString $ LS.displayImplication
(showsVariable displayK variables . (lok V.!))
c "\n"
)
(cnfClauses limitedCNF)
LazyText.writeFile ( indexedFile i " variableorder.txt " )
-- . LazyText.toLazyText
-- $ foldMap
( \i - > ( showsVariable displayK variables i ) < > " \n " )
( generateTotalGraphOrder ( ) limitedCNF )
return progs
where
-- showVar variables i =
-- showsVariable displayK variables i ""
-- printTotalGraphOrder' lok i n cnf = do
-- BL.writeFile (indexedFile i "graph.csv") $
-- G.writeEmptyCSV (showVar variables <$> graph)
-- writeCNF lok ( indexedFile i " unit-resolved.txt " ) cnf '
-- writeFile (indexedFile i "dff.txt") (show . map (fmap (showVar variables)) $ G.dff graph)
-- writeFile (indexedFile i "vars.txt") (show $ G.postOrd graph)
-- where
-- -- Just (LS.splitLiterals -> (_, _), cnf') = CNF.unitResolve cnf
-- Make this better , choose free variables first .
-- (graph, _) = G.buildGraphFromNodesAndEdges
-- [ (a, a) | a <- [ 0..n-1 ]]
-- [ G.Edge () t f
-- | c <- S.toList $ CNF.cnfClauses cnf
-- , let (ff, tt) = LS.splitLiterals c
-- , (f, t) <- liftM2 (,) (IS.toList ff) (IS.toList tt)
-- ]
-- -- lookup = V.fromList
-- . (IS.toList tt ++)
-- . reverse
. filter ( Prelude.not . ( ` IS.member ` tt ) ) $ G.postOrd graph
indexedFile :: Int -> String -> FilePath
indexedFile i name = printf (prog </> "%04i-%s") i name
findNext i = do
let path = indexedFile i "progression.txt"
b <- doesPathExist path
if not b then return i else findNext (i+1)
describeLogicProblem ::
forall a m.
( MonadFail m
, MonadIOReader Config m
)
=> LogicConfig
-> FilePath
-> Problem a Target
-> m (CNF, V.Vector (Fact, [Int]), Problem a IS.IntSet)
describeLogicProblem cfg wf p = (\((a,b), c) -> (a,b,c)) <$> flip refineProblemA' p \s -> do
(variables, _, keyFun) <- initializeKeyFunction cfg s wf
cnf <- computeCNF (showsVariable displayFact variables) keyFun wf
$ itoListOf (deepSubelements itemR) (ITarget s)
An IPF should have no clauses with only positive variables .
case CNF.nonIPFClauses cnf of
[] ->
return ()
clauses -> do
L.err "The created CNF was not an IPF, this is a critical error."
L.err "Please report at -pls/jreduce."
forM_ clauses \cls ->
L.err (displayString $ LS.displayImplication (showsVariable displayFact variables) cls "")
fail "The created CNF was not IPF"
let
(cnf', lookup) = CNF.limitCNF (cnfVariables cnf) cnf
n = V.length lookup
(graph, _) = G.buildGraphFromNodesAndEdges
[ (a, a) | a <- [ 0..n-1 ]]
[ G.Edge () t f
| c <- S.toList $ CNF.cnfClauses cnf'
, let (ff', tt') = LS.splitLiterals c
, (f, t) <- liftM2 (,) (IS.toList ff') (IS.toList tt')
]
order = V.fromList . reverse $ G.postOrd graph
-- generateTotalGraphOrder n cnf'
revorder =
inverseOrder order
cnf'' =
CNF.vmapCNF (revorder V.!) cnf'
variables' =
V.map ((variables V.!?) . (lookup V.!)) order
grphvariables =
V.map (variables V.!?) lookup
showGraphVar i =
showsVariable displayFact (V.map fromJust grphvariables) i ""
whenM (view cfgDumpLogic) . liftIO $ do
BL.writeFile (wf </> "variable-graph.csv") $
G.writeEmptyCSV (showGraphVar <$> graph)
LazyText.writeFile (wf </> "cnf-2.txt")
. LazyText.toLazyText
$ foldMap
(\c ->
displayString $ LS.displayImplication
(\i -> (showsVariable displayFact (V.map fromJust variables') i))
c "\n"
)
(cnfClauses cnf'')
writeFile (wf </> "dff.txt") $
T.drawForest (map (fmap showGraphVar) $ G.dff graph)
LazyText.writeFile (wf </> "variableorder.txt") $
LazyText.toLazyText
$ foldMap
(\i -> displayShowS (showsVariable displayFact (V.map fromJust grphvariables) i) <> "\n")
order
let
fromVars :: IS.IntSet -> Maybe Target
fromVars vars = preview _ITarget =<<
limit (deepReduction itemR) (`S.member` varset) (ITarget s)
where
varset = S.fromList . map snd
. mapMaybe (variables' V.!)
. IS.toList
$ vars
return
( (cnf'', V.map fromJust variables')
, (fromVars, cnfVariables cnf')
)
approxLogicProblem ::
CNF
-> Problem a IS.IntSet
-> Problem a [Int]
approxLogicProblem ipf =
refineProblem
(\s -> (Just . calculateLogicalClosure ipf . IS.fromList, IS.toList s))
displayShowS :: ShowS -> Builder.Builder
displayShowS f = displayString (f "")
describeGraphProblem ::
MonadIOReader Config m
=> LogicConfig
-> Bool
^ choose the first item
-> FilePath
-> Problem a Target
-> m (Problem a [IS.IntSet])
describeGraphProblem cfg choose_first wf p = flip refineProblemA p \s -> do
(variables, mid, keyFun) <- initializeKeyFunction cfg s wf
cnf <- computeCNF (showsVariable displayFact variables) keyFun wf
$ itoListOf (deepSubelements itemR) (ITarget s)
let
(required, edges') = fold
[ case (if choose_first
then over both IS.minView
else over both IS.maxView) $ LS.splitLiterals clause of
(Nothing , Just (t, _)) -> (IS.singleton t, mempty)
(Just (f, _), Just (t, _)) -> (mempty, S.singleton (f,t))
_ -> error "CNF is not IPF"
| clause <- S.toList $ cnfClauses cnf
]
(graph, rev) = buildGraphFromNodesAndEdges
[(k,k) | k <- [0..mid - 1]]
[Edge () f t | (f, t) <- S.toList edges']
core = closure graph (mapMaybe rev $ IS.toList required)
fromClosures cls = preview _ITarget =<<
limit (deepReduction itemR) (`S.member` varset) (ITarget s)
where
varset = S.fromList . map snd
. mapMaybe (variables V.!?)
. map (nodeLabel . (nodes graph V.!))
. IS.toList . IS.unions
$ core:cls
_targets =
filter (not . IS.null)
. map (IS.\\ core)
$ closures graph
dumpGraphInfo wf
(graph <&> flip (showsVariable displayFact variables) "")
core _targets
( fromIntegral . IS.size .
. ( V.length variables )
. IS.unions
return (fromClosures, _targets)
logic :: LogicConfig -> Hierarchy -> M.Map AbsMethodId (V.Vector TypeCheckState) -> Item -> (Fact, Stmt Fact)
logic LogicConfig{..} hry tpinfo = \case
IContent (ClassFile cls) -> ClassExist (cls ^. className)
`withLogic` \c ->
[ -- We also do not reduce type parameters. Thier requirements are just that
-- all classes mention should exist if this class exist.
c ==> requireClassNamesOf cls (classTypeParameters.folded) cls
, c ==> requireClassNamesOf cls (classAnnotations.folded) cls
If the class is a enum , it needs to extend java.lang . and have
-- these methods and fields
given (cls^.classAccessFlags.contains CEnum) $ c ==>
requireSubclass hry (cls^.className) "java/lang/Enum"
/\ given (cls^?classSuper._Just.simpleType == Just "java/lang/Enum")
( and
[ requireMethod hry cls . mkAbsMethodId cls $ "values"
<:> MethodDescriptor []
(ReturnDescriptor . Just . JTRef . JTArray .JTRef . JTClass $ cls^.className)
, requireMethod hry cls . mkAbsMethodId cls $ "valueOf"
<:> MethodDescriptor ["Ljava/lang/String;"]
(ReturnDescriptor . Just . JTRef . JTClass $ cls^.className)
, requireField hry cls . mkAbsFieldId cls $ "$VALUES"
<:> FieldDescriptor (JTRef . JTArray .JTRef . JTClass $ cls^.className)
]
)
, -- We also do also not reduce enclosing methods. If a class is enclosed
-- in another class, require that to exist, and if the class is enclosed
-- in a method require that to exist.
forallOf (classEnclosingMethod._Just) cls
\(cn, mMId) -> c ==> case mMId of
Just m -> codeIsUntuched (mkAbsMethodId cn m) /\ isInnerClassOf cls cn
Nothing -> requireClassName cls cn
]
IField (cls, field) -> FieldExist (mkAbsFieldId cls field)
`withLogic` \f ->
[ f ==> requireClassNamesOf cls fieldType field
, f ==> requireClassNamesOf cls (fieldAnnotations.folded) field
, -- TODO: Reconsider this?
-- If any field is synthetic we will require it to not be removed, if the
-- class exist. This helps with many problems.
given (FSynthetic `S.member` flags) do
classExist cls ==> f
, -- If class is an interface and the feild is static keep the
-- classInitializers
given (cls^.classAccessFlags .contains CAbstract && field^.fieldAccessFlags.contains FStatic) do
forallOf classInitializers cls \m ->
f ==> codeIsUntuched m
]
where flags = field^.fieldAccessFlags
IFieldFinal (cls, field) -> FieldIsFinal (mkAbsFieldId cls field)
`withLogic` \f ->
[ -- If a field is final it has to be set. This means we cannot stub
-- class initializers and class constructors.
if FStatic `S.member` flags
then
forallOf classInitializers cls \m ->
f ==> codeIsUntuched m
else
forallOf classConstructors cls \m ->
f ==> codeIsUntuched m
, -- If a field is synthetic or static do not remove any final flags.
-- final static fields are treated differently than other fields, and
-- are more like constants.
given (FSynthetic `S.member` flags \/ FStatic `S.member` flags) $
fieldExist (mkAbsFieldId cls field) ==> f
]
where flags = field^.fieldAccessFlags
IMethod (cls, method) -> MethodExist (mkAbsMethodId cls method)
`withLogic` \m ->
[ -- Since we do not remove the return argument or the arguemnts we have to build
-- their requirements here.
m ==> requireClassNamesOf cls
(methodReturnType.classNames <> methodParameters.folded.classNames)
method
, -- If you are a constructor, you have to be removed completely if you can be
-- removed
given (method^.methodIdName == "<init>") $
m ==> codeIsUntuched (mkAbsMethodId cls method)
-- Require the classNames of the exceptions
, m ==> requireClassNamesOf cls (methodExceptions.folded) method
, -- Type parameters might contain classes
m ==> requireClassNamesOf cls
(methodTypeParameters.folded)
method
, m ==> requireClassNamesOf cls
(methodAnnotations.folded)
method
, if method^.methodAccessFlags.contains MAbstract
then
-- If a method is abstract, then it has to be implemented in all of its
-- implementations. Say that A implements I and is not abstract, then
-- it should implement all the methods of I. For each such path, we
require it to be true or one of it 's super classes to have implemented
-- it.
forall (implementationPaths (cls^.className) hry)
\(def, isAbstract, path) ->
given (not isAbstract)
$ m /\ unbrokenPath path
==> requireNonAbstractMethod hry cls (mkAbsMethodId def method)
else
-- If the methods is not abstract, make sure that the method defintion
-- does exist. A chain from A <: I <: !I. If I does not exit, either
-- this method have to stay or we have to remove the implements interface.
forall (superDeclarationPaths (mkAbsMethodId cls method) hry)
\(decl, isAbstract, path) -> given isAbstract
$ methodExist decl /\ unbrokenPath path ==>
requireNonAbstractMethod hry cls (mkAbsMethodId cls method)
, m ==> requireClassNamesOf cls (methodDefaultAnnotation._Just) method
, -- TODO: Nessary?
-- Finally we require that if a method is synthetic is should be
-- removed alongside its code
given (method^.methodAccessFlags.contains MSynthetic)
$ m ==> codeIsUntuched (mkAbsMethodId cls method)
]
IImplements (cls, ct) -> HasInterface (cls^.className) (ct^.simpleType)
`withLogic` \i ->
[ -- An Implements only depends on the interface that it implements, and
-- its type parameters.
i ==> requireClassNames cls ct
, -- Given that we should keep the extends
given keepHierarchy $ classExist cls ==> i
]
ISuperClass (cls, ct) -> HasSuperClass (cls^.className) (ct^.simpleType)
`withLogic` \s ->
[ -- An Implements only depends on the class of the supertype and its type
-- parameters.
s ==> requireClassNames cls ct
, -- In case the superclass have no empty init method we require at least
one of it 's constructors to exist .
let
ctc = ct^.simpleType
mid = mkAbsMethodId ctc ("<init>:()V" :: MethodId)
in
s ==>
case Jvmhs.methodExist mid hry of
Just (view stubMethodAccess -> access)
| access >= Protected
\/ access == Default /\ ctc^.package == cls^.className.package ->
methodExist mid \/ existOf classConstructors cls codeIsUntuched
_ ->
existOf classConstructors cls codeIsUntuched
, -- Given that we should keep the extends
given keepHierarchy $ classExist cls ==> s
]
IInnerClass (cls, ic) -> IsInnerClass (cls^.className) (ic^.innerClass)
`withLogic` \i ->
An innerclass depends on all classes referenced by the innerClass .
i ==> requireClassNames cls ic
, -- If inner class is ponting to itself, then it required as long at the
-- class exist.
given (cls^.className == ic^.innerClass) $
classExist cls ==> i
, -- If the outer class is an this class then we can not remove this
-- innerclass statement before the innerclass have been removed
given (Just (cls^.className) == ic^.innerOuterClass) $
classExist (ic^.innerClass) ==> i
-- NOTE: That all requirements that a class exist also will check if
the innerclass exist . The rule is that if a class refer to an innerclass
-- it must have an innerclass entry that describes that class.
]
IBootstrapMethod (cls, (i, btm)) ->
HasBootstrapMethod (cls ^.className) i `withLogic` \bm ->
[ bm ==> requireMethodHandle hry cls (btm^.bootstrapMethodHandle)
, bm ==> forallOf (bootstrapMethodArguments.folded) btm \case
VClass rf -> requireClassNames cls rf
VMethodType md ->
-- We would love to require the method, but we do not know the
-- abslocatio of the MethodDescriptor
requireClassNames cls md
VMethodHandle mh ->
requireMethodHandle hry cls mh
_ -> true
]
-- -- We do currently not reduce bootstrap methods, so their requirements are
-- -- handled from here.
-- forallOf (classBootstrapMethods.folded) cls
-- \(BootstrapMethod mhandle args) ->
c = = >
-- /\ requireClassNamesOf cls folded args
-- , ]
IMethodThrows ( ( cls , method ) , mt ) - >
-- MethodThrows (mkAbsMethodId cls method)
( mt^.simpleType )
-- `withLogic` \m ->
-- [ -- A method throws statement depends on all the class it mentions.
-- m ==> requireClassNames cls mt
-- , -- TODO: An indepth analysis of throws of the code?
-- given (has (methodCode._Just) method) $
-- codeIsUntuched (mkAbsMethodId cls method) ==> m
-- , -- Any class mentioned in this setting should extend throwable.
m = = > mt^.simpleType ` requireSubtype ` ( " java / lang / Throwable " : : ClassName )
-- -- , -- TODO: I this method extends a method it has to have it's execeptions.
-- -- forall (superDeclarationPaths mt hry)
-- -- \(decl, isAbstract, path) -> given isAbstract
-- $ /\ unbrokenPath path = = > m
-- ]
ICode ((cls, method), code) -> CodeIsUntuched theMethodName
`withLogic` \c -> case M.lookup theMethodName tpinfo of
Just typeCheckStates ->
[ -- If the code was not stubbed, then we have to require that the
-- classes in the exception table, stack map, and byte-code instructions
-- exits
-- c ==> requireClassNamesOf cls (codeExceptionTable.folded) code
c ==> requireClassNamesOf cls (codeStackMap._Just) code
, c ==> requireClassNamesOf cls (codeByteCode.folded) code
, c ==> forallOf (codeExceptionTable.folded.ehCatchType._Just) code \ct ->
requireClassName cls ct
/\ ct `requireSubtype` ("java/lang/Throwable" :: ClassName)
] ++
[ case oper of
ArrayStore _ ->
-- When we store an item in the array, it should be a subtype of the
-- content of the array.
c ==> stack 0 `requireSubtype` isArray (stack 2)
Get fa fid ->
For a get value is valid the field has to exist , and the first
-- element on the stack has to be a subclass of fields class.
c ==> requireField hry cls fid
/\ given (fa /= B.FldStatic) (stack 0 `requireSubtype` fid^.className)
-- TODO: Experimental overapproximation.
-- The idea is to require all extensions of the class variable.
Push (Just (VClass (JTClass cn))) ->
c ==> forall (S.fromList p') unbrokenPath
where
p'=
[ path
| b <- superclasses cn hry
, path <- subclassPaths cn b hry
]
Put fa fid ->
For a put value is valid the field has to exist , and the first
-- element on the stack has to be a subclass of fields class, and
the second element have to be a subtype of the type of the field
c ==> requireField hry cls fid
/\ stack 0 `requireSubtype` fid^.fieldIdType
/\ given (fa /= B.FldStatic)
(stack 1 `requireSubtype` fid^.className)
Invoke a ->
For the methods there are three general cases , a regular method call ,
-- a static methods call (no-object) and a dynamic call (no-class).
methodRequirements
/\ (c ==> and
[ s `requireSubtype` t
| (s, t) <- zip (state ^. tcStack) (reverse stackTypes)
]
)
where
(methodRequirements, stackTypes) =
case methodInvokeTypes a of
Right (isSpecial, isStatic, m) ->
( let mid = AbsMethodId $ m^.asInClass
in (c ==> if isSpecial then methodExist mid else requireMethod hry cls mid)
/\ given (Text.isPrefixOf "access$" (m^.methodIdName))
(methodExist mid ==> c)
/\ given (
( maybe False (isNumber . fst) . Text.uncons . last . Text.splitOn "$"
$ mid^.className.fullyQualifiedName
)
/\ mid^.className /= cls ^.className)
(classExist (mid^.className) ==> c)
, [asTypeInfo $ m^.asInClass.className | not isStatic]
<> (map asTypeInfo $ m^.methodIdArgumentTypes)
)
Left (i, m) ->
( ( c ==> requireBootstrapMethod cls (fromIntegral i) )
/\ ( requireBootstrapMethod cls (fromIntegral i) ==> c)
BootstrapMethods are bound to thier use without them
-- they are nothing and should be removed
, map asTypeInfo $ m^.methodIdArgumentTypes
)
Throw ->
A Throw operation requires that the first element on the stack is throwable .
c ==> stack 0 `requireSubtype` ("java/lang/Throwable" :: ClassName)
CheckCast fa ->
The check cast operation requires that the first element on the stack
is either a subtype of the cast or the cast is a subtype of the first
element . Often only one of these are true .
c ==> stack 0 `requireSubtype` fa \/ fa `requireSubtype` stack 0
Return (Just B.LRef) ->
We do require that the first element on the stack is a subtype of the return type .
c ==> forall (method^.methodReturnType.simpleType)
\mt -> stack 0 `requireSubtype` mt
InstanceOf ct ->
c ==> ct `requireSubtype` stack 0
_ -> true
| (state, B.opcode -> oper) <-
V.toList $ V.zip typeCheckStates (code ^. codeByteCode)
, let stack n =
case state ^? tcStack.ix n of
Just a -> a
Nothing ->
error $
"Incompatable stack length: " <> show n
<> " at: " <> show theMethodName
<> " bc: " <> show oper
<> " current stack: " <> show (state^.tcStack)
]
Nothing ->
[liftF (TWarning "No type information: unsafely predict no dependencies" True)]
where
methodInvokeTypes = \case
B.InvkSpecial (B.AbsVariableMethodId _ m) -> Right (True, False, m)
B.InvkVirtual m -> Right (False, False, m)
B.InvkStatic (B.AbsVariableMethodId _ m) -> Right (False, True, m)
B.InvkInterface _ (B.AbsInterfaceMethodId m) -> Right (False, False, m)
B.InvkDynamic (B.InvokeDynamic i m') -> Left (i, m')
theMethodName =
mkAbsMethodId cls method
IContent (Jar _) -> (Meta, true)
IContent (MetaData _) -> (Meta, true)
ITarget _ -> (Meta, true)
where
infixl 6 `requireSubtype`
requireSubtype ::
(AsTypeInfo a, AsTypeInfo b)
=> a -> b
-> Stmt Fact
requireSubtype (asTypeInfo -> TRef as) (asTypeInfo -> TRef bs) = and
[ a `requireSubRefType` b | a <- toList as, b <- toList bs]
where
requireSubRefType a b = case a of
B.JTClass s -> case b of
B.JTClass "java/lang/Object" -> true
B.JTClass t -> and
[ unbrokenPath path
| path <- subclassPaths s t hry
]
_ -> true
B.JTArray (JTRef s) -> case b of
B.JTArray (JTRef t) -> s `requireSubRefType` t
_ -> true
_ -> true
requireSubtype _ _ = true
-- Return the type of array execpt if it the typeinfo is null in which case
-- we return Nothing
isArray :: TypeInfo -> TypeInfo
isArray ti =
fromJust $
foldl (\a b -> a >>= meet (asTypeInfo b))
(Just TTop)
(ti ^.._TRef.folded._JTArray)
unbrokenPath :: SubclassPath -> Stmt Fact
unbrokenPath path =
and [ isSubclass f t e | (f, t, e) <- subclassEdges path]
isSubclass :: ClassName -> ClassName -> HEdge -> Stmt Fact
isSubclass cn1 cn2 = \case
Implement -> hasInterface cn1 cn2
Extend -> hasSuperClass cn1 cn2
requireSubclass :: Hierarchy -> ClassName -> ClassName -> Stmt Fact
requireSubclass hry s t = case t of
"java/lang/Object" -> true
_ -> and [ unbrokenPath path | path <- subclassPaths s t hry ]
hasInterface :: ClassName -> ClassName -> Stmt Fact
hasInterface cn1 cn2 = tt (HasInterface cn1 cn2)
hasSuperClass :: ClassName -> ClassName -> Stmt Fact
hasSuperClass cn1 cn2 = tt (HasSuperClass cn1 cn2)
requireMethodHandle :: HasClassName c => Hierarchy -> c -> B.MethodHandle B.High -> Stmt Fact
requireMethodHandle hry cls = \case
B.MHField (B.MethodHandleField _ f)
-> requireField hry cls f
B.MHMethod a -> requireMethod hry cls . AbsMethodId . view asInClass $ case a of
B.MHInvokeVirtual rt -> rt
B.MHInvokeStatic (B.AbsVariableMethodId _ rt) -> rt
B.MHInvokeSpecial (B.AbsVariableMethodId _ rt) -> rt
B.MHNewInvokeSpecial rt -> rt
B.MHInterface (B.MethodHandleInterface (B.AbsInterfaceMethodId rt)) ->
requireMethod hry cls . AbsMethodId . view asInClass $ rt
requireBootstrapMethod :: HasClassName c => c -> Int -> Stmt Fact
requireBootstrapMethod c i = tt (HasBootstrapMethod (c^.className) i)
requireClassNames :: (HasClassName c, HasClassNames a) => c -> a -> Stmt Fact
requireClassNames c =
andOf (classNames . to (requireClassName c))
requireClassName :: (HasClassName c, HasClassName a) => c -> a -> Stmt Fact
requireClassName oc ic =
classExist ic /\ isInnerClassOf oc ic
classExist :: HasClassName a => a -> Stmt Fact
classExist (view className -> cn) =
tt (ClassExist cn)
fieldExist :: AbsFieldId -> Stmt Fact
fieldExist f =
tt (FieldExist f)
methodExist :: AbsMethodId -> Stmt Fact
methodExist f =
tt (MethodExist f)
orFailWith :: String -> [Stmt a] -> Stmt a
orFailWith f = \case
[] -> liftF (TWarning f True)
a:as -> foldr (\/) a as
requireField :: HasClassName c => Hierarchy -> c -> AbsFieldId -> Stmt Fact
requireField hry cn fid = isInnerClassOf cn fid /\ orFailWith ("Could not find " ++ show fid)
[ fieldExist fid' /\ unbrokenPath path
| (fid', path) <- fieldLocationPaths fid hry
]
requireMethod :: HasClassName c => Hierarchy -> c -> AbsMethodId -> Stmt Fact
requireMethod hry cn mid = isInnerClassOf cn mid /\ orFailWith ("Could not find " ++ show mid)
[ methodExist mid' /\ unbrokenPath path
| (mid', _, path) <- superDeclarationPaths mid hry
]
requireNonAbstractMethod :: HasClassName c => Hierarchy -> c -> AbsMethodId -> Stmt Fact
requireNonAbstractMethod hry cn mid = isInnerClassOf cn mid /\ orFailWith ("Could not find " ++ show mid)
[ methodExist mid' /\ unbrokenPath path
| (mid', False, path) <- superDeclarationPaths mid hry
]
codeIsUntuched :: AbsMethodId -> Stmt Fact
codeIsUntuched m =
tt (CodeIsUntuched m)
isInnerClassOf :: (HasClassName c1, HasClassName c2) => c1 -> c2 -> Stmt Fact
isInnerClassOf (view className -> c1) (view className -> c2) =
given (isInnerClass c2) (tt (IsInnerClass c1 c2))
-- | Pair a fact with the conjunction of the logical requirements produced
-- for it. The continuation receives the fact as a true statement ('tt')
-- and returns the list of statements that must all hold.
withLogic :: Fact -> (Stmt Fact -> [Stmt Fact]) -> (Fact, Stmt Fact)
withLogic fact mkStmts =
  let requirements = mkStmts (tt fact)
  in  (fact, and requirements)
-- | Run an action only when a monadic condition evaluates to 'True'.
whenM :: Monad m => (m Bool) -> m () -> m ()
whenM condition action = do
  b <- condition
  when b action
-- | Fold over the constructor methods of a class, i.e. those whose method
-- name is @\<init\>@.
classConstructors :: Fold Class AbsMethodId
classConstructors =
  classAbsMethodIds . filtered (anyOf methodIdName ("<init>" ==))
-- | Fold over the static initializers of a class, i.e. those whose method
-- name is @\<clinit\>@.
classInitializers :: Fold Class AbsMethodId
classInitializers =
  classAbsMethodIds . filtered (anyOf methodIdName ("<clinit>" ==))
-- | Run a reduction callback with an extra payload attached to the value,
-- then strip the payload from the (possibly absent) result again.
payload :: Functor f => p -> ((p, a) -> f (Maybe (p, a))) -> a -> f (Maybe a)
payload ctx fn a = fmap (fmap snd) (fn (ctx, a))
-- | Whether the method carries the ABSTRACT access flag.
methodIsAbstract :: Method -> Bool
methodIsAbstract m = m ^. methodAccessFlags . contains MAbstract
| null | https://raw.githubusercontent.com/ucla-pls/jreduce/8f25f355e2b70681464bdbcc1d03198a07b81573/src/JReduce/Logic.hs | haskell | # LANGUAGE DeriveAnyClass #
# LANGUAGE OverloadedStrings #
# LANGUAGE TypeSynonymInstances #
# LANGUAGE RankNTypes #
lens
containers
vector
base
jvmhs
containers
directory
nfdata
jvm-binary
filepath
text
reduce-util
unorderd-containers
jreduce
| MethodThrows AbsMethodId ClassName
MethodThrows m cn -> toBuilder m <> "[throws]" <> toBuilder cn
f
(m ^. methodExceptions)
& methodExceptions
. LazyText.toLazyText
$ foldMap
showVar variables i =
showsVariable displayK variables i ""
printTotalGraphOrder' lok i n cnf = do
BL.writeFile (indexedFile i "graph.csv") $
G.writeEmptyCSV (showVar variables <$> graph)
writeCNF lok ( indexedFile i " unit-resolved.txt " ) cnf '
writeFile (indexedFile i "dff.txt") (show . map (fmap (showVar variables)) $ G.dff graph)
writeFile (indexedFile i "vars.txt") (show $ G.postOrd graph)
where
-- Just (LS.splitLiterals -> (_, _), cnf') = CNF.unitResolve cnf
TODO: Make this better; choose free variables first.
(graph, _) = G.buildGraphFromNodesAndEdges
[ (a, a) | a <- [ 0..n-1 ]]
[ G.Edge () t f
| c <- S.toList $ CNF.cnfClauses cnf
, let (ff, tt) = LS.splitLiterals c
, (f, t) <- liftM2 (,) (IS.toList ff) (IS.toList tt)
]
-- lookup = V.fromList
. (IS.toList tt ++)
. reverse
generateTotalGraphOrder n cnf'
We also do not reduce type parameters. Thier requirements are just that
all classes mention should exist if this class exist.
these methods and fields
We also do also not reduce enclosing methods. If a class is enclosed
in another class, require that to exist, and if the class is enclosed
in a method require that to exist.
TODO: Reconsider this?
If any field is synthetic we will require it to not be removed, if the
class exist. This helps with many problems.
If class is an interface and the feild is static keep the
classInitializers
If a field is final it has to be set. This means we cannot stub
class initializers and class constructors.
If a field is synthetic or static do not remove any final flags.
final static fields are treated differently than other fields, and
are more like constants.
Since we do not remove the return argument or the arguemnts we have to build
their requirements here.
If you are a constructor, you have to be removed completely if you can be
removed
Require the classNames of the exceptions
Type parameters might contain classes
If a method is abstract, then it has to be implemented in all of its
implementations. Say that A implements I and is not abstract, then
it should implement all the methods of I. For each such path, we
it.
If the methods is not abstract, make sure that the method defintion
does exist. A chain from A <: I <: !I. If I does not exit, either
this method have to stay or we have to remove the implements interface.
TODO: Nessary?
Finally we require that if a method is synthetic is should be
removed alongside its code
An Implements only depends on the interface that it implements, and
its type parameters.
Given that we should keep the extends
An Implements only depends on the class of the supertype and its type
parameters.
In case the superclass have no empty init method we require at least
Given that we should keep the extends
If inner class is ponting to itself, then it required as long at the
class exist.
If the outer class is an this class then we can not remove this
innerclass statement before the innerclass have been removed
NOTE: That all requirements that a class exist also will check if
it must have an innerclass entry that describes that class.
We would love to require the method, but we do not know the
abslocatio of the MethodDescriptor
-- We do currently not reduce bootstrap methods, so their requirements are
-- handled from here.
forallOf (classBootstrapMethods.folded) cls
\(BootstrapMethod mhandle args) ->
/\ requireClassNamesOf cls folded args
, ]
MethodThrows (mkAbsMethodId cls method)
`withLogic` \m ->
[ -- A method throws statement depends on all the class it mentions.
m ==> requireClassNames cls mt
, -- TODO: An indepth analysis of throws of the code?
given (has (methodCode._Just) method) $
codeIsUntuched (mkAbsMethodId cls method) ==> m
, -- Any class mentioned in this setting should extend throwable.
-- , -- TODO: I this method extends a method it has to have it's execeptions.
-- forall (superDeclarationPaths mt hry)
-- \(decl, isAbstract, path) -> given isAbstract
$ /\ unbrokenPath path = = > m
]
If the code was not stubbed, then we have to require that the
classes in the exception table, stack map, and byte-code instructions
exits
c ==> requireClassNamesOf cls (codeExceptionTable.folded) code
When we store an item in the array, it should be a subtype of the
content of the array.
element on the stack has to be a subclass of fields class.
TODO: Experimental overapproximation.
The idea is to require all extensions of the class variable.
element on the stack has to be a subclass of fields class, and
a static methods call (no-object) and a dynamic call (no-class).
they are nothing and should be removed
Return the type of array execpt if it the typeinfo is null in which case
we return Nothing | # LANGUAGE LambdaCase #
# LANGUAGE DeriveGeneric #
# LANGUAGE ApplicativeDo #
# LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE FlexibleContexts #
# LANGUAGE TemplateHaskell #
# LANGUAGE EmptyCase #
# LANGUAGE ViewPatterns #
# LANGUAGE TypeFamilies #
# LANGUAGE FlexibleInstances #
# LANGUAGE BlockArguments #
# LANGUAGE TupleSections #
module JReduce.Logic where
import Control.Lens hiding ( andOf
, orOf
, (<.>)
)
import qualified Data.Set as S
import qualified Data.Map.Strict as M
import qualified Data.IntSet as IS
import qualified Data.Vector as V
import Data.Foldable hiding ( and
, or
)
import Data.Maybe
import Text.Printf
import Data.IORef
import Data.Tuple
import Data.Char ( isNumber )
import Data.Monoid
import Text.Show
import Control.Monad
import Control.Monad.IO.Class
import qualified Data.List as L
import qualified Data.List.NonEmpty as NE
import GHC.Generics ( Generic )
import Prelude hiding ( fail
, not
, and
, or
, lookup
)
import qualified Data . ByteString as BS
import qualified Data.ByteString.Lazy as BL
import Jvmhs.Data.Type
import Jvmhs.TypeCheck
import Jvmhs.Data.Code
import Jvmhs hiding ( methodExist
, fieldExist
)
import qualified Jvmhs
import qualified Data.IntMap.Strict as IntMap
import qualified Data.Tree as T
import System.Directory
import Control.DeepSeq
import qualified Language.JVM as B
import Language.JVM.ByteCode ( ByteCodeOpr(..) )
import System.FilePath
import qualified Data.Text as Text
import qualified Data.Text.Lazy.IO as LazyText
import qualified Data.Text.Lazy as LazyText
import qualified Data.Text.Lazy.Builder as LazyText
import qualified Data.Text.Lazy.Builder as Builder
import Control.Reduce.Boolean
import Control.Reduce.Graph as G
import Control.Reduce.Boolean.CNF as CNF
import Control.Reduce.Progression
import qualified Control.Reduce.Boolean.LiteralSet
as LS
import Control.Reduce.Problem
import Control.Reduce.Reduction
import Control.Reduce.Util.Logger as L
import qualified Data.HashSet as HS
import JReduce.Target
import JReduce.Config
-- | A reducible item of the target: the unit of granularity at which the
-- reducer may keep or drop a part of the input.
data Item
  = IContent Content                                  -- ^ a whole content entry (class file, jar, metadata)
  | ICode ((Class, Method), Code)                     -- ^ the bytecode body of a method
  | ITarget Target                                    -- ^ the entire reduction target
  | ISuperClass (Class, (Annotated ClassType))        -- ^ the @extends@ clause of a class
  | IImplements (Class, (Annotated ClassType))        -- ^ one @implements@ entry of a class
  | IField (Class, Field)                             -- ^ a field declaration
  | IFieldFinal (Class, Field)                        -- ^ the @final@ flag of a field
  | IMethod (Class, Method)                           -- ^ a method declaration
  | IInnerClass (Class, InnerClass)                   -- ^ an inner-class attribute entry
  | IBootstrapMethod (Class, (Int, BootstrapMethod))  -- ^ a bootstrap method with its index
makePrisms ''Item
-- | A fact about the program; these are the variables of the dependency
-- logic built by 'logic'.
data Fact
  = ClassExist ClassName               -- ^ the class is present
  | CodeIsUntuched AbsMethodId         -- ^ the method body is kept (not stubbed)
  | HasSuperClass ClassName ClassName  -- ^ the first class extends the second
  | HasInterface ClassName ClassName   -- ^ the first class implements the second
  | FieldExist AbsFieldId              -- ^ the field is present
  | FieldIsFinal AbsFieldId            -- ^ the field keeps its @final@ flag
  | MethodExist AbsMethodId            -- ^ the method is present
  | IsInnerClass ClassName ClassName   -- ^ inner-class entry relating the two classes
  | HasBootstrapMethod ClassName Int   -- ^ the class keeps bootstrap method at this index
  | Meta                               -- ^ catch-all for items with no logical content
  deriving (Eq, Ord, Generic, NFData)
-- | Render a 'Fact' as a compact, human-readable builder. Used as the
-- serialized key for dump files and for matching facts against the
-- configured core set (see 'initializeKeyFunction').
displayFact :: Fact -> Builder.Builder
displayFact = \case
  ClassExist cn -> toBuilder cn
  CodeIsUntuched md -> toBuilder md <> "!code"
  HasSuperClass cn1 cn2 -> toBuilder cn1 <> "<S]" <> toBuilder cn2
  HasInterface cn1 cn2 -> toBuilder cn1 <> "<I]" <> toBuilder cn2
  FieldExist fd -> toBuilder fd
  FieldIsFinal fd -> toBuilder fd <> "[final]"
  MethodExist md -> toBuilder md
  IsInnerClass cn1 cn2 -> toBuilder cn1 <> "[innerOf]" <> toBuilder cn2
  HasBootstrapMethod cn b ->
    toBuilder cn <> "[bootstrap]" <> Builder.fromString (show b)
  Meta -> "meta"
-- | Require, conjunctively, every class name reachable through the given
-- fold of @s@, each relative to the class @c@ that mentions it (see
-- 'requireClassName').
requireClassNamesOf ::
  (HasClassName c, HasClassNames a)
  => c -> Getting (Endo (Endo (Stmt Fact))) s a -> s -> Stmt Fact
requireClassNamesOf c l a =
  forallOf (l . classNames) a (requireClassName c)
-- | Index the elements of a set: returns a map from each element to its
-- position in ascending order, together with the vector of the elements in
-- that same order.
decompose :: Ord a => S.Set a -> (M.Map a Int, V.Vector a)
decompose s = (M.fromList (zip elems [0 ..]), V.fromList elems)
  where elems = toList s
-- | Is the fact inside the given scope of class names? A fact is attributed
-- to the single class it is "about"; 'Meta' is always considered in scope.
checkScope :: S.Set ClassName -> Fact -> Bool
checkScope scope = \case
  ClassExist a -> fn $ a
  CodeIsUntuched m -> fn $ m ^. className
  HasSuperClass cn _ -> fn $ cn
  HasInterface cn _ -> fn $ cn
  FieldExist f -> fn $ f ^. className
  FieldIsFinal f -> fn $ f ^. className
  MethodExist m -> fn $ m ^. className
  IsInnerClass cn _ -> fn $ cn
  -- NOTE: MethodThrows is currently disabled in 'Fact'; its clause is kept
  -- here, commented out, so it can be restored together with the
  -- constructor:
  -- MethodThrows m _ -> fn $ m ^. className
  HasBootstrapMethod cn _ -> fn $ cn
  Meta -> True
  where fn k = k `S.member` scope
itemR :: PartialReduction Item Item
itemR f' = \case
ITarget t -> fmap ITarget <$> targetR f' t
IContent c -> fmap IContent <$> contentR f' c
IMethod (c, m) -> fmap (IMethod . (c, )) <$> (part $ methodR c) f' m
IField (c, m) -> fmap (IField . (c, )) <$> (part $ fieldR c) f' m
a -> pure (Just a)
where
contentR :: PartialReduction Content Item
contentR f = \case
ClassFile c -> fmap ClassFile <$> (part classR) f c
Jar c -> fmap Jar <$> (deepDirForestR . reduceAs _IContent) f c
a -> pure $ Just a
targetR :: PartialReduction Target Item
targetR = deepDirTreeR . reduceAs _IContent
classR :: Reduction Class Item
classR f c = do
_super <- case c ^. classSuper of
Just a
| a ^. annotatedContent . to classNameFromType == "java/lang/Object" -> pure
$ Just a
| otherwise -> (payload c . reduceAs _ISuperClass) f a <&> \case
Just a' -> Just a'
Nothing ->
Just (withNoAnnotation (classTypeFromName "java/lang/Object"))
Nothing -> pure $ Nothing
fields <- (listR . payload c . reduceAs _IField) f (c ^. classFields)
methods <- (listR . payload c . reduceAs _IMethod) f (c ^. classMethods)
innerClasses <- (listR . payload c . reduceAs _IInnerClass)
f
(c ^. classInnerClasses)
_interfaces <- (listR . payload c . reduceAs _IImplements)
f
(c ^. classInterfaces)
bootstrapMethods <-
(iso IntMap.toAscList IntMap.fromAscList
. listR . payload c . reduceAs _IBootstrapMethod)
f (c ^.classBootstrapMethods)
pure
$ c & classSuper .~ _super
& classFields .~ fields
& classMethods .~ methods
& classInnerClasses .~ innerClasses
& classInterfaces .~ _interfaces
& classBootstrapMethods .~ bootstrapMethods
fieldR :: Class -> Reduction Field Item
fieldR cls fn f = do
if f ^. fieldAccessFlags . contains FFinal
then fn (IFieldFinal (cls, f)) <&> \case
Just (IFieldFinal _) -> f
_ -> f & fieldAccessFlags . at FFinal .~ Nothing
else pure f
methodR :: Class -> Reduction Method Item
methodR cls f m = do
t <- case m ^. methodCode of
Just c -> f (ICode ((cls, m), c)) <&> \case
Just (ICode (_, c')) -> Just c'
_ -> Nothing
_ -> pure Nothing
_ methodThrows < - ( listR . payload ( cls , m ) . reduceAs )
pure
$ (case t of
Just c -> m & methodCode .~ Just c
Nothing -> stub m
)
.~ _ methodThrows
-- | Render a lazy-text 'Builder.Builder' all the way down to a 'String'.
unBuilder :: Builder.Builder -> String
unBuilder b = LazyText.unpack (Builder.toLazyText b)
-- | Show logical variable @i@ using the variable table: the fact (rendered
-- with @displayK@) followed by its item index path (stored reversed, hence
-- the 'reverse'). Falls back to showing the raw integer when @i@ is out of
-- bounds of the table.
showsVariable :: (k -> Builder.Builder) -> V.Vector (k, [Int]) -> Int -> ShowS
showsVariable displayK variables i =
  case variables V.!? i of
    Just (k, idx) ->
      showString (unBuilder $ displayK k <> display (reverse idx))
    Nothing ->
      shows i
initializeKeyFunction ::
forall m. MonadIOReader Config m
=> LogicConfig -> Target -> FilePath -> m (V.Vector (Fact, [Int]), Int, ([Int], Item) -> m CNF)
initializeKeyFunction cfg trg wf = L.phase "Initializing key function" do
hry <- fetchHierachy wf (targetClasses trg)
core <- view cfgCore
let
items =
(if reverseOrder cfg then reverse else id)
(itemsOfTarget trg)
tpinfo <- L.phase "Type checking methods" $ M.fromList . catMaybes <$> forM items \case
(_, ICode ((cls, method), code)) -> do
let theMethodName = mkAbsMethodId cls method
case typeCheck hry theMethodName (method^.methodAccessFlags.contains MStatic) code of
(Just (i, x), _) -> do
L.err $ "In " <> display theMethodName
case code^?codeByteCode.ix i of
Just s -> do
L.err $ " at offset: " <> display (B.offset s)
L.err $ " at opcode: " <> display (B.opcode s)
Nothing -> do
return ()
L.err $ " got: " <> display x
view cfgUnsafe >>= \case
True -> pure Nothing
False -> fail $ "Could not typecheck " <> show theMethodName
(Nothing, vc) -> pure (Just (theMethodName, vc))
_ ->
pure Nothing
let
lfn = logic cfg hry tpinfo
factsToVar :: M.Map Fact (S.Set Int)
factsToVar =
M.fromListWith S.union
[ (f, S.singleton i)
| (i, f) <- V.toList (V.indexed facts)
]
back :: V.Vector ([Int], Item)
back =
V.fromList items
facts :: V.Vector Fact
facts =
V.map (fst . lfn . snd) back
variables :: V.Vector (Fact, [Int])
variables = V.zip facts (V.map fst back)
cores :: V.Vector Bool
cores =
V.map (\fact-> serializeWith displayFact fact `HS.member` core) facts
indiciesToVar :: M.Map [Int] Int
indiciesToVar =
M.fromList (map swap . V.toList . V.indexed . V.map fst $ back)
maxid <- liftIO $ newIORef (V.length back)
let
handler :: ([Int], (Fact, Stmt Fact)) -> m CNF
handler (idx, (fact, stmt)) = L.logtime L.DEBUG ("Processing " <> debuginfo) $ do
case stmtWarnings stmt of
[] -> return ()
msgs -> do
L.err
$ "Warnings found while computing logical dependencies for "
<> displayFact fact
forM_ msgs \msg ->
L.err (Builder.fromString msg)
view cfgUnsafe >>= \case
True -> pure ()
False -> fail $ "warnings found while computing logical depenencies"
mid <- liftIO $ readIORef maxid
let cnf = toMinimalCNF mid nnfAfter
liftIO $ writeIORef maxid
(max mid . maybe minBound fst . IS.maxView $ cnfVariables cnf)
whenM (view cfgDumpItems) . liftIO $ do
LazyText.appendFile (wf </> "items.txt") . LazyText.toLazyText
$ displayText key <> display (reverse idx) <> "\n"
LazyText.appendFile (wf </> "items-logical.txt") . LazyText.toLazyText
$ displayText key <> display (reverse idx) <> " " <> display v <> "\n"
<> " LV1 " <> displayString (showsStmtWith showsFact stmt "\n")
<> " LV2 " <> displayString (showsNnfWith showsFact nnf "\n")
<> " LV3 " <> displayString (showsNnfWith (showsVariable displayFact variables) nnfAfter "\n")
<> foldMap ((" " <>) . displayClause) (cnfClauses cnf)
return cnf
where
v = indiciesToVar M.! idx
key = serializeWith displayFact fact
isCore = cores V.! v
debuginfo =
displayText key <> (if isCore then " CORE" else "")
nnf :: Nnf Fact
nnf =
flattenNnf . nnfFromStmt . fromStmt $ stmt
nnfAfter :: Nnf Int
nnfAfter =
flattenNnf . nnfFromStmt . fromStmt
. (case idx of
[] -> id
_:rest -> \s ->
s /\ (tt (indiciesToVar M.! idx) ==> tt (indiciesToVar M.! rest))
)
. (if not isCore then id else \s -> s /\ tt (indiciesToVar M.! idx))
. runIdentity
$ traverseVariables (\f -> pure $ forallOf (ix f.folded) factsToVar \i -> tt i
) stmt
showsFact :: Fact -> ShowS
showsFact = showString . unBuilder . displayFact
displayClause c =
displayString (LS.displayImplication (showsVariable displayFact variables) c "\n")
L.info . L.displayf "Found %d items." $ L.length items
L.info . L.displayf "Found %d facts." $ M.size factsToVar
L.info . L.displayf "The core is %d of them." $ L.length core
mid <- liftIO $ readIORef maxid
return (variables, mid, handler . over _2 lfn)
where
itemsOfTarget :: Target -> [([Int], Item)]
itemsOfTarget = itoListOf (deepSubelements itemR) . review _ITarget
computeCNF ::
MonadIOReader Config m
=> (Int -> ShowS)
-> (a -> m CNF)
-> FilePath
-> [a]
-> m CNF
computeCNF sv keyFun wf items = L.phase "Compute CNF" do
cnf <- CNF . foldMap cnfClauses <$> mapM keyFun items
whenM (view cfgDumpLogic) . liftIO $ do
LazyText.appendFile (wf </> "cnf.txt")
. LazyText.toLazyText
$ foldMap (\c -> displayString $ LS.displayImplication sv
c "\n") (cnfClauses cnf)
return cnf
logProgression ::
forall m k.
MonadIOReader Config m
=> FilePath
-> (k -> Builder.Builder)
-> V.Vector (k, [Int])
-> CNF
-> IS.IntSet
-> m (NE.NonEmpty IS.IntSet)
logProgression prog displayK variables cnf is = do
let (limitedCNF, lok) = limitCNF is cnf
let progs = calculateSimpleProgression cnf is
dumpClosures <- view cfgDumpClosures
when dumpClosures . liftIO $ do
createDirectoryIfMissing True prog
i <- findNext 0
LazyText.writeFile (indexedFile i "progression.txt")
. LazyText.toLazyText
. foldMap
(\a -> (fold
. L.intersperse " | "
. map (displayShowS . showsVariable displayK variables)
$ IS.toList a)
<> "\n")
$ progs
LazyText.writeFile (indexedFile i "cnf.txt")
. LazyText.toLazyText
$ foldMap
(\c ->
displayString $ LS.displayImplication
(showsVariable displayK variables . (lok V.!))
c "\n"
)
(cnfClauses limitedCNF)
LazyText.writeFile ( indexedFile i " variableorder.txt " )
( \i - > ( showsVariable displayK variables i ) < > " \n " )
( generateTotalGraphOrder ( ) limitedCNF )
return progs
where
. filter ( Prelude.not . ( ` IS.member ` tt ) ) $ G.postOrd graph
indexedFile :: Int -> String -> FilePath
indexedFile i name = printf (prog </> "%04i-%s") i name
findNext i = do
let path = indexedFile i "progression.txt"
b <- doesPathExist path
if not b then return i else findNext (i+1)
describeLogicProblem ::
forall a m.
( MonadFail m
, MonadIOReader Config m
)
=> LogicConfig
-> FilePath
-> Problem a Target
-> m (CNF, V.Vector (Fact, [Int]), Problem a IS.IntSet)
describeLogicProblem cfg wf p = (\((a,b), c) -> (a,b,c)) <$> flip refineProblemA' p \s -> do
(variables, _, keyFun) <- initializeKeyFunction cfg s wf
cnf <- computeCNF (showsVariable displayFact variables) keyFun wf
$ itoListOf (deepSubelements itemR) (ITarget s)
An IPF should have no clauses with only positive variables .
case CNF.nonIPFClauses cnf of
[] ->
return ()
clauses -> do
L.err "The created CNF was not an IPF, this is a critical error."
L.err "Please report at -pls/jreduce."
forM_ clauses \cls ->
L.err (displayString $ LS.displayImplication (showsVariable displayFact variables) cls "")
fail "The created CNF was not IPF"
let
(cnf', lookup) = CNF.limitCNF (cnfVariables cnf) cnf
n = V.length lookup
(graph, _) = G.buildGraphFromNodesAndEdges
[ (a, a) | a <- [ 0..n-1 ]]
[ G.Edge () t f
| c <- S.toList $ CNF.cnfClauses cnf'
, let (ff', tt') = LS.splitLiterals c
, (f, t) <- liftM2 (,) (IS.toList ff') (IS.toList tt')
]
order = V.fromList . reverse $ G.postOrd graph
revorder =
inverseOrder order
cnf'' =
CNF.vmapCNF (revorder V.!) cnf'
variables' =
V.map ((variables V.!?) . (lookup V.!)) order
grphvariables =
V.map (variables V.!?) lookup
showGraphVar i =
showsVariable displayFact (V.map fromJust grphvariables) i ""
whenM (view cfgDumpLogic) . liftIO $ do
BL.writeFile (wf </> "variable-graph.csv") $
G.writeEmptyCSV (showGraphVar <$> graph)
LazyText.writeFile (wf </> "cnf-2.txt")
. LazyText.toLazyText
$ foldMap
(\c ->
displayString $ LS.displayImplication
(\i -> (showsVariable displayFact (V.map fromJust variables') i))
c "\n"
)
(cnfClauses cnf'')
writeFile (wf </> "dff.txt") $
T.drawForest (map (fmap showGraphVar) $ G.dff graph)
LazyText.writeFile (wf </> "variableorder.txt") $
LazyText.toLazyText
$ foldMap
(\i -> displayShowS (showsVariable displayFact (V.map fromJust grphvariables) i) <> "\n")
order
let
fromVars :: IS.IntSet -> Maybe Target
fromVars vars = preview _ITarget =<<
limit (deepReduction itemR) (`S.member` varset) (ITarget s)
where
varset = S.fromList . map snd
. mapMaybe (variables' V.!)
. IS.toList
$ vars
return
( (cnf'', V.map fromJust variables')
, (fromVars, cnfVariables cnf')
)
-- | Refine an IntSet-based problem to one over lists of variables, closing
-- every candidate under the logical closure of the given CNF (assumed to be
-- an IPF) before evaluation.
approxLogicProblem ::
  CNF
  -> Problem a IS.IntSet
  -> Problem a [Int]
approxLogicProblem ipf =
  refineProblem
    (\s -> (Just . calculateLogicalClosure ipf . IS.fromList, IS.toList s))
-- | Display the string produced by applying a 'ShowS' to the empty string.
displayShowS :: ShowS -> Builder.Builder
displayShowS showsFn = displayString (showsFn "")
describeGraphProblem ::
MonadIOReader Config m
=> LogicConfig
-> Bool
^ choose the first item
-> FilePath
-> Problem a Target
-> m (Problem a [IS.IntSet])
describeGraphProblem cfg choose_first wf p = flip refineProblemA p \s -> do
(variables, mid, keyFun) <- initializeKeyFunction cfg s wf
cnf <- computeCNF (showsVariable displayFact variables) keyFun wf
$ itoListOf (deepSubelements itemR) (ITarget s)
let
(required, edges') = fold
[ case (if choose_first
then over both IS.minView
else over both IS.maxView) $ LS.splitLiterals clause of
(Nothing , Just (t, _)) -> (IS.singleton t, mempty)
(Just (f, _), Just (t, _)) -> (mempty, S.singleton (f,t))
_ -> error "CNF is not IPF"
| clause <- S.toList $ cnfClauses cnf
]
(graph, rev) = buildGraphFromNodesAndEdges
[(k,k) | k <- [0..mid - 1]]
[Edge () f t | (f, t) <- S.toList edges']
core = closure graph (mapMaybe rev $ IS.toList required)
fromClosures cls = preview _ITarget =<<
limit (deepReduction itemR) (`S.member` varset) (ITarget s)
where
varset = S.fromList . map snd
. mapMaybe (variables V.!?)
. map (nodeLabel . (nodes graph V.!))
. IS.toList . IS.unions
$ core:cls
_targets =
filter (not . IS.null)
. map (IS.\\ core)
$ closures graph
dumpGraphInfo wf
(graph <&> flip (showsVariable displayFact variables) "")
core _targets
( fromIntegral . IS.size .
. ( V.length variables )
. IS.unions
return (fromClosures, _targets)
logic :: LogicConfig -> Hierarchy -> M.Map AbsMethodId (V.Vector TypeCheckState) -> Item -> (Fact, Stmt Fact)
logic LogicConfig{..} hry tpinfo = \case
IContent (ClassFile cls) -> ClassExist (cls ^. className)
`withLogic` \c ->
c ==> requireClassNamesOf cls (classTypeParameters.folded) cls
, c ==> requireClassNamesOf cls (classAnnotations.folded) cls
If the class is a enum , it needs to extend java.lang . and have
given (cls^.classAccessFlags.contains CEnum) $ c ==>
requireSubclass hry (cls^.className) "java/lang/Enum"
/\ given (cls^?classSuper._Just.simpleType == Just "java/lang/Enum")
( and
[ requireMethod hry cls . mkAbsMethodId cls $ "values"
<:> MethodDescriptor []
(ReturnDescriptor . Just . JTRef . JTArray .JTRef . JTClass $ cls^.className)
, requireMethod hry cls . mkAbsMethodId cls $ "valueOf"
<:> MethodDescriptor ["Ljava/lang/String;"]
(ReturnDescriptor . Just . JTRef . JTClass $ cls^.className)
, requireField hry cls . mkAbsFieldId cls $ "$VALUES"
<:> FieldDescriptor (JTRef . JTArray .JTRef . JTClass $ cls^.className)
]
)
forallOf (classEnclosingMethod._Just) cls
\(cn, mMId) -> c ==> case mMId of
Just m -> codeIsUntuched (mkAbsMethodId cn m) /\ isInnerClassOf cls cn
Nothing -> requireClassName cls cn
]
IField (cls, field) -> FieldExist (mkAbsFieldId cls field)
`withLogic` \f ->
[ f ==> requireClassNamesOf cls fieldType field
, f ==> requireClassNamesOf cls (fieldAnnotations.folded) field
given (FSynthetic `S.member` flags) do
classExist cls ==> f
given (cls^.classAccessFlags .contains CAbstract && field^.fieldAccessFlags.contains FStatic) do
forallOf classInitializers cls \m ->
f ==> codeIsUntuched m
]
where flags = field^.fieldAccessFlags
IFieldFinal (cls, field) -> FieldIsFinal (mkAbsFieldId cls field)
`withLogic` \f ->
if FStatic `S.member` flags
then
forallOf classInitializers cls \m ->
f ==> codeIsUntuched m
else
forallOf classConstructors cls \m ->
f ==> codeIsUntuched m
given (FSynthetic `S.member` flags \/ FStatic `S.member` flags) $
fieldExist (mkAbsFieldId cls field) ==> f
]
where flags = field^.fieldAccessFlags
IMethod (cls, method) -> MethodExist (mkAbsMethodId cls method)
`withLogic` \m ->
m ==> requireClassNamesOf cls
(methodReturnType.classNames <> methodParameters.folded.classNames)
method
given (method^.methodIdName == "<init>") $
m ==> codeIsUntuched (mkAbsMethodId cls method)
, m ==> requireClassNamesOf cls (methodExceptions.folded) method
m ==> requireClassNamesOf cls
(methodTypeParameters.folded)
method
, m ==> requireClassNamesOf cls
(methodAnnotations.folded)
method
, if method^.methodAccessFlags.contains MAbstract
then
require it to be true or one of it 's super classes to have implemented
forall (implementationPaths (cls^.className) hry)
\(def, isAbstract, path) ->
given (not isAbstract)
$ m /\ unbrokenPath path
==> requireNonAbstractMethod hry cls (mkAbsMethodId def method)
else
forall (superDeclarationPaths (mkAbsMethodId cls method) hry)
\(decl, isAbstract, path) -> given isAbstract
$ methodExist decl /\ unbrokenPath path ==>
requireNonAbstractMethod hry cls (mkAbsMethodId cls method)
, m ==> requireClassNamesOf cls (methodDefaultAnnotation._Just) method
given (method^.methodAccessFlags.contains MSynthetic)
$ m ==> codeIsUntuched (mkAbsMethodId cls method)
]
IImplements (cls, ct) -> HasInterface (cls^.className) (ct^.simpleType)
`withLogic` \i ->
i ==> requireClassNames cls ct
given keepHierarchy $ classExist cls ==> i
]
ISuperClass (cls, ct) -> HasSuperClass (cls^.className) (ct^.simpleType)
`withLogic` \s ->
s ==> requireClassNames cls ct
one of it 's constructors to exist .
let
ctc = ct^.simpleType
mid = mkAbsMethodId ctc ("<init>:()V" :: MethodId)
in
s ==>
case Jvmhs.methodExist mid hry of
Just (view stubMethodAccess -> access)
| access >= Protected
\/ access == Default /\ ctc^.package == cls^.className.package ->
methodExist mid \/ existOf classConstructors cls codeIsUntuched
_ ->
existOf classConstructors cls codeIsUntuched
given keepHierarchy $ classExist cls ==> s
]
IInnerClass (cls, ic) -> IsInnerClass (cls^.className) (ic^.innerClass)
`withLogic` \i ->
An innerclass depends on all classes referenced by the innerClass .
i ==> requireClassNames cls ic
given (cls^.className == ic^.innerClass) $
classExist cls ==> i
given (Just (cls^.className) == ic^.innerOuterClass) $
classExist (ic^.innerClass) ==> i
the innerclass exist . The rule is that if a class refer to an innerclass
]
IBootstrapMethod (cls, (i, btm)) ->
HasBootstrapMethod (cls ^.className) i `withLogic` \bm ->
[ bm ==> requireMethodHandle hry cls (btm^.bootstrapMethodHandle)
, bm ==> forallOf (bootstrapMethodArguments.folded) btm \case
VClass rf -> requireClassNames cls rf
VMethodType md ->
requireClassNames cls md
VMethodHandle mh ->
requireMethodHandle hry cls mh
_ -> true
]
c = = >
IMethodThrows ( ( cls , method ) , mt ) - >
( mt^.simpleType )
m = = > mt^.simpleType ` requireSubtype ` ( " java / lang / Throwable " : : ClassName )
ICode ((cls, method), code) -> CodeIsUntuched theMethodName
`withLogic` \c -> case M.lookup theMethodName tpinfo of
Just typeCheckStates ->
c ==> requireClassNamesOf cls (codeStackMap._Just) code
, c ==> requireClassNamesOf cls (codeByteCode.folded) code
, c ==> forallOf (codeExceptionTable.folded.ehCatchType._Just) code \ct ->
requireClassName cls ct
/\ ct `requireSubtype` ("java/lang/Throwable" :: ClassName)
] ++
[ case oper of
ArrayStore _ ->
c ==> stack 0 `requireSubtype` isArray (stack 2)
Get fa fid ->
For a get value is valid the field has to exist , and the first
c ==> requireField hry cls fid
/\ given (fa /= B.FldStatic) (stack 0 `requireSubtype` fid^.className)
Push (Just (VClass (JTClass cn))) ->
c ==> forall (S.fromList p') unbrokenPath
where
p'=
[ path
| b <- superclasses cn hry
, path <- subclassPaths cn b hry
]
Put fa fid ->
For a put value is valid the field has to exist , and the first
the second element have to be a subtype of the type of the field
c ==> requireField hry cls fid
/\ stack 0 `requireSubtype` fid^.fieldIdType
/\ given (fa /= B.FldStatic)
(stack 1 `requireSubtype` fid^.className)
Invoke a ->
For the methods there are three general cases , a regular method call ,
methodRequirements
/\ (c ==> and
[ s `requireSubtype` t
| (s, t) <- zip (state ^. tcStack) (reverse stackTypes)
]
)
where
(methodRequirements, stackTypes) =
case methodInvokeTypes a of
Right (isSpecial, isStatic, m) ->
( let mid = AbsMethodId $ m^.asInClass
in (c ==> if isSpecial then methodExist mid else requireMethod hry cls mid)
/\ given (Text.isPrefixOf "access$" (m^.methodIdName))
(methodExist mid ==> c)
/\ given (
( maybe False (isNumber . fst) . Text.uncons . last . Text.splitOn "$"
$ mid^.className.fullyQualifiedName
)
/\ mid^.className /= cls ^.className)
(classExist (mid^.className) ==> c)
, [asTypeInfo $ m^.asInClass.className | not isStatic]
<> (map asTypeInfo $ m^.methodIdArgumentTypes)
)
Left (i, m) ->
( ( c ==> requireBootstrapMethod cls (fromIntegral i) )
/\ ( requireBootstrapMethod cls (fromIntegral i) ==> c)
BootstrapMethods are bound to thier use without them
, map asTypeInfo $ m^.methodIdArgumentTypes
)
Throw ->
A Throw operation requires that the first element on the stack is throwable .
c ==> stack 0 `requireSubtype` ("java/lang/Throwable" :: ClassName)
CheckCast fa ->
The check cast operation requires that the first element on the stack
is either a subtype of the cast or the cast is a subtype of the first
element . Often only one of these are true .
c ==> stack 0 `requireSubtype` fa \/ fa `requireSubtype` stack 0
Return (Just B.LRef) ->
We do require that the first element on the stack is a subtype of the return type .
c ==> forall (method^.methodReturnType.simpleType)
\mt -> stack 0 `requireSubtype` mt
InstanceOf ct ->
c ==> ct `requireSubtype` stack 0
_ -> true
| (state, B.opcode -> oper) <-
V.toList $ V.zip typeCheckStates (code ^. codeByteCode)
, let stack n =
case state ^? tcStack.ix n of
Just a -> a
Nothing ->
error $
"Incompatable stack length: " <> show n
<> " at: " <> show theMethodName
<> " bc: " <> show oper
<> " current stack: " <> show (state^.tcStack)
]
Nothing ->
[liftF (TWarning "No type information: unsafely predict no dependencies" True)]
where
methodInvokeTypes = \case
B.InvkSpecial (B.AbsVariableMethodId _ m) -> Right (True, False, m)
B.InvkVirtual m -> Right (False, False, m)
B.InvkStatic (B.AbsVariableMethodId _ m) -> Right (False, True, m)
B.InvkInterface _ (B.AbsInterfaceMethodId m) -> Right (False, False, m)
B.InvkDynamic (B.InvokeDynamic i m') -> Left (i, m')
theMethodName =
mkAbsMethodId cls method
IContent (Jar _) -> (Meta, true)
IContent (MetaData _) -> (Meta, true)
ITarget _ -> (Meta, true)
where
infixl 6 `requireSubtype`
requireSubtype ::
(AsTypeInfo a, AsTypeInfo b)
=> a -> b
-> Stmt Fact
requireSubtype (asTypeInfo -> TRef as) (asTypeInfo -> TRef bs) = and
[ a `requireSubRefType` b | a <- toList as, b <- toList bs]
where
requireSubRefType a b = case a of
B.JTClass s -> case b of
B.JTClass "java/lang/Object" -> true
B.JTClass t -> and
[ unbrokenPath path
| path <- subclassPaths s t hry
]
_ -> true
B.JTArray (JTRef s) -> case b of
B.JTArray (JTRef t) -> s `requireSubRefType` t
_ -> true
_ -> true
requireSubtype _ _ = true
isArray :: TypeInfo -> TypeInfo
isArray ti =
fromJust $
foldl (\a b -> a >>= meet (asTypeInfo b))
(Just TTop)
(ti ^.._TRef.folded._JTArray)
unbrokenPath :: SubclassPath -> Stmt Fact
unbrokenPath path =
and [ isSubclass f t e | (f, t, e) <- subclassEdges path]
isSubclass :: ClassName -> ClassName -> HEdge -> Stmt Fact
isSubclass cn1 cn2 = \case
Implement -> hasInterface cn1 cn2
Extend -> hasSuperClass cn1 cn2
requireSubclass :: Hierarchy -> ClassName -> ClassName -> Stmt Fact
requireSubclass hry s t = case t of
"java/lang/Object" -> true
_ -> and [ unbrokenPath path | path <- subclassPaths s t hry ]
hasInterface :: ClassName -> ClassName -> Stmt Fact
hasInterface cn1 cn2 = tt (HasInterface cn1 cn2)
hasSuperClass :: ClassName -> ClassName -> Stmt Fact
hasSuperClass cn1 cn2 = tt (HasSuperClass cn1 cn2)
requireMethodHandle :: HasClassName c => Hierarchy -> c -> B.MethodHandle B.High -> Stmt Fact
requireMethodHandle hry cls = \case
B.MHField (B.MethodHandleField _ f)
-> requireField hry cls f
B.MHMethod a -> requireMethod hry cls . AbsMethodId . view asInClass $ case a of
B.MHInvokeVirtual rt -> rt
B.MHInvokeStatic (B.AbsVariableMethodId _ rt) -> rt
B.MHInvokeSpecial (B.AbsVariableMethodId _ rt) -> rt
B.MHNewInvokeSpecial rt -> rt
B.MHInterface (B.MethodHandleInterface (B.AbsInterfaceMethodId rt)) ->
requireMethod hry cls . AbsMethodId . view asInClass $ rt
requireBootstrapMethod :: HasClassName c => c -> Int -> Stmt Fact
requireBootstrapMethod c i = tt (HasBootstrapMethod (c^.className) i)
requireClassNames :: (HasClassName c, HasClassNames a) => c -> a -> Stmt Fact
requireClassNames c =
andOf (classNames . to (requireClassName c))
requireClassName :: (HasClassName c, HasClassName a) => c -> a -> Stmt Fact
requireClassName oc ic =
classExist ic /\ isInnerClassOf oc ic
classExist :: HasClassName a => a -> Stmt Fact
classExist (view className -> cn) =
tt (ClassExist cn)
fieldExist :: AbsFieldId -> Stmt Fact
fieldExist f =
tt (FieldExist f)
methodExist :: AbsMethodId -> Stmt Fact
methodExist f =
tt (MethodExist f)
orFailWith :: String -> [Stmt a] -> Stmt a
orFailWith f = \case
[] -> liftF (TWarning f True)
a:as -> foldr (\/) a as
requireField :: HasClassName c => Hierarchy -> c -> AbsFieldId -> Stmt Fact
requireField hry cn fid = isInnerClassOf cn fid /\ orFailWith ("Could not find " ++ show fid)
[ fieldExist fid' /\ unbrokenPath path
| (fid', path) <- fieldLocationPaths fid hry
]
requireMethod :: HasClassName c => Hierarchy -> c -> AbsMethodId -> Stmt Fact
requireMethod hry cn mid = isInnerClassOf cn mid /\ orFailWith ("Could not find " ++ show mid)
[ methodExist mid' /\ unbrokenPath path
| (mid', _, path) <- superDeclarationPaths mid hry
]
requireNonAbstractMethod :: HasClassName c => Hierarchy -> c -> AbsMethodId -> Stmt Fact
requireNonAbstractMethod hry cn mid = isInnerClassOf cn mid /\ orFailWith ("Could not find " ++ show mid)
[ methodExist mid' /\ unbrokenPath path
| (mid', False, path) <- superDeclarationPaths mid hry
]
codeIsUntuched :: AbsMethodId -> Stmt Fact
codeIsUntuched m =
tt (CodeIsUntuched m)
isInnerClassOf :: (HasClassName c1, HasClassName c2) => c1 -> c2 -> Stmt Fact
isInnerClassOf (view className -> c1) (view className -> c2) =
given (isInnerClass c2) (tt (IsInnerClass c1 c2))
withLogic :: Fact -> (Stmt Fact -> [Stmt Fact]) -> (Fact, Stmt Fact)
withLogic f fn = (f, and (fn (tt f)))
whenM :: Monad m => (m Bool) -> m () -> m ()
whenM mb m = mb >>= \b -> when b m
classConstructors :: Fold Class AbsMethodId
classConstructors = classAbsMethodIds . filtered (elemOf methodIdName "<init>")
classInitializers :: Fold Class AbsMethodId
classInitializers =
classAbsMethodIds . filtered (elemOf methodIdName "<clinit>")
payload :: Functor f => p -> ((p, a) -> f (Maybe (p, a))) -> a -> f (Maybe a)
payload p fn a = fmap snd <$> fn (p, a)
methodIsAbstract :: Method -> Bool
methodIsAbstract = view (methodAccessFlags . contains MAbstract)
|
d6d28b4a8c69c4efb32bbdb1078aa10a909afccf784f0a5ddc31624150e2714e | exercism/erlang | nth_prime.erl | -module(nth_prime).
-export([prime/1]).
prime(_N) -> undefined.
| null | https://raw.githubusercontent.com/exercism/erlang/57ac2707dae643682950715e74eb271f732e2100/exercises/practice/nth-prime/src/nth_prime.erl | erlang | -module(nth_prime).
-export([prime/1]).
prime(_N) -> undefined.
| |
fc1847bbad9358b97db33ce7e7b378ef5117e1b112298171555a424274bbab0f | hhucn/decide3 | moderator_tab.cljs | (ns decide.ui.process.moderator-tab
(:require
[com.fulcrologic.fulcro-i18n.i18n :as i18n]
[com.fulcrologic.fulcro.algorithms.data-targeting :as targeting]
[com.fulcrologic.fulcro.components :as comp :refer [defsc]]
[com.fulcrologic.fulcro.data-fetch :as df]
[com.fulcrologic.fulcro.dom :as dom]
[com.fulcrologic.fulcro.dom.events :as evt]
[com.fulcrologic.fulcro.mutations :refer [defmutation]]
[com.fulcrologic.fulcro.react.hooks :as hooks]
[com.fulcrologic.fulcro.routing.dynamic-routing :as dr]
[decide.models.process :as process]
[decide.models.process.mutations :as process.mutations]
[decide.models.user :as user]
[decide.routes :as routes]
[decide.ui.process.moderator.participant-list :as participant-list]
[decide.ui.user :as user.ui]
[mui.data-display :as dd]
[mui.data-display.list :as list]
[mui.inputs :as inputs]
[mui.inputs.form :as form]
[mui.layout :as layout]
[mui.layout.grid :as grid]
["@mui/icons-material/Clear" :default ClearIcon]
["@mui/icons-material/ExpandMore" :default ExpandMoreIcon]
["@mui/icons-material/RemoveCircleOutline" :default RemoveCircleIcon]
[mui.surfaces.accordion :as accordion]
[mui.surfaces.card :as card]
[mui.x.date-pickers :as date-pickers]))
(defn- accordion [{:keys [title]} body]
(accordion/accordion {:defaultExpanded true}
(accordion/summary {:expandIcon (dom/create-element ExpandMoreIcon)}
(dd/typography {:variant "body1"} title))
(accordion/details {} body)))
(defsc Moderator [_ {::user/keys [id display-name]
:keys [root/current-session >/avatar] :as props} {:keys [onDelete]}]
{:query [::user/id ::user/display-name
{:>/avatar (comp/get-query user.ui/Avatar)}
TODO Replace join with Session .
(let [self? (= id (::user/id current-session))]
(list/item {}
(list/item-avatar {}
(user.ui/ui-avatar avatar))
(list/item-text {:primary display-name})
(when onDelete
(list/item-secondary-action {}
(inputs/icon-button
{:edge :end
:disabled self? ; can't remove yourself from moderators
:onClick onDelete}
(dom/create-element RemoveCircleIcon)))))))
(def ui-moderator (comp/computed-factory Moderator {:keyfn ::user/id}))
(defsc ModeratorList [this {::process/keys [slug moderators]}]
{:query [::process/slug {::process/moderators (comp/get-query Moderator)}]
:ident ::process/slug
:use-hooks? true}
(let [[new-moderator-email set-new-moderator-email] (hooks/use-state "")]
(accordion {:title (i18n/tr "Moderators")}
(grid/container {:spacing 2}
(grid/item {:xs 12}
(list/list {}
(->> moderators
(sort-by ::user/display-name)
(map ui-moderator)
vec)))
(grid/item
{:component :form :xs 12
:onSubmit
(fn [e]
(evt/prevent-default! e)
(set-new-moderator-email "")
(comp/transact! this [(process.mutations/add-moderator {::process/slug slug ::user/email new-moderator-email})]))}
(dd/typography {:variant :h6} (i18n/tr "Add moderator"))
(inputs/textfield
{:label (i18n/tr "Email")
:value new-moderator-email
:onChange (fn [e]
(let [value (evt/target-value e)]
(set-new-moderator-email value)
(when (< 2 (count value))
(df/load! this :autocomplete/users Moderator {:params {:term value}
:target [:abc]}))))
:fullWidth true
:InputProps {:endAdornment (inputs/button {:type :submit} (i18n/trc "Submit new moderator form" "Add"))}}))))))
(def ui-moderator-list (comp/computed-factory ModeratorList))
(def default-input-props
{:fullWidth true
:autoComplete "off"
:margin "normal"})
(defn- dissoc-equal-vals
"Dissocs all keys from `m1` that have the same value in `m2` or aren't present."
[m1 m2]
(reduce-kv
(fn [m k v]
(if (and (contains? m2 k) (= v (k m2)))
m
(assoc m k v)))
{}
m1))
(defsc ProcessEdit [this {::process/keys [slug title description end-time type] :as props}]
{:query [::process/slug ::process/title ::process/description ::process/start-time ::process/end-time ::process/type :process/features]
:ident ::process/slug
:use-hooks? true}
(let [[form-state set-form-state] (hooks/use-state props)
dirty? (not= form-state props)]
(accordion {:title (i18n/tr "Edit process")}
(grid/container
{:component :form
:spacing 1
:onSubmit
(fn [evt]
(evt/prevent-default! evt)
(when dirty?
(comp/transact! this [(process.mutations/update-process
;; calculate diff ;; NOTE have a look at clojure.data/diff
(merge (dissoc-equal-vals form-state props) {::process/slug slug}))])))}
(grid/item {:xs 12}
(inputs/textfield
(merge default-input-props
{:label (i18n/trc "Title of a process" "Title")
:value (::process/title form-state)
:helperText (when (not= title (::process/title form-state)) (i18n/tr "Edited"))
:onChange #(set-form-state (assoc form-state ::process/title (evt/target-value %)))
:inputProps {:maxLength 140}})))
(grid/item {:xs 12}
(inputs/textfield
(merge default-input-props
{:label (i18n/trc "Description of a process" "Description")
:helperText (when (not= description (::process/description form-state)) (i18n/tr "Edited"))
:multiline true
:rows 7
:value (::process/description form-state)
:onChange #(set-form-state (assoc form-state ::process/description (evt/target-value %)))})))
(grid/item {:xs 12}
(form/group {:row true}
(let [current-type (::process/type form-state)]
(form/control-label
{:label (i18n/tr "Is the process public?")
:checked (= current-type ::process/type.public)
:onChange #(set-form-state (assoc form-state ::process/type (if (= current-type ::process/type.public) ::process/type.private ::process/type.public)))
:control (inputs/checkbox {})}))))
(grid/container {:item true :xs 12 :spacing 2}
(grid/item {:xs 12 :sm 6}
(date-pickers/date-time-picker
{:renderInput #(inputs/textfield (merge (js->clj %)
default-input-props
{:helperText (i18n/tr "Optional")}))
:value (or (::process/start-time form-state) js/undefined)
:maxDate (or (::process/end-time form-state) js/undefined)
:onChange #(set-form-state (assoc form-state ::process/start-time %))
:clearable true
:label (i18n/trc "Start of a process" "Start")}))
(grid/item {:xs 12 :sm 6}
(date-pickers/date-time-picker
{:renderInput #(inputs/textfield (merge (js->clj %)
default-input-props
{:helperText (i18n/tr "Optional")}))
:value (or (::process/end-time form-state) js/undefined)
:maxDate (or (::process/start-time form-state) js/undefined)
:onChange #(set-form-state (assoc form-state ::process/end-time %))
:clearable true
:label (i18n/trc "End of a process" "End")})))
(grid/item {:xs 12}
(accordion/accordion {:variant :outlined}
(accordion/summary {:expandIcon (dom/create-element ExpandMoreIcon)}
(i18n/tr "Advanced"))
(accordion/details {}
(grid/container {}
(grid/item {:xs 12}
(form/group {:row true}
(form/control {:component :fieldset}
(form/label {:component :legend}
(i18n/tr "Features"))
(for [{:keys [key label help]}
[{:key :process.feature/single-approve
:label (i18n/tr "Single approval")
:help (i18n/tr "Participants can approve at most one proposal")}
{:key :process.feature/voting.public
:label (i18n/tr "Public votes")
:help (i18n/tr "Everyone can see who voted for what")}
#_{:key :process.feature/rejects :label (i18n/tr "Rejects") :help (i18n/tr "Participants can reject proposals.")}]
:let [active? (contains? (:process/features form-state) key)]] ; TODO Move somewhere sensible
(comp/fragment {:key key}
(form/helper-text {} help)
(form/group {:row true}
(form/control-label
{:label label
:control
(inputs/checkbox
{:checked active?
:onChange
#(set-form-state
(update form-state
:process/features (if active? disj conj) key))})})))))))))))
(grid/item {:xs 12}
(inputs/button
{:color :primary
:type "submit"
:disabled (not dirty?)}
(i18n/trc "Submit form" "Submit")))))))
(def ui-process-edit (comp/computed-factory ProcessEdit))
(defsc Process [_ _]
{:query (fn []
(->> [[::process/slug]
(comp/get-query ProcessEdit)
(comp/get-query ModeratorList)]
(apply concat) set vec))
:ident ::process/slug})
(defmutation init-moderator-tab [{:keys [slug]}]
(action [{:keys [app ref]}]
(let [process-ident [::process/slug slug]]
( df / load ! app process - ident ModeratorList { : target ( ref : moderator - list ) } )
( df / load ! app process - ident { : target ( : process - edit ) } )
(df/load! app process-ident participant-list/ParticipantList {:target (conj ref :participant-list)})
;; combine loads of same entity into one.
(df/load! app process-ident Process
{:target (targeting/multiple-targets
(conj ref :moderator-list)
(conj ref :process)
(conj ref :process-edit))
:post-mutation `dr/target-ready
:post-mutation-params {:target ref}}))))
(defsc ProcessModeratorTab [this {:keys [moderator-list process-edit participant-list process] :as props}]
{:query [{:process (comp/get-query Process)}
{:process-edit (comp/get-query ProcessEdit)}
{:moderator-list (comp/get-query ModeratorList)}
{:participant-list (comp/get-query participant-list/ParticipantList)}]
:ident (fn [] [::ProcessModeratorTab (::process/slug process)])
:route-segment (routes/segment ::routes/process-moderation)
:will-enter
(fn [app {:process/keys [slug]}]
(let [ident (comp/get-ident ProcessModeratorTab {:process {::process/slug slug}})]
(dr/route-deferred ident
#(comp/transact! app [(init-moderator-tab {:slug slug})] {:ref ident}))))}
(layout/container {}
(layout/box {:my 2}
(grid/container {:spacing 2}
(grid/item {:xs 12 :md 8}
(ui-process-edit process-edit))
(when participant-list
(grid/item {:xs 12 :sm 6 :md 4}
(card/card {}
(card/header {:title (i18n/trc "Label for list of participants" "Participants")})
(card/content {}
(participant-list/ui-participant-list participant-list)))))
(grid/item {}
(ui-moderator-list moderator-list)))))) | null | https://raw.githubusercontent.com/hhucn/decide3/bd32079c7ca801197cb685fdbffd8755cf002134/src/main/decide/ui/process/moderator_tab.cljs | clojure | can't remove yourself from moderators
calculate diff ;; NOTE have a look at clojure.data/diff
TODO Move somewhere sensible
combine loads of same entity into one. | (ns decide.ui.process.moderator-tab
(:require
[com.fulcrologic.fulcro-i18n.i18n :as i18n]
[com.fulcrologic.fulcro.algorithms.data-targeting :as targeting]
[com.fulcrologic.fulcro.components :as comp :refer [defsc]]
[com.fulcrologic.fulcro.data-fetch :as df]
[com.fulcrologic.fulcro.dom :as dom]
[com.fulcrologic.fulcro.dom.events :as evt]
[com.fulcrologic.fulcro.mutations :refer [defmutation]]
[com.fulcrologic.fulcro.react.hooks :as hooks]
[com.fulcrologic.fulcro.routing.dynamic-routing :as dr]
[decide.models.process :as process]
[decide.models.process.mutations :as process.mutations]
[decide.models.user :as user]
[decide.routes :as routes]
[decide.ui.process.moderator.participant-list :as participant-list]
[decide.ui.user :as user.ui]
[mui.data-display :as dd]
[mui.data-display.list :as list]
[mui.inputs :as inputs]
[mui.inputs.form :as form]
[mui.layout :as layout]
[mui.layout.grid :as grid]
["@mui/icons-material/Clear" :default ClearIcon]
["@mui/icons-material/ExpandMore" :default ExpandMoreIcon]
["@mui/icons-material/RemoveCircleOutline" :default RemoveCircleIcon]
[mui.surfaces.accordion :as accordion]
[mui.surfaces.card :as card]
[mui.x.date-pickers :as date-pickers]))
(defn- accordion [{:keys [title]} body]
(accordion/accordion {:defaultExpanded true}
(accordion/summary {:expandIcon (dom/create-element ExpandMoreIcon)}
(dd/typography {:variant "body1"} title))
(accordion/details {} body)))
(defsc Moderator [_ {::user/keys [id display-name]
:keys [root/current-session >/avatar] :as props} {:keys [onDelete]}]
{:query [::user/id ::user/display-name
{:>/avatar (comp/get-query user.ui/Avatar)}
TODO Replace join with Session .
(let [self? (= id (::user/id current-session))]
(list/item {}
(list/item-avatar {}
(user.ui/ui-avatar avatar))
(list/item-text {:primary display-name})
(when onDelete
(list/item-secondary-action {}
(inputs/icon-button
{:edge :end
:onClick onDelete}
(dom/create-element RemoveCircleIcon)))))))
(def ui-moderator (comp/computed-factory Moderator {:keyfn ::user/id}))
(defsc ModeratorList [this {::process/keys [slug moderators]}]
{:query [::process/slug {::process/moderators (comp/get-query Moderator)}]
:ident ::process/slug
:use-hooks? true}
(let [[new-moderator-email set-new-moderator-email] (hooks/use-state "")]
(accordion {:title (i18n/tr "Moderators")}
(grid/container {:spacing 2}
(grid/item {:xs 12}
(list/list {}
(->> moderators
(sort-by ::user/display-name)
(map ui-moderator)
vec)))
(grid/item
{:component :form :xs 12
:onSubmit
(fn [e]
(evt/prevent-default! e)
(set-new-moderator-email "")
(comp/transact! this [(process.mutations/add-moderator {::process/slug slug ::user/email new-moderator-email})]))}
(dd/typography {:variant :h6} (i18n/tr "Add moderator"))
(inputs/textfield
{:label (i18n/tr "Email")
:value new-moderator-email
:onChange (fn [e]
(let [value (evt/target-value e)]
(set-new-moderator-email value)
(when (< 2 (count value))
(df/load! this :autocomplete/users Moderator {:params {:term value}
:target [:abc]}))))
:fullWidth true
:InputProps {:endAdornment (inputs/button {:type :submit} (i18n/trc "Submit new moderator form" "Add"))}}))))))
(def ui-moderator-list (comp/computed-factory ModeratorList))
(def default-input-props
{:fullWidth true
:autoComplete "off"
:margin "normal"})
(defn- dissoc-equal-vals
"Dissocs all keys from `m1` that have the same value in `m2` or aren't present."
[m1 m2]
(reduce-kv
(fn [m k v]
(if (and (contains? m2 k) (= v (k m2)))
m
(assoc m k v)))
{}
m1))
(defsc ProcessEdit [this {::process/keys [slug title description end-time type] :as props}]
{:query [::process/slug ::process/title ::process/description ::process/start-time ::process/end-time ::process/type :process/features]
:ident ::process/slug
:use-hooks? true}
(let [[form-state set-form-state] (hooks/use-state props)
dirty? (not= form-state props)]
(accordion {:title (i18n/tr "Edit process")}
(grid/container
{:component :form
:spacing 1
:onSubmit
(fn [evt]
(evt/prevent-default! evt)
(when dirty?
(comp/transact! this [(process.mutations/update-process
(merge (dissoc-equal-vals form-state props) {::process/slug slug}))])))}
(grid/item {:xs 12}
(inputs/textfield
(merge default-input-props
{:label (i18n/trc "Title of a process" "Title")
:value (::process/title form-state)
:helperText (when (not= title (::process/title form-state)) (i18n/tr "Edited"))
:onChange #(set-form-state (assoc form-state ::process/title (evt/target-value %)))
:inputProps {:maxLength 140}})))
(grid/item {:xs 12}
(inputs/textfield
(merge default-input-props
{:label (i18n/trc "Description of a process" "Description")
:helperText (when (not= description (::process/description form-state)) (i18n/tr "Edited"))
:multiline true
:rows 7
:value (::process/description form-state)
:onChange #(set-form-state (assoc form-state ::process/description (evt/target-value %)))})))
(grid/item {:xs 12}
(form/group {:row true}
(let [current-type (::process/type form-state)]
(form/control-label
{:label (i18n/tr "Is the process public?")
:checked (= current-type ::process/type.public)
:onChange #(set-form-state (assoc form-state ::process/type (if (= current-type ::process/type.public) ::process/type.private ::process/type.public)))
:control (inputs/checkbox {})}))))
(grid/container {:item true :xs 12 :spacing 2}
(grid/item {:xs 12 :sm 6}
(date-pickers/date-time-picker
{:renderInput #(inputs/textfield (merge (js->clj %)
default-input-props
{:helperText (i18n/tr "Optional")}))
:value (or (::process/start-time form-state) js/undefined)
:maxDate (or (::process/end-time form-state) js/undefined)
:onChange #(set-form-state (assoc form-state ::process/start-time %))
:clearable true
:label (i18n/trc "Start of a process" "Start")}))
(grid/item {:xs 12 :sm 6}
(date-pickers/date-time-picker
{:renderInput #(inputs/textfield (merge (js->clj %)
default-input-props
{:helperText (i18n/tr "Optional")}))
:value (or (::process/end-time form-state) js/undefined)
:maxDate (or (::process/start-time form-state) js/undefined)
:onChange #(set-form-state (assoc form-state ::process/end-time %))
:clearable true
:label (i18n/trc "End of a process" "End")})))
(grid/item {:xs 12}
(accordion/accordion {:variant :outlined}
(accordion/summary {:expandIcon (dom/create-element ExpandMoreIcon)}
(i18n/tr "Advanced"))
(accordion/details {}
(grid/container {}
(grid/item {:xs 12}
(form/group {:row true}
(form/control {:component :fieldset}
(form/label {:component :legend}
(i18n/tr "Features"))
(for [{:keys [key label help]}
[{:key :process.feature/single-approve
:label (i18n/tr "Single approval")
:help (i18n/tr "Participants can approve at most one proposal")}
{:key :process.feature/voting.public
:label (i18n/tr "Public votes")
:help (i18n/tr "Everyone can see who voted for what")}
#_{:key :process.feature/rejects :label (i18n/tr "Rejects") :help (i18n/tr "Participants can reject proposals.")}]
(comp/fragment {:key key}
(form/helper-text {} help)
(form/group {:row true}
(form/control-label
{:label label
:control
(inputs/checkbox
{:checked active?
:onChange
#(set-form-state
(update form-state
:process/features (if active? disj conj) key))})})))))))))))
(grid/item {:xs 12}
(inputs/button
{:color :primary
:type "submit"
:disabled (not dirty?)}
(i18n/trc "Submit form" "Submit")))))))
(def ui-process-edit (comp/computed-factory ProcessEdit))
(defsc Process [_ _]
{:query (fn []
(->> [[::process/slug]
(comp/get-query ProcessEdit)
(comp/get-query ModeratorList)]
(apply concat) set vec))
:ident ::process/slug})
(defmutation init-moderator-tab [{:keys [slug]}]
(action [{:keys [app ref]}]
(let [process-ident [::process/slug slug]]
( df / load ! app process - ident ModeratorList { : target ( ref : moderator - list ) } )
( df / load ! app process - ident { : target ( : process - edit ) } )
(df/load! app process-ident participant-list/ParticipantList {:target (conj ref :participant-list)})
(df/load! app process-ident Process
{:target (targeting/multiple-targets
(conj ref :moderator-list)
(conj ref :process)
(conj ref :process-edit))
:post-mutation `dr/target-ready
:post-mutation-params {:target ref}}))))
(defsc ProcessModeratorTab [this {:keys [moderator-list process-edit participant-list process] :as props}]
{:query [{:process (comp/get-query Process)}
{:process-edit (comp/get-query ProcessEdit)}
{:moderator-list (comp/get-query ModeratorList)}
{:participant-list (comp/get-query participant-list/ParticipantList)}]
:ident (fn [] [::ProcessModeratorTab (::process/slug process)])
:route-segment (routes/segment ::routes/process-moderation)
:will-enter
(fn [app {:process/keys [slug]}]
(let [ident (comp/get-ident ProcessModeratorTab {:process {::process/slug slug}})]
(dr/route-deferred ident
#(comp/transact! app [(init-moderator-tab {:slug slug})] {:ref ident}))))}
(layout/container {}
(layout/box {:my 2}
(grid/container {:spacing 2}
(grid/item {:xs 12 :md 8}
(ui-process-edit process-edit))
(when participant-list
(grid/item {:xs 12 :sm 6 :md 4}
(card/card {}
(card/header {:title (i18n/trc "Label for list of participants" "Participants")})
(card/content {}
(participant-list/ui-participant-list participant-list)))))
(grid/item {}
(ui-moderator-list moderator-list)))))) |
f63eea0c27120c1d66cc57ef8f6f40b321c25d47cbadf2e554d6118378cfaa0e | chrislomaxjones/part-ii-project | message.ml | (* message.ml *)
open Types
open Capnp_rpc_lwt
open Lwt.Infix
exception DeserializationError
let sturdy_refs = Hashtbl.create 10
let serialize_phase1_response acceptor_id ballot_num accepted : Yojson.Basic.json =
let acceptor_id = ("acceptor_id", `String (Types.string_of_id acceptor_id)) in
let ballot_json = Yojson.Basic.Util.to_assoc (Ballot.serialize ballot_num) in
let pvalues_json = Yojson.Basic.Util.to_assoc ( (`Assoc [("pvalues", Pval.serialize_list accepted)])) in
let response_json = `Assoc ( acceptor_id :: (Core.List.concat [ballot_json; pvalues_json]) ) in
`Assoc [ ("response", response_json )]
let deserialize_phase1_response (response_json : Yojson.Basic.json) =
let inner_json = Yojson.Basic.Util.member "response" response_json in
let acceptor_id_json = Yojson.Basic.Util.member "acceptor_id" inner_json in
let ballot_number_json = Yojson.Basic.Util.member "ballot_num" inner_json in
let pvalues_json = Yojson.Basic.Util.member "pvalues" inner_json in
(acceptor_id_json |> Yojson.Basic.Util.to_string |> Types.id_of_string,
Ballot.deserialize (`Assoc [("ballot_num",ballot_number_json)]),
Pval.deserialize_list pvalues_json)
let serialize_phase2_response acceptor_id ballot_num : Yojson.Basic.json =
let acceptor_id = ("acceptor_id", `String (Types.string_of_id acceptor_id)) in
let ballot_json = Yojson.Basic.Util.to_assoc (Ballot.serialize ballot_num) in
`Assoc [ ("response", `Assoc ( acceptor_id :: ballot_json ) ) ]
let deserialize_phase2_response (response_json : Yojson.Basic.json) =
let inner_json = Yojson.Basic.Util.member "response" response_json in
let acceptor_id_json = Yojson.Basic.Util.member "acceptor_id" inner_json in
let ballot_number_json = Yojson.Basic.Util.member "ballot_num" inner_json in
(acceptor_id_json |> Yojson.Basic.Util.to_string |> Types.id_of_string,
Ballot.deserialize (`Assoc [ ("ballot_num", ballot_number_json) ]))
Exceptions resulting in undefined values being sent in Capnp unions
exception Undefined_oper;;
exception Undefined_result;;
(* Exception arising from the wrong kind of response being received *)
exception Invalid_response;;
Expose the API service for the RPC system
module Api = Message_api.MakeRPC(Capnp_rpc_lwt);;
let local ?(request_callback : (command -> unit) option)
?(proposal_callback : (proposal -> unit) option)
?(response_callback : ((command_id * result) -> unit) option)
?(phase1_callback : (Ballot.t -> (Types.unique_id * Ballot.t * Pval.t list)) option)
?(phase2_callback : (Pval.t -> (Types.unique_id * Ballot.t)) option)
() =
let module Message = Api.Service.Message in
Message.local @@ object
inherit Message.service
method phase2_impl params release_param_caps =
let open Message.Phase2 in
let module Params = Message.Phase2.Params in
let pvalue = Params.pvalue_get params
|> Yojson.Basic.from_string
|> Pval.deserialize in
release_param_caps ();
match phase2_callback with Some f ->
let (acceptor_id, ballot_num) = f(pvalue) in
let json = serialize_phase2_response acceptor_id ballot_num in
let result_str = Yojson.Basic.to_string json in
let response,results = Service.Response.create Results.init_pointer in
Results.result_set results result_str;
Service.return response;
method phase1_impl params release_param_caps =
let open Message.Phase1 in
let module Params = Message.Phase1.Params in
let ballot_number = Params.ballot_number_get params
|> Yojson.Basic.from_string
|> Ballot.deserialize in
release_param_caps ();
match phase1_callback with Some f ->
let (acceptor_id, ballot_num', pvalues) = f(ballot_number) in
let json = serialize_phase1_response acceptor_id ballot_num' pvalues in
let result_str = Yojson.Basic.to_string json in
let response, results = Service.Response.create Results.init_pointer in
Results.result_set results result_str;
Service.return response;
method client_response_impl params release_param_caps =
let open Message.ClientResponse in
let module Params = Message.ClientResponse.Params in
let open Api.Reader.Message in
let result_reader = Params.result_get params in
(* Pull out all the necessary data from the params *)
let result = (match Result.get result_reader with
| Result.Failure -> Types.Failure
| Result.Success -> Types.Success
| Result.Read v -> Types.ReadSuccess v
| Result.Undefined _ -> raise Undefined_result) in
let command_id = Params.command_id_get params in
(* Call a callback to notify client *)
match response_callback with Some h -> h(command_id,result);
(* Release capabilities, doesn't matter for us *)
release_param_caps ();
(* Return an empty response *)
Service.return_empty ();
method send_proposal_impl params release_param_caps =
let open Message.SendProposal in
let module Params = Message.SendProposal.Params in
(* Pull out all the slot number from params *)
let slot_number = Params.slot_number_get params in
(* Get an API reader for the command, since its a nested struct *)
let cmd_reader = Params.command_get params in
(* Retrieve the fields from the command struct passed in decision *)
let open Api.Reader.Message in
(* Retrieve the client id and command id fields from the struct *)
let id = Command.client_id_get cmd_reader in
let uri = Command.client_uri_get cmd_reader in
let command_id = Command.command_id_get cmd_reader in
(* Operation is more difficult as it is a nested struct *)
let op_reader = Command.operation_get cmd_reader in
Operations are a union type in Capnp so match over the variant
let operation = (match Command.Operation.get op_reader with
| Command.Operation.Nop -> Types.Nop
| Command.Operation.Create c_struct ->
let k = Command.Operation.Create.key_get c_struct in
let v = Command.Operation.Create.value_get c_struct in
Types.Create(k,v)
| Command.Operation.Read r_struct ->
let k = Command.Operation.Read.key_get r_struct in
Types.Read(k)
| Command.Operation.Update u_struct ->
let k = Command.Operation.Update.key_get u_struct in
let v = Command.Operation.Update.value_get u_struct in
Types.Update(k,v)
| Command.Operation.Remove r_struct ->
let k = Command.Operation.Remove.key_get r_struct in
Types.Remove(k)
| Command.Operation.Undefined(_) -> raise Undefined_oper) in
(* Form the proposal from the message parameters *)
let proposal = (slot_number, ((Core.Uuid.of_string id,Uri.of_string uri), command_id, operation)) in
(* Do something with the proposal here *)
(* This is nonsense at the moment *)
(match proposal_callback with
| None -> ()
| Some g -> g(proposal) );
(* Release capabilities, doesn't matter for us *)
release_param_caps ();
(* Return an empty response *)
Service.return_empty ();
(* Handle an incoming Decision message: decode the slot number and the
   nested command struct into an application-level proposal, then hand
   it to [proposal_callback] when one was supplied. *)
method decision_impl params release_param_caps =
  let open Message.Decision in
  let module Params = Message.Decision.Params in
  (* Get slot number *)
  let slot_number = Params.slot_number_get params in
  (* Get an API reader for the command, since it's a nested struct *)
  let cmd_reader = Params.command_get params in
  (* Retrieve the fields from the command struct passed in decision *)
  let open Api.Reader.Message in
  (* Retrieve the client id, client uri and command id fields *)
  let id = Command.client_id_get cmd_reader in
  let uri = Command.client_uri_get cmd_reader in
  let command_id = Command.command_id_get cmd_reader in
  (* Operation is more difficult as it is a nested struct *)
  let op_reader = Command.operation_get cmd_reader in
  (* Operations are a union type in Capnp so match over the variant *)
  let operation = (match Command.Operation.get op_reader with
    | Command.Operation.Nop -> Types.Nop
    | Command.Operation.Create c_struct ->
      let k = Command.Operation.Create.key_get c_struct in
      let v = Command.Operation.Create.value_get c_struct in
      Types.Create(k,v)
    | Command.Operation.Read r_struct ->
      let k = Command.Operation.Read.key_get r_struct in
      Types.Read(k)
    | Command.Operation.Update u_struct ->
      let k = Command.Operation.Update.key_get u_struct in
      let v = Command.Operation.Update.value_get u_struct in
      Types.Update(k,v)
    | Command.Operation.Remove r_struct ->
      let k = Command.Operation.Remove.key_get r_struct in
      Types.Remove(k)
    | Command.Operation.Undefined(_) -> raise Undefined_oper) in
  (* Form the proposal from the message parameters *)
  let proposal = (slot_number, ((Core.Uuid.of_string id,Uri.of_string uri), command_id, operation)) in
  (* Call the callback function that will process the decision *)
  (match proposal_callback with
   | None -> ()
   | Some g -> g(proposal) );
  (* Release capabilities, doesn't matter for us *)
  release_param_caps ();
  (* Return an empty response *)
  Service.return_empty ();
(* Handle an incoming ClientRequest message: decode the command struct
   and pass it to [request_callback].
   NOTE(review): the match on [request_callback] is deliberately
   non-exhaustive — a missing callback is a programming error here and
   raising Match_failure is the chosen behaviour (see comment below). *)
method client_request_impl params release_param_caps =
  let open Message.ClientRequest in
  let module Params = Message.ClientRequest.Params in
  (* Retrieve the fields from the command struct passed in request *)
  let cmd_reader = Params.command_get params in
  let open Api.Reader.Message in
  (* Retrieve the client id and command id fields from the struct *)
  let id = Command.client_id_get cmd_reader in
  let uri = Command.client_uri_get cmd_reader in
  let command_id = Command.command_id_get cmd_reader in
  (* Operation is more difficult as it is a nested struct *)
  let op_reader = Command.operation_get cmd_reader in
  (* Operations are a union type in Capnp so match over the variant *)
  let operation = (match Command.Operation.get op_reader with
    | Command.Operation.Nop -> Types.Nop
    | Command.Operation.Create c_struct ->
      let k = Command.Operation.Create.key_get c_struct in
      let v = Command.Operation.Create.value_get c_struct in
      Types.Create(k,v)
    | Command.Operation.Read r_struct ->
      let k = Command.Operation.Read.key_get r_struct in
      Types.Read(k)
    | Command.Operation.Update u_struct ->
      let k = Command.Operation.Update.key_get u_struct in
      let v = Command.Operation.Update.value_get u_struct in
      Types.Update(k,v)
    | Command.Operation.Remove r_struct ->
      let k = Command.Operation.Remove.key_get r_struct in
      Types.Remove(k)
    | Command.Operation.Undefined(_) -> raise Undefined_oper) in
  (* Invoke the request callback with the decoded command *)
  (* This pattern matching is not exhaustive but
     we always want some callback f here
     So it is suitable to raise an exception
     if one is not passed in this case
  *)
  match request_callback with Some f ->
  f ((Core.Uuid.of_string id,Uri.of_string uri), command_id, operation);
  (* Releases capabilities, doesn't matter for us *)
  release_param_caps ();
  (* Return an empty response *)
  Service.return_empty ()
end;;
(*---------------------------------------------------------------------------*)
(* Send a ClientRequest RPC carrying [cmd] over capability [t].
   On transport error the whole sturdy-ref cache is cleared so that
   subsequent sends re-establish connections. *)
let client_request_rpc t (cmd : Types.command) =
  let open Api.Client.Message.ClientRequest in
  let request, params = Capability.Request.create Params.init_pointer in
  let open Api.Builder.Message in
  (* Create an empty command type as recognised by Capnp *)
  let cmd_rpc = (Command.init_root ()) in
  (* Construct a command struct for Capnp from the cmd argument given *)
  let ((id,uri), command_id, operation) = cmd in
  Command.client_id_set cmd_rpc (Core.Uuid.to_string id);
  Command.client_uri_set cmd_rpc (Uri.to_string uri);
  Command.command_id_set_exn cmd_rpc command_id;
  (* Construct an operation struct here *)
  let oper_rpc = (Command.Operation.init_root ()) in
  (* Populate the operation struct with the correct values *)
  (match operation with
  | Nop ->
    Command.Operation.nop_set oper_rpc
  | Create(k,v) ->
    let create = (Command.Operation.create_init oper_rpc) in
    Command.Operation.Create.key_set_exn create k;
    Command.Operation.Create.value_set create v;
  | Read (k) ->
    let read = Command.Operation.read_init oper_rpc in
    Command.Operation.Read.key_set_exn read k;
  | Update(k,v) ->
    let update = Command.Operation.update_init oper_rpc in
    Command.Operation.Update.key_set_exn update k;
    Command.Operation.Update.value_set update v;
  | Remove(k) ->
    let remove = Command.Operation.remove_init oper_rpc in
    Command.Operation.Remove.key_set_exn remove k);
  (Command.operation_set_builder cmd_rpc oper_rpc |> ignore);
  (* Constructs the command struct and associates with params *)
  (Params.command_set_reader params (Command.to_reader cmd_rpc) |> ignore);
  (* Send the message; drop cached capabilities on transport error *)
  Capability.call_for_unit t method_id request >|= function
  | Ok () -> ()
  | Error _ -> Hashtbl.clear sturdy_refs
(* Send a ClientResponse RPC over capability [t], reporting [result]
   for the command identified by [cid]. *)
let client_response_rpc t (cid : Types.command_id) (result : Types.result) =
  let open Api.Client.Message.ClientResponse in
  let request, params = Capability.Request.create Params.init_pointer in
  let open Api.Builder.Message in
  (* Create an empty result type as recognised by Capnp *)
  let result_rpc = Result.init_root () in
  (* As result is a Capnp union, match over the variant result argument
     and set the appropriate Capnp value of result_rpc *)
  (match result with
  | Failure ->
    Result.failure_set result_rpc
  | Success ->
    Result.success_set result_rpc
  | ReadSuccess v ->
    Result.read_set result_rpc v);
  (* Set the reader for the results union of the parameters *)
  Params.result_set_reader params (Result.to_reader result_rpc) |> ignore;
  (* Set the command id in the parameters to argument given *)
  Params.command_id_set_exn params cid;
  (* Send the message; drop cached capabilities on transport error *)
  Capability.call_for_unit t method_id request >|= function
  | Ok () -> ()
  | Error _ -> Hashtbl.clear sturdy_refs
(* Send a Decision RPC carrying proposal [p] over capability [t]. *)
let decision_rpc t (p : Types.proposal) =
  let open Api.Client.Message.Decision in
  let request, params = Capability.Request.create Params.init_pointer in
  let open Api.Builder.Message in
  (* Create an empty command type as recognised by Capnp *)
  let cmd_rpc = Command.init_root () in
  (* Construct a command struct for Capnp from the proposal given *)
  let (slot_number, ((id,uri), command_id, operation)) = p in
  Command.client_id_set cmd_rpc (Core.Uuid.to_string id);
  Command.client_uri_set cmd_rpc (Uri.to_string uri);
  Command.command_id_set_exn cmd_rpc command_id;
  (* Construct an operation struct here *)
  let oper_rpc = (Command.Operation.init_root ()) in
  (* Populate the operation struct with the correct values *)
  (match operation with
  | Nop ->
    Command.Operation.nop_set oper_rpc
  | Create(k,v) ->
    let create = (Command.Operation.create_init oper_rpc) in
    Command.Operation.Create.key_set_exn create k;
    Command.Operation.Create.value_set create v;
  | Read (k) ->
    let read = Command.Operation.read_init oper_rpc in
    Command.Operation.Read.key_set_exn read k;
  | Update(k,v) ->
    let update = Command.Operation.update_init oper_rpc in
    Command.Operation.Update.key_set_exn update k;
    Command.Operation.Update.value_set update v;
  | Remove(k) ->
    let remove = Command.Operation.remove_init oper_rpc in
    Command.Operation.Remove.key_set_exn remove k);
  (Command.operation_set_builder cmd_rpc oper_rpc |> ignore);
  (* Constructs the command struct and associates with params *)
  (Params.command_set_reader params (Command.to_reader cmd_rpc) |> ignore);
  (* Add the given slot number argument to the message parameters *)
  Params.slot_number_set_exn params slot_number;
  (* Send the message; drop cached capabilities on transport error *)
  Capability.call_for_unit t method_id request >|= function
  | Ok () -> ()
  | Error _ -> Hashtbl.clear sturdy_refs
(* Send a SendProposal RPC carrying proposal [p] over capability [t]. *)
let proposal_rpc t (p : Types.proposal) =
  let open Api.Client.Message.SendProposal in
  let request, params = Capability.Request.create Params.init_pointer in
  let open Api.Builder.Message in
  (* Create an empty command type as recognised by Capnp *)
  let cmd_rpc = Command.init_root () in
  (* Construct a command struct for Capnp from the proposal given *)
  let (slot_number, ((id,uri), command_id, operation)) = p in
  Command.client_id_set cmd_rpc (Core.Uuid.to_string id);
  Command.client_uri_set cmd_rpc (Uri.to_string uri);
  Command.command_id_set_exn cmd_rpc command_id;
  (* Construct an operation struct here *)
  let oper_rpc = (Command.Operation.init_root ()) in
  (* Populate the operation struct with the correct values *)
  (match operation with
  | Nop ->
    Command.Operation.nop_set oper_rpc
  | Create(k,v) ->
    let create = (Command.Operation.create_init oper_rpc) in
    Command.Operation.Create.key_set_exn create k;
    Command.Operation.Create.value_set create v;
  | Read (k) ->
    let read = Command.Operation.read_init oper_rpc in
    Command.Operation.Read.key_set_exn read k;
  | Update(k,v) ->
    let update = Command.Operation.update_init oper_rpc in
    Command.Operation.Update.key_set_exn update k;
    Command.Operation.Update.value_set update v;
  | Remove(k) ->
    let remove = Command.Operation.remove_init oper_rpc in
    Command.Operation.Remove.key_set_exn remove k);
  (Command.operation_set_builder cmd_rpc oper_rpc |> ignore);
  (* Constructs the command struct and associates with params *)
  (Params.command_set_reader params (Command.to_reader cmd_rpc) |> ignore);
  (* Add the given slot number argument to the message parameters *)
  Params.slot_number_set_exn params slot_number;
  (* Send the message; drop cached capabilities on transport error *)
  Capability.call_for_unit t method_id request >|= function
  | Ok () -> ()
  | Error _ -> Hashtbl.clear sturdy_refs
(*---------------------------------------------------------------------------*)
(* Types of message that can be passed between nodes:
- This represents the application-level representation of a message.
- These can be passed to the RPC api to be prepared for transport etc. *)
type message = ClientRequestMessage of command  (* dispatched via client_request_rpc *)
| ProposalMessage of proposal  (* dispatched via proposal_rpc *)
| DecisionMessage of proposal  (* dispatched via decision_rpc *)
| ClientResponseMessage of command_id * result  (* dispatched via client_response_rpc *)
(* | ... further messages will be added *)
(* Start a new server advertised at address (host, port).
   This server does not serve with TLS and the service ID for the
   server is derived from its address. Resolves to the server's
   sturdy URI once it is listening. *)
let start_new_server ?request_callback ?proposal_callback ?response_callback ?phase1_callback ?phase2_callback host port =
  let vat_config =
    Capnp_rpc_unix.Vat_config.create
      ~serve_tls:false ~secret_key:`Ephemeral (`TCP (host, port))
  in
  let service_id =
    Capnp_rpc_lwt.Restorer.Id.derived ~secret:"" (host ^ string_of_int port)
  in
  let service =
    local ?request_callback ?proposal_callback ?response_callback
      ?phase1_callback ?phase2_callback ()
  in
  let restore = Capnp_rpc_lwt.Restorer.single service_id service in
  Capnp_rpc_unix.serve vat_config ~restore >|= fun vat ->
  Capnp_rpc_unix.Vat.sturdy_uri vat service_id;;
(* Resolve the URI for a given service from the host,port address pair *)
(* Resolve the Capnp URI for the service at the (host, port) address pair.
   The service ID is re-derived exactly as in [start_new_server]. *)
let uri_from_address host port =
  let id = Capnp_rpc_lwt.Restorer.Id.derived ~secret:"" (host ^ string_of_int port) in
  let endpoint =
    ( (Capnp_rpc_unix.Network.Location.tcp host port,
       Capnp_rpc_lwt.Auth.Digest.insecure),
      Capnp_rpc_lwt.Restorer.Id.to_string id )
  in
  Capnp_rpc_unix.Network.Address.to_uri endpoint
(* Takes a Capnp URI for a service and returns the lwt capability of that
   service, caching successful connections in [sturdy_refs].
   Returns [None] when the sturdy ref cannot be connected; any other
   failure (e.g. a malformed URI in [import_exn]) is reported with a
   descriptive message instead of the former placeholder "hello". *)
let service_from_uri uri =
  (try Lwt.return (Some (Hashtbl.find sturdy_refs uri))
   with Not_found ->
     (try (
        let client_vat = Capnp_rpc_unix.client_only_vat () in
        let sr = Capnp_rpc_unix.Vat.import_exn client_vat uri in
        Sturdy_ref.connect sr >>= function
        | Ok capability ->
          (Hashtbl.add sturdy_refs uri capability;
           Lwt.return_some capability)
        | Error _ -> Lwt.return_none)
      with exn ->
        failwith
          (Printf.sprintf "service_from_uri: failed to connect to %s (%s)"
             (Uri.to_string uri) (Printexc.to_string exn))))
(* Derive the service from an address by indirectly computing the URI.
   This is mostly for legacy reasons - all of the local node code sends
   messages based on URIs.
   TODO: Modify the code so that we don't need this extra indirection *)
let service_from_addr host port =
  service_from_uri (uri_from_address host port)
(* Accepts as input a message and dispatches it over RPC to the service
   at the given URI. Resolves to unit immediately when no service can be
   found for the URI. *)
let send_request message uri =
  (* Get the service for the given URI *)
  service_from_uri uri >>= function
  | None -> Lwt.return_unit
  | Some service -> (
    match message with
    | ClientRequestMessage cmd ->
      client_request_rpc service cmd
    | DecisionMessage p ->
      decision_rpc service p
    | ProposalMessage p ->
      proposal_rpc service p
    | ClientResponseMessage (cid, result) ->
      client_response_rpc service cid result)
(*---------------------------------------------------------------------------*)
(* Issue a Phase1 RPC for ballot [b] on capability [t] and decode the
   JSON reply into an application-level phase-1 response. *)
let phase1_rpc t (b : Ballot.t) =
  let open Api.Client.Message.Phase1 in
  let request, params = Capability.Request.create Params.init_pointer in
  let open Api.Builder.Message in
  let payload = Yojson.Basic.to_string (Ballot.serialize b) in
  Params.ballot_number_set params payload;
  Capability.call_for_value_exn t method_id request >|= fun results ->
  deserialize_phase1_response (Yojson.Basic.from_string (Results.result_get results))
(* Send a phase-1 message for ballot [b] to the service at [uri].
   The match was previously non-exhaustive and raised Match_failure when
   the service could not be resolved; we now fail the promise with a
   descriptive message instead. *)
let send_phase1_message (b : Ballot.t) uri =
  service_from_uri uri >>= function
  | Some service -> phase1_rpc service b
  | None -> Lwt.fail_with "send_phase1_message: no service found for URI";;
(* Issue a Phase2 RPC for pvalue [pval] on capability [t] and decode the
   JSON reply into an application-level phase-2 response. *)
let phase2_rpc t (pval : Pval.t) =
  let open Api.Client.Message.Phase2 in
  let request, params = Capability.Request.create Params.init_pointer in
  let open Api.Builder.Message in
  let payload = Yojson.Basic.to_string (Pval.serialize pval) in
  Params.pvalue_set params payload;
  Capability.call_for_value_exn t method_id request >|= fun results ->
  deserialize_phase2_response (Yojson.Basic.from_string (Results.result_get results))
(* TODO: Pattern matching here is not exhaustive *)
let send_phase2_message (pval : Pval.t) uri =
service_from_uri uri >>= function
| Some service ->
phase2_rpc service pval
| null | https://raw.githubusercontent.com/chrislomaxjones/part-ii-project/fb638a6d9759717a610ff8cf2fe985b94f003648/src/message.ml | ocaml | message.ml
Exception arising from the wrong kind of response being received
Pull out all the necessary data from the params
Call a callback to notify client
Release capabilities, doesn't matter for us
Return an empty response
Pull out all the slot number from params
Get an API reader for the command, since its a nested struct
Retrieve the fields from the command struct passed in decision
Retrieve the client id and command id fields from the struct
Operation is more difficult as it is a nested struct
Form the proposal from the message parameters
Do something with the proposal here
This is nonsense at the moment
Release capabilities, doesn't matter for us
Return an empty response
Get slot number
Get an API reader for the command, since its a nested struct
Retrieve the fields from the command struct passed in decision
Retrieve the client id and command id fields from the struct
Operation is more difficult as it is a nested struct
Form the proposal from the message parameters
Call the callback function that will process the decision
Release capabilities, doesn't matter for us
Return an empty response
Retrieve the fields from the command struct passed in request
Retrieve the client id and command id fields from the struct
Operation is more difficult as it is a nested struct
Get back response for request
This pattern matching is not exhaustive but
we always want some callback f here
So it is suitable to raise an exception
if one is not passed in this case
Releases capabilities, doesn't matter for us
Return an empty response
---------------------------------------------------------------------------
Populate the operation struct with the correct values
Constructs the command struct and associates with params
Send the message and pull out the result
Set the reader for the results union of the parameters
Set the command id in the parameters to argument given
Send the message and ignore the response
Populate the operation struct with the correct values
Constructs the command struct and associates with params
Add the given slot number argument to the message parameters
Send the message and ignore the response
Populate the operation struct with the correct values
Constructs the command struct and associates with params
Add the given slot number argument to the message parameters
Send the message and ignore the response
---------------------------------------------------------------------------
Types of message that can be passed between nodes:
- This represents the application-level representation of a message.
- These can be passed to the RPC api to be prepared for transport etc.
| ... further messages will be added
let service_id = Capnp_rpc_unix.Vat_config.derived_id config "main" in
Resolve the URI for a given service from the host,port address pair
Accepts as input a message and prepares it for RPC transport,
given the URI of the service to which it will be sent
---------------------------------------------------------------------------
TODO: Pattern matching here exhaustive
TODO: Pattern matching here is not exhaustive |
open Types
open Capnp_rpc_lwt
open Lwt.Infix
exception DeserializationError
let sturdy_refs = Hashtbl.create 10
let serialize_phase1_response acceptor_id ballot_num accepted : Yojson.Basic.json =
let acceptor_id = ("acceptor_id", `String (Types.string_of_id acceptor_id)) in
let ballot_json = Yojson.Basic.Util.to_assoc (Ballot.serialize ballot_num) in
let pvalues_json = Yojson.Basic.Util.to_assoc ( (`Assoc [("pvalues", Pval.serialize_list accepted)])) in
let response_json = `Assoc ( acceptor_id :: (Core.List.concat [ballot_json; pvalues_json]) ) in
`Assoc [ ("response", response_json )]
let deserialize_phase1_response (response_json : Yojson.Basic.json) =
let inner_json = Yojson.Basic.Util.member "response" response_json in
let acceptor_id_json = Yojson.Basic.Util.member "acceptor_id" inner_json in
let ballot_number_json = Yojson.Basic.Util.member "ballot_num" inner_json in
let pvalues_json = Yojson.Basic.Util.member "pvalues" inner_json in
(acceptor_id_json |> Yojson.Basic.Util.to_string |> Types.id_of_string,
Ballot.deserialize (`Assoc [("ballot_num",ballot_number_json)]),
Pval.deserialize_list pvalues_json)
let serialize_phase2_response acceptor_id ballot_num : Yojson.Basic.json =
let acceptor_id = ("acceptor_id", `String (Types.string_of_id acceptor_id)) in
let ballot_json = Yojson.Basic.Util.to_assoc (Ballot.serialize ballot_num) in
`Assoc [ ("response", `Assoc ( acceptor_id :: ballot_json ) ) ]
let deserialize_phase2_response (response_json : Yojson.Basic.json) =
let inner_json = Yojson.Basic.Util.member "response" response_json in
let acceptor_id_json = Yojson.Basic.Util.member "acceptor_id" inner_json in
let ballot_number_json = Yojson.Basic.Util.member "ballot_num" inner_json in
(acceptor_id_json |> Yojson.Basic.Util.to_string |> Types.id_of_string,
Ballot.deserialize (`Assoc [ ("ballot_num", ballot_number_json) ]))
Exceptions resulting in undefined values being sent in Capnp unions
exception Undefined_oper;;
exception Undefined_result;;
exception Invalid_response;;
Expose the API service for the RPC system
module Api = Message_api.MakeRPC(Capnp_rpc_lwt);;
let local ?(request_callback : (command -> unit) option)
?(proposal_callback : (proposal -> unit) option)
?(response_callback : ((command_id * result) -> unit) option)
?(phase1_callback : (Ballot.t -> (Types.unique_id * Ballot.t * Pval.t list)) option)
?(phase2_callback : (Pval.t -> (Types.unique_id * Ballot.t)) option)
() =
let module Message = Api.Service.Message in
Message.local @@ object
inherit Message.service
method phase2_impl params release_param_caps =
let open Message.Phase2 in
let module Params = Message.Phase2.Params in
let pvalue = Params.pvalue_get params
|> Yojson.Basic.from_string
|> Pval.deserialize in
release_param_caps ();
match phase2_callback with Some f ->
let (acceptor_id, ballot_num) = f(pvalue) in
let json = serialize_phase2_response acceptor_id ballot_num in
let result_str = Yojson.Basic.to_string json in
let response,results = Service.Response.create Results.init_pointer in
Results.result_set results result_str;
Service.return response;
method phase1_impl params release_param_caps =
let open Message.Phase1 in
let module Params = Message.Phase1.Params in
let ballot_number = Params.ballot_number_get params
|> Yojson.Basic.from_string
|> Ballot.deserialize in
release_param_caps ();
match phase1_callback with Some f ->
let (acceptor_id, ballot_num', pvalues) = f(ballot_number) in
let json = serialize_phase1_response acceptor_id ballot_num' pvalues in
let result_str = Yojson.Basic.to_string json in
let response, results = Service.Response.create Results.init_pointer in
Results.result_set results result_str;
Service.return response;
method client_response_impl params release_param_caps =
let open Message.ClientResponse in
let module Params = Message.ClientResponse.Params in
let open Api.Reader.Message in
let result_reader = Params.result_get params in
let result = (match Result.get result_reader with
| Result.Failure -> Types.Failure
| Result.Success -> Types.Success
| Result.Read v -> Types.ReadSuccess v
| Result.Undefined _ -> raise Undefined_result) in
let command_id = Params.command_id_get params in
match response_callback with Some h -> h(command_id,result);
release_param_caps ();
Service.return_empty ();
method send_proposal_impl params release_param_caps =
let open Message.SendProposal in
let module Params = Message.SendProposal.Params in
let slot_number = Params.slot_number_get params in
let cmd_reader = Params.command_get params in
let open Api.Reader.Message in
let id = Command.client_id_get cmd_reader in
let uri = Command.client_uri_get cmd_reader in
let command_id = Command.command_id_get cmd_reader in
let op_reader = Command.operation_get cmd_reader in
Operations are a union type in Capnp so match over the variant
let operation = (match Command.Operation.get op_reader with
| Command.Operation.Nop -> Types.Nop
| Command.Operation.Create c_struct ->
let k = Command.Operation.Create.key_get c_struct in
let v = Command.Operation.Create.value_get c_struct in
Types.Create(k,v)
| Command.Operation.Read r_struct ->
let k = Command.Operation.Read.key_get r_struct in
Types.Read(k)
| Command.Operation.Update u_struct ->
let k = Command.Operation.Update.key_get u_struct in
let v = Command.Operation.Update.value_get u_struct in
Types.Update(k,v)
| Command.Operation.Remove r_struct ->
let k = Command.Operation.Remove.key_get r_struct in
Types.Remove(k)
| Command.Operation.Undefined(_) -> raise Undefined_oper) in
let proposal = (slot_number, ((Core.Uuid.of_string id,Uri.of_string uri), command_id, operation)) in
(match proposal_callback with
| None -> ()
| Some g -> g(proposal) );
release_param_caps ();
Service.return_empty ();
method decision_impl params release_param_caps =
let open Message.Decision in
let module Params = Message.Decision.Params in
let slot_number = Params.slot_number_get params in
let cmd_reader = Params.command_get params in
let open Api.Reader.Message in
let id = Command.client_id_get cmd_reader in
let uri = Command.client_uri_get cmd_reader in
let command_id = Command.command_id_get cmd_reader in
let op_reader = Command.operation_get cmd_reader in
Operations are a union type in Capnp so match over the variant
let operation = (match Command.Operation.get op_reader with
| Command.Operation.Nop -> Types.Nop
| Command.Operation.Create c_struct ->
let k = Command.Operation.Create.key_get c_struct in
let v = Command.Operation.Create.value_get c_struct in
Types.Create(k,v)
| Command.Operation.Read r_struct ->
let k = Command.Operation.Read.key_get r_struct in
Types.Read(k)
| Command.Operation.Update u_struct ->
let k = Command.Operation.Update.key_get u_struct in
let v = Command.Operation.Update.value_get u_struct in
Types.Update(k,v)
| Command.Operation.Remove r_struct ->
let k = Command.Operation.Remove.key_get r_struct in
Types.Remove(k)
| Command.Operation.Undefined(_) -> raise Undefined_oper) in
let proposal = (slot_number, ((Core.Uuid.of_string id,Uri.of_string uri), command_id, operation)) in
(match proposal_callback with
| None -> ()
| Some g -> g(proposal) );
release_param_caps ();
Service.return_empty ();
method client_request_impl params release_param_caps =
let open Message.ClientRequest in
let module Params = Message.ClientRequest.Params in
let cmd_reader = Params.command_get params in
let open Api.Reader.Message in
let id = Command.client_id_get cmd_reader in
let uri = Command.client_uri_get cmd_reader in
let command_id = Command.command_id_get cmd_reader in
let op_reader = Command.operation_get cmd_reader in
Operations are a union type in Capnp so match over the variant
let operation = (match Command.Operation.get op_reader with
| Command.Operation.Nop -> Types.Nop
| Command.Operation.Create c_struct ->
let k = Command.Operation.Create.key_get c_struct in
let v = Command.Operation.Create.value_get c_struct in
Types.Create(k,v)
| Command.Operation.Read r_struct ->
let k = Command.Operation.Read.key_get r_struct in
Types.Read(k)
| Command.Operation.Update u_struct ->
let k = Command.Operation.Update.key_get u_struct in
let v = Command.Operation.Update.value_get u_struct in
Types.Update(k,v)
| Command.Operation.Remove r_struct ->
let k = Command.Operation.Remove.key_get r_struct in
Types.Remove(k)
| Command.Operation.Undefined(_) -> raise Undefined_oper) in
Note here there is a temporay Nop passed
match request_callback with Some f ->
f ((Core.Uuid.of_string id,Uri.of_string uri), command_id, operation);
release_param_caps ();
Service.return_empty ()
end;;
let client_request_rpc t (cmd : Types.command) =
let open Api.Client.Message.ClientRequest in
let request, params = Capability.Request.create Params.init_pointer in
let open Api.Builder.Message in
Create an empty command type as recognised by Capnp
let cmd_rpc = (Command.init_root ()) in
Construct a command struct for Capnp from the cmd argument given
let ((id,uri), command_id, operation) = cmd in
Command.client_id_set cmd_rpc (Core.Uuid.to_string id);
Command.client_uri_set cmd_rpc (Uri.to_string uri);
Command.command_id_set_exn cmd_rpc command_id;
Construct an operation struct here
let oper_rpc = (Command.Operation.init_root ()) in
(match operation with
| Nop ->
Command.Operation.nop_set oper_rpc
| Create(k,v) ->
let create = (Command.Operation.create_init oper_rpc) in
Command.Operation.Create.key_set_exn create k;
Command.Operation.Create.value_set create v;
| Read (k) ->
let read = Command.Operation.read_init oper_rpc in
Command.Operation.Read.key_set_exn read k;
| Update(k,v) ->
let update = Command.Operation.update_init oper_rpc in
Command.Operation.Update.key_set_exn update k;
Command.Operation.Update.value_set update v;
| Remove(k) ->
let remove = Command.Operation.remove_init oper_rpc in
Command.Operation.Remove.key_set_exn remove k);
(Command.operation_set_builder cmd_rpc oper_rpc |> ignore);
(Params.command_set_reader params (Command.to_reader cmd_rpc) |> ignore);
Capability.call_for_unit t method_id request >|= function
| Ok () -> ()
| Error e -> Hashtbl.clear sturdy_refs
let client_response_rpc t (cid : Types.command_id) (result : Types.result) =
let open Api.Client.Message.ClientResponse in
let request, params = Capability.Request.create Params.init_pointer in
let open Api.Builder.Message in
Create an empty result type as recognised by Capnp
let result_rpc = Result.init_root () in
As result is a Capnp union , match over the variant result argument
and set the appropriate Capnp value of result_rpc
and set the appropriate Capnp value of result_rpc *)
(match result with
| Failure ->
Result.failure_set result_rpc
| Success ->
Result.success_set result_rpc
| ReadSuccess v ->
Result.read_set result_rpc v);
Params.result_set_reader params (Result.to_reader result_rpc) |> ignore;
Params.command_id_set_exn params cid;
Capability.call_for_unit t method_id request >|= function
| Ok () -> ()
| Error e -> Hashtbl.clear sturdy_refs
let decision_rpc t (p : Types.proposal) =
let open Api.Client.Message.Decision in
let request, params = Capability.Request.create Params.init_pointer in
let open Api.Builder.Message in
Create an empty command type as recognised by Capnp
let cmd_rpc = Command.init_root () in
Construct a command struct for Capnp from the cmd argument given
let (slot_number, ((id,uri), command_id, operation)) = p in
Command.client_id_set cmd_rpc (Core.Uuid.to_string id);
Command.client_uri_set cmd_rpc (Uri.to_string uri);
Command.command_id_set_exn cmd_rpc command_id;
Construct an operation struct here
let oper_rpc = (Command.Operation.init_root ()) in
(match operation with
| Nop ->
Command.Operation.nop_set oper_rpc
| Create(k,v) ->
let create = (Command.Operation.create_init oper_rpc) in
Command.Operation.Create.key_set_exn create k;
Command.Operation.Create.value_set create v;
| Read (k) ->
let read = Command.Operation.read_init oper_rpc in
Command.Operation.Read.key_set_exn read k;
| Update(k,v) ->
let update = Command.Operation.update_init oper_rpc in
Command.Operation.Update.key_set_exn update k;
Command.Operation.Update.value_set update v;
| Remove(k) ->
let remove = Command.Operation.remove_init oper_rpc in
Command.Operation.Remove.key_set_exn remove k);
(Command.operation_set_builder cmd_rpc oper_rpc |> ignore);
(Params.command_set_reader params (Command.to_reader cmd_rpc) |> ignore);
Params.slot_number_set_exn params slot_number;
Capability.call_for_unit t method_id request >|= function
| Ok () -> ()
| Error e -> Hashtbl.clear sturdy_refs
let proposal_rpc t (p : Types.proposal) =
let open Api.Client.Message.SendProposal in
let request, params = Capability.Request.create Params.init_pointer in
let open Api.Builder.Message in
Create an empty command type as recognised by Capnp
let cmd_rpc = Command.init_root () in
Construct a command struct for Capnp from the cmd argument given
let (slot_number, ((id,uri), command_id, operation)) = p in
Command.client_id_set cmd_rpc (Core.Uuid.to_string id);
Command.client_uri_set cmd_rpc (Uri.to_string uri);
Command.command_id_set_exn cmd_rpc command_id;
Construct an operation struct here
let oper_rpc = (Command.Operation.init_root ()) in
(match operation with
| Nop ->
Command.Operation.nop_set oper_rpc
| Create(k,v) ->
let create = (Command.Operation.create_init oper_rpc) in
Command.Operation.Create.key_set_exn create k;
Command.Operation.Create.value_set create v;
| Read (k) ->
let read = Command.Operation.read_init oper_rpc in
Command.Operation.Read.key_set_exn read k;
| Update(k,v) ->
let update = Command.Operation.update_init oper_rpc in
Command.Operation.Update.key_set_exn update k;
Command.Operation.Update.value_set update v;
| Remove(k) ->
let remove = Command.Operation.remove_init oper_rpc in
Command.Operation.Remove.key_set_exn remove k);
(Command.operation_set_builder cmd_rpc oper_rpc |> ignore);
(Params.command_set_reader params (Command.to_reader cmd_rpc) |> ignore);
Params.slot_number_set_exn params slot_number;
Capability.call_for_unit t method_id request >|= function
| Ok () -> ()
| Error e -> Hashtbl.clear sturdy_refs
type message = ClientRequestMessage of command
| ProposalMessage of proposal
| DecisionMessage of proposal
| ClientResponseMessage of command_id * result
(* Start a new server advertised at address (host, port).
   This server does not serve with TLS and the service ID for the
   server is derived from its address.
   Returns (via Lwt) the sturdy URI clients can use to reach the service.
   NOTE(review): relies on a [local] service constructor defined elsewhere
   in this module. *)
let start_new_server ?request_callback ?proposal_callback ?response_callback ?phase1_callback ?phase2_callback host port =
  let listen_address = `TCP (host, port) in
  let config = Capnp_rpc_unix.Vat_config.create ~serve_tls:false ~secret_key:`Ephemeral listen_address in
  let service_id = Capnp_rpc_lwt.Restorer.Id.derived ~secret:"" (host ^ (string_of_int port)) in
  let restore = Capnp_rpc_lwt.Restorer.single service_id (local ?request_callback ?proposal_callback ?response_callback ?phase1_callback ?phase2_callback () ) in
  Capnp_rpc_unix.serve config ~restore >|= fun vat ->
  Capnp_rpc_unix.Vat.sturdy_uri vat service_id;;
(* Compute, without contacting anything, the Capnp URI that a server
   started at (host, port) by [start_new_server] would advertise: same
   derived service id, tcp location and insecure digest. *)
let uri_from_address host port =
  let location = Capnp_rpc_unix.Network.Location.tcp host port in
  let digest = Capnp_rpc_lwt.Auth.Digest.insecure in
  let id =
    Capnp_rpc_lwt.Restorer.Id.derived ~secret:"" (host ^ string_of_int port)
  in
  Capnp_rpc_unix.Network.Address.to_uri
    ((location, digest), Capnp_rpc_lwt.Restorer.Id.to_string id)
(* Takes a Capnp URI for a service and returns the Lwt capability of that
   service. Successful connections are cached in [sturdy_refs]; a cached
   capability is returned without reconnecting. *)
let rec service_from_uri uri =
  (* Fast path: reuse a previously established capability. *)
  try Lwt.return (Some (Hashtbl.find sturdy_refs uri))
  with Not_found ->
    try
      let client_vat = Capnp_rpc_unix.client_only_vat () in
      let sr = Capnp_rpc_unix.Vat.import_exn client_vat uri in
      Sturdy_ref.connect sr >>= (function
        | Ok capability ->
          Hashtbl.add sturdy_refs uri capability;
          Lwt.return_some capability
        | Error _ -> Lwt.return_none)
    with exn ->
      (* Was [failwith "hello"]: still raises Failure (same exception type
         callers may catch), but with a diagnosable message. *)
      failwith
        (Printf.sprintf "service_from_uri: could not import %s (%s)"
           (Uri.to_string uri) (Printexc.to_string exn))
(* Derive the service from an address by indirectly computing the URI.
   This is mostly for legacy reasons - all of the local node code sends
   messages based on URIs.
   TODO: Modify the code so that we don't need this extra indirection *)
let service_from_addr host port =
  uri_from_address host port |> service_from_uri
(* Dispatch [message] to the service behind [uri], selecting the matching
   RPC stub. If the URI cannot be resolved the message is silently
   dropped (Lwt.return_unit). *)
let send_request message uri =
  (* Get the service for the given URI *)
  service_from_uri uri >>= function
  | None -> Lwt.return_unit
  | Some service -> (
    match message with
    | ClientRequestMessage cmd ->
      client_request_rpc service cmd;
    | DecisionMessage p ->
      decision_rpc service p;
    | ProposalMessage p ->
      proposal_rpc service p;
    | ClientResponseMessage (cid, result) ->
      client_response_rpc service cid result)
(* Send a Phase1 RPC carrying ballot [b] (serialized to a JSON string) to
   capability [t]; the reply's result field is parsed back from JSON. *)
let phase1_rpc t (b : Ballot.t) =
  let open Api.Client.Message.Phase1 in
  let request, params = Capability.Request.create Params.init_pointer in
  let open Api.Builder.Message in
  Params.ballot_number_set params (b |> Ballot.serialize |> Yojson.Basic.to_string);
  Capability.call_for_value_exn t method_id request >|=
  Results.result_get >|=
  Yojson.Basic.from_string >|=
  deserialize_phase1_response
(* Resolve [uri] and send a Phase1 message carrying ballot [b].
   Previously the [None] case was missing, so a failed service lookup
   raised Match_failure; now it fails the promise with a clear message. *)
let send_phase1_message (b : Ballot.t) uri =
  service_from_uri uri >>= function
  | Some service -> phase1_rpc service b
  | None -> Lwt.fail_with "send_phase1_message: no service for given URI";;
(* Send a Phase2 RPC carrying pvalue [pval] (serialized to a JSON string)
   to capability [t]; the reply's result field is parsed back from JSON. *)
let phase2_rpc t (pval : Pval.t) =
  let open Api.Client.Message.Phase2 in
  let request, params = Capability.Request.create Params.init_pointer in
  let open Api.Builder.Message in
  Params.pvalue_set params (pval |> Pval.serialize |> Yojson.Basic.to_string);
  Capability.call_for_value_exn t method_id request >|=
  Results.result_get >|=
  Yojson.Basic.from_string >|=
  deserialize_phase2_response
(* Resolve [uri] and send a Phase2 message carrying [pval].
   Previously the [None] case was missing, so a failed service lookup
   raised Match_failure; now it fails the promise with a clear message. *)
let send_phase2_message (pval : Pval.t) uri =
  service_from_uri uri >>= function
  | Some service -> phase2_rpc service pval
  | None -> Lwt.fail_with "send_phase2_message: no service for given URI"
|
4efbbcd1c5d12775ab1231ba7f9f858d37581772adbed516ddc53c1704800fe8 | gebi/jungerl | xmerl_scan.erl | The contents of this file are subject to the Erlang Public License ,
Version 1.0 , ( the " License " ) ; you may not use this file except in
%%% compliance with the License. You may obtain a copy of the License at
%%%
%%%
Software distributed under the License is distributed on an " AS IS "
%%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%%% the License for the specific language governing rights and limitations
%%% under the License.
%%%
The Original Code is xmerl-0.15
%%%
The Initial Developer of the Original Code is Ericsson Telecom
AB . Portions created by Ericsson are Copyright ( C ) , 1998 , Ericsson
Telecom AB . All Rights Reserved .
%%%
%%% Contributor(s):
< > :
< >
%%%
%%%----------------------------------------------------------------------
# 0 . BASIC INFORMATION
%%%----------------------------------------------------------------------
%%% File: xmerl_scan.erl
Author : < >
Description : Simgle - pass XML scanner . See xmerl.hrl for data defs .
%%%
%%% Modules used : ets, file, filename, io, lists, ucs, uri
%%%
%%%----------------------------------------------------------------------
%% @doc
%% The XML parser is activated through
%% <tt>xmerl_scan:string/[1,2]</tt> or
%% <tt>xmerl_scan:file/[1,2]</tt>.
%% It returns records of the type defined in xmerl.hrl.
%% See also <a href="xmerl_examples.html">tutorial</a> on customization
%% functions.
%% @type global_state(). <p>
%% The global state of the scanner, represented by the #xmerl_scanner{} record.
%% </p>
%% @type option_list(). <p>Options allows to customize the behaviour of the
%% scanner.
%% See also <a href="xmerl_examples.html">tutorial</a> on customization
%% functions.
%% </p>
%% Possible options are:
%% <dl>
%% <dt><code>{acc_fun, Fun}</code></dt>
%% <dd>Call back function to accumulate contents of entity.</dd>
%% <dt><code>{continuation_fun, Fun} |
%%           {continuation_fun, Fun, ContinuationState}</code></dt>
%% <dd>Call back function to decide what to do if the scanner runs into eof
%% before the document is complete.</dd>
%% <dt><code>{event_fun, Fun} |
%% {event_fun, Fun, EventState}</code></dt>
%% <dd>Call back function to handle scanner events.</dd>
%% <dt><code>{fetch_fun, Fun} |
%%           {fetch_fun, Fun, FetchState}</code></dt>
%% <dd>Call back function to fetch an external resource.</dd>
%% <dt><code>{hook_fun, Fun} |
%% {hook_fun, Fun, HookState}</code></dt>
%% <dd>Call back function to process the document entities once
%% identified.</dd>
%% <dt><code>{close_fun, Fun}</code></dt>
%% <dd>Called when document has been completely parsed.</dd>
%% <dt><code>{rules, ReadFun, WriteFun, RulesState} |
%% {rules, Rules}</code></dt>
%% <dd>Handles storing of scanner information when parsing.</dd>
%% <dt><code>{user_state, UserState}</code></dt>
%% <dd>Global state variable accessible from all customization functions</dd>
%%
%% <dt><code>{fetch_path, PathList}</code></dt>
%% <dd>PathList is a list of
%% directories to search when fetching files. If the file in question
%% is not in the fetch_path, the URI will be used as a file
%% name.</dd>
%% <dt><code>{space, Flag}</code></dt>
%% <dd>'preserve' (default) to preserve spaces, 'normalize' to
%% accumulate consecutive whitespace and replace it with one space.</dd>
%% <dt><code>{line, Line}</code></dt>
%% <dd>To specify starting line for scanning in document which contains
%% fragments of XML.</dd>
%% <dt><code>{namespace_conformant, Flag}</code></dt>
%% <dd>Controls whether to behave as a namespace conformant XML parser,
%% 'false' (default) to not otherwise 'true'.</dd>
%% <dt><code>{validation, Flag}</code></dt>
%% <dd>Controls whether to process as a validating XML parser,
%% 'false' (default) to not otherwise 'true'.</dd>
%% <dt><code>{quiet, Flag}</code></dt>
%% <dd>Set to 'true' if xmerl should behave quietly and not output any info
%% to standard output (default 'false').</dd>
%% <dt><code>{doctype_DTD, DTD}</code></dt>
%% <dd>Allows to specify DTD name when it isn't available in the XML
%% document.</dd>
%% <dt><code>{xmlbase, Dir}</code></dt>
%% <dd>XML Base directory. If using string/1 default is current directory.
%% If using file/1 default is directory of given file.</dd>
%% <dt><code>{encoding, Enc}</code></dt>
%% <dd>Set default character set used (default UTF-8).
%% This character set is used only if not explicitly given by the XML
%% declaration. </dd>
%% </dl>
%% @end
%% Only used internally are:
%% <dt><code>{environment, Env}</code></dt>
%% <dd>What is this?</dd>
%% <dt><code>{text_decl, Bool}</code></dt>
%% <dd>What is this?</dd>
-module(xmerl_scan).
-vsn('0.19').
-date('03-09-16').
%% main API
-export([string/1, string/2,
file/1, file/2]).
%% access functions for various states
-export([user_state/1, user_state/2,
event_state/1, event_state/2,
hook_state/1, hook_state/2,
rules_state/1, rules_state/2,
fetch_state/1, fetch_state/2,
cont_state/1, cont_state/2]).
%% helper functions. To xmerl_lib ??
-export([accumulate_whitespace/4]).
%%-define(debug, 1).
-include("xmerl.hrl"). % record def, macros
-include_lib("kernel/include/file.hrl").
%% Report a fatal scanner error: print diagnostics unless the scanner is
%% in quiet mode, then delegate to fatal/2 (defined elsewhere in this
%% module), which aborts the scan.
-define(fatal(Reason, S),
	if
	    S#xmerl_scanner.quiet ->
		ok;
	    true ->
		ok=io:format("~p- fatal: ~p~n", [?LINE, Reason])
	end,
	fatal(Reason, S)).
%% Replace the user_state field of scanner state S with U.
-define(ustate(U, S), S#xmerl_scanner{user_state = U}).
%% Functions to access the various states
%%% @spec user_state(S::global_state()) -> global_state()
%%% @equiv user_state(UserState,S)
user_state(#xmerl_scanner{user_state = S}) -> S.
%%% @spec event_state(S::global_state()) -> global_state()
%%% @equiv event_state(EventState,S)
event_state(#xmerl_scanner{fun_states = #xmerl_fun_states{event = S}}) -> S.
%%% @spec hook_state(S::global_state()) -> global_state()
%%% @equiv hook_state(HookState,S)
hook_state(#xmerl_scanner{fun_states = #xmerl_fun_states{hook = S}}) -> S.
%%% @spec rules_state(S::global_state()) -> global_state()
%%% @equiv rules_state(RulesState,S)
rules_state(#xmerl_scanner{fun_states = #xmerl_fun_states{rules = S}}) -> S.
%%% @spec fetch_state(S::global_state()) -> global_state()
%%% @equiv fetch_state(FetchState,S)
fetch_state(#xmerl_scanner{fun_states = #xmerl_fun_states{fetch = S}}) -> S.
%%% @spec cont_state(S::global_state()) -> global_state()
%%% @equiv cont_state(ContinuationState,S)
cont_state(#xmerl_scanner{fun_states = #xmerl_fun_states{cont = S}}) -> S.
%%%% Functions to modify the various states
%%% @spec user_state(UserState, S::global_state()) -> global_state()
%%% @doc For controlling the UserState, to be used in a user function.
%%% See <a href="xmerl_examples.html">tutorial</a> on customization functions.
user_state(X, S) ->
    S#xmerl_scanner{user_state = X}.
%%% @spec event_state(EventState, S::global_state()) -> global_state()
%%% @doc For controlling the EventState, to be used in an event
%%% function, and called at the beginning and at the end of a parsed entity.
%%% See <a href="xmerl_examples.html">tutorial</a> on customization functions.
event_state(X, S=#xmerl_scanner{fun_states = FS}) ->
    FS1 = FS#xmerl_fun_states{event = X},
    S#xmerl_scanner{fun_states = FS1}.
%%% @spec hook_state(HookState, S::global_state()) -> global_state()
%%% @doc For controlling the HookState, to be used in a hook
%%% function, and called when the parser has parsed a complete entity.
%%% See <a href="xmerl_examples.html">tutorial</a> on customization functions.
hook_state(X, S=#xmerl_scanner{fun_states = FS}) ->
    FS1 = FS#xmerl_fun_states{hook = X},
    S#xmerl_scanner{fun_states = FS1}.
%%% @spec rules_state(RulesState, S::global_state()) -> global_state()
%%% @doc For controlling the RulesState, to be used in a rules
%%% function, and called when the parser store scanner information in a rules
%%% database.
%%% See <a href="xmerl_examples.html">tutorial</a> on customization functions.
rules_state(X, S=#xmerl_scanner{fun_states = FS}) ->
    FS1 = FS#xmerl_fun_states{rules = X},
    S#xmerl_scanner{fun_states = FS1}.
%%% @spec fetch_state(FetchState, S::global_state()) -> global_state()
%%% @doc For controlling the FetchState, to be used in a fetch
%%% function, and called when the parser fetch an external resource (eg. a DTD).
%%% See <a href="xmerl_examples.html">tutorial</a> on customization functions.
fetch_state(X, S=#xmerl_scanner{fun_states = FS}) ->
    FS1 = FS#xmerl_fun_states{fetch = X},
    S#xmerl_scanner{fun_states = FS1}.
%%% @spec cont_state(ContinuationState, S::global_state()) -> global_state()
%%% @doc For controlling the ContinuationState, to be used in a continuation
%%% function, and called when the parser encounters the end of the byte stream.
%%% See <a href="xmerl_examples.html">tutorial</a> on customization functions.
cont_state(X, S=#xmerl_scanner{fun_states = FS}) ->
    FS1 = FS#xmerl_fun_states{cont = X},
    S#xmerl_scanner{fun_states = FS1}.
%% @spec file(Filename::string()) -> {xmlElement(),Rest}
%%   Rest = list()
%% @equiv file(Filename, [])
file(F) ->
    file(F, []).
%% @spec file(Filename::string(), Options::option_list()) -> {xmlElement(),Rest}
%%   Rest = list()
%% @doc Parse file containing an XML document
file(F, Options) ->
    %% NOTE(review): ExtCharset is passed to int_file/3 but that function
    %% ignores its third argument; the encoding option is read again via
    %% initial_state/2 later.
    ExtCharset=case lists:keysearch(encoding,1,Options) of
		   {value,{_,Val}} -> Val;
		   false -> undefined
	       end,
    case int_file(F,Options,ExtCharset) of
	{Res, Tail,S=#xmerl_scanner{close_fun=Close}} ->
	    Close(S), % for side effects only - final state is dropped
	    {Res,Tail};
	{error, Reason} ->
	    {error, Reason};
	Other ->
	    {error, Other}
    end.
%% Read the whole file and scan it as a string, using the file's directory
%% as the XML base. _ExtCharset is currently unused.
int_file(F, Options,_ExtCharset) ->
 %   io:format("int_file F=~p~n",[F]),
    case file:read_file(F) of
	{ok, Bin} ->
	    int_string(binary_to_list(Bin), Options, filename:dirname(F));
	Error ->
	    Error
    end.
%% As int_file/3, but only the prolog/text declaration is scanned
%% (via int_string_decl/3).
int_file_decl(F, Options,_ExtCharset) ->
 %   io:format("int_file_decl F=~p~n",[F]),
    case file:read_file(F) of
	{ok, Bin} ->
	    int_string_decl(binary_to_list(Bin), Options, filename:dirname(F));
	Error ->
	    Error
    end.
%% @spec string(Text::list()) -> {xmlElement(),Rest}
%%   Rest = list()
%% @equiv string(Test, [])
string(Str) ->
    string(Str, []).
%% @spec string(Text::list(),Options::option_list()) -> {xmlElement(),Rest}
%%   Rest = list()
%% @doc Parse string containing an XML document
string(Str, Options) ->
    case int_string(Str, Options) of
	{Res, Tail, S=#xmerl_scanner{close_fun = Close}} ->
	    Close(S), % for side effects only - final state is dropped
	    {Res,Tail};
	{error, Reason} ->
	    {error, Reason}; % (This can't happen, currently)
	Other ->
	    {error, Other}
    end.
%% Scan a string using the current working directory as the XML base.
int_string(Str, Options) ->
    {ok, XMLBase} = file:get_cwd(),
    int_string(Str, Options, XMLBase).
%% Detect the character encoding of Str, normalise the input accordingly,
%% then run the full document scan.
int_string(Str, Options, XMLBase) ->
    S=initial_state0(Options,XMLBase),
    case xmerl_lib:detect_charset(S#xmerl_scanner.encoding,Str) of
	{auto,'iso-10646-utf-1',Str2} ->
	    scan_document(Str2, S#xmerl_scanner{encoding="iso-10646-utf-1"});
	{external,'iso-10646-utf-1',Str2} ->
	    scan_document(Str2, S#xmerl_scanner{encoding="iso-10646-utf-1"});
	{undefined,undefined,Str2} ->
	    scan_document(Str2, S);
	{external,ExtCharset,Str2} ->
	    scan_document(Str2, S#xmerl_scanner{encoding=atom_to_list(ExtCharset)})
    end.
%% Same charset detection as int_string/3, but only the declaration part
%% is scanned (scan_decl/2).
int_string_decl(Str, Options, XMLBase) ->
    S=initial_state0(Options,XMLBase),
    case xmerl_lib:detect_charset(S#xmerl_scanner.encoding,Str) of
	{auto,'iso-10646-utf-1',Str2} ->
	    scan_decl(Str2, S#xmerl_scanner{encoding="iso-10646-utf-1"});
	{external,'iso-10646-utf-1',Str2} ->
	    scan_decl(Str2, S#xmerl_scanner{encoding="iso-10646-utf-1"});
	{undefined,undefined,Str2} ->
	    scan_decl(Str2, S);
	{external,ExtCharset,Str2} ->
	    scan_decl(Str2, S#xmerl_scanner{encoding=atom_to_list(ExtCharset)})
    end.
%% Build the default scanner state (default event/hook/acc/fetch/close/
%% continuation/rules callbacks), then fold the user options into it.
initial_state0(Options,XMLBase) ->
    initial_state(Options, #xmerl_scanner{
			      event_fun = fun event/2,
			      hook_fun = fun hook/2,
			      acc_fun = fun acc/3,
			      fetch_fun = fun fetch/2,
			      close_fun = fun close/1,
			      continuation_fun = fun cont/3,
			      rules_read_fun = fun rules_read/3,
			      rules_write_fun = fun rules_write/4,
			      rules_delete_fun= fun rules_delete/3,
			      xmlbase = XMLBase
			     }).
%% Fold the option list into the scanner record, one clause per option.
%% When no rules table was supplied, a fresh public ets table is created
%% at the end (keep_rules stays false, so cleanup/1 deletes it again).
initial_state([{event_fun, F}|T], S) ->
    initial_state(T, S#xmerl_scanner{event_fun = F});
initial_state([{event_fun, F, ES}|T], S) ->
    S1 = event_state(ES, S#xmerl_scanner{event_fun = F}),
    initial_state(T, S1);
initial_state([{acc_fun, F}|T], S) ->
    initial_state(T, S#xmerl_scanner{acc_fun = F});
initial_state([{hook_fun, F}|T], S) ->
    initial_state(T, S#xmerl_scanner{hook_fun = F});
initial_state([{hook_fun, F, HS}|T], S) ->
    S1 = hook_state(HS, S#xmerl_scanner{hook_fun = F}),
    initial_state(T, S1);
initial_state([{close_fun, F}|T], S) ->
    initial_state(T, S#xmerl_scanner{close_fun = F});
initial_state([{fetch_fun, F}|T], S) ->
    initial_state(T, S#xmerl_scanner{fetch_fun = F});
initial_state([{fetch_fun, F, FS}|T], S) ->
    S1 = fetch_state(FS, S#xmerl_scanner{fetch_fun = F}),
    initial_state(T, S1);
initial_state([{fetch_path, P}|T], S) ->
    initial_state(T, S#xmerl_scanner{fetch_path = P});
initial_state([{continuation_fun, F}|T], S) ->
    initial_state(T, S#xmerl_scanner{continuation_fun = F});
initial_state([{continuation_fun, F, CS}|T], S) ->
    S1 = cont_state(CS, S#xmerl_scanner{continuation_fun = F}),
    initial_state(T, S1);
initial_state([{rules, R}|T], S) ->
    initial_state(T, S#xmerl_scanner{rules = R,
				     keep_rules = true});
initial_state([{rules, Read, Write, RS}|T], S) ->
    S1 = rules_state(RS, S#xmerl_scanner{rules_read_fun = Read,
					 rules_write_fun = Write,
					 keep_rules = true}),
    initial_state(T, S1);
initial_state([{user_state, F}|T], S) ->
    initial_state(T, S#xmerl_scanner{user_state = F});
initial_state([{space, L}|T], S) ->
    initial_state(T, S#xmerl_scanner{space = L});
initial_state([{line, L}|T], S) ->
    initial_state(T, S#xmerl_scanner{line = L});
initial_state([{namespace_conformant, F}|T], S) when F==true; F==false ->
    initial_state(T, S#xmerl_scanner{namespace_conformant = F});
initial_state([{validation, F}|T], S) when F==true; F==false ->
    initial_state(T, S#xmerl_scanner{validation = F});
initial_state([{quiet, F}|T], S) when F==true; F==false ->
    initial_state(T, S#xmerl_scanner{quiet = F});
initial_state([{doctype_DTD,DTD}|T], S) ->
    initial_state(T,S#xmerl_scanner{doctype_DTD = DTD});
initial_state([{text_decl,Bool}|T], S) ->
    initial_state(T,S#xmerl_scanner{text_decl=Bool});
initial_state([{environment,Env}|T], S) ->
    initial_state(T,S#xmerl_scanner{environment=Env});
initial_state([{xmlbase, D}|T], S) ->
    initial_state(T, S#xmerl_scanner{xmlbase = D});
initial_state([{encoding, Enc}|T], S) ->
    initial_state(T, S#xmerl_scanner{encoding = Enc});
initial_state([], S=#xmerl_scanner{rules = undefined}) ->
    Tab = ets:new(rules, [set, public]),
    S#xmerl_scanner{rules = Tab};
initial_state([], S) ->
    S.
%%% -----------------------------------------------------
%%% Default modifier functions
%%% Hooks:
%%% - {element, Line, Name, Attrs, Content}
%%% - {processing_instruction, Line, Data}
%% Default hook callback: accept the object unchanged.
hook(X, State) ->
    {X, State}.
%%% Events:
%%%
%%% #xmerl_event{event : started | ended,
%%%              line : integer(),
%%%              col : integer(),
%%%              data}
%%%
%%% Data            Events
%%% document        started, ended
%%% #xmlElement     started, ended
%%% #xmlAttribute   ended
%%% #xmlPI          ended
%%% #xmlComment     ended
%%% #xmlText        ended
%% Default event callback: ignore the event.
event(_X, S) ->
    S.
%% The acc/3 function can return either {Acc', S'} or {Acc', Pos', S'},
%% where Pos' can be derived from X#xmlElement.pos, X#xmlText.pos, or
%% X#xmlAttribute.pos (whichever is the current object type.)
%% The acc/3 function is not allowed to redefine the type of object
%% being defined, but _is_ allowed to either ignore it or split it
%% into multiple objects (in which case {Acc',Pos',S'} should be returned.)
%% If {Acc',S'} is returned, Pos will be incremented by 1 by default.
%% Below is an example of an acceptable operation
acc(X = #xmlText{value = Text}, Acc, S) ->
    {[X#xmlText{value = lists:flatten(Text)}|Acc], S};
acc(X, Acc, S) ->
    {[X|Acc], S}.
%% Default fetch callback: treat both system and public external IDs as
%% URIs to be resolved locally via fetch_URI/2.
fetch({system, URI}, S) ->
    fetch_URI(URI, S);
fetch({public, _PublicID, URI}, S) ->
    fetch_URI(URI, S).
%%% Always assume an external resource can be found locally! Thus
%%% don't bother fetching with e.g. HTTP. Returns the path where the
%%% resource is found. The path to the external resource is given by
%%% URI directly or the option fetch_path (additional paths) or
%%% directory (base path to external resource)
fetch_URI(URI, S) ->
    %% assume URI is a filename
    Split = filename:split(URI),
    Filename = fun([])->[];(X)->lists:last(X) end (Split),
    Fullname =
	case Split of %% how about Windows systems?
	    %% This clause head was lost in the corrupted copy (only its
	    %% comment remained), leaving Name unbound - restored.
	    ["file:"|Name] ->
		%% absolute path, see RFC2396 sect 3
		%% file:/dtd_name
		filename:join(["/"|Name]);
	    ["/"|Rest] when Rest /= [] ->
		%% absolute path name
		URI;
	    ["http:"|_Rest] ->
		{http,URI};
	    [] -> %% empty systemliteral
		[];
	    _ ->
		filename:join(S#xmerl_scanner.xmlbase, URI)
	end,
    Path = path_locate(S#xmerl_scanner.fetch_path, Filename, Fullname),
    ?dbg("fetch(~p) -> {file, ~p}.~n", [URI, Path]),
    {ok, Path, S}.
%% Search the fetch path for a local copy of the external resource.
%% An http URI or an empty system literal is passed through untouched;
%% otherwise the first directory containing a regular file named Base
%% wins, falling back to the precomputed FullName.
path_locate(_Dirs, _Base, {http,_}=URI) ->
    URI;
path_locate(_Dirs, _Base, []) ->
    [];
path_locate([], _Base, FullName) ->
    {file,FullName};
path_locate([Dir|Rest], Base, FullName) ->
    Candidate = filename:join(Dir, Base),
    case file:read_file_info(Candidate) of
	{ok, #file_info{type = regular}} ->
	    {file,Candidate};
	_Other ->
	    path_locate(Rest, Base, FullName)
    end.
%% Default continuation callback: no more bytes available, so invoke the
%% supplied exception fun.
cont(_F, Exception, US) ->
    Exception(US).
%% Default close callback: nothing to release.
close(S) ->
    S.
%%% -----------------------------------------------------
%%% Scanner
%%% [1] document ::= prolog element Misc*
%% Scan a whole document: prolog, root element and trailing Misc, firing
%% started/ended document events and (when the validation option is set)
%% validating the result before the rules table is cleaned up.
scan_document(Str0, S=#xmerl_scanner{event_fun = Event,
				     line = L, col = C,
				     environment=Env,
				     encoding=Charset,
				     validation=ValidateResult}) ->
    S1 = Event(#xmerl_event{event = started,
			    line = L,
			    col = C,
			    data = document}, S),
    %% Transform to given character set.
    %% Note that if another character set is given in the encoding
    %% attribute in a XML declaration that one will be used later
    Str=if
	    %% This condition line was lost in the corrupted copy (only its
	    %% inline comment remained) - restored: only transform when a
	    %% character set is known.
	    Charset =/= undefined -> % Default character set is UTF-8
		ucs:to_unicode(Str0,list_to_atom(Charset));
	    true ->
		Str0
	end,
    {"<"++T2, S2} = scan_prolog(Str, S1, _StartPos = 1),
    {Res, T3, S3} =scan_element(T2,S2,_StartPos = 1),
    {Tail, S4}=scan_misc(T3, S3, _StartPos = 1),
    S5 = #xmerl_scanner{} = Event(#xmerl_event{event = ended,
					       line = S4#xmerl_scanner.line,
					       col = S4#xmerl_scanner.col,
					       data = document}, S4),
    S6 = case ValidateResult of
	     false ->
		 cleanup(S5);
	     true when Env == element; Env == prolog ->
		 check_decl2(S5),
		 case xmerl_validate:validate(S5,Res) of
		     {'EXIT',{error,Reason}} ->
			 S5b=cleanup(S5),
			 ?fatal({failed_validation,Reason}, S5b);
		     {'EXIT',Reason} ->
			 S5b=cleanup(S5),
			 ?fatal({failed_validation,Reason}, S5b);
		     {error,Reason} ->
			 S5b=cleanup(S5),
			 ?fatal({failed_validation,Reason}, S5b);
		     {error,Reason,_Next} ->
			 S5b=cleanup(S5),
			 ?fatal({failed_validation,Reason}, S5b);
		     _XML ->
			 cleanup(S5)
		 end;
	     true ->
		 cleanup(S5)
	 end,
    {Res, Tail, S6}.
%% Scan only the prolog/declaration of a document (used by
%% int_string_decl/3). Returns {UserStateOrContent, Rest, State}.
scan_decl(Str, S=#xmerl_scanner{event_fun = Event,
				line = L, col = C,
				environment=_Env,
				encoding=_Charset,
				validation=_ValidateResult}) ->
    S1 = Event(#xmerl_event{event = started,
			    line = L,
			    col = C,
			    data = document}, S),
    case scan_prolog(Str, S1, _StartPos = 1) of
	{T2="<"++_, S2} ->
	    {{S2#xmerl_scanner.user_state,T2},[],S2};
	{[], S2}->
	    {[],[],S2};
	{T2, S2} ->
	    {_,_,S3} = scan_content(T2,S2,[],_Attrs=[],S2#xmerl_scanner.space,
				    _Lang=[],_Parents=[],#xmlNamespace{}),
	    {T2,[],S3}
    end.
%%% [22] Prolog
%%% prolog ::= XMLDecl? Misc* (doctypedecl Misc*)?
%%%
%% Text declaration may be empty
scan_prolog([], S=#xmerl_scanner{text_decl=true},_Pos) ->
    {[],S};
scan_prolog([], S=#xmerl_scanner{continuation_fun = F}, Pos) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_prolog(MoreBytes, S1, Pos) end,
      fun(S1) -> {[], S1} end,
      S);
%% An XML (or text) declaration is only legal at line 1, column 1; its
%% encoding attribute may trigger a re-transcoding of the rest of input.
scan_prolog("<?xml"++T,S0=#xmerl_scanner{encoding=Charset0,col=Col,line=L},Pos)
  when ?whitespace(hd(T)) ->
    {Charset,T3, S3}=
	if
	    Col==1,L==1,S0#xmerl_scanner.text_decl==true ->
		?dbg("prolog(\"<?xml\")~n", []),
		?bump_col(5),
		{_,T1,S1} = mandatory_strip(T,S0),
		{Decl,T2, S2}=scan_text_decl(T1,S1),
		Encoding=Decl#xmlDecl.encoding,
		{Encoding,T2, S2#xmerl_scanner{encoding=Encoding}};
	    Col==1,L==1 ->
		?dbg("prolog(\"<?xml\")~n", []),
		?bump_col(5),
		{Decl,T2, S2}=scan_xml_decl(T, S0),
		Encoding=Decl#xmlDecl.encoding,
		{Encoding,T2, S2#xmerl_scanner{encoding=Encoding}};
	    true ->
		?fatal({xml_declaration_must_be_first_in_doc,Col,L},S0)
	end,
    %% Now transform to declared character set.
    if
	Charset==Charset0 -> % Document already transformed to this charset!
	    scan_prolog(T3, S3, Pos);
	Charset0=/=undefined -> % Document transformed to other bad charset!
	    ?fatal({xml_declaration_must_be_first_in_doc,Col,L},S3);
	Charset=/=undefined -> % Document not previously transformed
	    T4=ucs:to_unicode(T3,list_to_atom(Charset)),
	    scan_prolog(T4, S3, Pos);
	true -> % No encoding info given
	    scan_prolog(T3, S3, Pos)
    end;
scan_prolog("<!DOCTYPE" ++ T, S0=#xmerl_scanner{environment=prolog,
						encoding=Charset}, Pos) ->
    ?dbg("prolog(\"<!DOCTYPE\")~n", []),
    ?bump_col(9),
    %% If no known character set assume it is UTF-8
    T1=if
	   Charset==undefined -> ucs:to_unicode(T,'utf-8');
	   true -> T
       end,
    {T2, S1} = scan_doctype(T1, S),
    scan_misc(T2, S1, Pos);
scan_prolog(Str, S0 = #xmerl_scanner{user_state=_US,encoding=Charset},Pos) ->
    ?dbg("prolog(\"<\")~n", []),
    %% Check for Comments, PI before possible DOCTYPE declaration
    ?bump_col(1),
    %% If no known character set assume it is UTF-8
    T=if
	  Charset==undefined -> ucs:to_unicode(Str,'utf-8');
	  true -> Str
      end,
    {T1, S1}=scan_misc(T, S, Pos),
    scan_prolog2(T1,S1,Pos).
%% Continue the prolog after any leading Misc: either a DOCTYPE
%% declaration, markup declarations (when scanning a DTD), or more Misc.
scan_prolog2([], S=#xmerl_scanner{continuation_fun = F}, Pos) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_prolog2(MoreBytes, S1, Pos) end,
      fun(S1) -> {[], S1} end,
      S);
scan_prolog2("<!DOCTYPE" ++ T, S0=#xmerl_scanner{environment=prolog}, Pos) ->
    ?dbg("prolog(\"<!DOCTYPE\")~n", []),
    ?bump_col(9),
    {T1, S1} = scan_doctype(T, S),
    scan_misc(T1, S1, Pos);
scan_prolog2(Str = "<!" ++ _, S, _Pos) ->
    ?dbg("prolog(\"<!\")~n", []),
    %% In e.g. a DTD, we jump directly to markup declarations
    scan_ext_subset(Str, S);
scan_prolog2(Str, S0 = #xmerl_scanner{user_state=_US},Pos) ->
    ?dbg("prolog(\"<\")~n", []),
    %% Check for more Comments and PI after DOCTYPE declaration
    ?bump_col(1),
    scan_misc(Str, S, Pos).
%%% [27] Misc ::= Comment | PI | S
%% Note:
%% - Neither of Comment and PI are returned in the resulting parsed
%%   structure.
%% - scan_misc/3 implements Misc* as that is how the rule is always used
scan_misc([], S=#xmerl_scanner{continuation_fun = F}, Pos) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_misc(MoreBytes, S1, Pos) end,
      fun(S1) -> {[], S1} end,
      S);
scan_misc("<!--" ++ T, S, Pos) -> % Comment
    {_, T1, S1} = scan_comment(T, S, Pos, _Parents = [], _Lang = []),
    scan_misc(T1,S1,Pos);
scan_misc("<?" ++ T, S0, Pos) -> % PI
    ?dbg("prolog(\"<?\")~n", []),
    ?bump_col(2),
    {_PI, T1, S1} = scan_pi(T, S, Pos),
    scan_misc(T1,S1,Pos);
scan_misc([H|T], S, Pos) when ?whitespace(H) ->
    ?dbg("prolog(whitespace)~n", []),
    scan_misc(T,S,Pos);
scan_misc(T,S,_Pos) ->
    {T,S}.
%% Drop the scratch ets rules table unless the caller asked to keep it
%% (keep_rules was set when the rules were supplied by the user).
cleanup(#xmerl_scanner{keep_rules = false, rules = Tab} = Scanner) ->
    ets:delete(Tab),
    Scanner#xmerl_scanner{rules = undefined};
cleanup(Scanner) ->
    Scanner.
%%% Prolog and Document Type Declaration XML 1.0 Section 2.8
%%% [23] XMLDecl ::= '<?xml' VersionInfo EncodingDecl? SDDecl? S? '?>'
%%% [24] VersionInfo ::= S 'version' Eq ("'" VersionNum "'" | '"' VersionNum '"')
scan_xml_decl(T, S) ->
    %% VersionInfo [24] is mandatory
    {_,T1,S2} = mandatory_strip(T,S),
    "version" ++ T2 = T1,
    {T3, S3} = scan_eq(T2, S2),
    {Vsn, T4, S4} = scan_xml_vsn(T3, S3),
    Attr = #xmlAttribute{name = version,
			 parents = [{xml, _XMLPos = 1}],
			 value = Vsn},
    scan_xml_decl(T4, S4, #xmlDecl{attributes = [Attr]}).
%% Scan the remainder of the XML declaration after the version attribute.
scan_xml_decl([], S=#xmerl_scanner{continuation_fun = F}, Decl) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_xml_decl(MoreBytes, S1, Decl) end,
      fun(S1) -> {[], [], S1} end,
      S);
scan_xml_decl("?>" ++ T, S0, Decl) ->
    ?bump_col(2),
    return_xml_decl(T,S,Decl);
scan_xml_decl(T,S=#xmerl_scanner{event_fun = _Event},Decl) ->
    {_,T1,S1}=mandatory_strip(T,S),
    scan_xml_decl2(T1,S1,Decl).
%% Optional encoding attribute of the XML declaration.
scan_xml_decl2("?>" ++ T, S0,Decl) ->
    ?bump_col(2),
    return_xml_decl(T,S,Decl);
scan_xml_decl2("encoding" ++ T, S0 = #xmerl_scanner{event_fun = Event},
	       Decl0 = #xmlDecl{attributes = Attrs}) ->
    %% [80] EncodingDecl
    ?bump_col(8),
    {T1, S1} = scan_eq(T, S),
    {EncName, T2, S2} = scan_enc_name(T1, S1),
    LowEncName=httpd_util:to_lower(EncName),
    Attr = #xmlAttribute{name = encoding,
			 parents = [{xml, _XMLPos = 1}],
			 value = LowEncName},
    Decl = Decl0#xmlDecl{encoding = LowEncName,
			 attributes = [Attr|Attrs]},
    S3 = #xmerl_scanner{} = Event(#xmerl_event{event = ended,
					       line = S0#xmerl_scanner.line,
					       col = S0#xmerl_scanner.col,
					       data = Attr}, S2),
    case T2 of
	"?>" ++ _T3 ->
	    scan_xml_decl3(T2,S3,Decl);
	_ ->
	    {_,T3,S4} = mandatory_strip(T2,S3),
	    scan_xml_decl3(T3, S4, Decl)
    end;
scan_xml_decl2(T="standalone" ++ _T,S,Decl) ->
    scan_xml_decl3(T,S,Decl).
%% Optional standalone attribute of the XML declaration.
scan_xml_decl3("?>" ++ T, S0,Decl) ->
    ?bump_col(2),
    return_xml_decl(T,S,Decl);
scan_xml_decl3("standalone" ++ T,S0 = #xmerl_scanner{event_fun = Event},
	       Decl0 = #xmlDecl{attributes = Attrs}) ->
    %% [32] SDDecl
    ?bump_col(10),
    {T1, S1} = scan_eq(T, S),
    {StValue,T2,S2}=scan_standalone_value(T1,S1),
    Attr = #xmlAttribute{name = standalone,
			 parents = [{xml, _XMLPos = 1}],
			 value = StValue},
    Decl = Decl0#xmlDecl{standalone = StValue,
			 attributes = [Attr|Attrs]},
    S3 = #xmerl_scanner{} = Event(#xmerl_event{event = ended,
					       line = S0#xmerl_scanner.line,
					       col = S0#xmerl_scanner.col,
					       data = Attr}, S2),
    {_,T3,S4} = strip(T2,S3),
    "?>" ++ T4 = T3,
    return_xml_decl(T4, S4#xmerl_scanner{col=S4#xmerl_scanner.col+2}, Decl).
%% Finish the declaration: restore attribute order, fire the ended event
%% and pass the #xmlDecl{} through the hook function.
return_xml_decl(T,S=#xmerl_scanner{hook_fun = Hook,
				   event_fun = Event},
		Decl0 = #xmlDecl{attributes = Attrs}) ->
    ?strip1,
    Decl = Decl0#xmlDecl{attributes = lists:reverse(Attrs)},
    S2 = #xmerl_scanner{} = Event(#xmerl_event{event = ended,
					       line = S#xmerl_scanner.line,
					       col = S#xmerl_scanner.col,
					       data = Decl}, S1),
    {Ret, S3} = Hook(Decl, S2),
    {Ret, T1, S3}.
%% Parse the quoted yes/no value of the standalone attribute.
%% NOTE(review): only the 'yes' clauses record standalone=yes in the
%% scanner state; 'no' leaves the record field at its default.
scan_standalone_value("'yes'" ++T,S0)->
    ?bump_col(5),
    {'yes',T,S#xmerl_scanner{standalone=yes}};
scan_standalone_value("\"yes\"" ++T,S0)->
    ?bump_col(5),
    {'yes',T,S#xmerl_scanner{standalone=yes}};
scan_standalone_value("'no'" ++T,S0) ->
    ?bump_col(4),
    {'no',T,S};
scan_standalone_value("\"no\"" ++T,S0) ->
    ?bump_col(4),
    {'no',T,S}.
%%%
%%% Text declaration XML 1.0 section 4.3.1
%%% [77] TextDecl ::= '<?xml' VersionInfo? EncodingDecl S? '?>'
scan_text_decl(T,S=#xmerl_scanner{event_fun = Event}) ->
    {#xmlDecl{attributes=Attrs}=Decl0,T1,S1} = scan_optional_version(T,S),
    "encoding" ++ T2 = T1,
    S2 = S1#xmerl_scanner{col = S1#xmerl_scanner.col + 8},
    {T3, S3} = scan_eq(T2, S2),
    {EncName, T4, S4} = scan_enc_name(T3, S3),
    LowEncName=httpd_util:to_lower(EncName),
    ?strip5,
    Attr = #xmlAttribute{name = encoding,
			 parents = [{xml,1}],
			 value = LowEncName},
    Decl = Decl0#xmlDecl{encoding = LowEncName,
			 attributes = [Attr|Attrs]},
    S6=#xmerl_scanner{} = Event(#xmerl_event{event = ended,
					     line = S5#xmerl_scanner.line,
					     col = S5#xmerl_scanner.col,
					     data = Attr}, S5),
    scan_text_decl(T5,S6,Decl).
%% Finish the text declaration at '?>': fire the ended event and pass the
%% #xmlDecl{} through the hook function.
scan_text_decl("?>"++T,S0 = #xmerl_scanner{hook_fun = Hook,
					   event_fun = Event},
	       Decl0 = #xmlDecl{attributes = Attrs}) ->
    ?bump_col(2),
    ?strip1,
    Decl = Decl0#xmlDecl{attributes = lists:reverse(Attrs)},
    S2 = #xmerl_scanner{} = Event(#xmerl_event{event = ended,
					       line = S0#xmerl_scanner.line,
					       col = S0#xmerl_scanner.col,
					       data = Decl}, S1),
    {Ret, S3} = Hook(Decl, S2),
    {Ret, T1, S3}.
%% The version attribute is optional in a text declaration.
scan_optional_version("version"++T,S0) ->
    ?bump_col(7),
    ?strip1,
    {T2, S2} = scan_eq(T1, S1),
    {Vsn, T3, S3} = scan_xml_vsn(T2, S2),
    {_,T4,S4} = mandatory_strip(T3,S3),
    Attr = #xmlAttribute{name = version,parents = [{xml,1}],value = Vsn},
    {#xmlDecl{attributes=[Attr]},T4,S4};
scan_optional_version(T,S) ->
    {#xmlDecl{attributes=[]},T,S}.
%% [81] EncName
%% The encoding value must be quoted; remember the quote as the Delim to
%% look for when accumulating the name.
scan_enc_name([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_enc_name(MoreBytes, S1) end,
      fun(S1) -> ?fatal(expected_encoding_name, S1) end,
      S);
%% Was `H >= $"; H =< $'` - an always-true disjunction (every character is
%% either >= 34 or =< 39), which accepted ANY character as the opening
%% quote. Fixed to accept only the two quote characters, consistent with
%% scan_xml_vsn/2.
scan_enc_name([H|T], S0) when H == $"; H == $' ->
    ?bump_col(1),
    scan_enc_name(T, S, H, []).
%% The first character of an encoding name must be a letter; the rest is
%% accumulated by scan_enc_name2/4.
scan_enc_name([], S=#xmerl_scanner{continuation_fun = F}, Delim, Acc) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_enc_name(MoreBytes, S1, Delim, Acc) end,
      fun(S1) -> ?fatal(expected_encoding_name, S1) end,
      S);
scan_enc_name([H|T], S0, Delim, Acc) when H >= $a, H =< $z ->
    ?bump_col(1),
    scan_enc_name2(T, S, Delim, [H|Acc]);
scan_enc_name([H|T], S0, Delim, Acc) when H >= $A, H =< $Z ->
    ?bump_col(1),
    scan_enc_name2(T, S, Delim, [H|Acc]);
scan_enc_name([H|_T],S,_Delim,_Acc) ->
    ?fatal({error,{unexpected_character_in_Enc_Name,H}},S).
%% Accumulate [A-Za-z0-9._-] characters until the matching quote Delim
%% is found, then return the name in its original order.
scan_enc_name2([], S=#xmerl_scanner{continuation_fun = F}, Delim, Acc) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_enc_name2(MoreBytes, S1, Delim, Acc) end,
      fun(S1) -> ?fatal(expected_encoding_name, S1) end,
      S);
scan_enc_name2([H|T], S0, H, Acc) ->
    ?bump_col(1),
    {lists:reverse(Acc), T, S};
scan_enc_name2([H|T], S0, Delim, Acc) when H >= $a, H =< $z ->
    ?bump_col(1),
    scan_enc_name2(T, S, Delim, [H|Acc]);
scan_enc_name2([H|T], S0, Delim, Acc) when H >= $A, H =< $Z ->
    ?bump_col(1),
    scan_enc_name2(T, S, Delim, [H|Acc]);
scan_enc_name2([H|T], S0, Delim, Acc) when H >= $0, H =< $9 ->
    ?bump_col(1),
    scan_enc_name2(T, S, Delim, [H|Acc]);
scan_enc_name2([H|T], S0, Delim, Acc) when H == $.; H == $_; H == $- ->
    ?bump_col(1),
    scan_enc_name2(T, S, Delim, [H|Acc]).
[ 26 ] VersionNum
%%% VersionNum ::= ([a-zA-Z0-9_.:] | '-')+
%% scan_xml_vsn/2
%% Scan the quoted version number of an XML declaration ('"' or '\'' as
%% delimiter) and delegate to xml_vsn/4 for the characters inside.
scan_xml_vsn([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_xml_vsn(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_xml_vsn([H|T], S) when H==$"; H==$'->
    xml_vsn(T, S#xmerl_scanner{col = S#xmerl_scanner.col+1}, H, []).
%% xml_vsn/4
%% Accumulate a version number per production [26]: letters, digits and the
%% punctuation "_.:-", terminated by the matching quote Delim.
%% Returns {VersionString, Rest, Scanner}.
xml_vsn([], S=#xmerl_scanner{continuation_fun = F}, Delim, Acc) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> xml_vsn(MoreBytes, S1, Delim, Acc) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
xml_vsn([H|T], S=#xmerl_scanner{col = C}, H, Acc) ->
    %% Matching close quote -- done.
    {lists:reverse(Acc), T, S#xmerl_scanner{col = C+1}};
xml_vsn([H|T], S=#xmerl_scanner{col = C},Delim, Acc) when H >= $a, H =< $z ->
    xml_vsn(T, S#xmerl_scanner{col = C+1}, Delim, [H|Acc]);
xml_vsn([H|T], S=#xmerl_scanner{col = C},Delim, Acc) when H >= $A, H =< $Z ->
    xml_vsn(T, S#xmerl_scanner{col = C+1}, Delim, [H|Acc]);
xml_vsn([H|T], S=#xmerl_scanner{col = C},Delim, Acc) when H >= $0, H =< $9 ->
    xml_vsn(T, S#xmerl_scanner{col = C+1}, Delim, [H|Acc]);
xml_vsn([H|T], S=#xmerl_scanner{col = C}, Delim, Acc) ->
    case lists:member(H, "_.:-") of
        true ->
            xml_vsn(T, S#xmerl_scanner{col = C+1}, Delim, [H|Acc]);
        false ->
            ?fatal({invalid_vsn_char, H}, S)
    end.
%% [16] PI ::= '<?' PITarget (S (Char* - (Char* '?>' Char*)))? '?>'
%% scan_pi/3
%% Scan a processing instruction (production [16]). Targets beginning with
%% [xX][mM][lL] are reserved, so those are routed to scan_wellknown_pi/3;
%% any other target is scanned as a name and handed to scan_pi/7.
scan_pi([], S=#xmerl_scanner{continuation_fun = F}, Pos) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_pi(MoreBytes, S1, Pos) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_pi(Str = [H1,H2,H3 | T],S=#xmerl_scanner{line = L, col = C}, Pos)
  when H1==$x;H1==$X ->
    %% names beginning with [xX][mM][lL] are reserved for future use.
    if
        ((H2==$m) or (H2==$M)) and
        ((H3==$l) or (H3==$L)) ->
            scan_wellknown_pi(T,S,Pos);
        true ->
            {Target, _NamespaceInfo, T1, S1} = scan_name(Str, S),
            scan_pi(T1, S1, Target, L, C, Pos, [])
    end;
scan_pi(Str, S=#xmerl_scanner{line = L, col = C}, Pos) ->
    {Target, _NamespaceInfo, T1, S1} = scan_name(Str, S),
    scan_pi(T1, S1, Target, L, C, Pos,[]).
%%% More info on xml-stylesheet can be found at:
%%% "Associating Style Sheets with XML documents", Version 1.0,
%%% W3C Recommendation 29 June 1999 (http://www.w3.org/TR/xml-stylesheet/)
%% scan_wellknown_pi/3
%% Handle the reserved "xml..." PI targets. Only "xml-stylesheet" is known;
%% the caller has already consumed the leading "xml", so the column bump of
%% 16 covers "<?xml-stylesheet". Any other reserved target is fatal.
scan_wellknown_pi("-stylesheet"++T, S0=#xmerl_scanner{line=L,col=C},Pos) ->
    ?dbg("prolog(\"<?xml-stylesheet\")~n", []),
    ?bump_col(16),
    scan_pi(T, S, "xml-stylesheet",L,C,Pos,[]);
scan_wellknown_pi(Str,S,_Pos) ->
    ?fatal({invalid_target_name, lists:sublist(Str, 1, 10)}, S).
%% scan_pi(Str="?>"++_T, S, Target, L, C, Pos) ->
%%     scan_pi(Str, S, Target, L, C, Pos, []);
%% scan_pi(Str=[], S, Target, L, C, Pos) ->
%%     scan_pi(Str, S, Target, L, C, Pos, []);
%% scan_pi(T, S, Target, L, C, Pos) ->
%%     {_,T1,S1} = mandatory_strip(T, S),
%%     scan_pi(T1, S1, Target, L, C, Pos, []).
%% scan_pi/7
%% Continue scanning a PI after its target has been read. An immediate "?>"
%% produces an #xmlPI{} with empty value; otherwise whitespace must separate
%% target and data, and the data is collected by scan_pi2/7. The event and
%% hook callbacks are invoked with the finished PI record.
scan_pi([], S=#xmerl_scanner{continuation_fun = F}, Target,L, C, Pos, Acc) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_pi(MoreBytes, S1, Target, L, C, Pos, Acc) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_pi("?>" ++ T, S0 = #xmerl_scanner{hook_fun = Hook,
                                       event_fun = Event},
        Target, L, C, Pos, Acc) ->
    ?bump_col(2),
    PI = #xmlPI{name = Target,
                pos = Pos,
                value = lists:reverse(Acc)},
    S1 = #xmerl_scanner{} = Event(#xmerl_event{event = ended,
                                               line = L,
                                               col = C,
                                               data = PI}, S),
    {Ret, S2} = Hook(PI, S1),
    {Ret, T, S2};
scan_pi([H|T], S, Target, L, C, Pos, Acc) when ?whitespace(H) ->
    ?strip1,
    scan_pi2(T1, S1, Target, L, C, Pos, Acc).
%% scan_pi2/7
%% Accumulate the character data of a PI up to the terminating "?>".
%% Each character is checked with wfc_legal_char/2 (well-formedness).
scan_pi2([], S=#xmerl_scanner{continuation_fun = F}, Target,L, C, Pos, Acc) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_pi2(MoreBytes, S1, Target, L, C, Pos, Acc) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_pi2("?>" ++ T, S0 = #xmerl_scanner{hook_fun = Hook,
                                        event_fun = Event},
         Target, L, C, Pos, Acc) ->
    ?bump_col(2),
    PI = #xmlPI{name = Target,
                pos = Pos,
                value = lists:reverse(Acc)},
    S1 = #xmerl_scanner{} = Event(#xmerl_event{event = ended,
                                               line = L,
                                               col = C,
                                               data = PI}, S),
    {Ret, S2} = Hook(PI, S1),
    {Ret, T, S2};
scan_pi2([H|T], S0, Target, L, C, Pos, Acc) ->
    ?bump_col(1),
    wfc_legal_char(H,S),
    scan_pi2(T, S, Target, L, C, Pos, [H|Acc]).
%% [28] doctypedecl ::=
%%   '<!DOCTYPE' S Name (S ExternalID)? S? ('[' intSubset ']' S?)? '>'
%% scan_doctype/2
%% Scan a DOCTYPE declaration (production [28]): mandatory whitespace, then
%% the doctype name, which is stored in the scanner state before continuing
%% with the optional external ID / internal subset in scan_doctype1/2.
scan_doctype([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_doctype(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_doctype(T, S) ->
    {_,T1,S1} = mandatory_strip(T,S),
    {DTName, _NamespaceInfo, T2, S2} = scan_name(T1, S1),
    ?strip3,
    scan_doctype1(T3, S3#xmerl_scanner{doctype_name = DTName}).
%% [75] ExternalID ::= 'SYSTEM' S SystemLiteral
%% | 'PUBLIC' S PubidLiteral S SystemLiteral
%% scan_doctype1/2
%% Scan the optional ExternalID of a DOCTYPE (production [75]):
%% 'PUBLIC' PubidLiteral SystemLiteral, or 'SYSTEM' SystemLiteral,
%% or nothing. The result tags scan_doctype2/3 with how to fetch the DTD.
scan_doctype1([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_doctype1(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_doctype1("PUBLIC" ++ T, S0) ->
    ?bump_col(6),
    {_,T1,S1} = mandatory_strip(T,S),
    {PIDL, T2, S2} = scan_pubid_literal(T1, S1),
    {_,T3,S3} = mandatory_strip(T2,S2),
    {SL, T4, S4} = scan_system_literal(T3, S3),
    ?strip5,
    scan_doctype2(T5, S5, {public, PIDL, SL});
scan_doctype1("SYSTEM" ++ T, S0) ->
    ?bump_col(6),
    {_,T1,S1} = mandatory_strip(T,S),
    {SL, T2, S2} = scan_system_literal(T1, S1),
    ?strip3,
    scan_doctype2(T3, S3, {system, SL});
scan_doctype1(T, S) ->
    scan_doctype2(T, S, undefined).
%% scan_doctype2/3
%% After the (optional) ExternalID: either an internal subset opens with '['
%% (handled by scan_doctype3/3) or the declaration closes with '>', at which
%% point the external DTD (if any) is fetched and the declarations checked.
scan_doctype2([], S=#xmerl_scanner{continuation_fun = F},DTD) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_doctype2(MoreBytes, S1, DTD) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_doctype2("[" ++ T, S0, DTD) ->
    ?bump_col(1),
    ?strip1,
    scan_doctype3(T1, S1, DTD);
scan_doctype2(">" ++ T, S0, DTD) ->
    ?bump_col(1),
    ?strip1,
    S2 = fetch_DTD(DTD, S1),
    check_decl(S2),
    {T1, S2}.
%% [28a] DeclSep ::= PEReference | S
%% [28b] intSubset ::= (markupdecl | DeclSep)*
%% scan_doctype3/3
%% Scan the internal DTD subset (production [28b]) up to the closing "]>".
%% Parameter-entity references are expanded: external ones cause the DTD to
%% be fetched; internal ones have their replacement text re-scanned.
%% Fix: the catch-all clause head `ExpRef ->` of the inner case had been
%% lost during extraction (only its trailing comment text remained), which
%% left ExpRef unbound in the body below -- restored here.
scan_doctype3([], S=#xmerl_scanner{continuation_fun = F},DTD) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_doctype3(MoreBytes, S1,DTD) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_doctype3("%" ++ T, S0, DTD) ->
    ?bump_col(1),
    {PERefName, T1, S1} = scan_pe_reference(T, S),
    ?strip2,
    case expand_pe_reference(PERefName, S2,as_PE) of
        {system, _} = Name ->
            S3 = fetch_DTD(Name, S2),
            scan_doctype3(T2, S3, DTD);
        {public, _} = Name ->
            S3 = fetch_DTD(Name, S2),
            scan_doctype3(T2, S3, DTD);
        {public, _, _} = Name ->
            S3 = fetch_DTD(Name, S2),
            scan_doctype3(T2, S3, DTD);
        ExpRef ->
            %% Space added, see Section 4.4.8
            {_,T3,S3} = strip(ExpRef++T2,S2),
            scan_doctype3(T3,S3,DTD)
    end;
scan_doctype3("]" ++ T, S0, DTD) ->
    ?bump_col(1),
    ?strip1,
    S2 = fetch_DTD(DTD, S1),
    check_decl(S2),
    ">" ++ T2 = T1,
    {T2, S2};
scan_doctype3(T, S, DTD) ->
    {_, T1, S1} = scan_markup_decl(T, S),
    scan_doctype3(T1, S1, DTD).
%% fetch_DTD/2
%% Fetch and parse an external DTD, given a {system,URI} / {public,...}
%% specification. `undefined` is a no-op unless the scanner carries an
%% explicit doctype_DTD option, which is then treated as a system URI.
fetch_DTD(undefined, S=#xmerl_scanner{doctype_DTD=URI}) when list(URI)->
    %% allow to specify DTD name when it isn't available in xml stream
    fetch_DTD({system,URI},S);
fetch_DTD(undefined, S) ->
    S;
fetch_DTD(DTDSpec, S)->
    case fetch_and_parse(DTDSpec,S,[{text_decl,true},
                                    {environment,{external,subset}}]) of
        NewS when record(NewS,xmerl_scanner) ->
            NewS;
        {_Res,_Tail,_Sx} -> % Continue with old scanner data, result in Rules
            S
    end.
%% fetch_and_parse/3
%% Resolve an external entity/DTD specification via the scanner's fetch_fun
%% and re-enter the parser on the fetched data (file or string), propagating
%% the current scanner configuration (callbacks, rules table, validation,
%% encoding, ...) through Options. On success the returned scanner has
%% text_decl reset and the caller's environment restored.
fetch_and_parse(ExtSpec,S=#xmerl_scanner{fetch_fun=Fetch,
                                         rules=Rules,
                                         xmlbase = XMLBase},
                Options0) ->
    RetS =
        case Fetch(ExtSpec, S) of
            {ok, NewS} ->
                %% For backward compatibility only. This will be removed later!!
                NewS;
            {ok, not_fetched,NewS} ->
                NewS;
            {ok, DataRet, NewS = #xmerl_scanner{user_state = UState,
                                                event_fun = Event,
                                                hook_fun = Hook,
                                                fetch_fun = Fetch1,
                                                close_fun = Close1,
                                                continuation_fun = Cont,
                                                acc_fun = Acc,
                                                rules_read_fun = Read,
                                                rules_write_fun = Write,
                                                validation = Valid,
                                                quiet = Quiet,
                                                encoding = Charset
                                               }} ->
                EvS = event_state(NewS),
                HoS = hook_state(NewS),
                FeS = fetch_state(NewS),
                CoS = cont_state(NewS),
                Options = Options0++[{user_state, UState},
                                     {rules, Rules},
                                     {event_fun, Event, EvS},
                                     {hook_fun, Hook, HoS},
                                     {fetch_fun, Fetch1, FeS},
                                     {close_fun, Close1},
                                     {continuation_fun, Cont, CoS},
                                     {rules, Read, Write, ""},
                                     {acc_fun, Acc},
                                     {validation,Valid},
                                     {quiet,Quiet},
                                     {encoding,Charset}],
                case DataRet of
                    {file, F} ->
                        int_file_decl(F, Options,Charset);
                    {string, String} ->
                        int_string_decl(String, Options,XMLBase);
                    _ ->
                        %% other scheme
                        {DataRet,[],NewS}
                end;
            Error ->
                ?fatal({error_fetching_DTD, {ExtSpec, Error}}, S)
        end,
    case RetS of
        #xmerl_scanner{} ->
            RetS#xmerl_scanner{text_decl=false,
                               environment=S#xmerl_scanner.environment};
        _ -> RetS
    end.
%% fetch_not_parse/2
%% Fetch an external resource as raw character data WITHOUT parsing it.
%% Used e.g. for parameter-entity replacement text. Returns {String, NewS};
%% a fetch failure (or a not_fetched result) is fatal.
fetch_not_parse(ExtSpec,S=#xmerl_scanner{fetch_fun=Fetch}) ->
    case Fetch(ExtSpec,S) of
        {ok, not_fetched,_NewS} ->
            ?fatal({error_fetching_external_source,ExtSpec},S);
        {ok, DataRet, NewS} ->
            String =
                case DataRet of
                    {file,F} ->
                        get_file(F,S);
                    {string,Str} ->
                        binary_to_list(Str);
                    _ -> DataRet
                end,
            {String, NewS};
        _ ->
            ?fatal({error_fetching_external_resource,ExtSpec},S)
    end.
%% get_file/2
%% Read a whole file into a character list; a read error is fatal.
get_file(F,S) ->
    %% io:format("get_file F=~p~n",[F]),
    case file:read_file(F) of
        {ok,Bin} ->
            binary_to_list(Bin);
        Err ->
            ?fatal({error_reading_file,F,Err},S)
    end.
%% check_decl/1
%% Now it is necessary to check that all referenced types is declared,
%% since it is legal to reference some xml types before they are
%% declared.
%% check_decl/1
%% After the DTD has been scanned, verify that every forward-referenced
%% declaration (notations, element attribute defs, entities) was eventually
%% declared. A no-op unless validation is enabled.
%% Fix: the call `check_elements(Tab,S),` had been lost during extraction
%% (only its trailing comment survived as bare text) -- restored here;
%% check_elements/2 is defined below and is otherwise never invoked.
check_decl(#xmerl_scanner{validation=false}) ->
    ok;
check_decl(#xmerl_scanner{rules=Tab} = S) ->
    check_notations(Tab,S),
    check_elements(Tab,S), %% check also attribute defs for element
    check_entities(Tab,S).
%% check_notations/2
%% Any notation still marked `undeclared` in the rules table was referenced
%% but never declared -- fatal. Both [[]] and [] mean "nothing outstanding".
check_notations(Tab,S) ->
    case ets:match(Tab,{{notation,'$1'},undeclared}) of
        [[]] -> ok;
        [] -> ok;
        [L] when list(L) ->
            ?fatal({error_missing_declaration_in_DTD,hd(L)},S);
        Err ->
            ?fatal({error_missing_declaration_in_DTD,Err},S)
    end.
%% check_elements/2
%% Walk all elem_def entries in the rules table in chunks of 10 (ets:match/3
%% with continuation) and run check_elements2/2 on each match, which in turn
%% validates the element's attribute definitions.
check_elements(Tab,S) ->
    case ets:match(Tab,{{elem_def,'_'},'$2'},10) of
        {_,_}=M ->
            Fun = fun({Match,'$end_of_table'},_F) ->
                          lists:foreach(fun(X)->check_elements2(X,S) end,
                                        Match),
                          ok;
                     ('$end_of_table',_) ->
                          ok;
                     ({Match,Cont},F) ->
                          lists:foreach(fun(X)->check_elements2(X,S) end,
                                        Match),
                          F(ets:match(Cont),F)
                  end,
            Fun(M,Fun);
        '$end_of_table' -> ok;
        Err -> ?fatal({error_missing_declaration_in_DTD,Err},S)
    end.
% it is not an error to declare attributes for an element that is not
% declared.
%% check_elements2/2
%% Validate the attribute definitions of one matched element; any other
%% match shape is ignored.
check_elements2([#xmlElement{attributes=Attrs}],S) ->
    check_attributes(Attrs,S);
check_elements2(_,_) ->
    ok.
%% check_attributes/2
%% Run validity constraints over an element's attribute-definition list:
%% at most one ID attribute per element (VC: One ID per Element Type),
%% ID defaults, enumeration values, and ENTITY/ENTITIES names.
check_attributes([{N1,'ID',_,_,_}=Attr|Rest],S) ->
    case lists:keysearch('ID',2,Rest) of
        {value,Att2} ->
            ?fatal({error_more_than_one_ID_def,N1,element(1,Att2)},S);
        _ ->
            ok
    end,
    vc_ID_Attribute_Default(Attr,S),
    check_attributes(Rest,S);
check_attributes([{_,{enumeration,_},_,_,_}=Attr|T],S) ->
    vc_Enumeration(Attr,S),
    check_attributes(T,S);
check_attributes([{_,Ent,_,_,_}=Attr|T],S)
  when Ent=='ENTITY';Ent=='ENTITIES' ->
    vc_Entity_Name(Attr,S),
    check_attributes(T,S);
check_attributes([_|T],S) ->
    check_attributes(T,S);
check_attributes([],_S) ->
    ok.
%% check_entities/2
%% When validating, any entity still marked `undeclared` in the rules table
%% was referenced but never declared -- fatal. Non-validating: no-op.
check_entities(Tab,S=#xmerl_scanner{validation=true}) ->
    case ets:match(Tab,{{entity,'$1'},undeclared}) of
        [[]] -> ok;
        [] -> ok;
        [L] when list(L) ->
            ?fatal({error_missing_declaration_in_DTD,hd(L)},S);
        Err ->
            ?fatal({error_missing_declaration_in_DTD,Err},S)
    end;
check_entities(_,_) ->
    ok.
%% check_decl2/1: checks that all referenced ID attributes are declared
%% check_decl2/1
%% Verify that every ID referenced via IDREF/IDREFS was actually declared.
check_decl2(S=#xmerl_scanner{rules=Tab}) ->
    check_referenced_ids(Tab,S).
%% check_referenced_ids/2
%% Any id entry still marked `undeclared` was the target of an IDREF that
%% never matched a declared ID -- fatal.
check_referenced_ids(Tab,S) ->
    case ets:match(Tab,{{id,'$1'},undeclared}) of
        [[]] -> ok;
        [] -> ok;
        [L] when list(L) ->
            ?fatal({error_missing_declaration_in_DTD,hd(L)},S);
        Err ->
            ?fatal({error_missing_declaration_in_DTD,Err},S)
    end.
%% [30] extSubSet ::= TextDecl? extSubsetDecl
%% scan_ext_subset/2
%% Scan an external DTD subset (production [30]): parameter-entity
%% references (DeclSep), conditional sections, whitespace, and markup
%% declarations, until input is exhausted.
scan_ext_subset([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_ext_subset(MoreBytes, S1) end,
      fun(S1) -> {[], S1} end,
      S);
scan_ext_subset("%" ++ T, S0) ->
    %% DeclSep [28a]: WFC: PE Between Declarations.
    %% The replacement text of a parameter entity reference in a
    %% DeclSep must match the production extSubsetDecl.
    ?bump_col(1),
    {_,T1,S1} = scan_decl_sep(T,S),
    scan_ext_subset(T1, S1);
scan_ext_subset("<![" ++ T, S0) ->
    ?bump_col(3),
    ?strip1,
    {_, T2, S2} = scan_conditional_sect(T1, S1),
    scan_ext_subset(T2,S2);
scan_ext_subset(T, S) when ?whitespace(hd(T)) ->
    {_,T1,S1} = strip(T,S),
    scan_ext_subset(T1, S1);
scan_ext_subset(T, S) ->
    {_, T1, S1} = scan_markup_decl(T, S),
    scan_ext_subset(T1, S1).
%% [28a] DeclSep ::= PEReference | S
%% scan_decl_sep/2
%% Handle a parameter-entity reference used as a declaration separator
%% (production [28a]). External PEs are fetched (not parsed), their value
%% re-written into the rules table, and the replacement text -- padded with
%% spaces per XML 1.0 Section 4.4.8 -- is scanned as extSubsetDecl before
%% resuming at the point after the reference.
scan_decl_sep(T,S=#xmerl_scanner{rules_read_fun=Read,
                                 rules_write_fun=Write,
                                 rules_delete_fun=Delete}) ->
    {PERefName, T1, S1} = scan_pe_reference(T, S),
    {ExpandedRef,S2} =
        case expand_pe_reference(PERefName,S1,as_PE) of
            Tuple when tuple(Tuple) ->
                %% {system,URI} or {public,URI}
                {ExpRef,_Sx}=fetch_not_parse(Tuple,S1),
                {EntV,_,_S2} = scan_entity_value(ExpRef, S1, no_delim,
                                                 PERefName,parameter),
                %% should do an update Write(parameter_entity) so next
                %% expand_pe_reference is faster
                Delete(parameter_entity,PERefName,_S2),
                _S3 = Write(parameter_entity,PERefName,EntV,_S2),
                EntV2 = Read(parameter_entity,PERefName,_S3),
                {" " ++ EntV2 ++ " ",_S3};
            ExpRef ->
                {ExpRef,S1}
        end,
    {_, T3, S3} = strip(ExpandedRef,S2),
    {_T4,S4} = scan_ext_subset(T3,S3),
    strip(T1,S4).
%% [61] ConditionalSect ::= includeSect | ignoreSect
%% scan_conditional_sect/2
%% Scan a conditional section (production [61]): IGNORE or INCLUDE keyword
%% (possibly produced by a parameter-entity reference), then the bracketed
%% contents via scan_ignore/2 or scan_include/2.
%% Fix: the "%" clause invoked ?bump_col(1) twice; the macro binds
%% S = S0#xmerl_scanner{col = ...}, so the second expansion was a redundant
%% re-match of S against the identical value -- removed the duplicate.
scan_conditional_sect([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_conditional_sect(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_conditional_sect("IGNORE" ++ T, S0) ->
    ?bump_col(6),
    ?strip1,
    "[" ++ T2 = T1,
    {_,T3,S3} = strip(T2,S1),
    scan_ignore(T3,S3);
scan_conditional_sect("INCLUDE" ++ T, S0) ->
    ?bump_col(7),
    ?strip1,
    "[" ++ T2 = T1,
    {_,T3,S3} = strip(T2,S1),
    scan_include(T3, S3);
scan_conditional_sect("%"++T,S0) ->
    ?bump_col(1),
    {PERefName, T1, S1} = scan_pe_reference(T, S),
    ExpRef = expand_pe_reference(PERefName, S1,as_PE),
    {_,T2,S2} = strip(ExpRef ++ T1,S1),
    scan_conditional_sect(T2,S2).
%% [63] ignoreSect ::= '<![' S? 'IGNORE' S? '[' ignoreSectContents* ']]>'
%% [64] ignoreSectContents ::= Ignore ('<![' ignoreSectContents ']]>' Ignore)*
%% [65] Ignore ::= Char* - (Char* ('<![' | ']]>') Char*)
%% scan_ignore/2
%% Entry point for an IGNORE section body; nesting level starts at 0.
scan_ignore(Str,S) ->
    scan_ignore(Str,S,0).
%% scan_ignore/3
%% Skip the contents of an IGNORE section (productions [63]-[65]), tracking
%% nesting of "<![" ... "]]>" so inner conditional sections are skipped too.
scan_ignore([], S=#xmerl_scanner{continuation_fun = F},Level) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_ignore(MoreBytes, S1,Level) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_ignore("<![" ++ T, S0,Level) ->
    %% nested conditional section. Topmost condition is ignore, though
    ?bump_col(3),
    scan_ignore(T, S,Level+1);
scan_ignore("]]>" ++ T, S0,0) ->
    ?bump_col(3),
    {[], T, S};
scan_ignore("]]>" ++ T, S0,Level) ->
    ?bump_col(3),
    scan_ignore(T, S,Level-1);
scan_ignore([_H|T],S0,Level) ->
    ?bump_col(1),
    scan_ignore(T,S,Level).
%% [62] includeSect ::= '<![' S? 'INCLUDE' S? '[' extSubsetDecl ']]>'
%% scan_include/2
%% Scan the body of an INCLUDE section (production [62]) up to "]]>",
%% processing parameter-entity references, nested conditional sections,
%% and markup declarations as in the external subset.
scan_include([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_include(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_include("]]>" ++ T, S0) ->
    ?bump_col(3),
    {[], T, S};
scan_include("%" ++ T, S0) ->
    ?bump_col(1),
    {PERefName, T1, S1} = scan_pe_reference(T, S),
    ExpRef = expand_pe_reference(PERefName, S1,as_PE),
    {_,T2,S2} = strip(ExpRef ++ T1,S1),
    scan_include(T2, S2);
scan_include("<![" ++ T, S0) ->
    ?bump_col(3),
    ?strip1,
    {_, T2, S2} = scan_conditional_sect(T1, S1),
    ?strip3,
    scan_include(T3,S3);
scan_include(T, S) ->
    {_, T1, S1} = scan_markup_decl(T, S),
    scan_include(T1, S1).
%% [29] markupdecl ::= elementdecl | AttlistDecl | EntityDecl |
%%%%%%%                NotationDecl | PI | Comment
%% [45] elementdecl ::= '<!ELEMENT' S Name S contentspec S? '>'
%% Validity constraint: Unique Type Declaration: No element type may be
%% declared more than once.
%%
%% scan_markup_decl/2
%% Scan one markup declaration (production [29]): comment, PI, <!ELEMENT>,
%% <!ENTITY>, <!NOTATION> or <!ATTLIST>. Element and attribute-list
%% declarations are merged into the rules table; a re-declared element is
%% fatal only when validating (VC: Unique Element Type Declaration).
scan_markup_decl([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_markup_decl(MoreBytes, S1) end,
      fun(S1) -> {[], [], S1} end,
      S);
scan_markup_decl("<!--" ++ T, S0) ->
    ?bump_col(4),
    {_, T1, S1} = scan_comment(T, S),
    ?strip2;
scan_markup_decl("<?" ++ T, S0) ->
    ?bump_col(2),
    {_PI, T1, S1} = scan_pi(T, S,_Pos=markup),
    ?strip2;
scan_markup_decl("<!ELEMENT" ++ T,
                 #xmerl_scanner{rules_read_fun = Read,
                                rules_write_fun = Write,
                                rules_delete_fun = Delete} = S0) ->
    ?bump_col(9),
    {_,T1,S1} = mandatory_strip(T,S),
    {Ename, _NamespaceInfo, T2, S2} = scan_name(T1, S1),
    Element =
        case Read(elem_def, Ename, S2) of
            El = #xmlElement{elementdef=Decl} when Decl /= undeclared ->
                case S2#xmerl_scanner.validation of
                    true ->
                        ?fatal({already_defined, Ename}, S2);
                    _ ->
                        Delete(elem_def,Ename,S2),
                        El
                end;
            El = #xmlElement{} ->
                Delete(elem_def,Ename,S2),
                El;
            undefined ->
                #xmlElement{}
        end,
    {_,T3,S3} = mandatory_strip(T2,S2),
    {Edef, T4, S4} = scan_contentspec(T3, S3),
    ?strip5,
    {">" ++ T6,S6} = scan_element_completion(T5,S5),
    S7 = Write(elem_def, Ename,
               Element#xmlElement{name = Ename,
                                  content = Edef,
                                  elementdef=S6#xmerl_scanner.environment}, S6),
    strip(T6,S7);
scan_markup_decl("<!ENTITY" ++ T, S0) ->
    %% <!ENTITY [%] entity.name NDATA notation.name>
    %% <!ENTITY [%] entity.name "replacement text">
    %% <!ENTITY [%] entity.name SYSTEM "system.identifier">
    %% <!ENTITY [%] entity.name PUBLIC public.identifier "system.identifier">
    ?bump_col(8),
    {_,T1,S1} = mandatory_strip(T,S),
    {T2, S2} = scan_entity(T1, S1),
    ?strip3;
scan_markup_decl("<!NOTATION" ++ T, S0) ->
    %% <!NOTATION notation.name "public.identifier" "helper.application">
    ?bump_col(10),
    {_,T1,S1} = mandatory_strip(T,S),
    {T2, S2} = scan_notation_decl(T1, S1),
    ?strip3;
scan_markup_decl("<!ATTLIST" ++ T,
                 #xmerl_scanner{rules_read_fun = Read,
                                rules_write_fun = Write,
                                rules_delete_fun= Delete} = S0) ->
    %% <!ATTLIST Ename (AttrName Type Value)*>
    ?bump_col(9),
    {_,T1,S1} = mandatory_strip(T,S),
    {Ename, _NamespaceInfo, T2, S2} = scan_name(T1, S1),
    %% ?strip3,
    {Attributes, T4, S4} = scan_attdef(T2, S2),
    {EDEF,MergedAttrs} =
        case Read(elem_def, Ename, S4) of
            undefined -> %% this may happen when the ELEMENT is declared in
                %% the external DTD but the ATTLIST in the
                %% internal DTD.
                {#xmlElement{},update_attributes(Attributes,[])};
            Edef = #xmlElement{attributes = OldAttrs} ->
                Delete(elem_def,Ename,S4),
                %% the slot in rules table must be empty so that the
                %% later write has the assumed effect. Read maybe
                %% should empty the table slot.
                {Edef,update_attributes(Attributes, OldAttrs)}
        end,
    NewEdef = EDEF#xmlElement{name=Ename,attributes = MergedAttrs},
    S5 = Write(elem_def, Ename, NewEdef, S4),
    T5 = T4,
    ?strip6.
%% scan_element_completion/2
%% Consume anything allowed before the '>' that ends an <!ELEMENT> decl.
scan_element_completion(T,S) ->
    scan_markup_completion_gt(T,S).
%% update_attributes/2
%% Merge a freshly scanned ATTLIST into previously declared attributes.
%% Per XML 1.0, the FIRST declaration of an attribute wins, so existing
%% definitions are kept (old list is reversed so the fold preserves order).
update_attributes(NewAttrs, OldAttrs) ->
    update_attributes1(NewAttrs,lists:reverse(OldAttrs)).
%% update_attributes1/2
%% Fold the new attribute definitions into the accumulator: a definition is
%% added only when no definition with the same name is already present
%% (earlier declarations take precedence). The accumulator is built in
%% reverse and flipped once at the end.
update_attributes1([Attr = {AttName,_Type,_DefV,_DefD,_Env} | Rest], Acc) ->
    case lists:keymember(AttName, 1, Acc) of
        false ->
            update_attributes1(Rest, [Attr | Acc]);
        true ->
            update_attributes1(Rest, Acc)
    end;
update_attributes1([], Acc) ->
    lists:reverse(Acc).
%% [53] AttDef
%% scan_attdef/2
%% Entry point for scanning the AttDef list of an <!ATTLIST> declaration
%% (production [53]); starts with an empty accumulator.
scan_attdef([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_attdef(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_attdef(T, S) ->
    scan_attdef(T, S, _AttrAcc = []).
%% scan_attdef/3
%% Scan attribute definitions until '>'. Parameter-entity references are
%% expanded inline (forbidden in the internal subset within markup -- WFC:
%% PEs in Internal Subset); otherwise mandatory whitespace precedes each
%% definition, which scan_attdef2/3 then parses.
scan_attdef([], S=#xmerl_scanner{continuation_fun = F}, Attrs) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_attdef(MoreBytes, S1, Attrs) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_attdef(">" ++ T, S0, Attrs) ->
    ?bump_col(1),
    {lists:reverse(Attrs), T, S};
scan_attdef("%" ++ _T, S=#xmerl_scanner{environment=prolog}, _Attrs) ->
    ?fatal({error,{wfc_PEs_In_Internal_Subset}},S);
scan_attdef("%" ++ T, S0, Attrs) ->
    ?bump_col(1),
    {PERefName, T1, S1} = scan_pe_reference(T, S0),
    ExpRef = expand_pe_reference(PERefName, S1,as_PE),
    {_,T2,S2} = strip(ExpRef ++ T1,S1),
    scan_attdef(T2, S2, Attrs);
scan_attdef(T,S,Attrs) ->
    {_,T1,S1} = mandatory_strip(T,S),
    scan_attdef2(T1,S1,Attrs).
%% scan_attdef2/3
%% Parse one attribute definition: Name, AttType, DefaultDecl. Each parsed
%% definition is accumulated as {Name, Type, DefaultValue, DefaultDecl, Env}.
scan_attdef2(">" ++ T, S0, Attrs) ->
    ?bump_col(1),
    {lists:reverse(Attrs), T, S};
scan_attdef2("%" ++ _T, S=#xmerl_scanner{environment=prolog}, _Attrs) ->
    ?fatal({error,{wfc_PEs_In_Internal_Subset}},S);
scan_attdef2("%" ++ T, S0, Attrs) ->
    ?bump_col(1),
    {PERefName, T1, S1} = scan_pe_reference(T, S0),
    ExpRef = expand_pe_reference(PERefName, S1,as_PE),
    {_,T2,S2} = strip(ExpRef ++ T1,S1),
    scan_attdef2(T2, S2, Attrs);
scan_attdef2(T, S, Attrs) ->
    {AttName, _NamespaceInfo, T1, S1} = scan_name(T, S),
    {_,T2,S2} = mandatory_strip(T1,S1),
    {AttType, T3, S3} = scan_att_type(T2, S2),
    {_,T4,S4} = mandatory_strip(T3,S3),
    {{DefaultDecl,DefaultValue}, T5, S5} = scan_default_decl(T4, S4, AttType),
    ?strip6,
    Attr = {AttName, AttType,DefaultValue,DefaultDecl,
            S#xmerl_scanner.environment},
    scan_attdef2(T6, S6, [Attr|Attrs]).
%% [54] StringType
%% scan_att_type/2
%% Scan an attribute type: StringType [54] (CDATA), TokenizedType [55]
%% (ID, IDREF(S), ENTITY/ENTITIES, NMTOKEN(S)) or EnumeratedType [57]
%% (NOTATION list or plain enumeration). Note the longer keywords are
%% matched before their prefixes (IDREFS before IDREF before ID, etc.).
scan_att_type([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_att_type(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_att_type("CDATA" ++ T, S0) ->
    ?bump_col(5),
    {'CDATA', T, S};
%% [55] TokenizedType
scan_att_type("IDREFS" ++ T, S0) ->
    ?bump_col(6),
    {'IDREFS', T, S};
scan_att_type("IDREF" ++ T, S0) ->
    ?bump_col(5),
    {'IDREF', T, S};
scan_att_type("ID" ++ T, S0) ->
    ?bump_col(2),
    {'ID', T, S};
scan_att_type("ENTITY" ++ T, S0) ->
    ?bump_col(6),
    {'ENTITY', T, S};
scan_att_type("ENTITIES" ++ T, S0) ->
    ?bump_col(8),
    {'ENTITIES', T, S};
scan_att_type("NMTOKENS" ++ T, S0) ->
    ?bump_col(8),
    {'NMTOKENS', T, S};
scan_att_type("NMTOKEN" ++ T, S0) ->
    ?bump_col(7),
    {'NMTOKEN', T, S};
%% [57] EnumeratedType
scan_att_type("NOTATION" ++ T, S0) ->
    ?bump_col(8),
    {_,T1,S1} = mandatory_strip(T,S),
    "(" ++ T2 = T1,
    S2 = S1,
    ?strip3,
    {Name, _NamespaceInfo, T4, S4} = scan_name(T3, S3),
    notation_exists(Name, S4),
    ?strip5,
    scan_notation_type(T5, S5, [Name]);
scan_att_type("(" ++ T, S0) ->
    ?bump_col(1),
    ?strip1,
    {NmToken, _NamespaceInfo, T2, S2} = scan_nmtoken(T1, S1),
    ?strip3,
    scan_enumeration(T3, S3, [NmToken]);
scan_att_type("%" ++ _T, S=#xmerl_scanner{environment=prolog}) ->
    ?fatal({error,{wfc_PEs_In_Internal_Subset}},S);
scan_att_type("%" ++ T, S0) ->
    ?bump_col(1),
    {PERefName, T1, S1} = scan_pe_reference(T, S0),
    ExpRef = expand_pe_reference(PERefName, S1,in_literal),
    {ExpRef,T1,S1}.
%% [58] NotationType
%% scan_notation_type/3
%% Scan the remainder of a NOTATION attribute type (production [58]):
%% '|'-separated notation names up to ')'. Each name is registered via
%% notation_exists/2 so forward references can be checked later.
scan_notation_type([], S=#xmerl_scanner{continuation_fun = F}, Acc) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_notation_type(MoreBytes, S1, Acc) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_notation_type(")" ++ T, S0, Acc) ->
    ?bump_col(1),
    {{notation, lists:reverse(Acc)}, T, S};
scan_notation_type("|" ++ T, S0, Acc) ->
    ?bump_col(1),
    ?strip1,
    {Name, _NamespaceInfo, T2, S2} = scan_name(T1, S1),
    notation_exists(Name, S2),
    ?strip3,
    scan_notation_type(T3, S3, [Name | Acc]).
%%% Validity constraint for NotationType:
%%% The used notation names must be declared in the DTD, but they may
%%% be declared later.
%% notation_exists/2
%% Record that a notation name has been referenced. A not-yet-declared name
%% is entered as `undeclared` so check_notations/2 can verify it afterwards.
notation_exists(Name, #xmerl_scanner{rules_read_fun = Read,
                                     rules_write_fun = Write } = S) ->
    case Read(notation, Name, S) of
        undefined ->
            %% this is legal, since the referenced NOTATION
            %% may be declared later in internal or external
            %% subset.
            Write(notation,Name,undeclared,S);
        _Value ->
            ok
    end.
%% [59] Enumeration
%% scan_enumeration/3
%% Scan the remainder of an enumeration attribute type (production [59]):
%% '|'-separated Nmtokens up to ')'.
scan_enumeration([], S=#xmerl_scanner{continuation_fun = F}, Acc) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_enumeration(MoreBytes, S1, Acc) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_enumeration(")" ++ T, S0, Acc) ->
    ?bump_col(1),
    {{enumeration, lists:reverse(Acc)}, T, S};
scan_enumeration("|" ++ T, S0, Acc) ->
    ?bump_col(1),
    ?strip1,
    {NmToken, _NamespaceInfo, T2, S2} = scan_nmtoken(T1, S1),
    ?strip3,
    scan_enumeration(T3, S3, [NmToken|Acc]).
%% [60] DefaultDecl
%% scan_default_decl/3
%% Scan an attribute DefaultDecl (production [60]): #REQUIRED, #IMPLIED,
%% #FIXED + value, or a bare default value. Returns {{Decl,Value}, Rest, S}.
scan_default_decl([], S=#xmerl_scanner{continuation_fun = F}, Type) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_default_decl(MoreBytes, S1, Type) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_default_decl("#REQUIRED" ++ T, S0, _Type) ->
    ?bump_col(9),
    {{'#REQUIRED',no_value}, T, S};
scan_default_decl("#IMPLIED" ++ T, S0, _Type) ->
    ?bump_col(8),
    {{'#IMPLIED',no_value}, T, S};
scan_default_decl("#FIXED" ++ T, S0, Type) ->
    ?bump_col(6),
    {_,T1,S1} = mandatory_strip(T,S),
    {Value,T2,S2,_} = default_value(T1, S1, Type),
    {{'#FIXED',Value},T2,S2};
scan_default_decl(Str, S, Type) ->
    {Value,T1,S1,_} = default_value(Str, S, Type),
    {{no_decl,Value},T1,S1}.
%% There is room here to validate against Type, but we don't do it at
%% the moment.
%% default_value/3
%% Scan an attribute's default value literal; Type-based validation could
%% be done here but currently isn't (see comment above in the file).
default_value(T, S, Type) ->
    {_Val, _T1, _S1,_} = scan_att_value(T, S, Type).
%% [71] EntityDef
%% scan_entity/2
%% Scan an <!ENTITY> declaration body: either a parameter entity
%% ("% Name Def") stored under parameter_entity, or a general entity whose
%% definition (internal value or external ID) is stored under entity
%% together with the declaring environment and entity type. A previously
%% `undeclared` forward reference is replaced by the real definition.
scan_entity([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_entity(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_entity("%" ++ T, #xmerl_scanner{rules_write_fun = Write} = S0) ->
    %% parameter entity
    ?bump_col(1),
    {_,T1,S1} = mandatory_strip(T,S),
    {PEName, _NamespaceInfo, T2, S2} = scan_name_no_colons(T1, S1),
    {_,T3,S3} = mandatory_strip(T2,S2),
    {PEDef, T4, S4} = scan_pe_def(T3, S3, PEName),
    ?strip5,
    {">" ++ T6,S6} = scan_entity_completion(T5,S5),
    S7 = Write(parameter_entity, PEName, PEDef, S6),
    {T6, S7};
scan_entity(T, #xmerl_scanner{rules_write_fun = Write,
                              rules_read_fun = Read,
                              rules_delete_fun = Delete} = S) ->
    %% generic entity
    {EName, _NamespaceInfo, T1, S1} = scan_name_no_colons(T, S),
    {_,T2,S2} = mandatory_strip(T1,S1),
    {EDef, EntType, T3, S3} = scan_entity_def(T2, S2, EName),
    check_entity_recursion(EName,S3),
    ?strip4,
    {">" ++ T5,S5} = scan_entity_completion(T4,S4),
    case Read(entity,EName,S5) of
        undeclared -> Delete(entity,EName,S5);
        _ -> ok
    end,
    S6 = Write(entity, EName, {S5#xmerl_scanner.environment,EntType,EDef}, S5),
    {T5, S6}.
%% scan_entity_completion/2
%% Consume anything allowed before the '>' that ends an <!ENTITY> decl.
scan_entity_completion(T,S) ->
    scan_markup_completion_gt(T,S).
%% [73] EntityDef
%% scan_entity_def/3
%% Scan an EntityDef (production [73]): a quoted internal value, or an
%% ExternalID optionally followed by an NDataDecl. External parsed entities
%% are fetched and parsed immediately; an NDATA suffix marks the entity as
%% unparsed and its definition is kept as {ExtID, {ndata,Name}}.
scan_entity_def([], S=#xmerl_scanner{continuation_fun = F}, EName) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_entity_def(MoreBytes, S1, EName) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_entity_def("'" ++ T, S0, EName) ->
    ?bump_col(1),
    {EVal,T1,S1}=scan_entity_value(T, S, $', EName,general),
    {EVal,internal,T1,S1};
scan_entity_def("\"" ++ T, S0, EName) ->
    ?bump_col(1),
    {EVal,T1,S1}=scan_entity_value(T, S, $", EName,general),
    {EVal,internal,T1,S1};
%% external general entity, parsed or unparsed.
scan_entity_def(Str, S, EName) ->
    {ExtID, T1, S1} = scan_external_id(Str, S),
    {NData, T2, S2} = scan_ndata_decl(T1, S1),
    case NData of
        {ndata,_} ->
            %% if NDATA exists it is an unparsed ENTITY
            {{ExtID,NData},external,T2,S2};
        _ ->
            case fetch_and_parse(ExtID,S2,
                                 [{text_decl,true},
                                  {environment,{external,{entity,EName}}}]) of
                {{_USret,Entity},_Tail,_Sx} ->
                    {Entity, external,T2, S2};
                {Entity,_Tail,Sx} ->
                    OldRef=S2#xmerl_scanner.entity_references,
                    NewRef=Sx#xmerl_scanner.entity_references,
                    {Entity,T2,
                     S2#xmerl_scanner{entity_references=OldRef++NewRef}};
                {error,enoent} -> % this bad entity is declared,
                                  % but it may not be referenced,
                                  % then it would not be an
                                  % error.
                    {{error,enoent},external,T2,S2}
            end
    end.
%% scan_ndata_decl/2
%% Scan an optional NDataDecl after an ExternalID: returns [] when the
%% declaration closes immediately with '>', otherwise strips the mandatory
%% whitespace and looks for the NDATA keyword in scan_ndata_decl2/2.
scan_ndata_decl([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_ndata_decl(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_ndata_decl(Str = ">"++_T, S) ->
    {[], Str, S};
scan_ndata_decl(T, S) ->
    {_,T1,S1} = mandatory_strip(T,S),
    scan_ndata_decl2(T1,S1).
%% scan_ndata_decl2/2
%% Scan "NDATA Name" and register the notation name (as `undeclared` if it
%% hasn't been declared yet, so check_notations/2 can verify it later).
%% Fix: the `undefined ->` clause head of the inner case had been lost
%% during extraction (only its trailing comment text remained), leaving the
%% case without a pattern before the Write call -- restored here.
scan_ndata_decl2(Str = ">"++_T,S) ->
    {[], Str, S};
scan_ndata_decl2("NDATA" ++ T,S0 = #xmerl_scanner{rules_read_fun = Read,
                                                  rules_write_fun = Write}) ->
    ?bump_col(5),
    {_,T1,S1} = mandatory_strip(T,S),
    {Name, _NamespaceInfo, T2, S2} = scan_name(T1, S1),
    case Read(notation, Name, S2) of
        undefined ->
            %% this is legal, since the referenced NOTATION
            %% may be declared later in internal or external
            %% subset.
            Write(notation,Name,undeclared,S2),
            {{ndata,Name},T2,S2};
        _Value ->
            {{ndata, Name}, T2, S2}
    end.
%% [39] element
%% scan_element/3
%% Top-level entry for scanning an element (production [39]): seeds the
%% space handling from the scanner options, with empty language, no parents
%% and an empty namespace record.
scan_element(T, S, Pos) ->
    scan_element(T, S, Pos, S#xmerl_scanner.space,
                 _Lang = [], _Parents = [], #xmlNamespace{}).
%% scan_element/7
%% Scan the element name (validity-checked), then continue with the
%% attribute/contents loop scan_element/12, remembering the start line and
%% column for event reporting.
scan_element(T, S=#xmerl_scanner{line=L,col=C},
             Pos, SpaceDefault,Lang, Parents, NS) ->
    {Name, NamespaceInfo, T1, S1} = scan_name(T, S),
    vc_Element_valid(Name,S),
    ?strip2,
    scan_element(T2, S2, Pos, Name, L, C, _Attrs = [],
                 Lang, Parents, NamespaceInfo, NS,
                 SpaceDefault).
%% scan_element/12
%% Attribute/close loop of an element start tag. Handles: chunk boundaries
%% (a lone "/" or ">" or end of input triggers the continuation fun),
%% empty-element close "/>", normal close ">" followed by content scanning,
%% and one attribute per final clause (with xml:base resolution and
%% namespace bookkeeping). Events and the hook fun fire on completion.
scan_element("/", S=#xmerl_scanner{continuation_fun = F},
             Pos, Name, StartL, StartC, Attrs, Lang, Parents,
             NSI, NS, SpaceDefault) ->
    ?dbg("trailing / detected~n", []),
    F(fun(MoreBytes, S1) -> scan_element("/" ++ MoreBytes, S1,
                                         Pos, Name, StartL, StartC, Attrs,
                                         Lang,Parents,NSI,NS,SpaceDefault) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_element([], S=#xmerl_scanner{continuation_fun = F},
             Pos, Name, StartL, StartC, Attrs, Lang, Parents,
             NSI, NS, SpaceDefault) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_element(MoreBytes, S1,
                                         Pos, Name, StartL, StartC, Attrs,
                                         Lang,Parents,NSI,NS,SpaceDefault) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_element("/>" ++ T, S0 = #xmerl_scanner{hook_fun = Hook,
                                            event_fun = Event,
                                            line = L, col = C,
                                            xmlbase_cache=XMLBase}, Pos,
             Name, _StartL, _StartC, Attrs0, Lang, Parents, NSI,
             Namespace, _SpaceDefault) ->
    %% Empty element: build the record, fire `ended`, run the hook, and
    %% restore the cached xml:base of the enclosing scope.
    ?bump_col(2),
    Attrs = lists:reverse(Attrs0),
    E=processed_whole_element(S, Pos, Name, Attrs, Lang, Parents,NSI,Namespace),
    wfc_unique_att_spec(Attrs,S),
    S1 = #xmerl_scanner{} = Event(#xmerl_event{event = ended,
                                               line = L,
                                               col = C,
                                               data = E}, S0),
    {Ret, S2} = Hook(E, S1),
    S2b=S2#xmerl_scanner{xmlbase=XMLBase},
    {Ret, T, S2b};
scan_element(">", S=#xmerl_scanner{continuation_fun = F},
             Pos, Name, StartL, StartC, Attrs, Lang, Parents,
             NSI, NS, SpaceDefault) ->
    ?dbg("trailing > detected~n", []),
    F(fun(MoreBytes, S1) -> scan_element(">" ++ MoreBytes, S1,
                                         Pos, Name, StartL, StartC, Attrs,
                                         Lang,Parents,NSI,NS,SpaceDefault) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_element(">" ++ T, S0 = #xmerl_scanner{event_fun = Event,
                                           hook_fun = Hook,
                                           line = L, col = C,
                                           xmlbase_cache=XMLBase,
                                           space = SpaceOption},
             Pos, Name, StartL, StartC, Attrs0, Lang, Parents,
             NSI, Namespace, SpaceDefault) ->
    ?bump_col(1),
    Attrs = lists:reverse(Attrs0),
    wfc_unique_att_spec(Attrs,S),
    %% NOTE(review): keysearch/3 returns {value, Tuple}, so the
    %% {value, "default"} / {value, "preserve"} patterns below can never
    %% match an #xmlAttribute{} result -- xml:space handling appears to
    %% always fall through to SpaceDefault. Confirm against upstream.
    XMLSpace = case lists:keysearch('xml:space', 1, Attrs) of
                   false -> SpaceDefault;
                   {value, "default"} -> SpaceOption;
                   {value, "preserve"} -> preserve;
                   _ -> SpaceDefault
               end,
    E0=processed_whole_element(S,Pos,Name,Attrs,Lang,Parents,NSI,Namespace),
    S1 = #xmerl_scanner{} = Event(#xmerl_event{event = started,
                                               line = StartL,
                                               col = StartC,
                                               data = E0}, S),
    {Content, T1, S2} = scan_content(T, S1, Name, Attrs, XMLSpace,
                                     E0#xmlElement.language,
                                     [{Name, Pos}|Parents], Namespace),
    Element=E0#xmlElement{content=Content,
                          xmlbase=E0#xmlElement.xmlbase},
    S3 = #xmerl_scanner{} = Event(#xmerl_event{event = ended,
                                               line = L,
                                               col = C,
                                               data = Element}, S2),
    {Ret, S4} = Hook(Element, S3),
    S4b=S4#xmerl_scanner{xmlbase=XMLBase},
    {Ret, T1, S4b};
scan_element(T, S, Pos, Name, StartL, StartC, Attrs, Lang, Parents,
             NSI, NS, SpaceDefault) ->
    %% One attribute: Name = Value, type looked up from the DTD (CDATA by
    %% default), namespace declarations folded into NS, xml:base resolved.
    {AttName, NamespaceInfo, T1, S1} = scan_name(T, S),
    {T2, S2} = scan_eq(T1, S1),
    {AttType,_DefaultDecl} = get_att_type(S2,AttName,Name),
    {AttValue, T3, S3,IsNorm} = scan_att_value(T2, S2, AttType),
    %% check_default_value(S3,DefaultDecl,AttValue),
    NewNS = check_namespace(AttName, NamespaceInfo, AttValue, NS),
    wfc_whitespace_betw_attrs(hd(T3),S3),
    ?strip4,
    AttrPos = case Attrs of
                  [] ->
                      1;
                  [#xmlAttribute{pos = P}|_] ->
                      P+1
              end,
    Attr = #xmlAttribute{name = AttName,
                         pos = AttrPos,
                         language = Lang,
                         namespace = NamespaceInfo,
                         value = AttValue,
                         normalized = IsNorm},
    XMLBase=if
                AttName=='xml:base' ->
                    resolve_relative_uri(AttValue,S4#xmerl_scanner.xmlbase);
                true ->
                    S4#xmerl_scanner.xmlbase
            end,
    #xmerl_scanner{event_fun = Event,
                   line = Line,
                   col = Col} = S4,
    S5 = Event(#xmerl_event{event = ended,
                            line = Line,
                            col = Col,
                            data = Attr},
               S4#xmerl_scanner{xmlbase=XMLBase,
                                xmlbase_cache=S#xmerl_scanner.xmlbase}),
    scan_element(T4, S5, Pos, Name, StartL, StartC, [Attr|Attrs],
                 Lang, Parents, NSI, NewNS, SpaceDefault).
%% get_att_type/3
%% Look up the declared type and DefaultDecl of an attribute from the DTD
%% rules table; anything undeclared is treated as CDATA with no default.
%% Fix: the inner case's fallback `_ -> {'CDATA',no_value}` had been lost
%% during extraction (only its trailing comment text remained), leaving the
%% inner case without a catch-all -- restored here.
get_att_type(S=#xmerl_scanner{rules_read_fun=Read},AttName,ElemName) ->
    case Read(elem_def,ElemName,S) of
        #xmlElement{attributes = Attrs} ->
            case lists:keysearch(AttName,1,Attrs) of
                {value,{_,AttType,_,DefaultDecl,_}} ->
                    {AttType,DefaultDecl};
                _ ->
                    %% undefined attribute shall be treated as CDATA
                    {'CDATA',no_value}
            end;
        _ -> {'CDATA',no_value}
    end.
%% Resolve NewBase against CurrentBase. An absolute path ("/...")
%% keeps the scheme/host/port of CurrentBase when it parses as a full
%% URI; otherwise the two are joined as plain file paths.
resolve_relative_uri(NewBase = "/" ++ _, CurrentBase) ->
    case uri:parse(CurrentBase) of
	{error, no_scheme} ->
	    NewBase;
	{Scheme, Host, Port, _PathQuery} ->
	    lists:append([atom_to_list(Scheme), Host, ":",
			  integer_to_list(Port), NewBase])
    end;
resolve_relative_uri(Relative, CurrentBase) ->
    filename:join(CurrentBase, Relative).
%% Build the #xmlElement{} record for a fully scanned start tag.
%% When namespace conformance is on, expand the element and attribute
%% names to their {URI, LocalName} forms.
processed_whole_element(S=#xmerl_scanner{hook_fun = _Hook,
					 xmlbase = XMLBase,
					 line = _L, col = _C,
					 event_fun = _Event},
			Pos, Name, Attrs, Lang, Parents, NSI, Namespace) ->
    Language = check_language(Attrs, Lang),
    {ExpName, ExpAttrs} =
	case S#xmerl_scanner.namespace_conformant of
	    true ->
		%% expand attribute names. We need to do this after having
		%% scanned all attributes of the element, since (as far as
		%% I can tell), XML Names only specifies that namespace attrs
		%% are valid within the whole scope of the element in which
		%% they are declared, which should also mean that even if they
		%% are declared after some other attributes, the namespace
		%% should apply to those attributes as well.
		%% Note that the default URI does not apply to attribute names.
		TempNamespace = Namespace#xmlNamespace{default = []},
		ExpAttrsX =
		    [A#xmlAttribute{
		       expanded_name=expanded_name(
				       A#xmlAttribute.name,
				       A#xmlAttribute.namespace,
				       %% NSI,
				       TempNamespace, S)} || A <- Attrs],
		{expanded_name(Name, NSI, Namespace, S), ExpAttrsX};
	    false ->
		{Name, Attrs}
	end,
    #xmlElement{name = Name,
		xmlbase = XMLBase,
		pos = Pos,
		parents = Parents,
		attributes = ExpAttrs,
		language = Language,
		expanded_name = ExpName,
		nsinfo = NSI,
		namespace = Namespace}.
%% Return the value of the first xml:lang attribute in Attrs, or the
%% inherited Default when no such attribute is present.
check_language([#xmlAttribute{name = 'xml:lang', value = Value} | _], _Default) ->
    Value;
check_language([_Other | Rest], Default) ->
    check_language(Rest, Default);
check_language([], Default) ->
    Default.
%% Update the namespace record for a namespace-declaration attribute:
%% a bare "xmlns" sets the default namespace; "xmlns:Prefix" adds or
%% replaces the binding for Prefix. Any other attribute leaves the
%% namespace record untouched.
check_namespace(xmlns, _NSInfo, Value, NS) ->
    NS#xmlNamespace{default = list_to_atom(Value)};
check_namespace(_Att, {"xmlns", Prefix}, Value,
		#xmlNamespace{nodes = Nodes} = NS) ->
    NewNodes = keyreplaceadd(Prefix, 1, Nodes, {Prefix, list_to_atom(Value)}),
    NS#xmlNamespace{nodes = NewNodes};
check_namespace(_Att, _NSInfo, _Value, NS) ->
    NS.
%% Compute the "expanded name" per the Namespaces in XML rec:
%% a bare atom when no namespace applies, otherwise {URI, LocalName}.
%% An undeclared prefix is a fatal namespace-constraint violation.
expanded_name(Name, [], #xmlNamespace{default = []}, _S) ->
    Name;
expanded_name(Name, [], #xmlNamespace{default = URI}, _S) ->
    {URI, Name};
expanded_name(_Name, {"xmlns", Local}, _NS, _S) -> % CHECK THIS /JB
    {"xmlns",Local};
expanded_name(_Name, {Prefix, Local}, #xmlNamespace{nodes = Ns}, S) ->
    case lists:keysearch(Prefix, 1, Ns) of
	{value, {_, URI}} ->
	    {URI, list_to_atom(Local)};
	false ->
	    %% A namespace constraint of XML Names is that the prefix
	    %% must be declared
	    ?fatal({namespace_prefix_not_declared, Prefix}, S)
    end.
%% Like lists:keyreplace/4, but appends Obj to the list when no tuple
%% with key Key at element position Pos is found.
keyreplaceadd(Key, Pos, [Tup | Rest], Obj) when Key == element(Pos, Tup) ->
    [Obj | Rest];
keyreplaceadd(Key, Pos, [Tup | Rest], Obj) ->
    [Tup | keyreplaceadd(Key, Pos, Rest, Obj)];
keyreplaceadd(_Key, _Pos, [], Obj) ->
    [Obj].
%% [10] AttValue
%% Normalize the attribute value according to XML 1.0 section 3.3.3.
%% [10] AttValue
%% Scan a quoted attribute value. CDATA values keep characters as-is
%% (whitespace folded to spaces by scan_att_chars); tokenized types are
%% normalized per XML 1.0 section 3.3.3. A leading parameter-entity
%% reference (legal only outside the internal subset) is expanded in
%% place before scanning continues.
scan_att_value([], S=#xmerl_scanner{continuation_fun = F},AT) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_att_value(MoreBytes, S1, AT) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
%% WFC: PE references are forbidden in the internal subset (prolog).
scan_att_value("%"++_T,S=#xmerl_scanner{environment=prolog},_AttType) ->
    ?fatal({error,{wfc_PEs_In_Internal_Subset}},S);
scan_att_value("%"++T,S0=#xmerl_scanner{rules_read_fun=Read,
					rules_write_fun=Write,
					rules_delete_fun=Delete},AttType) ->
    ?bump_col(1),
    {Name,T1,S1} = scan_pe_reference(T,S),
    {ExpandedRef,S2} =
	case expand_pe_reference(Name,S1,in_literal) of
	    Tuple when tuple(Tuple) ->
		%% {system,URI} or {public,URI}
		%% Included in literal, just get external file.
		{ExpRef,_Sx}=fetch_not_parse(Tuple,S1),
		{EntV,_,_S2} = scan_entity_value(ExpRef, S1, no_delim,
						 Name,parameter),
		%% should do an update Write(parameter_entity) so next
		%% expand_pe_reference is faster
		Delete(parameter_entity,Name,_S2),
		_S3 = Write(parameter_entity,Name,EntV,_S2),
		EntV2 = Read(parameter_entity,Name,_S3),
		{EntV2,_S3};
	    ExpRef ->
		{ExpRef,S1}
	end,
    {_,T2,S3} = strip(ExpandedRef ++ T1,S2),
    scan_att_value(T2,S3,AttType);
%% CDATA: scan directly, no pre-normalization.
scan_att_value([H|T], S0,'CDATA'=AT) when H == $"; H == $' ->
    ?bump_col(1),
    scan_att_chars(T, S, H, [],[], AT,false);
%% Tokenized types: strip/normalize leading whitespace first.
scan_att_value([H|T], S0,AttType) when H == $"; H == $' ->
    ?bump_col(1),
    {T1,S1,IsNorm} = normalize(T,S,false),
    scan_att_chars(T1, S1, H, [],[], AttType,IsNorm).
%% Scan the characters of an attribute value up to the closing quote H.
%% Acc accumulates the (reversed) value; TmpAcc collects the current
%% token for tokenized-type default-value checks. "<" is a fatal error
%% inside attribute values; "&" starts a reference.
scan_att_chars([],S=#xmerl_scanner{continuation_fun=F},H,Acc,TmpAcc,AT,IsNorm)->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) ->
	      scan_att_chars(MoreBytes, S1, H, Acc,TmpAcc,AT,IsNorm)
      end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_att_chars([H|T], S0, H, Acc, TmpAcc,AttType,IsNorm) -> % End quote
    ?bump_col(1),
    check_att_default_val(S#xmerl_scanner.validation,TmpAcc,AttType,S),
    %% Non-CDATA values get a final trailing-whitespace normalization.
    {Acc2,S2,IsNorm2} =
	if
	    AttType == 'CDATA' -> {Acc,S,IsNorm};
	    true ->
		normalize(Acc,S,IsNorm)
	end,
    {lists:reverse(Acc2), T, S2,IsNorm2};
scan_att_chars("&" ++ T, S0, Delim, Acc, TmpAcc,AT,IsNorm) -> % Reference
    ?bump_col(1),
    {ExpRef, T1, S1} = scan_reference(T, S),
    %% Expanded markup-delimiter entities are kept as literal data;
    %% other expansions are rescanned so nested references resolve.
    case markup_delimeter(ExpRef) of
	true ->
	    scan_att_chars(T1,S1,Delim,[ExpRef|Acc],[ExpRef|TmpAcc],AT,IsNorm);
	_ ->
	    scan_att_chars(ExpRef ++ T1, S1, Delim, Acc,TmpAcc, AT,IsNorm)
    end;
scan_att_chars("<" ++ _T, S0, _Delim, _Acc,_, _,_) -> % Tags not allowed here
    ?fatal(unexpected_char, S0);
%% CDATA: each whitespace character is replaced by a single space.
scan_att_chars([H|T], S0, Delim, Acc, _TmpAcc,'CDATA',IsNorm)
  when ?whitespace(H) ->
    ?bump_col(1),
    scan_att_chars(T, S, Delim, [$\s|Acc], [],'CDATA',IsNorm);
%% Tokenized types: whitespace ends a token, which is checked, and the
%% remaining whitespace run is normalized away.
scan_att_chars([H|T], S0, Delim, Acc, TmpAcc,AT,IsNorm)
  when ?whitespace(H) ->
    ?bump_col(1),
    {T1,S1,IsNorm2} = normalize(T,S,IsNorm),
    check_att_default_val(S#xmerl_scanner.validation,TmpAcc,AT,S1),
    scan_att_chars(T1, S1, Delim, [$\s|Acc],[], AT,IsNorm2);
scan_att_chars([H|T], S0, Delim, Acc, TmpAcc,AT,IsNorm) ->
    ?bump_col(1),
    valid_Char(S#xmerl_scanner.validation,AT,H,S),
    scan_att_chars(T, S, Delim, [H|Acc], [H|TmpAcc],AT,IsNorm).
%% True for the single-character strings that result from expanding one
%% of the predefined entities (amp, lt, gt, apos, quot) or "%"; such
%% expansions are treated as data, never as markup delimiters.
markup_delimeter(Str) ->
    lists:member(Str, ["&", "\"", "\'", "<", ">", "%"]).
%% Validation hook for one token of an attribute value: only active
%% when Validation == true and the token is non-empty; the reversed
%% token is put back in reading order and handed to the type-specific
%% three-argument check.
check_att_default_val(Validation, RevName, Ent, S) ->
    case {Validation, RevName} of
	{true, []} ->
	    ok;
	{true, _} ->
	    check_att_default_val(lists:reverse(RevName), Ent, S);
	_ ->
	    ok
    end.
%% Type-specific validity bookkeeping for a token Name of an attribute
%% value: ENTITY/ENTITIES and IDREF/IDREFS record forward references as
%% 'undeclared'; ID values must be Names and unique in the document.
check_att_default_val(Name,Ent,S=#xmerl_scanner{rules_write_fun=Write})
  when Ent == 'ENTITY'; Ent == 'ENTITIES' ->
    case xmerl_lib:is_letter(hd(Name)) of
	true -> ok;
	_ -> ?fatal({illegal_first_character,Ent,Name},S)
    end,
    SName = list_to_atom(Name),
    Write(entity,SName,undeclared,S);
check_att_default_val(Name,IDR,S=#xmerl_scanner{rules_write_fun=Write})
  when IDR == 'IDREF'; IDR == 'IDREFS' ->
    case xmerl_lib:is_letter(hd(Name)) of
	true -> ok;
	_ -> ?fatal({illegal_first_character,IDR,Name},S)
    end,
    SName = list_to_atom(Name),
    Write(id,SName,undeclared,S);
check_att_default_val(Name,'ID',S=#xmerl_scanner{rules_write_fun=Write,
						 rules_read_fun=Read,
						 rules_delete_fun=Delete}) ->
    case xmerl_lib:is_name(Name) of
	false ->
	    ?fatal({'ID_names_must_be_Name_production',Name},S);
	_ ->
	    ok
    end,
    SName = if
		list(Name) -> list_to_atom(Name);
		true -> Name
	    end,
    case Read(id,SName,S) of
	undeclared -> %% was referenced in IDREF/IDREFS before defined
	    Delete(id,SName,S);
	SName -> ?fatal({values_must_be_unique,'ID',SName},S);
	undefined -> ok
    end,
    Write(id,SName,SName,S);
check_att_default_val(_,_,_) ->
    ok.
%% Character check for attribute-value characters. When validating and
%% the type is NMTOKEN(S), the character must be a NameChar; otherwise
%% it only needs to be a legal XML Char. The [C] clause unwraps a
%% single-character string before checking.
valid_Char(true,AT,C,S) when AT=='NMTOKEN';AT=='NMTOKENS' ->
    vc_Valid_Char(AT,C,S);
valid_Char(_,_,[C],S) ->
    case xmerl_lib:is_char(C) of
	true ->
	    ok;
	false ->
	    ?fatal({unexpected_char,C}, S)
    end;
valid_Char(_,_,C,S) ->
    case xmerl_lib:is_char(C) of
	true ->
	    ok;
	false ->
	    ?fatal({unexpected_char,C}, S)
    end.
%% [43] content
%% Entry point for scanning element content: first child at position 1,
%% empty accumulator, no pending markup delimiter.
scan_content(T, S, Name, Attrs, Space, Lang, Parents, NS) ->
    scan_content(T, S, 1, Name, Attrs, Space, Lang, Parents, NS, [], []).
%% [43] content
%% Scan the children of an element until the matching end tag.
%% The final argument holds a pending markup-delimiter string produced
%% by entity expansion (so e.g. an expanded "&" is read as data).
scan_content("<", S= #xmerl_scanner{continuation_fun = F},
	     Pos, Name, Attrs, Space, Lang, Parents, NS, Acc,_) ->
    ?dbg("trailing < detected~n", []),
    F(fun(MoreBytes, S1) -> scan_content("<" ++ MoreBytes, S1,
					 Pos, Name, Attrs,
					 Space, Lang, Parents, NS, Acc,[]) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
%% End of an external or internal parsed entity's replacement text:
%% return the content gathered so far.
scan_content([], S=#xmerl_scanner{environment={external,{entity,_}}},
	     _Pos, _Name, _Attrs, _Space, _Lang, _Parents, _NS, Acc,_) ->
    {lists:reverse(Acc),[],S};
scan_content([], S=#xmerl_scanner{environment=internal_parsed_entity},
	     _Pos, _Name, _Attrs, _Space, _Lang, _Parents, _NS, Acc,_) ->
    {lists:reverse(Acc),[],S};
scan_content([], S=#xmerl_scanner{continuation_fun = F},
	     Pos, Name, Attrs, Space, Lang, Parents, NS, Acc,_) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_content(MoreBytes, S1,
					 Pos, Name, Attrs,
					 Space, Lang, Parents, NS, Acc,[]) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
%% End tag: must match the element name, then "</Name S? >".
scan_content("</" ++ T, S0, _Pos, Name, _Attrs, _Space, _Lang,
	     _Parents, _NS, Acc,[]) ->
    ?bump_col(2),
    {ETagName, _NamespaceInfo, T1, S1} = scan_name(T, S),
    if ETagName == Name ->
	    ok;
       true ->
	    ?fatal({endtag_does_not_match, {ETagName, Name}}, S1)
    end,
    ?strip2,
    case T2 of
	">" ++ T3 ->
	    {lists:reverse(Acc), T3, S2};
	_ ->
	    ?fatal({error,{unexpected_end_of_STag}},S)
    end;
scan_content([$&|_T]=Str,
	     #xmerl_scanner{environment={external,{entity,EName}}} = S0,
	     Pos, Name, Attrs, Space, Lang, Parents, NS, Acc,_) ->
    {_EntV,T1,S1}=scan_entity_value(Str,S0 ,[],EName,general),
    %%This is a problem. All referenced entities in the external entity must be checked for recursion, thus parse the contentbut,skip result.
    scan_content(T1,S1,Pos, Name, Attrs, Space, Lang, Parents, NS, Acc,[]);
scan_content("&"++T,
	     #xmerl_scanner{environment=internal_parsed_entity} = S,
	     Pos, Name, Attrs, Space, Lang, Parents, NS, Acc,_) ->
    {_, T1, S1} = scan_reference(T, S),
    scan_content(T1,S1,Pos, Name, Attrs, Space, Lang, Parents, NS, Acc,[]);
%% Normal reference: expansions that are markup delimiters are carried
%% in the last argument so they are consumed as character data.
scan_content("&" ++ T, S0, Pos, Name, Attrs, Space, Lang, Parents, NS, Acc,[]) ->
    ?bump_col(1),
    {ExpRef, T1, S1} = scan_reference(T, S),
    case markup_delimeter(ExpRef) of
	true -> scan_content(ExpRef++T1,S1,Pos,Name,Attrs,Space,Lang,Parents,NS,Acc,ExpRef);
	_ ->
	    scan_content(ExpRef++T1,S1,Pos,Name,Attrs,Space,Lang,Parents,NS,Acc,[])
    end;
scan_content("<!--" ++ T, S, Pos, Name, Attrs, Space, Lang, Parents, NS, Acc,[]) ->
    {_, T1, S1} = scan_comment(T, S, Pos, Parents, Lang),
    scan_content(T1, S1, Pos+1, Name, Attrs, Space, Lang, Parents, NS, Acc,[]);
%% Child markup: element, CDATA section, or processing instruction.
%% The acc_fun may return either {Acc,S} or {Acc,Pos,S}.
scan_content("<" ++ T, S0, Pos, Name, Attrs, Space, Lang, Parents, NS, Acc,[]) ->
    ?bump_col(1),
    {Markup, T1, S1} =
	scan_content_markup(T, S, Pos, Name, Attrs, Space, Lang, Parents, NS),
    AccF = S1#xmerl_scanner.acc_fun,
    {NewAcc, NewPos, NewS} = case AccF(Markup, Acc, S1) of
				 {Acc2, S2} ->
				     {Acc2, Pos+1, S2};
				 {Acc2, Pos2, S2} ->
				     {Acc2, Pos2, S2}
			     end,
    scan_content(T1, NewS, NewPos, Name, Attrs, Space, Lang,
		 Parents, NS, NewAcc,[]);
scan_content([_H|T], S= #xmerl_scanner{environment={external,{entity,_}}},
	     Pos, Name, Attrs, Space, Lang, Parents, NS, Acc,_) ->
    %% Guess we have to scan the content to find any internal entity
    %% references.
    scan_content(T,S,Pos, Name, Attrs, Space, Lang, Parents, NS, Acc,[]);
%% Character data: wrap in an #xmlText{} node with start/end events.
scan_content(T, S=#xmerl_scanner{acc_fun = F,
				 event_fun = Event,
				 line = _L},
	     Pos, Name, Attrs, Space, Lang, Parents, NS, Acc,MarkupDel) ->
    Text0 = #xmlText{pos = Pos,
		     parents = Parents},
    S1 = #xmerl_scanner{} = Event(#xmerl_event{event = started,
					       line = S#xmerl_scanner.line,
					       data = Text0}, S),
    {Data, T1, S2} = scan_char_data(T, S1, Space,MarkupDel),
    Text = Text0#xmlText{value = Data},
    S3 = #xmerl_scanner{} = Event(#xmerl_event{event = ended,
					       line = S2#xmerl_scanner.line,
					       data = Text}, S2),
    {NewAcc, NewPos, NewS} = case F(Text, Acc, S3) of
				 {Acc4, S4} ->
				     {Acc4, Pos+1, S4};
				 {Acc4, Pos4, S4} ->
				     {Acc4, Pos4, S4}
			     end,
    scan_content(T1, NewS, NewPos, Name, Attrs, Space, Lang,
		 Parents, NS, NewAcc,[]).
%% Dispatch after a "<" seen in content: a CDATA section, a processing
%% instruction, or (default) a child element.
scan_content_markup([], S=#xmerl_scanner{continuation_fun = F},
		    Pos, Name, Attrs, Space, Lang, Parents, NS) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_content_markup(
			      MoreBytes,S1,Pos,Name,
			      Attrs,Space,Lang,Parents,NS) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_content_markup("![CDATA[" ++ T, S0, Pos, _Name, _Attrs,
		    _Space, _Lang, Parents, _NS) ->
    ?bump_col(8),
    scan_cdata(T, S, Pos, Parents);
scan_content_markup("?"++T,S0,Pos,_Name,_Attrs,_Space,_Lang,_Parents,_NS) ->
    ?bump_col(1),
    scan_pi(T, S, Pos);
%% NOTE(review): delegates to a 7-argument scan_element defined
%% elsewhere in this module (not the 12-argument clause above).
scan_content_markup(T, S, Pos, _Name, _Attrs, Space, Lang, Parents, NS) ->
    scan_element(T, S, Pos, Space, Lang, Parents, NS).
%% Entry point for character-data scanning: start with an empty
%% accumulator.
scan_char_data(T, S, Space, MarkupDelim) ->
    scan_char_data(T, S, Space, MarkupDelim, []).
%% [14] CharData
%% [14] CharData
%% Accumulate character data until "&" or "<" (unless that character
%% was produced by entity expansion — then MUD matches it and it is
%% taken as data). "]]>" is illegal in character data.
scan_char_data([], S=#xmerl_scanner{environment={external,{entity,_}}},
	       _Space,_MUD, Acc) ->
    {lists:reverse(Acc), [], S};
scan_char_data([], S=#xmerl_scanner{environment=internal_parsed_entity},
	       _Space, _MUD,Acc) ->
    {lists:reverse(Acc), [], S};
scan_char_data([], S=#xmerl_scanner{continuation_fun = F}, Space, _MUD,Acc) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_char_data(MoreBytes,S1,Space,_MUD,Acc) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
%% An "&" that came from expanding &amp; is data, not a reference.
scan_char_data([$&|T], S,Space,"&",Acc) ->
    scan_char_data(T, S, Space,[], [$&|Acc]);
scan_char_data(T=[$&|_], S,_Space,_MUD,Acc) ->
    {lists:reverse(Acc), T, S};
scan_char_data("]]>" ++ _T, S, _Space,_MUD, _Acc) ->
    %% See Section 2.4: Especially:
    %% "The right angle bracket (>) MAY be represented using the string "&gt;",
    %% and MUST, for compatibility, be escaped using either ">" or a
    %% character reference when it appears in the string "]]>" in content, when
    %% that string is not marking the end of a CDATA section.
    ?fatal(unexpected_cdata_end, S);
%% A "<" that came from expanding &lt; is data, not markup.
scan_char_data([$<|T],S,Space,"<", Acc) ->
    scan_char_data(T, S, Space,[], [$<|Acc]);
scan_char_data(T = [$<|_], S, _Space,_MUD,Acc) ->
    {lists:reverse(Acc), T, S};
scan_char_data(T = [H|_], S, Space,MUD, Acc) when ?whitespace(H) ->
    {NewAcc, T1, S1} = accumulate_whitespace(T, S, Space, Acc),
    scan_char_data(T1, S1, Space,MUD,NewAcc);
scan_char_data([H1,H2|_T],S,_Space,_MUD,_Acc) when ?non_character(H1,H2) ->
    ?fatal({error,{not_allowed_to_use_Unicode_noncharacters}},S);
%% NOTE(review): unreachable — "]]>" is already matched by the earlier
%% clause above; kept for fidelity with the original source.
scan_char_data("]]>"++_T,S,_Space,_MUD,_Acc) ->
    ?fatal({error,{illegal_character_in_content,"]]>"}},S);
scan_char_data([H|T],S0,Space,MUD,Acc) ->
    ?bump_col(1),
    wfc_legal_char(H,S),
    scan_char_data(T,S,Space,MUD,[H|Acc]).
%% [18]-[21] CDATA
%% Entry point for CDATA-section scanning: start with an empty
%% accumulator.
scan_cdata(Str, S, Pos, Parents) ->
    scan_cdata(Str, S, Pos, Parents, []).
%% [18]-[21] CDSect: accumulate raw characters until "]]>", returning
%% an #xmlText{} node of type cdata. Each character must be a legal
%% XML Char.
scan_cdata([], S=#xmerl_scanner{continuation_fun = F}, Pos, Parents, Acc) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_cdata(MoreBytes, S1, Pos, Parents, Acc) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_cdata("]]>" ++ T, S0, Pos, Parents, Acc) ->
    ?bump_col(3),
    {#xmlText{pos = Pos,
	      parents = Parents,
	      value = lists:reverse(Acc),
	      type = cdata}, T, S};
scan_cdata([H|T], S0, Pos, Parents, Acc) ->
    case xmerl_lib:is_char(H) of
	true ->
	    ?bump_col(1),
	    scan_cdata(T, S, Pos, Parents, [H|Acc]);
	false ->
	    ?fatal({unexpected_char,H}, S0)
    end.
%% [67] Reference
%% Returns a three tuple {Result, RestBuf, State}.
%% [67] Reference
%% Dispatch on the reference form after "&": "#x" hex char ref,
%% "#" decimal char ref, otherwise a general entity reference.
%% Returns a three tuple {Result, RestBuf, State}.
scan_reference([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_reference(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_reference("#x" ++ T, S0) ->
    %% [66] CharRef
    ?bump_col(1),
    if hd(T) /= $; ->
	    scan_char_ref_hex(T, S, 0);
       true ->
	    ?fatal(invalid_char_ref, S)
    end;
scan_reference("#" ++ T, S0) ->
    %% [66] CharRef
    ?bump_col(1),
    if hd(T) /= $; ->
	    scan_char_ref_dec(T, S, []);
       true ->
	    ?fatal(invalid_char_ref, S)
    end;
scan_reference(T, S) ->
    case catch scan_entity_ref(T, S) of
	{'EXIT', _} ->
	    ?fatal(error_scanning_entity_ref,S);
	Other ->
	    Other
    end.
%% Chapter 4.4.2: "... the replacement text of entities used to escape
%% markup delimiters (the entities amp, lt, gt, apos, quot) is always treated
%% as data. (The string "AT&amp;T;" expands to "AT&T;" and the remaining
%% ampersand is not recognized as an entity-reference delimiter.)"
%%
%% How to achieve this? My current approach is to insert the *strings* "&",
%% "<", ">", "'", and "\"" instead of the characters. The processor will
%% ignore them when performing multiple expansions. This means, for now, that
the character data output by the processor is ( 1 - 2 levels ) deep .
%% At some suitable point, we should flatten these, so that application-level
%% processors should not have to be aware of this detail.
%% [68] EntityRef
%% The five predefined entities expand to single-character *strings*
%% so the processor treats them as data on re-scan (see the comment
%% block above); anything else is looked up and expanded.
scan_entity_ref([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_entity_ref(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_entity_ref("amp;" ++ T, S0) ->
    ?bump_col(4),
    {"&", T, S};
scan_entity_ref("lt;" ++ T, S0) ->
    ?bump_col(3),
    {"<", T, S};
scan_entity_ref("gt;" ++ T, S0) ->
    ?bump_col(3),
    {">", T, S};
scan_entity_ref("apos;" ++ T, S0) ->
    ?bump_col(5),
    {"'", T, S};
scan_entity_ref("quot;" ++ T, S0) ->
    ?bump_col(5),
    {"\"", T, S};
scan_entity_ref(T, S) ->
    {Name, _NamespaceInfo, T1, S1} = scan_name(T, S),
    %% The mandatory ";" is asserted by the match below.
    ";" ++ T2 = T1,
    S2 = S1,
    Entity = expand_reference(Name, S2),
    {Entity, T2, S2}.
%% [69] PEReference
%% [69] PEReference ::= '%' Name ';'
%% Scan the parameter-entity name and require the terminating ";"
%% (the column counter is bumped for the consumed semicolon).
scan_pe_reference(Str, S0) ->
    {PEName, _NSInfo, Rest0, S1} = scan_name(Str, S0),
    ";" ++ Rest = Rest0,
    S2 = S1#xmerl_scanner{col = S1#xmerl_scanner.col + 1},
    {PEName, Rest, S2}.
%% Expand a parameter-entity reference from the rules table. External
%% IDs are returned as tuples for the caller to fetch; a text value is
%% padded with spaces unless expansion happens inside a literal.
expand_pe_reference(Name, #xmerl_scanner{rules_read_fun = Read} = S,WS) ->
    case Read(parameter_entity, Name, S) of
%	undefined when S#xmerl_scanner.validation==true;
%	               S#xmerl_scanner.standalone==yes;
%	               S#xmerl_scanner.environment==prolog ->
%	    ?fatal({unknown_parameter_entity, Name}, S); % WFC or VC failure
	undefined ->
	    ?fatal({unknown_parameter_entity, Name}, S); % WFC or VC failure
	Err={error,_Reason} ->
	    ?fatal(Err,S);
	Tuple when tuple(Tuple) ->
	    Tuple;
	Result ->
	    if
		WS == in_literal -> Result;
		true -> " "++Result++" "
	    end
    end.
% Currently unused
%
% expand_external_pe_reference(Name, #xmerl_scanner{rules_read_fun = Read} = S) ->
% case Read(parameter_entity, Name, S) of
% undefined ->
% ?fatal({unknown_parameter_entity, Name}, S);
% Result ->
% fetch_DTD(Result,S)
% end.
%% [68] EntityReference
%% [68] EntityRef expansion. Inside entity replacement text the name is
%% merely echoed (recursion is checked elsewhere); otherwise the entity
%% is looked up, its declaration environment and internal-entity
%% well-formedness are checked, and the replacement text is returned.
expand_reference(Name, #xmerl_scanner{environment={external,{entity,_}}}) ->
    atom_to_list(Name);
expand_reference(Name, #xmerl_scanner{environment=internal_parsed_entity}) ->
    atom_to_list(Name);
expand_reference(Name, #xmerl_scanner{rules_read_fun = Read} = S) ->
    case Read(entity, Name, S) of
	undefined ->
	    ?fatal({unknown_entity_ref, Name}, S);
	{_,external,{error,enoent}} ->
	    ?fatal({error,{entity_target_not_found,{error,enoent},Name}},S);
	{DefEnv,EntType,Value} ->
	    wfc_Entity_Declared(DefEnv,S,Name),
	    wfc_Internal_parsed_entity(EntType,Value,S),
	    Value
    end.
%% [66] CharRef
%% [66] CharRef, decimal form: accumulate digits up to ";" and check
%% the resulting code point is a legal XML Char.
%% NOTE(review): a non-digit, non-";" character has no matching clause
%% and raises function_clause — confirm this is the intended failure.
scan_char_ref_dec([], S=#xmerl_scanner{continuation_fun = F}, Acc) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_char_ref_dec(MoreBytes, S1, Acc) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_char_ref_dec([H|T], S0, Acc) when H >= $0, H =< $9 ->
    ?bump_col(1),
    scan_char_ref_dec(T, S, [H|Acc]);
scan_char_ref_dec(";" ++ T, S0, Acc) ->
    ?bump_col(1),
    Ref = list_to_integer(lists:reverse(Acc)),
    wfc_legal_char(Ref,S),
    {[Ref], T, S}. %% changed return value from [[Ref]]
%% [66] CharRef, hexadecimal form: fold hex digits into the integer
%% accumulator (shift left 4 bits per digit) until ";", then check the
%% code point is a legal XML Char.
scan_char_ref_hex([], S=#xmerl_scanner{continuation_fun = F}, Acc) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_char_ref_hex(MoreBytes, S1, Acc) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_char_ref_hex([H|T], S0, Acc) when H >= $0, H =< $9 ->
    ?bump_col(1),
    Dec = H - $0,
    scan_char_ref_hex(T, S, (Dec bor (Acc bsl 4)));
scan_char_ref_hex([H|T], S0, Acc) when H >= $a, H =< $f ->
    ?bump_col(1),
    Dec = (H - $a) + 10,
    scan_char_ref_hex(T, S, (Dec bor (Acc bsl 4)));
scan_char_ref_hex([H|T], S0, Acc) when H >= $A, H =< $F ->
    ?bump_col(1),
    Dec = (H - $A) + 10,
    scan_char_ref_hex(T, S, (Dec bor (Acc bsl 4)));
scan_char_ref_hex(";" ++ T, S0, Acc) ->
    ?bump_col(1),
    wfc_legal_char(Acc,S),
    {[Acc], T, S}. %% changed return value from [[Acc]]
%% [25] Eq
%%% Eq ::= S? '=' S?
%% [25] Eq ::= S? '=' S?
%% Consume optional whitespace, a mandatory "=", then optional
%% whitespace. (The ?strip1/?strip3 macros bind T1/S1 and T3/S3.)
scan_eq(T, S) ->
    ?strip1,
    case T1 of
	[$=|T2] ->
	    S2 = S1,
	    ?strip3,
	    {T3, S3};
	_ ->
	    ?fatal(assignment_expected,S)
    end.
%% scan_name/2
%%
%% We perform some checks here to make sure that the names conform to
%% the "Namespaces in XML" specification. This is an option.
%%
%% Qualified Name:
%% [6] QName ::= (Prefix ':')? LocalPart
%% [7] Prefix ::= NCName
%% [8] LocalPart ::= NCName
%% [4] NCName ::= (Letter | '_') (NCNameChar)*
%% [5] NCNameChar ::= Letter | Digit | '.' | '-' | '_'
%% | CombiningChar | Extender
%% The effect of XML Names (namespace) conformance is that:
%% - All element types and attribute names contain either zero or one colon
%% - No entity names, PI targets, or notation names contain any colons.
%%
%% scan_name_no_colons/2 will ensure that the name contains no colons iff
%% the scanner has been told to be namespace conformant. Otherwise, it will
%% behave exactly like scan_name/2.
%%
%% Scan a name that must not contain any colon when the scanner is
%% namespace conformant (entity names, PI targets, notation names);
%% otherwise behaves exactly like scan_name/2.
scan_name_no_colons(Str, S) ->
    case S#xmerl_scanner.namespace_conformant of
	false ->
	    scan_name(Str, S);
	true ->
	    %% Temporarily switch the scanner to 'no_colons' mode so
	    %% scan_name rejects any ":" in the name, then restore it.
	    {Target, NSI, Rest, S1} =
		scan_name(Str, S#xmerl_scanner{namespace_conformant = no_colons}),
	    {Target, NSI, Rest, S1#xmerl_scanner{namespace_conformant = true}}
    end.
%% [5] Name ::= (Letter | '_' | ':') (NameChar)*
%% [5] Name ::= (Letter | '_' | ':') (NameChar)*
%% Scan a Name; enforces "Namespaces in XML" restrictions on leading
%% ":" depending on the scanner's conformance mode, and expands a
%% leading parameter-entity reference in the external subset.
scan_name([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_name(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_name(Str = [$:|T], S0 = #xmerl_scanner{namespace_conformant = NSC}) ->
    if NSC == false ->
	    ?bump_col(1),
	    scan_nmtoken(T, S, [$:], NSC);
       NSC == no_colons ->
	    ?fatal({invalid_NCName, lists:sublist(Str, 1, 6)}, S0);
       true ->
	    %% In order to conform with the "Namespaces in XML" spec,
	    %% we cannot allow names to begin with ":"
	    ?fatal({invalid_NCName, lists:sublist(Str, 1, 6)}, S0)
    end;
scan_name([$_|T], S0 = #xmerl_scanner{namespace_conformant = NSC}) ->
    ?bump_col(1),
    scan_nmtoken(T, S, [$_], NSC);
%% WFC: PE references are forbidden in the internal subset (prolog).
scan_name("%"++_T,S=#xmerl_scanner{environment=prolog}) ->
    ?fatal({error,{wfc_PEs_In_Internal_Subset}},S);
scan_name("%"++T,S0=#xmerl_scanner{environment={external,_}}) ->
    %% parameter entity that expands to a name
    ?bump_col(1),
    {PERefName, T1, S1} = scan_pe_reference(T, S),
    ExpRef = expand_pe_reference(PERefName, S1,as_PE),
    {_,T2,S2} = strip(ExpRef ++ T1,S1),
    scan_name(T2,S2);
scan_name([H|T], S0 = #xmerl_scanner{namespace_conformant = NSC}) ->
    case xmerl_lib:is_letter(H) of
	true ->
	    ?bump_col(1),
	    scan_nmtoken(T, S, [H], NSC);
	false ->
	    ?fatal({invalid_name, lists:sublist([H|T], 1, 6)}, S0)
    end;
scan_name(Str, S) ->
    ?fatal({invalid_name, Str}, S).
%% Continue scanning a name token whose first character(s) are already
%% in Acc: no prefix seen yet, local part starts as Acc, and the
%% 7-bit-ASCII flag is seeded from the first accumulated character.
scan_nmtoken(Str, S, Acc, NSC) ->
    scan_nmtoken(Str, S, Acc, [], Acc, NSC, is7bAscii(hd(Acc), true)).
%% scan_nmtoken/2
%% [7] Nmtoken ::= (NameChar)+
%% [7] Nmtoken ::= (NameChar)+
%% Scan a name token that may start with any NameChar (unlike a Name).
scan_nmtoken([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_nmtoken(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_nmtoken(Str = [H|T], S) ->
    case xmerl_lib:is_namechar(H) of
	true ->
	    scan_nmtoken(T, S#xmerl_scanner{col = S#xmerl_scanner.col+1},
			 _Acc = [H], _Prefix = [], _Local = [H],
			 _NamespaceConformant = false,is7bAscii(H,true));
	false ->
	    ?fatal({invalid_nmtoken, lists:sublist(Str, 1, 6)}, S)
    end.
%% Worker for name-token scanning: accumulates the full token in Acc
%% and tracks Prefix/Local around at most one ":" (namespace mode).
%% Returns {NameAtom, NamespaceInfo, Rest, S}.
scan_nmtoken([], S=#xmerl_scanner{continuation_fun = F},
	     Acc, Prefix, Local, NSC,Is7bAscii) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_nmtoken(MoreBytes,S1,Acc,Prefix,Local,NSC,Is7bAscii) end,
      fun(S1) -> {list_to_atom(lists:reverse(Acc)),
		  namespace_info(Prefix, Local),[],S1} end,
      S);
%% whitespace marks the end of a name
scan_nmtoken(Str = [H|_], S, Acc, Prefix, Local, _NSC,true) when ?whitespace(H) ->
    %% we don't strip here because the occurrence of whitespace may be an error
    %% e.g. <!ELEMENT spec (front, body, back ?)>
    NmString = lists:reverse(Acc),
    {list_to_atom(NmString), namespace_info(Prefix, Local), Str, S};
scan_nmtoken(Str = [$:|_], S, Acc, [], _Local, no_colons,_Is7bAscii) ->
    ?fatal({invalid_NCName,
	    lists:sublist(lists:reverse(Acc) ++ Str, 1, 6)}, S);
%% First ":" seen: what was accumulated so far becomes the prefix.
scan_nmtoken([$:|T], S0, Acc, [], Local, NSC, Is7bAscii) ->
    ?bump_col(1),
    scan_nmtoken(T, S, [$:|Acc], lists:reverse(Local), [], NSC,Is7bAscii);
scan_nmtoken(Str = [$:|_T], S, Acc, _Prefix, _Local, _NSC = true,_Is7bAscii) ->
    %% non-empty Prefix means that we've encountered a ":" already.
    %% Conformity with "Namespaces in XML" requires
    %% at most one colon in a name
    ?fatal({invalid_NCName,
	    lists:sublist(lists:reverse(Acc) ++ Str, 1, 6)}, S);
%% a non-namechar also marks the end of a name
scan_nmtoken(Str=[H|T], S0, Acc, Prefix, Local, NSC,Is7bAscii) ->
    ?bump_col(1),
    case xmerl_lib:is_namechar(H) of
	true ->
	    scan_nmtoken(T, S, [H|Acc], Prefix, [H|Local], NSC,is7bAscii(H,Is7bAscii));
	_ ->
	    NmStr = lists:reverse(Acc),
	    {list_to_atom(NmStr), namespace_info(Prefix, Local), Str, S}
    end.
%% Build the {Prefix, LocalPart} namespace-info tuple; an empty prefix
%% means the name carried no namespace information. The local part
%% arrives in reverse scan order and is put back in reading order.
namespace_info([], _LocalAcc) ->
    [];
namespace_info(Prefix, LocalAcc) ->
    {Prefix, lists:reverse(LocalAcc)}.
%% Track whether every character seen so far is 7-bit ASCII: once the
%% flag is false it stays false; otherwise a character above 127
%% clears it.
is7bAscii(_Ch, false) ->
    false;
is7bAscii(Ch, _SoFar) ->
    Ch =< 127.
%% [11] SystemLiteral
%% [11] SystemLiteral: a system identifier quoted with either '"' or
%% "'"; the opening quote determines the closing delimiter.
scan_system_literal([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_system_literal(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_system_literal("\"" ++ T, S) ->
    scan_system_literal(T, S, $", []);
scan_system_literal("'" ++ T, S) ->
    scan_system_literal(T, S, $', []).
%% Accumulate system-literal characters until the opening quote
%% character (Delimiter) is seen again.
scan_system_literal([], S=#xmerl_scanner{continuation_fun = F},
		    Delimiter, Acc) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_system_literal(MoreBytes,S1,Delimiter,Acc) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_system_literal([H|T], S, H, Acc) ->
    {lists:reverse(Acc), T, S#xmerl_scanner{col = S#xmerl_scanner.col+1}};
scan_system_literal([H|T], S, Delimiter, Acc) ->
    scan_system_literal(T, S#xmerl_scanner{col = S#xmerl_scanner.col+1},
			Delimiter, [H|Acc]).
%% [12] PubidLiteral
%% [12] PubidLiteral: a public identifier quoted with '"' or "'";
%% any other opening character is fatal.
scan_pubid_literal([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_pubid_literal(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_pubid_literal([H|T], S) when H == $"; H == $' ->
    scan_pubid_literal(T, S#xmerl_scanner{col = S#xmerl_scanner.col+1}, H, []);
scan_pubid_literal([H|_T], S) ->
    ?fatal({invalid_pubid_char, H}, S).
%% Accumulate public-identifier characters until the closing quote.
%% Whitespace runs are normalized to a single 16#20 as they are read;
%% other characters must pass is_pubid_char/1.
scan_pubid_literal([], S=#xmerl_scanner{continuation_fun = F},
		   Delimiter, Acc) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_pubid_literal(MoreBytes,S1,Delimiter,Acc) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_pubid_literal([H|T], S, H, Acc) ->
    {lists:reverse(Acc), T, S#xmerl_scanner{col = S#xmerl_scanner.col+1}};
scan_pubid_literal(Str = [H|_], S, Delimiter, Acc) when ?whitespace(H) ->
    %% Before matching public identifiers, all whitespace must be normalized,
    %% so we do that here
    {_, T, S1} = pub_id_strip(Str, S),
    scan_pubid_literal(T, S1, Delimiter, [16#20|Acc]);
scan_pubid_literal([H|T], S, Delimiter, Acc) ->
    case is_pubid_char(H) of
	true ->
	    scan_pubid_literal(
	      T, S#xmerl_scanner{col = S#xmerl_scanner.col+1},
	      Delimiter, [H|Acc]);
	false ->
	    ?fatal({invalid_pubid_char, H}, S)
    end.
%% We do not match whitespace here, even though they're allowed in public
%% identifiers. This is because we normalize this whitespace as we scan
%% (see above in scan_pubid_literal())
%%
%% Character classes allowed in a PubidLiteral. Whitespace is valid in
%% public identifiers but is deliberately not matched here: the caller
%% (scan_pubid_literal/4) normalizes it before this check.
is_pubid_char(C) ->
    (C >= $a andalso C =< $z)
	orelse (C >= $A andalso C =< $Z)
	orelse (C >= $0 andalso C =< $9)
	orelse lists:member(C, "-'()+,./:=?;!*#@$_%").
%% [46] contentspec
%% [46] contentspec ::= 'EMPTY' | 'ANY' | Mixed | children
%% A "(" starts a content-model group; a PE reference (external subset
%% only) is expanded and rescanned.
scan_contentspec([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_contentspec(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_contentspec("EMPTY" ++ T, S0) ->
    ?bump_col(5),
    {empty, T, S};
scan_contentspec("ANY" ++ T, S0) ->
    ?bump_col(3),
    {any, T, S};
%% WFC: PE references are forbidden in the internal subset (prolog).
scan_contentspec("%" ++ _T, S=#xmerl_scanner{environment=prolog}) ->
    ?fatal({error,{wfc_PEs_In_Internal_Subset}},S);
scan_contentspec("%" ++ T, S0) ->
    ?bump_col(1),
    {PERefName, T1, S1} = scan_pe_reference(T, S),
    ExpRef = expand_pe_reference(PERefName, S1,as_PE),
    {_,T2,S2} = strip(ExpRef ++ T1,S1),
    scan_contentspec(T2, S2);
scan_contentspec("(" ++ T, S0) ->
    ?bump_col(1),
    ?strip1,
    scan_elem_content(T1, S1).
%% [47] children
%% [51] Mixed
%% Entry point for a content-model group: context starts as 'children',
%% the combinator mode (seq/choice) is not yet known, accumulator empty.
scan_elem_content(T, S) ->
    scan_elem_content(T, S, children, unknown, []).
%% [47] children / [51] Mixed
%% Scan one parenthesized content-model group. Context is 'children',
%% 'mixed' (after #PCDATA) or 'not_mixed' (nested group); Mode tracks
%% whether separators are "," (seq) or "|" (choice) — mixing is fatal.
scan_elem_content([], S=#xmerl_scanner{continuation_fun = F},
		  Context, Mode, Acc) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes,S1) -> scan_elem_content(MoreBytes,S1,Context,Mode,Acc) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
%% ")" closes the group; an occurrence indicator may follow.
scan_elem_content(")" ++ T, S0, Context, Mode0, Acc0) ->
    ?bump_col(1),
    {Mode, Acc} = case {Mode0, Acc0} of
		      {unknown, [_X]} ->
			  {seq, Acc0};
		      {M, _L} when M == seq; M == choice ->
			  {Mode0, lists:reverse(Acc0)}
		  end,
    {Occurrence, T1, S1} = scan_occurrence(T, S),
    vc_No_Duplicate_Types(S,Context,Acc),
    %% Mixed content must be (#PCDATA) or (#PCDATA|...)*.
    case {Occurrence, Context,Acc} of
	{once, mixed,['#PCDATA']} -> ok; % It is not ok when there are
					 % more names than '#PCDATA'
					 % and no '*'.
	{'*', mixed,_} -> ok;
	{Other, mixed,_} ->
	    ?fatal({illegal_for_mixed_content, Other}, S1);
	_ ->
	    ok
    end,
    ?strip2,
    {format_elem_content({Occurrence, {Mode, Acc}}), T2, S2};
scan_elem_content("#PCDATA" ++ _T, S, not_mixed, _Mode, _Acc) ->
    ?fatal({error,{extra_set_of_parenthesis}},S);
%% #PCDATA must be the first token of the group.
scan_elem_content("#PCDATA" ++ _T, S, _Cont, Mode, Acc)
  when Mode==choice;Mode==seq;Acc/=[] ->
    ?fatal({error,{invalid_format_of_mixed_content}},S);
scan_elem_content("#PCDATA" ++ T, S0, _Context, Mode, Acc) ->
    ?bump_col(7),
    ?strip1,
    scan_elem_content(T1, S1, mixed, Mode, ['#PCDATA'|Acc]);
scan_elem_content("," ++ _T, S, _Context, choice, _Acc) ->
    ?fatal({mixing_comma_and_vertical_bar_in_content_model},S);
scan_elem_content("," ++ T, S0, Context, _Mode, Acc) ->
    ?bump_col(1),
    ?strip1,
    scan_elem_content2(T1, S1, Context, seq, Acc);
scan_elem_content("|" ++ _T, S, _Context, seq, _Acc) ->
    ?fatal({mixing_comma_and_vertical_bar_in_content_model},S);
scan_elem_content("|" ++ T, S0, Context, _Mode, Acc) ->
    ?bump_col(1),
    ?strip1,
    scan_elem_content2(T1, S1, Context, choice, Acc);
scan_elem_content(T, S, Context, Mode, Acc) ->
    scan_elem_content2(T, S, Context, Mode, Acc).
%% Scan one item of a content-model group: a nested "(...)" group, a
%% PE reference to expand, or a Name with an optional occurrence
%% indicator. A delimiter (",", "|", ")" or PE ref) must follow.
scan_elem_content2("(" ++ _T, S, mixed, _Mode, _Acc) ->
    ?fatal({error,
	    {element_names_must_not_be_parenthesized_in_mixed_content}},S);
scan_elem_content2("(" ++ T, S0, Context, Mode, Acc) ->
    ?bump_col(1),
    ?strip1,
    {Inner, T2, S2} = scan_elem_content(T1, S1, not_mixed, unknown, []),
    scan_elem_content(T2, S2, Context, Mode, [Inner|Acc]);
%% WFC: PE references are forbidden in the internal subset (prolog).
scan_elem_content2("%" ++ _T,S=#xmerl_scanner{environment=prolog},_Context,_Mode,_Acc) ->
    ?fatal({error,{wfc_PEs_In_Internal_Subset}},S);
scan_elem_content2("%" ++ T, S0, Context, Mode, Acc) ->
    ?bump_col(1),
    {PERefName, T1, S1} = scan_pe_reference(T, S),
    ExpRef = expand_pe_reference(PERefName, S1,as_PE),
    {_,T2,S2}=strip(ExpRef++T1,S1),
    scan_elem_content(T2, S2, Context, Mode, Acc);
scan_elem_content2(T, S, Context, Mode, Acc) ->
    {Name, _NameStr, T1, S1} = scan_name(T, S),
    {Occurrence, T2, S2} = scan_occurrence(T1, S1),
    %% Names in mixed content must not carry occurrence indicators.
    case {Occurrence, Context} of
	{once, mixed} -> ok;
	{Other, mixed} ->
	    ?fatal({illegal_for_mixed_content, Other}, S1);
	_ ->
	    ok
    end,
    ?strip3,
    mandatory_delimeter_wfc(T3,S3),
    NewAcc = [format_elem_content({Occurrence, Name}) | Acc],
    scan_elem_content(T3, S3, Context, Mode, NewAcc).
%% A content particle that occurs exactly once is represented by the
%% bare name/group; any other occurrence indicator keeps the
%% {Occurrence, What} wrapper.
format_elem_content({once, What}) ->
    What;
format_elem_content(Wrapped) ->
    Wrapped.
%% Scan an optional occurrence indicator ("?", "+", "*") after a
%% content particle; absence means the particle occurs exactly once.
scan_occurrence([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_occurrence(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_occurrence([$?|T], S0) ->
    ?bump_col(1),
    {'?', T, S};
scan_occurrence([$+|T], S0) ->
    ?bump_col(1),
    {'+', T, S};
scan_occurrence([$*|T], S0) ->
    ?bump_col(1),
    {'*', T, S};
scan_occurrence(T, S) ->
    {once, T , S}.
%%% Tests of Validity Constraints
%% first part of VC: Name Token
%% First part of VC: Name Token — every character of an NMTOKEN(S)
%% attribute value must be a NameChar.
vc_Valid_Char(_AT,C,S) ->
    case xmerl_lib:is_namechar(C) of
	true ->
	    ok;
	_ ->
	    ?fatal({error,{validity_constraint_Name_Token,C}},S)
    end.
% Currently unused
%
% vc_test_attr_value(_,#xmerl_scanner{validation=false}) ->
% ok;
% vc_test_attr_value(Attr={_,'ID',_,_,_},S) ->
% vc_ID_Attribute_Default(Attr,S);
% vc_test_attr_value({_,{enumeration,_NameL},_,_,_},_S) ->
% ok.
%% VC: ID Attribute Default — an ID attribute must be declared
%% #IMPLIED or #REQUIRED. Only enforced when validating; non-ID
%% attribute definitions pass through.
vc_ID_Attribute_Default(_,#xmerl_scanner{validation=false}) ->
    ok;
vc_ID_Attribute_Default({_,'ID',_,Def,_},_S)
  when Def=='#IMPLIED';Def=='#REQUIRED' ->
    ok;
vc_ID_Attribute_Default({_,'ID',_,Def,_},S) ->
    ?fatal({error,{validity_constraint_error_ID_Attribute_Default,Def}},S);
vc_ID_Attribute_Default(_,_) ->
    ok.
%% VC: Enumeration — a declared (string) default value of an enumerated
%% attribute type must be one of the enumerated names. Non-string
%% defaults (e.g. #IMPLIED) pass through.
vc_Enumeration({_Name,{_,NameList},DefaultVal,_,_},S)
  when list(DefaultVal) ->
    case lists:member(list_to_atom(DefaultVal),NameList) of
	true ->
	    ok;
	_ ->
	    ?fatal({error,{vc_enumeration,list_to_atom(DefaultVal),NameList}},S)
    end;
vc_Enumeration({_Name,{_,_NameList},_DefaultVal,_,_},_S) ->
    ok.
%% VC: Entity Name — a (string) default value of an ENTITY/ENTITIES
%% attribute must name declared unparsed (NDATA) entities. For
%% ENTITIES the value is split into names via scan_name before each
%% name is checked against the rules table.
vc_Entity_Name({_Name,'ENTITY',DefaultVal,_,_},S) when list(DefaultVal) ->
    Read = S#xmerl_scanner.rules_read_fun,
    case Read(entity,list_to_atom(DefaultVal),S) of
	{_,external,{_,{ndata,_}}} ->
	    ok;
	_ -> ?fatal({error,{vc_Entity_Name,list_to_atom(DefaultVal)}},S)
    end;
vc_Entity_Name({_Name,'ENTITY',_,_,_},_S) ->
    ok;
vc_Entity_Name({_,'ENTITIES',DefaultVal,_,_},S) when list(DefaultVal) ->
    Read = S#xmerl_scanner.rules_read_fun,
    %% Split the default value into a list of entity names.
    NameListFun = fun([],Acc,_St,_Fun) ->
			  lists:reverse(Acc);
		     (Str,Acc,St,Fun) ->
			  {N,_,St2,Str2} = scan_name(Str,St),
			  Fun(Str2,[N|Acc],St2,Fun)
		  end,
    NameList = NameListFun(DefaultVal,[],S,NameListFun),
    VcFun =
	fun(X) ->
		case Read(entity,X,S) of
		    {_,external,{_,{ndata,_}}} ->
			ok;
		    _ -> ?fatal({error,{vc_Entity_Name,X}},S)
		end
	end,
    lists:foreach(VcFun,NameList);
vc_Entity_Name({_,'ENTITIES',_,_,_},_S) ->
    ok.
%% VC: No Duplicate Types — in mixed content the same element name may
%% appear at most once. Only enforced when validating.
vc_No_Duplicate_Types(#xmerl_scanner{validation=true} = S, mixed, Types) ->
    %% Recursive walk via a self-applied fun (this module's pre-named-fun
    %% idiom): each element must not reappear in the remaining tail.
    Walk = fun([], _Self) ->
		   ok;
	      ([T | Rest], Self) ->
		   case lists:member(T, Rest) of
		       true -> ?fatal({no_duplicate_types_allowed,T}, S);
		       false -> Self(Rest, Self)
		   end
	   end,
    Walk(Types, Walk);
vc_No_Duplicate_Types(_, _, _) ->
    ok.
%%% Tests of Well-Formedness Constraints
%% WFC: inside a content model, names must be separated by "," or "|",
%% or followed by ")"; a "%" (parameter-entity reference) is also
%% acceptable.
mandatory_delimeter_wfc([C|_], _S) when C =:= $,; C =:= $|; C =:= $); C =:= $% ->
    ok;
mandatory_delimeter_wfc(T, S) ->
    ?fatal({comma_or_vertical_bar_mandatory_between_names_in_content_model,T},S).
%% WFC: Unique Att Spec — no attribute name may appear more than once
%% in the same start tag.
wfc_unique_att_spec([], _S) ->
    ok;
wfc_unique_att_spec([#xmlAttribute{name = AttName} | Rest], S) ->
    case lists:keymember(AttName, #xmlAttribute.name, Rest) of
	false ->
	    wfc_unique_att_spec(Rest, S);
	true ->
	    ?fatal({error,{unique_att_spec_required,AttName}},S)
    end.
%% WFC: Legal Character — the character (possibly wrapped in a
%% one-element list, as produced by entity expansion) must match the
%% XML Char production.
wfc_legal_char(Ch0, S) ->
    Ch = case Ch0 of
	     [Only] -> Only;
	     _ -> Ch0
	 end,
    case xmerl_lib:is_char(Ch) of
	true ->
	    ok;
	_ ->
	    ?fatal({error,{wfc_Legal_Character,Ch}},S)
    end.
%% WFC: attributes in a start tag must be separated by whitespace;
%% "/" and ">" legitimately terminate the attribute list instead.
wfc_whitespace_betw_attrs(C, _S) when ?whitespace(C) ->
    ok;
wfc_whitespace_betw_attrs(C, _S) when C =:= $/; C =:= $> ->
    ok;
wfc_whitespace_betw_attrs(_C, S) ->
    ?fatal({whitespace_required_between_attributes},S).
%% WFC: Entity Declared — a standalone document must not reference an
%% entity whose declaration lives in the external subset.
wfc_Entity_Declared({external, _}, #xmerl_scanner{standalone = yes} = S, Name) ->
    ?fatal({reference_to_externally_defed_entity_standalone_doc, Name}, S);
wfc_Entity_Declared(_DefEnv, _S, _Name) ->
    ok.
wfc_Internal_parsed_entity(internal,Value,S) ->
    %% WFC test that replacement text matches the 'content' production:
    %% scan the value in a throwaway environment, discarding the result.
    scan_content(Value,S#xmerl_scanner{environment=internal_parsed_entity},
		 _Name=[],[],S#xmerl_scanner.space,_Lang=[],_Prnt=[],
		 #xmlNamespace{});
wfc_Internal_parsed_entity(_,_,_) ->
    ok.
%% Validity constraint "Element Valid": under validation, every element
%% used in the document must have an element type declaration in the
%% DTD. Skipped while expanding internal parsed entities and when the
%% scanner is not validating.
%% Fix: the 'undeclared'/'undefined'/'_' case branches had been fused
%% onto single lines (unparsable); restored the proper branch layout.
vc_Element_valid(_Name,#xmerl_scanner{environment=internal_parsed_entity}) ->
    ok;
vc_Element_valid(Name,S=#xmerl_scanner{rules_read_fun=Read,
				       validation=true}) ->
    case Read(elem_def,Name,S) of
	#xmlElement{elementdef=undeclared} ->
	    %% element only known from e.g. an ATTLIST placeholder
	    ?fatal({error,{error_missing_element_declaration_in_DTD,Name}},S);
	undefined ->
	    %% element never mentioned in the DTD at all
	    ?fatal({error,{error_missing_element_declaration_in_DTD,Name}},S);
	_ ->
	    ok
    end;
vc_Element_valid(_,_) ->
    ok.
%% [74] PEDef
%% [74] PEDef ::= EntityValue | ExternalID
%% Scans the definition part of a parameter-entity declaration. A
%% leading quote starts an EntityValue; anything else must be an
%% ExternalID.
scan_pe_def([], S=#xmerl_scanner{continuation_fun = F}, PEName) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_pe_def(MoreBytes, S1, PEName) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_pe_def("'" ++ T, S0, PEName) ->
    ?bump_col(1),
    scan_entity_value(T, S, $', PEName,parameter);
scan_pe_def("\"" ++ T, S0, PEName) ->
    ?bump_col(1),
    scan_entity_value(T, S, $", PEName,parameter);
scan_pe_def(Str, S, _PEName) ->
    %% no quote => must be an ExternalID
    scan_external_id(Str, S).
%% [82] NotationDecl
%% [82] NotationDecl ::= '<!NOTATION' S Name S (ExternalID|PublicID) S? '>'
%% Scans a notation declaration and records it in the rules table.
scan_notation_decl(T, #xmerl_scanner{rules_write_fun = Write,
				     rules_read_fun=Read,
				     rules_delete_fun=Delete} = S) ->
    {Name, _NameStr, T1, S1} = scan_name_no_colons(T, S),
    {_,T2,S2} = mandatory_strip(T1,S1),
    {Def, T3, S3} = scan_notation_decl1(T2, S2),
    ?strip4,
    ">" ++ T5 = T4,
    %% an earlier forward reference may have stored an 'undeclared'
    %% placeholder for this notation; drop it before writing the
    %% real definition
    case Read(notation,Name,S) of
	undeclared -> Delete(notation,Name,S4);
	_ -> ok
    end,
    S5 = Write(notation, Name, Def, S4),
    {T5, S5}.
%% Scans the (ExternalID | PublicID) part of a notation declaration.
%% Unlike a general ExternalID, PUBLIC here may omit the system
%% literal (the PublicID production).
scan_notation_decl1([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_notation_decl1(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_notation_decl1("SYSTEM" ++ T, S0) ->
    ?bump_col(6),
    {_,T1,S1} = mandatory_strip(T,S),
    {SL, T2, S2} = scan_system_literal(T1, S1),
    {{system, SL}, T2, S2};
scan_notation_decl1("PUBLIC" ++ T, S0) ->
    ?bump_col(6),
    {_,T1,S1} = mandatory_strip(T,S),
    {PIDL, T2, S2} = scan_pubid_literal(T1, S1),
    ?strip3,
    case T3 of
	">" ++ _ ->
	    %% PublicID: no system literal follows
	    {{public, PIDL}, T3,
	     S3#xmerl_scanner{col = S3#xmerl_scanner.col+1}};
	_ ->
	    {SL, T4, S4} = scan_system_literal(T3, S3),
	    {{public, PIDL, SL}, T4, S4}
    end.
%% [75] ExternalID
%% [75] ExternalID ::= 'SYSTEM' S SystemLiteral
%%                   | 'PUBLIC' S PubidLiteral S SystemLiteral
scan_external_id([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_external_id(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_external_id("SYSTEM" ++ T, S0) ->
    ?bump_col(6),
    {_,T1,S1} = mandatory_strip(T,S),
    {SL, T2, S2} = scan_system_literal(T1, S1),
    {{system, SL}, T2, S2};
scan_external_id("PUBLIC" ++ T, S0) ->
    ?bump_col(6),
    {_,T1,S1} = mandatory_strip(T,S),
    {PIDL, T2, S2} = scan_pubid_literal(T1, S1),
    %% here (unlike NotationDecl) the system literal is mandatory
    {_,T3,S3} = mandatory_strip(T2,S2),
    {SL, T4, S4} = scan_system_literal(T3, S3),
    {{public, PIDL, SL}, T4, S4}.
%% [9] EntityValue
%% Note that we have two different scan functions for EntityValue.
%% They differ in that this one checks for recursive calls to the same
%% parameter entity.
%% Entry point for [9] EntityValue scanning; seeds the accumulator and
%% the parameter-entity nesting stack.
scan_entity_value(Str, S, Delim, Name, Namespace) ->
    scan_entity_value(Str, S, Delim, [], Name, Namespace, []).
%% Scans [9] EntityValue up to the delimiting quote Delim ($' or $"),
%% or to end of input when Delim == no_delim (used for nested
%% expansions). PEName is the entity being defined (for recursion
%% checks), Namespace is 'parameter' or 'general', and PENesting is a
%% stack of markup delimiters used for the validity constraint
%% "Proper Declaration/PE Nesting" (validating mode only).
scan_entity_value([], S=#xmerl_scanner{environment={external,{entity,_}}},
		  _Delim,Acc,_,_,[]) ->
    {lists:flatten(lists:reverse(Acc)), [], S};
scan_entity_value([], S=#xmerl_scanner{environment={external,{entity,_}},
				       validation=true},
		  _Delim,_Acc,PEName,_,_) ->
    %% end of input with delimiters still open on the nesting stack
    {{error,{failed_VC_Proper_Declaration_PE_Nesting,1,PEName}},[],S};
scan_entity_value([],S,
		  no_delim,Acc,_,_,[]) ->
    {lists:flatten(lists:reverse(Acc)),[],S};
scan_entity_value([],S=#xmerl_scanner{validation=true},
		  no_delim,_Acc,PEName,_,_PENesting) ->
    {{error,{failed_VC_Proper_Declaration_PE_Nesting,2,PEName}},[],S};
scan_entity_value([], S=#xmerl_scanner{continuation_fun = F},
		  Delim, Acc, PEName,Namespace,PENesting) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) ->
	      scan_entity_value(MoreBytes,S1,
				Delim,Acc,PEName,Namespace,PENesting)
      end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_entity_value([Delim|T], S=#xmerl_scanner{validation=true},
		  Delim,_Acc,PEName,_NS,PENesting) when length(PENesting) /= 0 ->
    %% closing quote reached while markup delimiters are still open
    {{error,{failed_VC_Proper_Declaration_PE_Nesting,3,PEName}},T,S};
scan_entity_value([Delim|T], S0,
		  Delim, Acc, _PEName,_NS,_PENesting) ->
    ?bump_col(1),
    {lists:flatten(lists:reverse(Acc)), T, S};
scan_entity_value("%" ++ _T,S=#xmerl_scanner{environment=prolog},_,_,_,_,_) ->
    %% PE references are forbidden inside markup in the internal subset
    ?fatal({error,{wfc_PEs_In_Internal_Subset}},S);
scan_entity_value("%" ++ T, S0, Delim, Acc, PEName,Namespace,PENesting) ->
    ?bump_col(1),
    {PERefName, T1, S1} = scan_pe_reference(T, S),
    if PERefName == PEName,Namespace==parameter ->
	    ?fatal({illegal_recursion_in_PE, PEName}, S1);
       true ->
	    {ExpandedRef,S2} =
		case expand_pe_reference(PERefName, S1, in_literal) of
		    %% actually should be expanded as_PE but
		    %% handle whitespace explicitly in this case.
		    Tuple when tuple(Tuple) ->
			%% {system,URI} or {public,URI}
			%% Included in literal.
			{ExpRef,_Sx}=fetch_not_parse(Tuple,S1),
			{EntV,_,_S2} =
			    scan_entity_value(ExpRef, S1, no_delim,[],
					      PERefName,parameter,[]),
			%% should do an update Write(parameter_entity)
			%% so next expand_pe_reference is faster
			{EntV,_S2};
		    ExpRef ->
			{ExpRef,S1}
		end,
	    %% single or double quotes are not treated as delimiters
	    %% in passages "included in literal"
	    S3 = S2#xmerl_scanner{col=S2#xmerl_scanner.col+1},
	    {Acc2,_,S4} = scan_entity_value(ExpandedRef,S3,no_delim,Acc,
					    PEName,Namespace,[]),
	    {_,T2,S5} = strip(" "++T1,S4),
	    scan_entity_value(T2,S5,Delim,lists:reverse(Acc2),
			      PEName,Namespace,PENesting)
    end;
scan_entity_value("&" ++ T, S0, Delim, Acc, PEName,Namespace,PENesting) ->
    %% This is either a character entity or a general entity (internal
    %% or external) reference. An internal general entity shall not be
    %% expanded in an entity def XML1.0 section 4.5.
    ?bump_col(1),
    case T of
	"#"++_T ->
	    {ExpRef, T1, S1} = scan_reference(T, S),
	    Tok = pe_nesting_token(ExpRef++T1,Namespace,S1#xmerl_scanner.validation),
	    case markup_delimeter(ExpRef) of
		true ->
		    scan_entity_value(T1, S1, Delim, [ExpRef|Acc], PEName,
				      Namespace,pe_push(Tok,PENesting,S1));
		_ ->
		    scan_entity_value(ExpRef ++ T1, S1, Delim, Acc, PEName,
				      Namespace,pe_push(Tok,PENesting,S1))
	    end;
	_ -> %% General Entity is bypassed, though must check for
	     %% recursion: save referenced name now and check for
	     %% recursive reference after the whole entity definition is
	     %% completed.
	    {Name, _NamespaceInfo, T1, S1} = scan_name(T,S),
	    S2=save_refed_entity_name(Name,PEName,S1),
	    scan_entity_value(T1,S2,Delim,["&"|Acc],PEName,Namespace,PENesting)
    end;
%% The following clauses are for the PE Nesting VC constraint
%% Start delimiter for ConditionalSection
scan_entity_value("<!["++T,S0,Delim,Acc,PEName,parameter=NS,PENesting)->
    ?bump_col(3),
    scan_entity_value(T,S,Delim,["<!["|Acc],PEName,NS,
		      pe_push("<![",PENesting,S));
%% Start delimiter for ConditionalSection (2)
scan_entity_value("["++T,S0,Delim,Acc,PEName,parameter=NS,PENesting)->
    ?bump_col(1),
    scan_entity_value(T,S,Delim,["["|Acc],PEName,NS,
		      pe_push("[",PENesting,S));
%% Start delimiter for comment
scan_entity_value("<!--"++T,S0,Delim,Acc,PEName,parameter=NS,PENesting)->
    ?bump_col(4),
    scan_entity_value(T,S,Delim,["<!--"|Acc],PEName,NS,
		      pe_push("<!--",PENesting,S));
%% Start delimiter for ElementDecl, AttListDecl, EntityDecl, NotationDecl
scan_entity_value("<!"++ T,S0,Delim,Acc,PEName, parameter=NS,PENesting) ->
    ?bump_col(2),
    scan_entity_value(T,S,Delim,["<!"|Acc],PEName,NS,
		      pe_push("<!",PENesting,S));
%% Start delimiter for PI
scan_entity_value("<?"++T,S0,Delim,Acc,PEName, parameter=NS,PENesting) ->
    ?bump_col(2),
    scan_entity_value(T,S,Delim,["<?"|Acc],PEName,NS,
		      pe_push("<?",PENesting,S));
%% Start delimiter for elements that matches the proper stop
%% for a markupdecl
scan_entity_value("</"++T,S0,Delim,Acc,PEName,parameter=NS,PENesting)->
    ?bump_col(2),
    scan_entity_value(T,S,Delim,["</"|Acc],PEName,NS,
		      pe_push("</",PENesting,S));
scan_entity_value("<"++T,S0,Delim,Acc,PEName,parameter=NS,PENesting)->
    ?bump_col(1),
    scan_entity_value(T,S,Delim,["<"|Acc],PEName,NS,
		      pe_push("<",PENesting,S));
%% for contentspecs
scan_entity_value("("++T,S0,Delim,Acc,PEName,parameter=NS,PENesting)->
    ?bump_col(1),
    scan_entity_value(T,S,Delim,["("|Acc],PEName,NS,
		      pe_push("(",PENesting,S));
%% Stop delimiter for ElementDecl, AttListDecl, EntityDecl, NotationDecl
scan_entity_value(">"++ T,S0,Delim,Acc,PEName, parameter=NS,PENesting) ->
    ?bump_col(1),
    scan_entity_value(T,S,Delim,[">"|Acc],PEName,NS,
		      pe_pop(">",PENesting,S));
%% Stop delimiter for PI
scan_entity_value("?>"++ T,S0,Delim,Acc,PEName, parameter=NS,PENesting) ->
    ?bump_col(2),
    scan_entity_value(T,S,Delim,["?>"|Acc],PEName,NS,
		      pe_pop("?>",PENesting,S));
%% Stop delimiter for comment
scan_entity_value("-->"++ T,S0,Delim,Acc,PEName, parameter=NS,PENesting) ->
    ?bump_col(3),
    scan_entity_value(T,S,Delim,["-->"|Acc],PEName,NS,
		      pe_pop("-->",PENesting,S));
%% Stop delimiter for ConditionalSection
scan_entity_value("]]>"++ T,S0,Delim,Acc,PEName, parameter=NS,PENesting) ->
    ?bump_col(3),
    scan_entity_value(T,S,Delim,["]]>"|Acc],PEName,NS,
		      pe_pop("]]>",PENesting,S));
%% added to match a content start delimiter included
scan_entity_value("/>"++ T,S0,Delim,Acc,PEName, parameter=NS,PENesting) ->
    ?bump_col(2),
    scan_entity_value(T,S,Delim,["/>"|Acc],PEName,NS,
		      pe_pop("/>",PENesting,S));
scan_entity_value(")"++ T,S0,Delim,Acc,PEName, parameter=NS,PENesting) ->
    ?bump_col(1),
    scan_entity_value(T,S,Delim,[")"|Acc],PEName,NS,
		      pe_pop(")",PENesting,S));
scan_entity_value([H|T], S0, Delim, Acc, PEName,Namespace,PENesting) ->
    %% ordinary character: must satisfy the XML Char production
    case xmerl_lib:is_char(H) of
	true ->
	    ?bump_col(1),
	    scan_entity_value(T, S, Delim, [H|Acc], PEName,Namespace,PENesting);
	false ->
	    ?fatal({unexpected_char,H}, S0)
    end.
%% Records that the entity being defined (PEName) references entity
%% Name, unless Name is one of the five predefined entities (those can
%% never cause recursion and need not be tracked).
save_refed_entity_name(Name, PEName, S) ->
    case predefined_entity(Name) of
	true ->
	    S;
	false ->
	    save_refed_entity_name1(Name, PEName, S)
    end.
%% Adds Name to PEName's reference list in the scanner state, creating
%% the list on first use and avoiding duplicate entries.
save_refed_entity_name1(Name, PEName,
			S=#xmerl_scanner{entity_references=ERefs}) ->
    case lists:keysearch(PEName, 1, ERefs) of
	{value, {_, Known}} ->
	    Updated =
		case lists:member(Name, Known) of
		    true  -> Known;
		    false -> [Name|Known]
		end,
	    S#xmerl_scanner{entity_references =
				lists:keyreplace(PEName, 1, ERefs,
						 {PEName, Updated})};
	false ->
	    S#xmerl_scanner{entity_references = [{PEName,[Name]}|ERefs]}
    end.
%% Pushes a PE-nesting delimiter. Opening delimiters are always
%% tracked; closing delimiters are tracked only under validation, so
%% that unbalanced markup inside a parameter entity can be detected.
%% Anything else (including 'false' from pe_nesting_token/3) leaves
%% the stack untouched.
pe_push(Delim, Stack, _S) when Delim=="<!"; Delim=="<?"; Delim=="<!--";
			       Delim=="<!["; Delim=="["; Delim=="<";
			       Delim=="</"; Delim=="(" ->
    [Delim|Stack];
pe_push(Delim, Stack, #xmerl_scanner{validation=true})
  when Delim==")"; Delim==">"; Delim=="?>"; Delim=="]]>";
       Delim=="-->"; Delim=="/>" ->
    [Delim|Stack];
pe_push(_Delim, Stack, _S) ->
    Stack.
%% Pops a closing delimiter when it matches the opening delimiter on
%% top of the PE-nesting stack. A mismatch is fatal under validation
%% and silently ignored otherwise. Clause order is significant: the
%% validation catch-all must come before the final ignore clause.
pe_pop(">",   ["<!"|Rest],      _S) -> Rest;
pe_pop("?>",  ["<?"|Rest],      _S) -> Rest;
pe_pop("-->", ["<!--"|Rest],    _S) -> Rest;
pe_pop("]]>", ["[","<!["|Rest], _S) -> Rest;
pe_pop("/>",  ["<"|Rest],       _S) -> Rest;
pe_pop(">",   ["<"|Rest],       _S) -> Rest;
pe_pop(">",   ["</"|Rest],      _S) -> Rest;
pe_pop(")",   ["("|Rest],       _S) -> Rest;
pe_pop(Delim, _Stack, S=#xmerl_scanner{validation=true}) ->
    ?fatal({error,{failed_VC_Proper_Declaration_PE_Nesting,5,Delim}},S);
pe_pop(_, Rest, _) ->
    Rest.
%% Classifies the leading characters of expanded character-reference
%% text as a PE-nesting delimiter token, or 'false' when there is
%% none. Only meaningful for parameter entities under validation.
%% NOTE(review): the "<!--" and "<![" clauses are shadowed by the
%% earlier "<!" clause and can never match -- TODO confirm whether
%% this clause order is intended.
pe_nesting_token("<!"++_T,parameter,true) -> "<!";
pe_nesting_token("<?"++_T,parameter,true) -> "<?";
pe_nesting_token("<!--"++_T,parameter,true) -> "<!--";
pe_nesting_token("<!["++_T,parameter,true) -> "<![";
pe_nesting_token("["++_T,parameter,true) -> "[";
pe_nesting_token("("++_T,parameter,true) -> "(";
pe_nesting_token(">"++_T,parameter,true) -> ">";
pe_nesting_token("?>"++_T,parameter,true) -> "?>";
pe_nesting_token("-->"++_T,parameter,true) -> "-->";
pe_nesting_token("]]>"++_T,parameter,true) -> "]]>";
pe_nesting_token(")"++_T,parameter,true) -> ")";
pe_nesting_token("/>"++_T,parameter,true) -> "/>";
pe_nesting_token(_,_,_) -> false.
%% The five entities predefined by XML 1.0 (section 4.6).
predefined_entity(Name) ->
    lists:member(Name, [amp, lt, gt, apos, quot]).
%% Detects (indirect) entity recursion: builds the accumulated
%% entity-reference relation as a digraph and fails if it is cyclic,
%% i.e. some entity ultimately references itself.
%% NOTE(review): the digraph created by sofs:family_to_digraph/2 does
%% not appear to be deleted on the success path -- possible ETS table
%% leak; TODO confirm.
check_entity_recursion(EName,
		       S=#xmerl_scanner{entity_references=EntityRefList}) ->
    Set = sofs:family(EntityRefList),
    case catch sofs:family_to_digraph(Set, [acyclic]) of
	{'EXIT',{cyclic,_}} ->
	    ?fatal({illegal_recursion_in_Entity, EName}, S);
	{graph,_,_,_,_} ->
	    ok
    end.
%% [15] Comment
%% [15] Comment. Entry point used where no position/parents/language
%% context is available.
scan_comment(Str, S) ->
    scan_comment(Str, S, undefined, [], []).
%% Builds the #xmlComment skeleton, fires the 'started' event, and
%% hands over to scan_comment1/5 which collects the comment text.
scan_comment(Str,S=#xmerl_scanner{col=C,event_fun=Event}, Pos, Parents, Lang) ->
    Comment = #xmlComment{pos = Pos,
			  parents = Parents,
			  language = Lang,
			  value = undefined},
    S1 = #xmerl_scanner{} = Event(#xmerl_event{event = started,
					       line = S#xmerl_scanner.line,
					       col = C,
					       pos = Pos,
					       data = Comment}, S),
    scan_comment1(Str, S1, Pos, Comment, _Acc = []).
%% Collects the comment body. A literal "--" is only legal as the
%% start of the closing "-->" (XML 1.0 forbids "--" inside comments);
%% when the comment ends, the 'ended' event and the hook are fired.
scan_comment1([], S=#xmerl_scanner{continuation_fun = F},
	      Pos, Comment, Acc) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_comment1(MoreBytes, S1, Pos, Comment, Acc) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_comment1("--" ++ T, S0 = #xmerl_scanner{col = C,
					     event_fun = Event,
					     hook_fun = Hook},
	      _Pos, Comment, Acc) ->
    case hd(T) of
	$> ->
	    ?bump_col(2),
	    Comment1 = Comment#xmlComment{value = lists:reverse(Acc)},
	    S1=#xmerl_scanner{}=Event(#xmerl_event{event = ended,
						   line=S#xmerl_scanner.line,
						   col = C,
						   data = Comment1}, S),
	    {Ret, S2} = Hook(Comment1, S1),
	    T2 = tl(T),
	    ?strip3,
	    {Ret, T3, S3};
	Char ->
	    ?fatal({invalid_comment,"--"++[Char]}, S0)
    end;
scan_comment1("\n" ++ T, S=#xmerl_scanner{line = L}, Pos, Cmt, Acc) ->
    scan_comment1(T, S#xmerl_scanner{line=L+1,col=1},Pos, Cmt, "\n" ++ Acc);
scan_comment1("\r\n" ++ T, S=#xmerl_scanner{line = L}, Pos, Cmt, Acc) ->
    %% CR followed by LF is read as a single LF
    scan_comment1(T, S#xmerl_scanner{line=L+1,col=1}, Pos, Cmt, "\n" ++ Acc);
scan_comment1("\r" ++ T, S=#xmerl_scanner{line = L}, Pos, Cmt, Acc) ->
    %% CR not followed by LF is read as a LF
    scan_comment1(T, S#xmerl_scanner{line=L+1,col=1}, Pos, Cmt, "\n" ++ Acc);
scan_comment1([H|T], S=#xmerl_scanner{col = C}, Pos, Cmt, Acc) ->
    wfc_legal_char(H,S),
    scan_comment1(T, S#xmerl_scanner{col=C+1}, Pos, Cmt, [H|Acc]).
%%%%%%%
%% Accepts the ">" that terminates a markup declaration, expanding any
%% intervening parameter-entity references (as_PE, i.e. with
%% surrounding blanks) and stripping whitespace until ">" is found.
scan_markup_completion_gt([$>|_R]=T,S) ->
    {T,S};
scan_markup_completion_gt([$%|T],S0) ->
    ?bump_col(1),
    {Name,T1,S1} = scan_pe_reference(T,S),
    ExpandedRef = expand_pe_reference(Name,S1,as_PE),
    {_,T2,S2} = strip(ExpandedRef++T1,S1),
    scan_markup_completion_gt(T2,S2);
scan_markup_completion_gt(T,S) ->
    ?fatal({error,{malformed_syntax_entity_completion,T}},S).
%% Strips leading whitespace with no restriction on which whitespace
%% characters are accepted.
strip(Str, S) ->
    strip(Str, S, all).
%% Strips leading whitespace, maintaining line/column bookkeeping.
%% Lim == no_tab rejects tab characters (used for public identifiers);
%% Lim == all accepts any whitespace. Returns {[], Rest, S}.
strip([], S=#xmerl_scanner{continuation_fun = F},_) ->
    ?dbg("cont()... stripping whitespace~n", []),
    %% NOTE(review): the continuation restarts via strip/2, i.e. the
    %% Lim argument is reset to 'all' -- TODO confirm this is intended.
    F(fun(MoreBytes, S1) -> strip(MoreBytes, S1) end,
      fun(S1) -> {[], [], S1} end,
      S);
strip("\s" ++ T, S=#xmerl_scanner{col = C},Lim) ->
    strip(T, S#xmerl_scanner{col = C+1},Lim);
strip("\t" ++ _T, S ,no_tab) ->
    ?fatal({error,{no_tab_allowed}},S);
strip("\t" ++ T, S=#xmerl_scanner{col = C},Lim) ->
    strip(T, S#xmerl_scanner{col = expand_tab(C)},Lim);
strip("\n" ++ T, S=#xmerl_scanner{line = L},Lim) ->
    strip(T, S#xmerl_scanner{line = L+1, col = 1},Lim);
strip("\r\n" ++ T, S=#xmerl_scanner{line = L},Lim) ->
    %% CR followed by LF is read as a single LF
    strip(T, S#xmerl_scanner{line = L+1, col = 1},Lim);
strip("\r" ++ T, S=#xmerl_scanner{line = L},Lim) ->
    %% CR not followed by LF is read as a LF
    strip(T, S#xmerl_scanner{line = L+1, col = 1},Lim);
strip(Str, S,_Lim) ->
    {[], Str, S}.
%% demands a whitespace, though a parameter entity is ok, it will
%% expand with a whitespace on each side.
%% Demands at least one whitespace character, then strips it. A
%% parameter-entity reference is also acceptable, because a PE
%% expanded as_PE gets a blank added on each side.
%% Fix: the third clause had lost its "[$%" head pattern during
%% comment-mangling, leaving invalid syntax; restored it.
mandatory_strip([],S) ->
    ?fatal({error,{whitespace_was_expected}},S);
mandatory_strip(T,S) when ?whitespace(hd(T)) ->
    strip(T,S,all);
mandatory_strip([$%|T],S) when ?whitespace(hd(T)) ->
    %% "%" followed by whitespace is not a PE reference but the "%"
    %% marker of a PE declaration, so mandatory whitespace is missing
    ?fatal({error,{whitespace_was_expected}},S);
mandatory_strip([$%|_T]=T,S) ->
    %% a PE reference counts as whitespace; leave it for the caller
    {[],T,S};
mandatory_strip(_T,S) ->
    ?fatal({error,{whitespace_was_expected}},S).
%% Strips whitespace inside a public identifier; tab is not a legal
%% PubidChar, so it is rejected.
pub_id_strip(Str, S) ->
    strip(Str, S, no_tab).
%% Normalizes leading whitespace in an attribute value: expands
%% references that yield whitespace and strips it. Returns the
%% remaining text, the new state, and whether anything was normalized.
normalize("&"++T,S,IsNorm) ->
    case scan_reference(T, S) of
	{ExpRef, T1, S1} when ?whitespace(hd(ExpRef)) ->
	    %% the reference expanded to whitespace -> keep normalizing
	    normalize(ExpRef++T1,S1,IsNorm);
	_ ->
	    {"&"++T,S,IsNorm}
    end;
normalize(T,S,IsNorm) ->
    case strip(T,S) of
	{_,T,S} ->
	    %% nothing stripped (tail and state unchanged): not normalized
	    {T,S,IsNorm};
	{_,T1,S1} ->
	    {T1,S1,true}
    end.
%%% accumulate_whitespace(T::string(), S::global_state(),
%%%                       atom(), Acc::string()) -> {Acc, T1, S1}
%%%
%%% @doc Function to accumulate and normalize whitespace.
accumulate_whitespace(T, S, preserve, Acc) ->
    accumulate_whitespace(T, S, Acc);
accumulate_whitespace(T, S, normalize, Acc) ->
    %% all leading whitespace collapses to a single space character
    {_WsAcc, T1, S1} = accumulate_whitespace(T, S, []),
    {[$\s|Acc], T1, S1}.
%% Accumulates leading whitespace onto Acc (in reverse), translating
%% CR and CRLF to a single LF per XML end-of-line handling, and
%% keeping line/column bookkeeping up to date.
accumulate_whitespace([], S=#xmerl_scanner{continuation_fun = F}, Acc) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> accumulate_whitespace(MoreBytes, S1, Acc) end,
      fun(S1) -> {Acc, [], S1} end,
      S);
accumulate_whitespace("\s" ++ T, S=#xmerl_scanner{col = C}, Acc) ->
    accumulate_whitespace(T, S#xmerl_scanner{col = C+1}, [$\s|Acc]);
accumulate_whitespace("\t" ++ T, S=#xmerl_scanner{col = C}, Acc) ->
    accumulate_whitespace(T, S#xmerl_scanner{col = expand_tab(C)}, [$\t|Acc]);
accumulate_whitespace("\n" ++ T, S=#xmerl_scanner{line = L}, Acc) ->
    accumulate_whitespace(T, S#xmerl_scanner{line = L+1, col = 1}, [$\n|Acc]);
accumulate_whitespace("\r\n" ++ T, S=#xmerl_scanner{line = L}, Acc) ->
    %% CR followed by LF is read as a single LF
    accumulate_whitespace(T, S#xmerl_scanner{line = L+1, col=1}, [$\n|Acc]);
accumulate_whitespace("\r" ++ T, S=#xmerl_scanner{line = L}, Acc) ->
    %% CR not followed by LF is read as a LF
    accumulate_whitespace(T, S#xmerl_scanner{line = L+1, col = 1}, [$\n|Acc]);
accumulate_whitespace(Str, S, Acc) ->
    {Acc, Str, S}.
%% Advances a 1-based column number to the next 8-column tab stop.
expand_tab(Col) ->
    Col + 8 - ((Col - 1) rem 8).
%%% Helper functions
%% Raises an unrecoverable scanner error, tagged with the current
%% line and column from the scanner state.
fatal(Reason, S) ->
    #xmerl_scanner{line = Line, col = Col} = S,
    exit({fatal, {Reason, Line, Col}}).
%% BUG: when there are many <!ATTLIST ..> tags, none of the attributes are saved in rules
%% Stores Value under {Context,Name} in the rules ETS table, unless an
%% entry already exists: the first definition encountered wins and
%% later ones are ignored.
rules_write(Context, Name, Value, #xmerl_scanner{rules = Tab} = S) ->
    case ets:lookup(Tab, {Context, Name}) of
	[] ->
	    ets:insert(Tab, {{Context, Name}, Value});
	_ ->
	    ok
    end,
    S.
%% Looks up a stored rule; returns 'undefined' when absent.
rules_read(Context, Name, #xmerl_scanner{rules = Tab}) ->
    case ets:lookup(Tab, {Context, Name}) of
	[] ->
	    undefined;
	[{_, Value}] ->
	    Value
    end.
%% Removes any stored rule for the given context/name pair.
rules_delete(Context, Name, #xmerl_scanner{rules = Tab}) ->
    ets:delete(Tab, {Context, Name}).
% decode_UTF8(Str) ->
%     decode_UTF8(Str,[]).
% decode_UTF8([],Acc) ->
%     lists:reverse(Acc);
% decode_UTF8([H|T],Acc) when H =< 127 ->
%     decode_UTF8(T,[H|Acc]);
% decode_UTF8([H1,H2|T],Acc) when H1 =< 16#DF->
%     Ch = char_UTF8_2b(H1,H2),
%     decode_UTF8(T,[Ch|Acc]);
% decode_UTF8([H1,H2,H3|T],Acc) when H1 =< 16#EF ->
%     Ch = char_UTF8_3b(H1,H2,H3),
%     decode_UTF8(T,[Ch|Acc]);
% decode_UTF8([H1,H2,H3,H4|T],Acc) when H1 =< 16#F7 ->
%     Ch = char_UTF8_4b(H1,H2,H3,H4),
%     decode_UTF8(T,[Ch|Acc]);
% decode_UTF8([H1,H2,H3,H4,H5|T],Acc) when H1 =< 16#FB ->
%     Ch = char_UTF8_5b(H1,H2,H3,H4,H5),
%     decode_UTF8(T,[Ch|Acc]);
% decode_UTF8([H1,H2,H3,H4,H5,H6|T],Acc) ->
%     Ch = char_UTF8_6b(H1,H2,H3,H4,H5,H6),
%     decode_UTF8(T,[Ch|Acc]).
% char_UTF8_2b(H1,H2) ->
%     Msb = (H1 band 16#1F) bsl 6,
%     Lsb = H2 band 16#3F,
%     Msb + Lsb.
% char_UTF8_3b(H1,H2,H3) ->
%     (H3 band 16#3F) + ((H2 band 16#3F) bsl 6) + ((H1 band 16#0F) bsl 12).
% char_UTF8_4b(H1,H2,H3,H4) ->
%     (H4 band 16#3F) + ((H3 band 16#3F) bsl 6) + ((H2 band 16#3F) bsl 12) +
%	((H1 band 16#07) bsl 18).
% char_UTF8_5b(H1,H2,H3,H4,H5) ->
%     (H5 band 16#3F) + ((H4 band 16#3F) bsl 6) + ((H3 band 16#3F) bsl 12) +
%	((H2 band 16#3F) bsl 18) + ((H1 band 16#03) band 24).
% char_UTF8_6b(H1,H2,H3,H4,H5,H6) ->
%     (H6 band 16#3F) + ((H5 band 16#3F) bsl 6) + ((H4 band 16#3F) bsl 12) +
%	((H3 band 16#3F) bsl 18) + ((H2 band 16#3F) bsl 24) +
%	((H1 band 16#01) bsl 30).
% utf8_char([H|T],S0=#xmerl_scanner{encoding="UTF-16"}) ->
%     ?bump_col(1),
%     {H,T,S};
% utf8_char([H|T],S0) when H =< 127 ->
%     ?bump_col(1),
%     {H,T,S};
% utf8_char([H1,H2|T],S0) when H1 =< 16#DF->
%     Ch = char_UTF8_2b(H1,H2),
%     ?bump_col(2),
%     {Ch,T,S};
% utf8_char([H1,H2,H3|T],S0) when H1 =< 16#EF ->
%     Ch = char_UTF8_3b(H1,H2,H3),
%     ?bump_col(3),
%     {Ch,T,S};
% utf8_char([H1,H2,H3,H4|T],S0) when H1 =< 16#F7 ->
%     Ch = char_UTF8_4b(H1,H2,H3,H4),
%     ?bump_col(4),
%     {Ch,T,S};
% utf8_char([H1,H2,H3,H4,H5|T],S0) when H1 =< 16#FB ->
%     Ch = char_UTF8_5b(H1,H2,H3,H4,H5),
%     ?bump_col(5),
%     {Ch,T,S};
% utf8_char([H1,H2,H3,H4,H5,H6|T],S0) ->
%     Ch = char_UTF8_6b(H1,H2,H3,H4,H5,H6),
%     ?bump_col(6),
%     {Ch,T,S}.
| null | https://raw.githubusercontent.com/gebi/jungerl/8f5c102295dbe903f47d79fd64714b7de17026ec/lib/xmerl/src/xmerl_scan.erl | erlang | compliance with the License. You may obtain a copy of the License at
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
Contributor(s):
----------------------------------------------------------------------
----------------------------------------------------------------------
File: xmerl_scan.erl
Modules used : ets, file, filename, io, lists, ucs, uri
----------------------------------------------------------------------
@doc
<tt>xmerl_scan:string/[1,2]</tt> or
<tt>xmerl_scan:file/[1,2]</tt>.
It returns records of the type defined in xmerl.hrl.
See also <a href="xmerl_examples.html">tutorial</a> on customization
functions.
@type global_state(). <p>
</p>
@type option_list(). <p>Options allows to customize the behaviour of the
scanner.
See also <a href="xmerl_examples.html">tutorial</a> on customization
functions.
</p>
Possible options are:
<dl>
<dt><code>{acc_fun, Fun}</code></dt>
<dd>Call back function to accumulate contents of entity.</dd>
<dt><code>{continuation_fun, Fun} |
{continuation_fun, Fun, ContinuationState}</code></dt>
<dt><code>{event_fun, Fun} |
{event_fun, Fun, EventState}</code></dt>
<dd>Call back function to handle scanner events.</dd>
<dt><code>{fetch_fun, Fun} |
<dd>Call back function to fetch an external resource.</dd>
<dt><code>{hook_fun, Fun} |
{hook_fun, Fun, HookState}</code></dt>
<dd>Call back function to process the document entities once
identified.</dd>
<dt><code>{close_fun, Fun}</code></dt>
<dd>Called when document has been completely parsed.</dd>
{rules, Rules}</code></dt>
<dd>Handles storing of scanner information when parsing.</dd>
<dt><code>{user_state, UserState}</code></dt>
<dt><code>{fetch_path, PathList}</code></dt>
<dd>PathList is a list of
directories to search when fetching files. If the file in question
name.</dd>
<dt><code>{line, Line}</code></dt>
<dd>To specify starting line for scanning in document which contains
fragments of XML.</dd>
'false' (default) to not otherwise 'true'.</dd>
'false' (default) to not otherwise 'true'.</dd>
<dd>Set to 'true' if xmerl should behave quietly and not output any info
to standard output (default 'false').</dd>
document.</dd>
<dd>XML Base directory. If using string/1 default is current directory.
If using file/1 default is directory of given file.</dd>
<dt><code>{encoding, Enc}</code></dt>
declaration. </dd>
</dl>
@end
Only used internally are:
<dt><code>{environment,Env}</code></dt>
<dd>What is this?</dd>
<dd>What is this?</dd>
main API
access functions for various states
helper functions. To xmerl_lib ??
record def, macros
Functions to access the various states
@spec user_state(S::global_state()) -> global_state()
@equiv user_state(UserState,S)
@spec event_state(S::global_state()) -> global_state()
@equiv event_state(EventState,S)
@spec hook_state(S::global_state()) -> global_state()
@equiv hook_state(HookState,S)
@spec fetch_state(S::global_state()) -> global_state()
@equiv fetch_state(FetchState,S)
@spec cont_state(S::global_state()) -> global_state()
@equiv cont_state(ContinuationState,S)
Functions to modify the various states
@spec user_state(UserState, S::global_state()) -> global_state()
See <a href="xmerl_examples.html">tutorial</a> on customization functions.
function, and called at the beginning and at the end of a parsed entity.
See <a href="xmerl_examples.html">tutorial</a> on customization functions.
function, and called when the parser has parsed a complete entity.
See <a href="xmerl_examples.html">tutorial</a> on customization functions.
@spec rules_state(RulesState, S::global_state()) -> global_state()
function, and called when the parser store scanner information in a rules
database.
See <a href="xmerl_examples.html">tutorial</a> on customization functions.
See <a href="xmerl_examples.html">tutorial</a> on customization functions.
function, and called when the parser encounters the end of the byte stream.
See <a href="xmerl_examples.html">tutorial</a> on customization functions.
@spec file(Filename::string()) -> {xmlElement(),Rest}
Rest = list()
@equiv file(Filename, [])
Rest = list()
for side effects only - final state is dropped
io:format("int_file F=~p~n",[F]),
io:format("int_file_decl F=~p~n",[F]),
@spec string(Text::list()) -> {xmlElement(),Rest}
Rest = list()
@equiv string(Test, [])
Rest = list()
for side effects only - final state is dropped
(This can't happen, currently)
-----------------------------------------------------
Default modifier functions
Hooks:
- {processing_instruction, Line, Data}
Events:
#xmerl_event{event : started | ended,
line : integer(),
col : integer(),
data}
document started, ended
#xmlElement started, ended
#xmlAttribute ended
#xmlComment ended
The acc/3 function is not allowed to redefine the type of object
being defined, but _is_ allowed to either ignore it or split it
into multiple objects (in which case {Acc',Pos',S'} should be returned.)
Below is an example of an acceptable operation
Always assume an external resource can be found locally! Thus
don't bother fetching with e.g. HTTP. Returns the path where the
resource is found. The path to the external resource is given by
URI directly or the option fetch_path (additional paths) or
directory (base path to external resource)
how about Windows systems?
file:/dtd_name
absolute path name
empty systemliteral
-----------------------------------------------------
Transform to given character set.
Note that if another character set is given in the encoding
Text declaration may be empty
Now transform to declared character set.
Document already transformed to this charset!
Document transformed to other bad charset!
Document not previously transformed
No encoding info given
Note:
- Neither of Comment and PI are returned in the resulting parsed
structure.
- scan_misc/3 implements Misc* as that is how the rule is always used
Comment
PI
Text declaration XML 1.0 section 4.3.1
VersionNum ::= ([a-zA-Z0-9_.:] | '-')+
names beginning with [xX][mM][lL] are reserved for future use.
More info on xml-stylesheet can be found at:
| 'PUBLIC' S PubidLiteral S SystemLiteral
Continue with old scanner data, result in Rules
For backward compatibility only. This will be removed later!!
other scheme
io:format("get_file F=~p~n",[F]),
check_decl/1
Now it is necessary to check that all referenced types is declared,
since it is legal to reference some xml types before they are
declared.
it is not an error to declare attributes for an element that is not
declared.
The replacement text of a parameter entity reference in a
{system,URI} or {public,URI}
should do an update Write(parameter_entity) so next
expand_pe_reference is faster
nested conditional section. Topmost condition is ignore, though
NotationDecl | PI |Comment
Validity constraint: Unique Type Declaration: No element type may be
declared more than once.
] entity.name NDATA notation.name >
<!ENTITY [%] entity.name "replacement text">
<!ENTITY [%] entity.name SYSTEM "system.identifier">
<!ENTITY [%] entity.name PUBLIC public.identifier "system.identifier">
this may happen when the ELEMENT is declared in
the slot in rules table must be empty so that the
later write has the assumed effect. Read maybe
should empty the table slot.
be declared later.
may be declared later in internal or external
subset.
There is room here to validate against Type, but we don't do it at
the moment.
parameter entity
generic entity
external general entity, parsed or unparsed.
this bad entity is declared,
but it may not be referenced,
then it would not be an
error.
may be declared later in internal or external
subset.
expand attribute names. We need to do this after having
scanned all attributes of the element, since (as far as
are valid within the whole scope of the element in which
they are declared, which should also mean that even if they
are declared after some other attributes, the namespace
should apply to those attributes as well.
CHECK THIS /JB
A namespace constraint of XML Names is that the prefix
must be declared
{system,URI} or {public,URI}
Included in literal, just get external file.
should do an update Write(parameter_entity) so next
expand_pe_reference is faster
End quote
Reference
Tags not allowed here
This is a problem. All referenced entities in the external entity must be checked for recursion, thus parse the contentbut,skip result.
Guess we have to scan the content to find any internal entity
references.
See Section 2.4: Especially:
and MUST, for compatibility, be escaped using either ">" or a
character reference when it appears in the string "]]>" in content, when
markup delimiters (the entities amp, lt, gt, apos, quot) is always treated
ampersand is not recognized as an entity-reference delimiter.)"
How to achieve this? My current approach is to insert the *strings* "&",
"<", ">", "'", and "\"" instead of the characters. The processor will
ignore them when performing multiple expansions. This means, for now, that
At some suitable point, we should flatten these, so that application-level
processors should not have to be aware of this detail.
S#xmerl_scanner.standalone==yes;
S#xmerl_scanner.environment==prolog ->
WFC or VC failure
Currently unused
case Read(parameter_entity, Name, S) of
undefined ->
?fatal({unknown_parameter_entity, Name}, S);
Result ->
fetch_DTD(Result,S)
end.
changed return value from [[Ref]]
changed return value from [[Acc]]
Eq ::= S? '=' S?
scan_name/2
We perform some checks here to make sure that the names conform to
the "Namespaces in XML" specification. This is an option.
Qualified Name:
| CombiningChar | Extender
The effect of XML Names (namespace) conformance is that:
- No entity names, PI targets, or notation names contain any colons.
scan_name_no_colons/2 will ensure that the name contains no colons iff
the scanner has been told to be namespace conformant. Otherwise, it will
behave exactly like scan_name/2.
In order to conform with the "Namespaces in XML" spec,
we cannot allow names to begin with ":"
parameter entity that expands to a name
scan_nmtoken/2
whitespace marks the end of a name
we don't strip here because the occurrence of whitespace may be an error
e.g. <!ELEMENT spec (front, body, back ?)>
non-empty Prefix means that we've encountered a ":" already.
Conformity with "Namespaces in XML" requires
at most one colon in a name
Before matching public identifiers, all whitespace must be normalized,
so we do that here
We do not match whitespace here, even though they're allowed in public
identifiers. This is because we normalize this whitespace as we scan
(see above in scan_pubid_literal())
It is not ok when there are
more names than '#PCDATA'
and no '*'.
Tests of Validity Constraints
Currently unused
vc_test_attr_value(_,#xmerl_scanner{validation=false}) ->
ok;
vc_test_attr_value(Attr={_,'ID',_,_,_},S) ->
vc_ID_Attribute_Default(Attr,S);
vc_test_attr_value({_,{enumeration,_NameL},_,_,_},_S) ->
ok.
a parameter reference is ok
They differ in that this one checks for recursive calls to the same
parameter entity.
handle whitespace explicitly in this case.
{system,URI} or {public,URI}
Included in literal.
should do an update Write(parameter_entity)
so next expand_pe_reference is faster
single or duoble qoutes are not treated as delimeters
in passages "included in literal"
This is either a character entity or a general entity (internal
or external) reference. An internal general entity shall not be
expanded in an entity def XML1.0 section 4.5.
General Entity is bypassed, though must check for
recursion: save referenced name now and check for
recursive reference after the hole entity definition is
completed.
The following clauses is for PE Nesting VC constraint
Start delimeter for comment
Start delimeter for PI
for a markupdecl
Stop delimeter for PI
Stop delimeter for comment
|T],S0) ->
demands a whitespace, though a parameter entity is ok, it will
expand with a whitespace on each side.
this is not a PERefence , but an PEDeclaration
|_T]=T,S) ->
strip but don't accept tab
@doc Function to accumulate and normalize whitespace.
Helper functions
decode_UTF8(Str) ->
decode_UTF8(Str,[]).
decode_UTF8([],Acc) ->
lists:reverse(Acc);
decode_UTF8(T,[H|Acc]);
decode_UTF8([H1,H2|T],Acc) when H1 =< 16#DF->
decode_UTF8(T,[Ch|Acc]);
Ch = char_UTF8_3b(H1,H2,H3),
decode_UTF8(T,[Ch|Acc]);
Ch = char_UTF8_4b(H1,H2,H3,H4),
decode_UTF8(T,[Ch|Acc]);
decode_UTF8([H1,H2,H3,H4,H5|T],Acc) when H1 =< 16#FB ->
Ch = char_UTF8_5b(H1,H2,H3,H4,H5),
decode_UTF8(T,[Ch|Acc]);
decode_UTF8([H1,H2,H3,H4,H5,H6|T],Acc) ->
Ch = char_UTF8_6b(H1,H2,H3,H4,H5,H6),
decode_UTF8(T,[Ch|Acc]).
char_UTF8_2b(H1,H2) ->
Msb + Lsb.
char_UTF8_3b(H1,H2,H3) ->
char_UTF8_4b(H1,H2,H3,H4) ->
char_UTF8_5b(H1,H2,H3,H4,H5) ->
char_UTF8_6b(H1,H2,H3,H4,H5,H6) ->
utf8_char([H|T],S0=#xmerl_scanner{encoding="UTF-16"}) ->
{H,T,S};
{H,T,S};
utf8_char([H1,H2|T],S0) when H1 =< 16#DF->
?bump_col(2),
{Ch,T,S};
Ch = char_UTF8_3b(H1,H2,H3),
?bump_col(3),
{Ch,T,S};
utf8_char([H1,H2,H3,H4|T],S0) when H1 =< 16#F7 ->
Ch = char_UTF8_4b(H1,H2,H3,H4),
?bump_col(4),
{Ch,T,S};
utf8_char([H1,H2,H3,H4,H5|T],S0) when H1 =< 16#FB ->
Ch = char_UTF8_5b(H1,H2,H3,H4,H5),
?bump_col(5),
{Ch,T,S};
utf8_char([H1,H2,H3,H4,H5,H6|T],S0) ->
Ch = char_UTF8_6b(H1,H2,H3,H4,H5,H6),
?bump_col(6),
{Ch,T,S}. | The contents of this file are subject to the Erlang Public License ,
Version 1.0 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
The Original Code is xmerl-0.15
The Initial Developer of the Original Code is Ericsson Telecom
AB . Portions created by Ericsson are Copyright ( C ) , 1998 , Ericsson
Telecom AB . All Rights Reserved .
< > :
< >
# 0 . BASIC INFORMATION
Author : < >
Description : Simgle - pass XML scanner . See xmerl.hrl for data defs .
The XML parser is activated through
The global state of the scanner , represented by the # xmerl_scanner { } record .
< dd > Call back function to decide what to do if the scanner runs into eof
before the document is complete.</dd >
{ fetch_fun , Fun , FetchState}</code></dt >
< dt><code>{rules , ReadFun , WriteFun , RulesState } |
< dd > Global state variable accessible from all customization functions</dd >
is not in the fetch_path , the URI will be used as a file
< dt><code>{space , >
( default ) to preserve spaces , ' normalize ' to
accumulate consecutive whitespace and replace it with one space.</dd >
< dt><code>{namespace_conformant , >
< dd > Controls whether to behave as a namespace conformant XML parser ,
< dt><code>{validation , >
< dd > Controls whether to process as a validating XML parser ,
< dt><code>{quiet , >
< dt><code>{doctype_DTD , DTD}</code></dt >
< dd > Allows to specify DTD name when it is n't available in the XML
< dt><code>{xmlbase , Dir}</code></dt >
< dd > Set default character set used ( default UTF-8 ) .
This character set is used only if not explicitly given by the XML
< dt><code>{text_decl , Bool}</code></dt >
-module(xmerl_scan).
-vsn('0.19').
-date('03-09-16').
-export([string/1, string/2,
file/1, file/2]).
-export([user_state/1, user_state/2,
event_state/1, event_state/2,
hook_state/1, hook_state/2,
rules_state/1, rules_state/2,
fetch_state/1, fetch_state/2,
cont_state/1, cont_state/2]).
-export([accumulate_whitespace/4]).
-define(debug , 1 ) .
-include_lib("kernel/include/file.hrl").
-define(fatal(Reason, S),
if
S#xmerl_scanner.quiet ->
ok;
true ->
ok=io:format("~p- fatal: ~p~n", [?LINE, Reason])
end,
fatal(Reason, S)).
-define(ustate(U, S), S#xmerl_scanner{user_state = U}).
user_state(#xmerl_scanner{user_state = S}) -> S.
event_state(#xmerl_scanner{fun_states = #xmerl_fun_states{event = S}}) -> S.
hook_state(#xmerl_scanner{fun_states = #xmerl_fun_states{hook = S}}) -> S.
%% @spec rules_state(S::global_state()) -> global_state()
%% @equiv rules_state(RulesState, S)
rules_state(#xmerl_scanner{fun_states = #xmerl_fun_states{rules = S}}) -> S.
fetch_state(#xmerl_scanner{fun_states = #xmerl_fun_states{fetch = S}}) -> S.
cont_state(#xmerl_scanner{fun_states = #xmerl_fun_states{cont = S}}) -> S.
@doc For controlling the UserState , to be used in a user function .
%% @doc Store a new user state in the scanner; intended to be called from
%% inside user-supplied callback functions.
user_state(UserState, Scanner) ->
    Scanner#xmerl_scanner{user_state = UserState}.
@spec event_state(EventState , S::global_state ( ) ) - > global_state ( )
@doc For controlling the EventState , to be used in an event
%% @doc Replace the event-callback state inside the scanner's fun_states
%% record; intended to be called from inside an event function.
event_state(NewEventState, Scanner = #xmerl_scanner{fun_states = FunStates}) ->
    Scanner#xmerl_scanner{
      fun_states = FunStates#xmerl_fun_states{event = NewEventState}}.
@spec hook_state(HookState , S::global_state ( ) ) - > global_state ( )
@doc For controlling the HookState , to be used in a hook
%% @doc Replace the hook-callback state inside the scanner's fun_states
%% record; intended to be called from inside a hook function.
hook_state(NewHookState, Scanner = #xmerl_scanner{fun_states = FunStates}) ->
    Scanner#xmerl_scanner{
      fun_states = FunStates#xmerl_fun_states{hook = NewHookState}}.
@doc For controlling the RulesState , to be used in a rules
rules_state(X, S=#xmerl_scanner{fun_states = FS}) ->
FS1 = FS#xmerl_fun_states{rules = X},
S#xmerl_scanner{fun_states = FS1}.
@spec fetch_state(FetchState , S::global_state ( ) ) - > global_state ( )
@doc For controlling the FetchState , to be used in a fetch
function , and called when the parser fetch an external resource ( eg . a ) .
fetch_state(X, S=#xmerl_scanner{fun_states = FS}) ->
FS1 = FS#xmerl_fun_states{fetch = X},
S#xmerl_scanner{fun_states = FS1}.
@spec cont_state(ContinuationState , S::global_state ( ) ) - > global_state ( )
@doc For controlling the ContinuationState , to be used in a continuation
%% @doc Replace the continuation-callback state inside the scanner's
%% fun_states record; intended to be called from a continuation function.
cont_state(NewContState, Scanner = #xmerl_scanner{fun_states = FunStates}) ->
    Scanner#xmerl_scanner{
      fun_states = FunStates#xmerl_fun_states{cont = NewContState}}.
%% @doc Parse the XML document in file F using the default (empty) option
%% list. Equivalent to file(F, []).
file(F) ->
    file(F, []).
@spec file(Filename::string ( ) , Options::option_list ( ) ) - > { xmlElement(),Rest }
@doc Parse file containing an XML document
file(F, Options) ->
ExtCharset=case lists:keysearch(encoding,1,Options) of
{value,{_,Val}} -> Val;
false -> undefined
end,
case int_file(F,Options,ExtCharset) of
{Res, Tail,S=#xmerl_scanner{close_fun=Close}} ->
{Res,Tail};
{error, Reason} ->
{error, Reason};
Other ->
{error, Other}
end.
%% Read the whole file into memory and hand it to the string scanner; the
%% file's directory becomes the XML base for resolving relative URIs.
%% Returns whatever scan produces, or the file:read_file/1 error tuple.
int_file(F, Options,_ExtCharset) ->
    case file:read_file(F) of
	{ok, Bin} ->
	    int_string(binary_to_list(Bin), Options, filename:dirname(F));
	Error ->
	    Error
    end.
%% Like int_file/3, but only scans up to / including the declarations
%% (used when fetching external DTD subsets). The file's directory becomes
%% the XML base for resolving relative URIs.
int_file_decl(F, Options,_ExtCharset) ->
    case file:read_file(F) of
	{ok, Bin} ->
	    int_string_decl(binary_to_list(Bin), Options, filename:dirname(F));
	Error ->
	    Error
    end.
%% @doc Parse the XML document held in Str using the default (empty)
%% option list. Equivalent to string(Str, []).
string(Str) ->
    string(Str, []).
@spec string(Text::list(),Options::option_list ( ) ) - > { xmlElement(),Rest }
@doc string containing an XML document
string(Str, Options) ->
case int_string(Str, Options) of
{Res, Tail, S=#xmerl_scanner{close_fun = Close}} ->
{Res,Tail};
{error, Reason} ->
Other ->
{error, Other}
end.
%% Scan a string with the current working directory as the XML base
%% (used when no base directory is otherwise known).
int_string(Str, Options) ->
    {ok, XMLBase} = file:get_cwd(),
    int_string(Str, Options, XMLBase).
%% Build the initial scanner state, detect the character set of the input
%% (from a BOM or an externally supplied encoding) and scan the document.
%% For 'iso-10646-utf-1' the detector has already converted the data, so
%% only the recorded encoding name changes before scanning.
int_string(Str, Options, XMLBase) ->
    S=initial_state0(Options,XMLBase),
    case xmerl_lib:detect_charset(S#xmerl_scanner.encoding,Str) of
	{auto,'iso-10646-utf-1',Str2} ->
	    scan_document(Str2, S#xmerl_scanner{encoding="iso-10646-utf-1"});
	{external,'iso-10646-utf-1',Str2} ->
	    scan_document(Str2, S#xmerl_scanner{encoding="iso-10646-utf-1"});
	{undefined,undefined,Str2} ->
	    %% no charset info: leave encoding detection to the prolog scan
	    scan_document(Str2, S);
	{external,ExtCharset,Str2} ->
	    scan_document(Str2, S#xmerl_scanner{encoding=atom_to_list(ExtCharset)})
    end.
%% Like int_string/3, but scans only the declaration part of the input
%% (scan_decl/2 instead of scan_document/2); used for external DTD subsets.
int_string_decl(Str, Options, XMLBase) ->
    S=initial_state0(Options,XMLBase),
    case xmerl_lib:detect_charset(S#xmerl_scanner.encoding,Str) of
	{auto,'iso-10646-utf-1',Str2} ->
	    scan_decl(Str2, S#xmerl_scanner{encoding="iso-10646-utf-1"});
	{external,'iso-10646-utf-1',Str2} ->
	    scan_decl(Str2, S#xmerl_scanner{encoding="iso-10646-utf-1"});
	{undefined,undefined,Str2} ->
	    scan_decl(Str2, S);
	{external,ExtCharset,Str2} ->
	    scan_decl(Str2, S#xmerl_scanner{encoding=atom_to_list(ExtCharset)})
    end.
%% Build the default scanner state: all callbacks point at the default
%% implementations in this module, and XMLBase is the directory used to
%% resolve relative URIs. The user's Options are then folded on top.
initial_state0(Options,XMLBase) ->
    initial_state(Options, #xmerl_scanner{
			     event_fun = fun event/2,
			     hook_fun = fun hook/2,
			     acc_fun = fun acc/3,
			     fetch_fun = fun fetch/2,
			     close_fun = fun close/1,
			     continuation_fun = fun cont/3,
			     rules_read_fun = fun rules_read/3,
			     rules_write_fun = fun rules_write/4,
			     rules_delete_fun= fun rules_delete/3,
			     xmlbase = XMLBase
			    }).
%% Fold the user-supplied option list into the #xmerl_scanner{} record.
%% Each clause handles one option; the three-element *_fun options also
%% install the corresponding callback state. Unknown options are not
%% tolerated (no catch-all clause), so a bad option causes a function
%% clause error here.
initial_state([{event_fun, F}|T], S) ->
    initial_state(T, S#xmerl_scanner{event_fun = F});
initial_state([{event_fun, F, ES}|T], S) ->
    S1 = event_state(ES, S#xmerl_scanner{event_fun = F}),
    initial_state(T, S1);
initial_state([{acc_fun, F}|T], S) ->
    initial_state(T, S#xmerl_scanner{acc_fun = F});
initial_state([{hook_fun, F}|T], S) ->
    initial_state(T, S#xmerl_scanner{hook_fun = F});
initial_state([{hook_fun, F, HS}|T], S) ->
    S1 = hook_state(HS, S#xmerl_scanner{hook_fun = F}),
    initial_state(T, S1);
initial_state([{close_fun, F}|T], S) ->
    initial_state(T, S#xmerl_scanner{close_fun = F});
initial_state([{fetch_fun, F}|T], S) ->
    initial_state(T, S#xmerl_scanner{fetch_fun = F});
initial_state([{fetch_fun, F, FS}|T], S) ->
    S1 = fetch_state(FS, S#xmerl_scanner{fetch_fun = F}),
    initial_state(T, S1);
initial_state([{fetch_path, P}|T], S) ->
    initial_state(T, S#xmerl_scanner{fetch_path = P});
initial_state([{continuation_fun, F}|T], S) ->
    initial_state(T, S#xmerl_scanner{continuation_fun = F});
initial_state([{continuation_fun, F, CS}|T], S) ->
    S1 = cont_state(CS, S#xmerl_scanner{continuation_fun = F}),
    initial_state(T, S1);
%% a caller-owned rules table: keep_rules prevents cleanup/1 deleting it
initial_state([{rules, R}|T], S) ->
    initial_state(T, S#xmerl_scanner{rules = R,
				     keep_rules = true});
initial_state([{rules, Read, Write, RS}|T], S) ->
    S1 = rules_state(RS, S#xmerl_scanner{rules_read_fun = Read,
					 rules_write_fun = Write,
					 keep_rules = true}),
    initial_state(T, S1);
initial_state([{user_state, F}|T], S) ->
    initial_state(T, S#xmerl_scanner{user_state = F});
initial_state([{space, L}|T], S) ->
    initial_state(T, S#xmerl_scanner{space = L});
initial_state([{line, L}|T], S) ->
    initial_state(T, S#xmerl_scanner{line = L});
%% boolean-valued options are guarded so only true/false are accepted
initial_state([{namespace_conformant, F}|T], S) when F==true; F==false ->
    initial_state(T, S#xmerl_scanner{namespace_conformant = F});
initial_state([{validation, F}|T], S) when F==true; F==false ->
    initial_state(T, S#xmerl_scanner{validation = F});
initial_state([{quiet, F}|T], S) when F==true; F==false ->
    initial_state(T, S#xmerl_scanner{quiet = F});
initial_state([{doctype_DTD,DTD}|T], S) ->
    initial_state(T,S#xmerl_scanner{doctype_DTD = DTD});
initial_state([{text_decl,Bool}|T], S) ->
    initial_state(T,S#xmerl_scanner{text_decl=Bool});
initial_state([{environment,Env}|T], S) ->
    initial_state(T,S#xmerl_scanner{environment=Env});
initial_state([{xmlbase, D}|T], S) ->
    initial_state(T, S#xmerl_scanner{xmlbase = D});
initial_state([{encoding, Enc}|T], S) ->
    initial_state(T, S#xmerl_scanner{encoding = Enc});
initial_state([], S=#xmerl_scanner{rules = undefined}) ->
    %% no rules table supplied: create a private ets table that cleanup/1
    %% will delete again (keep_rules stays false)
    Tab = ets:new(rules, [set, public]),
    S#xmerl_scanner{rules = Tab};
initial_state([], S) ->
    S.
- { element , Line , Name , , Content }
%% Default hook callback: pass each completed entity through unchanged
%% together with the untouched scanner state.
hook(ParsedEntity, ScannerState) ->
    {ParsedEntity, ScannerState}.
Data Events
# xmlPI ended
# xmlText ended
%% Default event callback: ignore the event and return the scanner state
%% unchanged.
event(_X, S) ->
    S.
%% The acc/3 function can return either {Acc', S'} or {Acc', Pos', S'},
%% where Pos' can be derived from X#xmlElement.pos, X#xmlText.pos, or
%% X#xmlAttribute.pos (whichever is the current object type).
%% If {Acc', S'} is returned, Pos will be incremented by 1 by default.
%% Default accumulator callback: prepend each object to the accumulator.
%% Text nodes get their value flattened first so consumers always see a
%% flat character list; everything else is stored as-is.
acc(X = #xmlText{value = Text}, Acc, S) ->
    {[X#xmlText{value = lists:flatten(Text)}|Acc], S};
acc(X, Acc, S) ->
    {[X|Acc], S}.
%% Default fetch callback: both SYSTEM and PUBLIC external identifiers are
%% resolved through their URI (the public id itself is ignored here).
fetch({system, URI}, S) ->
    fetch_URI(URI, S);
fetch({public, _PublicID, URI}, S) ->
    fetch_URI(URI, S).
fetch_URI(URI, S) ->
assume URI is a filename
Split = filename:split(URI),
Filename = fun([])->[];(X)->lists:last(X) end (Split),
Fullname =
absolute path , see RFC2396 sect 3
filename:join(["/"|Name]);
["/"|Rest] when Rest /= [] ->
URI;
["http:"|_Rest] ->
{http,URI};
[];
_ ->
filename:join(S#xmerl_scanner.xmlbase, URI)
end,
Path = path_locate(S#xmerl_scanner.fetch_path, Filename, Fullname),
?dbg("fetch(~p) -> {file, ~p}.~n", [URI, Path]),
{ok, Path, S}.
%% Locate an external entity. HTTP URIs and empty names are passed through
%% untouched. Otherwise every directory in the fetch path is probed for a
%% regular file with the given basename; if none matches, the already
%% computed full name is used as a last resort.
path_locate(_Dirs, _Basename, {http, _} = URI) ->
    URI;
path_locate(_Dirs, _Basename, []) ->
    [];
path_locate([Dir | MoreDirs], Basename, FullName) ->
    Candidate = filename:join(Dir, Basename),
    case file:read_file_info(Candidate) of
        {ok, #file_info{type = regular}} ->
            {file, Candidate};
        _ ->
            path_locate(MoreDirs, Basename, FullName)
    end;
path_locate([], _Basename, FullName) ->
    {file, FullName}.
%% Default continuation callback: there is never more input, so always
%% invoke the exception (end-of-input) handler.
cont(_F, Exception, US) ->
    Exception(US).
%% Default close callback: nothing to release, return the state unchanged.
close(S) ->
    S.
Scanner
[ 1 ] document : : = prolog element Misc *
scan_document(Str0, S=#xmerl_scanner{event_fun = Event,
line = L, col = C,
environment=Env,
encoding=Charset,
validation=ValidateResult}) ->
S1 = Event(#xmerl_event{event = started,
line = L,
col = C,
data = document}, S),
attribute in a XML declaration that one will be used later
Str=if
Default character set is UTF-8
ucs:to_unicode(Str0,list_to_atom(Charset));
true ->
Str0
end,
{"<"++T2, S2} = scan_prolog(Str, S1, _StartPos = 1),
{Res, T3, S3} =scan_element(T2,S2,_StartPos = 1),
{Tail, S4}=scan_misc(T3, S3, _StartPos = 1),
S5 = #xmerl_scanner{} = Event(#xmerl_event{event = ended,
line = S4#xmerl_scanner.line,
col = S4#xmerl_scanner.col,
data = document}, S4),
S6 = case ValidateResult of
false ->
cleanup(S5);
true when Env == element; Env == prolog ->
check_decl2(S5),
case xmerl_validate:validate(S5,Res) of
{'EXIT',{error,Reason}} ->
S5b=cleanup(S5),
?fatal({failed_validation,Reason}, S5b);
{'EXIT',Reason} ->
S5b=cleanup(S5),
?fatal({failed_validation,Reason}, S5b);
{error,Reason} ->
S5b=cleanup(S5),
?fatal({failed_validation,Reason}, S5b);
{error,Reason,_Next} ->
S5b=cleanup(S5),
?fatal({failed_validation,Reason}, S5b);
_XML ->
cleanup(S5)
end;
true ->
cleanup(S5)
end,
{Res, Tail, S6}.
scan_decl(Str, S=#xmerl_scanner{event_fun = Event,
line = L, col = C,
environment=_Env,
encoding=_Charset,
validation=_ValidateResult}) ->
S1 = Event(#xmerl_event{event = started,
line = L,
col = C,
data = document}, S),
case scan_prolog(Str, S1, _StartPos = 1) of
{T2="<"++_, S2} ->
{{S2#xmerl_scanner.user_state,T2},[],S2};
{[], S2}->
{[],[],S2};
{T2, S2} ->
{_,_,S3} = scan_content(T2,S2,[],_Attrs=[],S2#xmerl_scanner.space,
_Lang=[],_Parents=[],#xmlNamespace{}),
{T2,[],S3}
end.
[ 22 ] Prolog
prolog : : = XMLDecl ? Misc * ( * ) ?
scan_prolog([], S=#xmerl_scanner{text_decl=true},_Pos) ->
{[],S};
scan_prolog([], S=#xmerl_scanner{continuation_fun = F}, Pos) ->
?dbg("cont()...~n", []),
F(fun(MoreBytes, S1) -> scan_prolog(MoreBytes, S1, Pos) end,
fun(S1) -> {[], S1} end,
S);
scan_prolog("<?xml"++T,S0=#xmerl_scanner{encoding=Charset0,col=Col,line=L},Pos)
when ?whitespace(hd(T)) ->
{Charset,T3, S3}=
if
Col==1,L==1,S0#xmerl_scanner.text_decl==true ->
?dbg("prolog(\"<?xml\")~n", []),
?bump_col(5),
{_,T1,S1} = mandatory_strip(T,S0),
{Decl,T2, S2}=scan_text_decl(T1,S1),
Encoding=Decl#xmlDecl.encoding,
{Encoding,T2, S2#xmerl_scanner{encoding=Encoding}};
Col==1,L==1 ->
?dbg("prolog(\"<?xml\")~n", []),
?bump_col(5),
{Decl,T2, S2}=scan_xml_decl(T, S0),
Encoding=Decl#xmlDecl.encoding,
{Encoding,T2, S2#xmerl_scanner{encoding=Encoding}};
true ->
?fatal({xml_declaration_must_be_first_in_doc,Col,L},S0)
end,
if
scan_prolog(T3, S3, Pos);
?fatal({xml_declaration_must_be_first_in_doc,Col,L},S3);
T4=ucs:to_unicode(T3,list_to_atom(Charset)),
scan_prolog(T4, S3, Pos);
scan_prolog(T3, S3, Pos)
end;
scan_prolog("<!DOCTYPE" ++ T, S0=#xmerl_scanner{environment=prolog,
encoding=Charset}, Pos) ->
?dbg("prolog(\"<!DOCTYPE\")~n", []),
?bump_col(9),
If no known character set assume it is UTF-8
T1=if
Charset==undefined -> ucs:to_unicode(T,'utf-8');
true -> T
end,
{T2, S1} = scan_doctype(T1, S),
scan_misc(T2, S1, Pos);
scan_prolog(Str, S0 = #xmerl_scanner{user_state=_US,encoding=Charset},Pos) ->
?dbg("prolog(\"<\")~n", []),
Check for Comments , PI before possible DOCTYPE declaration
?bump_col(1),
If no known character set assume it is UTF-8
T=if
Charset==undefined -> ucs:to_unicode(Str,'utf-8');
true -> Str
end,
{T1, S1}=scan_misc(T, S, Pos),
scan_prolog2(T1,S1,Pos).
scan_prolog2([], S=#xmerl_scanner{continuation_fun = F}, Pos) ->
?dbg("cont()...~n", []),
F(fun(MoreBytes, S1) -> scan_prolog2(MoreBytes, S1, Pos) end,
fun(S1) -> {[], S1} end,
S);
scan_prolog2("<!DOCTYPE" ++ T, S0=#xmerl_scanner{environment=prolog}, Pos) ->
?dbg("prolog(\"<!DOCTYPE\")~n", []),
?bump_col(9),
{T1, S1} = scan_doctype(T, S),
scan_misc(T1, S1, Pos);
scan_prolog2(Str = "<!" ++ _, S, _Pos) ->
?dbg("prolog(\"<!\")~n", []),
In e.g. a DTD , we jump directly to markup declarations
scan_ext_subset(Str, S);
scan_prolog2(Str, S0 = #xmerl_scanner{user_state=_US},Pos) ->
?dbg("prolog(\"<\")~n", []),
Check for more Comments and PI after DOCTYPE declaration
?bump_col(1),
scan_misc(Str, S, Pos).
[ 27 ] Misc : : = Comment | PI | S
scan_misc([], S=#xmerl_scanner{continuation_fun = F}, Pos) ->
?dbg("cont()...~n", []),
F(fun(MoreBytes, S1) -> scan_misc(MoreBytes, S1, Pos) end,
fun(S1) -> {[], S1} end,
S);
{_, T1, S1} = scan_comment(T, S, Pos, _Parents = [], _Lang = []),
scan_misc(T1,S1,Pos);
?dbg("prolog(\"<?\")~n", []),
?bump_col(2),
{_PI, T1, S1} = scan_pi(T, S, Pos),
scan_misc(T1,S1,Pos);
scan_misc([H|T], S, Pos) when ?whitespace(H) ->
?dbg("prolog(whitespace)~n", []),
scan_misc(T,S,Pos);
scan_misc(T,S,_Pos) ->
{T,S}.
%% Delete the internally created rules ets table once scanning is done.
%% If the table was supplied by the caller (keep_rules is set by the
%% {rules, ...} options), it is left alone.
cleanup(S=#xmerl_scanner{keep_rules = false,
			 rules = Rules}) ->
    ets:delete(Rules),
    S#xmerl_scanner{rules = undefined};
cleanup(S) ->
    S.
Prolog and Document Type Declaration XML 1.0 Section 2.8
[ 23 ] XMLDecl : : = ' < ? xml ' ? SDDecl ? S ? ' ? > '
[ 24 ] : : = S ' version ' Eq ( " ' " VersionNum " ' " | ' " ' VersionNum ' " ' )
%% Scan the XML declaration body after "<?xml" ([23]). The VersionInfo
%% production ([24]) is mandatory and must come first; it is stored as a
%% 'version' attribute of the #xmlDecl{} record before the optional
%% encoding/standalone parts are scanned.
scan_xml_decl(T, S) ->
    %% VersionInfo [24] is mandatory
    {_,T1,S2} = mandatory_strip(T,S),
    "version" ++ T2 = T1,
    {T3, S3} = scan_eq(T2, S2),
    {Vsn, T4, S4} = scan_xml_vsn(T3, S3),
    Attr = #xmlAttribute{name = version,
			 parents = [{xml, _XMLPos = 1}],
			 value = Vsn},
    scan_xml_decl(T4, S4, #xmlDecl{attributes = [Attr]}).
scan_xml_decl([], S=#xmerl_scanner{continuation_fun = F}, Decl) ->
?dbg("cont()...~n", []),
F(fun(MoreBytes, S1) -> scan_xml_decl(MoreBytes, S1, Decl) end,
fun(S1) -> {[], [], S1} end,
S);
scan_xml_decl("?>" ++ T, S0, Decl) ->
?bump_col(2),
return_xml_decl(T,S,Decl);
scan_xml_decl(T,S=#xmerl_scanner{event_fun = _Event},Decl) ->
{_,T1,S1}=mandatory_strip(T,S),
scan_xml_decl2(T1,S1,Decl).
scan_xml_decl2("?>" ++ T, S0,Decl) ->
?bump_col(2),
return_xml_decl(T,S,Decl);
scan_xml_decl2("encoding" ++ T, S0 = #xmerl_scanner{event_fun = Event},
Decl0 = #xmlDecl{attributes = Attrs}) ->
[ 80 ] EncodingDecl
?bump_col(8),
{T1, S1} = scan_eq(T, S),
{EncName, T2, S2} = scan_enc_name(T1, S1),
LowEncName=httpd_util:to_lower(EncName),
Attr = #xmlAttribute{name = encoding,
parents = [{xml, _XMLPos = 1}],
value = LowEncName},
Decl = Decl0#xmlDecl{encoding = LowEncName,
attributes = [Attr|Attrs]},
S3 = #xmerl_scanner{} = Event(#xmerl_event{event = ended,
line = S0#xmerl_scanner.line,
col = S0#xmerl_scanner.col,
data = Attr}, S2),
case T2 of
"?>" ++ _T3 ->
scan_xml_decl3(T2,S3,Decl);
_ ->
{_,T3,S4} = mandatory_strip(T2,S3),
scan_xml_decl3(T3, S4, Decl)
end;
scan_xml_decl2(T="standalone" ++ _T,S,Decl) ->
scan_xml_decl3(T,S,Decl).
scan_xml_decl3("?>" ++ T, S0,Decl) ->
?bump_col(2),
return_xml_decl(T,S,Decl);
scan_xml_decl3("standalone" ++ T,S0 = #xmerl_scanner{event_fun = Event},
Decl0 = #xmlDecl{attributes = Attrs}) ->
[ 32 ] SDDecl
?bump_col(10),
{T1, S1} = scan_eq(T, S),
{StValue,T2,S2}=scan_standalone_value(T1,S1),
Attr = #xmlAttribute{name = standalone,
parents = [{xml, _XMLPos = 1}],
value = StValue},
Decl = Decl0#xmlDecl{standalone = StValue,
attributes = [Attr|Attrs]},
S3 = #xmerl_scanner{} = Event(#xmerl_event{event = ended,
line = S0#xmerl_scanner.line,
col = S0#xmerl_scanner.col,
data = Attr}, S2),
{_,T3,S4} = strip(T2,S3),
"?>" ++ T4 = T3,
return_xml_decl(T4, S4#xmerl_scanner{col=S4#xmerl_scanner.col+2}, Decl).
return_xml_decl(T,S=#xmerl_scanner{hook_fun = Hook,
event_fun = Event},
Decl0 = #xmlDecl{attributes = Attrs}) ->
?strip1,
Decl = Decl0#xmlDecl{attributes = lists:reverse(Attrs)},
S2 = #xmerl_scanner{} = Event(#xmerl_event{event = ended,
line = S#xmerl_scanner.line,
col = S#xmerl_scanner.col,
data = Decl}, S1),
{Ret, S3} = Hook(Decl, S2),
{Ret, T1, S3}.
%% Scan the value of the standalone attribute in the XML declaration
%% ([32] SDDecl). Only 'yes' or 'no', in single or double quotes, is
%% accepted; for yes the scanner's standalone field is updated as well.
scan_standalone_value("'yes'" ++T,S0)->
    ?bump_col(5),
    {'yes',T,S#xmerl_scanner{standalone=yes}};
scan_standalone_value("\"yes\"" ++T,S0)->
    ?bump_col(5),
    {'yes',T,S#xmerl_scanner{standalone=yes}};
scan_standalone_value("'no'" ++T,S0) ->
    ?bump_col(4),
    {'no',T,S};
scan_standalone_value("\"no\"" ++T,S0) ->
    ?bump_col(4),
    {'no',T,S}.
[ 77 ] TextDecl : : = ' < ? xml ' ? ? ' ? > '
scan_text_decl(T,S=#xmerl_scanner{event_fun = Event}) ->
{#xmlDecl{attributes=Attrs}=Decl0,T1,S1} = scan_optional_version(T,S),
"encoding" ++ T2 = T1,
S2 = S1#xmerl_scanner{col = S1#xmerl_scanner.col + 8},
{T3, S3} = scan_eq(T2, S2),
{EncName, T4, S4} = scan_enc_name(T3, S3),
LowEncName=httpd_util:to_lower(EncName),
?strip5,
Attr = #xmlAttribute{name = encoding,
parents = [{xml,1}],
value = LowEncName},
Decl = Decl0#xmlDecl{encoding = LowEncName,
attributes = [Attr|Attrs]},
S6=#xmerl_scanner{} = Event(#xmerl_event{event = ended,
line = S5#xmerl_scanner.line,
col = S5#xmerl_scanner.col,
data = Attr}, S5),
scan_text_decl(T5,S6,Decl).
scan_text_decl("?>"++T,S0 = #xmerl_scanner{hook_fun = Hook,
event_fun = Event},
Decl0 = #xmlDecl{attributes = Attrs}) ->
?bump_col(2),
?strip1,
Decl = Decl0#xmlDecl{attributes = lists:reverse(Attrs)},
S2 = #xmerl_scanner{} = Event(#xmerl_event{event = ended,
line = S0#xmerl_scanner.line,
col = S0#xmerl_scanner.col,
data = Decl}, S1),
{Ret, S3} = Hook(Decl, S2),
{Ret, T1, S3}.
scan_optional_version("version"++T,S0) ->
?bump_col(7),
?strip1,
{T2, S2} = scan_eq(T1, S1),
{Vsn, T3, S3} = scan_xml_vsn(T2, S2),
{_,T4,S4} = mandatory_strip(T3,S3),
Attr = #xmlAttribute{name = version,parents = [{xml,1}],value = Vsn},
{#xmlDecl{attributes=[Attr]},T4,S4};
scan_optional_version(T,S) ->
{#xmlDecl{attributes=[]},T,S}.
[ 81 ] EncName
%% Scan the quote delimiter that opens an encoding name value
%% (XML 1.0 [80] EncodingDecl / [81] EncName). The value must be
%% surrounded by single or double quotes. The previous guard
%% (H >= $"; H =< $') was vacuously true for every character, so any
%% byte was silently accepted as a delimiter; require a real quote and
%% report anything else as a fatal error instead.
scan_enc_name([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_enc_name(MoreBytes, S1) end,
      fun(S1) -> ?fatal(expected_encoding_name, S1) end,
      S);
scan_enc_name([H|T], S0) when H =:= $"; H =:= $' ->
    ?bump_col(1),
    scan_enc_name(T, S, H, []);
scan_enc_name([H|_T], S) ->
    %% the encoding value was not quoted
    ?fatal({error,{unexpected_character_in_Enc_Name_delimiter,H}}, S).
scan_enc_name([], S=#xmerl_scanner{continuation_fun = F}, Delim, Acc) ->
?dbg("cont()...~n", []),
F(fun(MoreBytes, S1) -> scan_enc_name(MoreBytes, S1, Delim, Acc) end,
fun(S1) -> ?fatal(expected_encoding_name, S1) end,
S);
scan_enc_name([H|T], S0, Delim, Acc) when H >= $a, H =< $z ->
?bump_col(1),
scan_enc_name2(T, S, Delim, [H|Acc]);
scan_enc_name([H|T], S0, Delim, Acc) when H >= $A, H =< $Z ->
?bump_col(1),
scan_enc_name2(T, S, Delim, [H|Acc]);
scan_enc_name([H|_T],S,_Delim,_Acc) ->
?fatal({error,{unexpected_character_in_Enc_Name,H}},S).
%% Scan the remaining characters of an encoding name ([81] EncName):
%% latin letters, digits and ._- are allowed after the leading letter.
%% Scanning stops when the opening quote Delim reappears; the accumulated
%% characters (kept reversed in Acc) are then returned in order.
scan_enc_name2([], S=#xmerl_scanner{continuation_fun = F}, Delim, Acc) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_enc_name2(MoreBytes, S1, Delim, Acc) end,
      fun(S1) -> ?fatal(expected_encoding_name, S1) end,
      S);
scan_enc_name2([H|T], S0, H, Acc) ->
    %% closing delimiter found: done
    ?bump_col(1),
    {lists:reverse(Acc), T, S};
scan_enc_name2([H|T], S0, Delim, Acc) when H >= $a, H =< $z ->
    ?bump_col(1),
    scan_enc_name2(T, S, Delim, [H|Acc]);
scan_enc_name2([H|T], S0, Delim, Acc) when H >= $A, H =< $Z ->
    ?bump_col(1),
    scan_enc_name2(T, S, Delim, [H|Acc]);
scan_enc_name2([H|T], S0, Delim, Acc) when H >= $0, H =< $9 ->
    ?bump_col(1),
    scan_enc_name2(T, S, Delim, [H|Acc]);
scan_enc_name2([H|T], S0, Delim, Acc) when H == $.; H == $_; H == $- ->
    ?bump_col(1),
    scan_enc_name2(T, S, Delim, [H|Acc]).
[ 26 ] VersionNum
%% Scan a quoted version number ([26] VersionNum): expect an opening
%% single or double quote, then delegate to xml_vsn/4 which collects
%% characters until the matching quote.
scan_xml_vsn([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_xml_vsn(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_xml_vsn([H|T], S) when H==$"; H==$'->
    xml_vsn(T, S#xmerl_scanner{col = S#xmerl_scanner.col+1}, H, []).
%% Collect version-number characters until the opening quote Delim
%% reappears. Letters, digits and _.:- are allowed ([26] VersionNum);
%% anything else is a fatal error. Acc is kept in reverse order.
xml_vsn([], S=#xmerl_scanner{continuation_fun = F}, Delim, Acc) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> xml_vsn(MoreBytes, S1, Delim, Acc) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
xml_vsn([H|T], S=#xmerl_scanner{col = C}, H, Acc) ->
    %% closing delimiter found
    {lists:reverse(Acc), T, S#xmerl_scanner{col = C+1}};
xml_vsn([H|T], S=#xmerl_scanner{col = C},Delim, Acc) when H >= $a, H =< $z ->
    xml_vsn(T, S#xmerl_scanner{col = C+1}, Delim, [H|Acc]);
xml_vsn([H|T], S=#xmerl_scanner{col = C},Delim, Acc) when H >= $A, H =< $Z ->
    xml_vsn(T, S#xmerl_scanner{col = C+1}, Delim, [H|Acc]);
xml_vsn([H|T], S=#xmerl_scanner{col = C},Delim, Acc) when H >= $0, H =< $9 ->
    xml_vsn(T, S#xmerl_scanner{col = C+1}, Delim, [H|Acc]);
xml_vsn([H|T], S=#xmerl_scanner{col = C}, Delim, Acc) ->
    case lists:member(H, "_.:-") of
	true ->
	    xml_vsn(T, S#xmerl_scanner{col = C+1}, Delim, [H|Acc]);
	false ->
	    ?fatal({invalid_vsn_char, H}, S)
    end.
[ 16 ] PI : : = ' < ? ' PITarget ( S ( ( * ' ? > ' * ) ) ) ? ' ? > '
scan_pi([], S=#xmerl_scanner{continuation_fun = F}, Pos) ->
?dbg("cont()...~n", []),
F(fun(MoreBytes, S1) -> scan_pi(MoreBytes, S1, Pos) end,
fun(S1) -> ?fatal(unexpected_end, S1) end,
S);
scan_pi(Str = [H1,H2,H3 | T],S=#xmerl_scanner{line = L, col = C}, Pos)
when H1==$x;H1==$X ->
if
((H2==$m) or (H2==$M)) and
((H3==$l) or (H3==$L)) ->
scan_wellknown_pi(T,S,Pos);
true ->
{Target, _NamespaceInfo, T1, S1} = scan_name(Str, S),
scan_pi(T1, S1, Target, L, C, Pos, [])
end;
scan_pi(Str, S=#xmerl_scanner{line = L, col = C}, Pos) ->
{Target, _NamespaceInfo, T1, S1} = scan_name(Str, S),
scan_pi(T1, S1, Target, L, C, Pos,[]).
" Associating Style Sheets with XML documents " , Version 1.0 ,
W3C Recommendation 29 June 1999 ( -stylesheet/ )
%% Scan processing instructions whose target begins with "xml" but that
%% are well-known exceptions to the reserved-name rule. Currently only
%% "xml-stylesheet" ("Associating Style Sheets with XML documents",
%% W3C Recommendation 1999) is accepted; any other xml* target is fatal.
scan_wellknown_pi("-stylesheet"++T, S0=#xmerl_scanner{line=L,col=C},Pos) ->
    ?dbg("prolog(\"<?xml-stylesheet\")~n", []),
    ?bump_col(16),
    scan_pi(T, S, "xml-stylesheet",L,C,Pos,[]);
scan_wellknown_pi(Str,S,_Pos) ->
    ?fatal({invalid_target_name, lists:sublist(Str, 1, 10)}, S).
scan_pi(Str="?>"++_T , S , Target , L , C , Pos ) - >
scan_pi(Str , S , Target , L , C , Pos , [ ] ) ;
scan_pi(Str=[],S , Target , L , C , Pos ) - >
scan_pi(Str , S , Target , L , C , Pos , [ ] ) ;
scan_pi(T , S , Target , L , C , Pos ) - >
{ _ , } = mandatory_strip(T , S ) ,
scan_pi(T1,S1,Target , L , C , Pos , [ ] ) .
scan_pi([], S=#xmerl_scanner{continuation_fun = F}, Target,L, C, Pos, Acc) ->
?dbg("cont()...~n", []),
F(fun(MoreBytes, S1) -> scan_pi(MoreBytes, S1, Target, L, C, Pos, Acc) end,
fun(S1) -> ?fatal(unexpected_end, S1) end,
S);
scan_pi("?>" ++ T, S0 = #xmerl_scanner{hook_fun = Hook,
event_fun = Event},
Target, L, C, Pos, Acc) ->
?bump_col(2),
PI = #xmlPI{name = Target,
pos = Pos,
value = lists:reverse(Acc)},
S1 = #xmerl_scanner{} = Event(#xmerl_event{event = ended,
line = L,
col = C,
data = PI}, S),
{Ret, S2} = Hook(PI, S1),
{Ret, T, S2};
scan_pi([H|T], S, Target, L, C, Pos, Acc) when ?whitespace(H) ->
?strip1,
scan_pi2(T1, S1, Target, L, C, Pos, Acc).
scan_pi2([], S=#xmerl_scanner{continuation_fun = F}, Target,L, C, Pos, Acc) ->
?dbg("cont()...~n", []),
F(fun(MoreBytes, S1) -> scan_pi2(MoreBytes, S1, Target, L, C, Pos, Acc) end,
fun(S1) -> ?fatal(unexpected_end, S1) end,
S);
scan_pi2("?>" ++ T, S0 = #xmerl_scanner{hook_fun = Hook,
event_fun = Event},
Target, L, C, Pos, Acc) ->
?bump_col(2),
PI = #xmlPI{name = Target,
pos = Pos,
value = lists:reverse(Acc)},
S1 = #xmerl_scanner{} = Event(#xmerl_event{event = ended,
line = L,
col = C,
data = PI}, S),
{Ret, S2} = Hook(PI, S1),
{Ret, T, S2};
scan_pi2([H|T], S0, Target, L, C, Pos, Acc) ->
?bump_col(1),
wfc_legal_char(H,S),
scan_pi2(T, S, Target, L, C, Pos, [H|Acc]).
[ 28 ] doctypedecl : : =
' < ! DOCTYPE ' S Name ( S ExternalID ) ? S ? ( ' [ ' intSubset ' ] ' S ? ) ? ' > '
%% Scan a doctype declaration ([28]) after "<!DOCTYPE": mandatory
%% whitespace, then the doctype name (recorded in the scanner state),
%% then the optional external id / internal subset via scan_doctype1.
scan_doctype([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_doctype(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_doctype(T, S) ->
    {_,T1,S1} = mandatory_strip(T,S),
    {DTName, _NamespaceInfo, T2, S2} = scan_name(T1, S1),
    ?strip3,
    scan_doctype1(T3, S3#xmerl_scanner{doctype_name = DTName}).
[ 75 ] ExternalID : : = ' SYSTEM ' S SystemLiteral
scan_doctype1([], S=#xmerl_scanner{continuation_fun = F}) ->
?dbg("cont()...~n", []),
F(fun(MoreBytes, S1) -> scan_doctype1(MoreBytes, S1) end,
fun(S1) -> ?fatal(unexpected_end, S1) end,
S);
scan_doctype1("PUBLIC" ++ T, S0) ->
?bump_col(6),
{_,T1,S1} = mandatory_strip(T,S),
{PIDL, T2, S2} = scan_pubid_literal(T1, S1),
{_,T3,S3} = mandatory_strip(T2,S2),
{SL, T4, S4} = scan_system_literal(T3, S3),
?strip5,
scan_doctype2(T5, S5, {public, PIDL, SL});
scan_doctype1("SYSTEM" ++ T, S0) ->
?bump_col(6),
{_,T1,S1} = mandatory_strip(T,S),
{SL, T2, S2} = scan_system_literal(T1, S1),
?strip3,
scan_doctype2(T3, S3, {system, SL});
scan_doctype1(T, S) ->
scan_doctype2(T, S, undefined).
scan_doctype2([], S=#xmerl_scanner{continuation_fun = F},DTD) ->
?dbg("cont()...~n", []),
F(fun(MoreBytes, S1) -> scan_doctype2(MoreBytes, S1, DTD) end,
fun(S1) -> ?fatal(unexpected_end, S1) end,
S);
scan_doctype2("[" ++ T, S0, DTD) ->
?bump_col(1),
?strip1,
scan_doctype3(T1, S1, DTD);
scan_doctype2(">" ++ T, S0, DTD) ->
?bump_col(1),
?strip1,
S2 = fetch_DTD(DTD, S1),
check_decl(S2),
{T1, S2}.
[ 28a ] DeclSep : : = PEReference | S
[ 28b ] intSubset : : = ( markupdecl | DeclSep ) *
scan_doctype3([], S=#xmerl_scanner{continuation_fun = F},DTD) ->
?dbg("cont()...~n", []),
F(fun(MoreBytes, S1) -> scan_doctype3(MoreBytes, S1,DTD) end,
fun(S1) -> ?fatal(unexpected_end, S1) end,
S);
scan_doctype3("%" ++ T, S0, DTD) ->
?bump_col(1),
{PERefName, T1, S1} = scan_pe_reference(T, S),
?strip2,
case expand_pe_reference(PERefName, S2,as_PE) of
{system, _} = Name ->
S3 = fetch_DTD(Name, S2),
scan_doctype3(T2, S3, DTD);
{public, _} = Name ->
S3 = fetch_DTD(Name, S2),
scan_doctype3(T2, S3, DTD);
{public, _, _} = Name ->
S3 = fetch_DTD(Name, S2),
scan_doctype3(T2, S3, DTD);
Space added , see Section 4.4.8
{_,T3,S3} = strip(ExpRef++T2,S2),
scan_doctype3(T3,S3,DTD)
end;
scan_doctype3("]" ++ T, S0, DTD) ->
?bump_col(1),
?strip1,
S2 = fetch_DTD(DTD, S1),
check_decl(S2),
">" ++ T2 = T1,
{T2, S2};
scan_doctype3(T, S, DTD) ->
{_, T1, S1} = scan_markup_decl(T, S),
scan_doctype3(T1, S1, DTD).
%% Fetch and parse an external DTD. With no explicit DTDSpec, fall back to
%% the doctype_DTD option (lets callers name a DTD that is absent from the
%% XML stream) or do nothing. The case expression was missing its fallback
%% clause head (bare `S` after `NewS;`, a syntax error); restore it so a
%% fetch/parse result that is not an updated scanner record leaves the
%% original state untouched. Deprecated old-style guards list/1 and
%% record/2 are replaced by is_list/1 and is_record/2.
fetch_DTD(undefined, S=#xmerl_scanner{doctype_DTD=URI}) when is_list(URI) ->
    %% allow to specify DTD name when it isn't available in the xml stream
    fetch_DTD({system,URI},S);
fetch_DTD(undefined, S) ->
    S;
fetch_DTD(DTDSpec, S)->
    case fetch_and_parse(DTDSpec,S,[{text_decl,true},
				    {environment,{external,subset}}]) of
	NewS when is_record(NewS,xmerl_scanner) ->
	    NewS;
	_Other ->
	    %% no updated scanner state produced; keep the original
	    S
    end.
%% fetch_and_parse/3: fetch an external resource (DTD or external entity)
%% via the scanner's fetch_fun and, when data comes back, parse it with a
%% scanner that inherits this scanner's callbacks, rules and options.
%% Returns either a new #xmerl_scanner{} or the parse result tuple.
fetch_and_parse(ExtSpec,S=#xmerl_scanner{fetch_fun=Fetch,
                                         rules=Rules,
                                         xmlbase = XMLBase},
                Options0) ->
    RetS =
        case Fetch(ExtSpec, S) of
            {ok, NewS} ->
                NewS;
            {ok, not_fetched,NewS} ->
                NewS;
            {ok, DataRet, NewS = #xmerl_scanner{user_state = UState,
                                                event_fun = Event,
                                                hook_fun = Hook,
                                                fetch_fun = Fetch1,
                                                close_fun = Close1,
                                                continuation_fun = Cont,
                                                acc_fun = Acc,
                                                rules_read_fun = Read,
                                                rules_write_fun = Write,
                                                validation = Valid,
                                                quiet = Quiet,
                                                encoding = Charset
                                               }} ->
                EvS = event_state(NewS),
                HoS = hook_state(NewS),
                FeS = fetch_state(NewS),
                CoS = cont_state(NewS),
                %% propagate every callback and option into the scanner
                %% that will parse the fetched data
                Options = Options0++[{user_state, UState},
                                     {rules, Rules},
                                     {event_fun, Event, EvS},
                                     {hook_fun, Hook, HoS},
                                     {fetch_fun, Fetch1, FeS},
                                     {close_fun, Close1},
                                     {continuation_fun, Cont, CoS},
                                     {rules, Read, Write, ""},
                                     {acc_fun, Acc},
                                     {validation,Valid},
                                     {quiet,Quiet},
                                     {encoding,Charset}],
                case DataRet of
                    {file, F} ->
                        int_file_decl(F, Options,Charset);
                    {string, String} ->
                        int_string_decl(String, Options,XMLBase);
                    _ ->
                        %% anything else: nothing to parse here
                        {DataRet,[],NewS}
                end;
            Error ->
                ?fatal({error_fetching_DTD, {ExtSpec, Error}}, S)
        end,
    case RetS of
        #xmerl_scanner{} ->
            %% a scanner came back: drop its text_decl flag and restore the
            %% caller's parsing environment before returning it
            RetS#xmerl_scanner{text_decl=false,
                               environment=S#xmerl_scanner.environment};
        _ -> RetS
    end.
%% fetch_not_parse/2: fetch an external resource and return its raw content
%% as a string WITHOUT parsing it (used e.g. for parameter-entity
%% replacement text). A declined fetch is fatal here.
fetch_not_parse(ExtSpec,S=#xmerl_scanner{fetch_fun=Fetch}) ->
    case Fetch(ExtSpec,S) of
        {ok, not_fetched,_NewS} ->
            ?fatal({error_fetching_external_source,ExtSpec},S);
        {ok, DataRet, NewS} ->
            String =
                case DataRet of
                    {file,F} ->
                        get_file(F,S);
                    {string,Str} ->
                        binary_to_list(Str);
                    _ -> DataRet
                end,
            {String, NewS};
        _ ->
            ?fatal({error_fetching_external_resource,ExtSpec},S)
    end.
%% get_file/2: read a whole file and return its content as a string;
%% any read error aborts the scan via ?fatal.
get_file(FileName, Scanner) ->
    case file:read_file(FileName) of
        {ok, Binary} ->
            binary_to_list(Binary);
        Error ->
            ?fatal({error_reading_file, FileName, Error}, Scanner)
    end.
%% check_decl/1: post-DTD validation pass. No-op unless validation is on.
check_decl(#xmerl_scanner{validation=false}) ->
    ok;
check_decl(#xmerl_scanner{rules=Tab} = S) ->
    check_notations(Tab,S),
    %% NOTE(review): this call was dropped by the comment stripper (only its
    %% trailing comment survived); check_elements/2 is defined below and is
    %% otherwise uncalled - reconstructed after OTP's xmerl_scan.
    check_elements(Tab,S), %% check also attribute defs for element
    check_entities(Tab,S).
%% Fail if any referenced NOTATION is still marked 'undeclared' in the
%% rules table after the whole DTD has been processed.
check_notations(Tab,S) ->
    case ets:match(Tab,{{notation,'$1'},undeclared}) of
        [[]] -> ok;
        [] -> ok;
        [L] when list(L) ->
            ?fatal({error_missing_declaration_in_DTD,hd(L)},S);
        Err ->
            ?fatal({error_missing_declaration_in_DTD,Err},S)
    end.
%% Walk all element definitions in the rules table (in chunks of 10, using
%% the ets:match/3 continuation API) and validate each one's attribute
%% definitions via check_elements2/2.
check_elements(Tab,S) ->
    case ets:match(Tab,{{elem_def,'_'},'$2'},10) of
        {_,_}=M ->
            %% Fun processes one {Matches,Continuation} chunk and recurses
            %% on the continuation until '$end_of_table'.
            Fun = fun({Match,'$end_of_table'},_F) ->
                          lists:foreach(fun(X)->check_elements2(X,S) end,
                                        Match),
                          ok;
                     ('$end_of_table',_) ->
                          ok;
                     ({Match,Cont},F) ->
                          lists:foreach(fun(X)->check_elements2(X,S) end,
                                        Match),
                          F(ets:match(Cont),F)
                  end,
            Fun(M,Fun);
        '$end_of_table' -> ok;
        Err -> ?fatal({error_missing_declaration_in_DTD,Err},S)
    end.
%% Validate one matched element definition's attribute list; anything that
%% is not a single #xmlElement{} match is ignored.
check_elements2([#xmlElement{attributes = AttrDefs}], Scanner) ->
    check_attributes(AttrDefs, Scanner);
check_elements2(_Other, _Scanner) ->
    ok.
%% Validate one element's attribute-definition list:
%% - at most one ID-typed attribute per element (VC: One ID per Element Type)
%% - enumeration / ENTITY / ENTITIES defaults get their own validity checks.
%% Each entry is a 5-tuple {Name, Type, DefaultValue, DefaultDecl, Env}.
check_attributes([{N1,'ID',_,_,_}=Attr|Rest],S) ->
    case lists:keysearch('ID',2,Rest) of
        {value,Att2} ->
            %% a second ID attribute exists on the same element
            ?fatal({error_more_than_one_ID_def,N1,element(1,Att2)},S);
        _ ->
            ok
    end,
    vc_ID_Attribute_Default(Attr,S),
    check_attributes(Rest,S);
check_attributes([{_,{enumeration,_},_,_,_}=Attr|T],S) ->
    vc_Enumeration(Attr,S),
    check_attributes(T,S);
check_attributes([{_,Ent,_,_,_}=Attr|T],S)
  when Ent=='ENTITY';Ent=='ENTITIES' ->
    vc_Entity_Name(Attr,S),
    check_attributes(T,S);
check_attributes([_|T],S) ->
    check_attributes(T,S);
check_attributes([],_S) ->
    ok.
%% Fail (when validating) if any referenced general entity is still marked
%% 'undeclared' in the rules table after the DTD has been processed.
check_entities(Tab,S=#xmerl_scanner{validation=true}) ->
    case ets:match(Tab,{{entity,'$1'},undeclared}) of
        [[]] -> ok;
        [] -> ok;
        [L] when list(L) ->
            ?fatal({error_missing_declaration_in_DTD,hd(L)},S);
        Err ->
            ?fatal({error_missing_declaration_in_DTD,Err},S)
    end;
check_entities(_,_) ->
    ok.
%% check_decl2/1: checks that all referenced ID attributes are declared
%% Entry point for the post-document ID/IDREF consistency check.
check_decl2(S=#xmerl_scanner{rules=Tab}) ->
    check_referenced_ids(Tab,S).
%% Fail if any ID referenced via IDREF/IDREFS was never actually declared
%% (such references are stored as {{id,Name},undeclared} until defined).
check_referenced_ids(Tab,S) ->
    case ets:match(Tab,{{id,'$1'},undeclared}) of
        [[]] -> ok;
        [] -> ok;
        [L] when list(L) ->
            ?fatal({error_missing_declaration_in_DTD,hd(L)},S);
        Err ->
            ?fatal({error_missing_declaration_in_DTD,Err},S)
    end.
%% [30] extSubset ::= TextDecl? extSubsetDecl
%% scan_ext_subset/2: scan the declarations of an external DTD subset.
scan_ext_subset([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_ext_subset(MoreBytes, S1) end,
      fun(S1) -> {[], S1} end,
      S);
scan_ext_subset("%" ++ T, S0) ->
    %% NOTE(review): the next two comment lines had lost their '%%' markers
    %% in the source, breaking compilation; markers restored.
    %% [28a]: WFC: PE Between Declarations.
    %% The replacement text must match the production extSubsetDecl.
    ?bump_col(1),
    {_,T1,S1} = scan_decl_sep(T,S),
    scan_ext_subset(T1, S1);
scan_ext_subset("<![" ++ T, S0) ->
    ?bump_col(3),
    ?strip1,
    {_, T2, S2} = scan_conditional_sect(T1, S1),
    scan_ext_subset(T2,S2);
scan_ext_subset(T, S) when ?whitespace(hd(T)) ->
    {_,T1,S1} = strip(T,S),
    scan_ext_subset(T1, S1);
scan_ext_subset(T, S) ->
    {_, T1, S1} = scan_markup_decl(T, S),
    scan_ext_subset(T1, S1).
%% [28a] DeclSep ::= PEReference | S
%% scan_decl_sep/2: handle a parameter-entity reference appearing between
%% declarations. External PE content is fetched, normalized via
%% scan_entity_value, written back to the rules table, and then scanned as
%% external-subset declarations (with surrounding spaces added per 4.4.8).
scan_decl_sep(T,S=#xmerl_scanner{rules_read_fun=Read,
                                 rules_write_fun=Write,
                                 rules_delete_fun=Delete}) ->
    {PERefName, T1, S1} = scan_pe_reference(T, S),
    {ExpandedRef,S2} =
        case expand_pe_reference(PERefName,S1,as_PE) of
            Tuple when tuple(Tuple) ->
                %% external PE: fetch the raw text, normalize it, and cache
                %% the normalized replacement text under the same name
                {ExpRef,_Sx}=fetch_not_parse(Tuple,S1),
                {EntV,_,_S2} = scan_entity_value(ExpRef, S1, no_delim,
                                                 PERefName,parameter),
                Delete(parameter_entity,PERefName,_S2),
                _S3 = Write(parameter_entity,PERefName,EntV,_S2),
                EntV2 = Read(parameter_entity,PERefName,_S3),
                {" " ++ EntV2 ++ " ",_S3};
            ExpRef ->
                {ExpRef,S1}
        end,
    {_, T3, S3} = strip(ExpandedRef,S2),
    {_T4,S4} = scan_ext_subset(T3,S3),
    strip(T1,S4).
%% [61] conditionalSect ::= includeSect | ignoreSect
%% scan_conditional_sect/2: dispatch an INCLUDE or IGNORE section inside
%% an external DTD subset; the keyword may come from a parameter entity.
scan_conditional_sect([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_conditional_sect(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_conditional_sect("IGNORE" ++ T, S0) ->
    ?bump_col(6),
    ?strip1,
    "[" ++ T2 = T1,
    {_,T3,S3} = strip(T2,S1),
    scan_ignore(T3,S3);
scan_conditional_sect("INCLUDE" ++ T, S0) ->
    ?bump_col(7),
    ?strip1,
    "[" ++ T2 = T1,
    {_,T3,S3} = strip(T2,S1),
    scan_include(T3, S3);
scan_conditional_sect("%"++T,S0) ->
    ?bump_col(1),
    %% NOTE(review): the source contained the ?bump_col(1) line twice; the
    %% duplicate (which re-binds S to the same value, since the macro
    %% derives S from S0) was removed.
    {PERefName, T1, S1} = scan_pe_reference(T, S),
    ExpRef = expand_pe_reference(PERefName, S1,as_PE),
    {_,T2,S2} = strip(ExpRef ++ T1,S1),
    scan_conditional_sect(T2,S2).
%% [63] ignoreSect ::= '<![' S? 'IGNORE' S? '[' ignoreSectContents* ']]>'
%% [64] ignoreSectContents ::= Ignore ('<![' ignoreSectContents ']]>' Ignore)*
%% [65] Ignore ::= Char* - (Char* ('<![' | ']]>') Char*)
%% Entry point: scan an IGNORE section body; Level tracks '<![' nesting.
scan_ignore(Str,S) ->
    scan_ignore(Str,S,0).
%% Skip everything up to the ']]>' that closes the section at nesting
%% Level 0; nested '<![' ... ']]>' pairs are counted, not interpreted.
scan_ignore([], S=#xmerl_scanner{continuation_fun = F},Level) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_ignore(MoreBytes, S1,Level) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_ignore("<![" ++ T, S0,Level) ->
    ?bump_col(3),
    scan_ignore(T, S,Level+1);
scan_ignore("]]>" ++ T, S0,0) ->
    %% closing delimiter of the outermost ignored section
    ?bump_col(3),
    {[], T, S};
scan_ignore("]]>" ++ T, S0,Level) ->
    ?bump_col(3),
    scan_ignore(T, S,Level-1);
scan_ignore([_H|T],S0,Level) ->
    ?bump_col(1),
    scan_ignore(T,S,Level).
%% [62] includeSect ::= '<![' S? 'INCLUDE' S? '[' extSubsetDecl ']]>'
%% scan_include/2: scan the declarations of an INCLUDE section until its
%% closing ']]>'; nested conditional sections and PE references are
%% handled recursively.
scan_include([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_include(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_include("]]>" ++ T, S0) ->
    ?bump_col(3),
    {[], T, S};
scan_include("%" ++ T, S0) ->
    ?bump_col(1),
    {PERefName, T1, S1} = scan_pe_reference(T, S),
    ExpRef = expand_pe_reference(PERefName, S1,as_PE),
    {_,T2,S2} = strip(ExpRef ++ T1,S1),
    scan_include(T2, S2);
scan_include("<![" ++ T, S0) ->
    ?bump_col(3),
    ?strip1,
    {_, T2, S2} = scan_conditional_sect(T1, S1),
    ?strip3,
    scan_include(T3,S3);
scan_include(T, S) ->
    {_, T1, S1} = scan_markup_decl(T, S),
    scan_include(T1, S1).
%% [29] markupdecl ::= elementdecl | AttlistDecl | EntityDecl |
%%                     NotationDecl | PI | Comment
%% [45] elementdecl ::= '<!ELEMENT' S Name S contentspec S? '>'
%% scan_markup_decl/2: dispatch on one markup declaration inside a DTD
%% (comment, PI, <!ELEMENT, <!ENTITY, <!NOTATION or <!ATTLIST).
scan_markup_decl([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_markup_decl(MoreBytes, S1) end,
      fun(S1) -> {[], [], S1} end,
      S);
scan_markup_decl("<!--" ++ T, S0) ->
    ?bump_col(4),
    {_, T1, S1} = scan_comment(T, S),
    ?strip2;
scan_markup_decl("<?" ++ T, S0) ->
    ?bump_col(2),
    {_PI, T1, S1} = scan_pi(T, S,_Pos=markup),
    ?strip2;
scan_markup_decl("<!ELEMENT" ++ T,
                 #xmerl_scanner{rules_read_fun = Read,
                                rules_write_fun = Write,
                                rules_delete_fun = Delete} = S0) ->
    ?bump_col(9),
    {_,T1,S1} = mandatory_strip(T,S),
    {Ename, _NamespaceInfo, T2, S2} = scan_name(T1, S1),
    Element =
        case Read(elem_def, Ename, S2) of
            El = #xmlElement{elementdef=Decl} when Decl /= undeclared ->
                case S2#xmerl_scanner.validation of
                    true ->
                        %% VC: Unique Element Type Declaration
                        ?fatal({already_defined, Ename}, S2);
                    _ ->
                        Delete(elem_def,Ename,S2),
                        El
                end;
            El = #xmlElement{} ->
                Delete(elem_def,Ename,S2),
                El;
            undefined ->
                #xmlElement{}
        end,
    {_,T3,S3} = mandatory_strip(T2,S2),
    {Edef, T4, S4} = scan_contentspec(T3, S3),
    ?strip5,
    {">" ++ T6,S6} = scan_element_completion(T5,S5),
    S7 = Write(elem_def, Ename,
               Element#xmlElement{name = Ename,
                                  content = Edef,
                                  elementdef=S6#xmerl_scanner.environment}, S6),
    strip(T6,S7);
scan_markup_decl("<!ENTITY" ++ T, S0) ->
    ?bump_col(8),
    {_,T1,S1} = mandatory_strip(T,S),
    {T2, S2} = scan_entity(T1, S1),
    ?strip3;
scan_markup_decl("<!NOTATION" ++ T, S0) ->
    %% <!NOTATION notation.name "public.identifier" "helper.application">
    ?bump_col(10),
    {_,T1,S1} = mandatory_strip(T,S),
    {T2, S2} = scan_notation_decl(T1, S1),
    ?strip3;
scan_markup_decl("<!ATTLIST" ++ T,
                 #xmerl_scanner{rules_read_fun = Read,
                                rules_write_fun = Write,
                                rules_delete_fun= Delete} = S0) ->
    %% <!ATTLIST Ename (AttrName Type Value)*>
    ?bump_col(9),
    {_,T1,S1} = mandatory_strip(T,S),
    {Ename, _NamespaceInfo, T2, S2} = scan_name(T1, S1),
    %% NOTE(review): a line here was garbled to "? ," in the source and has
    %% been dropped; scan_attdef/2 performs its own mandatory_strip, so no
    %% whitespace handling is lost. Confirm against upstream xmerl_scan.
    {Attributes, T4, S4} = scan_attdef(T2, S2),
    {EDEF,MergedAttrs} =
        case Read(elem_def, Ename, S4) of
            undefined ->
                %% NOTE(review): this 'undefined ->' clause head was missing
                %% in the garbled source; it may happen when the ELEMENT is
                %% declared in the external DTD but the ATTLIST in the
                %% internal DTD.
                {#xmlElement{},update_attributes(Attributes,[])};
            Edef = #xmlElement{attributes = OldAttrs} ->
                Delete(elem_def,Ename,S4),
                {Edef,update_attributes(Attributes, OldAttrs)}
        end,
    NewEdef = EDEF#xmlElement{name=Ename,attributes = MergedAttrs},
    S5 = Write(elem_def, Ename, NewEdef, S4),
    T5 = T4,
    ?strip6.
%% Consume whitespace/PEs up to the '>' that ends an <!ELEMENT declaration.
scan_element_completion(T,S) ->
    scan_markup_completion_gt(T,S).
%% update_attributes/2: merge a freshly scanned ATTLIST with any previously
%% stored definitions for the same element. Existing (old) definitions win;
%% genuinely new attribute names are appended in scan order.
update_attributes(NewAttrs, OldAttrs) ->
    update_attributes1(NewAttrs, lists:reverse(OldAttrs)).

%% Fold each new 5-tuple {Name,Type,DefVal,DefDecl,Env} into the reversed
%% accumulator unless an entry with the same Name is already present.
update_attributes1(NewAttrs, RevAcc) ->
    Merged =
        lists:foldl(
          fun(Attr = {Name, _Type, _DefV, _DefD, _Env}, Acc) ->
                  case lists:keymember(Name, 1, Acc) of
                      true  -> Acc;
                      false -> [Attr | Acc]
                  end
          end, RevAcc, NewAttrs),
    lists:reverse(Merged).
%% [53] AttDef ::= S Name S AttType S DefaultDecl
%% scan_attdef/2: entry point for scanning the AttDef* list of an ATTLIST.
scan_attdef([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_attdef(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_attdef(T, S) ->
    scan_attdef(T, S, _AttrAcc = []).
%% scan_attdef/3: accumulate attribute definitions until '>'.
%% PE references inside the internal subset's markup are a WFC violation.
scan_attdef([], S=#xmerl_scanner{continuation_fun = F}, Attrs) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_attdef(MoreBytes, S1, Attrs) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_attdef(">" ++ T, S0, Attrs) ->
    ?bump_col(1),
    {lists:reverse(Attrs), T, S};
scan_attdef("%" ++ _T, S=#xmerl_scanner{environment=prolog}, _Attrs) ->
    %% WFC: PEs In Internal Subset
    ?fatal({error,{wfc_PEs_In_Internal_Subset}},S);
scan_attdef("%" ++ T, S0, Attrs) ->
    ?bump_col(1),
    {PERefName, T1, S1} = scan_pe_reference(T, S0),
    ExpRef = expand_pe_reference(PERefName, S1,as_PE),
    {_,T2,S2} = strip(ExpRef ++ T1,S1),
    scan_attdef(T2, S2, Attrs);
scan_attdef(T,S,Attrs) ->
    %% whitespace is mandatory before each AttDef
    {_,T1,S1} = mandatory_strip(T,S),
    scan_attdef2(T1,S1,Attrs).
%% scan_attdef2/3: scan one "Name Type DefaultDecl" triple (after the
%% mandatory leading whitespace has been consumed) and recurse.
scan_attdef2(">" ++ T, S0, Attrs) ->
    ?bump_col(1),
    {lists:reverse(Attrs), T, S};
scan_attdef2("%" ++ _T, S=#xmerl_scanner{environment=prolog}, _Attrs) ->
    ?fatal({error,{wfc_PEs_In_Internal_Subset}},S);
scan_attdef2("%" ++ T, S0, Attrs) ->
    ?bump_col(1),
    {PERefName, T1, S1} = scan_pe_reference(T, S0),
    ExpRef = expand_pe_reference(PERefName, S1,as_PE),
    {_,T2,S2} = strip(ExpRef ++ T1,S1),
    scan_attdef2(T2, S2, Attrs);
scan_attdef2(T, S, Attrs) ->
    {AttName, _NamespaceInfo, T1, S1} = scan_name(T, S),
    {_,T2,S2} = mandatory_strip(T1,S1),
    {AttType, T3, S3} = scan_att_type(T2, S2),
    {_,T4,S4} = mandatory_strip(T3,S3),
    {{DefaultDecl,DefaultValue}, T5, S5} = scan_default_decl(T4, S4, AttType),
    ?strip6,
    %% stored as a 5-tuple; Env records where the definition was seen
    Attr = {AttName, AttType,DefaultValue,DefaultDecl,
            S#xmerl_scanner.environment},
    scan_attdef2(T6, S6, [Attr|Attrs]).
%% [54] StringType ::= 'CDATA'
%% scan_att_type/2: recognize an attribute type keyword, a NOTATION type,
%% an enumeration, or a parameter-entity reference standing for the type.
scan_att_type([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_att_type(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_att_type("CDATA" ++ T, S0) ->
    ?bump_col(5),
    {'CDATA', T, S};
%% [55] TokenizedType
%% NOTE(review): this production comment had lost its '%%' marker in the
%% source; marker restored. Longer keywords must be matched before their
%% prefixes (IDREFS before IDREF before ID, etc.).
scan_att_type("IDREFS" ++ T, S0) ->
    ?bump_col(6),
    {'IDREFS', T, S};
scan_att_type("IDREF" ++ T, S0) ->
    ?bump_col(5),
    {'IDREF', T, S};
scan_att_type("ID" ++ T, S0) ->
    ?bump_col(2),
    {'ID', T, S};
scan_att_type("ENTITY" ++ T, S0) ->
    ?bump_col(6),
    {'ENTITY', T, S};
scan_att_type("ENTITIES" ++ T, S0) ->
    ?bump_col(8),
    {'ENTITIES', T, S};
scan_att_type("NMTOKENS" ++ T, S0) ->
    ?bump_col(8),
    {'NMTOKENS', T, S};
scan_att_type("NMTOKEN" ++ T, S0) ->
    ?bump_col(7),
    {'NMTOKEN', T, S};
%% [57] EnumeratedType (comment marker restored, see note above)
scan_att_type("NOTATION" ++ T, S0) ->
    ?bump_col(8),
    {_,T1,S1} = mandatory_strip(T,S),
    "(" ++ T2 = T1,
    S2 = S1,
    ?strip3,
    {Name, _NamespaceInfo, T4, S4} = scan_name(T3, S3),
    notation_exists(Name, S4),
    ?strip5,
    scan_notation_type(T5, S5, [Name]);
scan_att_type("(" ++ T, S0) ->
    ?bump_col(1),
    ?strip1,
    {NmToken, _NamespaceInfo, T2, S2} = scan_nmtoken(T1, S1),
    ?strip3,
    scan_enumeration(T3, S3, [NmToken]);
scan_att_type("%" ++ _T, S=#xmerl_scanner{environment=prolog}) ->
    ?fatal({error,{wfc_PEs_In_Internal_Subset}},S);
scan_att_type("%" ++ T, S0) ->
    ?bump_col(1),
    {PERefName, T1, S1} = scan_pe_reference(T, S0),
    ExpRef = expand_pe_reference(PERefName, S1,in_literal),
    {ExpRef,T1,S1}.
%% [58] NotationType ::= 'NOTATION' S '(' S? Name (S? '|' S? Name)* S? ')'
%% scan_notation_type/3: collect the '|'-separated notation names of a
%% NOTATION attribute type; each name is checked/registered immediately.
scan_notation_type([], S=#xmerl_scanner{continuation_fun = F}, Acc) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_notation_type(MoreBytes, S1, Acc) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_notation_type(")" ++ T, S0, Acc) ->
    ?bump_col(1),
    {{notation, lists:reverse(Acc)}, T, S};
scan_notation_type("|" ++ T, S0, Acc) ->
    ?bump_col(1),
    ?strip1,
    {Name, _NamespaceInfo, T2, S2} = scan_name(T1, S1),
    notation_exists(Name, S2),
    ?strip3,
    scan_notation_type(T3, S3, [Name | Acc]).
%% Validity constraint for NotationType:
%% The used notation names must be declared in the DTD, but they may be
%% declared later than the attribute definition that references them.
%% notation_exists/2: ensure a referenced NOTATION is tracked; unknown
%% names are recorded as 'undeclared' and re-checked after the DTD
%% (see check_notations/2).
notation_exists(Name, #xmerl_scanner{rules_read_fun = Read,
                                     rules_write_fun = Write } = S) ->
    case Read(notation, Name, S) of
        undefined ->
            %% this is legal, since the referenced NOTATION
            %% may be declared later in the DTD
            %% (NOTE(review): comment marker restored; the line had lost
            %% its '%%' in the source, breaking compilation.)
            Write(notation,Name,undeclared,S);
        _Value ->
            ok
    end.
%% [59] Enumeration ::= '(' S? Nmtoken (S? '|' S? Nmtoken)* S? ')'
%% scan_enumeration/3: collect the '|'-separated Nmtokens of an
%% enumerated attribute type, up to the closing ')'.
scan_enumeration([], S=#xmerl_scanner{continuation_fun = F}, Acc) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_enumeration(MoreBytes, S1, Acc) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_enumeration(")" ++ T, S0, Acc) ->
    ?bump_col(1),
    {{enumeration, lists:reverse(Acc)}, T, S};
scan_enumeration("|" ++ T, S0, Acc) ->
    ?bump_col(1),
    ?strip1,
    {NmToken, _NamespaceInfo, T2, S2} = scan_nmtoken(T1, S1),
    ?strip3,
    scan_enumeration(T3, S3, [NmToken|Acc]).
%% [60] DefaultDecl ::= '#REQUIRED' | '#IMPLIED' | (('#FIXED' S)? AttValue)
%% scan_default_decl/3: scan an attribute's default declaration; returns
%% {{Decl, Value}, Tail, Scanner} where Decl is '#REQUIRED', '#IMPLIED',
%% '#FIXED' or no_decl, and Value is no_value or the scanned default.
scan_default_decl([], S=#xmerl_scanner{continuation_fun = F}, Type) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_default_decl(MoreBytes, S1, Type) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_default_decl("#REQUIRED" ++ T, S0, _Type) ->
    ?bump_col(9),
    {{'#REQUIRED',no_value}, T, S};
scan_default_decl("#IMPLIED" ++ T, S0, _Type) ->
    ?bump_col(8),
    {{'#IMPLIED',no_value}, T, S};
scan_default_decl("#FIXED" ++ T, S0, Type) ->
    ?bump_col(6),
    {_,T1,S1} = mandatory_strip(T,S),
    {Value,T2,S2,_} = default_value(T1, S1, Type),
    {{'#FIXED',Value},T2,S2};
scan_default_decl(Str, S, Type) ->
    {Value,T1,S1,_} = default_value(Str, S, Type),
    {{no_decl,Value},T1,S1}.
%% Scan a quoted default AttValue; the bound pattern also asserts the
%% 4-tuple result shape of scan_att_value/3.
default_value(T, S, Type) ->
    {_Val, _T1, _S1,_} = scan_att_value(T, S, Type).
%% [71] GEDecl ::= '<!ENTITY' S Name S EntityDef S? '>'
%% [72] PEDecl ::= '<!ENTITY' S '%' S Name S PEDef S? '>'
%% scan_entity/2: scan one <!ENTITY ...> declaration, either a parameter
%% entity ("%" form) or a general entity, and record it in the rules table.
scan_entity([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_entity(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_entity("%" ++ T, #xmerl_scanner{rules_write_fun = Write} = S0) ->
    %% parameter entity: <!ENTITY % Name PEDef>
    ?bump_col(1),
    {_,T1,S1} = mandatory_strip(T,S),
    {PEName, _NamespaceInfo, T2, S2} = scan_name_no_colons(T1, S1),
    {_,T3,S3} = mandatory_strip(T2,S2),
    {PEDef, T4, S4} = scan_pe_def(T3, S3, PEName),
    ?strip5,
    {">" ++ T6,S6} = scan_entity_completion(T5,S5),
    S7 = Write(parameter_entity, PEName, PEDef, S6),
    {T6, S7};
scan_entity(T, #xmerl_scanner{rules_write_fun = Write,
                              rules_read_fun = Read,
                              rules_delete_fun = Delete} = S) ->
    %% general entity: <!ENTITY Name EntityDef>
    {EName, _NamespaceInfo, T1, S1} = scan_name_no_colons(T, S),
    {_,T2,S2} = mandatory_strip(T1,S1),
    {EDef, EntType, T3, S3} = scan_entity_def(T2, S2, EName),
    check_entity_recursion(EName,S3),
    ?strip4,
    {">" ++ T5,S5} = scan_entity_completion(T4,S4),
    %% a forward reference may have left an 'undeclared' marker: clear it
    case Read(entity,EName,S5) of
        undeclared -> Delete(entity,EName,S5);
        _ -> ok
    end,
    S6 = Write(entity, EName, {S5#xmerl_scanner.environment,EntType,EDef}, S5),
    {T5, S6}.
%% Consume whitespace/PEs up to the '>' that ends an <!ENTITY declaration.
scan_entity_completion(T,S) ->
    scan_markup_completion_gt(T,S).
%% [73] EntityDef ::= EntityValue | (ExternalID NDataDecl?)
%% scan_entity_def/3: scan an entity's definition, which is either a quoted
%% internal EntityValue or an ExternalID (optionally with an NDATA decl).
%% Returns {Def, internal|external, Tail, Scanner}.
scan_entity_def([], S=#xmerl_scanner{continuation_fun = F}, EName) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_entity_def(MoreBytes, S1, EName) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_entity_def("'" ++ T, S0, EName) ->
    ?bump_col(1),
    {EVal,T1,S1}=scan_entity_value(T, S, $', EName,general),
    {EVal,internal,T1,S1};
scan_entity_def("\"" ++ T, S0, EName) ->
    ?bump_col(1),
    {EVal,T1,S1}=scan_entity_value(T, S, $", EName,general),
    {EVal,internal,T1,S1};
scan_entity_def(Str, S, EName) ->
    {ExtID, T1, S1} = scan_external_id(Str, S),
    {NData, T2, S2} = scan_ndata_decl(T1, S1),
    case NData of
        {ndata,_} ->
            %% if NDATA exists it is an unparsed ENTITY
            {{ExtID,NData},external,T2,S2};
        _ ->
            case fetch_and_parse(ExtID,S2,
                                 [{text_decl,true},
                                  {environment,{external,{entity,EName}}}]) of
                {{_USret,Entity},_Tail,_Sx} ->
                    {Entity, external,T2, S2};
                {Entity,_Tail,Sx} ->
                    OldRef=S2#xmerl_scanner.entity_references,
                    NewRef=Sx#xmerl_scanner.entity_references,
                    %% NOTE(review): the source returned a 3-tuple here,
                    %% but every caller (see scan_entity/2) destructures a
                    %% 4-tuple; the 'external' tag was restored.
                    {Entity,external,T2,
                     S2#xmerl_scanner{entity_references=OldRef++NewRef}};
                {error,enoent} ->
                    %% NOTE(review): this clause head was missing in the
                    %% garbled source; reconstructed - confirm upstream.
                    {{error,enoent},external,T2,S2}
            end
    end.
%% scan_ndata_decl/2: scan an optional NDataDecl following an ExternalID;
%% returns [] when none is present (the next token is '>').
scan_ndata_decl([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_ndata_decl(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_ndata_decl(Str = ">"++_T, S) ->
    {[], Str, S};
scan_ndata_decl(T, S) ->
    {_,T1,S1} = mandatory_strip(T,S),
    scan_ndata_decl2(T1,S1).
%% scan_ndata_decl2/2: after mandatory whitespace, either the declaration
%% ends ('>') or an "NDATA Name" follows; the notation name is registered
%% as 'undeclared' if not yet known (it may be declared later).
scan_ndata_decl2(Str = ">"++_T,S) ->
    {[], Str, S};
scan_ndata_decl2("NDATA" ++ T,S0 = #xmerl_scanner{rules_read_fun = Read,
                                                  rules_write_fun = Write}) ->
    ?bump_col(5),
    {_,T1,S1} = mandatory_strip(T,S),
    {Name, _NamespaceInfo, T2, S2} = scan_name(T1, S1),
    case Read(notation, Name, S2) of
        undefined ->
            %% NOTE(review): this 'undefined ->' clause head was missing in
            %% the garbled source; reconstructed after OTP's xmerl_scan.
            %% this is legal, since the referenced NOTATION
            %% may be declared later in the DTD
            Write(notation,Name,undeclared,S2),
            {{ndata,Name},T2,S2};
        _Value ->
            {{ndata, Name}, T2, S2}
    end.
%% [39] element ::= EmptyElemTag | STag content ETag
%% scan_element/3: top-level entry; starts with the scanner's configured
%% xml:space default, no language and no parents, and a fresh namespace.
scan_element(T, S, Pos) ->
    scan_element(T, S, Pos, S#xmerl_scanner.space,
                 _Lang = [], _Parents = [], #xmlNamespace{}).
%% scan_element/7: scan the element name (recording the start line/column
%% for the 'started' event) then continue with the attribute loop.
scan_element(T, S=#xmerl_scanner{line=L,col=C},
             Pos, SpaceDefault,Lang, Parents, NS) ->
    {Name, NamespaceInfo, T1, S1} = scan_name(T, S),
    vc_Element_valid(Name,S),
    ?strip2,
    scan_element(T2, S2, Pos, Name, L, C, _Attrs = [],
                 Lang, Parents, NamespaceInfo, NS,
                 SpaceDefault).
%% scan_element/12, continuation clauses: a lone "/" or empty input means
%% the start tag is split across a buffer boundary - request more bytes.
scan_element("/", S=#xmerl_scanner{continuation_fun = F},
             Pos, Name, StartL, StartC, Attrs, Lang, Parents,
             NSI, NS, SpaceDefault) ->
    ?dbg("trailing / detected~n", []),
    F(fun(MoreBytes, S1) -> scan_element("/" ++ MoreBytes, S1,
                                         Pos, Name, StartL, StartC, Attrs,
                                         Lang,Parents,NSI,NS,SpaceDefault) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_element([], S=#xmerl_scanner{continuation_fun = F},
             Pos, Name, StartL, StartC, Attrs, Lang, Parents,
             NSI, NS, SpaceDefault) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_element(MoreBytes, S1,
                                         Pos, Name, StartL, StartC, Attrs,
                                         Lang,Parents,NSI,NS,SpaceDefault) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
%% Empty-element tag "<Name .../>" : build the element, fire the 'ended'
%% event and hook, and restore the caller's xml:base.
scan_element("/>" ++ T, S0 = #xmerl_scanner{hook_fun = Hook,
                                            event_fun = Event,
                                            line = L, col = C,
                                            xmlbase_cache=XMLBase}, Pos,
             Name, _StartL, _StartC, Attrs0, Lang, Parents, NSI,
             Namespace, _SpaceDefault) ->
    ?bump_col(2),
    Attrs = lists:reverse(Attrs0),
    E=processed_whole_element(S, Pos, Name, Attrs, Lang, Parents,NSI,Namespace),
    %% WFC: Unique Att Spec
    wfc_unique_att_spec(Attrs,S),
    S1 = #xmerl_scanner{} = Event(#xmerl_event{event = ended,
                                               line = L,
                                               col = C,
                                               data = E}, S0),
    {Ret, S2} = Hook(E, S1),
    %% restore the xml:base that was current before this element
    S2b=S2#xmerl_scanner{xmlbase=XMLBase},
    {Ret, T, S2b};
%% A lone ">" at a buffer boundary: request more bytes before committing.
scan_element(">", S=#xmerl_scanner{continuation_fun = F},
             Pos, Name, StartL, StartC, Attrs, Lang, Parents,
             NSI, NS, SpaceDefault) ->
    ?dbg("trailing > detected~n", []),
    F(fun(MoreBytes, S1) -> scan_element(">" ++ MoreBytes, S1,
                                         Pos, Name, StartL, StartC, Attrs,
                                         Lang,Parents,NSI,NS,SpaceDefault) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
%% ">" ends the start tag: emit 'started', scan the content, then emit
%% 'ended' and run the hook on the finished element.
scan_element(">" ++ T, S0 = #xmerl_scanner{event_fun = Event,
                                           hook_fun = Hook,
                                           line = L, col = C,
                                           xmlbase_cache=XMLBase,
                                           space = SpaceOption},
             Pos, Name, StartL, StartC, Attrs0, Lang, Parents,
             NSI, Namespace, SpaceDefault) ->
    ?bump_col(1),
    Attrs = lists:reverse(Attrs0),
    wfc_unique_att_spec(Attrs,S),
    %% xml:space handling (XML 1.0 sect. 2.10).
    %% NOTE(review): lists:keysearch/3 returns {value,Tuple}, so the
    %% {value,"default"} / {value,"preserve"} clauses can never match and
    %% the catch-all always wins - looks like a latent bug; confirm upstream.
    XMLSpace = case lists:keysearch('xml:space', 1, Attrs) of
                   false -> SpaceDefault;
                   {value, "default"} -> SpaceOption;
                   {value, "preserve"} -> preserve;
                   _ -> SpaceDefault
               end,
    E0=processed_whole_element(S,Pos,Name,Attrs,Lang,Parents,NSI,Namespace),
    S1 = #xmerl_scanner{} = Event(#xmerl_event{event = started,
                                               line = StartL,
                                               col = StartC,
                                               data = E0}, S),
    {Content, T1, S2} = scan_content(T, S1, Name, Attrs, XMLSpace,
                                     E0#xmlElement.language,
                                     [{Name, Pos}|Parents], Namespace),
    Element=E0#xmlElement{content=Content,
                          xmlbase=E0#xmlElement.xmlbase},
    S3 = #xmerl_scanner{} = Event(#xmerl_event{event = ended,
                                               line = L,
                                               col = C,
                                               data = Element}, S2),
    {Ret, S4} = Hook(Element, S3),
    %% restore the xml:base that was current before this element
    S4b=S4#xmerl_scanner{xmlbase=XMLBase},
    {Ret, T1, S4b};
%% Attribute clause: scan one Name="Value" pair, track namespaces and
%% xml:base, emit an 'ended' event for the attribute, and recurse.
scan_element(T, S, Pos, Name, StartL, StartC, Attrs, Lang, Parents,
             NSI, NS, SpaceDefault) ->
    {AttName, NamespaceInfo, T1, S1} = scan_name(T, S),
    {T2, S2} = scan_eq(T1, S1),
    {AttType,_DefaultDecl} = get_att_type(S2,AttName,Name),
    {AttValue, T3, S3,IsNorm} = scan_att_value(T2, S2, AttType),
    %% NOTE(review): the next call had lost its '%%' marker in the source,
    %% leaving an unbound 'DefaultDecl' (note the '_DefaultDecl' binding
    %% above, which shows the call was deliberately disabled). Restored as
    %% a comment.
    %% check_default_value(S3,DefaultDecl,AttValue),
    NewNS = check_namespace(AttName, NamespaceInfo, AttValue, NS),
    %% WFC: whitespace is required between attributes
    wfc_whitespace_betw_attrs(hd(T3),S3),
    ?strip4,
    AttrPos = case Attrs of
                  [] ->
                      1;
                  [#xmlAttribute{pos = P}|_] ->
                      P+1
              end,
    Attr = #xmlAttribute{name = AttName,
                         pos = AttrPos,
                         language = Lang,
                         namespace = NamespaceInfo,
                         value = AttValue,
                         normalized = IsNorm},
    %% an xml:base attribute rebases all relative URIs within this element
    XMLBase=if
                AttName=='xml:base' ->
                    resolve_relative_uri(AttValue,S4#xmerl_scanner.xmlbase);
                true ->
                    S4#xmerl_scanner.xmlbase
            end,
    #xmerl_scanner{event_fun = Event,
                   line = Line,
                   col = Col} = S4,
    S5 = Event(#xmerl_event{event = ended,
                            line = Line,
                            col = Col,
                            data = Attr},
               S4#xmerl_scanner{xmlbase=XMLBase,
                                xmlbase_cache=S#xmerl_scanner.xmlbase}),
    scan_element(T4, S5, Pos, Name, StartL, StartC, [Attr|Attrs],
                 Lang, Parents, NSI, NewNS, SpaceDefault).
%% get_att_type/3: look up the declared {Type, DefaultDecl} for an
%% attribute; anything not declared falls back to {'CDATA', no_value}.
get_att_type(S=#xmerl_scanner{rules_read_fun=Read},AttName,ElemName) ->
    case Read(elem_def,ElemName,S) of
        #xmlElement{attributes = Attrs} ->
            case lists:keysearch(AttName,1,Attrs) of
                {value,{_,AttType,_,DefaultDecl,_}} ->
                    {AttType,DefaultDecl};
                _ ->
                    %% NOTE(review): this fallback clause head was missing
                    %% in the garbled source (its comment survived);
                    %% an undefined attribute shall be treated as CDATA.
                    {'CDATA',no_value}
            end;
        _ -> {'CDATA',no_value}
    end.
%% resolve_relative_uri/2: resolve an xml:base value against the current
%% base. An absolute path keeps the current scheme/host/port (when the
%% current base parses as a URI); anything else is joined path-wise.
resolve_relative_uri(NewBase="/"++_,CurrentBase) ->
    case uri:parse(CurrentBase) of
        {error,no_scheme} ->
            NewBase;
        {Scheme,Host,Port,_PathQuery} ->
            atom_to_list(Scheme)++Host++":"++integer_to_list(Port)++NewBase
    end;
resolve_relative_uri(NewBase,CurrentBase) ->
    filename:join(CurrentBase,NewBase).
%% processed_whole_element/8: build the #xmlElement{} for a completed start
%% tag, expanding the element and attribute names when the scanner is
%% namespace conformant.
processed_whole_element(S=#xmerl_scanner{hook_fun = _Hook,
                                         xmlbase = XMLBase,
                                         line = _L, col = _C,
                                         event_fun = _Event},
                        Pos, Name, Attrs, Lang, Parents, NSI, Namespace) ->
    Language = check_language(Attrs, Lang),
    {ExpName, ExpAttrs} =
        case S#xmerl_scanner.namespace_conformant of
            true ->
                %% NOTE(review): the next two comment lines had lost their
                %% '%%' markers in the source (earlier lines of the same
                %% comment were dropped entirely); markers restored.
                %% (as far as I can tell), XML Names only specifies that
                %% namespace attrs are valid within the element's scope.
                %% Note that the default URI does not apply to attribute
                %% names.
                TempNamespace = Namespace#xmlNamespace{default = []},
                ExpAttrsX =
                    [A#xmlAttribute{
                       expanded_name=expanded_name(
                                       A#xmlAttribute.name,
                                       A#xmlAttribute.namespace,
                                       NSI ,
                                       TempNamespace, S)} || A <- Attrs],
                {expanded_name(Name, NSI, Namespace, S), ExpAttrsX};
            false ->
                {Name, Attrs}
        end,
    #xmlElement{name = Name,
                xmlbase = XMLBase,
                pos = Pos,
                parents = Parents,
                attributes = ExpAttrs,
                language = Language,
                expanded_name = ExpName,
                nsinfo = NSI,
                namespace = Namespace}.
%% check_language/2: return the value of the first xml:lang attribute, or
%% the inherited language when none is present.
check_language([Attr | Rest], Inherited) ->
    case Attr of
        #xmlAttribute{name = 'xml:lang', value = NewLang} ->
            NewLang;
        _ ->
            check_language(Rest, Inherited)
    end;
check_language([], Inherited) ->
    Inherited.
%% check_namespace/4: fold one attribute into the namespace context -
%% 'xmlns' sets the default namespace, 'xmlns:Prefix' binds a prefix, and
%% every other attribute leaves the context untouched.
check_namespace(xmlns, _, Value, NS) ->
    NS#xmlNamespace{default = list_to_atom(Value)};
check_namespace(_, {"xmlns", Prefix}, Value,
                NS = #xmlNamespace{nodes = Ns}) ->
    NS#xmlNamespace{nodes = keyreplaceadd(
                              Prefix, 1, Ns, {Prefix, list_to_atom(Value)})};
check_namespace(_, _, _, NS) ->
    NS.
%% expanded_name/4: compute a name's {URI, LocalName} expansion under the
%% current namespace context; an undeclared prefix is fatal.
expanded_name(Name, [], #xmlNamespace{default = []}, _S) ->
    Name;
expanded_name(Name, [], #xmlNamespace{default = URI}, _S) ->
    {URI, Name};
expanded_name(_Name, {"xmlns", Local}, _NS, _S) ->
    %% NOTE(review): this clause head was missing from the garbled source
    %% (only the body remained); reconstructed after OTP's xmerl_scan -
    %% confirm against upstream.
    {"xmlns",Local};
expanded_name(_Name, {Prefix, Local}, #xmlNamespace{nodes = Ns}, S) ->
    case lists:keysearch(Prefix, 1, Ns) of
        {value, {_, URI}} ->
            {URI, list_to_atom(Local)};
        false ->
            ?fatal({namespace_prefix_not_declared, Prefix}, S)
    end.
%% keyreplaceadd/4: like lists:keyreplace/4, but appends Obj when no tuple
%% with the given key exists in the list.
keyreplaceadd(Key, Index, [Head | Tail], Obj) when Key == element(Index, Head) ->
    [Obj | Tail];
keyreplaceadd(Key, Index, [Head | Tail], Obj) ->
    [Head | keyreplaceadd(Key, Index, Tail, Obj)];
keyreplaceadd(_Key, _Index, [], Obj) ->
    [Obj].
%% [10] AttValue ::= '"' ([^<&"] | Reference)* '"' | "'" ([^<&'] | Reference)* "'"
%% normalize the attribute value according to XML 1.0 section 3.3.3
%% scan_att_value/3: scan a quoted attribute value (the delimiter may be
%% ' or "); non-CDATA types get whitespace-normalized. A parameter entity
%% may stand for the whole value in the external subset.
scan_att_value([], S=#xmerl_scanner{continuation_fun = F},AT) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_att_value(MoreBytes, S1, AT) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_att_value("%"++_T,S=#xmerl_scanner{environment=prolog},_AttType) ->
    %% WFC: PEs In Internal Subset
    ?fatal({error,{wfc_PEs_In_Internal_Subset}},S);
scan_att_value("%"++T,S0=#xmerl_scanner{rules_read_fun=Read,
                                        rules_write_fun=Write,
                                        rules_delete_fun=Delete},AttType) ->
    ?bump_col(1),
    {Name,T1,S1} = scan_pe_reference(T,S),
    {ExpandedRef,S2} =
        case expand_pe_reference(Name,S1,in_literal) of
            Tuple when tuple(Tuple) ->
                %% external PE: fetch, normalize and cache its value
                {ExpRef,_Sx}=fetch_not_parse(Tuple,S1),
                {EntV,_,_S2} = scan_entity_value(ExpRef, S1, no_delim,
                                                 Name,parameter),
                Delete(parameter_entity,Name,_S2),
                _S3 = Write(parameter_entity,Name,EntV,_S2),
                EntV2 = Read(parameter_entity,Name,_S3),
                {EntV2,_S3};
            ExpRef ->
                {ExpRef,S1}
        end,
    {_,T2,S3} = strip(ExpandedRef ++ T1,S2),
    scan_att_value(T2,S3,AttType);
scan_att_value([H|T], S0,'CDATA'=AT) when H == $"; H == $' ->
    ?bump_col(1),
    scan_att_chars(T, S, H, [],[], AT,false);
scan_att_value([H|T], S0,AttType) when H == $"; H == $' ->
    %% tokenized types: leading whitespace is normalized away first
    ?bump_col(1),
    {T1,S1,IsNorm} = normalize(T,S,false),
    scan_att_chars(T1, S1, H, [],[], AttType,IsNorm).
%% scan_att_chars/7: scan the characters of an attribute value up to the
%% closing delimiter, expanding references and (for non-CDATA types)
%% normalizing whitespace. TmpAcc collects the current token (reversed)
%% for per-token validity checks.
scan_att_chars([],S=#xmerl_scanner{continuation_fun=F},H,Acc,TmpAcc,AT,IsNorm)->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) ->
              scan_att_chars(MoreBytes, S1, H, Acc,TmpAcc,AT,IsNorm)
      end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
%% NOTE(review): the next three clause heads were missing from the garbled
%% source (only the bodies remained); reconstructed after OTP's xmerl_scan
%% - confirm against upstream.
scan_att_chars([H|T], S0, H, Acc, TmpAcc, AttType, IsNorm) ->
    %% the character equals the opening delimiter: end of the value
    ?bump_col(1),
    check_att_default_val(S#xmerl_scanner.validation,TmpAcc,AttType,S),
    {Acc2,S2,IsNorm2} =
        if
            AttType == 'CDATA' -> {Acc,S,IsNorm};
            true ->
                normalize(Acc,S,IsNorm)
        end,
    {lists:reverse(Acc2), T, S2,IsNorm2};
scan_att_chars("&" ++ T, S0, Delim, Acc, TmpAcc, AT, IsNorm) ->
    %% entity or character reference inside the value
    ?bump_col(1),
    {ExpRef, T1, S1} = scan_reference(T, S),
    case markup_delimeter(ExpRef) of
        true ->
            scan_att_chars(T1,S1,Delim,[ExpRef|Acc],[ExpRef|TmpAcc],AT,IsNorm);
        _ ->
            scan_att_chars(ExpRef ++ T1, S1, Delim, Acc,TmpAcc, AT,IsNorm)
    end;
scan_att_chars("<" ++ _T, S0, _Delim, _Acc, _TmpAcc, _AT, _IsNorm) ->
    %% WFC: No < in Attribute Values
    ?fatal(unexpected_char, S0);
scan_att_chars([H|T], S0, Delim, Acc, _TmpAcc,'CDATA',IsNorm)
  when ?whitespace(H) ->
    %% CDATA: each whitespace char becomes a single space
    ?bump_col(1),
    scan_att_chars(T, S, Delim, [$\s|Acc], [],'CDATA',IsNorm);
scan_att_chars([H|T], S0, Delim, Acc, TmpAcc,AT,IsNorm)
  when ?whitespace(H) ->
    %% tokenized types: collapse the whitespace run and check the token
    ?bump_col(1),
    {T1,S1,IsNorm2} = normalize(T,S,IsNorm),
    check_att_default_val(S#xmerl_scanner.validation,TmpAcc,AT,S1),
    scan_att_chars(T1, S1, Delim, [$\s|Acc],[], AT,IsNorm2);
scan_att_chars([H|T], S0, Delim, Acc, TmpAcc,AT,IsNorm) ->
    ?bump_col(1),
    valid_Char(S#xmerl_scanner.validation,AT,H,S),
    scan_att_chars(T, S, Delim, [H|Acc], [H|TmpAcc],AT,IsNorm).
%% markup_delimeter/1: true when the (already expanded) reference text is
%% one of the single-character markup delimiters that must be kept as a
%% separate list element rather than spliced back into the input.
markup_delimeter(Str) ->
    lists:member(Str, ["&", "\"", "\'", "<", ">", "%"]).
%% check_att_default_val/4: validation hook for one token collected while
%% scanning an attribute value; only active when validation is on and a
%% non-empty token was gathered.
check_att_default_val(true,[],_Ent,_S) ->
    ok;
check_att_default_val(true,RevName,Ent,S) ->
    %% RevName was accumulated in reverse order by scan_att_chars/7
    check_att_default_val(lists:reverse(RevName),Ent,S);
check_att_default_val(_,_,_,_) ->
    ok.
%% check_att_default_val/3: per-type validity checks for one attribute
%% value token; ENTITY/IDREF names are registered as 'undeclared' for the
%% post-document pass, ID values must be unique Names.
check_att_default_val(Name,Ent,S=#xmerl_scanner{rules_write_fun=Write})
  when Ent == 'ENTITY'; Ent == 'ENTITIES' ->
    case xmerl_lib:is_letter(hd(Name)) of
        true -> ok;
        _ -> ?fatal({illegal_first_character,Ent,Name},S)
    end,
    SName = list_to_atom(Name),
    Write(entity,SName,undeclared,S);
check_att_default_val(Name,IDR,S=#xmerl_scanner{rules_write_fun=Write})
  when IDR == 'IDREF'; IDR == 'IDREFS' ->
    case xmerl_lib:is_letter(hd(Name)) of
        true -> ok;
        _ -> ?fatal({illegal_first_character,IDR,Name},S)
    end,
    SName = list_to_atom(Name),
    Write(id,SName,undeclared,S);
check_att_default_val(Name,'ID',S=#xmerl_scanner{rules_write_fun=Write,
                                                 rules_read_fun=Read,
                                                 rules_delete_fun=Delete}) ->
    case xmerl_lib:is_name(Name) of
        false ->
            ?fatal({'ID_names_must_be_Name_production',Name},S);
        _ ->
            ok
    end,
    SName = if
                list(Name) -> list_to_atom(Name);
                true -> Name
            end,
    case Read(id,SName,S) of
        undeclared ->
            %% NOTE(review): this 'undeclared ->' clause head was missing
            %% in the garbled source (its comment survived: the ID was
            %% referenced in IDREF/IDREFS before being defined);
            %% reconstructed after OTP's xmerl_scan.
            Delete(id,SName,S);
        SName -> ?fatal({values_must_be_unique,'ID',SName},S);
        undefined -> ok
    end,
    Write(id,SName,SName,S);
check_att_default_val(_,_,_) ->
    ok.
%% valid_Char/4: check one attribute-value character. NMTOKEN(S) types get
%% the stricter name-character check when validating; everything else must
%% simply be a legal XML Char (the char may arrive as C or as [C]).
valid_Char(true,AT,C,S) when AT=='NMTOKEN';AT=='NMTOKENS' ->
    vc_Valid_Char(AT,C,S);
valid_Char(_,_,[C],S) ->
    case xmerl_lib:is_char(C) of
        true ->
            ok;
        false ->
            ?fatal({unexpected_char,C}, S)
    end;
valid_Char(_,_,C,S) ->
    case xmerl_lib:is_char(C) of
        true ->
            ok;
        false ->
            ?fatal({unexpected_char,C}, S)
    end.
%% [43] content ::= CharData? ((element | Reference | CDSect | PI | Comment) CharData?)*
%% scan_content/8: entry point for scanning element content; starts at
%% child position 1 with empty accumulator and no pending markup delimiter.
scan_content(T, S, Name, Attrs, Space, Lang, Parents, NS) ->
    scan_content(T, S, _Pos = 1, Name, Attrs, Space,
                 Lang, Parents, NS, _Acc = [],_MarkupDel=[]).
%% scan_content/11 - scans the content of an element: character data,
%% child elements, comments, CDATA sections, PIs and entity references,
%% until the matching end tag (or the end of an entity's replacement
%% text) is reached.  Returns {Content, RestOfBuffer, ScannerState}.
%% The last argument carries a markup delimiter ("<"/"&") that came from
%% an expanded character reference and must be treated as plain data.
scan_content("<", S= #xmerl_scanner{continuation_fun = F},
	     Pos, Name, Attrs, Space, Lang, Parents, NS, Acc,_) ->
    %% A lone '<' at the end of the buffer: fetch more input first.
    ?dbg("trailing < detected~n", []),
    F(fun(MoreBytes, S1) -> scan_content("<" ++ MoreBytes, S1,
					 Pos, Name, Attrs,
					 Space, Lang, Parents, NS, Acc,[]) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_content([], S=#xmerl_scanner{environment={external,{entity,_}}},
	     _Pos, _Name, _Attrs, _Space, _Lang, _Parents, _NS, Acc,_) ->
    %% End of an external entity's replacement text ends the content scan.
    {lists:reverse(Acc),[],S};
scan_content([], S=#xmerl_scanner{environment=internal_parsed_entity},
	     _Pos, _Name, _Attrs, _Space, _Lang, _Parents, _NS, Acc,_) ->
    {lists:reverse(Acc),[],S};
scan_content([], S=#xmerl_scanner{continuation_fun = F},
	     Pos, Name, Attrs, Space, Lang, Parents, NS, Acc,_) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_content(MoreBytes, S1,
					 Pos, Name, Attrs,
					 Space, Lang, Parents, NS, Acc,[]) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_content("</" ++ T, S0, _Pos, Name, _Attrs, _Space, _Lang,
	     _Parents, _NS, Acc,[]) ->
    %% End tag: its name must match the current element's name.
    ?bump_col(2),
    {ETagName, _NamespaceInfo, T1, S1} = scan_name(T, S),
    if ETagName == Name ->
	    ok;
       true ->
	    ?fatal({endtag_does_not_match, {ETagName, Name}}, S1)
    end,
    ?strip2,
    case T2 of
	">" ++ T3 ->
	    {lists:reverse(Acc), T3, S2};
	_ ->
	    ?fatal({error,{unexpected_end_of_STag}},S)
    end;
scan_content([$&|_T]=Str,
	     #xmerl_scanner{environment={external,{entity,EName}}} = S0,
	     Pos, Name, Attrs, Space, Lang, Parents, NS, Acc,_) ->
    %% Inside an external entity: scan the reference as entity-value text.
    {_EntV,T1,S1}=scan_entity_value(Str,S0 ,[],EName,general),
    scan_content(T1,S1,Pos, Name, Attrs, Space, Lang, Parents, NS, Acc,[]);
scan_content("&"++T,
	     #xmerl_scanner{environment=internal_parsed_entity} = S,
	     Pos, Name, Attrs, Space, Lang, Parents, NS, Acc,_) ->
    {_, T1, S1} = scan_reference(T, S),
    scan_content(T1,S1,Pos, Name, Attrs, Space, Lang, Parents, NS, Acc,[]);
scan_content("&" ++ T, S0, Pos, Name, Attrs, Space, Lang, Parents, NS, Acc,[]) ->
    %% Reference in content; if the expansion is itself a markup
    %% delimiter it is passed along so it is treated as character data.
    ?bump_col(1),
    {ExpRef, T1, S1} = scan_reference(T, S),
    case markup_delimeter(ExpRef) of
	true -> scan_content(ExpRef++T1,S1,Pos,Name,Attrs,Space,Lang,Parents,NS,Acc,ExpRef);
	_ ->
	    scan_content(ExpRef++T1,S1,Pos,Name,Attrs,Space,Lang,Parents,NS,Acc,[])
    end;
scan_content("<!--" ++ T, S, Pos, Name, Attrs, Space, Lang, Parents, NS, Acc,[]) ->
    %% Comments in content are scanned but not accumulated here.
    {_, T1, S1} = scan_comment(T, S, Pos, Parents, Lang),
    scan_content(T1, S1, Pos+1, Name, Attrs, Space, Lang, Parents, NS, Acc,[]);
scan_content("<" ++ T, S0, Pos, Name, Attrs, Space, Lang, Parents, NS, Acc,[]) ->
    ?bump_col(1),
    {Markup, T1, S1} =
	scan_content_markup(T, S, Pos, Name, Attrs, Space, Lang, Parents, NS),
    AccF = S1#xmerl_scanner.acc_fun,
    %% The user accumulator fun may return either {Acc,S} or {Acc,Pos,S}.
    {NewAcc, NewPos, NewS} = case AccF(Markup, Acc, S1) of
				 {Acc2, S2} ->
				     {Acc2, Pos+1, S2};
				 {Acc2, Pos2, S2} ->
				     {Acc2, Pos2, S2}
			     end,
    scan_content(T1, NewS, NewPos, Name, Attrs, Space, Lang,
		 Parents, NS, NewAcc,[]);
scan_content([_H|T], S= #xmerl_scanner{environment={external,{entity,_}}},
	     Pos, Name, Attrs, Space, Lang, Parents, NS, Acc,_) ->
    %% Other characters inside an external entity are skipped here; the
    %% scan only looks for internal entity references in this mode.
    scan_content(T,S,Pos, Name, Attrs, Space, Lang, Parents, NS, Acc,[]);
scan_content(T, S=#xmerl_scanner{acc_fun = F,
				 event_fun = Event,
				 line = _L},
	     Pos, Name, Attrs, Space, Lang, Parents, NS, Acc,MarkupDel) ->
    %% Character data: wrap in an #xmlText{} record, firing start/end
    %% events around the scan of the raw characters.
    Text0 = #xmlText{pos = Pos,
		     parents = Parents},
    S1 = #xmerl_scanner{} = Event(#xmerl_event{event = started,
					       line = S#xmerl_scanner.line,
					       data = Text0}, S),
    {Data, T1, S2} = scan_char_data(T, S1, Space,MarkupDel),
    Text = Text0#xmlText{value = Data},
    S3 = #xmerl_scanner{} = Event(#xmerl_event{event = ended,
					       line = S2#xmerl_scanner.line,
					       data = Text}, S2),
    {NewAcc, NewPos, NewS} = case F(Text, Acc, S3) of
				 {Acc4, S4} ->
				     {Acc4, Pos+1, S4};
				 {Acc4, Pos4, S4} ->
				     {Acc4, Pos4, S4}
			     end,
    scan_content(T1, NewS, NewPos, Name, Attrs, Space, Lang,
		 Parents, NS, NewAcc,[]).
%% scan_content_markup/9 - dispatch after a '<' seen in content:
%% a CDATA section, a processing instruction, or a child element.
scan_content_markup([], S=#xmerl_scanner{continuation_fun = F},
		    Pos, Name, Attrs, Space, Lang, Parents, NS) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_content_markup(
			      MoreBytes,S1,Pos,Name,
			      Attrs,Space,Lang,Parents,NS) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_content_markup("![CDATA[" ++ T, S0, Pos, _Name, _Attrs,
		    _Space, _Lang, Parents, _NS) ->
    ?bump_col(8),
    scan_cdata(T, S, Pos, Parents);
scan_content_markup("?"++T,S0,Pos,_Name,_Attrs,_Space,_Lang,_Parents,_NS) ->
    ?bump_col(1),
    scan_pi(T, S, Pos);
scan_content_markup(T, S, Pos, _Name, _Attrs, Space, Lang, Parents, NS) ->
    scan_element(T, S, Pos, Space, Lang, Parents, NS).
%% scan_char_data/4 - entry point: start with an empty accumulator.
scan_char_data(T, S, Space,MUD) ->
    scan_char_data(T, S, Space,MUD, _Acc = []).
%% [14] CharData
%% scan_char_data/5 - accumulate character data up to the next markup
%% character ('<' or '&').  MUD ("&" or "<") marks a markup delimiter
%% that came from a character reference and is therefore plain data.
scan_char_data([], S=#xmerl_scanner{environment={external,{entity,_}}},
	       _Space,_MUD, Acc) ->
    {lists:reverse(Acc), [], S};
scan_char_data([], S=#xmerl_scanner{environment=internal_parsed_entity},
	       _Space, _MUD,Acc) ->
    {lists:reverse(Acc), [], S};
scan_char_data([], S=#xmerl_scanner{continuation_fun = F}, Space, _MUD,Acc) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_char_data(MoreBytes,S1,Space,_MUD,Acc) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_char_data([$&|T], S,Space,"&",Acc) ->
    %% '&' produced by a character reference: data, not a reference.
    scan_char_data(T, S, Space,[], [$&|Acc]);
scan_char_data(T=[$&|_], S,_Space,_MUD,Acc) ->
    {lists:reverse(Acc), T, S};
scan_char_data("]]>" ++ _T, S, _Space,_MUD, _Acc) ->
    %% The right angle bracket (>) MAY be represented using the string
    %% "&gt;", but the literal string "]]>" must not appear in content:
    %% that string is reserved for marking the end of a CDATA section.
    ?fatal(unexpected_cdata_end, S);
scan_char_data([$<|T],S,Space,"<", Acc) ->
    %% '<' produced by a character reference: data, not markup.
    scan_char_data(T, S, Space,[], [$<|Acc]);
scan_char_data(T = [$<|_], S, _Space,_MUD,Acc) ->
    {lists:reverse(Acc), T, S};
scan_char_data(T = [H|_], S, Space,MUD, Acc) when ?whitespace(H) ->
    {NewAcc, T1, S1} = accumulate_whitespace(T, S, Space, Acc),
    scan_char_data(T1, S1, Space,MUD,NewAcc);
scan_char_data([H1,H2|_T],S,_Space,_MUD,_Acc) when ?non_character(H1,H2) ->
    ?fatal({error,{not_allowed_to_use_Unicode_noncharacters}},S);
scan_char_data("]]>"++_T,S,_Space,_MUD,_Acc) ->
    %% NOTE(review): unreachable - the "]]>" clause above already
    %% matches this pattern for any MUD/Acc.
    ?fatal({error,{illegal_character_in_content,"]]>"}},S);
scan_char_data([H|T],S0,Space,MUD,Acc) ->
    ?bump_col(1),
    wfc_legal_char(H,S),
    scan_char_data(T,S,Space,MUD,[H|Acc]).
%% [18]-[21] CDATA
%% scan_cdata/4 - scan a CDATA section (after "<![CDATA[") up to "]]>".
scan_cdata(Str, S, Pos, Parents) ->
    scan_cdata(Str, S, Pos, Parents, _Acc = []).
%% scan_cdata/5 - accumulate CDATA characters until the "]]>" terminator;
%% returns an #xmlText{type=cdata} record.
scan_cdata([], S=#xmerl_scanner{continuation_fun = F}, Pos, Parents, Acc) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_cdata(MoreBytes, S1, Pos, Parents, Acc) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_cdata("]]>" ++ T, S0, Pos, Parents, Acc) ->
    ?bump_col(3),
    {#xmlText{pos = Pos,
	      parents = Parents,
	      value = lists:reverse(Acc),
	      type = cdata}, T, S};
scan_cdata([H|T], S0, Pos, Parents, Acc) ->
    %% Every character inside the section must still be a legal XML char.
    case xmerl_lib:is_char(H) of
	true ->
	    ?bump_col(1),
	    scan_cdata(T, S, Pos, Parents, [H|Acc]);
	false ->
	    ?fatal({unexpected_char,H}, S0)
    end.
%% [67] Reference
%% Returns a three-tuple {Result, RestBuf, State}
%% scan_reference/2 - scan a reference after '&': hexadecimal character
%% reference ("#x..."), decimal character reference ("#...") or a
%% general entity reference.
scan_reference([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_reference(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_reference("#x" ++ T, S0) ->
    %% [66] CharRef (hexadecimal form)
    ?bump_col(1),
    if hd(T) /= $; ->
	    scan_char_ref_hex(T, S, 0);
       true ->
	    ?fatal(invalid_char_ref, S)
    end;
scan_reference("#" ++ T, S0) ->
    %% [66] CharRef (decimal form)
    ?bump_col(1),
    if hd(T) /= $; ->
	    scan_char_ref_dec(T, S, []);
       true ->
	    ?fatal(invalid_char_ref, S)
    end;
scan_reference(T, S) ->
    %% Any crash while scanning the entity reference is converted into
    %% a fatal scanner error.
    case catch scan_entity_ref(T, S) of
	{'EXIT', _} ->
	    ?fatal(error_scanning_entity_ref,S);
	Other ->
	    Other
    end.
%% Chapter 4.4.2: "...the replacement text of entities used to escape
%% markup delimiters ... is always treated as data."  (The string
%% "AT&amp;T;" expands to "AT&T;" and the remaining ampersand is not
%% recognized as an entity-reference delimiter.)  Here the character
%% data output by the processor is (1-2 levels) deep.
%% scan_entity_ref/2 - scan an EntityRef after '&'.  The five predefined
%% entities are expanded inline; any other name is resolved via
%% expand_reference/2.  Returns {ReplacementText, RestBuf, State}.
scan_entity_ref([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_entity_ref(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_entity_ref("amp;" ++ T, S0) ->
    ?bump_col(4),
    {"&", T, S};
scan_entity_ref("lt;" ++ T, S0) ->
    ?bump_col(3),
    {"<", T, S};
scan_entity_ref("gt;" ++ T, S0) ->
    ?bump_col(3),
    {">", T, S};
scan_entity_ref("apos;" ++ T, S0) ->
    ?bump_col(5),
    {"'", T, S};
scan_entity_ref("quot;" ++ T, S0) ->
    ?bump_col(5),
    {"\"", T, S};
scan_entity_ref(T, S) ->
    %% A badmatch here (missing ';') is caught by scan_reference/2.
    {Name, _NamespaceInfo, T1, S1} = scan_name(T, S),
    ";" ++ T2 = T1,
    S2 = S1,
    Entity = expand_reference(Name, S2),
    {Entity, T2, S2}.
%% [69] PEReference
%% scan_pe_reference/2 - scan the Name ';' tail of a PEReference
%% (the leading '%' has already been consumed by the caller).
%% Fails with a badmatch if the terminating ';' is missing.
scan_pe_reference(Str, S0) ->
    {PEName, _NamespaceInfo, Rest0, S1} = scan_name(Str, S0),
    ";" ++ Rest = Rest0,
    {PEName, Rest, S1#xmerl_scanner{col = S1#xmerl_scanner.col + 1}}.
%% expand_pe_reference/3 - look up a parameter entity and return its
%% replacement text.  Unless expanding inside a literal (WS ==
%% in_literal) the replacement is padded with a space on each side.
%% NOTE(review): the 'undefined' clause(s) of this case were garbled in
%% the source; reconstructed here as a fatal error on an undeclared
%% parameter entity (WFC or VC failure) - confirm against the original.
expand_pe_reference(Name, #xmerl_scanner{rules_read_fun = Read} = S,WS) ->
    case Read(parameter_entity, Name, S) of
	undefined ->
	    %% WFC or VC failure
	    ?fatal({unknown_parameter_entity, Name}, S);
	Err={error,_Reason} ->
	    ?fatal(Err,S);
	Tuple when tuple(Tuple) ->
	    %% presumably an external-id tuple; fetched (not parsed)
	    %% by the caller - see fetch_not_parse/2 usage.
	    Tuple;
	Result ->
	    if
		WS == in_literal -> Result;
		true -> " "++Result++" "
	    end
    end.
%% expand_external_pe_reference(Name, #xmerl_scanner{} = S) ->
%% [68] EntityReference
%% expand_reference/2 - look up a general entity and return its
%% replacement text.  Inside parsed entities the name itself is
%% returned as text (expansion is deferred).
expand_reference(Name, #xmerl_scanner{environment={external,{entity,_}}}) ->
    atom_to_list(Name);
expand_reference(Name, #xmerl_scanner{environment=internal_parsed_entity}) ->
    atom_to_list(Name);
expand_reference(Name, #xmerl_scanner{rules_read_fun = Read} = S) ->
    case Read(entity, Name, S) of
	undefined ->
	    ?fatal({unknown_entity_ref, Name}, S);
	{_,external,{error,enoent}} ->
	    ?fatal({error,{entity_target_not_found,{error,enoent},Name}},S);
	{DefEnv,EntType,Value} ->
	    %% Check the Entity Declared and internal-parsed-entity
	    %% well-formedness constraints before using the value.
	    wfc_Entity_Declared(DefEnv,S,Name),
	    wfc_Internal_parsed_entity(EntType,Value,S),
	    Value
    end.
%% [66] CharRef
%% scan_char_ref_dec/3 - scan a decimal character reference ("&#ddd;",
%% '&#' already consumed).  Returns {[Char], RestBuf, State} so the
%% result can be prepended to the buffer as a one-character string.
%% Fix: the terminating clause of this function was truncated in the
%% source (missing return tuple); restored.
scan_char_ref_dec([], S=#xmerl_scanner{continuation_fun = F}, Acc) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_char_ref_dec(MoreBytes, S1, Acc) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_char_ref_dec([H|T], S0, Acc) when H >= $0, H =< $9 ->
    ?bump_col(1),
    scan_char_ref_dec(T, S, [H|Acc]);
scan_char_ref_dec(";" ++ T, S0, Acc) ->
    ?bump_col(1),
    Ref = list_to_integer(lists:reverse(Acc)),
    %% The referenced code point must be a legal XML character.
    wfc_legal_char(Ref,S),
    {[Ref], T, S}.
%% scan_char_ref_hex/3 - scan a hexadecimal character reference
%% ("&#xhhh;", '&#x' already consumed); Acc is the integer value built
%% so far.  Returns {[Char], RestBuf, State}.
%% Fix: the terminating clause of this function was truncated in the
%% source (missing return tuple); restored.
scan_char_ref_hex([], S=#xmerl_scanner{continuation_fun = F}, Acc) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_char_ref_hex(MoreBytes, S1, Acc) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_char_ref_hex([H|T], S0, Acc) when H >= $0, H =< $9 ->
    ?bump_col(1),
    Dec = H - $0,
    scan_char_ref_hex(T, S, (Dec bor (Acc bsl 4)));
scan_char_ref_hex([H|T], S0, Acc) when H >= $a, H =< $f ->
    ?bump_col(1),
    Dec = (H - $a) + 10,
    scan_char_ref_hex(T, S, (Dec bor (Acc bsl 4)));
scan_char_ref_hex([H|T], S0, Acc) when H >= $A, H =< $F ->
    ?bump_col(1),
    Dec = (H - $A) + 10,
    scan_char_ref_hex(T, S, (Dec bor (Acc bsl 4)));
scan_char_ref_hex(";" ++ T, S0, Acc) ->
    ?bump_col(1),
    %% The referenced code point must be a legal XML character.
    wfc_legal_char(Acc,S),
    {[Acc], T, S}.
%% [25] Eq
%% scan_eq/2 - scan Eq ::= S? '=' S?, returning {RestBuf, State}.
scan_eq(T, S) ->
    ?strip1,
    case T1 of
	[$=|T2] ->
	    S2 = S1,
	    ?strip3,
	    {T3, S3};
	_ ->
	    ?fatal(assignment_expected,S)
    end.
%% [6] QName ::= (Prefix ':')? LocalPart
%% [7] Prefix ::= NCName
%% [8] LocalPart ::= NCName
%% [4] NCName ::= (Letter | '_') (NCNameChar)*
%% [5] NCNameChar ::= Letter | Digit | '.' | '-' | '_'
%% - All element types and attribute names contain either zero or one colon
%% scan_name_no_colons/2 - like scan_name/2, but when namespace
%% conformance checking is enabled a colon in the name is a fatal error
%% (used where an NCName is required).
scan_name_no_colons(Str, S) ->
    case S#xmerl_scanner.namespace_conformant of
        true ->
            {Name, NSI, Rest, S1} =
                scan_name(Str,
                          S#xmerl_scanner{namespace_conformant = no_colons}),
            %% Restore the caller's conformance setting afterwards.
            {Name, NSI, Rest, S1#xmerl_scanner{namespace_conformant = true}};
        false ->
            scan_name(Str, S)
    end.
%% [5] Name ::= (Letter | '_' | ':') (NameChar)*
%% scan_name/2 - scan a Name; returns {Name, NamespaceInfo, RestBuf, S}.
%% A leading ':' is only allowed when namespace conformance is off, and
%% a leading '%' (PE reference) is expanded first in an external subset.
scan_name([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_name(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_name(Str = [$:|T], S0 = #xmerl_scanner{namespace_conformant = NSC}) ->
    if NSC == false ->
	    ?bump_col(1),
	    scan_nmtoken(T, S, [$:], NSC);
       NSC == no_colons ->
	    ?fatal({invalid_NCName, lists:sublist(Str, 1, 6)}, S0);
       true ->
	    ?fatal({invalid_NCName, lists:sublist(Str, 1, 6)}, S0)
    end;
scan_name([$_|T], S0 = #xmerl_scanner{namespace_conformant = NSC}) ->
    ?bump_col(1),
    scan_nmtoken(T, S, [$_], NSC);
scan_name("%"++_T,S=#xmerl_scanner{environment=prolog}) ->
    %% PE references are not allowed here in the internal subset.
    ?fatal({error,{wfc_PEs_In_Internal_Subset}},S);
scan_name("%"++T,S0=#xmerl_scanner{environment={external,_}}) ->
    ?bump_col(1),
    {PERefName, T1, S1} = scan_pe_reference(T, S),
    ExpRef = expand_pe_reference(PERefName, S1,as_PE),
    {_,T2,S2} = strip(ExpRef ++ T1,S1),
    scan_name(T2,S2);
scan_name([H|T], S0 = #xmerl_scanner{namespace_conformant = NSC}) ->
    case xmerl_lib:is_letter(H) of
	true ->
	    ?bump_col(1),
	    scan_nmtoken(T, S, [H], NSC);
	false ->
	    ?fatal({invalid_name, lists:sublist([H|T], 1, 6)}, S0)
    end;
scan_name(Str, S) ->
    ?fatal({invalid_name, Str}, S).
%% scan_nmtoken/4 - continue scanning a name/nmtoken whose first
%% character(s) are already in Acc.  The prefix starts empty, the local
%% part starts as Acc, and the 7-bit-ASCII flag is seeded from the
%% first accumulated character.
scan_nmtoken(Str, S, Acc, NSC) ->
    FirstChar = hd(Acc),
    scan_nmtoken(Str, S, Acc, [], Acc, NSC, is7bAscii(FirstChar, true)).
%% [7] Nmtoken ::= (NameChar)+
%% scan_nmtoken/2 - scan an Nmtoken from its first character (no leading
%% letter/underscore requirement, unlike a Name).
scan_nmtoken([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_nmtoken(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_nmtoken(Str = [H|T], S) ->
    case xmerl_lib:is_namechar(H) of
	true ->
	    scan_nmtoken(T, S#xmerl_scanner{col = S#xmerl_scanner.col+1},
			 _Acc = [H], _Prefix = [], _Local = [H],
			 _NamespaceConformant = false,is7bAscii(H,true));
	false ->
	    ?fatal({invalid_nmtoken, lists:sublist(Str, 1, 6)}, S)
    end.
%% scan_nmtoken/7 - accumulate name characters, splitting prefix and
%% local part at the first ':' (when allowed by NSC); the Is7bAscii flag
%% permits the early-exit whitespace clause for pure-ASCII names.
scan_nmtoken([], S=#xmerl_scanner{continuation_fun = F},
	     Acc, Prefix, Local, NSC,Is7bAscii) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_nmtoken(MoreBytes,S1,Acc,Prefix,Local,NSC,Is7bAscii) end,
      fun(S1) -> {list_to_atom(lists:reverse(Acc)),
		  namespace_info(Prefix, Local),[],S1} end,
      S);
scan_nmtoken(Str = [H|_], S, Acc, Prefix, Local, _NSC,true) when ?whitespace(H) ->
    %% Fast path: whitespace ends a pure 7-bit-ASCII name.
    NmString = lists:reverse(Acc),
    {list_to_atom(NmString), namespace_info(Prefix, Local), Str, S};
scan_nmtoken(Str = [$:|_], S, Acc, [], _Local, no_colons,_Is7bAscii) ->
    ?fatal({invalid_NCName,
	    lists:sublist(lists:reverse(Acc) ++ Str, 1, 6)}, S);
scan_nmtoken([$:|T], S0, Acc, [], Local, NSC, Is7bAscii) ->
    %% First colon: what was accumulated so far becomes the prefix.
    ?bump_col(1),
    scan_nmtoken(T, S, [$:|Acc], lists:reverse(Local), [], NSC,Is7bAscii);
scan_nmtoken(Str = [$:|_T], S, Acc, _Prefix, _Local, _NSC = true,_Is7bAscii) ->
    %% A second colon is illegal under namespace conformance.
    ?fatal({invalid_NCName,
	    lists:sublist(lists:reverse(Acc) ++ Str, 1, 6)}, S);
%% (a non-namechar also marks the end of a name)
scan_nmtoken(Str=[H|T], S0, Acc, Prefix, Local, NSC,Is7bAscii) ->
    ?bump_col(1),
    case xmerl_lib:is_namechar(H) of
	true ->
	    scan_nmtoken(T, S, [H|Acc], Prefix, [H|Local], NSC,is7bAscii(H,Is7bAscii));
	_ ->
	    NmStr = lists:reverse(Acc),
	    {list_to_atom(NmStr), namespace_info(Prefix, Local), Str, S}
    end.
%% namespace_info/2 - package the prefix/local-part gathered by
%% scan_nmtoken/7: [] when there was no prefix, otherwise
%% {Prefix, LocalPart} (Local arrives reversed).
namespace_info(Prefix, Local) ->
    case Prefix of
        [] -> [];
        _ -> {Prefix, lists:reverse(Local)}
    end.
%% is7bAscii/2 - fold one character into a "seen only 7-bit ASCII so
%% far" flag: once false, always false; otherwise true iff Ch =< 127.
is7bAscii(_Ch, false) ->
    false;
is7bAscii(Ch, _SoFar) ->
    Ch =< 127.
%% [11] SystemLiteral
%% scan_system_literal/2 - scan a SystemLiteral; the opening quote
%% character ('"' or "'") determines the closing delimiter.
scan_system_literal([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_system_literal(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_system_literal("\"" ++ T, S) ->
    scan_system_literal(T, S, $", []);
scan_system_literal("'" ++ T, S) ->
    scan_system_literal(T, S, $', []).
%% scan_system_literal/4 - accumulate characters up to the delimiter.
scan_system_literal([], S=#xmerl_scanner{continuation_fun = F},
		    Delimiter, Acc) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_system_literal(MoreBytes,S1,Delimiter,Acc) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_system_literal([H|T], S, H, Acc) ->
    {lists:reverse(Acc), T, S#xmerl_scanner{col = S#xmerl_scanner.col+1}};
scan_system_literal([H|T], S, Delimiter, Acc) ->
    scan_system_literal(T, S#xmerl_scanner{col = S#xmerl_scanner.col+1},
			Delimiter, [H|Acc]).
%% [12] PubidLiteral
%% scan_pubid_literal/2 - scan a PubidLiteral; must start with a quote.
scan_pubid_literal([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_pubid_literal(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_pubid_literal([H|T], S) when H == $"; H == $' ->
    scan_pubid_literal(T, S#xmerl_scanner{col = S#xmerl_scanner.col+1}, H, []);
scan_pubid_literal([H|_T], S) ->
    ?fatal({invalid_pubid_char, H}, S).
%% scan_pubid_literal/4 - accumulate PubidChars up to the delimiter;
%% runs of whitespace are collapsed into a single 16#20.
scan_pubid_literal([], S=#xmerl_scanner{continuation_fun = F},
		   Delimiter, Acc) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_pubid_literal(MoreBytes,S1,Delimiter,Acc) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_pubid_literal([H|T], S, H, Acc) ->
    {lists:reverse(Acc), T, S#xmerl_scanner{col = S#xmerl_scanner.col+1}};
scan_pubid_literal(Str = [H|_], S, Delimiter, Acc) when ?whitespace(H) ->
    %% Collapse whitespace to a single space character.
    {_, T, S1} = pub_id_strip(Str, S),
    scan_pubid_literal(T, S1, Delimiter, [16#20|Acc]);
scan_pubid_literal([H|T], S, Delimiter, Acc) ->
    case is_pubid_char(H) of
	true ->
	    scan_pubid_literal(
	      T, S#xmerl_scanner{col = S#xmerl_scanner.col+1},
	      Delimiter, [H|Acc]);
	false ->
	    ?fatal({invalid_pubid_char, H}, S)
    end.
%% is_pubid_char/1 - PubidChar minus whitespace: ASCII letters, digits
%% and a fixed punctuation set (whitespace is handled by the caller).
is_pubid_char(C) when C >= $a, C =< $z;
                      C >= $A, C =< $Z;
                      C >= $0, C =< $9 ->
    true;
is_pubid_char(C) ->
    lists:member(C, "-'()+,./:=?;!*#@$_%").
%% [46] contentspec
%% scan_contentspec/2 - scan the content specification of an element
%% declaration: EMPTY, ANY, or a parenthesized content model (possibly
%% reached through a parameter entity).
scan_contentspec([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_contentspec(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_contentspec("EMPTY" ++ T, S0) ->
    ?bump_col(5),
    {empty, T, S};
scan_contentspec("ANY" ++ T, S0) ->
    ?bump_col(3),
    {any, T, S};
scan_contentspec("%" ++ _T, S=#xmerl_scanner{environment=prolog}) ->
    ?fatal({error,{wfc_PEs_In_Internal_Subset}},S);
scan_contentspec("%" ++ T, S0) ->
    ?bump_col(1),
    {PERefName, T1, S1} = scan_pe_reference(T, S),
    ExpRef = expand_pe_reference(PERefName, S1,as_PE),
    {_,T2,S2} = strip(ExpRef ++ T1,S1),
    scan_contentspec(T2, S2);
scan_contentspec("(" ++ T, S0) ->
    ?bump_col(1),
    ?strip1,
    scan_elem_content(T1, S1).
%% [47] children
%% [51] Mixed
%% scan_elem_content/2 - entry point: scan a content model body (after
%% '('), starting in 'children' context with the mode not yet known.
scan_elem_content(T, S) ->
    scan_elem_content(T, S, children, unknown, []).
%% scan_elem_content/5 - scan the body of a content model group.
%% Context is children | mixed | not_mixed, Mode is unknown | seq |
%% choice, and Acc collects the (reversed) content particles.
scan_elem_content([], S=#xmerl_scanner{continuation_fun = F},
		  Context, Mode, Acc) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes,S1) -> scan_elem_content(MoreBytes,S1,Context,Mode,Acc) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_elem_content(")" ++ T, S0, Context, Mode0, Acc0) ->
    %% End of the group; a single particle defaults to a 'seq'.
    ?bump_col(1),
    {Mode, Acc} = case {Mode0, Acc0} of
		      {unknown, [_X]} ->
			  {seq, Acc0};
		      {M, _L} when M == seq; M == choice ->
			  {Mode0, lists:reverse(Acc0)}
		  end,
    {Occurrence, T1, S1} = scan_occurrence(T, S),
    vc_No_Duplicate_Types(S,Context,Acc),
    %% Mixed content groups may only carry the '*' occurrence indicator.
    case {Occurrence, Context,Acc} of
	{'*', mixed,_} -> ok;
	{Other, mixed,_} ->
	    ?fatal({illegal_for_mixed_content, Other}, S1);
	_ ->
	    ok
    end,
    ?strip2,
    {format_elem_content({Occurrence, {Mode, Acc}}), T2, S2};
scan_elem_content("#PCDATA" ++ _T, S, not_mixed, _Mode, _Acc) ->
    ?fatal({error,{extra_set_of_parenthesis}},S);
scan_elem_content("#PCDATA" ++ _T, S, _Cont, Mode, Acc)
  when Mode==choice;Mode==seq;Acc/=[] ->
    %% #PCDATA must come first in a mixed-content model.
    ?fatal({error,{invalid_format_of_mixed_content}},S);
scan_elem_content("#PCDATA" ++ T, S0, _Context, Mode, Acc) ->
    ?bump_col(7),
    ?strip1,
    scan_elem_content(T1, S1, mixed, Mode, ['#PCDATA'|Acc]);
scan_elem_content("," ++ _T, S, _Context, choice, _Acc) ->
    ?fatal({mixing_comma_and_vertical_bar_in_content_model},S);
scan_elem_content("," ++ T, S0, Context, _Mode, Acc) ->
    ?bump_col(1),
    ?strip1,
    scan_elem_content2(T1, S1, Context, seq, Acc);
scan_elem_content("|" ++ _T, S, _Context, seq, _Acc) ->
    ?fatal({mixing_comma_and_vertical_bar_in_content_model},S);
scan_elem_content("|" ++ T, S0, Context, _Mode, Acc) ->
    ?bump_col(1),
    ?strip1,
    scan_elem_content2(T1, S1, Context, choice, Acc);
scan_elem_content(T, S, Context, Mode, Acc) ->
    scan_elem_content2(T, S, Context, Mode, Acc).
%% scan_elem_content2/5 - scan one content particle: a nested group,
%% a PE reference, or a name with an optional occurrence indicator.
scan_elem_content2("(" ++ _T, S, mixed, _Mode, _Acc) ->
    ?fatal({error,
	    {element_names_must_not_be_parenthesized_in_mixed_content}},S);
scan_elem_content2("(" ++ T, S0, Context, Mode, Acc) ->
    ?bump_col(1),
    ?strip1,
    {Inner, T2, S2} = scan_elem_content(T1, S1, not_mixed, unknown, []),
    scan_elem_content(T2, S2, Context, Mode, [Inner|Acc]);
scan_elem_content2("%" ++ _T,S=#xmerl_scanner{environment=prolog},_Context,_Mode,_Acc) ->
    ?fatal({error,{wfc_PEs_In_Internal_Subset}},S);
scan_elem_content2("%" ++ T, S0, Context, Mode, Acc) ->
    ?bump_col(1),
    {PERefName, T1, S1} = scan_pe_reference(T, S),
    ExpRef = expand_pe_reference(PERefName, S1,as_PE),
    {_,T2,S2}=strip(ExpRef++T1,S1),
    scan_elem_content(T2, S2, Context, Mode, Acc);
scan_elem_content2(T, S, Context, Mode, Acc) ->
    {Name, _NameStr, T1, S1} = scan_name(T, S),
    {Occurrence, T2, S2} = scan_occurrence(T1, S1),
    %% Names in mixed content may not carry occurrence indicators.
    case {Occurrence, Context} of
	{once, mixed} -> ok;
	{Other, mixed} ->
	    ?fatal({illegal_for_mixed_content, Other}, S1);
	_ ->
	    ok
    end,
    ?strip3,
    mandatory_delimeter_wfc(T3,S3),
    NewAcc = [format_elem_content({Occurrence, Name}) | Acc],
    scan_elem_content(T3, S3, Context, Mode, NewAcc).
%% format_elem_content/1 - drop the 'once' occurrence marker: a content
%% particle occurring exactly once is represented by the particle alone.
format_elem_content(Spec) ->
    case Spec of
        {once, Particle} -> Particle;
        _ -> Spec
    end.
%% scan_occurrence/2 - scan an optional occurrence indicator
%% ('?', '+' or '*'); anything else means exactly 'once'.
scan_occurrence([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_occurrence(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_occurrence([$?|T], S0) ->
    ?bump_col(1),
    {'?', T, S};
scan_occurrence([$+|T], S0) ->
    ?bump_col(1),
    {'+', T, S};
scan_occurrence([$*|T], S0) ->
    ?bump_col(1),
    {'*', T, S};
scan_occurrence(T, S) ->
    {once, T , S}.
%% first part of VC: Name Token
%% vc_Valid_Char/3 - validity constraint: every character of an
%% NMTOKEN/NMTOKENS attribute value must be a name character.
vc_Valid_Char(_AT,C,S) ->
    case xmerl_lib:is_namechar(C) of
	true ->
	    ok;
	_ ->
	    ?fatal({error,{validity_constraint_Name_Token,C}},S)
    end.
%% vc_ID_Attribute_Default/2 - VC: ID Attribute Default.  An ID
%% attribute must be declared #IMPLIED or #REQUIRED (when validating).
vc_ID_Attribute_Default(_,#xmerl_scanner{validation=false}) ->
    ok;
vc_ID_Attribute_Default({_,'ID',_,Def,_},_S)
  when Def=='#IMPLIED';Def=='#REQUIRED' ->
    ok;
vc_ID_Attribute_Default({_,'ID',_,Def,_},S) ->
    ?fatal({error,{validity_constraint_error_ID_Attribute_Default,Def}},S);
vc_ID_Attribute_Default(_,_) ->
    ok.
%% vc_Enumeration/2 - VC: Enumeration.  A declared default value for an
%% enumerated attribute must be one of the enumerated names.
vc_Enumeration({_Name,{_,NameList},DefaultVal,_,_},S)
  when list(DefaultVal) ->
    case lists:member(list_to_atom(DefaultVal),NameList) of
	true ->
	    ok;
	_ ->
	    ?fatal({error,{vc_enumeration,list_to_atom(DefaultVal),NameList}},S)
    end;
vc_Enumeration({_Name,{_,_NameList},_DefaultVal,_,_},_S) ->
    ok.
%% vc_Entity_Name/2 - VC: Entity Name.  An ENTITY/ENTITIES attribute
%% default must name declared unparsed (NDATA) entities.
vc_Entity_Name({_Name,'ENTITY',DefaultVal,_,_},S) when list(DefaultVal) ->
    Read = S#xmerl_scanner.rules_read_fun,
    case Read(entity,list_to_atom(DefaultVal),S) of
	{_,external,{_,{ndata,_}}} ->
	    ok;
	_ -> ?fatal({error,{vc_Entity_Name,list_to_atom(DefaultVal)}},S)
    end;
vc_Entity_Name({_Name,'ENTITY',_,_,_},_S) ->
    ok;
vc_Entity_Name({_,'ENTITIES',DefaultVal,_,_},S) when list(DefaultVal) ->
    Read = S#xmerl_scanner.rules_read_fun,
    %% Split the default value into the individual entity names.
    NameListFun = fun([],Acc,_St,_Fun) ->
			  lists:reverse(Acc);
		     (Str,Acc,St,Fun) ->
			  {N,_,St2,Str2} = scan_name(Str,St),
			  Fun(Str2,[N|Acc],St2,Fun)
		  end,
    NameList = NameListFun(DefaultVal,[],S,NameListFun),
    VcFun =
	fun(X) ->
		case Read(entity,X,S) of
		    {_,external,{_,{ndata,_}}} ->
			ok;
		    _ -> ?fatal({error,{vc_Entity_Name,X}},S)
		end
	end,
    lists:foreach(VcFun,NameList);
vc_Entity_Name({_,'ENTITIES',_,_,_},_S) ->
    ok.
%% vc_No_Duplicate_Types/3 - VC: No Duplicate Types.  In mixed content
%% the same element name may not appear twice (checked when validating).
vc_No_Duplicate_Types(#xmerl_scanner{validation=true} = S,mixed,Acc) ->
    CheckDupl =
	fun([H|T],F) ->
		case lists:member(H,T) of
		    true ->
			?fatal({no_duplicate_types_allowed,H},S);
		    _ -> F(T,F)
		end;
	   ([],_) -> ok
	end,
    CheckDupl(Acc,CheckDupl);
vc_No_Duplicate_Types(_,_,_) ->
    ok.
%% Tests of Well-Formedness Constraints
%% mandatory_delimeter_wfc/2 - after a name in a content model, the
%% next significant character must be ',', '|', ')' or the start of a
%% PE reference ('%'); anything else is a fatal error.
mandatory_delimeter_wfc([C|_], _S) when C =:= $,; C =:= $|; C =:= $); C =:= $% ->
    ok;
mandatory_delimeter_wfc(T, S) ->
    ?fatal({comma_or_vertical_bar_mandatory_between_names_in_content_model,T},S).
%% wfc_unique_att_spec/2 - WFC: Unique Att Spec.  No attribute name may
%% appear more than once in the same start tag.
wfc_unique_att_spec([],_S) ->
    ok;
wfc_unique_att_spec([#xmlAttribute{name=N}|Atts],S) ->
    case lists:keymember(N,#xmlAttribute.name,Atts) of
	true ->
	    ?fatal({error,{unique_att_spec_required,N}},S);
	_ ->
	    wfc_unique_att_spec(Atts,S)
    end.
%% wfc_legal_char/2 - WFC: Legal Character.  Accepts either a bare
%% character or a one-element list wrapping it.
wfc_legal_char([Ch],S) ->
    case xmerl_lib:is_char(Ch) of
	true ->
	    ok;
	_ ->
	    ?fatal({error,{wfc_Legal_Character,Ch}},S)
    end;
wfc_legal_char(Ch,S) ->
    case xmerl_lib:is_char(Ch) of
	true ->
	    ok;
	_ ->
	    ?fatal({error,{wfc_Legal_Character,Ch}},S)
    end.
%% wfc_whitespace_betw_attrs/2 - attributes must be separated by
%% whitespace; '/' and '>' legitimately end the attribute list.
wfc_whitespace_betw_attrs(WS,_S) when ?whitespace(WS) ->
    ok;
wfc_whitespace_betw_attrs($/,_S) ->
    ok;
wfc_whitespace_betw_attrs($>,_S) ->
    ok;
wfc_whitespace_betw_attrs(_,S) ->
    ?fatal({whitespace_required_between_attributes},S).
%% wfc_Entity_Declared/3 - WFC: Entity Declared.  A standalone document
%% may not reference an entity declared in an external subset.
wfc_Entity_Declared({external,_},S=#xmerl_scanner{standalone=yes},Name) ->
    ?fatal({reference_to_externally_defed_entity_standalone_doc,Name},S);
wfc_Entity_Declared({external,_},_S,_) ->
    ok;
wfc_Entity_Declared(_Env,_S,_) ->
    ok.
%% wfc_Internal_parsed_entity/3 - for an internal entity, scan the
%% replacement text as content to check it is well-formed.
wfc_Internal_parsed_entity(internal,Value,S) ->
    %% WFC test that replacement text matches production content
    scan_content(Value,S#xmerl_scanner{environment=internal_parsed_entity},
		 _Name=[],[],S#xmerl_scanner.space,_Lang=[],_Prnt=[],
		 #xmlNamespace{});
wfc_Internal_parsed_entity(_,_,_) ->
    ok.
%% vc_Element_valid/2 - VC: Element Valid.  When validating, every
%% element used must have a declaration in the DTD (checks skipped
%% inside internal parsed entities).
vc_Element_valid(_Name, #xmerl_scanner{environment = internal_parsed_entity}) ->
    ok;
vc_Element_valid(Name, S = #xmerl_scanner{rules_read_fun = Read,
                                          validation = true}) ->
    case Read(elem_def, Name, S) of
        #xmlElement{elementdef = undeclared} ->
            ?fatal({error, {error_missing_element_declaration_in_DTD, Name}}, S);
        undefined ->
            ?fatal({error, {error_missing_element_declaration_in_DTD, Name}}, S);
        _ ->
            ok
    end;
vc_Element_valid(_, _) ->
    ok.
%% [74] PEDef
%% scan_pe_def/3 - scan a parameter entity definition: either a quoted
%% EntityValue or an ExternalID.
scan_pe_def([], S=#xmerl_scanner{continuation_fun = F}, PEName) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_pe_def(MoreBytes, S1, PEName) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_pe_def("'" ++ T, S0, PEName) ->
    ?bump_col(1),
    scan_entity_value(T, S, $', PEName,parameter);
scan_pe_def("\"" ++ T, S0, PEName) ->
    ?bump_col(1),
    scan_entity_value(T, S, $", PEName,parameter);
scan_pe_def(Str, S, _PEName) ->
    scan_external_id(Str, S).
%% [82] NotationDecl
%% scan_notation_decl/2 - scan a notation declaration ("<!NOTATION"
%% already consumed) and record it via the rules write fun; a previous
%% 'undeclared' placeholder is deleted first.
scan_notation_decl(T, #xmerl_scanner{rules_write_fun = Write,
				     rules_read_fun=Read,
				     rules_delete_fun=Delete} = S) ->
    {Name, _NameStr, T1, S1} = scan_name_no_colons(T, S),
    {_,T2,S2} = mandatory_strip(T1,S1),
    {Def, T3, S3} = scan_notation_decl1(T2, S2),
    ?strip4,
    ">" ++ T5 = T4,
    case Read(notation,Name,S) of
	undeclared -> Delete(notation,Name,S4);
	_ -> ok
    end,
    S5 = Write(notation, Name, Def, S4),
    {T5, S5}.
%% scan_notation_decl1/2 - scan the ExternalID/PublicID part of a
%% notation declaration; a PUBLIC id's system literal is optional here.
scan_notation_decl1([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_notation_decl1(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_notation_decl1("SYSTEM" ++ T, S0) ->
    ?bump_col(6),
    {_,T1,S1} = mandatory_strip(T,S),
    {SL, T2, S2} = scan_system_literal(T1, S1),
    {{system, SL}, T2, S2};
scan_notation_decl1("PUBLIC" ++ T, S0) ->
    ?bump_col(6),
    {_,T1,S1} = mandatory_strip(T,S),
    {PIDL, T2, S2} = scan_pubid_literal(T1, S1),
    ?strip3,
    case T3 of
	">" ++ _ ->
	    {{public, PIDL}, T3,
	     S3#xmerl_scanner{col = S3#xmerl_scanner.col+1}};
	_ ->
	    {SL, T4, S4} = scan_system_literal(T3, S3),
	    {{public, PIDL, SL}, T4, S4}
    end.
%% [75] ExternalID
%% scan_external_id/2 - scan an ExternalID: SYSTEM SystemLiteral, or
%% PUBLIC PubidLiteral SystemLiteral (both literals mandatory here).
scan_external_id([], S=#xmerl_scanner{continuation_fun = F}) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) -> scan_external_id(MoreBytes, S1) end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_external_id("SYSTEM" ++ T, S0) ->
    ?bump_col(6),
    {_,T1,S1} = mandatory_strip(T,S),
    {SL, T2, S2} = scan_system_literal(T1, S1),
    {{system, SL}, T2, S2};
scan_external_id("PUBLIC" ++ T, S0) ->
    ?bump_col(6),
    {_,T1,S1} = mandatory_strip(T,S),
    {PIDL, T2, S2} = scan_pubid_literal(T1, S1),
    {_,T3,S3} = mandatory_strip(T2,S2),
    {SL, T4, S4} = scan_system_literal(T3, S3),
    {{public, PIDL, SL}, T4, S4}.
%% [9] EntityValue
%% Note that we have two different scan functions for EntityValue
%% scan_entity_value/5 - entry point: empty accumulator and an empty
%% PE-nesting stack.
scan_entity_value(Str, S, Delim, Name, Namespace) ->
    scan_entity_value(Str, S, Delim, [], Name, Namespace, []).
%% scan_entity_value/7 - scan an EntityValue up to Delim ('"' or "'",
%% or no_delim when re-scanning already-expanded replacement text).
%% Acc collects the (reversed) replacement text; PENesting tracks the
%% markup delimiters opened/closed inside parameter entities so that
%% VC: Proper Declaration/PE Nesting can be enforced when validating.
%% Fix: the '_ ->' head of the general-entity branch in the '&' clause
%% was missing in the garbled source and has been restored.
scan_entity_value([], S=#xmerl_scanner{environment={external,{entity,_}}},
		  _Delim,Acc,_,_,[]) ->
    {lists:flatten(lists:reverse(Acc)), [], S};
scan_entity_value([], S=#xmerl_scanner{environment={external,{entity,_}},
				       validation=true},
		  _Delim,_Acc,PEName,_,_) ->
    %% Unbalanced nesting at the end of an external entity.
    {{error,{failed_VC_Proper_Declaration_PE_Nesting,1,PEName}},[],S};
scan_entity_value([],S,
		  no_delim,Acc,_,_,[]) ->
    {lists:flatten(lists:reverse(Acc)),[],S};
scan_entity_value([],S=#xmerl_scanner{validation=true},
		  no_delim,_Acc,PEName,_,_PENesting) ->
    {{error,{failed_VC_Proper_Declaration_PE_Nesting,2,PEName}},[],S};
scan_entity_value([], S=#xmerl_scanner{continuation_fun = F},
		  Delim, Acc, PEName,Namespace,PENesting) ->
    ?dbg("cont()...~n", []),
    F(fun(MoreBytes, S1) ->
	      scan_entity_value(MoreBytes,S1,
				Delim,Acc,PEName,Namespace,PENesting)
      end,
      fun(S1) -> ?fatal(unexpected_end, S1) end,
      S);
scan_entity_value([Delim|T], S=#xmerl_scanner{validation=true},
		  Delim,_Acc,PEName,_NS,PENesting) when length(PENesting) /= 0 ->
    {{error,{failed_VC_Proper_Declaration_PE_Nesting,3,PEName}},T,S};
scan_entity_value([Delim|T], S0,
		  Delim, Acc, _PEName,_NS,_PENesting) ->
    ?bump_col(1),
    {lists:flatten(lists:reverse(Acc)), T, S};
scan_entity_value("%" ++ _T,S=#xmerl_scanner{environment=prolog},_,_,_,_,_) ->
    ?fatal({error,{wfc_PEs_In_Internal_Subset}},S);
scan_entity_value("%" ++ T, S0, Delim, Acc, PEName,Namespace,PENesting) ->
    %% PE reference inside an entity value: expand and re-scan, guarding
    %% against direct recursion.
    ?bump_col(1),
    {PERefName, T1, S1} = scan_pe_reference(T, S),
    if PERefName == PEName,Namespace==parameter ->
	    ?fatal({illegal_recursion_in_PE, PEName}, S1);
       true ->
	    {ExpandedRef,S2} =
		case expand_pe_reference(PERefName, S1, in_literal) of
		    %% actually should be expanded as_PE but ...
		    Tuple when tuple(Tuple) ->
			{ExpRef,_Sx}=fetch_not_parse(Tuple,S1),
			{EntV,_,_S2} =
			    scan_entity_value(ExpRef, S1, no_delim,[],
					      PERefName,parameter,[]),
			{EntV,_S2};
		    ExpRef ->
			{ExpRef,S1}
		end,
	    S3 = S2#xmerl_scanner{col=S2#xmerl_scanner.col+1},
	    {Acc2,_,S4} = scan_entity_value(ExpandedRef,S3,no_delim,Acc,
					    PEName,Namespace,[]),
	    {_,T2,S5} = strip(" "++T1,S4),
	    scan_entity_value(T2,S5,Delim,lists:reverse(Acc2),
			      PEName,Namespace,PENesting)
    end;
scan_entity_value("&" ++ T, S0, Delim, Acc, PEName,Namespace,PENesting) ->
    ?bump_col(1),
    case T of
	"#"++_T ->
	    %% Character reference.
	    {ExpRef, T1, S1} = scan_reference(T, S),
	    Tok = pe_nesting_token(ExpRef++T1,Namespace,S1#xmerl_scanner.validation),
	    case markup_delimeter(ExpRef) of
		true ->
		    scan_entity_value(T1, S1, Delim, [ExpRef|Acc], PEName,
				      Namespace,pe_push(Tok,PENesting,S1));
		_ ->
		    scan_entity_value(ExpRef ++ T1, S1, Delim, Acc, PEName,
				      Namespace,pe_push(Tok,PENesting,S1))
	    end;
	_ ->
	    %% General entity reference: bypassed (the '&' is kept in the
	    %% value) but recorded for later recursion checking.
	    {Name, _NamespaceInfo, T1, S1} = scan_name(T,S),
	    S2=save_refed_entity_name(Name,PEName,S1),
	    scan_entity_value(T1,S2,Delim,["&"|Acc],PEName,Namespace,PENesting)
    end;
%% Start delimeter for ConditionalSection
scan_entity_value("<!["++T,S0,Delim,Acc,PEName,parameter=NS,PENesting)->
    ?bump_col(3),
    scan_entity_value(T,S,Delim,["<!["|Acc],PEName,NS,
		      pe_push("<![",PENesting,S));
%% Start delimeter for ConditionalSection (2)
scan_entity_value("["++T,S0,Delim,Acc,PEName,parameter=NS,PENesting)->
    ?bump_col(1),
    scan_entity_value(T,S,Delim,["["|Acc],PEName,NS,
		      pe_push("[",PENesting,S));
%% Start delimeter for comment
scan_entity_value("<!--"++T,S0,Delim,Acc,PEName,parameter=NS,PENesting)->
    ?bump_col(4),
    scan_entity_value(T,S,Delim,["<!--"|Acc],PEName,NS,
		      pe_push("<!--",PENesting,S));
%% Start delimeter for ElementDecl, AttListDecl, EntityDecl, NotationDecl
scan_entity_value("<!"++ T,S0,Delim,Acc,PEName, parameter=NS,PENesting) ->
    ?bump_col(2),
    scan_entity_value(T,S,Delim,["<!"|Acc],PEName,NS,
		      pe_push("<!",PENesting,S));
%% Start delimeter for PI
scan_entity_value("<?"++T,S0,Delim,Acc,PEName, parameter=NS,PENesting) ->
    ?bump_col(2),
    scan_entity_value(T,S,Delim,["<?"|Acc],PEName,NS,
		      pe_push("<?",PENesting,S));
%% Start delimeter for elements that matches the proper stop delimeter
scan_entity_value("</"++T,S0,Delim,Acc,PEName,parameter=NS,PENesting)->
    ?bump_col(2),
    scan_entity_value(T,S,Delim,["</"|Acc],PEName,NS,
		      pe_push("</",PENesting,S));
scan_entity_value("<"++T,S0,Delim,Acc,PEName,parameter=NS,PENesting)->
    ?bump_col(1),
    scan_entity_value(T,S,Delim,["<"|Acc],PEName,NS,
		      pe_push("<",PENesting,S));
%% ( for contentspecs
scan_entity_value("("++T,S0,Delim,Acc,PEName,parameter=NS,PENesting)->
    ?bump_col(1),
    scan_entity_value(T,S,Delim,["("|Acc],PEName,NS,
		      pe_push("(",PENesting,S));
%% Stop delimeter for ElementDecl, AttListDecl, EntityDecl, NotationDecl
scan_entity_value(">"++ T,S0,Delim,Acc,PEName, parameter=NS,PENesting) ->
    ?bump_col(1),
    scan_entity_value(T,S,Delim,[">"|Acc],PEName,NS,
		      pe_pop(">",PENesting,S));
%% Stop delimeter for PI
scan_entity_value("?>"++ T,S0,Delim,Acc,PEName, parameter=NS,PENesting) ->
    ?bump_col(2),
    scan_entity_value(T,S,Delim,["?>"|Acc],PEName,NS,
		      pe_pop("?>",PENesting,S));
%% Stop delimeter for comment
scan_entity_value("-->"++ T,S0,Delim,Acc,PEName, parameter=NS,PENesting) ->
    ?bump_col(3),
    scan_entity_value(T,S,Delim,["-->"|Acc],PEName,NS,
		      pe_pop("-->",PENesting,S));
%% Stop delimeter for ConditionalSection
scan_entity_value("]]>"++ T,S0,Delim,Acc,PEName, parameter=NS,PENesting) ->
    ?bump_col(3),
    scan_entity_value(T,S,Delim,["]]>"|Acc],PEName,NS,
		      pe_pop("]]>",PENesting,S));
%% Stop delimeter added to match a content start delimeter included
scan_entity_value("/>"++ T,S0,Delim,Acc,PEName, parameter=NS,PENesting) ->
    ?bump_col(2),
    scan_entity_value(T,S,Delim,["/>"|Acc],PEName,NS,
		      pe_pop("/>",PENesting,S));
scan_entity_value(")"++ T,S0,Delim,Acc,PEName, parameter=NS,PENesting) ->
    ?bump_col(1),
    scan_entity_value(T,S,Delim,[")"|Acc],PEName,NS,
		      pe_pop(")",PENesting,S));
scan_entity_value([H|T], S0, Delim, Acc, PEName,Namespace,PENesting) ->
    case xmerl_lib:is_char(H) of
	true ->
	    ?bump_col(1),
	    scan_entity_value(T, S, Delim, [H|Acc], PEName,Namespace,PENesting);
	false ->
	    ?fatal({unexpected_char,H}, S0)
    end.
%% save_refed_entity_name/3 - record that entity PEName references
%% entity Name (used for recursion detection); the predefined entities
%% are never recorded.
save_refed_entity_name(Name, PEName, S) ->
    case predefined_entity(Name) of
        true -> S;
        false -> save_refed_entity_name1(Name, PEName, S)
    end.
%% save_refed_entity_name1/3 - add Name to PEName's reference list in
%% the scanner state, creating the entry if needed and avoiding
%% duplicates.
save_refed_entity_name1(Name, PEName,
                        S = #xmerl_scanner{entity_references = ERefs}) ->
    case lists:keyfind(PEName, 1, ERefs) of
        {PEName, Refs} ->
            UpdatedRefs =
                case lists:member(Name, Refs) of
                    true -> Refs;
                    false -> [Name | Refs]
                end,
            S#xmerl_scanner{entity_references =
                                lists:keyreplace(PEName, 1, ERefs,
                                                 {PEName, UpdatedRefs})};
        false ->
            S#xmerl_scanner{entity_references = [{PEName, [Name]} | ERefs]}
    end.
%% pe_push/3 - push Tok onto the PE-nesting stack when it opens a
%% markup construct; closing delimiters are pushed only when validating
%% (so pe_pop/3 can detect improper PE nesting).  Any other token
%% (including 'false' from pe_nesting_token/3) leaves the stack alone.
pe_push(Tok, Stack, S) ->
    Opening = ["<!", "<?", "<!--", "<![", "[", "<", "</", "("],
    case lists:member(Tok, Opening) of
        true ->
            [Tok | Stack];
        false ->
            Closing = [")", ">", "?>", "]]>", "-->", "/>"],
            Validating = case S of
                             #xmerl_scanner{validation = true} -> true;
                             _ -> false
                         end,
            case Validating andalso lists:member(Tok, Closing) of
                true -> [Tok | Stack];
                false -> Stack
            end
    end.
pe_pop(">",["<!"|Rest],_S) -> Rest;
pe_pop("?>",["<?"|Rest],_S) -> Rest;
pe_pop("-->",["<!--"|Rest],_S) -> Rest;
pe_pop("]]>",["[","<!["|Rest],_S) -> Rest;
pe_pop("/>",["<"|Rest],_S) -> Rest;
pe_pop(">",["<"|Rest],_S) -> Rest;
pe_pop(">",["</"|Rest],_S) -> Rest;
pe_pop(")",["("|Rest],_S) -> Rest;
pe_pop(Token,_Stack,S=#xmerl_scanner{validation=true}) ->
?fatal({error,{failed_VC_Proper_Declaration_PE_Nesting,5,Token}},S);
pe_pop(_,Rest,_) ->
Rest.
pe_nesting_token("<!"++_T,parameter,true) -> "<!";
pe_nesting_token("<?"++_T,parameter,true) -> "<?";
pe_nesting_token("<!--"++_T,parameter,true) -> "<!--";
pe_nesting_token("<!["++_T,parameter,true) -> "<![";
pe_nesting_token("["++_T,parameter,true) -> "[";
pe_nesting_token("("++_T,parameter,true) -> "(";
pe_nesting_token(">"++_T,parameter,true) -> ">";
pe_nesting_token("?>"++_T,parameter,true) -> "?>";
pe_nesting_token("-->"++_T,parameter,true) -> "-->";
pe_nesting_token("]]>"++_T,parameter,true) -> "]]>";
pe_nesting_token(")"++_T,parameter,true) -> ")";
pe_nesting_token("/>"++_T,parameter,true) -> "/>";
pe_nesting_token(_,_,_) -> false.
predefined_entity(amp) -> true;
predefined_entity(lt) -> true;
predefined_entity(gt) -> true;
predefined_entity(apos) -> true;
predefined_entity(quot) -> true;
predefined_entity(_) -> false.
check_entity_recursion(EName,
S=#xmerl_scanner{entity_references=EntityRefList}) ->
Set = sofs:family(EntityRefList),
case catch sofs:family_to_digraph(Set, [acyclic]) of
{'EXIT',{cyclic,_}} ->
?fatal({illegal_recursion_in_Entity, EName}, S);
{graph,_,_,_,_} ->
ok
end.
[ 15 ] Comment
scan_comment(Str, S) ->
scan_comment(Str, S, _Pos = undefined, _Parents = [], _Lang = []).
scan_comment(Str,S=#xmerl_scanner{col=C,event_fun=Event}, Pos, Parents, Lang) ->
Comment = #xmlComment{pos = Pos,
parents = Parents,
language = Lang,
value = undefined},
S1 = #xmerl_scanner{} = Event(#xmerl_event{event = started,
line = S#xmerl_scanner.line,
col = C,
pos = Pos,
data = Comment}, S),
scan_comment1(Str, S1, Pos, Comment, _Acc = []).
scan_comment1([], S=#xmerl_scanner{continuation_fun = F},
Pos, Comment, Acc) ->
?dbg("cont()...~n", []),
F(fun(MoreBytes, S1) -> scan_comment1(MoreBytes, S1, Pos, Comment, Acc) end,
fun(S1) -> ?fatal(unexpected_end, S1) end,
S);
scan_comment1("--" ++ T, S0 = #xmerl_scanner{col = C,
event_fun = Event,
hook_fun = Hook},
_Pos, Comment, Acc) ->
case hd(T) of
$> ->
?bump_col(2),
Comment1 = Comment#xmlComment{value = lists:reverse(Acc)},
S1=#xmerl_scanner{}=Event(#xmerl_event{event = ended,
line=S#xmerl_scanner.line,
col = C,
data = Comment1}, S),
{Ret, S2} = Hook(Comment1, S1),
T2 = tl(T),
?strip3,
{Ret, T3, S3};
Char ->
?fatal({invalid_comment,"--"++[Char]}, S0)
end;
scan_comment1("\n" ++ T, S=#xmerl_scanner{line = L}, Pos, Cmt, Acc) ->
scan_comment1(T, S#xmerl_scanner{line=L+1,col=1},Pos, Cmt, "\n" ++ Acc);
scan_comment1("\r\n" ++ T, S=#xmerl_scanner{line = L}, Pos, Cmt, Acc) ->
CR followed by LF is read as a single LF
scan_comment1(T, S#xmerl_scanner{line=L+1,col=1}, Pos, Cmt, "\n" ++ Acc);
scan_comment1("\r" ++ T, S=#xmerl_scanner{line = L}, Pos, Cmt, Acc) ->
CR not followed by LF is read as a LF
scan_comment1(T, S#xmerl_scanner{line=L+1,col=1}, Pos, Cmt, "\n" ++ Acc);
scan_comment1([H|T], S=#xmerl_scanner{col = C}, Pos, Cmt, Acc) ->
wfc_legal_char(H,S),
scan_comment1(T, S#xmerl_scanner{col=C+1}, Pos, Cmt, [H|Acc]).
scan_markup_completion_gt([$>|_R]=T,S) ->
{T,S};
?bump_col(1),
{Name,T1,S1} = scan_pe_reference(T,S),
ExpandedRef = expand_pe_reference(Name,S1,as_PE),
{_,T2,S2} = strip(ExpandedRef++T1,S1),
scan_markup_completion_gt(T2,S2);
scan_markup_completion_gt(T,S) ->
?fatal({error,{malformed_syntax_entity_completion,T}},S).
strip(Str,S) ->
strip(Str,S,all).
strip([], S=#xmerl_scanner{continuation_fun = F},_) ->
?dbg("cont()... stripping whitespace~n", []),
F(fun(MoreBytes, S1) -> strip(MoreBytes, S1) end,
fun(S1) -> {[], [], S1} end,
S);
strip("\s" ++ T, S=#xmerl_scanner{col = C},Lim) ->
strip(T, S#xmerl_scanner{col = C+1},Lim);
strip("\t" ++ _T, S ,no_tab) ->
?fatal({error,{no_tab_allowed}},S);
strip("\t" ++ T, S=#xmerl_scanner{col = C},Lim) ->
strip(T, S#xmerl_scanner{col = expand_tab(C)},Lim);
strip("\n" ++ T, S=#xmerl_scanner{line = L},Lim) ->
strip(T, S#xmerl_scanner{line = L+1, col = 1},Lim);
strip("\r\n" ++ T, S=#xmerl_scanner{line = L},Lim) ->
CR followed by LF is read as a single LF
strip(T, S#xmerl_scanner{line = L+1, col = 1},Lim);
strip("\r" ++ T, S=#xmerl_scanner{line = L},Lim) ->
CR not followed by LF is read as a LF
strip(T, S#xmerl_scanner{line = L+1, col = 1},Lim);
strip(Str, S,_Lim) ->
{[], Str, S}.
mandatory_strip([],S) ->
?fatal({error,{whitespace_was_expected}},S);
mandatory_strip(T,S) when ?whitespace(hd(T)) ->
strip(T,S,all);
?fatal({error,{whitespace_was_expected}},S);
{[],T,S};
mandatory_strip(_T,S) ->
?fatal({error,{whitespace_was_expected}},S).
pub_id_strip(Str, S) ->
strip(Str,S,no_tab).
normalize("&"++T,S,IsNorm) ->
case scan_reference(T, S) of
{ExpRef, T1, S1} when ?whitespace(hd(ExpRef)) ->
normalize(ExpRef++T1,S1,IsNorm);
_ ->
{"&"++T,S,IsNorm}
end;
normalize(T,S,IsNorm) ->
case strip(T,S) of
{_,T,S} ->
{T,S,IsNorm};
{_,T1,S1} ->
{T1,S1,true}
end.
accumulate_whitespace(T::string(),S::global_state ( ) ,
atom(),Acc::string ( ) ) - > { Acc , T1 , S1 }
accumulate_whitespace(T, S, preserve, Acc) ->
accumulate_whitespace(T, S, Acc);
accumulate_whitespace(T, S, normalize, Acc) ->
{_WsAcc, T1, S1} = accumulate_whitespace(T, S, []),
{[$\s|Acc], T1, S1}.
accumulate_whitespace([], S=#xmerl_scanner{continuation_fun = F}, Acc) ->
?dbg("cont()...~n", []),
F(fun(MoreBytes, S1) -> accumulate_whitespace(MoreBytes, S1, Acc) end,
fun(S1) -> {Acc, [], S1} end,
S);
accumulate_whitespace("\s" ++ T, S=#xmerl_scanner{col = C}, Acc) ->
accumulate_whitespace(T, S#xmerl_scanner{col = C+1}, [$\s|Acc]);
accumulate_whitespace("\t" ++ T, S=#xmerl_scanner{col = C}, Acc) ->
accumulate_whitespace(T, S#xmerl_scanner{col = expand_tab(C)}, [$\t|Acc]);
accumulate_whitespace("\n" ++ T, S=#xmerl_scanner{line = L}, Acc) ->
accumulate_whitespace(T, S#xmerl_scanner{line = L+1, col = 1}, [$\n|Acc]);
accumulate_whitespace("\r\n" ++ T, S=#xmerl_scanner{line = L}, Acc) ->
CR followed by LF is read as a single LF
accumulate_whitespace(T, S#xmerl_scanner{line = L+1, col=1}, [$\n|Acc]);
accumulate_whitespace("\r" ++ T, S=#xmerl_scanner{line = L}, Acc) ->
CR not followed by LF is read as a LF
accumulate_whitespace(T, S#xmerl_scanner{line = L+1, col = 1}, [$\n|Acc]);
accumulate_whitespace(Str, S, Acc) ->
{Acc, Str, S}.
expand_tab(Col) ->
Rem = (Col-1) rem 8,
_NewCol = Col + 8 - Rem.
fatal(Reason, S) ->
exit({fatal, {Reason, S#xmerl_scanner.line, S#xmerl_scanner.col}}).
BUG when we are many < ! ATTLIST .. > balise none attributes has save in rules
rules_write(Context, Name, Value, #xmerl_scanner{rules = T} = S) ->
case ets:lookup(T, {Context, Name}) of
[] ->
ets:insert(T, {{Context, Name}, Value});
_ ->
ok
end,
S.
rules_read(Context, Name, #xmerl_scanner{rules = T}) ->
case ets:lookup(T, {Context, Name}) of
[] ->
undefined;
[{_, V}] ->
V
end.
rules_delete(Context,Name,#xmerl_scanner{rules = T}) ->
ets:delete(T,{Context,Name}).
decode_UTF8([H|T],Acc ) when H = < 127 - >
Ch = ) ,
decode_UTF8([H1,H2,H3|T],Acc ) when H1 = < 16#EF - >
decode_UTF8([H1,H2,H3,H4|T],Acc ) when H1 = < 16#F7 - >
Msb = ( H1 band 16#1F ) bsl 6 ,
Lsb = H2 band 16#3F ,
( H3 band 16#3F ) + ( ( H2 band 16#3F ) bsl 6 ) + ( ( H1 band 16#0F ) bsl 12 ) .
( H4 band 16#3F ) + ( ( H3 band 16#3F ) bsl 6 ) + ( ( H2 band 16#3F ) bsl 12 ) +
( ( H1 band 16#07 ) bsl 18 ) .
( H5 band 16#3F ) + ( ( H4 band 16#3F ) bsl 6 ) + ( ( H3 band 16#3F ) bsl 12 ) +
( ( H2 band 16#3F ) bsl 18 ) + ( ( H1 band 16#03 ) band 24 ) .
( H6 band 16#3F ) + ( ( H5 band 16#3F ) bsl 6 ) + ( ( H4 band 16#3F ) bsl 12 ) +
( ( H3 band 16#3F ) bsl 18 ) + ( ( H2 band 16#3F ) bsl 24 ) +
( ( H1 band 16#01 ) bsl 30 ) .
? ) ,
utf8_char([H|T],S0 ) when H = < 127 - >
? ) ,
Ch = ) ,
utf8_char([H1,H2,H3|T],S0 ) when H1 = < 16#EF - >
|
8b41a4bd104851e72c4bb4b7e93c846b7ddcfc53fffe9a8a776123d46f33fd14 | snowleopard/hadrian | Flavour.hs | module Flavour (Flavour (..)) where
import Expression
Please update doc/{flavours.md , user-settings.md } when changing this file .
| ' Flavour ' is a collection of build settings that fully define a GHC build .
-- Note the following type semantics:
* @Bool@ : a plain Boolean flag whose value is known at compile time .
-- * @Action Bool@: a flag whose value can depend on the build environment.
-- * @Predicate@: a flag whose value can depend on the build environment and
-- on the current build target.
data Flavour = Flavour {
| Flavour name , to select this flavour from command line .
name :: String,
-- | Use these command line arguments.
args :: Args,
-- | Build these packages.
packages :: Stage -> Action [Package],
-- | Either 'integerGmp' or 'integerSimple'.
integerLibrary :: Action Package,
-- | Build libraries these ways.
libraryWays :: Ways,
-- | Build RTS these ways.
rtsWays :: Ways,
-- | Build split objects.
splitObjects :: Predicate,
| Build dynamic GHC programs .
dynamicGhcPrograms :: Action Bool,
-- | Enable GHCi debugger.
ghciWithDebugger :: Bool,
| Build profiled GHC .
ghcProfiled :: Bool,
| Build GHC with debug information .
ghcDebugged :: Bool }
| null | https://raw.githubusercontent.com/snowleopard/hadrian/b9a3f9521b315942e1dabb006688ee7c9902f5fe/src/Flavour.hs | haskell | Note the following type semantics:
* @Action Bool@: a flag whose value can depend on the build environment.
* @Predicate@: a flag whose value can depend on the build environment and
on the current build target.
| Use these command line arguments.
| Build these packages.
| Either 'integerGmp' or 'integerSimple'.
| Build libraries these ways.
| Build RTS these ways.
| Build split objects.
| Enable GHCi debugger. | module Flavour (Flavour (..)) where
import Expression
Please update doc/{flavours.md , user-settings.md } when changing this file .
| ' Flavour ' is a collection of build settings that fully define a GHC build .
* @Bool@ : a plain Boolean flag whose value is known at compile time .
data Flavour = Flavour {
| Flavour name , to select this flavour from command line .
name :: String,
args :: Args,
packages :: Stage -> Action [Package],
integerLibrary :: Action Package,
libraryWays :: Ways,
rtsWays :: Ways,
splitObjects :: Predicate,
| Build dynamic GHC programs .
dynamicGhcPrograms :: Action Bool,
ghciWithDebugger :: Bool,
| Build profiled GHC .
ghcProfiled :: Bool,
| Build GHC with debug information .
ghcDebugged :: Bool }
|
591038f5b17a405eec9adc255a0c4b56b03993a67d65547159792741e8ecebb0 | Ericson2314/lighthouse | Transitional.hs | -- | Produces XHTML 1.0 Transitional.
module Text.XHtml.Transitional (
-- * Data types
Html, HtmlAttr,
-- * Classes
HTML(..), ADDATTRS(..),
-- * Primitives and basic combinators
(<<), concatHtml, (+++),
noHtml, isNoHtml, tag, itag,
emptyAttr, intAttr, strAttr, htmlAttr,
primHtml,
-- * Rendering
showHtml, renderHtml, prettyHtml,
showHtmlFragment, renderHtmlFragment, prettyHtmlFragment,
module Text.XHtml.Strict.Elements,
module Text.XHtml.Frameset.Elements,
module Text.XHtml.Transitional.Elements,
module Text.XHtml.Strict.Attributes,
module Text.XHtml.Frameset.Attributes,
module Text.XHtml.Transitional.Attributes,
module Text.XHtml.Extras
) where
import Text.XHtml.Internals
import Text.XHtml.Strict.Elements
import Text.XHtml.Frameset.Elements
import Text.XHtml.Transitional.Elements
import Text.XHtml.Strict.Attributes
import Text.XHtml.Frameset.Attributes
import Text.XHtml.Transitional.Attributes
import Text.XHtml.Extras
docType =
"<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\"" ++
" \"-transitional.dtd\">"
-- | Output the HTML without adding newlines or spaces within the markup.
-- This should be the most time and space efficient way to
-- render HTML, though the ouput is quite unreadable.
showHtml :: HTML html => html -> String
showHtml = showHtmlInternal docType
-- | Outputs indented HTML. Because space matters in
-- HTML, the output is quite messy.
renderHtml :: HTML html => html -> String
renderHtml = renderHtmlInternal docType
-- | Outputs indented HTML, with indentation inside elements.
-- This can change the meaning of the HTML document, and
-- is mostly useful for debugging the HTML output.
-- The implementation is inefficient, and you are normally
-- better off using 'showHtml' or 'renderHtml'.
prettyHtml :: HTML html => html -> String
prettyHtml = prettyHtmlInternal docType
| null | https://raw.githubusercontent.com/Ericson2314/lighthouse/210078b846ebd6c43b89b5f0f735362a01a9af02/ghc-6.8.2/libraries/xhtml/Text/XHtml/Transitional.hs | haskell | | Produces XHTML 1.0 Transitional.
* Data types
* Classes
* Primitives and basic combinators
* Rendering
| Output the HTML without adding newlines or spaces within the markup.
This should be the most time and space efficient way to
render HTML, though the ouput is quite unreadable.
| Outputs indented HTML. Because space matters in
HTML, the output is quite messy.
| Outputs indented HTML, with indentation inside elements.
This can change the meaning of the HTML document, and
is mostly useful for debugging the HTML output.
The implementation is inefficient, and you are normally
better off using 'showHtml' or 'renderHtml'. | module Text.XHtml.Transitional (
Html, HtmlAttr,
HTML(..), ADDATTRS(..),
(<<), concatHtml, (+++),
noHtml, isNoHtml, tag, itag,
emptyAttr, intAttr, strAttr, htmlAttr,
primHtml,
showHtml, renderHtml, prettyHtml,
showHtmlFragment, renderHtmlFragment, prettyHtmlFragment,
module Text.XHtml.Strict.Elements,
module Text.XHtml.Frameset.Elements,
module Text.XHtml.Transitional.Elements,
module Text.XHtml.Strict.Attributes,
module Text.XHtml.Frameset.Attributes,
module Text.XHtml.Transitional.Attributes,
module Text.XHtml.Extras
) where
import Text.XHtml.Internals
import Text.XHtml.Strict.Elements
import Text.XHtml.Frameset.Elements
import Text.XHtml.Transitional.Elements
import Text.XHtml.Strict.Attributes
import Text.XHtml.Frameset.Attributes
import Text.XHtml.Transitional.Attributes
import Text.XHtml.Extras
docType =
"<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\"" ++
" \"-transitional.dtd\">"
showHtml :: HTML html => html -> String
showHtml = showHtmlInternal docType
renderHtml :: HTML html => html -> String
renderHtml = renderHtmlInternal docType
prettyHtml :: HTML html => html -> String
prettyHtml = prettyHtmlInternal docType
|
8724d204d8c94261c11bb1d47112210d3c64958781b1797ae5112c1b789b4d30 | bhaskara/programmable-reinforcement-learning | rbe-tabular-features.lisp | (defpackage rbe-tabular-features
(:documentation
"Tabular features for calisp resource balance program")
(:use
cl
rbe-prog
calisp-features
rbe
utils)
(:export
make-tabular-featurizer))
(in-package rbe-tabular-features)
(defun make-tabular-featurizer (wm)
(lambda (omega u)
(let* ((choosing-thread-ids
(js-choosing-thread-ids omega))
(choosing-thread-labels
(mapcar (lambda (id)
(js-thread-label omega id))
choosing-thread-ids))
(env-state (js-env-state omega))
(gold (gold env-state))
(wood (wood env-state))
(state-type
(if (eq (first choosing-thread-labels) 'nav-choice)
'nav-choice
(when (and (eq (first choosing-thread-labels) 'task-choice)
(zerop gold) (zerop wood))
'init-state)))
(l (list
(loop
for k being each hash-key in (js-thread-states omega) using (hash-value v)
collect (list k (ts-label v) (ts-stack v)))
(list 'resources (gold env-state) (wood env-state)))))
(case state-type
(nav-choice (cons (result-positions wm (pos env-state) choosing-thread-ids u) l))
(init-state (list u l))
(otherwise (list* (pos env-state) u l))))))
(defun result-positions (wm positions t-ids u)
(let ((a (make-array (length positions) :initial-element 'uninitialized)))
(mapc (lambda (id act)
(setf (aref a id)
(gw:result-legal wm (aref positions id) act)))
t-ids u)
(dotimes (i (length a) a)
(when (eq (aref a i) 'uninitialized)
(setf (aref a i) (aref positions i))))))
| null | https://raw.githubusercontent.com/bhaskara/programmable-reinforcement-learning/8afc98116a8f78163b3f86076498d84b3f596217/lisp/calisp-examples/res-balance/rbe-tabular-features.lisp | lisp | (defpackage rbe-tabular-features
(:documentation
"Tabular features for calisp resource balance program")
(:use
cl
rbe-prog
calisp-features
rbe
utils)
(:export
make-tabular-featurizer))
(in-package rbe-tabular-features)
(defun make-tabular-featurizer (wm)
(lambda (omega u)
(let* ((choosing-thread-ids
(js-choosing-thread-ids omega))
(choosing-thread-labels
(mapcar (lambda (id)
(js-thread-label omega id))
choosing-thread-ids))
(env-state (js-env-state omega))
(gold (gold env-state))
(wood (wood env-state))
(state-type
(if (eq (first choosing-thread-labels) 'nav-choice)
'nav-choice
(when (and (eq (first choosing-thread-labels) 'task-choice)
(zerop gold) (zerop wood))
'init-state)))
(l (list
(loop
for k being each hash-key in (js-thread-states omega) using (hash-value v)
collect (list k (ts-label v) (ts-stack v)))
(list 'resources (gold env-state) (wood env-state)))))
(case state-type
(nav-choice (cons (result-positions wm (pos env-state) choosing-thread-ids u) l))
(init-state (list u l))
(otherwise (list* (pos env-state) u l))))))
(defun result-positions (wm positions t-ids u)
(let ((a (make-array (length positions) :initial-element 'uninitialized)))
(mapc (lambda (id act)
(setf (aref a id)
(gw:result-legal wm (aref positions id) act)))
t-ids u)
(dotimes (i (length a) a)
(when (eq (aref a i) 'uninitialized)
(setf (aref a i) (aref positions i))))))
| |
c3ce47afbe846b7328b0a3eaf6f373239ecab1a251410423d1c4024be0dc076b | exoscale/clojure-kubernetes-client | extensions_v1beta1_deployment_condition.clj | (ns clojure-kubernetes-client.specs.extensions-v1beta1-deployment-condition
(:require [clojure.spec.alpha :as s]
[spec-tools.data-spec :as ds]
)
(:import (java.io File)))
(declare extensions-v1beta1-deployment-condition-data extensions-v1beta1-deployment-condition)
(def extensions-v1beta1-deployment-condition-data
{
(ds/opt :lastTransitionTime) inst?
(ds/opt :lastUpdateTime) inst?
(ds/opt :message) string?
(ds/opt :reason) string?
(ds/req :status) string?
(ds/req :type) string?
})
(def extensions-v1beta1-deployment-condition
(ds/spec
{:name ::extensions-v1beta1-deployment-condition
:spec extensions-v1beta1-deployment-condition-data}))
| null | https://raw.githubusercontent.com/exoscale/clojure-kubernetes-client/79d84417f28d048c5ac015c17e3926c73e6ac668/src/clojure_kubernetes_client/specs/extensions_v1beta1_deployment_condition.clj | clojure | (ns clojure-kubernetes-client.specs.extensions-v1beta1-deployment-condition
(:require [clojure.spec.alpha :as s]
[spec-tools.data-spec :as ds]
)
(:import (java.io File)))
(declare extensions-v1beta1-deployment-condition-data extensions-v1beta1-deployment-condition)
(def extensions-v1beta1-deployment-condition-data
{
(ds/opt :lastTransitionTime) inst?
(ds/opt :lastUpdateTime) inst?
(ds/opt :message) string?
(ds/opt :reason) string?
(ds/req :status) string?
(ds/req :type) string?
})
(def extensions-v1beta1-deployment-condition
(ds/spec
{:name ::extensions-v1beta1-deployment-condition
:spec extensions-v1beta1-deployment-condition-data}))
| |
df641d176b838b14635b8af4656dcd7028e158186f0c34eadd6a5c3ac667d035 | rabbitmq/rabbitmq-common | rabbit_auth_backend_dummy.erl | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
%%
Copyright ( c ) 2007 - 2020 VMware , Inc. or its affiliates . All rights reserved .
%%
-module(rabbit_auth_backend_dummy).
-include("rabbit.hrl").
-behaviour(rabbit_authn_backend).
-behaviour(rabbit_authz_backend).
-export([user/0]).
-export([user_login_authentication/2, user_login_authorization/2,
check_vhost_access/3, check_resource_access/4, check_topic_access/4]).
-export([state_can_expire/0]).
-spec user() -> rabbit_types:user().
%% A user to be used by the direct client when permission checks are
not needed . This user can do anything .
user() -> #user{username = <<"none">>,
tags = [],
authz_backends = [{?MODULE, none}]}.
%% Implementation of rabbit_auth_backend
user_login_authentication(_, _) ->
{refused, "cannot log in conventionally as dummy user", []}.
user_login_authorization(_, _) ->
{refused, "cannot log in conventionally as dummy user", []}.
check_vhost_access(#auth_user{}, _VHostPath, _AuthzData) -> true.
check_resource_access(#auth_user{}, #resource{}, _Permission, _Context) -> true.
check_topic_access(#auth_user{}, #resource{}, _Permission, _Context) -> true.
state_can_expire() -> false.
| null | https://raw.githubusercontent.com/rabbitmq/rabbitmq-common/67c4397ffa9f51d87f994aa4db4a68e8e95326ab/src/rabbit_auth_backend_dummy.erl | erlang |
A user to be used by the direct client when permission checks are
Implementation of rabbit_auth_backend | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
Copyright ( c ) 2007 - 2020 VMware , Inc. or its affiliates . All rights reserved .
-module(rabbit_auth_backend_dummy).
-include("rabbit.hrl").
-behaviour(rabbit_authn_backend).
-behaviour(rabbit_authz_backend).
-export([user/0]).
-export([user_login_authentication/2, user_login_authorization/2,
check_vhost_access/3, check_resource_access/4, check_topic_access/4]).
-export([state_can_expire/0]).
-spec user() -> rabbit_types:user().
not needed . This user can do anything .
user() -> #user{username = <<"none">>,
tags = [],
authz_backends = [{?MODULE, none}]}.
user_login_authentication(_, _) ->
{refused, "cannot log in conventionally as dummy user", []}.
user_login_authorization(_, _) ->
{refused, "cannot log in conventionally as dummy user", []}.
check_vhost_access(#auth_user{}, _VHostPath, _AuthzData) -> true.
check_resource_access(#auth_user{}, #resource{}, _Permission, _Context) -> true.
check_topic_access(#auth_user{}, #resource{}, _Permission, _Context) -> true.
state_can_expire() -> false.
|
b0760c744152f95be4eed6e88e35d8559a849f539a36ee0b304d4572bf4a79f5 | cambium-clojure/cambium.core | nested_test.clj | Copyright ( c ) . All rights reserved .
; The use and distribution terms for this software are covered by the
; Eclipse Public License 1.0 (-1.0.php)
; which can be found in the file LICENSE at the root of this distribution.
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
; You must not remove this notice, or any other, from this software.
(ns cambium.nested-test
(:require
[clojure.test :refer :all]
[cambium.codec :as codec]
[cambium.codec.util :as ccu]
[cambium.core :as c]
[cambium.test-util :as tu]))
(deftest type-safe-encoding
(testing "type-safe encoding"
(let [sk codec/stringify-key]
(with-redefs [codec/stringify-key (fn ^String [x] (.replace ^String (sk x) \- \_))
codec/stringify-val ccu/encode-val
codec/destringify-val ccu/decode-val]
(c/info "hello")
(c/info {:foo-k "bar" :baz 10 :qux true} "hello with context")
(c/with-logging-context {:extra-k "context" "some-data" [1 2 :three 'four]}
(is (= (c/get-context) {"extra_k" "context" "some_data" [1 2 :three 'four]}))
(is (= (c/context-val :extra-k) "context"))
(is (nil? (c/context-val "foo")))
(c/info {:foo "bar"} "hello with wrapped context"))
(c/error {} (ex-info "some error" {:data :foo}) "internal error")))))
(deftest test-codec
(let [payload (ccu/encode-val :foo)] (is (= "foo" payload)) (is (= "foo" (ccu/decode-val payload))))
(let [payload (ccu/encode-val 'foo)] (is (= "foo" payload)) (is (= "foo" (ccu/decode-val payload))))
(let [payload (ccu/encode-val "foo")] (is (= "foo" payload)) (is (= "foo" (ccu/decode-val payload))))
(let [payload (ccu/encode-val 10)] (is (= "^long 10" payload)) (is (= 10 (ccu/decode-val payload))))
(let [payload (ccu/encode-val 1.2)] (is (= "^double 1.2" payload)) (is (= 1.2 (ccu/decode-val payload))))
(let [payload (ccu/encode-val true)] (is (= "^boolean true" payload)) (is (= true (ccu/decode-val payload))))
(let [payload (ccu/encode-val nil)] (is (= "^object nil" payload)) (is (= nil (ccu/decode-val payload))))
(let [payload (ccu/encode-val [1 :two 'four])]
(is (= "^object [1 :two four]" payload))(is (= [1 :two 'four] (ccu/decode-val payload)))))
(deftest log-test
(with-redefs [codec/nested-nav? true]
(testing "Normal scenarios"
(c/info "hello")
(c/info {:foo "bar" :baz 10 :qux true} "hello with context")
(c/with-logging-context {:extra "context" "data" [1 2 :three 'four]}
(is (= (c/get-context) {"extra" "context" "data" "[1 2 :three four]"}))
(is (= (c/context-val :extra) "context"))
(is (nil? (c/context-val "foo")))
(c/info {:foo "bar"} "hello with wrapped context"))
(c/error {} (ex-info "some error" {:data :foo}) "internal error"))
(testing "custom loggers"
(tu/metrics {:latency-ns 430 :module "registration"} "op.latency")
(tu/metrics {[:app :module] "registration"} (ex-info "some error" {:data :foo}) "internal error")
(tu/txn-metrics {:module "order-fetch"} "Fetched order #4568"))
(testing "type-safe encoding"
(let [sk codec/stringify-key]
(with-redefs [codec/nested-nav? true
codec/stringify-key (fn ^String [x] (.replace ^String (sk x) \- \_))
codec/stringify-val ccu/encode-val
codec/destringify-val ccu/decode-val]
(c/info "hello")
(c/info {:foo-k "bar" :baz 10 :qux true} "hello with context")
(c/with-logging-context {:extra-k "context" "some-data" [1 2 :three 'four]}
(is (= (c/get-context) {"extra_k" "context" "some_data" [1 2 :three 'four]}))
(is (= (c/context-val :extra-k) "context"))
(is (nil? (c/context-val "foo")))
(c/info {:foo "bar"} "hello with wrapped context"))
(c/error {} (ex-info "some error" {:data :foo}) "internal error"))))))
(deftest test-context-propagation
(with-redefs [codec/stringify-val ccu/encode-val
codec/destringify-val ccu/decode-val
codec/nested-nav? true]
(let [context-old {:foo :bar
:baz :quux}
context-new {:foo 10
:bar :baz}
nested-diff {[:foo-fighter :learn-to-fly] {:title "learn to fly"
:year 1999}
[:foo-fighter :best-of-you ] {:title "best of you"
:year 2005}}
f (fn
([]
(is (= "bar" (c/context-val :foo)))
(is (= "quux" (c/context-val :baz)))
(is (nil? (c/context-val :bar))))
([dummy arg]))]
(testing "with-raw-mdc"
(is (nil? (c/context-val :foo)) "Attribute not set must be absent before override")
(c/with-logging-context context-old
(f)
(c/with-logging-context context-new
(is (= 10 (c/context-val :foo)))
(is (= "quux" (c/context-val :baz)) "Delta context override must not remove non-overridden attributes")
(is (= "baz" (c/context-val :bar))))
(with-redefs [codec/stringify-val ccu/encode-val
codec/destringify-val ccu/decode-val]
(c/with-logging-context nested-diff
(is (= {"learn-to-fly" {"title" "learn to fly"
"year" 1999}
"best-of-you" {"title" "best of you"
"year" 2005}}
(c/context-val :foo-fighter))
"nested map comes out preserved as a map")
(is (= {"title" "learn to fly"
"year" 1999}
(c/context-val [:foo-fighter :learn-to-fly]))
"deep nested map comes out as a map")
(c/with-logging-context {[:foo-fighter :learn-to-fly :year] 2000}
(is (= {"title" "learn to fly"
"year" 2000}
(c/context-val [:foo-fighter :learn-to-fly])))))))
(c/with-logging-context context-old
(f)
(c/with-logging-context context-new
(is (= 10 (c/context-val :foo)))
(is (= "quux" (c/context-val :baz)) "Delta context override must not remove non-overridden attributes")
(is (= "baz" (c/context-val :bar)))))
(is (nil? (c/context-val :foo)) "Attribute not set must be absent after restoration"))
(testing "deletion via nil values"
(c/with-logging-context context-old
(c/with-logging-context {:foo nil}
(is (not (contains? (c/get-context) (codec/stringify-key :foo))))))
(c/with-logging-context {:foo {:bar {:baz 10}}}
(c/with-logging-context {[:foo :bar :baz] nil}
(is (= {(codec/stringify-key :bar) {}} (c/context-val :foo))))
(c/with-logging-context {[:foo :bar] nil}
(is (= {} (c/context-val :foo))))
(c/with-logging-context {[:foo] nil}
(is (not (contains? (c/get-context) (codec/stringify-key :foo)))))))
(testing "wrap-raw-mdc"
(is (nil? (c/context-val :foo)))
((c/wrap-logging-context context-old f))
((c/wrap-logging-context context-old f) :dummy :arg)
((comp (partial c/wrap-logging-context context-new) (c/wrap-logging-context context-old f)))
(is (nil? (c/context-val :foo)))))))
| null | https://raw.githubusercontent.com/cambium-clojure/cambium.core/4ae72905436eae63e8e5b852dd6de25b4a16ca1e/test/cambium/nested_test.clj | clojure | The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 (-1.0.php)
which can be found in the file LICENSE at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software. | Copyright ( c ) . All rights reserved .
(ns cambium.nested-test
(:require
[clojure.test :refer :all]
[cambium.codec :as codec]
[cambium.codec.util :as ccu]
[cambium.core :as c]
[cambium.test-util :as tu]))
(deftest type-safe-encoding
(testing "type-safe encoding"
(let [sk codec/stringify-key]
(with-redefs [codec/stringify-key (fn ^String [x] (.replace ^String (sk x) \- \_))
codec/stringify-val ccu/encode-val
codec/destringify-val ccu/decode-val]
(c/info "hello")
(c/info {:foo-k "bar" :baz 10 :qux true} "hello with context")
(c/with-logging-context {:extra-k "context" "some-data" [1 2 :three 'four]}
(is (= (c/get-context) {"extra_k" "context" "some_data" [1 2 :three 'four]}))
(is (= (c/context-val :extra-k) "context"))
(is (nil? (c/context-val "foo")))
(c/info {:foo "bar"} "hello with wrapped context"))
(c/error {} (ex-info "some error" {:data :foo}) "internal error")))))
(deftest test-codec
(let [payload (ccu/encode-val :foo)] (is (= "foo" payload)) (is (= "foo" (ccu/decode-val payload))))
(let [payload (ccu/encode-val 'foo)] (is (= "foo" payload)) (is (= "foo" (ccu/decode-val payload))))
(let [payload (ccu/encode-val "foo")] (is (= "foo" payload)) (is (= "foo" (ccu/decode-val payload))))
(let [payload (ccu/encode-val 10)] (is (= "^long 10" payload)) (is (= 10 (ccu/decode-val payload))))
(let [payload (ccu/encode-val 1.2)] (is (= "^double 1.2" payload)) (is (= 1.2 (ccu/decode-val payload))))
(let [payload (ccu/encode-val true)] (is (= "^boolean true" payload)) (is (= true (ccu/decode-val payload))))
(let [payload (ccu/encode-val nil)] (is (= "^object nil" payload)) (is (= nil (ccu/decode-val payload))))
(let [payload (ccu/encode-val [1 :two 'four])]
(is (= "^object [1 :two four]" payload))(is (= [1 :two 'four] (ccu/decode-val payload)))))
(deftest log-test
(with-redefs [codec/nested-nav? true]
(testing "Normal scenarios"
(c/info "hello")
(c/info {:foo "bar" :baz 10 :qux true} "hello with context")
(c/with-logging-context {:extra "context" "data" [1 2 :three 'four]}
(is (= (c/get-context) {"extra" "context" "data" "[1 2 :three four]"}))
(is (= (c/context-val :extra) "context"))
(is (nil? (c/context-val "foo")))
(c/info {:foo "bar"} "hello with wrapped context"))
(c/error {} (ex-info "some error" {:data :foo}) "internal error"))
(testing "custom loggers"
(tu/metrics {:latency-ns 430 :module "registration"} "op.latency")
(tu/metrics {[:app :module] "registration"} (ex-info "some error" {:data :foo}) "internal error")
(tu/txn-metrics {:module "order-fetch"} "Fetched order #4568"))
(testing "type-safe encoding"
(let [sk codec/stringify-key]
(with-redefs [codec/nested-nav? true
codec/stringify-key (fn ^String [x] (.replace ^String (sk x) \- \_))
codec/stringify-val ccu/encode-val
codec/destringify-val ccu/decode-val]
(c/info "hello")
(c/info {:foo-k "bar" :baz 10 :qux true} "hello with context")
(c/with-logging-context {:extra-k "context" "some-data" [1 2 :three 'four]}
(is (= (c/get-context) {"extra_k" "context" "some_data" [1 2 :three 'four]}))
(is (= (c/context-val :extra-k) "context"))
(is (nil? (c/context-val "foo")))
(c/info {:foo "bar"} "hello with wrapped context"))
(c/error {} (ex-info "some error" {:data :foo}) "internal error"))))))
(deftest test-context-propagation
(with-redefs [codec/stringify-val ccu/encode-val
codec/destringify-val ccu/decode-val
codec/nested-nav? true]
(let [context-old {:foo :bar
:baz :quux}
context-new {:foo 10
:bar :baz}
nested-diff {[:foo-fighter :learn-to-fly] {:title "learn to fly"
:year 1999}
[:foo-fighter :best-of-you ] {:title "best of you"
:year 2005}}
f (fn
([]
(is (= "bar" (c/context-val :foo)))
(is (= "quux" (c/context-val :baz)))
(is (nil? (c/context-val :bar))))
([dummy arg]))]
(testing "with-raw-mdc"
(is (nil? (c/context-val :foo)) "Attribute not set must be absent before override")
(c/with-logging-context context-old
(f)
(c/with-logging-context context-new
(is (= 10 (c/context-val :foo)))
(is (= "quux" (c/context-val :baz)) "Delta context override must not remove non-overridden attributes")
(is (= "baz" (c/context-val :bar))))
(with-redefs [codec/stringify-val ccu/encode-val
codec/destringify-val ccu/decode-val]
(c/with-logging-context nested-diff
(is (= {"learn-to-fly" {"title" "learn to fly"
"year" 1999}
"best-of-you" {"title" "best of you"
"year" 2005}}
(c/context-val :foo-fighter))
"nested map comes out preserved as a map")
(is (= {"title" "learn to fly"
"year" 1999}
(c/context-val [:foo-fighter :learn-to-fly]))
"deep nested map comes out as a map")
(c/with-logging-context {[:foo-fighter :learn-to-fly :year] 2000}
(is (= {"title" "learn to fly"
"year" 2000}
(c/context-val [:foo-fighter :learn-to-fly])))))))
(c/with-logging-context context-old
(f)
(c/with-logging-context context-new
(is (= 10 (c/context-val :foo)))
(is (= "quux" (c/context-val :baz)) "Delta context override must not remove non-overridden attributes")
(is (= "baz" (c/context-val :bar)))))
(is (nil? (c/context-val :foo)) "Attribute not set must be absent after restoration"))
(testing "deletion via nil values"
(c/with-logging-context context-old
(c/with-logging-context {:foo nil}
(is (not (contains? (c/get-context) (codec/stringify-key :foo))))))
(c/with-logging-context {:foo {:bar {:baz 10}}}
(c/with-logging-context {[:foo :bar :baz] nil}
(is (= {(codec/stringify-key :bar) {}} (c/context-val :foo))))
(c/with-logging-context {[:foo :bar] nil}
(is (= {} (c/context-val :foo))))
(c/with-logging-context {[:foo] nil}
(is (not (contains? (c/get-context) (codec/stringify-key :foo)))))))
(testing "wrap-raw-mdc"
(is (nil? (c/context-val :foo)))
((c/wrap-logging-context context-old f))
((c/wrap-logging-context context-old f) :dummy :arg)
((comp (partial c/wrap-logging-context context-new) (c/wrap-logging-context context-old f)))
(is (nil? (c/context-val :foo)))))))
|
a112b6799a7aac27d90e6a830fa77dacf36546d9d75d6739f241cfc63f20b12f | amir-sabbaghi/proxy | Main.hs | module Main where
import System.Environment
import qualified Server as S
import qualified Network.Socket as S
import HTTPWorker
import Proxy
import ProxyAuth
import Data.Default.Class
import Data.Maybe
import System.Exit
import Control.Monad
data Settings = Settings { bindAddress :: String
, bufferSize :: Int
, authentication :: String
, realm :: String
, https :: Maybe S.HTTPS
, http :: Maybe S.HTTP
} deriving (Show)
instance Default Settings where
def = Settings { bindAddress = "0.0.0.0"
, bufferSize = 2^18
, authentication = ""
, realm = ""
, https = Nothing
, http = Nothing
}
main = do
args <- getArgs
let settings = parseArgs args def :: Settings
let servSett = def { S.bindAddress = bindAddress settings
, S.bufferSize = bufferSize settings
, S.http = http settings
, S.https = https settings
} :: S.ServerSettings
when ((isJust . https) settings &&
((null . S.key . fromJust . https) settings ||
(null . S.cert . fromJust . https) settings)) $ do
print "You must specify --key and --cert for https to work"
exitFailure
when ((isNothing . http) settings && (isNothing . https) settings) $ do
print "You must specify at least one of --http or --https parameters"
exitFailure
let handler = if null (authentication settings) then
handleRequest
else
proxyAuth (authentication settings) (realm settings) handleRequest
S.server servSett.httpWorker handler $ (Nothing, [])
parseArgs :: [String] -> Settings -> Settings
parseArgs [] s = s
parseArgs ("-p":as) s = parseArgs ("--port":as) s
parseArgs ("-b":as) s = parseArgs ("--bindaddr":as) s
parseArgs ("-a":as) s = parseArgs ("--auth":as) s
parseArgs ("--bindaddr":as) s = case as of
[] -> error "Please specify bind address in front of --bindaddr"
(b:as) -> parseArgs as $ s { bindAddress = b }
parseArgs ("--auth":as) s = case as of
[] -> error "Please specify authentication in front of --auth"
(a:as) -> parseArgs as $ s { authentication = a }
parseArgs ("--realm":as) s = case as of
[] -> error "Please specify realm in front of --realm"
(r:as) -> parseArgs as $ s { realm = r }
parseArgs ("--http":as) s = case as of
[] -> error "Please specify http port in front of --http"
(r:as) -> parseArgs as $ s { http = Just (def { S.httpPort = r }) }
parseArgs ("--https":as) s = case as of
[] -> error "Please specify https port in front of --https"
(r:as) -> parseArgs as $ s { https = Just (def { S.httpsPort = r }) }
parseArgs ("--cert":as) s = case as of
[] -> error "Please specify certificate path in front of --cert"
(r:as) -> parseArgs as $ s { https = Just ((fromJust $ https s) { S.cert = r }) }
parseArgs ("--key":as) s = case as of
[] -> error "Please specify key path in front of --key"
(r:as) -> parseArgs as $ s { https = Just ((fromJust $ https s) { S.key = r }) }
| null | https://raw.githubusercontent.com/amir-sabbaghi/proxy/9dadd56e7b365eb2e70759da10d3a517c07783df/app/Main.hs | haskell | module Main where
import System.Environment
import qualified Server as S
import qualified Network.Socket as S
import HTTPWorker
import Proxy
import ProxyAuth
import Data.Default.Class
import Data.Maybe
import System.Exit
import Control.Monad
data Settings = Settings { bindAddress :: String
, bufferSize :: Int
, authentication :: String
, realm :: String
, https :: Maybe S.HTTPS
, http :: Maybe S.HTTP
} deriving (Show)
instance Default Settings where
def = Settings { bindAddress = "0.0.0.0"
, bufferSize = 2^18
, authentication = ""
, realm = ""
, https = Nothing
, http = Nothing
}
main = do
args <- getArgs
let settings = parseArgs args def :: Settings
let servSett = def { S.bindAddress = bindAddress settings
, S.bufferSize = bufferSize settings
, S.http = http settings
, S.https = https settings
} :: S.ServerSettings
when ((isJust . https) settings &&
((null . S.key . fromJust . https) settings ||
(null . S.cert . fromJust . https) settings)) $ do
print "You must specify --key and --cert for https to work"
exitFailure
when ((isNothing . http) settings && (isNothing . https) settings) $ do
print "You must specify at least one of --http or --https parameters"
exitFailure
let handler = if null (authentication settings) then
handleRequest
else
proxyAuth (authentication settings) (realm settings) handleRequest
S.server servSett.httpWorker handler $ (Nothing, [])
parseArgs :: [String] -> Settings -> Settings
parseArgs [] s = s
parseArgs ("-p":as) s = parseArgs ("--port":as) s
parseArgs ("-b":as) s = parseArgs ("--bindaddr":as) s
parseArgs ("-a":as) s = parseArgs ("--auth":as) s
parseArgs ("--bindaddr":as) s = case as of
[] -> error "Please specify bind address in front of --bindaddr"
(b:as) -> parseArgs as $ s { bindAddress = b }
parseArgs ("--auth":as) s = case as of
[] -> error "Please specify authentication in front of --auth"
(a:as) -> parseArgs as $ s { authentication = a }
parseArgs ("--realm":as) s = case as of
[] -> error "Please specify realm in front of --realm"
(r:as) -> parseArgs as $ s { realm = r }
parseArgs ("--http":as) s = case as of
[] -> error "Please specify http port in front of --http"
(r:as) -> parseArgs as $ s { http = Just (def { S.httpPort = r }) }
parseArgs ("--https":as) s = case as of
[] -> error "Please specify https port in front of --https"
(r:as) -> parseArgs as $ s { https = Just (def { S.httpsPort = r }) }
parseArgs ("--cert":as) s = case as of
[] -> error "Please specify certificate path in front of --cert"
(r:as) -> parseArgs as $ s { https = Just ((fromJust $ https s) { S.cert = r }) }
parseArgs ("--key":as) s = case as of
[] -> error "Please specify key path in front of --key"
(r:as) -> parseArgs as $ s { https = Just ((fromJust $ https s) { S.key = r }) }
| |
0fb1827efc953936588c7458c2d3daa7651d86411a7bd1c6cab4cbd37e0a3485 | kamek-pf/ntfd | Helpers.hs | module Spec.Helpers where
import Data.ByteString.Char8 (pack)
import Data.Maybe (fromJust)
import Data.Time.Clock (secondsToNominalDiffTime)
import System.Environment (lookupEnv)
import Config (Config(..))
import Config.Mpd (MpdConfig(..))
import Config.Github (GithubConfig(..))
import Config.Weather (WeatherConfig(..))
defaultCfg :: IO Config
defaultCfg = do
weather <- defaultWeatherCfg
github <- defaultGithubCfg
pure Config
{ weatherCfg = Right weather
, githubCfg = Right github
, mpdCfg = Right defaultMpdCfg
}
defaultMpdCfg :: MpdConfig
defaultMpdCfg = MpdConfig
{ mpdEnabled = True
, mpdMusicDirectory = "/home/musicguy/collection"
, mpdNotifTime = secondsToNominalDiffTime 10
, mpdCoverName = "cover.jpg"
, mpdSkipMissingCover = True
}
defaultWeatherCfg :: IO WeatherConfig
defaultWeatherCfg = do
apiKey <- lookupEnv "OWM_API_KEY"
pure WeatherConfig
{ weatherEnabled = True
, weatherApiKey = fromJust $ pack <$> apiKey
, weatherCityId = "6077243"
, weatherNotifTime = secondsToNominalDiffTime 10
, weatherNotifBody = "hullo"
, weatherSyncFreq = secondsToNominalDiffTime 1800
, weatherTemplate =
"{{ temp_icon }} {{ temp_celsius }}°C {{ trend }} {{ forecast_icon }} {{ forecast_celcius }}°C" -- Spelling error in celsius on purpose ;/
}
defaultGithubCfg :: IO GithubConfig
defaultGithubCfg = do
apiKey <- lookupEnv "GITHUB_TOKEN"
pure GithubConfig
{ githubEnabled = True
, githubApiKey = fromJust $ pack <$> apiKey
, githubNotifTime = secondsToNominalDiffTime 10
, githubShowAvatar = True
, githubSyncFreq = secondsToNominalDiffTime 30
, githubTemplate = "{{ notification_count }}"
, githubAvatarDir = "/home/someone/.cache/ntfd/github_avatar"
}
| null | https://raw.githubusercontent.com/kamek-pf/ntfd/d297a59339b3310a62341ffa9c378180c578dbce/test/Spec/Helpers.hs | haskell | Spelling error in celsius on purpose ;/ | module Spec.Helpers where
import Data.ByteString.Char8 (pack)
import Data.Maybe (fromJust)
import Data.Time.Clock (secondsToNominalDiffTime)
import System.Environment (lookupEnv)
import Config (Config(..))
import Config.Mpd (MpdConfig(..))
import Config.Github (GithubConfig(..))
import Config.Weather (WeatherConfig(..))
defaultCfg :: IO Config
defaultCfg = do
weather <- defaultWeatherCfg
github <- defaultGithubCfg
pure Config
{ weatherCfg = Right weather
, githubCfg = Right github
, mpdCfg = Right defaultMpdCfg
}
defaultMpdCfg :: MpdConfig
defaultMpdCfg = MpdConfig
{ mpdEnabled = True
, mpdMusicDirectory = "/home/musicguy/collection"
, mpdNotifTime = secondsToNominalDiffTime 10
, mpdCoverName = "cover.jpg"
, mpdSkipMissingCover = True
}
defaultWeatherCfg :: IO WeatherConfig
defaultWeatherCfg = do
apiKey <- lookupEnv "OWM_API_KEY"
pure WeatherConfig
{ weatherEnabled = True
, weatherApiKey = fromJust $ pack <$> apiKey
, weatherCityId = "6077243"
, weatherNotifTime = secondsToNominalDiffTime 10
, weatherNotifBody = "hullo"
, weatherSyncFreq = secondsToNominalDiffTime 1800
, weatherTemplate =
}
defaultGithubCfg :: IO GithubConfig
defaultGithubCfg = do
apiKey <- lookupEnv "GITHUB_TOKEN"
pure GithubConfig
{ githubEnabled = True
, githubApiKey = fromJust $ pack <$> apiKey
, githubNotifTime = secondsToNominalDiffTime 10
, githubShowAvatar = True
, githubSyncFreq = secondsToNominalDiffTime 30
, githubTemplate = "{{ notification_count }}"
, githubAvatarDir = "/home/someone/.cache/ntfd/github_avatar"
}
|
6131ef52b5ea60b47d52ee7bf03a1b9714557e07561bc0efca4a75a5b66021b0 | green-labs/ppx_ts | str_partial.ml | open Ppxlib
open Parsetree
open Ast_helper
open Utils
(* partial attribute mapper *)
let make_structure_items name loc manifest kind suffix =
match (manifest, kind) with
(* type t *)
| None, Ptype_abstract -> fail loc "Can't handle the unspecified type"
| None, Ptype_record decls ->
let decls =
[
Str.type_ Recursive
[
Type.mk
(mkloc (name ^ "_" ^ suffix) loc)
~priv:Public
~kind:(Ptype_record (make_label_decls ~is_option:true decls));
];
]
in
decls
| _ -> fail loc "This type is not handled by @ppx_ts.partial"
(* partial extension mapper *)
let make_structure_item name loc manifest kind attributes =
match (manifest, kind) with
(* type t *)
| None, Ptype_abstract -> fail loc "Can't handle the unspecified type"
| None, Ptype_record decls ->
Str.type_ Recursive
[
Type.mk (mkloc name loc) ~priv:Public ~attrs:attributes
~kind:(Ptype_record (make_label_decls ~is_option:true decls));
]
| _ -> fail loc "This type is not handled by @ppx_ts.partial"
| null | https://raw.githubusercontent.com/green-labs/ppx_ts/e7e7190b47a698a0ef6428af53bd4c62254a052a/src/str_partial.ml | ocaml | partial attribute mapper
type t
partial extension mapper
type t | open Ppxlib
open Parsetree
open Ast_helper
open Utils
let make_structure_items name loc manifest kind suffix =
match (manifest, kind) with
| None, Ptype_abstract -> fail loc "Can't handle the unspecified type"
| None, Ptype_record decls ->
let decls =
[
Str.type_ Recursive
[
Type.mk
(mkloc (name ^ "_" ^ suffix) loc)
~priv:Public
~kind:(Ptype_record (make_label_decls ~is_option:true decls));
];
]
in
decls
| _ -> fail loc "This type is not handled by @ppx_ts.partial"
let make_structure_item name loc manifest kind attributes =
match (manifest, kind) with
| None, Ptype_abstract -> fail loc "Can't handle the unspecified type"
| None, Ptype_record decls ->
Str.type_ Recursive
[
Type.mk (mkloc name loc) ~priv:Public ~attrs:attributes
~kind:(Ptype_record (make_label_decls ~is_option:true decls));
]
| _ -> fail loc "This type is not handled by @ppx_ts.partial"
|
3ef571eacb80078b885dc432c53fa0e1217803b2b9ca733c3557cbeec04b91c0 | kazu-yamamoto/hhp | Info.hs | # LANGUAGE TupleSections , FlexibleInstances , Rank2Types #
module Hhp.Info (
infoExpr
, info
, typeExpr
, types
) where
import GHC (Ghc, TypecheckedModule(..), SrcSpan, Type, GenLocated(L), ModSummary, mgModSummaries, mg_ext, LHsBind, Type, LPat, LHsExpr)
import qualified GHC as G
import GHC.Core.Type (mkVisFunTys)
import GHC.Core.Utils (exprType)
import GHC.Hs.Binds (HsBindLR(..))
import GHC.Hs.Expr (MatchGroupTc(..))
import GHC.Hs.Extension (GhcTc)
import GHC.HsToCore (deSugarExpr)
import GHC.Utils.Monad (liftIO)
import GHC.Utils.Outputable (SDocContext)
import GHC.Driver.Session (initSDocContext)
import Control.Applicative ((<|>))
import Control.Monad (filterM)
import Control.Monad.Catch (SomeException(..), handle, bracket)
import Data.Function (on)
import Data.List (sortBy)
import Data.Maybe (catMaybes, fromMaybe)
import Data.Ord as O
import Hhp.Doc (showPage, showOneLine, getStyle)
import Hhp.Gap
import Hhp.GHCApi
import Hhp.Logger (getSrcSpan)
import Hhp.Syb
import Hhp.Things
import Hhp.Types
----------------------------------------------------------------
-- | Obtaining information of a target expression. (GHCi's info:)
infoExpr :: Options
-> Cradle
-> FilePath -- ^ A target file.
^ A expression .
-> IO String
infoExpr opt cradle file expr = withGHC' $ do
initializeFlagsWithCradle opt cradle
info opt file expr
-- | Obtaining information of a target expression. (GHCi's info:)
info :: Options
-> FilePath -- ^ A target file.
^ A expression .
-> Ghc String
info opt file expr = convert opt <$> handle handler body
where
body = inModuleContext file $ \ctx -> do
sdoc <- infoThing expr
return $ showPage ctx sdoc
handler (SomeException _e) = return $ "Cannot show info: " ++ show _e
----------------------------------------------------------------
-- | Obtaining type of a target expression. (GHCi's type:)
typeExpr :: Options
-> Cradle
-> FilePath -- ^ A target file.
-> Int -- ^ Line number.
-> Int -- ^ Column number.
-> IO String
typeExpr opt cradle file lineNo colNo = withGHC' $ do
initializeFlagsWithCradle opt cradle
types opt file lineNo colNo
-- | Obtaining type of a target expression. (GHCi's type:)
types :: Options
-> FilePath -- ^ A target file.
-> Int -- ^ Line number.
-> Int -- ^ Column number.
-> Ghc String
types opt file lineNo colNo = convert opt <$> handle handler body
where
body = inModuleContext file $ \ctx -> do
modSum <- fileModSummary file
srcSpanTypes <- getSrcSpanType modSum lineNo colNo
return $ map (toTup ctx) $ sortBy (cmp `on` fst) srcSpanTypes
handler (SomeException _) = return []
type LExpression = LHsExpr GhcTc
type LBinding = LHsBind GhcTc
type LPattern = LPat GhcTc
getSrcSpanType :: ModSummary -> Int -> Int -> Ghc [(SrcSpan, Type)]
getSrcSpanType modSum lineNo colNo = do
p <- G.parseModule modSum
tcm@TypecheckedModule{tm_typechecked_source = tcs} <- G.typecheckModule p
let es = listifySpans tcs (lineNo, colNo) :: [LExpression]
bs = listifySpans tcs (lineNo, colNo) :: [LBinding]
ps = listifySpans tcs (lineNo, colNo) :: [LPattern]
ets <- mapM (getTypeLExpression tcm) es
bts <- mapM (getTypeLBinding tcm) bs
pts <- mapM (getTypeLPattern tcm) ps
return $ catMaybes $ concat [ets, bts, pts]
cmp :: SrcSpan -> SrcSpan -> Ordering
cmp a b
| a `G.isSubspanOf` b = O.LT
| b `G.isSubspanOf` a = O.GT
| otherwise = O.EQ
toTup :: SDocContext -> (SrcSpan, Type) -> ((Int,Int,Int,Int),String)
toTup ctx (spn, typ) = (fourInts spn, pretty ctx typ)
fourInts :: SrcSpan -> (Int,Int,Int,Int)
fourInts = fromMaybe (0,0,0,0) . getSrcSpan
pretty :: SDocContext -> Type -> String
pretty ctx = showOneLine ctx . pprSigmaType
----------------------------------------------------------------
inModuleContext :: FilePath -> (SDocContext -> Ghc a) -> Ghc a
inModuleContext file action =
withDynFlags (setWarnTypedHoles . setDeferTypeErrors . setNoWarningFlags) $ do
setTargetFiles [file]
withContext $ do
dflag <- G.getSessionDynFlags
style <- getStyle
action $ initSDocContext dflag style
----------------------------------------------------------------
fileModSummary :: FilePath -> Ghc ModSummary
fileModSummary file = do
mss <- mgModSummaries <$> G.getModuleGraph
let xs = filter (\m -> G.ml_hs_file (G.ms_location m) == Just file) mss
case xs of
[ms] -> return ms
_ -> error "fileModSummary"
withContext :: Ghc a -> Ghc a
withContext action = bracket setup teardown body
where
setup = G.getContext
teardown = setCtx
body _ = do
topImports >>= setCtx
action
topImports = do
mss <- mgModSummaries <$> G.getModuleGraph
map modName <$> filterM isTop mss
isTop mos = lookupMod mos <|> returnFalse
lookupMod mos = G.lookupModule (G.ms_mod_name mos) Nothing >> return True
returnFalse = return False
modName = G.IIModule . G.moduleName . G.ms_mod
setCtx = G.setContext
----------------------------------------------------------------
getTypeLExpression :: TypecheckedModule -> LExpression -> Ghc (Maybe (SrcSpan, Type))
getTypeLExpression _ e@(L spnA _) = do
hs_env <- G.getSession
(_, mbc) <- liftIO $ deSugarExpr hs_env e
let spn = locA spnA
return $ (spn, ) . exprType <$> mbc
getTypeLBinding :: TypecheckedModule -> LBinding -> Ghc (Maybe (SrcSpan, Type))
getTypeLBinding _ (L spnA FunBind{fun_matches = m}) = return $ Just (spn, typ)
where
in_tys = mg_arg_tys $ mg_ext m
out_typ = mg_res_ty $ mg_ext m
typ = mkVisFunTys in_tys out_typ
spn = locA spnA
getTypeLBinding _ _ = return Nothing
getTypeLPattern :: TypecheckedModule -> LPattern -> Ghc (Maybe (SrcSpan, Type))
getTypeLPattern _ (L spnA pat) = return $ Just (locA spnA, hsPatType pat)
| null | https://raw.githubusercontent.com/kazu-yamamoto/hhp/6982aace9280daa7990782b645d9242685038c1f/lib/Hhp/Info.hs | haskell | --------------------------------------------------------------
| Obtaining information of a target expression. (GHCi's info:)
^ A target file.
| Obtaining information of a target expression. (GHCi's info:)
^ A target file.
--------------------------------------------------------------
| Obtaining type of a target expression. (GHCi's type:)
^ A target file.
^ Line number.
^ Column number.
| Obtaining type of a target expression. (GHCi's type:)
^ A target file.
^ Line number.
^ Column number.
--------------------------------------------------------------
--------------------------------------------------------------
-------------------------------------------------------------- | # LANGUAGE TupleSections , FlexibleInstances , Rank2Types #
module Hhp.Info (
infoExpr
, info
, typeExpr
, types
) where
import GHC (Ghc, TypecheckedModule(..), SrcSpan, Type, GenLocated(L), ModSummary, mgModSummaries, mg_ext, LHsBind, Type, LPat, LHsExpr)
import qualified GHC as G
import GHC.Core.Type (mkVisFunTys)
import GHC.Core.Utils (exprType)
import GHC.Hs.Binds (HsBindLR(..))
import GHC.Hs.Expr (MatchGroupTc(..))
import GHC.Hs.Extension (GhcTc)
import GHC.HsToCore (deSugarExpr)
import GHC.Utils.Monad (liftIO)
import GHC.Utils.Outputable (SDocContext)
import GHC.Driver.Session (initSDocContext)
import Control.Applicative ((<|>))
import Control.Monad (filterM)
import Control.Monad.Catch (SomeException(..), handle, bracket)
import Data.Function (on)
import Data.List (sortBy)
import Data.Maybe (catMaybes, fromMaybe)
import Data.Ord as O
import Hhp.Doc (showPage, showOneLine, getStyle)
import Hhp.Gap
import Hhp.GHCApi
import Hhp.Logger (getSrcSpan)
import Hhp.Syb
import Hhp.Things
import Hhp.Types
infoExpr :: Options
-> Cradle
^ A expression .
-> IO String
infoExpr opt cradle file expr = withGHC' $ do
initializeFlagsWithCradle opt cradle
info opt file expr
info :: Options
^ A expression .
-> Ghc String
info opt file expr = convert opt <$> handle handler body
where
body = inModuleContext file $ \ctx -> do
sdoc <- infoThing expr
return $ showPage ctx sdoc
handler (SomeException _e) = return $ "Cannot show info: " ++ show _e
typeExpr :: Options
-> Cradle
-> IO String
typeExpr opt cradle file lineNo colNo = withGHC' $ do
initializeFlagsWithCradle opt cradle
types opt file lineNo colNo
types :: Options
-> Ghc String
types opt file lineNo colNo = convert opt <$> handle handler body
where
body = inModuleContext file $ \ctx -> do
modSum <- fileModSummary file
srcSpanTypes <- getSrcSpanType modSum lineNo colNo
return $ map (toTup ctx) $ sortBy (cmp `on` fst) srcSpanTypes
handler (SomeException _) = return []
type LExpression = LHsExpr GhcTc
type LBinding = LHsBind GhcTc
type LPattern = LPat GhcTc
getSrcSpanType :: ModSummary -> Int -> Int -> Ghc [(SrcSpan, Type)]
getSrcSpanType modSum lineNo colNo = do
p <- G.parseModule modSum
tcm@TypecheckedModule{tm_typechecked_source = tcs} <- G.typecheckModule p
let es = listifySpans tcs (lineNo, colNo) :: [LExpression]
bs = listifySpans tcs (lineNo, colNo) :: [LBinding]
ps = listifySpans tcs (lineNo, colNo) :: [LPattern]
ets <- mapM (getTypeLExpression tcm) es
bts <- mapM (getTypeLBinding tcm) bs
pts <- mapM (getTypeLPattern tcm) ps
return $ catMaybes $ concat [ets, bts, pts]
cmp :: SrcSpan -> SrcSpan -> Ordering
cmp a b
| a `G.isSubspanOf` b = O.LT
| b `G.isSubspanOf` a = O.GT
| otherwise = O.EQ
toTup :: SDocContext -> (SrcSpan, Type) -> ((Int,Int,Int,Int),String)
toTup ctx (spn, typ) = (fourInts spn, pretty ctx typ)
fourInts :: SrcSpan -> (Int,Int,Int,Int)
fourInts = fromMaybe (0,0,0,0) . getSrcSpan
pretty :: SDocContext -> Type -> String
pretty ctx = showOneLine ctx . pprSigmaType
inModuleContext :: FilePath -> (SDocContext -> Ghc a) -> Ghc a
inModuleContext file action =
withDynFlags (setWarnTypedHoles . setDeferTypeErrors . setNoWarningFlags) $ do
setTargetFiles [file]
withContext $ do
dflag <- G.getSessionDynFlags
style <- getStyle
action $ initSDocContext dflag style
fileModSummary :: FilePath -> Ghc ModSummary
fileModSummary file = do
mss <- mgModSummaries <$> G.getModuleGraph
let xs = filter (\m -> G.ml_hs_file (G.ms_location m) == Just file) mss
case xs of
[ms] -> return ms
_ -> error "fileModSummary"
withContext :: Ghc a -> Ghc a
withContext action = bracket setup teardown body
where
setup = G.getContext
teardown = setCtx
body _ = do
topImports >>= setCtx
action
topImports = do
mss <- mgModSummaries <$> G.getModuleGraph
map modName <$> filterM isTop mss
isTop mos = lookupMod mos <|> returnFalse
lookupMod mos = G.lookupModule (G.ms_mod_name mos) Nothing >> return True
returnFalse = return False
modName = G.IIModule . G.moduleName . G.ms_mod
setCtx = G.setContext
getTypeLExpression :: TypecheckedModule -> LExpression -> Ghc (Maybe (SrcSpan, Type))
getTypeLExpression _ e@(L spnA _) = do
hs_env <- G.getSession
(_, mbc) <- liftIO $ deSugarExpr hs_env e
let spn = locA spnA
return $ (spn, ) . exprType <$> mbc
getTypeLBinding :: TypecheckedModule -> LBinding -> Ghc (Maybe (SrcSpan, Type))
getTypeLBinding _ (L spnA FunBind{fun_matches = m}) = return $ Just (spn, typ)
where
in_tys = mg_arg_tys $ mg_ext m
out_typ = mg_res_ty $ mg_ext m
typ = mkVisFunTys in_tys out_typ
spn = locA spnA
getTypeLBinding _ _ = return Nothing
getTypeLPattern :: TypecheckedModule -> LPattern -> Ghc (Maybe (SrcSpan, Type))
getTypeLPattern _ (L spnA pat) = return $ Just (locA spnA, hsPatType pat)
|
d38beb5188dd70e1ae3f51ab1e831f2a63ed08e475918a91512b4beef390c8a0 | fumieval/mason | Fast.hs | # LANGUAGE CPP #
{-# OPTIONS_GHC -ddump-simpl -ddump-to-file -dsuppress-all #-}
#define LIB Data.ByteString.FastBuilder
#define NAME Fast
#include "template.hs"
encodeUtf8Builder :: T.Text -> Builder
encodeUtf8Builder = byteString . T.encodeUtf8
| null | https://raw.githubusercontent.com/fumieval/mason/09e3d6aab64ded54380270be2af3db31b432bcc1/benchmarks/aeson/Fast.hs | haskell | # OPTIONS_GHC -ddump-simpl -ddump-to-file -dsuppress-all # | # LANGUAGE CPP #
#define LIB Data.ByteString.FastBuilder
#define NAME Fast
#include "template.hs"
encodeUtf8Builder :: T.Text -> Builder
encodeUtf8Builder = byteString . T.encodeUtf8
|
177274f544a0761e195449859e27c217011eac1ace9d815a3d21103842460203 | devaspot/games | lucky_sup.erl | -module(lucky_sup).
-behaviour(supervisor).
-include_lib("db/include/config.hrl").
-export([start_link/0]).
-export([init/1]).
-define(CHILD(I, Type), {I, {I, start_link, []}, permanent, 5000, Type, [I]}).
start_link() -> supervisor:start_link({local, ?MODULE}, ?MODULE, []).
init([]) ->
RestartStrategy = one_for_one,
MaxRestarts = 1000,
MaxSecondsBetweenRestarts = 1,
SupFlags = {RestartStrategy, MaxRestarts, MaxSecondsBetweenRestarts},
Restart = permanent,
Shutdown = 2000,
OkeyTableParams = [{mult_factor, 1},
{slang_allowed, false},
{observers_allowed, false},
{tournament_type, lucky},
{round_timeout, infinity},
{ round_timeout , 30 * 1000 } ,
{set_timeout, infinity},
{ set_timeout , 10 * 60 * 1000 } ,
{speed, normal},
{game_type, standard},
{rounds, undefined},
{reveal_confirmation, false},
{next_series_confirmation, no},
{pause_mode, normal},
{social_actions_enabled, true},
{gosterge_finish_allowed, undefined}
],
OkeyGameId = id_generator:get_id(),
GameName = "I'm filling lucky - " ++ erlang:integer_to_list(OkeyGameId),
OkeyParams = [{game, game_okey},
{game_mode, standard},
{game_name, GameName},
{mode, normal}, % Common table for several real players
{seats, 4},
{ quota_per_round , Quota } ,
{table_module, okey_table},
{bot_module, okey_bot},
{table_params, OkeyTableParams}
],
OkeySpec = {okey_lucky, {lucky, start_link, [OkeyGameId, OkeyParams]},
Restart, Shutdown, worker, [lucky]},
TavlaTableParams = [{mult_factor, 1},
{slang_allowed, false},
{observers_allowed, false},
{tournament_type, lucky},
{round_timeout, infinity},
{ round_timeout , 30 * 1000 } ,
{set_timeout, infinity},
{ set_timeout , 10 * 60 * 1000 } ,
{speed, normal},
{game_mode, standard},
{rounds, undefined},
{next_series_confirmation, no},
{pause_mode, normal},
{social_actions_enabled, true}
],
TavlaGameId = id_generator:get_id(),
TavlaGameName = "I'm filling lucky - " ++ erlang:integer_to_list(TavlaGameId),
TavlaParams = [{game, game_tavla},
{game_mode, standard},
{game_name, TavlaGameName},
{mode, normal}, % Common table for several real players
{seats, 2},
{ quota_per_round , Quota } ,
{table_module, tavla_table},
{bot_module, tavla_bot},
{table_params, TavlaTableParams}
],
TavlaSpec = {tavla_lucky, {lucky, start_link, [TavlaGameId, TavlaParams]},
Restart, Shutdown, worker, [lucky]},
{ok, { SupFlags, [OkeySpec, TavlaSpec]} }.
| null | https://raw.githubusercontent.com/devaspot/games/a1f7c3169c53d31e56049e90e0094a3f309603ae/apps/server/src/sup/lucky_sup.erl | erlang | Common table for several real players
Common table for several real players | -module(lucky_sup).
-behaviour(supervisor).
-include_lib("db/include/config.hrl").
-export([start_link/0]).
-export([init/1]).
-define(CHILD(I, Type), {I, {I, start_link, []}, permanent, 5000, Type, [I]}).
start_link() -> supervisor:start_link({local, ?MODULE}, ?MODULE, []).
init([]) ->
RestartStrategy = one_for_one,
MaxRestarts = 1000,
MaxSecondsBetweenRestarts = 1,
SupFlags = {RestartStrategy, MaxRestarts, MaxSecondsBetweenRestarts},
Restart = permanent,
Shutdown = 2000,
OkeyTableParams = [{mult_factor, 1},
{slang_allowed, false},
{observers_allowed, false},
{tournament_type, lucky},
{round_timeout, infinity},
{ round_timeout , 30 * 1000 } ,
{set_timeout, infinity},
{ set_timeout , 10 * 60 * 1000 } ,
{speed, normal},
{game_type, standard},
{rounds, undefined},
{reveal_confirmation, false},
{next_series_confirmation, no},
{pause_mode, normal},
{social_actions_enabled, true},
{gosterge_finish_allowed, undefined}
],
OkeyGameId = id_generator:get_id(),
GameName = "I'm filling lucky - " ++ erlang:integer_to_list(OkeyGameId),
OkeyParams = [{game, game_okey},
{game_mode, standard},
{game_name, GameName},
{seats, 4},
{ quota_per_round , Quota } ,
{table_module, okey_table},
{bot_module, okey_bot},
{table_params, OkeyTableParams}
],
OkeySpec = {okey_lucky, {lucky, start_link, [OkeyGameId, OkeyParams]},
Restart, Shutdown, worker, [lucky]},
TavlaTableParams = [{mult_factor, 1},
{slang_allowed, false},
{observers_allowed, false},
{tournament_type, lucky},
{round_timeout, infinity},
{ round_timeout , 30 * 1000 } ,
{set_timeout, infinity},
{ set_timeout , 10 * 60 * 1000 } ,
{speed, normal},
{game_mode, standard},
{rounds, undefined},
{next_series_confirmation, no},
{pause_mode, normal},
{social_actions_enabled, true}
],
TavlaGameId = id_generator:get_id(),
TavlaGameName = "I'm filling lucky - " ++ erlang:integer_to_list(TavlaGameId),
TavlaParams = [{game, game_tavla},
{game_mode, standard},
{game_name, TavlaGameName},
{seats, 2},
{ quota_per_round , Quota } ,
{table_module, tavla_table},
{bot_module, tavla_bot},
{table_params, TavlaTableParams}
],
TavlaSpec = {tavla_lucky, {lucky, start_link, [TavlaGameId, TavlaParams]},
Restart, Shutdown, worker, [lucky]},
{ok, { SupFlags, [OkeySpec, TavlaSpec]} }.
|
1c37edfb109caf3047f4595bc1c94b5fcd8d707bc3ddcb2a57c756bd22498999 | pdarragh/camlrack | camlrack.ml | include Errors
include Sexp
include SexpPatterns
module Tokenize = Tokenize
module Parse = Parse
module Match = Match
module ListConvenienceFunctions = Match.ListConvenienceFunctions
let sexps_of_string (s : string) : (sexp list, Parse.parse_error) result = Parse.parse_many s
let sexps_of_string_exn (s : string) : sexp list = Parse.parse_many_exn s
let sexps_of_string_opt (s : string) : (sexp list) option = Parse.parse_many_opt s
let sexp_patterns_of_string (s : string) : (sexp_pattern list) option =
match sexps_of_string_opt s with
| Some ses ->
let pat_opts = List.map sexp_pattern_of_sexp ses in
if List.for_all Option.is_some pat_opts
then Some (List.map Option.get pat_opts)
else None
| None -> None
let sexp_patterns_of_string_exn (s : string) : sexp_pattern list =
let ses = sexps_of_string_exn s in
let pat_opts = List.map sexp_pattern_of_sexp ses in
if List.for_all Option.is_some pat_opts
then List.map Option.get pat_opts
else raise (CamlrackError "failed to convert one or more S-Expressions to S-Expression patterns")
let sexp_of_string (s : string) : (sexp, Parse.parse_error) result = Parse.parse s
let sexp_of_string_exn (s : string) : sexp = Parse.parse_exn s
let sexp_of_string_opt (s : string) : sexp option = Parse.parse_opt s
let sexp_pattern_of_string (s : string) : sexp_pattern option =
match sexp_of_string_opt s with
| Some se -> sexp_pattern_of_sexp se
| None -> None
let sexp_pattern_of_string_exn (s : string) : sexp_pattern =
match sexp_pattern_of_sexp (sexp_of_string_exn s) with
| Some pat -> pat
| None -> raise (CamlrackError "failed to convert S-Expression to S-Expression pattern")
let sexp_match (p : sexp_pattern) (se : sexp) : bool = Match.sexp_match p se
| null | https://raw.githubusercontent.com/pdarragh/camlrack/b80f35a973529f029c0715b83ce64c1de4e8467d/camlrack/src/camlrack.ml | ocaml | include Errors
include Sexp
include SexpPatterns
module Tokenize = Tokenize
module Parse = Parse
module Match = Match
module ListConvenienceFunctions = Match.ListConvenienceFunctions
let sexps_of_string (s : string) : (sexp list, Parse.parse_error) result = Parse.parse_many s
let sexps_of_string_exn (s : string) : sexp list = Parse.parse_many_exn s
let sexps_of_string_opt (s : string) : (sexp list) option = Parse.parse_many_opt s
let sexp_patterns_of_string (s : string) : (sexp_pattern list) option =
match sexps_of_string_opt s with
| Some ses ->
let pat_opts = List.map sexp_pattern_of_sexp ses in
if List.for_all Option.is_some pat_opts
then Some (List.map Option.get pat_opts)
else None
| None -> None
let sexp_patterns_of_string_exn (s : string) : sexp_pattern list =
let ses = sexps_of_string_exn s in
let pat_opts = List.map sexp_pattern_of_sexp ses in
if List.for_all Option.is_some pat_opts
then List.map Option.get pat_opts
else raise (CamlrackError "failed to convert one or more S-Expressions to S-Expression patterns")
let sexp_of_string (s : string) : (sexp, Parse.parse_error) result = Parse.parse s
let sexp_of_string_exn (s : string) : sexp = Parse.parse_exn s
let sexp_of_string_opt (s : string) : sexp option = Parse.parse_opt s
let sexp_pattern_of_string (s : string) : sexp_pattern option =
match sexp_of_string_opt s with
| Some se -> sexp_pattern_of_sexp se
| None -> None
let sexp_pattern_of_string_exn (s : string) : sexp_pattern =
match sexp_pattern_of_sexp (sexp_of_string_exn s) with
| Some pat -> pat
| None -> raise (CamlrackError "failed to convert S-Expression to S-Expression pattern")
let sexp_match (p : sexp_pattern) (se : sexp) : bool = Match.sexp_match p se
| |
3ecf71ac495363e2f9eaf3a6f9ac9d8f4e28d986aa19768efa18b64552b7f1a1 | billstclair/Lisplog | captcha.lisp | ; -*- mode: lisp -*-
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
Captcha logic
;;;
(in-package :lisplog)
(defstruct (captcha (:constructor %make-captcha))
query-explanation
query-html
response-size
hidden-value)
(defun captcha-values (captcha)
(values (captcha-query-explanation captcha)
(captcha-query-html captcha)
(captcha-response-size captcha)
(captcha-hidden-value captcha)))
(defun read-captcha-state (&optional (db *data-db*))
(sexp-get db $CAPTCHA $CAPTCHA :subdirs-p nil))
(defun (setf read-captcha-state) (state &optional (db *data-db*))
(setf (sexp-get db $CAPTCHA $CAPTCHA :subdirs-p nil) state))
New random seed every 10 minutes
(defparameter *captcha-valid-time* (* 60 10))
(defun make-captcha-seed ()
(format nil "~x" (cl-crypto:get-random-bits 160)))
(defun parse-hex (string)
(parse-integer string :radix 16))
(defun update-captcha-state (&optional (db *data-db*))
(let* ((state (read-captcha-state db))
(time (getf state :time))
(seed (getf state :seed))
(last-time (getf state :last-time))
(last-seed (getf state :last-seed))
(now (get-unix-time)))
(when (or (null time) (< time (- now *captcha-valid-time*)))
(let ((new-seed (make-captcha-seed)))
(setf last-time (or time now)
last-seed (or seed new-seed)
time now
seed new-seed
(getf state :last-time) last-time
(getf state :last-seed) last-seed
(getf state :time) time
(getf state :seed) seed
(read-captcha-state db) state)))
(values time seed last-time last-seed)))
(defun get-captcha-seed (timestamp &optional (db *data-db*))
(unless (< timestamp (- (get-unix-time) *captcha-valid-time*))
(multiple-value-bind (time seed last-time last-seed)
(update-captcha-state db)
(declare (ignore last-time))
(cond ((< timestamp time) last-seed)
(t seed)))))
(defun make-captcha (&optional (db *data-db*))
(cl-crypto:with-random-byte-stream
(multiple-value-bind (time seed) (update-captcha-state db)
(let* ((x (1+ (cl-crypto:random-integer 9)))
(y (1+ (cl-crypto:random-integer 9)))
(opnum (cl-crypto:random-integer 3))
(op (cond ((eql opnum 0) '+)
((eql opnum 1)
(when (< x y) (rotatef x y))
'-)
(t '*)))
(opname (if (eq op '*) "x" op))
(res (funcall op x y))
(query (format nil "~d ~a ~d = " x opname y))
(seed-int (parse-hex seed))
(res-hash (cl-crypto:sha1 (format nil "~d" res)))
(res-int (parse-hex res-hash))
(hidden-int (logxor seed-int res-int))
(hidden-hash (cl-crypto:sha1 (format nil "~x" hidden-int))))
(%make-captcha
:query-explanation "Solve the simple arithmetic problem."
:query-html query
:response-size 4
:hidden-value (format nil "~a+~a" time hidden-hash))))))
(defun validate-captcha (res hidden-value &optional (db *data-db*))
(check-type res string)
(check-type hidden-value string)
(let* ((pos (position #\+ hidden-value))
(time-str (and pos (subseq hidden-value 0 pos)))
(timestamp (ignore-errors (parse-integer time-str)))
(seed (and timestamp (get-captcha-seed timestamp db)))
(seed-int (and seed (parse-hex seed)))
(hidden-hash (and pos (subseq hidden-value (1+ pos))))
(res-hash (cl-crypto:sha1 res))
(res-int (parse-hex res-hash)))
(cond ((and seed-int hidden-hash)
(let ((hidden-int (logxor seed-int res-int)))
(equal hidden-hash
(cl-crypto:sha1 (format nil "~x" hidden-int)))))
(t (values nil :timeout)))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
Copyright 2011 Bill St. Clair
;;;
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
;;; you may not use this file except in compliance with the License.
;;; You may obtain a copy of the License at
;;;
;;; -2.0
;;;
;;; Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
;;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;;; See the License for the specific language governing permissions
;;; and limitations under the License.
;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
| null | https://raw.githubusercontent.com/billstclair/Lisplog/827e737062b4873ecf86c255649d44aca4b4f48f/src/captcha.lisp | lisp | -*- mode: lisp -*-
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions
and limitations under the License.
|
Captcha logic
(in-package :lisplog)
(defstruct (captcha (:constructor %make-captcha))
query-explanation
query-html
response-size
hidden-value)
(defun captcha-values (captcha)
(values (captcha-query-explanation captcha)
(captcha-query-html captcha)
(captcha-response-size captcha)
(captcha-hidden-value captcha)))
(defun read-captcha-state (&optional (db *data-db*))
(sexp-get db $CAPTCHA $CAPTCHA :subdirs-p nil))
(defun (setf read-captcha-state) (state &optional (db *data-db*))
(setf (sexp-get db $CAPTCHA $CAPTCHA :subdirs-p nil) state))
New random seed every 10 minutes
(defparameter *captcha-valid-time* (* 60 10))
(defun make-captcha-seed ()
(format nil "~x" (cl-crypto:get-random-bits 160)))
(defun parse-hex (string)
(parse-integer string :radix 16))
(defun update-captcha-state (&optional (db *data-db*))
(let* ((state (read-captcha-state db))
(time (getf state :time))
(seed (getf state :seed))
(last-time (getf state :last-time))
(last-seed (getf state :last-seed))
(now (get-unix-time)))
(when (or (null time) (< time (- now *captcha-valid-time*)))
(let ((new-seed (make-captcha-seed)))
(setf last-time (or time now)
last-seed (or seed new-seed)
time now
seed new-seed
(getf state :last-time) last-time
(getf state :last-seed) last-seed
(getf state :time) time
(getf state :seed) seed
(read-captcha-state db) state)))
(values time seed last-time last-seed)))
(defun get-captcha-seed (timestamp &optional (db *data-db*))
(unless (< timestamp (- (get-unix-time) *captcha-valid-time*))
(multiple-value-bind (time seed last-time last-seed)
(update-captcha-state db)
(declare (ignore last-time))
(cond ((< timestamp time) last-seed)
(t seed)))))
(defun make-captcha (&optional (db *data-db*))
(cl-crypto:with-random-byte-stream
(multiple-value-bind (time seed) (update-captcha-state db)
(let* ((x (1+ (cl-crypto:random-integer 9)))
(y (1+ (cl-crypto:random-integer 9)))
(opnum (cl-crypto:random-integer 3))
(op (cond ((eql opnum 0) '+)
((eql opnum 1)
(when (< x y) (rotatef x y))
'-)
(t '*)))
(opname (if (eq op '*) "x" op))
(res (funcall op x y))
(query (format nil "~d ~a ~d = " x opname y))
(seed-int (parse-hex seed))
(res-hash (cl-crypto:sha1 (format nil "~d" res)))
(res-int (parse-hex res-hash))
(hidden-int (logxor seed-int res-int))
(hidden-hash (cl-crypto:sha1 (format nil "~x" hidden-int))))
(%make-captcha
:query-explanation "Solve the simple arithmetic problem."
:query-html query
:response-size 4
:hidden-value (format nil "~a+~a" time hidden-hash))))))
(defun validate-captcha (res hidden-value &optional (db *data-db*))
(check-type res string)
(check-type hidden-value string)
(let* ((pos (position #\+ hidden-value))
(time-str (and pos (subseq hidden-value 0 pos)))
(timestamp (ignore-errors (parse-integer time-str)))
(seed (and timestamp (get-captcha-seed timestamp db)))
(seed-int (and seed (parse-hex seed)))
(hidden-hash (and pos (subseq hidden-value (1+ pos))))
(res-hash (cl-crypto:sha1 res))
(res-int (parse-hex res-hash)))
(cond ((and seed-int hidden-hash)
(let ((hidden-int (logxor seed-int res-int)))
(equal hidden-hash
(cl-crypto:sha1 (format nil "~x" hidden-int)))))
(t (values nil :timeout)))))
Copyright 2011 Bill St. Clair
distributed under the License is distributed on an " AS IS " BASIS ,
|
0d3104e68cace913bf622be411ad271038f29b621047cc7d247fde78f50ef6db | lilyball/projecteuler-ocaml | prob9.ml |
A Pythagorean triplet is a set of three natural numbers , a b c , for which ,
a² + b² = c²
For example , 3² + 4² = 9 + 16 = 25 = 5² .
There exists exactly one Pythagorean triplet for which a + b + c = 1000 .
Find the product abc .
A Pythagorean triplet is a set of three natural numbers, a b c, for which,
a² + b² = c²
For example, 3² + 4² = 9 + 16 = 25 = 5².
There exists exactly one Pythagorean triplet for which a + b + c = 1000.
Find the product abc.
*)
(* this is extremely brute-force. It doesn't even try and calculate c from a and b. *)
let rec loop a b c =
if a = 999 then failwith "no answer"
else if b = 999 then loop (succ a) (succ a) (succ a)
else if c = 999 then loop a (succ b) (succ b)
else if (a * a) + (b * b) = (c * c) && a + b + c = 1000 then a * b * c
else loop a b (succ c)
let _ =
print_int (loop 1 1 1);
print_newline ()
| null | https://raw.githubusercontent.com/lilyball/projecteuler-ocaml/a88ed8355b565ad0726cfcac4916d2b80512da7a/prob9.ml | ocaml | this is extremely brute-force. It doesn't even try and calculate c from a and b. |
A Pythagorean triplet is a set of three natural numbers , a b c , for which ,
a² + b² = c²
For example , 3² + 4² = 9 + 16 = 25 = 5² .
There exists exactly one Pythagorean triplet for which a + b + c = 1000 .
Find the product abc .
A Pythagorean triplet is a set of three natural numbers, a b c, for which,
a² + b² = c²
For example, 3² + 4² = 9 + 16 = 25 = 5².
There exists exactly one Pythagorean triplet for which a + b + c = 1000.
Find the product abc.
*)
let rec loop a b c =
if a = 999 then failwith "no answer"
else if b = 999 then loop (succ a) (succ a) (succ a)
else if c = 999 then loop a (succ b) (succ b)
else if (a * a) + (b * b) = (c * c) && a + b + c = 1000 then a * b * c
else loop a b (succ c)
let _ =
print_int (loop 1 1 1);
print_newline ()
|
26193fbd51405662b98fb891fca416c687bf1ad8a4c3955b901906e710707209 | ku-fpg/sized-types | Matrix.hs | -- | Sized matrixes.
--
Copyright : ( c ) 2013 University of Kansas
-- License: BSD3
--
Maintainer : < >
-- Stability: unstable
Portability : ghc
# LANGUAGE TypeFamilies , RankNTypes , FlexibleInstances , ScopedTypeVariables ,
UndecidableInstances , MultiParamTypeClasses , TypeOperators , DataKinds , FlexibleContexts , DeriveDataTypeable #
UndecidableInstances, MultiParamTypeClasses, TypeOperators, DataKinds, FlexibleContexts, DeriveDataTypeable #-}
module Data.Sized.Matrix where
import Prelude as P hiding (all)
import Control.Applicative
import qualified Data.Traversable as T
import qualified Data.Foldable as F
import qualified Data.List as L hiding (all)
import Data.Array.Base as B
import Data.Array.IArray as I
import GHC.TypeLits
import Data.Typeable
import Numeric
import Data.Sized.Fin
-- | A 'Matrix' is an array with the size determined uniquely by the
-- /type/ of the index type, 'ix', with every type in 'ix' used.
newtype Matrix ix a = Matrix (Array ix a)
deriving (Typeable, Eq, Ord)
-- | A 'Vector' is a 1D Matrix, using a TypeNat to define its length.
type Vector (ix :: Nat) a = Matrix (Fin ix) a
| A ' Vector2 ' is a 2D Matrix , using a TypeNat 's to define its size .
type Vector2 (ix :: Nat) (iy :: Nat) a = Matrix (Fin ix,Fin iy) a
instance (Ix ix) => Functor (Matrix ix) where
fmap f (Matrix xs) = Matrix (fmap f xs)
instance IArray Matrix a where
bounds (Matrix arr) = B.bounds arr
numElements (Matrix arr) = B.numElements arr
unsafeArray (a,b) ass = Matrix $ B.unsafeArray (a,b) ass
unsafeAt (Matrix arr) i = B.unsafeAt arr i
instance (Bounded i, Ix i) => Applicative (Matrix i) where
pure a = fmap (const a) coord -- possible because we are a fixed size
-- Also why use use newtype here.
a <*> b = forAll $ \ i -> (a ! i) (b ! i)
-- | 'matrix' turns a finite list into a matrix. You often need to give the type of the result.
matrix :: forall i a . (Bounded i, Ix i) => [a] -> Matrix i a
matrix xs | size' == fromIntegral (L.length xs) = I.listArray (low,high) xs
| otherwise = error $ "bad length of fromList for Matrix, "
++ "expecting " ++ show size' ++ " elements"
++ ", found " ++ show (L.length xs) ++ " elements."
where
size' = rangeSize (low,high)
low :: i
low = minBound
high :: i
high = maxBound
-- | what is the population of a matrix?
population :: forall i a . (Bounded i, Ix i) => Matrix i a -> Int
population _ = rangeSize (minBound :: i,maxBound)
allIndices :: (Bounded i, Ix i) => Matrix i a -> [i]
allIndices _ = universe
-- | 'zeroOf' is for use to force typing issues, and is 0.
zeroOf :: (Bounded i, Ix i) => Matrix i a -> i
zeroOf _ = minBound
-- | 'coord' returns a matrix filled with indexes.
coord :: (Bounded i, Ix i) => Matrix i i
coord = matrix universe
-- | Same as for lists.
zipWith :: (Bounded i, Ix i) => (a -> b -> c) -> Matrix i a -> Matrix i b -> Matrix i c
zipWith f a b = forAll $ \ i -> f (a ! i) (b ! i)
-- | 'forEach' takes a matrix, and calls a function for each element, to give a new matrix of the same size.
forEach :: (Bounded i, Ix i) => Matrix i a -> (i -> a -> b) -> Matrix i b
forEach a f = Data.Sized.Matrix.zipWith f coord a
-- | 'forAll' creates a matrix out of a mapping from the coordinates.
forAll :: (Bounded i, Ix i) => (i -> a) -> Matrix i a
forAll f = fmap f coord
-- | 'mm' is the 2D matrix multiply.
mm :: (Bounded m, Ix m, Bounded n, Ix n, Bounded o, Ix o, Num a) => Matrix (m,n) a -> Matrix (n,o) a -> Matrix (m,o) a
mm a b = forAll $ \ (i,j) -> sum [ a ! (i,r) * b ! (r,j) | r <- universe ]
-- | 'transpose' a 2D matrix.
transpose :: (Bounded x, Ix x, Bounded y, Ix y) => Matrix (x,y) a -> Matrix (y,x) a
transpose = ixmap corners $ \ (x,y) -> (y,x)
-- | return the identity for a specific matrix size.
identity :: (Bounded x, Ix x, Num a) => Matrix (x,x) a
identity = (\ (x,y) -> if x == y then 1 else 0) <$> coord
| append to 1D vectors
append :: (SingI left, SingI right, SingI (left + right))
=> Vector left a -> Vector right a -> Vector (left + right) a
append m1 m2 = matrix (I.elems m1 ++ I.elems m2)
TODO . Is the type constraint for ' both ' sufficient ?
-- In an earlier version we had:
-- , ADD top bottom ~ both
-- , SUB both top ~ bottom
-- , SUB both bottom ~ top
| stack two matrixes ' above ' each other .
above :: (SingI top, SingI bottom, SingI y, SingI (top + bottom))
=> Vector2 top y a -> Vector2 bottom y a -> Vector2 (top + bottom) y a
above m1 m2 = matrix (I.elems m1 ++ I.elems m2)
| stack two matrixes ' beside ' each other .
beside :: (SingI left, SingI right, SingI x, SingI (left + right))
=> Vector2 x left a -> Vector2 x right a -> Vector2 x (left + right) a
beside m1 m2 = transpose (transpose m1 `above` transpose m2)
-- | look at a matrix through a functor lens, to another matrix.
ixfmap :: (Bounded i, Ix i, Bounded j, Ix j, Functor f) => (i -> f j) -> Matrix j a -> Matrix i (f a)
ixfmap f m = (fmap (\ j -> m ! j) . f) <$> coord
FIXME . This is difficult to do with the simplifications appearing Sized .
-- The Index class no longer exists (which required addIndex)
-- Is this required ???
-- | grab /part/ of a matrix.
--cropAt :: (Index i ~ Index ix, Bounded i, Ix i, Bounded ix, Ix ix) => Matrix ix a -> ix -> Matrix i a
cropAt m corner ( \ i - > ( addIndex corner ( toIndex i ) ) ) m
-- | slice a 2D matrix into rows.
rows :: (Bounded n, Ix n, Bounded m, Ix m) => Matrix (m,n) a -> Matrix m (Matrix n a)
rows a = (\ m -> matrix [ a ! (m,n) | n <- universe ]) <$> coord
-- | slice a 2D matrix into columns.
columns :: (Bounded n, Ix n, Bounded m, Ix m) => Matrix (m,n) a -> Matrix n (Matrix m a)
columns = rows . transpose
-- | join a matrix of matrixes into a single matrix.
joinRows :: (Bounded n, Ix n, Bounded m, Ix m) => Matrix m (Matrix n a) -> Matrix (m,n) a
joinRows a = (\ (m,n) -> (a ! m) ! n) <$> coord
-- | join a matrix of matrixes into a single matrix.
joinColumns :: (Bounded n, Ix n, Bounded m, Ix m) => Matrix n (Matrix m a) -> Matrix (m,n) a
joinColumns a = (\ (m,n) -> (a ! n) ! m) <$> coord
instance (Bounded ix, Ix ix) => T.Traversable (Matrix ix) where
traverse f a = matrix <$> (T.traverse f $ I.elems a)
instance (Bounded ix, Ix ix) => F.Foldable (Matrix ix) where
foldMap f m = F.foldMap f (I.elems m)
-- | 'show2D' displays a 2D matrix, and is the worker for 'show'.
--
> GHCi > matrix [ 1 .. 42 ] : : Matrix ( Fin 7 , Fin 6 ) Int
> [ 1 , 2 , 3 , 4 , 5 , 6 ,
> 7 , 8 , 9 , 10 , 11 , 12 ,
> 13 , 14 , 15 , 16 , 17 , 18 ,
> 19 , 20 , 21 , 22 , 23 , 24 ,
> 25 , 26 , 27 , 28 , 29 , 30 ,
> 31 , 32 , 33 , 34 , 35 , 36 ,
> 37 , 38 , 39 , 40 , 41 , 42 ]
-- >
show2D :: (Bounded n, Ix n, Bounded m, Ix m, Show a) => Matrix (m, n) a -> String
show2D m0 = (joinLines $ map showRow m_rows)
where
m = fmap show m0
m' = forEach m $ \ (x,y) a -> (x == maxBound && y == maxBound,a)
joinLines = unlines . addTail . L.zipWith (++) ("[":repeat " ")
addTail xs = init xs ++ [last xs ++ " ]"]
showRow r = concat (I.elems $ Data.Sized.Matrix.zipWith showEle r m_cols_size)
showEle (f,str) s = take (s - L.length str) (cycle " ") ++ " " ++ str ++ (if f then "" else ",")
m_cols = columns m
m_rows = I.elems $ rows m'
m_cols_size = fmap (maximum . map L.length . I.elems) m_cols
instance (Show a, Show ix, Bounded ix, Ix ix) => Show (Matrix ix a) where
show m = "matrix " ++ show (I.bounds m) ++ " " ++ show (I.elems m)
-- TODO: read instance
-- | 'S' is shown as the contents, without the quotes.
-- One use is a matrix of S, so that you can do show-style functions
-- using fmap.
newtype S = S String
instance Show S where
show (S s) = s
showAsE :: (RealFloat a) => Int -> a -> S
showAsE i a = S $ showEFloat (Just i) a ""
showAsF :: (RealFloat a) => Int -> a -> S
showAsF i a = S $ showFFloat (Just i) a ""
| null | https://raw.githubusercontent.com/ku-fpg/sized-types/77e8931d1758085ae76d67e9054549e8e75bd595/Data/Sized/Matrix.hs | haskell | | Sized matrixes.
License: BSD3
Stability: unstable
| A 'Matrix' is an array with the size determined uniquely by the
/type/ of the index type, 'ix', with every type in 'ix' used.
| A 'Vector' is a 1D Matrix, using a TypeNat to define its length.
possible because we are a fixed size
Also why use use newtype here.
| 'matrix' turns a finite list into a matrix. You often need to give the type of the result.
| what is the population of a matrix?
| 'zeroOf' is for use to force typing issues, and is 0.
| 'coord' returns a matrix filled with indexes.
| Same as for lists.
| 'forEach' takes a matrix, and calls a function for each element, to give a new matrix of the same size.
| 'forAll' creates a matrix out of a mapping from the coordinates.
| 'mm' is the 2D matrix multiply.
| 'transpose' a 2D matrix.
| return the identity for a specific matrix size.
In an earlier version we had:
, ADD top bottom ~ both
, SUB both top ~ bottom
, SUB both bottom ~ top
| look at a matrix through a functor lens, to another matrix.
The Index class no longer exists (which required addIndex)
Is this required ???
| grab /part/ of a matrix.
cropAt :: (Index i ~ Index ix, Bounded i, Ix i, Bounded ix, Ix ix) => Matrix ix a -> ix -> Matrix i a
| slice a 2D matrix into rows.
| slice a 2D matrix into columns.
| join a matrix of matrixes into a single matrix.
| join a matrix of matrixes into a single matrix.
| 'show2D' displays a 2D matrix, and is the worker for 'show'.
>
TODO: read instance
| 'S' is shown as the contents, without the quotes.
One use is a matrix of S, so that you can do show-style functions
using fmap. | Copyright : ( c ) 2013 University of Kansas
Maintainer : < >
Portability : ghc
# LANGUAGE TypeFamilies , RankNTypes , FlexibleInstances , ScopedTypeVariables ,
UndecidableInstances , MultiParamTypeClasses , TypeOperators , DataKinds , FlexibleContexts , DeriveDataTypeable #
UndecidableInstances, MultiParamTypeClasses, TypeOperators, DataKinds, FlexibleContexts, DeriveDataTypeable #-}
module Data.Sized.Matrix where
import Prelude as P hiding (all)
import Control.Applicative
import qualified Data.Traversable as T
import qualified Data.Foldable as F
import qualified Data.List as L hiding (all)
import Data.Array.Base as B
import Data.Array.IArray as I
import GHC.TypeLits
import Data.Typeable
import Numeric
import Data.Sized.Fin
newtype Matrix ix a = Matrix (Array ix a)
deriving (Typeable, Eq, Ord)
type Vector (ix :: Nat) a = Matrix (Fin ix) a
| A ' Vector2 ' is a 2D Matrix , using a TypeNat 's to define its size .
type Vector2 (ix :: Nat) (iy :: Nat) a = Matrix (Fin ix,Fin iy) a
instance (Ix ix) => Functor (Matrix ix) where
fmap f (Matrix xs) = Matrix (fmap f xs)
instance IArray Matrix a where
bounds (Matrix arr) = B.bounds arr
numElements (Matrix arr) = B.numElements arr
unsafeArray (a,b) ass = Matrix $ B.unsafeArray (a,b) ass
unsafeAt (Matrix arr) i = B.unsafeAt arr i
instance (Bounded i, Ix i) => Applicative (Matrix i) where
a <*> b = forAll $ \ i -> (a ! i) (b ! i)
matrix :: forall i a . (Bounded i, Ix i) => [a] -> Matrix i a
matrix xs | size' == fromIntegral (L.length xs) = I.listArray (low,high) xs
| otherwise = error $ "bad length of fromList for Matrix, "
++ "expecting " ++ show size' ++ " elements"
++ ", found " ++ show (L.length xs) ++ " elements."
where
size' = rangeSize (low,high)
low :: i
low = minBound
high :: i
high = maxBound
population :: forall i a . (Bounded i, Ix i) => Matrix i a -> Int
population _ = rangeSize (minBound :: i,maxBound)
allIndices :: (Bounded i, Ix i) => Matrix i a -> [i]
allIndices _ = universe
zeroOf :: (Bounded i, Ix i) => Matrix i a -> i
zeroOf _ = minBound
coord :: (Bounded i, Ix i) => Matrix i i
coord = matrix universe
zipWith :: (Bounded i, Ix i) => (a -> b -> c) -> Matrix i a -> Matrix i b -> Matrix i c
zipWith f a b = forAll $ \ i -> f (a ! i) (b ! i)
forEach :: (Bounded i, Ix i) => Matrix i a -> (i -> a -> b) -> Matrix i b
forEach a f = Data.Sized.Matrix.zipWith f coord a
forAll :: (Bounded i, Ix i) => (i -> a) -> Matrix i a
forAll f = fmap f coord
mm :: (Bounded m, Ix m, Bounded n, Ix n, Bounded o, Ix o, Num a) => Matrix (m,n) a -> Matrix (n,o) a -> Matrix (m,o) a
mm a b = forAll $ \ (i,j) -> sum [ a ! (i,r) * b ! (r,j) | r <- universe ]
transpose :: (Bounded x, Ix x, Bounded y, Ix y) => Matrix (x,y) a -> Matrix (y,x) a
transpose = ixmap corners $ \ (x,y) -> (y,x)
identity :: (Bounded x, Ix x, Num a) => Matrix (x,x) a
identity = (\ (x,y) -> if x == y then 1 else 0) <$> coord
| append to 1D vectors
append :: (SingI left, SingI right, SingI (left + right))
=> Vector left a -> Vector right a -> Vector (left + right) a
append m1 m2 = matrix (I.elems m1 ++ I.elems m2)
TODO . Is the type constraint for ' both ' sufficient ?
| stack two matrixes ' above ' each other .
above :: (SingI top, SingI bottom, SingI y, SingI (top + bottom))
=> Vector2 top y a -> Vector2 bottom y a -> Vector2 (top + bottom) y a
above m1 m2 = matrix (I.elems m1 ++ I.elems m2)
| stack two matrixes ' beside ' each other .
beside :: (SingI left, SingI right, SingI x, SingI (left + right))
=> Vector2 x left a -> Vector2 x right a -> Vector2 x (left + right) a
beside m1 m2 = transpose (transpose m1 `above` transpose m2)
ixfmap :: (Bounded i, Ix i, Bounded j, Ix j, Functor f) => (i -> f j) -> Matrix j a -> Matrix i (f a)
ixfmap f m = (fmap (\ j -> m ! j) . f) <$> coord
FIXME . This is difficult to do with the simplifications appearing Sized .
cropAt m corner ( \ i - > ( addIndex corner ( toIndex i ) ) ) m
rows :: (Bounded n, Ix n, Bounded m, Ix m) => Matrix (m,n) a -> Matrix m (Matrix n a)
rows a = (\ m -> matrix [ a ! (m,n) | n <- universe ]) <$> coord
columns :: (Bounded n, Ix n, Bounded m, Ix m) => Matrix (m,n) a -> Matrix n (Matrix m a)
columns = rows . transpose
joinRows :: (Bounded n, Ix n, Bounded m, Ix m) => Matrix m (Matrix n a) -> Matrix (m,n) a
joinRows a = (\ (m,n) -> (a ! m) ! n) <$> coord
joinColumns :: (Bounded n, Ix n, Bounded m, Ix m) => Matrix n (Matrix m a) -> Matrix (m,n) a
joinColumns a = (\ (m,n) -> (a ! n) ! m) <$> coord
instance (Bounded ix, Ix ix) => T.Traversable (Matrix ix) where
traverse f a = matrix <$> (T.traverse f $ I.elems a)
instance (Bounded ix, Ix ix) => F.Foldable (Matrix ix) where
foldMap f m = F.foldMap f (I.elems m)
> GHCi > matrix [ 1 .. 42 ] : : Matrix ( Fin 7 , Fin 6 ) Int
> [ 1 , 2 , 3 , 4 , 5 , 6 ,
> 7 , 8 , 9 , 10 , 11 , 12 ,
> 13 , 14 , 15 , 16 , 17 , 18 ,
> 19 , 20 , 21 , 22 , 23 , 24 ,
> 25 , 26 , 27 , 28 , 29 , 30 ,
> 31 , 32 , 33 , 34 , 35 , 36 ,
> 37 , 38 , 39 , 40 , 41 , 42 ]
show2D :: (Bounded n, Ix n, Bounded m, Ix m, Show a) => Matrix (m, n) a -> String
show2D m0 = (joinLines $ map showRow m_rows)
where
m = fmap show m0
m' = forEach m $ \ (x,y) a -> (x == maxBound && y == maxBound,a)
joinLines = unlines . addTail . L.zipWith (++) ("[":repeat " ")
addTail xs = init xs ++ [last xs ++ " ]"]
showRow r = concat (I.elems $ Data.Sized.Matrix.zipWith showEle r m_cols_size)
showEle (f,str) s = take (s - L.length str) (cycle " ") ++ " " ++ str ++ (if f then "" else ",")
m_cols = columns m
m_rows = I.elems $ rows m'
m_cols_size = fmap (maximum . map L.length . I.elems) m_cols
instance (Show a, Show ix, Bounded ix, Ix ix) => Show (Matrix ix a) where
show m = "matrix " ++ show (I.bounds m) ++ " " ++ show (I.elems m)
newtype S = S String
instance Show S where
show (S s) = s
showAsE :: (RealFloat a) => Int -> a -> S
showAsE i a = S $ showEFloat (Just i) a ""
showAsF :: (RealFloat a) => Int -> a -> S
showAsF i a = S $ showFFloat (Just i) a ""
|
8489eb53aed7ddc16fd7cc7eb7648d8be5e3abd1f243718203ee00676b0bf761 | spell-music/csound-expression | Ref.hs | {-# Language ScopedTypeVariables, FlexibleContexts #-}
module Csound.Typed.Control.Ref(
Ref(..), writeRef, readRef, newRef, mixRef, modifyRef, sensorsSE, newGlobalRef,
concatRef, concatRef3, concatRef4, concatRef5,
newCtrlRef, newGlobalCtrlRef,
globalSensorsSE, newClearableGlobalRef, newTab, newGlobalTab,
-- conditionals
whileRef, whileRefD
) where
import Data.Boolean
import Data.Proxy
import Control.Monad
import Control.Monad.Trans.Class
import Csound.Dynamic hiding (when1, newLocalVars, writeArr, readArr, whileRef)
import Csound.Typed.Types.Prim
import Csound.Typed.Types.Tuple
import Csound.Typed.GlobalState.SE
import Csound.Typed.GlobalState.GE
import qualified Csound.Dynamic as D
-- | It describes a reference to mutable values.
newtype Ref a = Ref [Var]
{-
{ writeRef :: a -> SE ()
, readRef :: SE a }
-}
writeRef :: Tuple a => Ref a -> a -> SE ()
writeRef (Ref vars) a = fromDep_ $ hideGEinDep $ do
vals <- fromTuple a
return $ zipWithM_ writeVar vars vals
( zipWithM _ vars ) = < < lift ( fromTuple a )
: : Var - > E - > Dep ( )
[ ] ( GE [ E ] )
readRef :: Tuple a => Ref a -> SE a
readRef (Ref vars) = SE $ fmap (toTuple . return) $ mapM readVar vars
-- | Allocates a new local (it is visible within the instrument) mutable value and initializes it with value.
-- A reference can contain a tuple of variables.
newRef :: forall a. Tuple a => a -> SE (Ref a)
newRef t = fmap Ref $ newLocalVars (tupleRates (Proxy :: Proxy a)) (fromTuple t)
-- | Allocates a new local (it is visible within the instrument) mutable value and initializes it with value.
-- A reference can contain a tuple of variables.
-- It contains control signals (k-rate) and constants for numbers (i-rates).
newCtrlRef :: forall a. Tuple a => a -> SE (Ref a)
newCtrlRef t = fmap Ref $ newLocalVars (fmap toCtrlRate $ tupleRates (Proxy :: Proxy a)) (fromTuple t)
toCtrlRate :: Rate -> Rate
toCtrlRate x = case x of
Ar -> Kr
_ -> x
concatRef :: (Tuple a, Tuple b) => Ref a -> Ref b -> Ref (a, b)
concatRef (Ref a) (Ref b) = Ref (a ++ b)
concatRef3 :: (Tuple a, Tuple b, Tuple c) => Ref a -> Ref b -> Ref c -> Ref (a, b, c)
concatRef3 (Ref a) (Ref b) (Ref c) = Ref (a ++ b ++ c)
concatRef4 :: (Tuple a, Tuple b, Tuple c, Tuple d) => Ref a -> Ref b -> Ref c -> Ref d -> Ref (a, b, c, d)
concatRef4 (Ref a) (Ref b) (Ref c) (Ref d) = Ref (a ++ b ++ c ++ d)
concatRef5 :: (Tuple a, Tuple b, Tuple c, Tuple d, Tuple e) => Ref a -> Ref b -> Ref c -> Ref d -> Ref e -> Ref (a, b, c, d, e)
concatRef5 (Ref a) (Ref b) (Ref c) (Ref d) (Ref e) = Ref (a ++ b ++ c ++ d ++ e)
-- | Adds the given signal to the value that is contained in the
-- reference.
mixRef :: (Num a, Tuple a) => Ref a -> a -> SE ()
mixRef ref asig = modifyRef ref (+ asig)
| Modifies the Ref value with given function .
modifyRef :: Tuple a => Ref a -> (a -> a) -> SE ()
modifyRef ref f = do
v <- readRef ref
writeRef ref (f v)
-- | An alias for the function @newRef@. It returns not the reference
-- to mutable value but a pair of reader and writer functions.
sensorsSE :: Tuple a => a -> SE (SE a, a -> SE ())
sensorsSE a = do
ref <- newCtrlRef a
return $ (readRef ref, writeRef ref)
-- | Allocates a new global mutable value and initializes it with value.
-- A reference can contain a tuple of variables.
newGlobalRef :: forall a. Tuple a => a -> SE (Ref a)
newGlobalRef t = fmap Ref $ newGlobalVars (tupleRates (Proxy :: Proxy a)) (fromTuple t)
-- | Allocates a new global mutable value and initializes it with value.
-- A reference can contain a tuple of variables.
-- It contains control signals (k-rate) and constants for numbers (i-rates).
newGlobalCtrlRef :: forall a . Tuple a => a -> SE (Ref a)
newGlobalCtrlRef t = fmap Ref $ newGlobalVars (fmap toCtrlRate $ tupleRates (Proxy :: Proxy a)) (fromTuple t)
-- | An alias for the function @newRef@. It returns not the reference
-- to mutable value but a pair of reader and writer functions.
globalSensorsSE :: Tuple a => a -> SE (SE a, a -> SE ())
globalSensorsSE a = do
ref <- newRef a
return $ (readRef ref, writeRef ref)
-- | Allocates a new clearable global mutable value and initializes it with value.
-- A reference can contain a tuple of variables.
The variable is set to zero at the end of every iteration .
-- It's useful for accumulation of audio values from several instruments.
newClearableGlobalRef :: forall a . Tuple a => a -> SE (Ref a)
newClearableGlobalRef t = fmap Ref $ newClearableGlobalVars (tupleRates (Proxy :: Proxy a)) (fromTuple t)
-------------------------------------------------------------------------------
-- writable tables
| Creates a new table . The could be used while the instrument
-- is playing. When the instrument is retriggered the new tab is allocated.
--
-- > newTab size
newTab :: D -> SE Tab
newTab size = ftgentmp 0 0 size 7 0 [size, 0]
-- | Creates a new global table.
-- It's generated only once. It's persisted between instrument calls.
--
-- > newGlobalTab identifier size
newGlobalTab :: Int -> SE Tab
newGlobalTab size = do
ref <- newGlobalCtrlRef ((fromGE $ saveWriteTab size) :: D)
fmap (fromGE . toGE) $ readRef ref
-----------------------------------------------------------------------
-- some opcodes that I have to define upfront
-- |
-- Generate a score function table from within the orchestra, which is deleted at the end of the note.
--
-- Generate a score function table from within the orchestra,
-- which is optionally deleted at the end of the note.
--
> ifno ftgentmp ip1 , ip2dummy , isize , , , iargb , ...
--
-- csound doc: <>
ftgentmp :: D -> D -> D -> D -> D -> [D] -> SE Tab
ftgentmp b1 b2 b3 b4 b5 b6 = fmap ( Tab . return) $ SE $ (depT =<<) $ lift $ f <$> unD b1 <*> unD b2 <*> unD b3 <*> unD b4 <*> unD b5 <*> mapM unD b6
where f a1 a2 a3 a4 a5 a6 = opcs "ftgentmp" [(Ir,(repeat Ir))] ([a1,a2,a3,a4,a5] ++ a6)
--------------------------------------------------------------------
whileRef :: forall st . Tuple st => st -> (st -> SE BoolSig) -> (st -> SE st) -> SE ()
whileRef initVal c body = do
refSt <- newCtrlRef initVal
refCond <- newRef =<< condSig =<< readRef refSt
whileRefBegin refCond
writeRef refSt =<< body =<< readRef refSt
writeRef refCond =<< condSig =<< readRef refSt
fromDep_ whileEnd
where
condSig :: st -> SE Sig
condSig = fmap (\b -> ifB b 1 0) . c
whileRefD :: forall st . Tuple st => st -> (st -> SE BoolD) -> (st -> SE st) -> SE ()
whileRefD initVal c body = do
refSt <- newCtrlRef initVal
refCond <- newRef =<< condSig =<< readRef refSt
whileRefBegin refCond
writeRef refSt =<< body =<< readRef refSt
writeRef refCond =<< condSig =<< readRef refSt
fromDep_ whileEnd
where
condSig :: st -> SE D
condSig = fmap (\b -> ifB b 1 0) . c
whileRefBegin :: SigOrD a => Ref a -> SE ()
whileRefBegin (Ref vars) = fromDep_ $ D.whileRef $ head vars
| null | https://raw.githubusercontent.com/spell-music/csound-expression/e384b7e3f69345bbe236730dee2fb0864d575259/csound-expression-typed/src/Csound/Typed/Control/Ref.hs | haskell | # Language ScopedTypeVariables, FlexibleContexts #
conditionals
| It describes a reference to mutable values.
{ writeRef :: a -> SE ()
, readRef :: SE a }
| Allocates a new local (it is visible within the instrument) mutable value and initializes it with value.
A reference can contain a tuple of variables.
| Allocates a new local (it is visible within the instrument) mutable value and initializes it with value.
A reference can contain a tuple of variables.
It contains control signals (k-rate) and constants for numbers (i-rates).
| Adds the given signal to the value that is contained in the
reference.
| An alias for the function @newRef@. It returns not the reference
to mutable value but a pair of reader and writer functions.
| Allocates a new global mutable value and initializes it with value.
A reference can contain a tuple of variables.
| Allocates a new global mutable value and initializes it with value.
A reference can contain a tuple of variables.
It contains control signals (k-rate) and constants for numbers (i-rates).
| An alias for the function @newRef@. It returns not the reference
to mutable value but a pair of reader and writer functions.
| Allocates a new clearable global mutable value and initializes it with value.
A reference can contain a tuple of variables.
It's useful for accumulation of audio values from several instruments.
-----------------------------------------------------------------------------
writable tables
is playing. When the instrument is retriggered the new tab is allocated.
> newTab size
| Creates a new global table.
It's generated only once. It's persisted between instrument calls.
> newGlobalTab identifier size
---------------------------------------------------------------------
some opcodes that I have to define upfront
|
Generate a score function table from within the orchestra, which is deleted at the end of the note.
Generate a score function table from within the orchestra,
which is optionally deleted at the end of the note.
csound doc: <>
------------------------------------------------------------------ | module Csound.Typed.Control.Ref(
Ref(..), writeRef, readRef, newRef, mixRef, modifyRef, sensorsSE, newGlobalRef,
concatRef, concatRef3, concatRef4, concatRef5,
newCtrlRef, newGlobalCtrlRef,
globalSensorsSE, newClearableGlobalRef, newTab, newGlobalTab,
whileRef, whileRefD
) where
import Data.Boolean
import Data.Proxy
import Control.Monad
import Control.Monad.Trans.Class
import Csound.Dynamic hiding (when1, newLocalVars, writeArr, readArr, whileRef)
import Csound.Typed.Types.Prim
import Csound.Typed.Types.Tuple
import Csound.Typed.GlobalState.SE
import Csound.Typed.GlobalState.GE
import qualified Csound.Dynamic as D
newtype Ref a = Ref [Var]
writeRef :: Tuple a => Ref a -> a -> SE ()
writeRef (Ref vars) a = fromDep_ $ hideGEinDep $ do
vals <- fromTuple a
return $ zipWithM_ writeVar vars vals
( zipWithM _ vars ) = < < lift ( fromTuple a )
: : Var - > E - > Dep ( )
[ ] ( GE [ E ] )
readRef :: Tuple a => Ref a -> SE a
readRef (Ref vars) = SE $ fmap (toTuple . return) $ mapM readVar vars
newRef :: forall a. Tuple a => a -> SE (Ref a)
newRef t = fmap Ref $ newLocalVars (tupleRates (Proxy :: Proxy a)) (fromTuple t)
newCtrlRef :: forall a. Tuple a => a -> SE (Ref a)
newCtrlRef t = fmap Ref $ newLocalVars (fmap toCtrlRate $ tupleRates (Proxy :: Proxy a)) (fromTuple t)
toCtrlRate :: Rate -> Rate
toCtrlRate x = case x of
Ar -> Kr
_ -> x
concatRef :: (Tuple a, Tuple b) => Ref a -> Ref b -> Ref (a, b)
concatRef (Ref a) (Ref b) = Ref (a ++ b)
concatRef3 :: (Tuple a, Tuple b, Tuple c) => Ref a -> Ref b -> Ref c -> Ref (a, b, c)
concatRef3 (Ref a) (Ref b) (Ref c) = Ref (a ++ b ++ c)
concatRef4 :: (Tuple a, Tuple b, Tuple c, Tuple d) => Ref a -> Ref b -> Ref c -> Ref d -> Ref (a, b, c, d)
concatRef4 (Ref a) (Ref b) (Ref c) (Ref d) = Ref (a ++ b ++ c ++ d)
concatRef5 :: (Tuple a, Tuple b, Tuple c, Tuple d, Tuple e) => Ref a -> Ref b -> Ref c -> Ref d -> Ref e -> Ref (a, b, c, d, e)
concatRef5 (Ref a) (Ref b) (Ref c) (Ref d) (Ref e) = Ref (a ++ b ++ c ++ d ++ e)
mixRef :: (Num a, Tuple a) => Ref a -> a -> SE ()
mixRef ref asig = modifyRef ref (+ asig)
| Modifies the Ref value with given function .
modifyRef :: Tuple a => Ref a -> (a -> a) -> SE ()
modifyRef ref f = do
v <- readRef ref
writeRef ref (f v)
sensorsSE :: Tuple a => a -> SE (SE a, a -> SE ())
sensorsSE a = do
ref <- newCtrlRef a
return $ (readRef ref, writeRef ref)
newGlobalRef :: forall a. Tuple a => a -> SE (Ref a)
newGlobalRef t = fmap Ref $ newGlobalVars (tupleRates (Proxy :: Proxy a)) (fromTuple t)
newGlobalCtrlRef :: forall a . Tuple a => a -> SE (Ref a)
newGlobalCtrlRef t = fmap Ref $ newGlobalVars (fmap toCtrlRate $ tupleRates (Proxy :: Proxy a)) (fromTuple t)
globalSensorsSE :: Tuple a => a -> SE (SE a, a -> SE ())
globalSensorsSE a = do
ref <- newRef a
return $ (readRef ref, writeRef ref)
The variable is set to zero at the end of every iteration .
newClearableGlobalRef :: forall a . Tuple a => a -> SE (Ref a)
newClearableGlobalRef t = fmap Ref $ newClearableGlobalVars (tupleRates (Proxy :: Proxy a)) (fromTuple t)
| Creates a new table . The could be used while the instrument
newTab :: D -> SE Tab
newTab size = ftgentmp 0 0 size 7 0 [size, 0]
newGlobalTab :: Int -> SE Tab
newGlobalTab size = do
ref <- newGlobalCtrlRef ((fromGE $ saveWriteTab size) :: D)
fmap (fromGE . toGE) $ readRef ref
> ifno ftgentmp ip1 , ip2dummy , isize , , , iargb , ...
ftgentmp :: D -> D -> D -> D -> D -> [D] -> SE Tab
ftgentmp b1 b2 b3 b4 b5 b6 = fmap ( Tab . return) $ SE $ (depT =<<) $ lift $ f <$> unD b1 <*> unD b2 <*> unD b3 <*> unD b4 <*> unD b5 <*> mapM unD b6
where f a1 a2 a3 a4 a5 a6 = opcs "ftgentmp" [(Ir,(repeat Ir))] ([a1,a2,a3,a4,a5] ++ a6)
whileRef :: forall st . Tuple st => st -> (st -> SE BoolSig) -> (st -> SE st) -> SE ()
whileRef initVal c body = do
refSt <- newCtrlRef initVal
refCond <- newRef =<< condSig =<< readRef refSt
whileRefBegin refCond
writeRef refSt =<< body =<< readRef refSt
writeRef refCond =<< condSig =<< readRef refSt
fromDep_ whileEnd
where
condSig :: st -> SE Sig
condSig = fmap (\b -> ifB b 1 0) . c
whileRefD :: forall st . Tuple st => st -> (st -> SE BoolD) -> (st -> SE st) -> SE ()
whileRefD initVal c body = do
refSt <- newCtrlRef initVal
refCond <- newRef =<< condSig =<< readRef refSt
whileRefBegin refCond
writeRef refSt =<< body =<< readRef refSt
writeRef refCond =<< condSig =<< readRef refSt
fromDep_ whileEnd
where
condSig :: st -> SE D
condSig = fmap (\b -> ifB b 1 0) . c
whileRefBegin :: SigOrD a => Ref a -> SE ()
whileRefBegin (Ref vars) = fromDep_ $ D.whileRef $ head vars
|
bcb440bdc4c0af1074a41b2f531c0a2068a874257db5a6a990e503908e8e8354 | Ucombinator/parsing-with-zippers | pwZ_WorklistWithLookahead.ml | (* Simple type aliases.
* Fig 1. *)
type lab = string (* token and sequence labels *)
type tag = int (* token tag, used for lookahead and token comparison *)
type pos = int (* token position in input *)
type tok = lab * tag (* token *)
(* An exception when a match fails. Should never appear.
* This is primarily to suppress warnings in a safe manner. *)
exception FailedMatch
Additional types necessary for using zippers without memoization tables .
* Fig 19 .
* The implementation of m_0 required the definition of a new ` undefined ` value ,
* which is given here . It is essentially a placeholder to be discarded .
* Fig 19.
* The implementation of m_0 required the definition of a new `undefined` value,
* which is given here. It is essentially a placeholder to be discarded. *)
type exp = { mutable m : mem; e : exp'; first : bool array }
and exp' = T of tok
| Seq of lab * exp list
| Alt of (exp list) ref
and cxt = Top
| SeqC of mem * lab * exp list * exp list
| AltC of mem
and mem = {
start : pos;
mutable parents : cxt list;
mutable end_ : pos;
mutable result : exp }
type zipper = exp' * mem
let rec undefined : exp = {
m = m_undefined;
e = T ("undefined", -1);
first = [| |] }
and m_undefined : mem = {
start = -1;
parents = [];
end_ = -1;
result = undefined }
let m_0 : mem = {
start = -1;
parents = [];
end_ = -1;
result = undefined }
(* A global worklist. This is used for keeping track of what to do next. *)
let worklist : (zipper list) ref = ref []
(* A list of "tops", which gives us parse-null of a Top for free. This is useful
* so that in the end we can simply return the result. *)
let tops : exp list ref = ref []
Core algorithm . Similar to Fig 20 , but with additional steps taken for
* performance . Note that the return type is now ` unit ` .
* performance. Note that the return type is now `unit`. *)
let derive (p : pos) ((t, i) : tok) ((e, m) : zipper) : unit =
let rec d_d (c : cxt) (e : exp) : unit =
if p == e.m.start
then (e.m.parents <- c :: e.m.parents;
if p == e.m.end_
then d_u' e.m.result c
else ())
else (let m = { start = p; parents = [c]; end_ = -1; result = undefined } in
e.m <- m;
d_d' m e.e)
and d_d' (m : mem) (e : exp') : unit =
match e with
| T (t', i') ->
if i == i'
then worklist := (Seq (t, []), m) :: !worklist
else ()
| Seq (l, []) -> d_u (Seq (l, [])) m
| Seq (l, e :: es) -> d_d (SeqC (m, l, [], es)) e
| Alt es -> List.iter
(fun e ->
if e.first.(i)
then d_d (AltC m) e
else ())
!es
and d_u (e : exp') (m : mem) : unit =
let e' = { m = m_0; e = e; first = [| |] } in
m.end_ <- p;
m.result <- e';
List.iter (fun c -> d_u' e' c) m.parents
and d_u' (e : exp) (c : cxt) : unit =
match c with
| Top -> tops := e :: !tops
| SeqC (m, l, es, []) -> d_u (Seq (l, List.rev (e :: es))) m
| SeqC (m, l, left, e' :: right) -> d_d (SeqC (m, l, e :: left, right)) e'
| AltC m -> if p == m.end_
then match m.result.e with
| Alt es -> es := e :: !es
| _ -> raise FailedMatch
else d_u (Alt (ref [e])) m
in d_u e m
Here we construct the initial zipper . This allows us to properly traverse the
* grammar from the first step . This construction is similar in spirit to the
* Seq / SeqC pair used on l318 ( near the end of Section 4 ) of the paper .
* grammar from the first step. This construction is similar in spirit to the
* Seq/SeqC pair used on l318 (near the end of Section 4) of the paper. *)
let init_zipper (e : exp) : zipper =
let e' = Seq ("<init_zipper:Seq>", []) in
let m_top : mem = { start = 0; parents = [Top]; end_ = -1; result = undefined } in
let c = SeqC (m_top, "<init_zipper:SeqC>", [], [e]) in
let m_seq : mem = { start = 0; parents = [c]; end_ = -1; result = undefined } in
(e', m_seq)
(* When a result is produced, it will have some vestigial structure remaining
* from the initial zipper (see above). This function removes those extra bits
* so only the important stuff is returned once the parse is complete. *)
let unwrap_top_exp (e : exp) : exp =
match e.e with
| Seq (_, [_; e']) -> e'
| _ -> raise FailedMatch
(* This is our wrapper/driver function. It initializes blank worklist and tops
* lists for each element in the worklist. This allows for a generational style
* of worklist (where "child processes" can each have their own worklist).
*
* The token tag 0 is assumed to be reserved for the end of the input. *)
let parse (ts : tok list) (e : exp) : exp list =
let rec parse (p : pos) (ts : tok list) : exp list =
(let w = !worklist in
worklist := [];
tops := [];
match ts with
| [] -> List.iter (fun z -> derive p ("EOF", 0) z) w;
List.map unwrap_top_exp !tops
| ((t, s) :: ts') ->
List.iter (fun z -> derive p (t, s) z) w;
parse (p + 1) ts')
in worklist := [init_zipper e];
parse 0 ts
| null | https://raw.githubusercontent.com/Ucombinator/parsing-with-zippers/168252c3598f7e704b8484bb469b37aa1a9b0313/pwZ_WorklistWithLookahead.ml | ocaml | Simple type aliases.
* Fig 1.
token and sequence labels
token tag, used for lookahead and token comparison
token position in input
token
An exception when a match fails. Should never appear.
* This is primarily to suppress warnings in a safe manner.
A global worklist. This is used for keeping track of what to do next.
A list of "tops", which gives us parse-null of a Top for free. This is useful
* so that in the end we can simply return the result.
When a result is produced, it will have some vestigial structure remaining
* from the initial zipper (see above). This function removes those extra bits
* so only the important stuff is returned once the parse is complete.
This is our wrapper/driver function. It initializes blank worklist and tops
* lists for each element in the worklist. This allows for a generational style
* of worklist (where "child processes" can each have their own worklist).
*
* The token tag 0 is assumed to be reserved for the end of the input. |
exception FailedMatch
Additional types necessary for using zippers without memoization tables .
* Fig 19 .
* The implementation of m_0 required the definition of a new ` undefined ` value ,
* which is given here . It is essentially a placeholder to be discarded .
* Fig 19.
* The implementation of m_0 required the definition of a new `undefined` value,
* which is given here. It is essentially a placeholder to be discarded. *)
type exp = { mutable m : mem; e : exp'; first : bool array }
and exp' = T of tok
| Seq of lab * exp list
| Alt of (exp list) ref
and cxt = Top
| SeqC of mem * lab * exp list * exp list
| AltC of mem
and mem = {
start : pos;
mutable parents : cxt list;
mutable end_ : pos;
mutable result : exp }
type zipper = exp' * mem
let rec undefined : exp = {
m = m_undefined;
e = T ("undefined", -1);
first = [| |] }
and m_undefined : mem = {
start = -1;
parents = [];
end_ = -1;
result = undefined }
let m_0 : mem = {
start = -1;
parents = [];
end_ = -1;
result = undefined }
let worklist : (zipper list) ref = ref []
let tops : exp list ref = ref []
Core algorithm . Similar to Fig 20 , but with additional steps taken for
* performance . Note that the return type is now ` unit ` .
* performance. Note that the return type is now `unit`. *)
let derive (p : pos) ((t, i) : tok) ((e, m) : zipper) : unit =
let rec d_d (c : cxt) (e : exp) : unit =
if p == e.m.start
then (e.m.parents <- c :: e.m.parents;
if p == e.m.end_
then d_u' e.m.result c
else ())
else (let m = { start = p; parents = [c]; end_ = -1; result = undefined } in
e.m <- m;
d_d' m e.e)
and d_d' (m : mem) (e : exp') : unit =
match e with
| T (t', i') ->
if i == i'
then worklist := (Seq (t, []), m) :: !worklist
else ()
| Seq (l, []) -> d_u (Seq (l, [])) m
| Seq (l, e :: es) -> d_d (SeqC (m, l, [], es)) e
| Alt es -> List.iter
(fun e ->
if e.first.(i)
then d_d (AltC m) e
else ())
!es
and d_u (e : exp') (m : mem) : unit =
let e' = { m = m_0; e = e; first = [| |] } in
m.end_ <- p;
m.result <- e';
List.iter (fun c -> d_u' e' c) m.parents
and d_u' (e : exp) (c : cxt) : unit =
match c with
| Top -> tops := e :: !tops
| SeqC (m, l, es, []) -> d_u (Seq (l, List.rev (e :: es))) m
| SeqC (m, l, left, e' :: right) -> d_d (SeqC (m, l, e :: left, right)) e'
| AltC m -> if p == m.end_
then match m.result.e with
| Alt es -> es := e :: !es
| _ -> raise FailedMatch
else d_u (Alt (ref [e])) m
in d_u e m
Here we construct the initial zipper . This allows us to properly traverse the
* grammar from the first step . This construction is similar in spirit to the
* Seq / SeqC pair used on l318 ( near the end of Section 4 ) of the paper .
* grammar from the first step. This construction is similar in spirit to the
* Seq/SeqC pair used on l318 (near the end of Section 4) of the paper. *)
let init_zipper (e : exp) : zipper =
let e' = Seq ("<init_zipper:Seq>", []) in
let m_top : mem = { start = 0; parents = [Top]; end_ = -1; result = undefined } in
let c = SeqC (m_top, "<init_zipper:SeqC>", [], [e]) in
let m_seq : mem = { start = 0; parents = [c]; end_ = -1; result = undefined } in
(e', m_seq)
let unwrap_top_exp (e : exp) : exp =
match e.e with
| Seq (_, [_; e']) -> e'
| _ -> raise FailedMatch
let parse (ts : tok list) (e : exp) : exp list =
let rec parse (p : pos) (ts : tok list) : exp list =
(let w = !worklist in
worklist := [];
tops := [];
match ts with
| [] -> List.iter (fun z -> derive p ("EOF", 0) z) w;
List.map unwrap_top_exp !tops
| ((t, s) :: ts') ->
List.iter (fun z -> derive p (t, s) z) w;
parse (p + 1) ts')
in worklist := [init_zipper e];
parse 0 ts
|
566edd29d7056d754b988761ea3e863c3ee4181f7f0d932e877f90f00b99e419 | resttime/cl-liballegro | video-streaming.lisp | (in-package #:cl-liballegro)
(defcfun ("al_init_video_addon" init-video-addon) :bool)
(defcfun ("al_is_video_addon_initialized" is-init-video-addon-initialized) :bool)
(defcfun ("al_shutdown_video_addon" shutdown-video-addon) :void)
(defcfun ("al_get_allegro_video_version" get-allegro-video-version) :uint32)
(defcfun ("al_open_video" open-video) :pointer (filename :string))
(defcfun ("al_identify_video" identify-video) :string (filename :string))
(defcfun ("al_identify_video_f" identify-video-f) :string (fp :pointer))
(defcfun ("al_close_video" close-video) :void (video :pointer))
(defcfun ("al_start_video" start-video) :void (video :pointer) (mixer :pointer))
(defcfun ("al_start_video_with_voice" start-video-with-voice) :void
(video :pointer) (voice :pointer) )
(defcfun ("al_get_video_event_source" get-video-event-source) :pointer
(video :pointer))
(defcfun ("al_set_video_playing" set-video-playing) :void
(video :pointer) (play :bool))
(defcfun ("al_is_video_playing" is-video-playing) :bool (video :pointer))
(defcfun ("al_get_video_audio_rate" get-video-audio-rate) :double
(video :pointer))
(defcfun ("al_get_video_fps" get-video-fps) :double (video :pointer))
(defcfun ("al_get_video_scaled_width" get-video-scaled-width) :float
(video :pointer))
(defcfun ("al_get_video_scaled_height" get-video-scaled-height) :float
(video :pointer))
(defcfun ("al_get_video_frame" get-video-frame) :pointer (video :pointer))
(defcfun ("al_get_video_position" get-video-position) :double
(video :pointer) (which video-position-type))
(defcfun ("al_seek_video" seek-video) :bool
(video :pointer) (pos-in-seconds :double))
| null | https://raw.githubusercontent.com/resttime/cl-liballegro/4f0797db464aaef32e4bf6d87c2baf3ca71534c4/src/ffi-functions/addons/video-streaming.lisp | lisp | (in-package #:cl-liballegro)
(defcfun ("al_init_video_addon" init-video-addon) :bool)
(defcfun ("al_is_video_addon_initialized" is-init-video-addon-initialized) :bool)
(defcfun ("al_shutdown_video_addon" shutdown-video-addon) :void)
(defcfun ("al_get_allegro_video_version" get-allegro-video-version) :uint32)
(defcfun ("al_open_video" open-video) :pointer (filename :string))
(defcfun ("al_identify_video" identify-video) :string (filename :string))
(defcfun ("al_identify_video_f" identify-video-f) :string (fp :pointer))
(defcfun ("al_close_video" close-video) :void (video :pointer))
(defcfun ("al_start_video" start-video) :void (video :pointer) (mixer :pointer))
(defcfun ("al_start_video_with_voice" start-video-with-voice) :void
(video :pointer) (voice :pointer) )
(defcfun ("al_get_video_event_source" get-video-event-source) :pointer
(video :pointer))
(defcfun ("al_set_video_playing" set-video-playing) :void
(video :pointer) (play :bool))
(defcfun ("al_is_video_playing" is-video-playing) :bool (video :pointer))
(defcfun ("al_get_video_audio_rate" get-video-audio-rate) :double
(video :pointer))
(defcfun ("al_get_video_fps" get-video-fps) :double (video :pointer))
(defcfun ("al_get_video_scaled_width" get-video-scaled-width) :float
(video :pointer))
(defcfun ("al_get_video_scaled_height" get-video-scaled-height) :float
(video :pointer))
(defcfun ("al_get_video_frame" get-video-frame) :pointer (video :pointer))
(defcfun ("al_get_video_position" get-video-position) :double
(video :pointer) (which video-position-type))
(defcfun ("al_seek_video" seek-video) :bool
(video :pointer) (pos-in-seconds :double))
| |
e82c10df9a49b281c6c3e1c0c55db99a3e3f4474a706bc883eb3602944096f0b | eugeneia/erlangen | mailbox.lisp | Mailbox FIFO queue implementation based on JPL - QUEUES .
(in-package :erlangen.mailbox)
(defstruct (mailbox (:constructor make-mailbox%))
"Mailbox structure."
(queue (error "QUEUE must be supplied.") :type bounded-fifo-queue)
(priority (error "PRIORITY must be supplied.") :type unbounded-fifo-queue)
(open? t :type symbol)
(lock (make-lock "erlangen.mailbox"))
(enqueued (make-semaphore))
(messages-dequeued 0 :type (unsigned-byte 60))
(messages-dropped 0 :type (unsigned-byte 60)))
(defun make-mailbox (size)
"Return a new empty mailbox of SIZE."
(make-mailbox%
:queue (make-instance 'bounded-fifo-queue :capacity size)
:priority (make-instance 'unbounded-fifo-queue)))
(defun enqueue-message (message mailbox)
"Attempt to enqueue MESSAGE in MAILBOX."
(with-slots (queue open? lock enqueued messages-dropped) mailbox
(with-lock-grabbed (lock)
(if (and open? (not (full? queue)))
(progn (enqueue message queue)
(signal-semaphore enqueued))
(incf messages-dropped))))
(values))
(defun enqueue-priority (message mailbox)
"Attempt to enqueue priority MESSAGE in MAILBOX. Fails if MAILBOX is closed,
but does *not* signal an error."
(with-slots (priority open? lock enqueued messages-dropped) mailbox
(with-lock-grabbed (lock)
(if open?
(progn (enqueue message priority)
(signal-semaphore enqueued))
(incf messages-dropped))))
(values))
(defun empty-p (mailbox)
"Predicate to test if MAILBOX is empty."
(with-slots (queue priority lock) mailbox
(with-lock-grabbed (lock)
(and (empty? queue) (empty? priority)))))
(defun dequeue-message (mailbox &key timeout)
"Return the next message in MAILBOX. Blocks depending on TIMEOUT. Only one
process (the “owner”) may call DEQUEUE-MESSAGE on a given `mailbox'."
(with-slots (queue priority lock enqueued messages-dequeued) mailbox
(case timeout
TIMEOUT = nil : wait for new message indefinitely
((nil) (wait-on-semaphore enqueued))
TIMEOUT = 0 , signal ` timeout ' immediately
(0 (unless (try-semaphore enqueued)
(error 'timeout)))
TIMEOUT = n : wait up to n seconds for new message
(otherwise (unless (timed-wait-on-semaphore enqueued timeout)
(error 'timeout))))
(with-lock-grabbed (lock)
(incf messages-dequeued)
(if (empty? priority)
(dequeue queue)
(dequeue priority)))))
(defun close-mailbox (mailbox)
"Close MAILBOX."
(with-lock-grabbed ((mailbox-lock mailbox))
(setf (mailbox-open? mailbox) nil)))
| null | https://raw.githubusercontent.com/eugeneia/erlangen/204166b33833c49841617bbc6ecfaf4dd77cf6d8/mailbox.lisp | lisp | Mailbox FIFO queue implementation based on JPL - QUEUES .
(in-package :erlangen.mailbox)
(defstruct (mailbox (:constructor make-mailbox%))
"Mailbox structure."
(queue (error "QUEUE must be supplied.") :type bounded-fifo-queue)
(priority (error "PRIORITY must be supplied.") :type unbounded-fifo-queue)
(open? t :type symbol)
(lock (make-lock "erlangen.mailbox"))
(enqueued (make-semaphore))
(messages-dequeued 0 :type (unsigned-byte 60))
(messages-dropped 0 :type (unsigned-byte 60)))
(defun make-mailbox (size)
"Return a new empty mailbox of SIZE."
(make-mailbox%
:queue (make-instance 'bounded-fifo-queue :capacity size)
:priority (make-instance 'unbounded-fifo-queue)))
(defun enqueue-message (message mailbox)
"Attempt to enqueue MESSAGE in MAILBOX."
(with-slots (queue open? lock enqueued messages-dropped) mailbox
(with-lock-grabbed (lock)
(if (and open? (not (full? queue)))
(progn (enqueue message queue)
(signal-semaphore enqueued))
(incf messages-dropped))))
(values))
(defun enqueue-priority (message mailbox)
"Attempt to enqueue priority MESSAGE in MAILBOX. Fails if MAILBOX is closed,
but does *not* signal an error."
(with-slots (priority open? lock enqueued messages-dropped) mailbox
(with-lock-grabbed (lock)
(if open?
(progn (enqueue message priority)
(signal-semaphore enqueued))
(incf messages-dropped))))
(values))
(defun empty-p (mailbox)
"Predicate to test if MAILBOX is empty."
(with-slots (queue priority lock) mailbox
(with-lock-grabbed (lock)
(and (empty? queue) (empty? priority)))))
(defun dequeue-message (mailbox &key timeout)
"Return the next message in MAILBOX. Blocks depending on TIMEOUT. Only one
process (the “owner”) may call DEQUEUE-MESSAGE on a given `mailbox'."
(with-slots (queue priority lock enqueued messages-dequeued) mailbox
(case timeout
TIMEOUT = nil : wait for new message indefinitely
((nil) (wait-on-semaphore enqueued))
TIMEOUT = 0 , signal ` timeout ' immediately
(0 (unless (try-semaphore enqueued)
(error 'timeout)))
TIMEOUT = n : wait up to n seconds for new message
(otherwise (unless (timed-wait-on-semaphore enqueued timeout)
(error 'timeout))))
(with-lock-grabbed (lock)
(incf messages-dequeued)
(if (empty? priority)
(dequeue queue)
(dequeue priority)))))
(defun close-mailbox (mailbox)
"Close MAILBOX."
(with-lock-grabbed ((mailbox-lock mailbox))
(setf (mailbox-open? mailbox) nil)))
| |
e905d763c50405aa91dca5ad4118f03e23450762cf3f894df4c3b0f346e6bae1 | IndecisionTree/adventofcode2022 | Day10.hs | module Days.Day10 (day10) where
import AOC (Solution (..))
import qualified Data.Text as T
day10 :: Solution
day10 = Solution parseInput part1 part2
parseInput :: T.Text -> a
parseInput = error "parseInput not defined for day 10"
part1 :: a -> Int
part1 = error "part1 not defined for day 10"
part2 :: a -> Int
part2 = error "part2 not defined for day 10"
| null | https://raw.githubusercontent.com/IndecisionTree/adventofcode2022/9fce98a2cd0b18d26d2d369f8933b9abba0109ba/solutions/Days/Day10.hs | haskell | module Days.Day10 (day10) where
import AOC (Solution (..))
import qualified Data.Text as T
day10 :: Solution
day10 = Solution parseInput part1 part2
parseInput :: T.Text -> a
parseInput = error "parseInput not defined for day 10"
part1 :: a -> Int
part1 = error "part1 not defined for day 10"
part2 :: a -> Int
part2 = error "part2 not defined for day 10"
| |
66f095e3267872947b1b6af7a9f92683853a69c57a74ff74ab100f3221362246 | samrocketman/home | klawiter-sepia-toning.scm | ;
; The GIMP -- an image manipulation program
Copyright ( C ) 1995 and
;
Sepia toning script for GIMP 2.4
by 05.2007 - 11.2007
;
Tags : photo , colcor , old ,
;
; Author statement:
; just to learn how all it works ;-)
this is a copy of Sepia Toning tutorial
; /
by
;
; --------------------------------------------------------------------
Distributed by Gimp FX Foundry project
; --------------------------------------------------------------------
; - Changelog -
;
; --------------------------------------------------------------------
;
; This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 3 of the License , or
; (at your option) any later version.
;
; This program is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU General Public License for more details.
;
You should have received a copy of the GNU General Public License
; along with this program; if not, you can view the GNU General Public
License version 3 at the web site -3.0.html
Alternatively you can write to the Free Software Foundation , Inc. , 675 Mass
Ave , Cambridge , , USA .
;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(define (script-fu-Sepia_Toning img drawable desaturate mergeLayers color)
(let* (
(sepiaLayer 0)
(maskLayer 0)
(mask 0)
)
; Start an undo group. Everything between the start and the end will
; be carried out if an undo command is issued.
(gimp-image-undo-group-start img)
(gimp-displays-flush)
STEP 2 - copy and desaturate ( optional ) source layer
(set! sepiaLayer
(car
(gimp-layer-copy
drawable
TRUE
)
)
)
(gimp-layer-set-name sepiaLayer "Sepia")
(gimp-image-add-layer img sepiaLayer -1)
(if (equal? desaturate TRUE)
(gimp-desaturate sepiaLayer)
()
)
STEP 3 Set foreground color
(gimp-context-set-foreground color)
STEP 4
; Create a new layer
(set! maskLayer
(car
(gimp-layer-new
img ; image handle
(car (gimp-image-width img)) ; width of layer
(car (gimp-image-height img)) ; height
type ( RGB , RGBA , etc . )
"Sepia Mask" ; name of layer
100 ; opacity
COLOR-MODE ; mode
)
)
)
; Add the new layer to the image
(gimp-image-add-layer img maskLayer -1)
(gimp-drawable-fill maskLayer 0)
STEP 5
(set! mask
(car
(gimp-layer-create-mask maskLayer 0)
)
)
(gimp-layer-add-mask maskLayer mask)
STEP 6 , 7 Copy image into Sepia Layer mask , and than invert it
(gimp-layer-resize-to-image-size sepiaLayer) ; workaround because i cannot 'paste in place' into mask
(gimp-edit-copy sepiaLayer)
(let ((selection (car (gimp-edit-paste mask 0))))
(gimp-floating-sel-anchor selection)
)
(gimp-invert mask)
; merge layer down
(if (equal? mergeLayers TRUE)
(gimp-image-merge-down
img ; img
maskLayer ; upper layer
0 ; merge type [0,1,2]
)
()
)
; Complete the undo group
(gimp-image-undo-group-end img)
) ; let* variables definition
)
(script-fu-register "script-fu-Sepia_Toning"
"Sepia Toning"
"Automatic version of great
Sepia Toning tutorial
by Eric R. Jeschke (redskiesatnight.com/)
www.gimp.org/tutorials/Sepia_Toning/"
"Jakub Klawiter"
""
"03.2007"
"RGB RGBA"
SF-IMAGE "img" 0
SF-DRAWABLE "drawable" 0
SF-TOGGLE "Desaturate source" FALSE
SF-TOGGLE "Merge layers" FALSE
SF-COLOR "color" '(162 138 101))
(script-fu-menu-register
"script-fu-Sepia_Toning"
"<Image>/FX-Foundry/Photo/Effects"
)
| null | https://raw.githubusercontent.com/samrocketman/home/63a8668a71dc594ea9ed76ec56bf8ca43b2a86ca/dotfiles/.gimp/scripts/klawiter-sepia-toning.scm | scheme |
The GIMP -- an image manipulation program
Author statement:
just to learn how all it works ;-)
/
--------------------------------------------------------------------
--------------------------------------------------------------------
- Changelog -
--------------------------------------------------------------------
This program is free software; you can redistribute it and/or modify
either version 3 of the License , or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program; if not, you can view the GNU General Public
Start an undo group. Everything between the start and the end will
be carried out if an undo command is issued.
Create a new layer
image handle
width of layer
height
name of layer
opacity
mode
Add the new layer to the image
workaround because i cannot 'paste in place' into mask
merge layer down
img
upper layer
merge type [0,1,2]
Complete the undo group
let* variables definition | Copyright ( C ) 1995 and
Sepia toning script for GIMP 2.4
by 05.2007 - 11.2007
Tags : photo , colcor , old ,
this is a copy of Sepia Toning tutorial
by
Distributed by Gimp FX Foundry project
it under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
License version 3 at the web site -3.0.html
Alternatively you can write to the Free Software Foundation , Inc. , 675 Mass
Ave , Cambridge , , USA .
(define (script-fu-Sepia_Toning img drawable desaturate mergeLayers color)
(let* (
(sepiaLayer 0)
(maskLayer 0)
(mask 0)
)
(gimp-image-undo-group-start img)
(gimp-displays-flush)
STEP 2 - copy and desaturate ( optional ) source layer
(set! sepiaLayer
(car
(gimp-layer-copy
drawable
TRUE
)
)
)
(gimp-layer-set-name sepiaLayer "Sepia")
(gimp-image-add-layer img sepiaLayer -1)
(if (equal? desaturate TRUE)
(gimp-desaturate sepiaLayer)
()
)
STEP 3 Set foreground color
(gimp-context-set-foreground color)
STEP 4
(set! maskLayer
(car
(gimp-layer-new
type ( RGB , RGBA , etc . )
)
)
)
(gimp-image-add-layer img maskLayer -1)
(gimp-drawable-fill maskLayer 0)
STEP 5
(set! mask
(car
(gimp-layer-create-mask maskLayer 0)
)
)
(gimp-layer-add-mask maskLayer mask)
STEP 6 , 7 Copy image into Sepia Layer mask , and than invert it
(gimp-edit-copy sepiaLayer)
(let ((selection (car (gimp-edit-paste mask 0))))
(gimp-floating-sel-anchor selection)
)
(gimp-invert mask)
(if (equal? mergeLayers TRUE)
(gimp-image-merge-down
)
()
)
(gimp-image-undo-group-end img)
)
(script-fu-register "script-fu-Sepia_Toning"
"Sepia Toning"
"Automatic version of great
Sepia Toning tutorial
by Eric R. Jeschke (redskiesatnight.com/)
www.gimp.org/tutorials/Sepia_Toning/"
"Jakub Klawiter"
""
"03.2007"
"RGB RGBA"
SF-IMAGE "img" 0
SF-DRAWABLE "drawable" 0
SF-TOGGLE "Desaturate source" FALSE
SF-TOGGLE "Merge layers" FALSE
SF-COLOR "color" '(162 138 101))
(script-fu-menu-register
"script-fu-Sepia_Toning"
"<Image>/FX-Foundry/Photo/Effects"
)
|
e52574a0afd26029833fbc5bbbcc0e206ae3d94d07fe8499524e45c4e6cdfb2c | auser/alice | permissions.erl | -module (permissions).
-include ("alice.hrl").
-export ([get/1, post/2, put/2, delete/2]).
% set_permissions [-p <VHostPath>] <UserName> <Regexp> <Regexp> <Regexp>
% clear_permissions [-p <VHostPath>] <UserName>
% list_permissions [-p <VHostPath>]
% list_user_permissions <UserName>
% TODO: Complete
get([]) ->
% ?MODULE:get(["vhost", "/"]);
VhostListing = [ erlang:binary_to_list(V) || V <- vhosts:get_all_vhosts()],
Vhosts = lists:map(fun(V) ->
get_vhost_perms(V)
end, VhostListing),
{?MODULE,
{struct, [
{vhosts, [ Q || Q <- Vhosts ] }
]}
};
get(["vhost", "root"]) -> ?MODULE:get(["vhost", "/"]);
get(["vhost", Vhost]) ->
{?MODULE, get_vhost_perms(Vhost)};
get([Username]) -> {?MODULE, get_user_perms(Username)};
get(Path) -> {"error", erlang:list_to_binary("unhandled: "++Path)}.
post([Username], Data) ->
VHost = extract_vhost(Data),
CPerm = extract_param("configure", Data),
WPerm = extract_param("write", Data),
RPerm = extract_param("read", Data),
case catch rabint:call({rabbit_access_control, set_permissions, [Username, VHost, CPerm, WPerm, RPerm]}) of
{badrpc, {'EXIT', Error}} ->
case Error of
{undef, _Arr} ->
?ERROR("DEPRECATED SUPPORT: To get rid of this message, upgrade to RabbitMQ 1.6", []),
map_user_to_vhost(Username, VHost);
_E -> throw(Error)
end;
ok -> {?MODULE, get_user_perms(Username)}
end;
post(_Path, _Data) -> {"error", <<"unhandled">>}.
put(_Path, _Data) -> {"error", <<"unhandled">>}.
delete([Username], Data) ->
VHost = extract_vhost(Data),
case catch rabint:call({rabbit_access_control, clear_permissions, [Username, VHost]}) of
{badrpc, {'EXIT', Error}} ->
case Error of
{undef, _Arr} ->
?ERROR("DEPRECATED SUPPORT: To get rid of this message, upgrade to RabbitMQ 1.6", []),
unmap_user_from_vhost(Username, VHost);
_E -> throw(Error)
end;
ok -> {?MODULE, get_user_perms(Username)}
end;
delete(_Path, _Data) -> {"error", <<"unhandled">>}.
% PRIVATE
get_user_perms(Username) ->
VhostListing = case catch rabint:call({rabbit_access_control, list_user_permissions, [Username]}) of
{badrpc, {'EXIT', Error}} ->
case Error of
{undef, _Arr} ->
?ERROR("DEPRECATED SUPPORT: To get rid of this message, upgrade to RabbitMQ 1.6", []),
list_user_vhosts(Username);
_E -> throw(Error)
end;
Bin ->
[{struct, create_writable_perm_structure(erlang:tuple_to_list(P))} || P <- Bin ]
end,
{struct, [
{name, utils:turn_binary(Username) },
{vhosts, VhostListing }
]}.
get_vhost_perms(Vhost) ->
U = list_vhost_users(Vhost),
% Now aggregate their data
Users = lists:map(fun(User) ->
UserTuple = create_writable_perm_structure(User),
{struct, UserTuple }
end, U),
{struct, [{"name", utils:turn_binary(Vhost)},{"users", Users}]}.
% ConfigurePerm, WritePerm, ReadPerm
extract_param(Name, Data) ->
case proplists:get_value(erlang:list_to_binary(Name), Data) of
undefined -> ".*";
Bin -> erlang:binary_to_list(Bin)
end.
extract_vhost(Data) ->
case proplists:get_value(<<"vhost">>, Data) of
undefined -> "/";
Perm -> erlang:binary_to_list(Perm)
end.
%%====================================================================
Utils
%%====================================================================
create_writable_perm_structure(Perm) ->
[Name|Rest] = Perm,
[Configure|Rest2] = Rest,
[Write|ReadArr] = Rest2,
[Read] = ReadArr,
[{"name", Name}, {"configure", Configure}, {"write", Write}, {"read", Read}].
%%====================================================================
%% DEPRECATED SUPPORT
%%====================================================================
map_user_to_vhost(Username, Vhost) ->
O = rabint:call({rabbit_access_control, map_user_vhost, [Username, Vhost]}),
Out = case O of
ok -> utils:turn_binary(lists:append(["Mapped ", Username, " to ", Vhost]));
{error, {no_such_user, _BinUsername}} -> utils:turn_binary(lists:append(["No such user ", Username]));
{error, UnknownError} -> utils:turn_binary(lists:append(["Unknown error: ", UnknownError]))
end,
{?MODULE, Out}.
unmap_user_from_vhost(Username, Vhost) ->
O = rabint:call({rabbit_access_control, unmap_user_vhost, [Username, Vhost]}),
Out = case O of
ok -> utils:turn_binary(lists:append(["Unmapped ", Username, " from ", Vhost]));
Else -> utils:turn_binary(Else)
end,
{?MODULE, Out}.
% Fake it
list_vhost_users(Vhost) ->
O = rabint:call({rabbit_access_control, list_vhost_permissions, [Vhost]}),
Users = lists:map(fun(User) -> tuple_to_list(User) end, O),
Users.
list_user_vhosts(Username) ->
rabint:call({rabbit_access_control, list_user_vhosts, [Username]}).
| null | https://raw.githubusercontent.com/auser/alice/e0f867071ede99f451d09053608bd6719c72d1c9/src/rest/controllers/permissions.erl | erlang | set_permissions [-p <VHostPath>] <UserName> <Regexp> <Regexp> <Regexp>
clear_permissions [-p <VHostPath>] <UserName>
list_permissions [-p <VHostPath>]
list_user_permissions <UserName>
TODO: Complete
?MODULE:get(["vhost", "/"]);
PRIVATE
Now aggregate their data
ConfigurePerm, WritePerm, ReadPerm
====================================================================
====================================================================
====================================================================
DEPRECATED SUPPORT
====================================================================
Fake it | -module (permissions).
-include ("alice.hrl").
-export ([get/1, post/2, put/2, delete/2]).
get([]) ->
VhostListing = [ erlang:binary_to_list(V) || V <- vhosts:get_all_vhosts()],
Vhosts = lists:map(fun(V) ->
get_vhost_perms(V)
end, VhostListing),
{?MODULE,
{struct, [
{vhosts, [ Q || Q <- Vhosts ] }
]}
};
get(["vhost", "root"]) -> ?MODULE:get(["vhost", "/"]);
get(["vhost", Vhost]) ->
{?MODULE, get_vhost_perms(Vhost)};
get([Username]) -> {?MODULE, get_user_perms(Username)};
get(Path) -> {"error", erlang:list_to_binary("unhandled: "++Path)}.
post([Username], Data) ->
VHost = extract_vhost(Data),
CPerm = extract_param("configure", Data),
WPerm = extract_param("write", Data),
RPerm = extract_param("read", Data),
case catch rabint:call({rabbit_access_control, set_permissions, [Username, VHost, CPerm, WPerm, RPerm]}) of
{badrpc, {'EXIT', Error}} ->
case Error of
{undef, _Arr} ->
?ERROR("DEPRECATED SUPPORT: To get rid of this message, upgrade to RabbitMQ 1.6", []),
map_user_to_vhost(Username, VHost);
_E -> throw(Error)
end;
ok -> {?MODULE, get_user_perms(Username)}
end;
post(_Path, _Data) -> {"error", <<"unhandled">>}.
put(_Path, _Data) -> {"error", <<"unhandled">>}.
delete([Username], Data) ->
VHost = extract_vhost(Data),
case catch rabint:call({rabbit_access_control, clear_permissions, [Username, VHost]}) of
{badrpc, {'EXIT', Error}} ->
case Error of
{undef, _Arr} ->
?ERROR("DEPRECATED SUPPORT: To get rid of this message, upgrade to RabbitMQ 1.6", []),
unmap_user_from_vhost(Username, VHost);
_E -> throw(Error)
end;
ok -> {?MODULE, get_user_perms(Username)}
end;
delete(_Path, _Data) -> {"error", <<"unhandled">>}.
get_user_perms(Username) ->
VhostListing = case catch rabint:call({rabbit_access_control, list_user_permissions, [Username]}) of
{badrpc, {'EXIT', Error}} ->
case Error of
{undef, _Arr} ->
?ERROR("DEPRECATED SUPPORT: To get rid of this message, upgrade to RabbitMQ 1.6", []),
list_user_vhosts(Username);
_E -> throw(Error)
end;
Bin ->
[{struct, create_writable_perm_structure(erlang:tuple_to_list(P))} || P <- Bin ]
end,
{struct, [
{name, utils:turn_binary(Username) },
{vhosts, VhostListing }
]}.
get_vhost_perms(Vhost) ->
U = list_vhost_users(Vhost),
Users = lists:map(fun(User) ->
UserTuple = create_writable_perm_structure(User),
{struct, UserTuple }
end, U),
{struct, [{"name", utils:turn_binary(Vhost)},{"users", Users}]}.
extract_param(Name, Data) ->
case proplists:get_value(erlang:list_to_binary(Name), Data) of
undefined -> ".*";
Bin -> erlang:binary_to_list(Bin)
end.
extract_vhost(Data) ->
case proplists:get_value(<<"vhost">>, Data) of
undefined -> "/";
Perm -> erlang:binary_to_list(Perm)
end.
Utils
create_writable_perm_structure(Perm) ->
[Name|Rest] = Perm,
[Configure|Rest2] = Rest,
[Write|ReadArr] = Rest2,
[Read] = ReadArr,
[{"name", Name}, {"configure", Configure}, {"write", Write}, {"read", Read}].
map_user_to_vhost(Username, Vhost) ->
O = rabint:call({rabbit_access_control, map_user_vhost, [Username, Vhost]}),
Out = case O of
ok -> utils:turn_binary(lists:append(["Mapped ", Username, " to ", Vhost]));
{error, {no_such_user, _BinUsername}} -> utils:turn_binary(lists:append(["No such user ", Username]));
{error, UnknownError} -> utils:turn_binary(lists:append(["Unknown error: ", UnknownError]))
end,
{?MODULE, Out}.
unmap_user_from_vhost(Username, Vhost) ->
O = rabint:call({rabbit_access_control, unmap_user_vhost, [Username, Vhost]}),
Out = case O of
ok -> utils:turn_binary(lists:append(["Unmapped ", Username, " from ", Vhost]));
Else -> utils:turn_binary(Else)
end,
{?MODULE, Out}.
list_vhost_users(Vhost) ->
O = rabint:call({rabbit_access_control, list_vhost_permissions, [Vhost]}),
Users = lists:map(fun(User) -> tuple_to_list(User) end, O),
Users.
list_user_vhosts(Username) ->
rabint:call({rabbit_access_control, list_user_vhosts, [Username]}).
|
cd7e399956cc6759a82a95c93708a7386626fc944137b19f117c75e5a7180085 | onedata/op-worker | atm_workflow_execution_garbage_collector.erl | %%%-------------------------------------------------------------------
@author
( C ) 2022 ACK CYFRONET AGH
This software is released under the MIT license
cited in ' LICENSE.txt ' .
%%% @end
%%%-------------------------------------------------------------------
%%% @doc
%%% Module responsible for:
%%% - discarding expired atm workflow executions
%%% - purging discarded atm workflow executions
%%% @end
%%%-------------------------------------------------------------------
-module(atm_workflow_execution_garbage_collector).
-author("Bartosz Walkowicz").
-behaviour(gen_server).
-include("modules/automation/atm_execution.hrl").
-include("modules/fslogic/fslogic_common.hrl").
-include_lib("ctool/include/http/codes.hrl").
%% API
-export([id/0, spec/0, start_link/0]).
-export([run/0]).
%% gen_server callbacks
-export([
init/1,
handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3
]).
-type state() :: undefined.
-define(GC_RUN_INTERVAL_SECONDS, op_worker:get_env(
1 hour
)).
-define(ATM_SUSPENDED_WORKFLOW_EXECUTION_EXPIRATION_SECONDS, op_worker:get_env(
30 days
)).
-define(ATM_ENDED_WORKFLOW_EXECUTION_EXPIRATION_SECONDS, op_worker:get_env(
15 days
)).
-define(NOW_SECONDS(), global_clock:timestamp_seconds()).
-define(LIST_BATCH_SIZE, 1000).
-define(SERVER, {global, ?MODULE}).
%%%===================================================================
%%% API
%%%===================================================================
-spec id() -> atom().
id() -> ?MODULE.
-spec spec() -> supervisor:child_spec().
spec() ->
#{
id => id(),
start => {?MODULE, start_link, []},
restart => permanent,
shutdown => timer:seconds(10),
type => worker,
modules => [?MODULE]
}.
-spec start_link() -> {ok, pid()} | {error, term()}.
start_link() ->
gen_server:start_link(?SERVER, ?MODULE, [], []).
-spec run() -> ok.
run() ->
gen_server:call(?SERVER, gc_atm_workflow_executions).
%%%===================================================================
%%% gen_server callbacks
%%%===================================================================
-spec init(Args :: term()) -> {ok, undefined, non_neg_integer()}.
init(_) ->
process_flag(trap_exit, true),
{ok, undefined, timer:seconds(?GC_RUN_INTERVAL_SECONDS)}.
-spec handle_call(Request :: term(), From :: {pid(), Tag :: term()}, state()) ->
{reply, Reply :: term(), NewState :: state()} |
{reply, Reply :: term(), NewState :: state(), non_neg_integer()}.
handle_call(gc_atm_workflow_executions, _From, State) ->
garbage_collect_atm_workflow_executions(),
{reply, ok, State, timer:seconds(?GC_RUN_INTERVAL_SECONDS)};
handle_call(Request, _From, State) ->
?log_bad_request(Request),
{reply, {error, wrong_request}, State}.
-spec handle_cast(Request :: term(), state()) ->
{noreply, NewState :: state()}.
handle_cast(Request, State) ->
?log_bad_request(Request),
{noreply, State}.
-spec handle_info(Info :: term(), state()) ->
{noreply, NewState :: state()} |
{noreply, NewState :: state(), non_neg_integer()}.
handle_info(timeout, State) ->
garbage_collect_atm_workflow_executions(),
{noreply, State, timer:seconds(?GC_RUN_INTERVAL_SECONDS)};
handle_info(Info, State) ->
?log_bad_request(Info),
{noreply, State}.
-spec terminate(Reason :: (normal | shutdown | {shutdown, term()} | term()),
state()) -> term().
terminate(_Reason, _State) ->
ok.
-spec code_change(OldVsn :: term() | {down, term()}, state(), Extra :: term()) ->
{ok, NewState :: state()}.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%%===================================================================
Internal functions
%%%===================================================================
@private
-spec garbage_collect_atm_workflow_executions() -> ok.
garbage_collect_atm_workflow_executions() ->
?info("Running workflow execution garbage collector..."),
discard_expired_atm_workflow_executions(),
purge_discarded_atm_workflow_executions(),
?debug("Automation workflow executions garbage collecting procedure finished succesfully.").
@private
-spec discard_expired_atm_workflow_executions() -> ok.
discard_expired_atm_workflow_executions() ->
case provider_logic:get_spaces() of
{ok, SpaceIds} ->
?debug("Starting expired automation workflow executions discarding procedure..."),
lists:foreach(fun discard_expired_atm_workflow_executions/1, SpaceIds),
?debug("Expired automation workflow executions discarding procedure finished succesfully.");
{error, _} = Error ->
?warning(
"Skipping expired automation workflow executions discarding procedure due to: ~p",
[Error]
)
end.
@private
-spec discard_expired_atm_workflow_executions(od_space:id()) -> ok.
discard_expired_atm_workflow_executions(SpaceId) ->
?debug(
"[Space: ~s] Starting expired automation workflow executions discarding procedure...",
[SpaceId]
),
discard_expired_atm_workflow_executions(SpaceId, ?SUSPENDED_PHASE),
discard_expired_atm_workflow_executions(SpaceId, ?ENDED_PHASE),
?debug(
"[Space: ~s] Expired automation workflow executions discarding procedure finished succesfully.",
[SpaceId]
).
@private
-spec discard_expired_atm_workflow_executions(od_space:id(), ?SUSPENDED_PHASE | ?ENDED_PHASE) ->
ok.
discard_expired_atm_workflow_executions(SpaceId, ?SUSPENDED_PHASE) ->
discard_expired_atm_workflow_executions(SpaceId, ?SUSPENDED_PHASE, #{
start_index => atm_workflow_executions_forest:index(
<<>>, ?NOW_SECONDS() - ?ATM_SUSPENDED_WORKFLOW_EXECUTION_EXPIRATION_SECONDS
),
limit => ?LIST_BATCH_SIZE
});
discard_expired_atm_workflow_executions(SpaceId, ?ENDED_PHASE) ->
discard_expired_atm_workflow_executions(SpaceId, ?ENDED_PHASE, #{
start_index => atm_workflow_executions_forest:index(
<<>>, ?NOW_SECONDS() - ?ATM_ENDED_WORKFLOW_EXECUTION_EXPIRATION_SECONDS
),
limit => ?LIST_BATCH_SIZE
}).
@private
-spec discard_expired_atm_workflow_executions(
od_space:id(),
?SUSPENDED_PHASE | ?ENDED_PHASE,
atm_workflow_executions_forest:listing_opts()
) ->
ok.
discard_expired_atm_workflow_executions(SpaceId, Phase, ListingOpts = #{start_index := StartIndex}) ->
{ok, AtmWorkflowExecutionBasicEntries, IsLast} = atm_workflow_execution_api:list(
SpaceId, Phase, basic, ListingOpts
),
{LastEntryIndex, DiscardedAtmWorkflowExecutionIds} = lists:foldl(
fun({Index, AtmWorkflowExecutionId}, {_, Acc}) ->
{Index, case discard_atm_workflow_execution(AtmWorkflowExecutionId) of
true -> [AtmWorkflowExecutionId | Acc];
false -> Acc
end}
end,
{StartIndex, []},
AtmWorkflowExecutionBasicEntries
),
case length(DiscardedAtmWorkflowExecutionIds) of
0 -> ok;
Num -> ?info("[Space: ~s] Atm gc: discarded ~B expired workflow executions", [SpaceId, Num])
end,
case IsLast of
true ->
ok;
false ->
discard_expired_atm_workflow_executions(SpaceId, Phase, ListingOpts#{
start_index => LastEntryIndex, offset => 1
})
end.
@private
-spec discard_atm_workflow_execution(atm_workflow_execution:id()) -> boolean().
discard_atm_workflow_execution(AtmWorkflowExecutionId) ->
case atm_workflow_execution_api:discard(AtmWorkflowExecutionId) of
ok ->
true;
{error, _} = Error ->
% Log only warning as next gc run will again try to discard this execution
?warning("Failed to discard automation workflow execution (id: ~p) due to: ~p", [
AtmWorkflowExecutionId, Error
]),
false
end.
@private
-spec purge_discarded_atm_workflow_executions() -> ok.
purge_discarded_atm_workflow_executions() ->
?debug("Starting discarded automation workflow executions purging procedure..."),
purge_discarded_atm_workflow_executions(<<>>),
?debug("Discarded automation workflow executions purging procedure finished succesfully.").
@private
-spec purge_discarded_atm_workflow_executions(atm_workflow_execution:id()) -> ok.
purge_discarded_atm_workflow_executions(StartAtmWorkflowExecutionId) ->
DiscardedAtmWorkflowExecutionIds = atm_discarded_workflow_executions:list(
StartAtmWorkflowExecutionId, ?LIST_BATCH_SIZE
),
{LastAtmWorkflowExecutionId, PurgedAtmWorkflowExecutionIds} = lists:foldl(
fun(AtmWorkflowExecutionId, {_, Acc}) ->
{AtmWorkflowExecutionId, case purge_atm_workflow_execution(AtmWorkflowExecutionId) of
true -> [AtmWorkflowExecutionId | Acc];
false -> Acc
end}
end,
{StartAtmWorkflowExecutionId, []},
DiscardedAtmWorkflowExecutionIds
),
case length(PurgedAtmWorkflowExecutionIds) of
0 -> ok;
Num -> ?info("Atm gc: purged ~B discarded workflow executions", [Num])
end,
case length(DiscardedAtmWorkflowExecutionIds) < ?LIST_BATCH_SIZE of
true ->
ok;
false ->
purge_discarded_atm_workflow_executions(LastAtmWorkflowExecutionId)
end.
@private
-spec purge_atm_workflow_execution(atm_workflow_execution:id()) -> boolean().
purge_atm_workflow_execution(AtmWorkflowExecutionId) ->
try
atm_workflow_execution_factory:delete_insecure(AtmWorkflowExecutionId),
true
catch Type:Reason:Stacktrace ->
?error_stacktrace(
"Failed to purge automation workflow execution (id: ~s) due to ~p:~p",
[AtmWorkflowExecutionId, Type, Reason],
Stacktrace
),
false
end.
| null | https://raw.githubusercontent.com/onedata/op-worker/171b05ac629acb4fc337b7dc2f5bf7c433d2c23f/src/modules/automation/workflow/garbage_collector/atm_workflow_execution_garbage_collector.erl | erlang | -------------------------------------------------------------------
@end
-------------------------------------------------------------------
@doc
Module responsible for:
- discarding expired atm workflow executions
- purging discarded atm workflow executions
@end
-------------------------------------------------------------------
API
gen_server callbacks
===================================================================
API
===================================================================
===================================================================
gen_server callbacks
===================================================================
===================================================================
===================================================================
Log only warning as next gc run will again try to discard this execution | @author
( C ) 2022 ACK CYFRONET AGH
This software is released under the MIT license
cited in ' LICENSE.txt ' .
-module(atm_workflow_execution_garbage_collector).
-author("Bartosz Walkowicz").
-behaviour(gen_server).
-include("modules/automation/atm_execution.hrl").
-include("modules/fslogic/fslogic_common.hrl").
-include_lib("ctool/include/http/codes.hrl").
-export([id/0, spec/0, start_link/0]).
-export([run/0]).
-export([
init/1,
handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3
]).
-type state() :: undefined.
-define(GC_RUN_INTERVAL_SECONDS, op_worker:get_env(
1 hour
)).
-define(ATM_SUSPENDED_WORKFLOW_EXECUTION_EXPIRATION_SECONDS, op_worker:get_env(
30 days
)).
-define(ATM_ENDED_WORKFLOW_EXECUTION_EXPIRATION_SECONDS, op_worker:get_env(
15 days
)).
-define(NOW_SECONDS(), global_clock:timestamp_seconds()).
-define(LIST_BATCH_SIZE, 1000).
-define(SERVER, {global, ?MODULE}).
-spec id() -> atom().
id() -> ?MODULE.
-spec spec() -> supervisor:child_spec().
spec() ->
#{
id => id(),
start => {?MODULE, start_link, []},
restart => permanent,
shutdown => timer:seconds(10),
type => worker,
modules => [?MODULE]
}.
-spec start_link() -> {ok, pid()} | {error, term()}.
start_link() ->
gen_server:start_link(?SERVER, ?MODULE, [], []).
-spec run() -> ok.
run() ->
gen_server:call(?SERVER, gc_atm_workflow_executions).
-spec init(Args :: term()) -> {ok, undefined, non_neg_integer()}.
init(_) ->
process_flag(trap_exit, true),
{ok, undefined, timer:seconds(?GC_RUN_INTERVAL_SECONDS)}.
-spec handle_call(Request :: term(), From :: {pid(), Tag :: term()}, state()) ->
{reply, Reply :: term(), NewState :: state()} |
{reply, Reply :: term(), NewState :: state(), non_neg_integer()}.
handle_call(gc_atm_workflow_executions, _From, State) ->
garbage_collect_atm_workflow_executions(),
{reply, ok, State, timer:seconds(?GC_RUN_INTERVAL_SECONDS)};
handle_call(Request, _From, State) ->
?log_bad_request(Request),
{reply, {error, wrong_request}, State}.
-spec handle_cast(Request :: term(), state()) ->
{noreply, NewState :: state()}.
handle_cast(Request, State) ->
?log_bad_request(Request),
{noreply, State}.
-spec handle_info(Info :: term(), state()) ->
{noreply, NewState :: state()} |
{noreply, NewState :: state(), non_neg_integer()}.
handle_info(timeout, State) ->
garbage_collect_atm_workflow_executions(),
{noreply, State, timer:seconds(?GC_RUN_INTERVAL_SECONDS)};
handle_info(Info, State) ->
?log_bad_request(Info),
{noreply, State}.
-spec terminate(Reason :: (normal | shutdown | {shutdown, term()} | term()),
state()) -> term().
terminate(_Reason, _State) ->
ok.
-spec code_change(OldVsn :: term() | {down, term()}, state(), Extra :: term()) ->
{ok, NewState :: state()}.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
Internal functions
@private
-spec garbage_collect_atm_workflow_executions() -> ok.
garbage_collect_atm_workflow_executions() ->
?info("Running workflow execution garbage collector..."),
discard_expired_atm_workflow_executions(),
purge_discarded_atm_workflow_executions(),
?debug("Automation workflow executions garbage collecting procedure finished succesfully.").
@private
-spec discard_expired_atm_workflow_executions() -> ok.
discard_expired_atm_workflow_executions() ->
case provider_logic:get_spaces() of
{ok, SpaceIds} ->
?debug("Starting expired automation workflow executions discarding procedure..."),
lists:foreach(fun discard_expired_atm_workflow_executions/1, SpaceIds),
?debug("Expired automation workflow executions discarding procedure finished succesfully.");
{error, _} = Error ->
?warning(
"Skipping expired automation workflow executions discarding procedure due to: ~p",
[Error]
)
end.
@private
-spec discard_expired_atm_workflow_executions(od_space:id()) -> ok.
discard_expired_atm_workflow_executions(SpaceId) ->
?debug(
"[Space: ~s] Starting expired automation workflow executions discarding procedure...",
[SpaceId]
),
discard_expired_atm_workflow_executions(SpaceId, ?SUSPENDED_PHASE),
discard_expired_atm_workflow_executions(SpaceId, ?ENDED_PHASE),
?debug(
"[Space: ~s] Expired automation workflow executions discarding procedure finished succesfully.",
[SpaceId]
).
@private
-spec discard_expired_atm_workflow_executions(od_space:id(), ?SUSPENDED_PHASE | ?ENDED_PHASE) ->
ok.
discard_expired_atm_workflow_executions(SpaceId, ?SUSPENDED_PHASE) ->
discard_expired_atm_workflow_executions(SpaceId, ?SUSPENDED_PHASE, #{
start_index => atm_workflow_executions_forest:index(
<<>>, ?NOW_SECONDS() - ?ATM_SUSPENDED_WORKFLOW_EXECUTION_EXPIRATION_SECONDS
),
limit => ?LIST_BATCH_SIZE
});
discard_expired_atm_workflow_executions(SpaceId, ?ENDED_PHASE) ->
discard_expired_atm_workflow_executions(SpaceId, ?ENDED_PHASE, #{
start_index => atm_workflow_executions_forest:index(
<<>>, ?NOW_SECONDS() - ?ATM_ENDED_WORKFLOW_EXECUTION_EXPIRATION_SECONDS
),
limit => ?LIST_BATCH_SIZE
}).
@private
-spec discard_expired_atm_workflow_executions(
od_space:id(),
?SUSPENDED_PHASE | ?ENDED_PHASE,
atm_workflow_executions_forest:listing_opts()
) ->
ok.
discard_expired_atm_workflow_executions(SpaceId, Phase, ListingOpts = #{start_index := StartIndex}) ->
{ok, AtmWorkflowExecutionBasicEntries, IsLast} = atm_workflow_execution_api:list(
SpaceId, Phase, basic, ListingOpts
),
{LastEntryIndex, DiscardedAtmWorkflowExecutionIds} = lists:foldl(
fun({Index, AtmWorkflowExecutionId}, {_, Acc}) ->
{Index, case discard_atm_workflow_execution(AtmWorkflowExecutionId) of
true -> [AtmWorkflowExecutionId | Acc];
false -> Acc
end}
end,
{StartIndex, []},
AtmWorkflowExecutionBasicEntries
),
case length(DiscardedAtmWorkflowExecutionIds) of
0 -> ok;
Num -> ?info("[Space: ~s] Atm gc: discarded ~B expired workflow executions", [SpaceId, Num])
end,
case IsLast of
true ->
ok;
false ->
discard_expired_atm_workflow_executions(SpaceId, Phase, ListingOpts#{
start_index => LastEntryIndex, offset => 1
})
end.
@private
-spec discard_atm_workflow_execution(atm_workflow_execution:id()) -> boolean().
discard_atm_workflow_execution(AtmWorkflowExecutionId) ->
case atm_workflow_execution_api:discard(AtmWorkflowExecutionId) of
ok ->
true;
{error, _} = Error ->
?warning("Failed to discard automation workflow execution (id: ~p) due to: ~p", [
AtmWorkflowExecutionId, Error
]),
false
end.
@private
-spec purge_discarded_atm_workflow_executions() -> ok.
purge_discarded_atm_workflow_executions() ->
?debug("Starting discarded automation workflow executions purging procedure..."),
purge_discarded_atm_workflow_executions(<<>>),
?debug("Discarded automation workflow executions purging procedure finished succesfully.").
@private
-spec purge_discarded_atm_workflow_executions(atm_workflow_execution:id()) -> ok.
purge_discarded_atm_workflow_executions(StartAtmWorkflowExecutionId) ->
DiscardedAtmWorkflowExecutionIds = atm_discarded_workflow_executions:list(
StartAtmWorkflowExecutionId, ?LIST_BATCH_SIZE
),
{LastAtmWorkflowExecutionId, PurgedAtmWorkflowExecutionIds} = lists:foldl(
fun(AtmWorkflowExecutionId, {_, Acc}) ->
{AtmWorkflowExecutionId, case purge_atm_workflow_execution(AtmWorkflowExecutionId) of
true -> [AtmWorkflowExecutionId | Acc];
false -> Acc
end}
end,
{StartAtmWorkflowExecutionId, []},
DiscardedAtmWorkflowExecutionIds
),
case length(PurgedAtmWorkflowExecutionIds) of
0 -> ok;
Num -> ?info("Atm gc: purged ~B discarded workflow executions", [Num])
end,
case length(DiscardedAtmWorkflowExecutionIds) < ?LIST_BATCH_SIZE of
true ->
ok;
false ->
purge_discarded_atm_workflow_executions(LastAtmWorkflowExecutionId)
end.
@private
-spec purge_atm_workflow_execution(atm_workflow_execution:id()) -> boolean().
purge_atm_workflow_execution(AtmWorkflowExecutionId) ->
try
atm_workflow_execution_factory:delete_insecure(AtmWorkflowExecutionId),
true
catch Type:Reason:Stacktrace ->
?error_stacktrace(
"Failed to purge automation workflow execution (id: ~s) due to ~p:~p",
[AtmWorkflowExecutionId, Type, Reason],
Stacktrace
),
false
end.
|
f314cc784cf980689bafade7431bba5d31d2ec8baa83fe2cf36ad5fe5440d03f | clojure/core.match | regex.clj | Copyright ( c ) . All rights reserved .
; The use and distribution terms for this software are covered by the
; Eclipse Public License 1.0 (-1.0.php)
; which can be found in the file epl-v10.html at the root of this distribution.
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
; You must not remove this notice, or any other, from this software.
(ns clojure.core.match.test.regex
(:use [clojure.core.match :only [match]])
(:use clojure.core.match.regex)
(:use clojure.test))
(deftest basic-regex
(is (= (match ["asdf"]
[#"asdf"] 1
:else 2)
1)))
| null | https://raw.githubusercontent.com/clojure/core.match/1a57329c01507d3eb479a4f5461e012edaa6e4de/src/test/clojure/clojure/core/match/test/regex.clj | clojure | The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 (-1.0.php)
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software. | Copyright ( c ) . All rights reserved .
(ns clojure.core.match.test.regex
(:use [clojure.core.match :only [match]])
(:use clojure.core.match.regex)
(:use clojure.test))
(deftest basic-regex
(is (= (match ["asdf"]
[#"asdf"] 1
:else 2)
1)))
|
6cb8b3d7eaefc73109d64fe292fb4850751e2b24289be6d33d2ba5fff2e4fdae | rbkmoney/erlang_capi_v2 | capi_client_geo.erl | -module(capi_client_geo).
-export([get_location_names/2]).
-type context() :: capi_client_lib:context().
-type query_string() :: map().
-spec get_location_names(context(), query_string()) -> {ok, term()} | {error, term()}.
get_location_names(Context, Query) ->
Params = #{
qs_val => Query
},
{Url, PreparedParams, Opts} = capi_client_lib:make_request(Context, Params),
Response = swag_client_geo_api:get_locations_names(Url, PreparedParams, Opts),
capi_client_lib:handle_response(Response).
| null | https://raw.githubusercontent.com/rbkmoney/erlang_capi_v2/438d0a603475c57dddade8c419f0d70fdf86438d/apps/capi_client/src/capi_client_geo.erl | erlang | -module(capi_client_geo).
-export([get_location_names/2]).
-type context() :: capi_client_lib:context().
-type query_string() :: map().
-spec get_location_names(context(), query_string()) -> {ok, term()} | {error, term()}.
get_location_names(Context, Query) ->
Params = #{
qs_val => Query
},
{Url, PreparedParams, Opts} = capi_client_lib:make_request(Context, Params),
Response = swag_client_geo_api:get_locations_names(Url, PreparedParams, Opts),
capi_client_lib:handle_response(Response).
| |
c16245abffcaeaf5f09ea672d6bae0190fe6ec512a9989c4b898fd3831898a72 | coq/coq | control.mli | (************************************************************************)
(* * The Coq Proof Assistant / The Coq Development Team *)
v * Copyright INRIA , CNRS and contributors
< O _ _ _ , , * ( see version control and CREDITS file for authors & dates )
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
* GNU Lesser General Public License Version 2.1
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
* Global control of Coq .
(** Used to convert signals to exceptions *)
exception Timeout
(** Will periodically call [Thread.delay] if set to true *)
val enable_thread_delay : bool ref
val interrupt : bool ref
* Coq interruption : set the following boolean reference to interrupt Coq
( it eventually raises [ Break ] , simulating a Ctrl - C )
(it eventually raises [Break], simulating a Ctrl-C) *)
val check_for_interrupt : unit -> unit
* Use this function as a potential yield function . If { ! interrupt } has been
set , will raise [ . Break ] .
set, il will raise [Sys.Break]. *)
val timeout : float -> ('a -> 'b) -> 'a -> 'b option
* [ timeout n f x ] tries to compute [ Some ( f x ) ] , and if it fails to do so
before [ n ] seconds , returns [ None ] instead .
before [n] seconds, returns [None] instead. *)
(** Set a particular timeout function; warning, this is an internal
API and it is scheduled to go away. *)
type timeout = { timeout : 'a 'b. float -> ('a -> 'b) -> 'a -> 'b option }
val set_timeout : timeout -> unit
* [ protect_sigalrm f x ] computes [ f x ] , but if SIGALRM is received during that
computation , the signal handler is executed only once the computation is
terminated . Otherwise said , it makes the execution of [ f ] atomic w.r.t .
handling of SIGALRM .
This is useful for example to prevent the implementation of ` Timeout ` to
interrupt I / O routines , generating ill - formed output .
computation, the signal handler is executed only once the computation is
terminated. Otherwise said, it makes the execution of [f] atomic w.r.t.
handling of SIGALRM.
This is useful for example to prevent the implementation of `Timeout` to
interrupt I/O routines, generating ill-formed output.
*)
val protect_sigalrm : ('a -> 'b) -> 'a -> 'b
| null | https://raw.githubusercontent.com/coq/coq/110921a449fcb830ec2a1cd07e3acc32319feae6/lib/control.mli | ocaml | **********************************************************************
* The Coq Proof Assistant / The Coq Development Team
// * This file is distributed under the terms of the
* (see LICENSE file for the text of the license)
**********************************************************************
* Used to convert signals to exceptions
* Will periodically call [Thread.delay] if set to true
* Set a particular timeout function; warning, this is an internal
API and it is scheduled to go away. | v * Copyright INRIA , CNRS and contributors
< O _ _ _ , , * ( see version control and CREDITS file for authors & dates )
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* GNU Lesser General Public License Version 2.1
* Global control of Coq .
exception Timeout
val enable_thread_delay : bool ref
val interrupt : bool ref
* Coq interruption : set the following boolean reference to interrupt Coq
( it eventually raises [ Break ] , simulating a Ctrl - C )
(it eventually raises [Break], simulating a Ctrl-C) *)
val check_for_interrupt : unit -> unit
* Use this function as a potential yield function . If { ! interrupt } has been
set , will raise [ . Break ] .
set, il will raise [Sys.Break]. *)
val timeout : float -> ('a -> 'b) -> 'a -> 'b option
* [ timeout n f x ] tries to compute [ Some ( f x ) ] , and if it fails to do so
before [ n ] seconds , returns [ None ] instead .
before [n] seconds, returns [None] instead. *)
type timeout = { timeout : 'a 'b. float -> ('a -> 'b) -> 'a -> 'b option }
val set_timeout : timeout -> unit
* [ protect_sigalrm f x ] computes [ f x ] , but if SIGALRM is received during that
computation , the signal handler is executed only once the computation is
terminated . Otherwise said , it makes the execution of [ f ] atomic w.r.t .
handling of SIGALRM .
This is useful for example to prevent the implementation of ` Timeout ` to
interrupt I / O routines , generating ill - formed output .
computation, the signal handler is executed only once the computation is
terminated. Otherwise said, it makes the execution of [f] atomic w.r.t.
handling of SIGALRM.
This is useful for example to prevent the implementation of `Timeout` to
interrupt I/O routines, generating ill-formed output.
*)
val protect_sigalrm : ('a -> 'b) -> 'a -> 'b
|
05d9642688f3562990e11fcf0de8ce745abf5327916c7c64016ef940af716bd5 | wlitwin/graphv | path.ml | type t = {
mutable first : int;
mutable count : int;
mutable closed : bool;
mutable nbevel : int;
mutable fill : VertexBuffer.Sub.t;
mutable stroke : VertexBuffer.Sub.t;
mutable winding : Winding.t;
mutable convex : bool;
}
let empty_sub = VertexBuffer.Sub.create()
let create () = {
first = 0;
count = 0;
closed = false;
nbevel = 0;
fill = empty_sub;
stroke = empty_sub;
winding = Winding.CCW;
convex = true;
}
let reset (t : t) : unit =
t.first <- 0;
t.count <- 0;
t.closed <- false;
t.nbevel <- 0;
t.fill <- empty_sub;
t.stroke <- empty_sub;
t.winding <- Winding.CCW;
t.convex <- true;
;;
let copy (t : t) : t = {
first = t.first;
count = t.count;
closed = t.closed;
nbevel = t.nbevel;
fill = t.fill;
stroke = t.stroke;
winding = t.winding;
convex = t.convex;
}
| null | https://raw.githubusercontent.com/wlitwin/graphv/d0a09575c5ff5ee3727c222dd6130d22e4cf62d9/webgl2/core/path.ml | ocaml | type t = {
mutable first : int;
mutable count : int;
mutable closed : bool;
mutable nbevel : int;
mutable fill : VertexBuffer.Sub.t;
mutable stroke : VertexBuffer.Sub.t;
mutable winding : Winding.t;
mutable convex : bool;
}
let empty_sub = VertexBuffer.Sub.create()
let create () = {
first = 0;
count = 0;
closed = false;
nbevel = 0;
fill = empty_sub;
stroke = empty_sub;
winding = Winding.CCW;
convex = true;
}
let reset (t : t) : unit =
t.first <- 0;
t.count <- 0;
t.closed <- false;
t.nbevel <- 0;
t.fill <- empty_sub;
t.stroke <- empty_sub;
t.winding <- Winding.CCW;
t.convex <- true;
;;
let copy (t : t) : t = {
first = t.first;
count = t.count;
closed = t.closed;
nbevel = t.nbevel;
fill = t.fill;
stroke = t.stroke;
winding = t.winding;
convex = t.convex;
}
| |
9854782b20f5ae2c855c3ab5c9ce98162ef69f626f42c95a0661127572414e10 | 8c6794b6/guile-tjit | evil.scm | ;;; examples/safe/evil.scm -- Evil Scheme file to be run in a safe
;;; environment.
;;; Commentary:
;;; This is an example file to be evaluated by the `safe' program in
;;; this directory. This program, unlike the `untrusted.scm' (which
;;; is untrusted, but a really nice fellow though), tries to do evil
;;; things and will thus break in a safe environment.
;;;
;;; *Note* that the files in this directory are only suitable for
;;; demonstration purposes, if you have to implement safe evaluation
;;; mechanisms in important environments, you will have to do more
;;; than shown here -- for example disabling input/output operations.
Author :
Date : 2001 - 05 - 30
;;; Code:
(define passwd (open-input-file "/etc/passwd"))
(let lp ((ch (read-char passwd)))
(if (not (eof-object? ch))
(lp (read-char passwd))))
;;; End of file.
| null | https://raw.githubusercontent.com/8c6794b6/guile-tjit/9566e480af2ff695e524984992626426f393414f/examples/safe/evil.scm | scheme | examples/safe/evil.scm -- Evil Scheme file to be run in a safe
environment.
Commentary:
This is an example file to be evaluated by the `safe' program in
this directory. This program, unlike the `untrusted.scm' (which
is untrusted, but a really nice fellow though), tries to do evil
things and will thus break in a safe environment.
*Note* that the files in this directory are only suitable for
demonstration purposes, if you have to implement safe evaluation
mechanisms in important environments, you will have to do more
than shown here -- for example disabling input/output operations.
Code:
End of file. |
Author :
Date : 2001 - 05 - 30
(define passwd (open-input-file "/etc/passwd"))
(let lp ((ch (read-char passwd)))
(if (not (eof-object? ch))
(lp (read-char passwd))))
|
87b25cae1c6aa142328584bc3da8246ad08d13af71113fd5024b460bda06fb82 | anoma/juvix | Options.hs | module Juvix.Compiler.Backend.Html.Data.Options where
import Juvix.Prelude
data HtmlOptions = HtmlOptions
{ _htmlOptionsKind :: HtmlKind,
_htmlOptionsAssetsPrefix :: Text,
_htmlOptionsUrlPrefix :: Text,
_htmlOptionsOutputDir :: Path Abs Dir,
_htmlOptionsParamBase :: Text,
_htmlOptionsTheme :: Theme,
_htmlOptionsNoFooter :: Bool
}
data Theme
= Nord
| Ayu
deriving stock (Show, Enum, Bounded, Data)
data HtmlKind
= HtmlDoc
| HtmlSrc
| HtmlOnly
deriving stock (Data)
makeLenses ''HtmlOptions
| null | https://raw.githubusercontent.com/anoma/juvix/22027f137c96845cb91c08d510e63fa4bc3f06e2/src/Juvix/Compiler/Backend/Html/Data/Options.hs | haskell | module Juvix.Compiler.Backend.Html.Data.Options where
import Juvix.Prelude
data HtmlOptions = HtmlOptions
{ _htmlOptionsKind :: HtmlKind,
_htmlOptionsAssetsPrefix :: Text,
_htmlOptionsUrlPrefix :: Text,
_htmlOptionsOutputDir :: Path Abs Dir,
_htmlOptionsParamBase :: Text,
_htmlOptionsTheme :: Theme,
_htmlOptionsNoFooter :: Bool
}
data Theme
= Nord
| Ayu
deriving stock (Show, Enum, Bounded, Data)
data HtmlKind
= HtmlDoc
| HtmlSrc
| HtmlOnly
deriving stock (Data)
makeLenses ''HtmlOptions
| |
ae572a471b2d2f0f598ce99aecd9d449f6e3101373b2c6f6b66bba0237102fed | basho/riak_kv | riak_kv_mrc_sink.erl | %% -------------------------------------------------------------------
%%
%% riak_kv_mrc_sink: A simple process to act as a Pipe sink for
%% MapReduce queries
%%
Copyright ( c ) 2012 Basho Technologies , Inc. All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
@doc This FSM acts as a Riak Pipe sink , and dumbly accumulates
%% messages received from the pipe, until it is asked to send them to
its owner . The owner is whatever process started this FSM .
This FSM will speak both ` raw ' and ` fsm ' sink types ( it
%% answers appropriately to each, without parameterization).
The FSM enforces a soft cap on the number of results and logs
accumulated when receiving ` fsm ' sink type messages . When the
number of results+logs that have been delivered exceeds the cap
between calls to { @link next/1 } , the sink stops delivering result
%% acks to workers. The value of this cap can be specified by
%% including a `buffer' property in the `Options' parameter of {@link
%% start/2}, or by setting the `mrc_sink_buffer' environment variable
%% in the `riak_kv' application. If neither settings is specified, or
%% they are not specified as non-negative integers, the default
( currently 1000 ) is used .
%% Messages are delivered to the owners as an erlang message that is a
%% `#kv_mrc_pipe{}' record. The `logs' field is a list of log messages
%% received, ordered oldest to youngest, each having the form
%% `{PhaseId, Message}'. The `results' field is an orddict keyed by
%% `PhaseId', with each value being a list of results received from
%% that phase, ordered oldest to youngest. The `ref' field is the
%% reference from the `#pipe{}' record. The `done' field is `true' if
%% the `eoi' message has been received, or `false' otherwise.
There should be three states : ` which_pipe ' , ` collect_output ' , and
%% `send_output'.
The FSM starts in ` which_pipe ' , and waits there until it
%% is told which pipe to expect output from.
From ` which_pipe ' , the FSM moves to ` collect_output ' . While in
` collect_output ' , the FSM simply collects ` # pipe_log { } ' ,
%% `#pipe_result{}', and `#pipe_eoi{}' messages.
If the FSM has received logs , results , or the eoi before it
%% receives a `next' event, it sends everything it has accumulated to
%% the owner, wrapped in a `#kv_mrc_sink{}' record, clears its buffers,
%% and returns to collecting pipe messages.
If the FSM has not received any logs , results , or the eoi before it
%% receives a `next' event, it enters the `send_ouput' state. As soon
as the FSM receives any log , result , or eoi message in the
%% `send_output' state, it sends that message to the owner process,
%% and then returns to the `collect_output' state.
The FSM only exits on its own in three cases . The first is when its
owner exits . The second is when the builder of the pipe for which
it is consuming messages exits abnormally . The third is after it
%% delivers the a `#kv_mrc_sink{}' in which it has marked
%% `done=true'.
-module(riak_kv_mrc_sink).
-export([
start/2,
start_link/2,
use_pipe/2,
next/1,
stop/1,
merge_outputs/1,
init/1,
which_pipe/2, which_pipe/3,
collect_output/2, collect_output/3,
send_output/2, send_output/3,
handle_event/3,
handle_sync_event/4,
handle_info/3,
terminate/3,
code_change/4
]).
-behaviour(gen_fsm).
-compile({nowarn_deprecated_function,
[{gen_fsm, start_link, 3},
{gen_fsm, send_event, 2},
{gen_fsm, sync_send_event, 2},
{gen_fsm, sync_send_event, 3},
{gen_fsm, reply, 2}]}).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-include_lib("riak_pipe/include/riak_pipe.hrl").
-include("riak_kv_mrc_sink.hrl").
-define(BUFFER_SIZE_DEFAULT, 1000).
-record(state, {
owner :: pid(),
builder :: pid() | undefined,
ref :: reference() | undefined,
results=[] :: [{PhaseId::term(), Results::list()}],
delayed_acks=[] :: list(),
logs=[] :: list(),
done=false :: boolean(),
buffer_max :: integer(),
buffer_left :: integer()
}).
start(OwnerPid, Options) ->
riak_kv_mrc_sink_sup:start_sink(OwnerPid, Options).
start_link(OwnerPid, Options) ->
gen_fsm:start_link(?MODULE, [OwnerPid, Options], []).
use_pipe(Sink, Pipe) ->
gen_fsm:sync_send_event(Sink, {use_pipe, Pipe}).
%% @doc Trigger the send of the next result/log/eoi batch received.
next(Sink) ->
gen_fsm:send_event(Sink, next).
stop(Sink) ->
riak_kv_mrc_sink_sup:terminate_sink(Sink).
%% @doc Convenience: If outputs are collected as a list of orddicts,
with the first being the most recently received , merge them into
one orddict .
%%
That is , for one keep , our input should look like :
%% [ [{0, [G,H,I]}], [{0, [D,E,F]}], [{0, [A,B,C]}] ]
%% And we want it to come out as:
%% [{0, [A,B,C,D,E,F,G,H,I]}]
-spec merge_outputs([ [{integer(), list()}] ]) -> [{integer(), list()}].
merge_outputs(Acc) ->
%% each orddict has its outputs in oldest->newest; since we're
iterating from newest->oldest overall , we can just tack the
%% next list onto the front of the accumulator
DM = fun(_K, O, A) -> O++A end,
lists:foldl(fun(O, A) -> orddict:merge(DM, O, A) end, [], Acc).
%% gen_fsm exports
init([OwnerPid, Options]) ->
erlang:monitor(process, OwnerPid),
Buffer = buffer_size(Options),
{ok, which_pipe, #state{owner=OwnerPid,
buffer_max=Buffer,
buffer_left=Buffer}}.
%%% which_pipe: waiting to find out what pipe we're listening to
which_pipe(_, State) ->
{next_state, which_pipe, State}.
which_pipe({use_pipe, #pipe{builder=Builder, sink=Sink}}, _From, State) ->
erlang:monitor(process, Builder),
{reply, ok, collect_output,
State#state{builder=Builder, ref=Sink#fitting.ref}};
which_pipe(_, _, State) ->
{next_state, which_pipe, State}.
%%% collect_output: buffering results and logs until asked for them
collect_output(next, State) ->
case State#state.done of
true ->
NewState = send_to_owner(State),
{stop, normal, NewState};
false ->
case has_output(State) of
true ->
NewState = send_to_owner(State),
{next_state, collect_output, NewState};
false ->
%% nothing to send yet, prepare to send as soon as
%% there is something
{next_state, send_output, State}
end
end;
collect_output(#pipe_result{ref=Ref, from=PhaseId, result=Res},
#state{ref=Ref, results=Acc, buffer_left=Left}=State) ->
NewAcc = add_result(PhaseId, Res, Acc),
{next_state, collect_output,
State#state{results=NewAcc, buffer_left=Left-1}};
collect_output(#pipe_log{ref=Ref, from=PhaseId, msg=Msg},
#state{ref=Ref, logs=Acc, buffer_left=Left}=State) ->
{next_state, collect_output,
State#state{logs=[{PhaseId, Msg}|Acc], buffer_left=Left-1}};
collect_output(#pipe_eoi{ref=Ref}, #state{ref=Ref}=State) ->
{next_state, collect_output, State#state{done=true}};
collect_output(_, State) ->
{next_state, collect_output, State}.
collect_output(#pipe_result{ref=Ref, from=PhaseId, result=Res},
From,
#state{ref=Ref, results=Acc}=State) ->
NewAcc = add_result(PhaseId, Res, Acc),
maybe_ack(From, State#state{results=NewAcc});
collect_output(#pipe_log{ref=Ref, from=PhaseId, msg=Msg},
From,
#state{ref=Ref, logs=Acc}=State) ->
maybe_ack(From, State#state{logs=[{PhaseId, Msg}|Acc]});
collect_output(#pipe_eoi{ref=Ref}, _From, #state{ref=Ref}=State) ->
{reply, ok, collect_output, State#state{done=true}};
collect_output(_, _, State) ->
{next_state, collect_output, State}.
maybe_ack(_From, #state{buffer_left=Left}=State) when Left > 0 ->
%% there's room for more, tell the worker it can continue
{reply, ok, collect_output, State#state{buffer_left=Left-1}};
maybe_ack(From, #state{buffer_left=Left, delayed_acks=Delayed}=State) ->
%% there's no more room, hold up the worker
%% not actually necessary to update buffer_left, but it could make
%% for interesting stats
{next_state, collect_output,
State#state{buffer_left=Left-1, delayed_acks=[From|Delayed]}}.
%% send_output: waiting for output to send, after having been asked
%% for some while there wasn't any
send_output(#pipe_result{ref=Ref, from=PhaseId, result=Res},
#state{ref=Ref, results=Acc}=State) ->
NewAcc = add_result(PhaseId, Res, Acc),
NewState = send_to_owner(State#state{results=NewAcc}),
{next_state, collect_output, NewState};
send_output(#pipe_log{ref=Ref, from=PhaseId, msg=Msg},
#state{ref=Ref, logs=Acc}=State) ->
NewState = send_to_owner(State#state{logs=[{PhaseId, Msg}|Acc]}),
{next_state, collect_output, NewState};
send_output(#pipe_eoi{ref=Ref}, #state{ref=Ref}=State) ->
NewState = send_to_owner(State#state{done=true}),
{stop, normal, NewState};
send_output(_, State) ->
{next_state, send_output, State}.
send_output(#pipe_result{ref=Ref, from=PhaseId, result=Res},
_From, #state{ref=Ref, results=Acc}=State) ->
NewAcc = add_result(PhaseId, Res, Acc),
NewState = send_to_owner(State#state{results=NewAcc}),
{reply, ok, collect_output, NewState};
send_output(#pipe_log{ref=Ref, from=PhaseId, msg=Msg},
_From, #state{ref=Ref, logs=Acc}=State) ->
NewState = send_to_owner(State#state{logs=[{PhaseId, Msg}|Acc]}),
{reply, ok, collect_output, NewState};
send_output(#pipe_eoi{ref=Ref}, _From, #state{ref=Ref}=State) ->
NewState = send_to_owner(State#state{done=true}),
{stop, normal, ok, NewState};
send_output(_, _, State) ->
{next_state, send_output, State}.
handle_event(_, StateName, State) ->
{next_state, StateName, State}.
handle_sync_event(_, _, StateName, State) ->
{next_state, StateName, State}.
Clusters containing nodes running version 1.2 and previous
%% will send raw results, regardless of sink type. We can't block
%% these worker sending raw results, but we can still track these
%% additions, and block other workers because of them.
handle_info(#pipe_result{ref=Ref, from=PhaseId, result=Res},
StateName,
#state{ref=Ref, results=Acc, buffer_left=Left}=State) ->
NewAcc = add_result(PhaseId, Res, Acc),
info_response(StateName,
State#state{results=NewAcc, buffer_left=Left-1});
handle_info(#pipe_log{ref=Ref, from=PhaseId, msg=Msg},
StateName,
#state{ref=Ref, logs=Acc, buffer_left=Left}=State) ->
info_response(StateName,
State#state{logs=[{PhaseId, Msg}|Acc],
buffer_left=Left-1});
handle_info(#pipe_eoi{ref=Ref},
StateName, #state{ref=Ref}=State) ->
info_response(StateName, State#state{done=true});
handle_info({'DOWN', _, process, Pid, _Reason}, _,
#state{owner=Pid}=State) ->
%% exit as soon as the owner dies
{stop, normal, State};
handle_info({'DOWN', _, process, Pid, Reason}, _,
#state{builder=Pid}=State) when Reason /= normal ->
%% don't stop when the builder exits 'normal', because that's
%% probably just the pipe shutting down normally - wait for the
%% owner to ask for the last outputs
_ = lager:warning("Pipe builder down. Reason: ~p", [Reason]),
{stop, normal, State};
handle_info(_, StateName, State) ->
{next_state, StateName, State}.
%% continue buffering, unless we've been waiting to reply; stop if we
%% were waiting to reply and we've received eoi
info_response(collect_output, State) ->
{next_state, collect_output, State};
info_response(send_output, #state{done=Done}=State) ->
NewState = send_to_owner(State),
if Done -> {stop, normal, NewState};
true -> {next_state, collect_output, NewState}
end.
terminate(_, _, _) ->
ok.
code_change(_, StateName, State, _) ->
{ok, StateName, State}.
%% internal
has_output(#state{results=[], logs=[]}) ->
false;
has_output(_) ->
true.
%% also clears buffers
send_to_owner(#state{owner=Owner, ref=Ref,
results=Results, logs=Logs, done=Done,
buffer_max=Max, delayed_acks=Delayed}=State) ->
Owner ! #kv_mrc_sink{ref=Ref,
results=finish_results(Results),
logs=lists:reverse(Logs),
done=Done},
_ = [ gen_fsm:reply(From, ok) || From <- Delayed ],
State#state{results=[], logs=[],
buffer_left=Max, delayed_acks=[]}.
%% results are kept as lists in a proplist
add_result(PhaseId, Result, Acc) ->
case lists:keytake(PhaseId, 1, Acc) of
{value, {PhaseId, IAcc}, RAcc} ->
[{PhaseId,[Result|IAcc]}|RAcc];
false ->
[{PhaseId,[Result]}|Acc]
end.
%% transform the proplist buffers into orddicts time-ordered
finish_results(Results) ->
[{I, lists:reverse(R)} || {I, R} <- lists:keysort(1, Results)].
%% choose buffer size, given Options, app env, default
-spec buffer_size(list()) -> non_neg_integer().
buffer_size(Options) ->
case buffer_size_options(Options) of
{ok, Size} -> Size;
false ->
case buffer_size_app_env() of
{ok, Size} -> Size;
false ->
?BUFFER_SIZE_DEFAULT
end
end.
-spec buffer_size_options(list()) -> {ok, non_neg_integer()} | false.
buffer_size_options(Options) ->
case lists:keyfind(buffer, 1, Options) of
{buffer, Size} when is_integer(Size), Size >= 0 ->
{ok, Size};
_ ->
false
end.
-spec buffer_size_app_env() -> {ok, non_neg_integer()} | false.
buffer_size_app_env() ->
case application:get_env(riak_kv, mrc_sink_buffer) of
{ok, Size} when is_integer(Size), Size >= 0 ->
{ok, Size};
_ ->
false
end.
%% TEST
-ifdef(TEST).
buffer_size_test_() ->
Tests = [ {"buffer option", 5, [{buffer, 5}], []},
{"buffer app env", 5, [], [{mrc_sink_buffer, 5}]},
{"buffer default", ?BUFFER_SIZE_DEFAULT, [], []} ],
FillFuns = [ {"send_event", fun gen_fsm:send_event/2},
{"sync_send_event", fun gen_fsm:sync_send_event/2},
{"erlang:send", fun(S, R) -> S ! R, ok end} ],
{foreach,
fun() -> application:load(riak_kv) end,
fun(_) -> application:unload(riak_kv) end,
[buffer_size_test_helper(Name, FillFun, Size, Options, AppEnv)
|| {Name, Size, Options, AppEnv} <- Tests,
FillFun <- FillFuns]}.
buffer_size_test_helper(Name, {FillName, FillFun}, Size, Options, AppEnv) ->
{Name++" "++FillName,
fun() ->
application:load(riak_kv),
[ application:set_env(riak_kv, K, V) || {K, V} <- AppEnv ],
%% start up our sink
{ok, Sink} = ?MODULE:start_link(self(), Options),
Ref = make_ref(),
Pipe = #pipe{builder=self(),
sink=#fitting{pid=Sink, ref=Ref}},
?MODULE:use_pipe(Sink, Pipe),
%% fill its buffer
[ ok = FillFun(
Sink,
#pipe_result{from=tester, ref=Ref, result=I})
|| I <- lists:seq(1, Size) ],
%% ensure extra result will block
{'EXIT',{timeout,{gen_fsm,sync_send_event,_}}} =
(catch gen_fsm:sync_send_event(
Sink,
#pipe_result{from=tester, ref=Ref, result=Size+1},
1000)),
%% now drain what's there
?MODULE:next(Sink),
%% make sure that all results were received, including
blocked one
receive
#kv_mrc_sink{ref=Ref, results=[{tester,R}]} ->
?assertEqual(Size+1, length(R))
end,
%% make sure that the delayed ack was received
receive
{GenFsmRef, ok} when is_reference(GenFsmRef) ->
ok
end
end}.
-endif.
| null | https://raw.githubusercontent.com/basho/riak_kv/aeef1591704d32230b773d952a2f1543cbfa1889/src/riak_kv_mrc_sink.erl | erlang | -------------------------------------------------------------------
riak_kv_mrc_sink: A simple process to act as a Pipe sink for
MapReduce queries
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-------------------------------------------------------------------
messages received from the pipe, until it is asked to send them to
answers appropriately to each, without parameterization).
acks to workers. The value of this cap can be specified by
including a `buffer' property in the `Options' parameter of {@link
start/2}, or by setting the `mrc_sink_buffer' environment variable
in the `riak_kv' application. If neither settings is specified, or
they are not specified as non-negative integers, the default
Messages are delivered to the owners as an erlang message that is a
`#kv_mrc_pipe{}' record. The `logs' field is a list of log messages
received, ordered oldest to youngest, each having the form
`{PhaseId, Message}'. The `results' field is an orddict keyed by
`PhaseId', with each value being a list of results received from
that phase, ordered oldest to youngest. The `ref' field is the
reference from the `#pipe{}' record. The `done' field is `true' if
the `eoi' message has been received, or `false' otherwise.
`send_output'.
is told which pipe to expect output from.
`#pipe_result{}', and `#pipe_eoi{}' messages.
receives a `next' event, it sends everything it has accumulated to
the owner, wrapped in a `#kv_mrc_sink{}' record, clears its buffers,
and returns to collecting pipe messages.
receives a `next' event, it enters the `send_ouput' state. As soon
`send_output' state, it sends that message to the owner process,
and then returns to the `collect_output' state.
delivers the a `#kv_mrc_sink{}' in which it has marked
`done=true'.
@doc Trigger the send of the next result/log/eoi batch received.
@doc Convenience: If outputs are collected as a list of orddicts,
[ [{0, [G,H,I]}], [{0, [D,E,F]}], [{0, [A,B,C]}] ]
And we want it to come out as:
[{0, [A,B,C,D,E,F,G,H,I]}]
each orddict has its outputs in oldest->newest; since we're
next list onto the front of the accumulator
gen_fsm exports
which_pipe: waiting to find out what pipe we're listening to
collect_output: buffering results and logs until asked for them
nothing to send yet, prepare to send as soon as
there is something
there's room for more, tell the worker it can continue
there's no more room, hold up the worker
not actually necessary to update buffer_left, but it could make
for interesting stats
send_output: waiting for output to send, after having been asked
for some while there wasn't any
will send raw results, regardless of sink type. We can't block
these worker sending raw results, but we can still track these
additions, and block other workers because of them.
exit as soon as the owner dies
don't stop when the builder exits 'normal', because that's
probably just the pipe shutting down normally - wait for the
owner to ask for the last outputs
continue buffering, unless we've been waiting to reply; stop if we
were waiting to reply and we've received eoi
internal
also clears buffers
results are kept as lists in a proplist
transform the proplist buffers into orddicts time-ordered
choose buffer size, given Options, app env, default
TEST
start up our sink
fill its buffer
ensure extra result will block
now drain what's there
make sure that all results were received, including
make sure that the delayed ack was received | Copyright ( c ) 2012 Basho Technologies , Inc. All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
@doc This FSM acts as a Riak Pipe sink , and dumbly accumulates
its owner . The owner is whatever process started this FSM .
This FSM will speak both ` raw ' and ` fsm ' sink types ( it
The FSM enforces a soft cap on the number of results and logs
accumulated when receiving ` fsm ' sink type messages . When the
number of results+logs that have been delivered exceeds the cap
between calls to { @link next/1 } , the sink stops delivering result
( currently 1000 ) is used .
There should be three states : ` which_pipe ' , ` collect_output ' , and
The FSM starts in ` which_pipe ' , and waits there until it
From ` which_pipe ' , the FSM moves to ` collect_output ' . While in
` collect_output ' , the FSM simply collects ` # pipe_log { } ' ,
If the FSM has received logs , results , or the eoi before it
If the FSM has not received any logs , results , or the eoi before it
as the FSM receives any log , result , or eoi message in the
The FSM only exits on its own in three cases . The first is when its
owner exits . The second is when the builder of the pipe for which
it is consuming messages exits abnormally . The third is after it
-module(riak_kv_mrc_sink).
-export([
start/2,
start_link/2,
use_pipe/2,
next/1,
stop/1,
merge_outputs/1,
init/1,
which_pipe/2, which_pipe/3,
collect_output/2, collect_output/3,
send_output/2, send_output/3,
handle_event/3,
handle_sync_event/4,
handle_info/3,
terminate/3,
code_change/4
]).
-behaviour(gen_fsm).
-compile({nowarn_deprecated_function,
[{gen_fsm, start_link, 3},
{gen_fsm, send_event, 2},
{gen_fsm, sync_send_event, 2},
{gen_fsm, sync_send_event, 3},
{gen_fsm, reply, 2}]}).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-include_lib("riak_pipe/include/riak_pipe.hrl").
-include("riak_kv_mrc_sink.hrl").
-define(BUFFER_SIZE_DEFAULT, 1000).
-record(state, {
owner :: pid(),
builder :: pid() | undefined,
ref :: reference() | undefined,
results=[] :: [{PhaseId::term(), Results::list()}],
delayed_acks=[] :: list(),
logs=[] :: list(),
done=false :: boolean(),
buffer_max :: integer(),
buffer_left :: integer()
}).
start(OwnerPid, Options) ->
riak_kv_mrc_sink_sup:start_sink(OwnerPid, Options).
start_link(OwnerPid, Options) ->
gen_fsm:start_link(?MODULE, [OwnerPid, Options], []).
use_pipe(Sink, Pipe) ->
gen_fsm:sync_send_event(Sink, {use_pipe, Pipe}).
next(Sink) ->
gen_fsm:send_event(Sink, next).
stop(Sink) ->
riak_kv_mrc_sink_sup:terminate_sink(Sink).
with the first being the most recently received , merge them into
one orddict .
That is , for one keep , our input should look like :
-spec merge_outputs([ [{integer(), list()}] ]) -> [{integer(), list()}].
merge_outputs(Acc) ->
iterating from newest->oldest overall , we can just tack the
DM = fun(_K, O, A) -> O++A end,
lists:foldl(fun(O, A) -> orddict:merge(DM, O, A) end, [], Acc).
init([OwnerPid, Options]) ->
erlang:monitor(process, OwnerPid),
Buffer = buffer_size(Options),
{ok, which_pipe, #state{owner=OwnerPid,
buffer_max=Buffer,
buffer_left=Buffer}}.
which_pipe(_, State) ->
{next_state, which_pipe, State}.
which_pipe({use_pipe, #pipe{builder=Builder, sink=Sink}}, _From, State) ->
erlang:monitor(process, Builder),
{reply, ok, collect_output,
State#state{builder=Builder, ref=Sink#fitting.ref}};
which_pipe(_, _, State) ->
{next_state, which_pipe, State}.
collect_output(next, State) ->
case State#state.done of
true ->
NewState = send_to_owner(State),
{stop, normal, NewState};
false ->
case has_output(State) of
true ->
NewState = send_to_owner(State),
{next_state, collect_output, NewState};
false ->
{next_state, send_output, State}
end
end;
collect_output(#pipe_result{ref=Ref, from=PhaseId, result=Res},
#state{ref=Ref, results=Acc, buffer_left=Left}=State) ->
NewAcc = add_result(PhaseId, Res, Acc),
{next_state, collect_output,
State#state{results=NewAcc, buffer_left=Left-1}};
collect_output(#pipe_log{ref=Ref, from=PhaseId, msg=Msg},
#state{ref=Ref, logs=Acc, buffer_left=Left}=State) ->
{next_state, collect_output,
State#state{logs=[{PhaseId, Msg}|Acc], buffer_left=Left-1}};
collect_output(#pipe_eoi{ref=Ref}, #state{ref=Ref}=State) ->
{next_state, collect_output, State#state{done=true}};
collect_output(_, State) ->
{next_state, collect_output, State}.
collect_output(#pipe_result{ref=Ref, from=PhaseId, result=Res},
From,
#state{ref=Ref, results=Acc}=State) ->
NewAcc = add_result(PhaseId, Res, Acc),
maybe_ack(From, State#state{results=NewAcc});
collect_output(#pipe_log{ref=Ref, from=PhaseId, msg=Msg},
From,
#state{ref=Ref, logs=Acc}=State) ->
maybe_ack(From, State#state{logs=[{PhaseId, Msg}|Acc]});
collect_output(#pipe_eoi{ref=Ref}, _From, #state{ref=Ref}=State) ->
{reply, ok, collect_output, State#state{done=true}};
collect_output(_, _, State) ->
{next_state, collect_output, State}.
maybe_ack(_From, #state{buffer_left=Left}=State) when Left > 0 ->
{reply, ok, collect_output, State#state{buffer_left=Left-1}};
maybe_ack(From, #state{buffer_left=Left, delayed_acks=Delayed}=State) ->
{next_state, collect_output,
State#state{buffer_left=Left-1, delayed_acks=[From|Delayed]}}.
send_output(#pipe_result{ref=Ref, from=PhaseId, result=Res},
#state{ref=Ref, results=Acc}=State) ->
NewAcc = add_result(PhaseId, Res, Acc),
NewState = send_to_owner(State#state{results=NewAcc}),
{next_state, collect_output, NewState};
send_output(#pipe_log{ref=Ref, from=PhaseId, msg=Msg},
#state{ref=Ref, logs=Acc}=State) ->
NewState = send_to_owner(State#state{logs=[{PhaseId, Msg}|Acc]}),
{next_state, collect_output, NewState};
send_output(#pipe_eoi{ref=Ref}, #state{ref=Ref}=State) ->
NewState = send_to_owner(State#state{done=true}),
{stop, normal, NewState};
send_output(_, State) ->
{next_state, send_output, State}.
send_output(#pipe_result{ref=Ref, from=PhaseId, result=Res},
_From, #state{ref=Ref, results=Acc}=State) ->
NewAcc = add_result(PhaseId, Res, Acc),
NewState = send_to_owner(State#state{results=NewAcc}),
{reply, ok, collect_output, NewState};
send_output(#pipe_log{ref=Ref, from=PhaseId, msg=Msg},
_From, #state{ref=Ref, logs=Acc}=State) ->
NewState = send_to_owner(State#state{logs=[{PhaseId, Msg}|Acc]}),
{reply, ok, collect_output, NewState};
send_output(#pipe_eoi{ref=Ref}, _From, #state{ref=Ref}=State) ->
NewState = send_to_owner(State#state{done=true}),
{stop, normal, ok, NewState};
send_output(_, _, State) ->
{next_state, send_output, State}.
handle_event(_, StateName, State) ->
{next_state, StateName, State}.
handle_sync_event(_, _, StateName, State) ->
{next_state, StateName, State}.
Clusters containing nodes running version 1.2 and previous
handle_info(#pipe_result{ref=Ref, from=PhaseId, result=Res},
StateName,
#state{ref=Ref, results=Acc, buffer_left=Left}=State) ->
NewAcc = add_result(PhaseId, Res, Acc),
info_response(StateName,
State#state{results=NewAcc, buffer_left=Left-1});
handle_info(#pipe_log{ref=Ref, from=PhaseId, msg=Msg},
StateName,
#state{ref=Ref, logs=Acc, buffer_left=Left}=State) ->
info_response(StateName,
State#state{logs=[{PhaseId, Msg}|Acc],
buffer_left=Left-1});
handle_info(#pipe_eoi{ref=Ref},
StateName, #state{ref=Ref}=State) ->
info_response(StateName, State#state{done=true});
handle_info({'DOWN', _, process, Pid, _Reason}, _,
#state{owner=Pid}=State) ->
{stop, normal, State};
handle_info({'DOWN', _, process, Pid, Reason}, _,
#state{builder=Pid}=State) when Reason /= normal ->
_ = lager:warning("Pipe builder down. Reason: ~p", [Reason]),
{stop, normal, State};
handle_info(_, StateName, State) ->
{next_state, StateName, State}.
info_response(collect_output, State) ->
{next_state, collect_output, State};
info_response(send_output, #state{done=Done}=State) ->
NewState = send_to_owner(State),
if Done -> {stop, normal, NewState};
true -> {next_state, collect_output, NewState}
end.
terminate(_, _, _) ->
ok.
code_change(_, StateName, State, _) ->
{ok, StateName, State}.
has_output(#state{results=[], logs=[]}) ->
false;
has_output(_) ->
true.
send_to_owner(#state{owner=Owner, ref=Ref,
results=Results, logs=Logs, done=Done,
buffer_max=Max, delayed_acks=Delayed}=State) ->
Owner ! #kv_mrc_sink{ref=Ref,
results=finish_results(Results),
logs=lists:reverse(Logs),
done=Done},
_ = [ gen_fsm:reply(From, ok) || From <- Delayed ],
State#state{results=[], logs=[],
buffer_left=Max, delayed_acks=[]}.
add_result(PhaseId, Result, Acc) ->
case lists:keytake(PhaseId, 1, Acc) of
{value, {PhaseId, IAcc}, RAcc} ->
[{PhaseId,[Result|IAcc]}|RAcc];
false ->
[{PhaseId,[Result]}|Acc]
end.
finish_results(Results) ->
[{I, lists:reverse(R)} || {I, R} <- lists:keysort(1, Results)].
-spec buffer_size(list()) -> non_neg_integer().
buffer_size(Options) ->
case buffer_size_options(Options) of
{ok, Size} -> Size;
false ->
case buffer_size_app_env() of
{ok, Size} -> Size;
false ->
?BUFFER_SIZE_DEFAULT
end
end.
-spec buffer_size_options(list()) -> {ok, non_neg_integer()} | false.
buffer_size_options(Options) ->
case lists:keyfind(buffer, 1, Options) of
{buffer, Size} when is_integer(Size), Size >= 0 ->
{ok, Size};
_ ->
false
end.
-spec buffer_size_app_env() -> {ok, non_neg_integer()} | false.
buffer_size_app_env() ->
case application:get_env(riak_kv, mrc_sink_buffer) of
{ok, Size} when is_integer(Size), Size >= 0 ->
{ok, Size};
_ ->
false
end.
-ifdef(TEST).
buffer_size_test_() ->
Tests = [ {"buffer option", 5, [{buffer, 5}], []},
{"buffer app env", 5, [], [{mrc_sink_buffer, 5}]},
{"buffer default", ?BUFFER_SIZE_DEFAULT, [], []} ],
FillFuns = [ {"send_event", fun gen_fsm:send_event/2},
{"sync_send_event", fun gen_fsm:sync_send_event/2},
{"erlang:send", fun(S, R) -> S ! R, ok end} ],
{foreach,
fun() -> application:load(riak_kv) end,
fun(_) -> application:unload(riak_kv) end,
[buffer_size_test_helper(Name, FillFun, Size, Options, AppEnv)
|| {Name, Size, Options, AppEnv} <- Tests,
FillFun <- FillFuns]}.
buffer_size_test_helper(Name, {FillName, FillFun}, Size, Options, AppEnv) ->
{Name++" "++FillName,
fun() ->
application:load(riak_kv),
[ application:set_env(riak_kv, K, V) || {K, V} <- AppEnv ],
{ok, Sink} = ?MODULE:start_link(self(), Options),
Ref = make_ref(),
Pipe = #pipe{builder=self(),
sink=#fitting{pid=Sink, ref=Ref}},
?MODULE:use_pipe(Sink, Pipe),
[ ok = FillFun(
Sink,
#pipe_result{from=tester, ref=Ref, result=I})
|| I <- lists:seq(1, Size) ],
{'EXIT',{timeout,{gen_fsm,sync_send_event,_}}} =
(catch gen_fsm:sync_send_event(
Sink,
#pipe_result{from=tester, ref=Ref, result=Size+1},
1000)),
?MODULE:next(Sink),
blocked one
receive
#kv_mrc_sink{ref=Ref, results=[{tester,R}]} ->
?assertEqual(Size+1, length(R))
end,
receive
{GenFsmRef, ok} when is_reference(GenFsmRef) ->
ok
end
end}.
-endif.
|
facf57eb9a6db20f831b68e1f04a46b96a181fa9c1f82878f13dbb193b708991 | cfpb/qu | cache.clj | (ns qu.test.cache
(:refer-clojure :exclude [sort])
(:require [clojure.test :refer :all]
[qu.query :as q]
[qu.cache :refer :all]))
(deftest test-query-to-key
(let [query1 (q/map->Query {:select "state_id, county_id, MAX(tax_returns)" :group "state_id, county_id" :metadata {:database "test"} :slice "test"})
query2 (q/map->Query {:select "state_id,county_id,MAX(tax_returns)" :group "state_id,county_id" :metadata {:database "test"} :slice "test"})]
(testing "it eliminates space in the SELECT and GROUP BY fields"
(is (= (query-to-key query1) (query-to-key query2))))
(testing "different WHERE queries make different keys"
(is (not (= (query-to-key (assoc query1 :where "state_id = 1"))
(query-to-key (assoc query1 :where "state_id = 2"))))))
(testing "different ORDER BY queries make the same key"
(is (= (query-to-key (assoc query1 :orderBy "state_id"))
(query-to-key (assoc query1 :orderBy "max_tax_returns")))))
(testing "different LIMIT and OFFSET queries make the same key"
(is (= (query-to-key (assoc query1 :limit 10))
(query-to-key (assoc query1 :limit 20 :offset 10)))))))
;; (run-tests)
| null | https://raw.githubusercontent.com/cfpb/qu/f460d9ab2f05ac22f6d68a98a9641daf0f7c7ba4/test/qu/test/cache.clj | clojure | (run-tests) | (ns qu.test.cache
(:refer-clojure :exclude [sort])
(:require [clojure.test :refer :all]
[qu.query :as q]
[qu.cache :refer :all]))
(deftest test-query-to-key
(let [query1 (q/map->Query {:select "state_id, county_id, MAX(tax_returns)" :group "state_id, county_id" :metadata {:database "test"} :slice "test"})
query2 (q/map->Query {:select "state_id,county_id,MAX(tax_returns)" :group "state_id,county_id" :metadata {:database "test"} :slice "test"})]
(testing "it eliminates space in the SELECT and GROUP BY fields"
(is (= (query-to-key query1) (query-to-key query2))))
(testing "different WHERE queries make different keys"
(is (not (= (query-to-key (assoc query1 :where "state_id = 1"))
(query-to-key (assoc query1 :where "state_id = 2"))))))
(testing "different ORDER BY queries make the same key"
(is (= (query-to-key (assoc query1 :orderBy "state_id"))
(query-to-key (assoc query1 :orderBy "max_tax_returns")))))
(testing "different LIMIT and OFFSET queries make the same key"
(is (= (query-to-key (assoc query1 :limit 10))
(query-to-key (assoc query1 :limit 20 :offset 10)))))))
|
e18a56ad478ebb52be4b1d513bdd048de6c7e4c002d6bb562944c7dcd1fa4f80 | JoelSanchez/ventas | images.clj | (ns ventas.utils.images
(:require
[clojure.java.io :as io]
[slingshot.slingshot :refer [throw+]]
[ventas.utils.files :as utils.files])
(:import [javax.imageio ImageIO]
[net.coobird.thumbnailator Thumbnails]
[java.awt.image BufferedImage]
[net.coobird.thumbnailator.resizers.configurations ScalingMode]))
(defn path-with-metadata [path options]
(str (utils.files/basename path)
"-" (hash options)
"." (utils.files/extension path)))
(defn- portrait? [relation]
(< relation 1))
(defn- landscape? [relation]
(<= 1 relation))
;; these functions make this way easier to test
(defn- source-region [builder x y w h]
(.sourceRegion builder x y w h))
(defn- scale-to [builder factor]
(.scale builder factor))
(defn- output-quality [builder quality]
(.outputQuality builder quality))
(defn- scale-dimensions* [scale {:keys [width height]}]
{:width (* 1.0 scale width)
:height (* 1.0 scale height)})
(defn- scale-dimensions [source target]
"Ensures that the target width and height are not higher than their source counterparts"
(->> target
(scale-dimensions* (min 1 (/ (:width source) (:width target))))
(scale-dimensions* (min 1 (/ (:height source) (:height target))))))
(defn- adapt-dimensions-to-relation* [{:keys [width height] :as source} target-relation]
(scale-dimensions
source
(if (landscape? target-relation)
{:width (* height target-relation)
:height height}
{:width width
:height (/ width target-relation)})))
(defn- adapt-dimensions-to-relation [{:keys [width height target-relation]}]
"Returns a new width and height that matches the given target relation, using the maximum
available space within the given width and height"
(let [source-relation (/ width height)]
(if (or (= 1 target-relation)
(and (landscape? target-relation) (landscape? source-relation))
(and (portrait? target-relation) (portrait? source-relation)))
(adapt-dimensions-to-relation* {:width width :height height} target-relation)
(let [{:keys [width height]} (adapt-dimensions-to-relation
{:width width
:height height
:target-relation 1})]
(adapt-dimensions-to-relation* {:width width :height height} target-relation)))))
(defn- crop-image [builder metadata {:keys [offset size relation]}]
(if relation
(let [relation (* 1.0 relation)
{:keys [width height]} (adapt-dimensions-to-relation
{:width (:width metadata)
:height (:height metadata)
:target-relation relation})]
(recur builder metadata {:offset [(- (/ (:width metadata) 2.0) (/ width 2.0))
(- (/ (:height metadata) 2.0) (/ height 2.0))]
:size [width height]}))
(source-region builder
(first offset)
(second offset)
(first size)
(second size))))
(defn resize-image [builder metadata {:keys [width height allow-smaller?]}]
(if (and allow-smaller? (< (:width metadata) width) (< (:height metadata) height))
builder
(let [width-scale (/ width (:width metadata))
height-scale (/ height (:height metadata))]
(scale-to builder (min width-scale height-scale)))))
(defn transform-image* [source-path target-path {:keys [resize scale crop quality]}]
(when-not (.exists (io/file source-path))
(throw+ {:type ::file-not-found
:path source-path}))
(let [^BufferedImage buffered-image (ImageIO/read (io/file source-path))
metadata {:width (.getWidth buffered-image)
:height (.getHeight buffered-image)}]
(-> [buffered-image]
into-array
Thumbnails/of
(.scalingMode ScalingMode/PROGRESSIVE_BILINEAR)
(cond-> crop (crop-image metadata crop)
scale (scale-to scale)
resize (resize-image metadata resize)
quality (output-quality (double quality))
(and (not scale) (not resize)) (scale-to 1))
(.toFile (io/file target-path)))))
(defn transform-image [source-path target-dir & [options]]
(let [target-dir (or target-dir (utils.files/get-tmp-dir))
target-filename (path-with-metadata source-path options)
target-path (str target-dir "/" target-filename)]
(when (and (:scale options) (or (get-in options [:resize :width])
(get-in options [:resize :height])))
(throw+ {:type ::inconsistent-parameters
:message "Setting both :scale and :width/:height is forbidden"}))
(io/make-parents target-path)
(transform-image* source-path target-path options)
target-path))
| null | https://raw.githubusercontent.com/JoelSanchez/ventas/dc8fc8ff9f63dfc8558ecdaacfc4983903b8e9a1/src/clj/ventas/utils/images.clj | clojure | these functions make this way easier to test | (ns ventas.utils.images
(:require
[clojure.java.io :as io]
[slingshot.slingshot :refer [throw+]]
[ventas.utils.files :as utils.files])
(:import [javax.imageio ImageIO]
[net.coobird.thumbnailator Thumbnails]
[java.awt.image BufferedImage]
[net.coobird.thumbnailator.resizers.configurations ScalingMode]))
(defn path-with-metadata [path options]
(str (utils.files/basename path)
"-" (hash options)
"." (utils.files/extension path)))
(defn- portrait? [relation]
(< relation 1))
(defn- landscape? [relation]
(<= 1 relation))
(defn- source-region [builder x y w h]
(.sourceRegion builder x y w h))
(defn- scale-to [builder factor]
(.scale builder factor))
(defn- output-quality [builder quality]
(.outputQuality builder quality))
(defn- scale-dimensions* [scale {:keys [width height]}]
{:width (* 1.0 scale width)
:height (* 1.0 scale height)})
(defn- scale-dimensions [source target]
"Ensures that the target width and height are not higher than their source counterparts"
(->> target
(scale-dimensions* (min 1 (/ (:width source) (:width target))))
(scale-dimensions* (min 1 (/ (:height source) (:height target))))))
(defn- adapt-dimensions-to-relation* [{:keys [width height] :as source} target-relation]
(scale-dimensions
source
(if (landscape? target-relation)
{:width (* height target-relation)
:height height}
{:width width
:height (/ width target-relation)})))
(defn- adapt-dimensions-to-relation [{:keys [width height target-relation]}]
"Returns a new width and height that matches the given target relation, using the maximum
available space within the given width and height"
(let [source-relation (/ width height)]
(if (or (= 1 target-relation)
(and (landscape? target-relation) (landscape? source-relation))
(and (portrait? target-relation) (portrait? source-relation)))
(adapt-dimensions-to-relation* {:width width :height height} target-relation)
(let [{:keys [width height]} (adapt-dimensions-to-relation
{:width width
:height height
:target-relation 1})]
(adapt-dimensions-to-relation* {:width width :height height} target-relation)))))
(defn- crop-image [builder metadata {:keys [offset size relation]}]
(if relation
(let [relation (* 1.0 relation)
{:keys [width height]} (adapt-dimensions-to-relation
{:width (:width metadata)
:height (:height metadata)
:target-relation relation})]
(recur builder metadata {:offset [(- (/ (:width metadata) 2.0) (/ width 2.0))
(- (/ (:height metadata) 2.0) (/ height 2.0))]
:size [width height]}))
(source-region builder
(first offset)
(second offset)
(first size)
(second size))))
(defn resize-image [builder metadata {:keys [width height allow-smaller?]}]
(if (and allow-smaller? (< (:width metadata) width) (< (:height metadata) height))
builder
(let [width-scale (/ width (:width metadata))
height-scale (/ height (:height metadata))]
(scale-to builder (min width-scale height-scale)))))
(defn transform-image* [source-path target-path {:keys [resize scale crop quality]}]
(when-not (.exists (io/file source-path))
(throw+ {:type ::file-not-found
:path source-path}))
(let [^BufferedImage buffered-image (ImageIO/read (io/file source-path))
metadata {:width (.getWidth buffered-image)
:height (.getHeight buffered-image)}]
(-> [buffered-image]
into-array
Thumbnails/of
(.scalingMode ScalingMode/PROGRESSIVE_BILINEAR)
(cond-> crop (crop-image metadata crop)
scale (scale-to scale)
resize (resize-image metadata resize)
quality (output-quality (double quality))
(and (not scale) (not resize)) (scale-to 1))
(.toFile (io/file target-path)))))
(defn transform-image [source-path target-dir & [options]]
(let [target-dir (or target-dir (utils.files/get-tmp-dir))
target-filename (path-with-metadata source-path options)
target-path (str target-dir "/" target-filename)]
(when (and (:scale options) (or (get-in options [:resize :width])
(get-in options [:resize :height])))
(throw+ {:type ::inconsistent-parameters
:message "Setting both :scale and :width/:height is forbidden"}))
(io/make-parents target-path)
(transform-image* source-path target-path options)
target-path))
|
948e7b5ebb9a7f256e69d307100158de3efa96aba008faa94ed3f71161634926 | yesodweb/persistent | Types.hs | module Database.Persist.Sql.Types
( module Database.Persist.Sql.Types
, SqlBackend, SqlReadBackend (..), SqlWriteBackend (..)
, Statement (..), LogFunc, InsertSqlResult (..)
, readToUnknown, readToWrite, writeToUnknown
, SqlBackendCanRead, SqlBackendCanWrite, SqlReadT, SqlWriteT, IsSqlBackend
, OverflowNatural(..)
, ConnectionPoolConfig(..)
) where
import Control.Exception (Exception(..))
import Control.Monad.Logger (NoLoggingT)
import Control.Monad.Trans.Reader (ReaderT(..))
import Control.Monad.Trans.Resource (ResourceT)
import Data.Pool (Pool)
import Data.Text (Text)
import Data.Time (NominalDiffTime)
import Database.Persist.Sql.Types.Internal
import Database.Persist.Types
data Column = Column
{ cName :: !FieldNameDB
, cNull :: !Bool
, cSqlType :: !SqlType
, cDefault :: !(Maybe Text)
, cGenerated :: !(Maybe Text)
, cDefaultConstraintName :: !(Maybe ConstraintNameDB)
, cMaxLen :: !(Maybe Integer)
, cReference :: !(Maybe ColumnReference)
}
deriving (Eq, Ord, Show)
-- | This value specifies how a field references another table.
--
-- @since 2.11.0.0
data ColumnReference = ColumnReference
{ crTableName :: !EntityNameDB
-- ^ The table name that the
--
-- @since 2.11.0.0
, crConstraintName :: !ConstraintNameDB
-- ^ The name of the foreign key constraint.
--
-- @since 2.11.0.0
, crFieldCascade :: !FieldCascade
-- ^ Whether or not updates/deletions to the referenced table cascade
-- to this table.
--
-- @since 2.11.0.0
}
deriving (Eq, Ord, Show)
data PersistentSqlException = StatementAlreadyFinalized Text
| Couldn'tGetSQLConnection
deriving Show
instance Exception PersistentSqlException
type SqlPersistT = ReaderT SqlBackend
type SqlPersistM = SqlPersistT (NoLoggingT (ResourceT IO))
type ConnectionPool = Pool SqlBackend
-- | Values to configure a pool of database connections. See "Data.Pool" for details.
--
-- @since 2.11.0.0
data ConnectionPoolConfig = ConnectionPoolConfig
^ How many stripes to divide the pool into . See " Data . Pool " for details . Default : 1 .
^ How long connections can remain idle before being disposed of , in seconds . Default : 600
^ How many connections should be held in the connection pool . Default : 10
}
deriving (Show)
TODO : Bad defaults for SQLite maybe ?
-- | Initializes a ConnectionPoolConfig with default values. See the documentation of 'ConnectionPoolConfig' for each field's default value.
--
-- @since 2.11.0.0
defaultConnectionPoolConfig :: ConnectionPoolConfig
defaultConnectionPoolConfig = ConnectionPoolConfig 1 600 10
-- $rawSql
--
Although it covers most of the useful cases , @persistent@ 's
API may not be enough for some of your tasks . May be you need
-- some complex @JOIN@ query, or a database-specific command
-- needs to be issued.
--
-- To issue raw SQL queries, use 'rawSql'. It does all the hard work of
-- automatically parsing the rows of the result. It may return:
--
-- * An 'Entity', that which 'selectList' returns.
-- All of your entity's fields are
-- automatically parsed.
--
* A @'Single ' a@ , which is a single , raw column of type
You may use a type ( such as in your entity
definitions ) , for example Text@ or ,
or you may get the raw column value with
-- 'PersistValue'@.
--
-- * A tuple combining any of these (including other tuples).
Using tuples allows you to return many entities in one
-- query.
--
-- The only difference between issuing SQL queries with 'rawSql'
-- and using other means is that we have an /entity selection/
-- /placeholder/, the double question mark @??@. It /must/ be
-- used whenever you want to @SELECT@ an 'Entity' from your
-- query. Here's a sample SQL query @sampleStmt@ that may be
-- issued:
--
-- @
-- SELECT ??, ??
FROM \"Person\ " , \"Likes\ " , \"Object\ "
-- WHERE \"Person\".id = \"Likes\".\"personId\"
AND \"Object\".id = \"Likes\".\"objectId\ "
-- AND \"Person\".name LIKE ?
-- @
--
-- To use that query, you could say
--
-- @
-- do results <- 'rawSql' sampleStmt [\"%Luke%\"]
-- forM_ results $
\\ ( Entity personKey person
-- , Entity objectKey object
-- ) -> do ...
-- @
--
-- Note that 'rawSql' knows how to replace the double question
-- marks @??@ because of the type of the @results@.
| A single column ( see ' rawSql ' ) . Any ' PersistField ' may be
used here , including ' PersistValue ' ( which does not do any
-- processing).
newtype Single a = Single {unSingle :: a}
deriving (Eq, Ord, Show, Read)
| null | https://raw.githubusercontent.com/yesodweb/persistent/d7a67f0fea5e07f6f6562a54c0838de23c51d387/persistent/Database/Persist/Sql/Types.hs | haskell | | This value specifies how a field references another table.
@since 2.11.0.0
^ The table name that the
@since 2.11.0.0
^ The name of the foreign key constraint.
@since 2.11.0.0
^ Whether or not updates/deletions to the referenced table cascade
to this table.
@since 2.11.0.0
| Values to configure a pool of database connections. See "Data.Pool" for details.
@since 2.11.0.0
| Initializes a ConnectionPoolConfig with default values. See the documentation of 'ConnectionPoolConfig' for each field's default value.
@since 2.11.0.0
$rawSql
some complex @JOIN@ query, or a database-specific command
needs to be issued.
To issue raw SQL queries, use 'rawSql'. It does all the hard work of
automatically parsing the rows of the result. It may return:
* An 'Entity', that which 'selectList' returns.
All of your entity's fields are
automatically parsed.
'PersistValue'@.
* A tuple combining any of these (including other tuples).
query.
The only difference between issuing SQL queries with 'rawSql'
and using other means is that we have an /entity selection/
/placeholder/, the double question mark @??@. It /must/ be
used whenever you want to @SELECT@ an 'Entity' from your
query. Here's a sample SQL query @sampleStmt@ that may be
issued:
@
SELECT ??, ??
WHERE \"Person\".id = \"Likes\".\"personId\"
AND \"Person\".name LIKE ?
@
To use that query, you could say
@
do results <- 'rawSql' sampleStmt [\"%Luke%\"]
forM_ results $
, Entity objectKey object
) -> do ...
@
Note that 'rawSql' knows how to replace the double question
marks @??@ because of the type of the @results@.
processing). | module Database.Persist.Sql.Types
( module Database.Persist.Sql.Types
, SqlBackend, SqlReadBackend (..), SqlWriteBackend (..)
, Statement (..), LogFunc, InsertSqlResult (..)
, readToUnknown, readToWrite, writeToUnknown
, SqlBackendCanRead, SqlBackendCanWrite, SqlReadT, SqlWriteT, IsSqlBackend
, OverflowNatural(..)
, ConnectionPoolConfig(..)
) where
import Control.Exception (Exception(..))
import Control.Monad.Logger (NoLoggingT)
import Control.Monad.Trans.Reader (ReaderT(..))
import Control.Monad.Trans.Resource (ResourceT)
import Data.Pool (Pool)
import Data.Text (Text)
import Data.Time (NominalDiffTime)
import Database.Persist.Sql.Types.Internal
import Database.Persist.Types
data Column = Column
{ cName :: !FieldNameDB
, cNull :: !Bool
, cSqlType :: !SqlType
, cDefault :: !(Maybe Text)
, cGenerated :: !(Maybe Text)
, cDefaultConstraintName :: !(Maybe ConstraintNameDB)
, cMaxLen :: !(Maybe Integer)
, cReference :: !(Maybe ColumnReference)
}
deriving (Eq, Ord, Show)
data ColumnReference = ColumnReference
{ crTableName :: !EntityNameDB
, crConstraintName :: !ConstraintNameDB
, crFieldCascade :: !FieldCascade
}
deriving (Eq, Ord, Show)
data PersistentSqlException = StatementAlreadyFinalized Text
| Couldn'tGetSQLConnection
deriving Show
instance Exception PersistentSqlException
type SqlPersistT = ReaderT SqlBackend
type SqlPersistM = SqlPersistT (NoLoggingT (ResourceT IO))
type ConnectionPool = Pool SqlBackend
data ConnectionPoolConfig = ConnectionPoolConfig
^ How many stripes to divide the pool into . See " Data . Pool " for details . Default : 1 .
^ How long connections can remain idle before being disposed of , in seconds . Default : 600
^ How many connections should be held in the connection pool . Default : 10
}
deriving (Show)
TODO : Bad defaults for SQLite maybe ?
defaultConnectionPoolConfig :: ConnectionPoolConfig
defaultConnectionPoolConfig = ConnectionPoolConfig 1 600 10
Although it covers most of the useful cases , @persistent@ 's
API may not be enough for some of your tasks . May be you need
* A @'Single ' a@ , which is a single , raw column of type
You may use a type ( such as in your entity
definitions ) , for example Text@ or ,
or you may get the raw column value with
Using tuples allows you to return many entities in one
FROM \"Person\ " , \"Likes\ " , \"Object\ "
AND \"Object\".id = \"Likes\".\"objectId\ "
\\ ( Entity personKey person
| A single column ( see ' rawSql ' ) . Any ' PersistField ' may be
used here , including ' PersistValue ' ( which does not do any
newtype Single a = Single {unSingle :: a}
deriving (Eq, Ord, Show, Read)
|
02627240505840662ab26005a90c73cd6fe60735f2865c959d9d1986395720a3 | rudymatela/express | listable.hs | Copyright ( c ) 2017 - 2021 .
-- Distributed under the 3-Clause BSD licence (see the file LICENSE).
import Test
main :: IO ()
main = mainTest tests 5040
tests :: Int -> [Bool]
tests n =
[ True
only produces well - typed expressions
, holds n $ isJust . toDynamic
, holds n $ isJust . mtyp
only produces ill - typed expressions
, holds n $ isNothing . toDynamic . unIll
, holds n $ isNothing . mtyp . unIll
Listable TypeE produces expressions of the right type ( evaluation )
, holds n $ isJust . evaluateInt . unIntE
, holds n $ isJust . evaluateBool . unBoolE
, holds n $ isJust . evaluateInts . unIntsE
, holds n $ isJust . evaluateIntToInt . unIntToIntE
, holds n $ isJust . evaluateChar . unCharE
, holds n $ \(IntToIntE ff) (IntE xx) -> isJust . evaluateInt $ ff :$ xx
, holds n $ \(IntToIntToIntE ff) (IntE xx) (IntE yy) -> isJust . evaluateInt $ ff :$ xx :$ yy
Listable TypeE produces expressions of the right type ( typ )
, holds n $ \(SameTypeE e1 e2) -> typ e1 == typ e2
, holds n $ \(SameTypedPairsE ees) -> all (\(e1,e2) -> typ e1 == typ e2) ees
, holds n $ \(IntE e) -> typ e == typ i_
, holds n $ \(BoolE e) -> typ e == typ b_
, holds n $ \(CharE e) -> typ e == typ c_
, holds n $ \(IntsE e) -> typ e == typ is_
Listable TypeE does not produce expressions of the wrong type
, holds n $ isNothing . evaluateInt . unBoolE
, holds n $ isNothing . evaluateBool . unIntE
, holds n $ isNothing . evaluateInts . unIntE
, holds n $ isNothing . evaluateIntToInt . unIntE
, holds n $ isNothing . evaluateChar . unIntE
Listable TypeE0 only returns terminal constants
, holds n $ isConst . unE0
, holds n $ isConst . unIntE0
, holds n $ isConst . unBoolE0
, holds n $ isConst . unIntsE0
, holds n $ isConst . unCharE0
Listable TypeEV only returns variables
, holds n $ isVar . unEV
, holds n $ isVar . unIntEV
, holds n $ isVar . unBoolEV
, holds n $ isVar . unIntsEV
, holds n $ isVar . unCharEV
-- counter-examples are of the right type
, (counterExample n $ \(IntE xx) -> False) == Just ["_ :: Int"]
, isNub (take (n`div`10) list :: [Expr])
, isNub (take (n`div`10) $ map unSameTypeE list)
, isNub (take (n`div`10) $ map unIntE list)
]
evaluateInt :: Expr -> Maybe Int
evaluateInt = evaluate
evaluateBool :: Expr -> Maybe Bool
evaluateBool = evaluate
evaluateInts :: Expr -> Maybe [Int]
evaluateInts = evaluate
evaluateIntToInt :: Expr -> Maybe (Int -> Int)
evaluateIntToInt = evaluate
evaluateChar :: Expr -> Maybe Char
evaluateChar = evaluate
| null | https://raw.githubusercontent.com/rudymatela/express/24193a8ea5e238404808a8ef196b0973d0383c21/test/listable.hs | haskell | Distributed under the 3-Clause BSD licence (see the file LICENSE).
counter-examples are of the right type | Copyright ( c ) 2017 - 2021 .
import Test
main :: IO ()
main = mainTest tests 5040
tests :: Int -> [Bool]
tests n =
[ True
only produces well - typed expressions
, holds n $ isJust . toDynamic
, holds n $ isJust . mtyp
only produces ill - typed expressions
, holds n $ isNothing . toDynamic . unIll
, holds n $ isNothing . mtyp . unIll
Listable TypeE produces expressions of the right type ( evaluation )
, holds n $ isJust . evaluateInt . unIntE
, holds n $ isJust . evaluateBool . unBoolE
, holds n $ isJust . evaluateInts . unIntsE
, holds n $ isJust . evaluateIntToInt . unIntToIntE
, holds n $ isJust . evaluateChar . unCharE
, holds n $ \(IntToIntE ff) (IntE xx) -> isJust . evaluateInt $ ff :$ xx
, holds n $ \(IntToIntToIntE ff) (IntE xx) (IntE yy) -> isJust . evaluateInt $ ff :$ xx :$ yy
Listable TypeE produces expressions of the right type ( typ )
, holds n $ \(SameTypeE e1 e2) -> typ e1 == typ e2
, holds n $ \(SameTypedPairsE ees) -> all (\(e1,e2) -> typ e1 == typ e2) ees
, holds n $ \(IntE e) -> typ e == typ i_
, holds n $ \(BoolE e) -> typ e == typ b_
, holds n $ \(CharE e) -> typ e == typ c_
, holds n $ \(IntsE e) -> typ e == typ is_
Listable TypeE does not produce expressions of the wrong type
, holds n $ isNothing . evaluateInt . unBoolE
, holds n $ isNothing . evaluateBool . unIntE
, holds n $ isNothing . evaluateInts . unIntE
, holds n $ isNothing . evaluateIntToInt . unIntE
, holds n $ isNothing . evaluateChar . unIntE
Listable TypeE0 only returns terminal constants
, holds n $ isConst . unE0
, holds n $ isConst . unIntE0
, holds n $ isConst . unBoolE0
, holds n $ isConst . unIntsE0
, holds n $ isConst . unCharE0
Listable TypeEV only returns variables
, holds n $ isVar . unEV
, holds n $ isVar . unIntEV
, holds n $ isVar . unBoolEV
, holds n $ isVar . unIntsEV
, holds n $ isVar . unCharEV
, (counterExample n $ \(IntE xx) -> False) == Just ["_ :: Int"]
, isNub (take (n`div`10) list :: [Expr])
, isNub (take (n`div`10) $ map unSameTypeE list)
, isNub (take (n`div`10) $ map unIntE list)
]
evaluateInt :: Expr -> Maybe Int
evaluateInt = evaluate
evaluateBool :: Expr -> Maybe Bool
evaluateBool = evaluate
evaluateInts :: Expr -> Maybe [Int]
evaluateInts = evaluate
evaluateIntToInt :: Expr -> Maybe (Int -> Int)
evaluateIntToInt = evaluate
evaluateChar :: Expr -> Maybe Char
evaluateChar = evaluate
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.